
Commit b1a8ef8

ok so we now have working nets for 0, 1, 2 hidden layers
1 parent 9d5bf99 commit b1a8ef8

File tree

4 files changed: +217 -6 lines changed

    neural_network.py
    neural_network_1_hidden.py
    neural_network_2_hidden.py
    perceptron.py

neural_network.py

Lines changed: 11 additions & 6 deletions

@@ -80,7 +80,7 @@ def feed_forward(self, inputs):
 
     # Back propagates a single example to update weights
    # correct_outputs is a numpy array of size num_outputs
-    def back_propagate(self, inputs, correct_outputs):
+    def back_propagate(self, inputs, correct_outputs, learning_rate):
         # get the outputs we currently get for that input
         (sums, activations) = self.feed_forward(inputs)
         network_outputs = activations[self.num_hidden_layers]
@@ -133,22 +133,22 @@ def back_propagate(self, inputs, correct_outputs):
         for layer in range(1, self.num_hidden_layers):
             for prev_neuron in range(self.neurons_per_layer):
                 for next_neuron in range(self.neurons_per_layer):
-                    self.weights[layer][prev_neuron, next_neuron] += activations[layer-1][prev_neuron] * deltas[layer][next_neuron] # and this
+                    self.weights[layer][prev_neuron, next_neuron] += learning_rate * activations[layer-1][prev_neuron] * deltas[layer][next_neuron] # and this
 
         # Update output weights
         for output in range(self.num_outputs):
             for prev_neuron in range(self.neurons_per_layer):
-                self.weights[self.num_hidden_layers][prev_neuron, output] += activations[self.num_hidden_layers-1][prev_neuron] * deltas[self.num_hidden_layers][output] # and this
+                self.weights[self.num_hidden_layers][prev_neuron, output] += learning_rate * activations[self.num_hidden_layers-1][prev_neuron] * deltas[self.num_hidden_layers][output] # and this
 
         # doesn't return anything (weights update in place)
 
 
     # Train a network by repeatedly backpropagating all the examples
-    def train_network(self, num_examples, inputs_list, outputs_list, num_iterations):
+    def train_network(self, num_examples, inputs_list, outputs_list, num_iterations, learning_rate):
         for i in range(num_iterations):
             for j in range(num_examples):
-                self.back_propagate(inputs_list[j], outputs_list[j])
+                self.back_propagate(inputs_list[j], outputs_list[j], learning_rate)
 
     # Convenience function for feeding forward then returning only the output
     def get_output(self, inputs):
@@ -168,4 +168,9 @@ def get_output(self, inputs):
 examples = [a, b, c, d]
 labels = [np.array([0.]), np.array([1.]), np.array([1.]), np.array([0.])]
 
-xor_net.train_network(4, examples, labels, 10000)
+xor_net.train_network(4, examples, labels, 10000, 0.3)
+
+print xor_net.get_output(a)
+print xor_net.get_output(b)
+print xor_net.get_output(c)
+print xor_net.get_output(d)
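The change threads a learning_rate through back_propagate and train_network so every weight step is scaled rather than taken at full size. As a sanity check, here is a minimal vectorized sketch of the same per-layer update (not part of the commit; update_layer is a hypothetical helper name): each weight matrix moves by learning_rate times the outer product of the previous layer's activations and the next layer's deltas.

import numpy as np

# One layer's update, equivalent to the triple loop above:
# weights[p, n] += learning_rate * prev_activations[p] * deltas[n]
def update_layer(weights, prev_activations, deltas, learning_rate):
    weights += learning_rate * np.outer(prev_activations, deltas)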

neural_network_1_hidden.py

Lines changed: 2 additions & 0 deletions

@@ -51,6 +51,7 @@ def back_propagate(self, inputs, given_outputs, learning_rate):
 
         deltas = [np.random.rand(self.num_hidden_neurons), np.random.rand(self.num_outputs)]
 
+        # back propagate deltas
         for output in range(self.num_outputs):
             deltas[1][output] = sigmoid_derivative(sums[1][output]) * (given_outputs[output] - activations[1][output])
 
@@ -60,6 +61,7 @@ def back_propagate(self, inputs, given_outputs, learning_rate):
                 sum += (self.weights[1][neuron, output] * deltas[1][output])
             deltas[0][neuron] = (sigmoid_derivative(sums[0][neuron]) * sum)
 
+        # use deltas to update weights
         for input in range(self.num_inputs + 1):
             for neuron in range(self.num_hidden_neurons):
                 self.weights[0][input, neuron] += (learning_rate * inputs[input] * deltas[0][neuron])
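The two new comments separate the textbook phases of backpropagation that this file implements: first compute each neuron's delta from the layer above, then nudge every weight along its delta. In the code's terms (sums $s$, activations $a$, targets $y$, learning rate $\eta$, where $a_i$ is the presynaptic activation, or the bias-extended input for the first layer):

$$\delta_k^{\text{out}} = \sigma'(s_k)\,(y_k - a_k), \qquad \delta_j^{\text{hid}} = \sigma'(s_j) \sum_k w_{jk}\,\delta_k^{\text{out}}, \qquad \Delta w_{ij} = \eta\, a_i\, \delta_j.$$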

neural_network_2_hidden.py

Lines changed: 114 additions & 0 deletions

@@ -0,0 +1,114 @@
# modification of neural network code to only be able to have 2 hidden layers

import numpy as np

# Sigmoid function for activation
def sigmoid(x):
    return 1/(1 + np.exp(-x))

# Derivative of sigmoid for back-propagating
# Actually has a pretty cool derivative
def sigmoid_derivative(x):
    return sigmoid(x) * (1 - sigmoid(x))

class NeuralNetwork:

    def __init__(self, num_inputs, num_hidden_first, num_hidden_second, num_outputs):
        self.num_inputs = num_inputs
        self.num_hidden_first = num_hidden_first
        self.num_hidden_second = num_hidden_second
        self.num_outputs = num_outputs

        # +1 for bias
        self.weights = [np.random.rand(self.num_inputs + 1, self.num_hidden_first), np.random.rand(self.num_hidden_first, self.num_hidden_second), np.random.rand(self.num_hidden_second, self.num_outputs)]

    def feed_forward(self, inputs):
        # add 1 to end of inputs for bias
        inputs = np.append(inputs, 1.)

        sums = [np.random.rand(self.num_hidden_first), np.random.rand(self.num_hidden_second), np.random.rand(self.num_outputs)]
        activations = [np.random.rand(self.num_hidden_first), np.random.rand(self.num_hidden_second), np.random.rand(self.num_outputs)]

        for neuron in range(self.num_hidden_first):
            sum = 0
            for input in range(self.num_inputs + 1):
                sum += (self.weights[0][input, neuron] * inputs[input])
            sums[0][neuron] = sum
            activations[0][neuron] = sigmoid(sum)

        for next_neuron in range(self.num_hidden_second):
            sum = 0
            # iterate over the first hidden layer, which feeds the second
            for prev_neuron in range(self.num_hidden_first):
                sum += (self.weights[1][prev_neuron, next_neuron] * activations[0][prev_neuron])
            sums[1][next_neuron] = sum
            activations[1][next_neuron] = sigmoid(sum)

        for output in range(self.num_outputs):
            sum = 0
            for neuron in range(self.num_hidden_second):
                sum += (self.weights[2][neuron, output] * activations[1][neuron])
            sums[2][output] = sum
            activations[2][output] = sigmoid(sum)

        return (sums, activations)

    def back_propagate(self, inputs, given_outputs, learning_rate):
        (sums, activations) = self.feed_forward(inputs)
        inputs = np.append(inputs, 1.)

        deltas = [np.random.rand(self.num_hidden_first), np.random.rand(self.num_hidden_second), np.random.rand(self.num_outputs)]

        # back propagate deltas
        for output in range(self.num_outputs):
            deltas[2][output] = sigmoid_derivative(sums[2][output]) * (given_outputs[output] - activations[2][output])

        for neuron in range(self.num_hidden_second):
            sum = 0
            for output in range(self.num_outputs):
                sum += (self.weights[2][neuron, output] * deltas[2][output])
            deltas[1][neuron] = (sigmoid_derivative(sums[1][neuron]) * sum)

        for prev_neuron in range(self.num_hidden_first):
            sum = 0
            for next_neuron in range(self.num_hidden_second):
                sum += (self.weights[1][prev_neuron, next_neuron] * deltas[1][next_neuron])
            deltas[0][prev_neuron] = (sigmoid_derivative(sums[0][prev_neuron]) * sum)

        # use deltas to update weights
        for input in range(self.num_inputs + 1):
            for neuron in range(self.num_hidden_first):
                self.weights[0][input, neuron] += (learning_rate * inputs[input] * deltas[0][neuron])

        for prev_neuron in range(self.num_hidden_first):
            for next_neuron in range(self.num_hidden_second):
                self.weights[1][prev_neuron, next_neuron] += (learning_rate * activations[0][prev_neuron] * deltas[1][next_neuron])

        for neuron in range(self.num_hidden_second):
            for output in range(self.num_outputs):
                self.weights[2][neuron, output] += (learning_rate * activations[1][neuron] * deltas[2][output])


    # Train a network by repeatedly backpropagating all the examples
    def train_network(self, num_examples, inputs_list, outputs_list, num_iterations, learning_rate):
        for i in range(num_iterations):
            for j in range(num_examples):
                self.back_propagate(inputs_list[j], outputs_list[j], learning_rate)

    def get_output(self, inputs):
        return self.feed_forward(inputs)[1][2]

# main program ##########################################################################
xor_net = NeuralNetwork(2, 3, 3, 1)
a = np.array([1.,1.])
b = np.array([1.,0.])
c = np.array([0.,1.])
d = np.array([0.,0.])
examples = [a, b, c, d]
labels = [np.array([0.]), np.array([1.]), np.array([1.]), np.array([0.])]

xor_net.train_network(4, examples, labels, 5000, 0.3)

print xor_net.get_output(a)
print xor_net.get_output(b)
print xor_net.get_output(c)
print xor_net.get_output(d)
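Read as linear algebra, feed_forward above is just three matrix products with a sigmoid after each; only the input layer carries a bias term. A minimal equivalent sketch (assuming numpy and the sigmoid defined in this file; feed_forward_vectorized is a hypothetical name, not in the commit):

def feed_forward_vectorized(weights, inputs):
    # weights is the same 3-element list as self.weights
    a0 = sigmoid(np.dot(np.append(inputs, 1.), weights[0]))  # first hidden layer (bias appended)
    a1 = sigmoid(np.dot(a0, weights[1]))                     # second hidden layer
    return sigmoid(np.dot(a1, weights[2]))                   # outputs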

perceptron.py

Lines changed: 90 additions & 0 deletions

@@ -0,0 +1,90 @@
# Perceptron (neural net with no hidden layers)
# Can't learn things that aren't linearly separable
# But tends to work well on most real life data

import numpy as np

# Sigmoid function for activation
def sigmoid(x):
    return 1/(1 + np.exp(-x))

# Derivative of sigmoid for back-propagating
# Actually has a pretty cool derivative
def sigmoid_derivative(x):
    return sigmoid(x) * (1 - sigmoid(x))

class Perceptron:

    def __init__(self, num_inputs, num_outputs):
        self.num_inputs = num_inputs
        self.num_outputs = num_outputs

        self.weights = np.random.rand(self.num_inputs + 1, self.num_outputs)

    def feed_forward(self, inputs):
        inputs = np.append(inputs, 1.) # bias
        sums = np.random.rand(self.num_outputs)
        activations = np.random.rand(self.num_outputs)

        for output in range(self.num_outputs):
            sum = 0
            for input in range(self.num_inputs + 1):
                sum += self.weights[input, output] * inputs[input]
            sums[output] = sum
            activations[output] = sigmoid(sum)

        return (sums, activations)

    def back_propagate(self, inputs, given_outputs, learning_rate):
        (sums, activations) = self.feed_forward(inputs)
        inputs = np.append(inputs, 1.)
        deltas = np.random.rand(self.num_outputs)

        for output in range(self.num_outputs):
            deltas[output] = sigmoid_derivative(sums[output]) * (given_outputs[output] - activations[output])

        for input in range(self.num_inputs + 1):
            for output in range(self.num_outputs):
                self.weights[input, output] += (learning_rate * inputs[input] * deltas[output])

    def train_network(self, num_examples, inputs_list, outputs_list, num_iterations, learning_rate):
        for i in range(num_iterations):
            for j in range(num_examples):
                self.back_propagate(inputs_list[j], outputs_list[j], learning_rate)

    def get_output(self, inputs):
        return self.feed_forward(inputs)[1]

#### Main program ####

print "doing and"
and_net = Perceptron(2, 1)
a = np.array([1.,1.])
b = np.array([1.,0.])
c = np.array([0.,1.])
d = np.array([0.,0.])
examples = [a, b, c, d]
labels = [np.array([1.]), np.array([0.]), np.array([0.]), np.array([0.])]

and_net.train_network(4, examples, labels, 5000, 0.3)

print and_net.get_output(a)
print and_net.get_output(b)
print and_net.get_output(c)
print and_net.get_output(d)

print "doing or"
or_net = Perceptron(2, 1)
a = np.array([1.,1.])
b = np.array([1.,0.])
c = np.array([0.,1.])
d = np.array([0.,0.])
examples = [a, b, c, d]
labels = [np.array([1.]), np.array([1.]), np.array([1.]), np.array([0.])]

or_net.train_network(4, examples, labels, 5000, 0.3)

print or_net.get_output(a)
print or_net.get_output(b)
print or_net.get_output(c)
print or_net.get_output(d)
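The claim in the header comment follows from the model itself: each output is sigmoid(w·x + bias), a monotone function of a single linear score, so the decision boundary in input space is one line (hyperplane). AND and OR above are separable by one line, which is why this net can learn them; XOR is not, which is what the hidden-layer versions in the other files are for. A hypothetical one-liner for reading a hard decision off a trained net (decision is not in the file):

# Threshold the sigmoid output at 0.5, i.e. the linear score at 0.
def decision(net, x):
    return 1 if net.get_output(x)[0] > 0.5 else 0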
