Skip to content

Commit d906b41

Browse files
added Adaline (adaptive linear neuron) learning using stochastic gradient descent
1 parent 052f948 commit d906b41

File tree

1 file changed

+98
-0
lines changed

1 file changed

+98
-0
lines changed

ML/adalineSGD.py

Lines changed: 98 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,98 @@
1+
import numpy as np
2+
from numpy.random import seed
3+
4+
class AdalineSGD(object):
    """ADAptive LInear NEuron classifier, trained with stochastic gradient descent.

    Weights are updated incrementally, one training sample at a time.

    Parameters
    -------------------------------------------------------------------
    eta : float
        Learning rate (between 0.0 and 1.0).

    n_iter : int
        Passes (epochs) over the training data made by ``fit``.

    shuffle : bool (default: True)
        Shuffles training data every epoch if True to prevent cycles.

    random_state : int (default: None)
        Set random state for shuffling and initializing the weights.

    Attributes
    -------------------------------------------------------------------
    w_ : 1-d array
        Weights after fitting; ``w_[0]`` is the bias unit.

    cost_ : list
        Average sum-of-squares cost per epoch, recorded by ``fit``.
    """

    def __init__(self, eta=0.01, n_iter=10,
                 shuffle=True, random_state=None):
        self.eta = eta
        self.n_iter = n_iter
        # Bug fix: this flag was initialized to True, so fit() never
        # allocated self.w_ and the first net_input() call raised
        # AttributeError.  It must start False.
        self.w_initialized = False
        self.shuffle = shuffle
        if random_state:
            seed(random_state)

    def fit(self, X, y):
        """Fit training data.

        Bug fix: the original body made a single pass and ignored both
        ``n_iter`` and ``shuffle`` (that single-pass behavior is kept as
        ``partial_fit``).  This version runs ``n_iter`` epochs, optionally
        reshuffling each epoch, and records the average cost per epoch in
        ``self.cost_``.

        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            Training vectors.
        y : array-like, shape = [n_samples]
            Target values.

        Returns
        -------
        self : AdalineSGD
        """
        self._initialize_weights(X.shape[1])
        self.cost_ = []
        for _ in range(self.n_iter):
            if self.shuffle:
                X, y = self._shuffle(X, y)
            epoch_costs = [self._update_weights(xi, target)
                           for xi, target in zip(X, y)]
            # Average cost over the epoch's samples.
            self.cost_.append(sum(epoch_costs) / len(y))
        return self

    def partial_fit(self, X, y):
        """Fit training data without reinitializing the weights.

        Accepts either a batch (2-d X, multi-element y) or a single
        sample; useful for online learning on streaming data.
        """
        if not self.w_initialized:
            self._initialize_weights(X.shape[1])

        if y.ravel().shape[0] > 1:
            for xi, target in zip(X, y):
                self._update_weights(xi, target)
        else:
            self._update_weights(X, y)

        return self

    def _shuffle(self, X, y):
        """Return X and y with their rows permuted in unison.

        Renamed from ``shuffle``: __init__ stores the ``shuffle`` flag as
        an instance attribute, which shadowed (and broke) a method of the
        same name.
        """
        r = np.random.permutation(len(y))
        return X[r], y[r]

    def _initialize_weights(self, m):
        """Initialize weights to a zero vector of length 1 + m (bias first)."""
        self.w_ = np.zeros(1 + m)
        self.w_initialized = True

    def _update_weights(self, xi, target):
        """Apply the Adaline learning rule for one sample; return its cost."""
        output = self.net_input(xi)
        error = target - output
        # error is scalar here, so xi.dot(error) is elementwise xi * error.
        self.w_[1:] += self.eta * xi.dot(error)
        self.w_[0] += self.eta * error
        cost = 0.5 * error**2
        return cost

    def net_input(self, X):
        """Calculate net input: X . w_[1:] + bias."""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, X):
        """Compute linear activation (the identity for Adaline)."""
        return self.net_input(X)

    def predict(self, X):
        """Return class label (+1 / -1) after the unit step."""
        return np.where(self.activation(X) >= 0.0, 1, -1)

0 commit comments

Comments
 (0)