
Commit 10ab758

Committed Jun 23, 2019
update
1 parent 30e757e commit 10ab758

File tree

4 files changed: +11 -3 lines changed

logistic_regression_class/l1_regularization.py

+11
@@ -9,6 +9,7 @@
 
 
 import numpy as np
+from mpl_toolkits.mplot3d import Axes3D
 import matplotlib.pyplot as plt
 
 def sigmoid(z):
@@ -19,13 +20,23 @@ def sigmoid(z):
 
 # uniformly distributed numbers between -5, +5
 X = (np.random.random((N, D)) - 0.5)*10
+# X = (np.random.randn(N, D) - 0.5)*10
 
 # true weights - only the first 3 dimensions of X affect Y
 true_w = np.array([1, 0.5, -0.5] + [0]*(D - 3))
 
 # generate Y - add noise with variance 0.5
 Y = np.round(sigmoid(X.dot(true_w) + np.random.randn(N)*0.5))
 
+
+
+
+# let's plot the data to see what it looks like
+fig = plt.figure()
+ax = fig.add_subplot(111, projection='3d')
+ax.scatter(X[:,0], X[:,1], X[:,2], c=Y)
+plt.show()
+
 # perform gradient descent to find w
 costs = [] # keep track of squared error cost
 w = np.random.randn(D) / np.sqrt(D) # randomly initialize w
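The lines added above plot the synthetic data in 3D before gradient descent runs. A minimal self-contained sketch of that visualization, assuming illustrative sizes N=50 and D=5 (the actual N and D are defined earlier in the file and are not shown in this diff):

import numpy as np
from mpl_toolkits.mplot3d import Axes3D  # needed for projection='3d' on older matplotlib versions
import matplotlib.pyplot as plt

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

# assumed sizes for illustration only; the file defines its own N and D
N, D = 50, 5
X = (np.random.random((N, D)) - 0.5) * 10
true_w = np.array([1, 0.5, -0.5] + [0] * (D - 3))
Y = np.round(sigmoid(X.dot(true_w) + np.random.randn(N) * 0.5))

# scatter the first three input dimensions, colored by class label
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=Y)
plt.show()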

logistic_regression_class/logistic3.py

-1
@@ -65,7 +65,6 @@ def cross_entropy(T, Y):
 print(cross_entropy(T, Y))
 
 # gradient descent weight udpate
-# w += learning_rate * np.dot((T - Y).T, Xb) # old
 w += learning_rate * Xb.T.dot(T - Y)
 
 # recalculate Y
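The deleted comment kept the old form of the update; both expressions compute the same gradient, so nothing is lost. A small check of that equivalence, using assumed shapes (N, D, and the arrays below are illustrative, not taken from the file):

import numpy as np

N, D = 100, 3                                 # assumed sizes for illustration
Xb = np.random.randn(N, D)                    # design matrix including the bias column
T = np.random.randint(0, 2, N).astype(float)  # targets
Y = np.random.rand(N)                         # current predictions

new = Xb.T.dot(T - Y)         # form kept by the commit, shape (D,)
old = np.dot((T - Y).T, Xb)   # form removed from the comment, shape (D,)
print(np.allclose(new, old))  # True: both are the same log-likelihood gradient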

logistic_regression_class/logistic4.py

-1
@@ -64,7 +64,6 @@ def cross_entropy(T, Y):
 print(cross_entropy(T, Y))
 
 # gradient descent weight udpate with regularization
-# w += learning_rate * ( np.dot((T - Y).T, Xb) - 0.1*w ) # old
 w += learning_rate * ( Xb.T.dot(T - Y) - 0.1*w )
 
 # recalculate Y
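The -0.1*w term in the surviving line is the derivative of an L2 penalty (0.1/2)*||w||^2 added to the cost. A small numerical sketch of that correspondence, with an assumed weight vector:

import numpy as np

w = np.random.randn(3)   # assumed weight vector for illustration
lam = 0.1                # penalty strength used in the file

penalty = lambda v: 0.5 * lam * v.dot(v)   # (lam/2) * ||w||^2

# finite-difference gradient of the penalty
eps = 1e-6
fd = np.array([(penalty(w + eps * e) - penalty(w - eps * e)) / (2 * eps)
               for e in np.eye(len(w))])

print(np.allclose(fd, lam * w))  # True: d/dw [(lam/2)*||w||^2] = lam*w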

logistic_regression_class/logistic_donut.py

-1
@@ -73,7 +73,6 @@ def cross_entropy(T, Y):
 print(e)
 
 # gradient descent weight udpate with regularization
-# w += learning_rate * ( np.dot((T - Y).T, Xb) - 0.01*w ) # old
 w += learning_rate * ( Xb.T.dot(T - Y) - 0.1*w )
 
 # recalculate Y
