1
1
# https://deeplearningcourses.com/c/machine-learning-in-python-random-forest-adaboost
2
2
# https://www.udemy.com/machine-learning-in-python-random-forest-adaboost
3
+ from __future__ import print_function , division
4
+ from builtins import range , input
5
+ # Note: you may need to update your version of future
6
+ # sudo pip install -U future
7
+
8
+
3
9
import numpy as np
4
10
import matplotlib .pyplot as plt
5
11
from sklearn .tree import DecisionTreeRegressor , DecisionTreeClassifier
27
33
28
34
plt .scatter (Xtrain , Ytrain , s = 50 , alpha = 0.7 , c = 'blue' )
29
35
plt .scatter (Xtrain , model .predict (Xtrain .reshape (Ntrain , 1 )), s = 50 , alpha = 0.7 , c = 'green' )
36
+ plt .title ("decision tree - low bias, high variance" )
30
37
# plt.show()
31
38
32
39
# plt.scatter(X, Y)
46
53
plt .scatter (Xtrain , model .predict (Xtrain .reshape (Ntrain , 1 )), s = 50 , alpha = 0.7 , c = 'green' )
47
54
plt .plot (Xaxis , Yaxis )
48
55
plt .plot (Xaxis , model .predict (Xaxis .reshape (T , 1 )))
56
+ plt .title ("decision tree - high bias, low variance" )
49
57
plt .show ()
50
58
51
59
57
65
plt .scatter (Xtrain , model .predict (Xtrain .reshape (Ntrain , 1 )), s = 50 , alpha = 0.7 , c = 'green' )
58
66
plt .plot (Xaxis , Yaxis )
59
67
plt .plot (Xaxis , model .predict (Xaxis .reshape (T , 1 )))
68
+ plt .title ("knn - low bias, high variance" )
60
69
plt .show ()
61
70
62
71
# knn - high bias, low variance
67
76
plt .scatter (Xtrain , model .predict (Xtrain .reshape (Ntrain , 1 )), s = 50 , alpha = 0.7 , c = 'green' )
68
77
plt .plot (Xaxis , Yaxis )
69
78
plt .plot (Xaxis , model .predict (Xaxis .reshape (T , 1 )))
79
+ plt .title ("knn - high bias, low variance" )
70
80
plt .show ()
71
81
72
82
76
86
N = 100
77
87
D = 2
78
88
X = np .random .randn (N , D )
79
- X [:N / 2 ] += np .array ([1 , 1 ]) # center it at (1,1)
80
- X [N / 2 :] += np .array ([- 1 , - 1 ]) # center it at (-1, -1)
89
+ X [:N // 2 ] += np .array ([1 , 1 ]) # center it at (1,1)
90
+ X [N // 2 :] += np .array ([- 1 , - 1 ]) # center it at (-1, -1)
81
91
82
- Y = np .array ([0 ]* (N / 2 ) + [1 ]* (N / 2 ))
92
+ Y = np .array ([0 ]* (N // 2 ) + [1 ]* (N // 2 ))
83
93
84
94
85
95
def plot_decision_boundary (X , model ):
@@ -110,6 +120,7 @@ def plot_decision_boundary(X, model):
110
120
111
121
plt .scatter (X [:,0 ], X [:,1 ], s = 50 , c = Y , alpha = 0.7 )
112
122
plot_decision_boundary (X , model )
123
+ plt .title ("dt - low bias, high variance" )
113
124
plt .show ()
114
125
115
126
# dt - high bias, low variance
@@ -118,6 +129,7 @@ def plot_decision_boundary(X, model):
118
129
119
130
plt .scatter (X [:,0 ], X [:,1 ], s = 50 , c = Y , alpha = 0.7 )
120
131
plot_decision_boundary (X , model )
132
+ plt .title ("dt - high bias, low variance" )
121
133
plt .show ()
122
134
123
135
@@ -127,6 +139,7 @@ def plot_decision_boundary(X, model):
127
139
128
140
plt .scatter (X [:,0 ], X [:,1 ], s = 50 , c = Y , alpha = 0.7 )
129
141
plot_decision_boundary (X , model )
142
+ plt .title ("knn - low bias, high variance" )
130
143
plt .show ()
131
144
132
145
# knn - high bias, low variance
@@ -135,4 +148,5 @@ def plot_decision_boundary(X, model):
135
148
136
149
plt .scatter (X [:,0 ], X [:,1 ], s = 50 , c = Y , alpha = 0.7 )
137
150
plot_decision_boundary (X , model )
151
+ plt .title ("knn - high bias, low variance" )
138
152
plt .show ()
0 commit comments