Skip to content

Commit a5a3395

Browse files
committed
tensorflow/keras
1 parent 173569d commit a5a3395

15 files changed

+1663
-0
lines changed

2019-11-23

+1,123
Large diffs are not rendered by default.

data/iris.data

+151
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,151 @@
1+
5.1,3.5,1.4,0.2,Iris-setosa
2+
4.9,3.0,1.4,0.2,Iris-setosa
3+
4.7,3.2,1.3,0.2,Iris-setosa
4+
4.6,3.1,1.5,0.2,Iris-setosa
5+
5.0,3.6,1.4,0.2,Iris-setosa
6+
5.4,3.9,1.7,0.4,Iris-setosa
7+
4.6,3.4,1.4,0.3,Iris-setosa
8+
5.0,3.4,1.5,0.2,Iris-setosa
9+
4.4,2.9,1.4,0.2,Iris-setosa
10+
4.9,3.1,1.5,0.1,Iris-setosa
11+
5.4,3.7,1.5,0.2,Iris-setosa
12+
4.8,3.4,1.6,0.2,Iris-setosa
13+
4.8,3.0,1.4,0.1,Iris-setosa
14+
4.3,3.0,1.1,0.1,Iris-setosa
15+
5.8,4.0,1.2,0.2,Iris-setosa
16+
5.7,4.4,1.5,0.4,Iris-setosa
17+
5.4,3.9,1.3,0.4,Iris-setosa
18+
5.1,3.5,1.4,0.3,Iris-setosa
19+
5.7,3.8,1.7,0.3,Iris-setosa
20+
5.1,3.8,1.5,0.3,Iris-setosa
21+
5.4,3.4,1.7,0.2,Iris-setosa
22+
5.1,3.7,1.5,0.4,Iris-setosa
23+
4.6,3.6,1.0,0.2,Iris-setosa
24+
5.1,3.3,1.7,0.5,Iris-setosa
25+
4.8,3.4,1.9,0.2,Iris-setosa
26+
5.0,3.0,1.6,0.2,Iris-setosa
27+
5.0,3.4,1.6,0.4,Iris-setosa
28+
5.2,3.5,1.5,0.2,Iris-setosa
29+
5.2,3.4,1.4,0.2,Iris-setosa
30+
4.7,3.2,1.6,0.2,Iris-setosa
31+
4.8,3.1,1.6,0.2,Iris-setosa
32+
5.4,3.4,1.5,0.4,Iris-setosa
33+
5.2,4.1,1.5,0.1,Iris-setosa
34+
5.5,4.2,1.4,0.2,Iris-setosa
35+
4.9,3.1,1.5,0.1,Iris-setosa
36+
5.0,3.2,1.2,0.2,Iris-setosa
37+
5.5,3.5,1.3,0.2,Iris-setosa
38+
4.9,3.1,1.5,0.1,Iris-setosa
39+
4.4,3.0,1.3,0.2,Iris-setosa
40+
5.1,3.4,1.5,0.2,Iris-setosa
41+
5.0,3.5,1.3,0.3,Iris-setosa
42+
4.5,2.3,1.3,0.3,Iris-setosa
43+
4.4,3.2,1.3,0.2,Iris-setosa
44+
5.0,3.5,1.6,0.6,Iris-setosa
45+
5.1,3.8,1.9,0.4,Iris-setosa
46+
4.8,3.0,1.4,0.3,Iris-setosa
47+
5.1,3.8,1.6,0.2,Iris-setosa
48+
4.6,3.2,1.4,0.2,Iris-setosa
49+
5.3,3.7,1.5,0.2,Iris-setosa
50+
5.0,3.3,1.4,0.2,Iris-setosa
51+
7.0,3.2,4.7,1.4,Iris-versicolor
52+
6.4,3.2,4.5,1.5,Iris-versicolor
53+
6.9,3.1,4.9,1.5,Iris-versicolor
54+
5.5,2.3,4.0,1.3,Iris-versicolor
55+
6.5,2.8,4.6,1.5,Iris-versicolor
56+
5.7,2.8,4.5,1.3,Iris-versicolor
57+
6.3,3.3,4.7,1.6,Iris-versicolor
58+
4.9,2.4,3.3,1.0,Iris-versicolor
59+
6.6,2.9,4.6,1.3,Iris-versicolor
60+
5.2,2.7,3.9,1.4,Iris-versicolor
61+
5.0,2.0,3.5,1.0,Iris-versicolor
62+
5.9,3.0,4.2,1.5,Iris-versicolor
63+
6.0,2.2,4.0,1.0,Iris-versicolor
64+
6.1,2.9,4.7,1.4,Iris-versicolor
65+
5.6,2.9,3.6,1.3,Iris-versicolor
66+
6.7,3.1,4.4,1.4,Iris-versicolor
67+
5.6,3.0,4.5,1.5,Iris-versicolor
68+
5.8,2.7,4.1,1.0,Iris-versicolor
69+
6.2,2.2,4.5,1.5,Iris-versicolor
70+
5.6,2.5,3.9,1.1,Iris-versicolor
71+
5.9,3.2,4.8,1.8,Iris-versicolor
72+
6.1,2.8,4.0,1.3,Iris-versicolor
73+
6.3,2.5,4.9,1.5,Iris-versicolor
74+
6.1,2.8,4.7,1.2,Iris-versicolor
75+
6.4,2.9,4.3,1.3,Iris-versicolor
76+
6.6,3.0,4.4,1.4,Iris-versicolor
77+
6.8,2.8,4.8,1.4,Iris-versicolor
78+
6.7,3.0,5.0,1.7,Iris-versicolor
79+
6.0,2.9,4.5,1.5,Iris-versicolor
80+
5.7,2.6,3.5,1.0,Iris-versicolor
81+
5.5,2.4,3.8,1.1,Iris-versicolor
82+
5.5,2.4,3.7,1.0,Iris-versicolor
83+
5.8,2.7,3.9,1.2,Iris-versicolor
84+
6.0,2.7,5.1,1.6,Iris-versicolor
85+
5.4,3.0,4.5,1.5,Iris-versicolor
86+
6.0,3.4,4.5,1.6,Iris-versicolor
87+
6.7,3.1,4.7,1.5,Iris-versicolor
88+
6.3,2.3,4.4,1.3,Iris-versicolor
89+
5.6,3.0,4.1,1.3,Iris-versicolor
90+
5.5,2.5,4.0,1.3,Iris-versicolor
91+
5.5,2.6,4.4,1.2,Iris-versicolor
92+
6.1,3.0,4.6,1.4,Iris-versicolor
93+
5.8,2.6,4.0,1.2,Iris-versicolor
94+
5.0,2.3,3.3,1.0,Iris-versicolor
95+
5.6,2.7,4.2,1.3,Iris-versicolor
96+
5.7,3.0,4.2,1.2,Iris-versicolor
97+
5.7,2.9,4.2,1.3,Iris-versicolor
98+
6.2,2.9,4.3,1.3,Iris-versicolor
99+
5.1,2.5,3.0,1.1,Iris-versicolor
100+
5.7,2.8,4.1,1.3,Iris-versicolor
101+
6.3,3.3,6.0,2.5,Iris-virginica
102+
5.8,2.7,5.1,1.9,Iris-virginica
103+
7.1,3.0,5.9,2.1,Iris-virginica
104+
6.3,2.9,5.6,1.8,Iris-virginica
105+
6.5,3.0,5.8,2.2,Iris-virginica
106+
7.6,3.0,6.6,2.1,Iris-virginica
107+
4.9,2.5,4.5,1.7,Iris-virginica
108+
7.3,2.9,6.3,1.8,Iris-virginica
109+
6.7,2.5,5.8,1.8,Iris-virginica
110+
7.2,3.6,6.1,2.5,Iris-virginica
111+
6.5,3.2,5.1,2.0,Iris-virginica
112+
6.4,2.7,5.3,1.9,Iris-virginica
113+
6.8,3.0,5.5,2.1,Iris-virginica
114+
5.7,2.5,5.0,2.0,Iris-virginica
115+
5.8,2.8,5.1,2.4,Iris-virginica
116+
6.4,3.2,5.3,2.3,Iris-virginica
117+
6.5,3.0,5.5,1.8,Iris-virginica
118+
7.7,3.8,6.7,2.2,Iris-virginica
119+
7.7,2.6,6.9,2.3,Iris-virginica
120+
6.0,2.2,5.0,1.5,Iris-virginica
121+
6.9,3.2,5.7,2.3,Iris-virginica
122+
5.6,2.8,4.9,2.0,Iris-virginica
123+
7.7,2.8,6.7,2.0,Iris-virginica
124+
6.3,2.7,4.9,1.8,Iris-virginica
125+
6.7,3.3,5.7,2.1,Iris-virginica
126+
7.2,3.2,6.0,1.8,Iris-virginica
127+
6.2,2.8,4.8,1.8,Iris-virginica
128+
6.1,3.0,4.9,1.8,Iris-virginica
129+
6.4,2.8,5.6,2.1,Iris-virginica
130+
7.2,3.0,5.8,1.6,Iris-virginica
131+
7.4,2.8,6.1,1.9,Iris-virginica
132+
7.9,3.8,6.4,2.0,Iris-virginica
133+
6.4,2.8,5.6,2.2,Iris-virginica
134+
6.3,2.8,5.1,1.5,Iris-virginica
135+
6.1,2.6,5.6,1.4,Iris-virginica
136+
7.7,3.0,6.1,2.3,Iris-virginica
137+
6.3,3.4,5.6,2.4,Iris-virginica
138+
6.4,3.1,5.5,1.8,Iris-virginica
139+
6.0,3.0,4.8,1.8,Iris-virginica
140+
6.9,3.1,5.4,2.1,Iris-virginica
141+
6.7,3.1,5.6,2.4,Iris-virginica
142+
6.9,3.1,5.1,2.3,Iris-virginica
143+
5.8,2.7,5.1,1.9,Iris-virginica
144+
6.8,3.2,5.9,2.3,Iris-virginica
145+
6.7,3.3,5.7,2.5,Iris-virginica
146+
6.7,3.0,5.2,2.3,Iris-virginica
147+
6.3,2.5,5.0,1.9,Iris-virginica
148+
6.5,3.0,5.2,2.0,Iris-virginica
149+
6.2,3.4,5.4,2.3,Iris-virginica
150+
5.9,3.0,5.1,1.8,Iris-virginica
151+

demo43.py

+39
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
import numpy
2+
from keras.layers import Dense
3+
from keras.models import Sequential
4+
import os
5+
from keras.wrappers.scikit_learn import KerasClassifier
6+
from sklearn.model_selection import StratifiedKFold, cross_val_score, GridSearchCV
7+
8+
# Show the working directory so the relative data path below can be checked.
print(os.getcwd())

# Load the diabetes CSV (8 feature columns + 1 label column), skipping the
# header row.  os.path.join keeps the path portable — the original
# 'data\\diabetes.csv' literal only worked on Windows.
dataset1 = numpy.loadtxt(os.path.join('data', 'diabetes.csv'), delimiter=',', skiprows=1)
print(type(dataset1), dataset1.shape)

# First 8 columns are the input features, the 9th column is the outcome label.
inputList = dataset1[:, 0:8]
resultList = dataset1[:, 8]
print(inputList[:5])
print(numpy.unique(resultList))  # presumably the binary 0/1 labels — verify against the CSV
17+
18+
19+
def create_default_model(optimizer='adam', init='uniform'):  # 1
    """Build and compile a binary classifier for the 8-feature diabetes data.

    Args:
        optimizer: name of the Keras optimizer to compile with.
        init: kernel initializer for the first hidden layer.

    Returns:
        A compiled keras ``Sequential`` model.
    """
    model = Sequential()
    model.add(Dense(14, input_dim=8, kernel_initializer=init, activation='relu'))  # 2
    model.add(Dense(12, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))  # sigmoid -> probability of the positive class
    model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])  # 3
    # Fix: summary() prints the table itself and returns None, so the original
    # print(model.summary()) emitted a spurious trailing "None".
    model.summary()
    return model
27+
28+
29+
# Wrap the Keras builder so scikit-learn's cross-validation can drive it.
# epochs/batch_size are deliberately left at the Keras defaults here.
model = KerasClassifier(build_fn=create_default_model, verbose=0)  # 4

# NOTE(review): the original defined optimizers/inits/epochs/batches lists,
# apparently intended for a GridSearchCV over create_default_model's
# arguments, but never used them.  The dead variables are dropped here; wire
# them into a param_grid + GridSearchCV if a hyper-parameter search is wanted.
fiveFold = StratifiedKFold(n_splits=5, shuffle=True)

# 5-fold cross-validated accuracy on the diabetes data.
result = cross_val_score(model, inputList, resultList, cv=fiveFold)
print(f"result mean={result.mean()}, result std={result.std()}")

demo44.py

+36
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
import numpy as np
2+
from keras import models
3+
from keras import layers
4+
from keras.datasets import boston_housing
5+
6+
# Fetch the Boston-housing regression split shipped with Keras.
(train_data, train_target), (test_data, test_target) = boston_housing.load_data()
print(train_data.shape, test_data.shape)

# Standardize every feature column using statistics computed from the
# training split only, then apply the identical transform to the test split
# (test data must never influence the normalization constants).
feature_mean = train_data.mean(axis=0)
train_data -= feature_mean
feature_std = train_data.std(axis=0)
train_data /= feature_std

test_data -= feature_mean
test_data /= feature_std
print(train_data.shape, test_data.shape)
16+
17+
18+
def build_model():
    """Assemble and compile a small regression network for the housing data.

    Returns:
        A compiled keras ``Sequential`` model (MSE loss, MAE metric).
    """
    net = models.Sequential()
    net.add(layers.Dense(64, activation='relu', input_shape=(train_data.shape[1],)))
    net.add(layers.Dense(32, activation='relu'))
    net.add(layers.Dense(1))  # single linear output: the predicted price
    net.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])
    net.summary()
    return net
26+
27+
28+
model = build_model()
model.fit(train_data, train_target, validation_split=0.1, epochs=100, batch_size=10, verbose=1)

# Predict the entire test split in one batched call instead of one
# model.predict() per row — the original per-row loop paid the full
# predict() overhead for every single sample.  Each `predicted` is now a
# length-1 array rather than a 1x1 array.
predictions = model.predict(test_data)
for actual, predicted in zip(test_target, predictions):
    print(f'actual={actual}, predict as={predicted}')

demo45.py

+47
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
from keras import Sequential
2+
from keras.layers import Dense
3+
from keras.wrappers.scikit_learn import KerasClassifier
4+
from sklearn import preprocessing
5+
from pandas import read_csv
6+
from sklearn.model_selection import KFold, cross_val_score
7+
from sklearn.preprocessing import LabelEncoder
8+
from keras.utils import np_utils
9+
10+
# Forward-slash path is portable — the original 'data\\iris.data' literal was
# Windows-only; Python file APIs accept '/' on Windows as well.
dataFrame = read_csv('data/iris.data', header=None)
print(type(dataFrame), dataFrame.shape)
print(dataFrame.columns)
print(dataFrame.index)
print(type(dataFrame.values))
print(dataFrame.values)
# First row: four measurement values plus the class-name string in column 4.
print(dataFrame.values[0,], type(dataFrame.values[0,][0]), type(dataFrame.values[0,][4]))

# cut data: columns 0-3 are the features, column 4 is the species label
dataset = dataFrame.values
features = dataset[:, 0:4].astype(float)
labels = dataset[:, 4]
print(features.mean(axis=0))
# Standardize each feature column to zero mean (per-column scaling).
result = preprocessing.scale(features, axis=0, with_mean=True)
print(result.mean(axis=0))

# Encode the species names as integers, then one-hot encode them so they
# match the 3-unit softmax output layer built below.
encoder = LabelEncoder()
encoder.fit(labels)
encoded_Y = encoder.transform(labels)
print(type(encoded_Y), encoded_Y[:10], encoded_Y[50:60], encoded_Y[100:110])
dummy_y = np_utils.to_categorical(encoded_Y)
print(type(dummy_y), dummy_y[:10], dummy_y[50:60], dummy_y[100:110])
32+
33+
34+
def baseline_model():
    """Create a compiled 3-class softmax classifier for the iris features.

    Returns:
        A compiled keras ``Sequential`` model.
    """
    net = Sequential()
    net.add(Dense(8, input_dim=4, activation='relu'))
    net.add(Dense(12, activation='relu'))
    net.add(Dense(3, activation='softmax'))  # one probability per species
    net.summary()
    net.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return net
42+
43+
44+
# 3-fold cross-validation of the Keras classifier driven by scikit-learn.
estimator = KerasClassifier(build_fn=baseline_model, epochs=200, batch_size=10, verbose=1)
three_fold = KFold(n_splits=3, shuffle=True)
results = cross_val_score(estimator, features, dummy_y, cv=three_fold)
print("accuracy: %.4f, std: %.4f" % (results.mean(), results.std()))

demo46.py

+20
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
import numpy as np

# Raw class scores to be normalized into a probability distribution.
scores = [3.0, 4.0, 3.0]
5+
def manualSoftmax(x):
    """Return the softmax of a 1-D score vector as a numpy array.

    Exponentiates every entry and divides by the total, so the result is
    non-negative and sums to 1.
    """
    exps = np.exp(np.array(x))
    return exps / np.sum(exps, axis=0)
8+
9+
10+
print(manualSoftmax(scores))

import tensorflow as tf

# Cross-check the manual implementation against TensorFlow's softmax.
result2 = tf.nn.softmax(scores)
print(result2)

# A larger gap between scores concentrates probability mass on the maximum.
scores2 = [1.0, 1.0, 1.0, 3.0, 1.0, 1.0, 1.0, 1.0]
result3 = tf.nn.softmax(scores2)
print(result3)
20+

demo47.py

+25
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
import numpy
2+
from keras.datasets import imdb
3+
from matplotlib import pyplot
4+
5+
# Download the full IMDB review dataset and inspect its shapes and labels.
(X_train, y_train), (X_test, y_test) = imdb.load_data()
for part in (X_train, X_test, y_train, y_test):
    print(part.shape)
print(numpy.unique(y_train), numpy.unique(y_test))

# Merge the train and test splits so the statistics below cover every review.
X = numpy.concatenate((X_train, X_test), axis=0)
y = numpy.concatenate((y_train, y_test), axis=0)
print(X[0])
print(X.shape)
print(y.shape)

# Vocabulary size: count of distinct word indices across all reviews.
print(len(numpy.unique(numpy.hstack(X))))

# Distribution of review lengths (in word indices).
result = [len(review) for review in X]
print(result[:50])
print(f'comments mean={numpy.mean(result)}, std={numpy.std(result)}')

# Box plot and histogram of the review lengths, side by side.
pyplot.subplot(121)
pyplot.boxplot(result)
pyplot.subplot(122)
pyplot.hist(result)
pyplot.show()

demo48.py

+43
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
from keras import layers
2+
from keras import models
3+
from keras.datasets import imdb
4+
import numpy as np
5+
6+
# Keep only the 10,000 most frequent words; rarer indices are dropped.
(train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000)
print(train_data[0])
print(max(max(sequence) for sequence in train_data))

# Invert the word->index mapping so reviews can be decoded back into text.
word_index = imdb.get_word_index()
reverse_word_index = {index: word for word, index in word_index.items()}
# The i - 3 shift compensates for Keras's reserved low indices
# (presumably padding/start/unknown — confirm against the imdb docs).
for k in range(5):
    decoded_review = ' '.join(reverse_word_index.get(i - 3, '?') for i in train_data[k])
    print(decoded_review)
print(train_labels[:5])
16+
17+
18+
def vectorize_sequence(sequences, dimension=10000):
    """Multi-hot encode integer sequences into a (len(sequences), dimension) array.

    Row i holds 1.0 at every column index appearing in sequences[i] and 0.0
    everywhere else; duplicate indices still produce a single 1.0.
    """
    encoded = np.zeros((len(sequences), dimension))
    for row, word_indices in enumerate(sequences):
        encoded[row, word_indices] = 1.
    return encoded
23+
24+
# Multi-hot encode the reviews and cast the 0/1 labels to float32.
x_train = vectorize_sequence(train_data)
x_test = vectorize_sequence(test_data)
y_train = np.asarray(train_labels).astype('float32')
y_test = np.asarray(test_labels).astype('float32')
print(x_train.shape, x_test.shape, y_train.shape, y_test.shape)
print(x_train[0])

model = models.Sequential()
model.add(layers.Dense(32, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
model.summary()

# Hold out the first 10,000 training reviews for validation.
x_val = x_train[:10000]
partial_x_train = x_train[10000:]
y_val = y_train[:10000]
partial_y_train = y_train[10000:]

# Fix: the validation split above was constructed but never handed to fit(),
# so no validation metrics were ever computed.
model.fit(partial_x_train, partial_y_train, epochs=20, batch_size=128,
          validation_data=(x_val, y_val))

0 commit comments

Comments
 (0)