-
Notifications
You must be signed in to change notification settings - Fork 40
/
Copy pathmodel.py
68 lines (56 loc) · 2.39 KB
/
model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
import data_processing
# Load the raw dataset and one-hot encode it via the project-local helper module.
data = data_processing.load_data(download=True)  # download=True fetches the data if not cached locally
new_data = data_processing.convert2onehot(data)  # assumes this returns a pandas-like object (.values used below) — TODO confirm
# prepare training data
new_data = new_data.values.astype(np.float32) # change to numpy array and float32
np.random.shuffle(new_data)  # in-place row shuffle; unseeded, so the train/test split differs on every run
sep = int(0.7*len(new_data))  # split point at 70% of the shuffled rows
train_data = new_data[:sep] # training data (70%)
test_data = new_data[sep:] # test data (30%)
# build network
# One placeholder carries a whole row per sample: columns [0:21] are the
# input features and columns [21:25] are the one-hot class label (4 classes),
# hence the fixed width of 25. The two are sliced apart below.
tf_input = tf.placeholder(tf.float32, [None, 25], "input")
tfx = tf_input[:, :21]  # feature columns
tfy = tf_input[:, 21:]  # one-hot label columns
# Two 128-unit ReLU hidden layers feeding a 4-way linear output (logits).
l1 = tf.layers.dense(tfx, 128, tf.nn.relu, name="l1")
l2 = tf.layers.dense(l1, 128, tf.nn.relu, name="l2")
out = tf.layers.dense(l2, 4, name="l3")
prediction = tf.nn.softmax(out, name="pred")  # class probabilities, used only for printing/plotting
# Loss takes the raw logits (softmax is applied internally), not `prediction`.
loss = tf.losses.softmax_cross_entropy(onehot_labels=tfy, logits=out)
# tf.metrics.accuracy returns (acc, update_op); index [1] selects update_op,
# which per the TF1 API updates the running total/count local variables and
# returns the updated accuracy — i.e. this is a RUNNING accuracy across all
# evaluation calls, not a per-batch value.
accuracy = tf.metrics.accuracy( # return (acc, update_op), and create 2 local variables
labels=tf.argmax(tfy, axis=1), predictions=tf.argmax(out, axis=1),)[1]
opt = tf.train.GradientDescentOptimizer(learning_rate=0.1)
train_op = opt.minimize(loss)
sess = tf.Session()
# Local variables initializer is required for the accuracy metric's counters.
sess.run(tf.group(tf.global_variables_initializer(), tf.local_variables_initializer()))
# Training loop: SGD on random mini-batches, with a live two-panel plot
# (left: predicted vs. true class counts on the test set; right: accuracy curve).
# NOTE: indentation reconstructed from the for/if headers — the scraped source
# had all leading whitespace stripped.
plt.ion()
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(8, 4))
accuracies, steps = [], []
for step in range(4000):
    # one optimization step on a random mini-batch of 32 rows
    sample_ids = np.random.randint(len(train_data), size=32)
    sess.run(train_op, {tf_input: train_data[sample_ids]})
    if step % 50 == 0:
        # evaluate on the held-out test set
        acc_, pred_, loss_ = sess.run([accuracy, prediction, loss], {tf_input: test_data})
        steps.append(step)
        accuracies.append(acc_)
        print("Step: %i" % step, "| Accurate: %.2f" % acc_, "| Loss: %.2f" % loss_,)
        # left panel: per-class bar chart, prediction (red) next to target (blue)
        ax1.cla()
        for cls in range(4):
            pred_bar = ax1.bar(cls + 0.1, height=sum((np.argmax(pred_, axis=1) == cls)), width=0.2, color='red')
            true_bar = ax1.bar(cls - 0.1, height=sum((np.argmax(test_data[:, 21:], axis=1) == cls)), width=0.2, color='blue')
        ax1.set_xticks(range(4), ["accepted", "good", "unaccepted", "very good"])
        ax1.legend(handles=[pred_bar, true_bar], labels=["prediction", "target"])
        ax1.set_ylim((0, 400))
        # right panel: running-accuracy history
        ax2.cla()
        ax2.plot(steps, accuracies, label="accuracy")
        ax2.set_ylim(ymax=1)
        ax2.set_ylabel("accuracy")
        plt.pause(0.01)
plt.ioff()
plt.show()