evaluate_models.py
from train_test_api import Model
import seaborn as sn
import pandas as pd
import matplotlib.pyplot as plt


def plot_conf_matrix(conf_matrix, accuracy, model_name):
    """Print per-class accuracy and display the confusion matrix as a heatmap."""
    print(model_name)
    labels = [i for i in "ABCDEF"]
    df_cm = pd.DataFrame(conf_matrix, index=labels, columns=labels)
    # Per-class score: diagonal entry divided by the column total for that class.
    for i in labels:
        print(i, float(df_cm.loc[i, i]) / float(df_cm[i].sum()))
    plt.figure(figsize=(10, 7))
    plt.title('Model: ' + model_name + ' Accuracy: ' + str(accuracy))
    sn.set(font_scale=1.4)  # label size
    sn.heatmap(df_cm, annot=True, annot_kws={"size": 16})  # annotation font size
    plt.show()


print('Start evaluation')
model_names = ['knn', 'random_forest', 'svm']
for name in model_names:
    print('----------------------------------')
    print('Model name: {0}'.format(name))
    model = Model(name)
    # Model.evaluate() returns timings, train/test accuracy, and a confusion matrix.
    train_time, test_time, accuracy_train, accuracy_test, conf_matrix = model.evaluate()
    plot_conf_matrix(conf_matrix, accuracy_test, model.model_name)
    print('Accuracy test: {0}'.format(accuracy_test))
    print('Accuracy train: {0}'.format(accuracy_train))
    print('Test time: {0}s'.format(test_time))
    print('Train time: {0}s'.format(train_time))