train_test.py
import joblib
import argparse
import numpy as np
import pandas as pd
from scipy.stats import pearsonr
from sklearn.metrics import mean_squared_error
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import ExtraTreesRegressor
class Loading():
def __init__(self, features_file, tests_list, binding_affinity):
"""Constructor creates an object for making training and testing data which uses features file (features_file)
is generated by generate_feature module, a text file (tests_list) contains PDB id's of complexes for making test
set, a xlsx file (binding_affinity) contains all binding affinity in pKd for all complexes."""
self.binding_affinity = binding_affinity
self.features_file = features_file
self.tests_list = tests_list
def _load_excel(self):
"""Internal function which is used for loading features file generated from generate_feature module,
add binding affinity values to it and returns Pandas data frame"""
data_frame = pd.read_excel(self.features_file, index_col=0)
pkd = pd.read_excel(self.binding_affinity, index_col=0)
data_frame = pd.concat([data_frame, pkd.reindex(data_frame.index)], axis = 1)
return data_frame
    def _core_set(self):
        """Internal method that reads the text file listing the PDB IDs of the test set and returns them as a list."""
        with open(self.tests_list, 'r') as file:
            text = file.readlines()
        test_set_pdbid = list(map(str.rstrip, text))
        return test_set_pdbid
def make_train_test(self):
"""Function which is uesd for making training and test set and returns x_train,x_test,y_train and y_test"""
data_frame = self._load_excel()
test_set_pdbid = self._core_set()
x_test = data_frame.loc[test_set_pdbid,:].iloc[:, :-1]
y_test = data_frame.loc[test_set_pdbid, :].iloc[:, -1]
x_train = data_frame.drop(test_set_pdbid, axis = 0).iloc[:, :-1]
y_train = data_frame.drop(test_set_pdbid, axis = 0).iloc[:, -1]
return x_train, x_test, y_train, y_test
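
# Example (sketch) of using Loading on its own, outside the command-line entry point below.
# The file names here are placeholders for illustration, not files provided by this repository.
#
#   loader = Loading(features_file="features.xlsx",
#                    tests_list="core_set.txt",
#                    binding_affinity="binding_affinity.xlsx")
#   x_train, x_test, y_train, y_test = loader.make_train_test()
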
class Tuning():
def __init__(self, x_train, y_train):
"""Constructor to create an object for tuning Random Forest and Extra Trees hyperparameters (only max_features)
.Out of bag is used for optimizing hyperparameters of both algorithms which training data and target
(x_train and y_train) need for this task."""
self.x_train = x_train
self.y_train = y_train
    def random_forest_tuning(self, n_estimators):
        """Tune the max_features hyperparameter of Random Forest for a given number of estimators (n_estimators)
        and return the best max_features."""
        oob_scores = []
        # Sweep max_features from 2 up to the total number of features and score each value out-of-bag.
        for i in range(2, int(self.x_train.shape[1]) + 1):
            rand_reg = RandomForestRegressor(n_estimators=n_estimators, max_features=i,
                                             random_state=42, oob_score=True, n_jobs=-1)
            rand_reg.fit(self.x_train, self.y_train)
            oob_scores.append(rand_reg.oob_score_)
        # The sweep starts at max_features = 2, so index 0 corresponds to 2 features.
        best_max_features = np.argmax(oob_scores) + 2
        return best_max_features
    def extra_trees_tuning(self, n_estimators):
        """Tune the max_features hyperparameter of Extra Trees for a given number of estimators (n_estimators)
        and return the best max_features."""
        oob_scores = []
        # Sweep max_features from 2 up to the total number of features and score each value out-of-bag.
        for i in range(2, int(self.x_train.shape[1]) + 1):
            et_reg = ExtraTreesRegressor(n_estimators=n_estimators, max_features=i, random_state=42,
                                         bootstrap=True, oob_score=True, n_jobs=-1)
            et_reg.fit(self.x_train, self.y_train)
            oob_scores.append(et_reg.oob_score_)
        # The sweep starts at max_features = 2, so index 0 corresponds to 2 features.
        best_max_features = np.argmax(oob_scores) + 2
        return best_max_features
class Model():
def __init__(self, x_train, x_test, y_train, y_test):
"""Constructor to create an object for training and testing machine learning model based on Random Forest and
Extra Trees algorithms that needs data of train and test sets (x_train, y_train, x_test, y_test)"""
self.x_train = x_train
self.x_test = x_test
self.y_train = y_train
self.y_test = y_test
    def random_forest(self, n_estimators, max_features):
        """Build a statistical model based on the Random Forest regression method with the given number
        of estimators (n_estimators) and max features (max_features). It returns Pearson's correlation
        coefficient and the RMSE. The model is saved after training."""
        rand_reg = RandomForestRegressor(n_estimators=n_estimators, max_features=max_features,
                                         random_state=42, n_jobs=-1)
        rand_reg.fit(self.x_train, self.y_train)
        joblib.dump(rand_reg, 'rf_model.sav')
        y_pred = rand_reg.predict(self.x_test)
        # Save the test-set predictions, indexed by PDB ID.
        pd.DataFrame(data=y_pred, index=self.x_test.index, columns=['prediction']).to_excel('random_forest_test_predicted.xlsx')
        return pearsonr(self.y_test, y_pred)[0], np.sqrt(mean_squared_error(self.y_test, y_pred))
    def extra_trees(self, n_estimators, max_features):
        """Build a statistical model based on the Extra Trees regression method with the given number
        of estimators (n_estimators) and max features (max_features). It returns Pearson's correlation
        coefficient and the RMSE. The model is saved after training."""
        et_reg = ExtraTreesRegressor(n_estimators=n_estimators, max_features=max_features,
                                     random_state=42, n_jobs=-1, bootstrap=True)
        et_reg.fit(self.x_train, self.y_train)
        joblib.dump(et_reg, 'et_model.sav')
        y_pred = et_reg.predict(self.x_test)
        # Save the test-set predictions, indexed by PDB ID.
        pd.DataFrame(data=y_pred, index=self.x_test.index, columns=['prediction']).to_excel('extra_trees_test_predicted.xlsx')
        return pearsonr(self.y_test, y_pred)[0], np.sqrt(mean_squared_error(self.y_test, y_pred))
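
# Example (sketch): re-using a saved model later for prediction. It assumes 'rf_model.sav' was
# produced by Model.random_forest above and that new_features is a DataFrame with the same columns
# as the training features (new_features is a hypothetical name for illustration).
#
#   model = joblib.load('rf_model.sav')
#   predictions = model.predict(new_features)
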
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Train Random Forest and Extra Trees models to predict pKd values of a test set")
parser.add_argument("excel_file", help = "generated features for complexes")
parser.add_argument("test_set_file", help = "list of test complexes")
parser.add_argument('binding_affinity', help = "binding affinity in pKd of complexes")
    parser.add_argument('-r', "--max_features_rf", type=int, default=None, help="Random Forest's max_features hyperparameter")
    parser.add_argument('-n', "--n_estimators_rf", type=int, default=500, help="number of estimators for the Random Forest model")
    parser.add_argument('-t', "--max_features_et", type=int, default=None, help="Extra Trees' max_features hyperparameter")
    parser.add_argument('-e', "--n_estimators_et", type=int, default=500, help="number of estimators for the Extra Trees model")
    parser.add_argument('-y', "--hyper", action='store_true', help="calculate the max_features hyperparameter for Random Forest and Extra Trees from out-of-bag scores")
args = parser.parse_args()
    train_test = Loading(features_file=args.excel_file, tests_list=args.test_set_file,
                         binding_affinity=args.binding_affinity)
x_train, x_test, y_train, y_test = train_test.make_train_test()
    if args.hyper:
        tune_hyper = Tuning(x_train, y_train)
        rf_hyper = tune_hyper.random_forest_tuning(n_estimators=args.n_estimators_rf)
        et_hyper = tune_hyper.extra_trees_tuning(n_estimators=args.n_estimators_et)
    else:
        rf_hyper = args.max_features_rf
        et_hyper = args.max_features_et
ml = Model(x_train, x_test, y_train, y_test)
rf_pearson, rf_rmse = ml.random_forest(n_estimators = args.n_estimators_rf, max_features = rf_hyper)
    print(f"Random Forest results:\n"
          f"Pearson's correlation coefficient : {rf_pearson:.3f}\n"
          f"RMSE : {rf_rmse:.3f}\n"
          "----------------------------------------")
    et_pearson, et_rmse = ml.extra_trees(n_estimators=args.n_estimators_et, max_features=et_hyper)
    print(f"Extra Trees results:\n"
          f"Pearson's correlation coefficient : {et_pearson:.3f}\n"
          f"RMSE : {et_rmse:.3f}")
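
# Example command line (sketch). The file names are placeholders, not files shipped with this
# repository; -y recomputes max_features from out-of-bag scores, otherwise pass -r/-t explicitly
# (leaving them as None lets scikit-learn consider all features).
#
#   python train_test.py features.xlsx core_set.txt binding_affinity.xlsx -n 500 -e 500 -y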