
Commit bc56fe6

setting up frameworks branch

1 parent 89f27b1, commit bc56fe6
16 files changed: +2321 -0 lines

Pilot1/P1B1/p1b1_baseline_mxnet.py

Lines changed: 146 additions & 0 deletions
@@ -0,0 +1,146 @@
from __future__ import print_function

import numpy as np

import argparse

import mxnet as mx
from mxnet.io import DataBatch, DataIter

import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt

import p1b1
import p1_common
import p1_common_mxnet


def get_p1b1_parser():

    parser = argparse.ArgumentParser(prog='p1b1_baseline',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                     description='Train Autoencoder - Pilot 1 Benchmark 1')

    return p1b1.common_parser(parser)


def main():

    # Get command-line parameters
    parser = get_p1b1_parser()
    args = parser.parse_args()
    #print('Args:', args)

    # Get parameters from configuration file
    fileParameters = p1b1.read_config_file(args.config_file)
    #print('Params:', fileParameters)

    # Consolidate parameter set. Command-line parameters overwrite file configuration
    gParameters = p1_common.args_overwrite_config(args, fileParameters)
    print('Params:', gParameters)

    # Construct extension to save model
    ext = p1b1.extension_from_parameters(gParameters, '.mx')
    logfile = args.logfile if args.logfile else args.save + ext + '.log'
    p1b1.logger.info('Params: {}'.format(gParameters))

    # Get default parameters for initialization and optimizer functions
    kerasDefaults = p1_common.keras_default_config()
    seed = gParameters['rng_seed']

    # Load dataset
    X_train, X_val, X_test = p1b1.load_data(gParameters, seed)

    print("Shape X_train: ", X_train.shape)
    print("Shape X_val: ", X_val.shape)
    print("Shape X_test: ", X_test.shape)

    print("Range X_train --> Min: ", np.min(X_train), ", max: ", np.max(X_train))
    print("Range X_val --> Min: ", np.min(X_val), ", max: ", np.max(X_val))
    print("Range X_test --> Min: ", np.min(X_test), ", max: ", np.max(X_test))

    # Set both input and target to X: the autoencoder reconstructs its input
    train_iter = mx.io.NDArrayIter(X_train, X_train, gParameters['batch_size'], shuffle=gParameters['shuffle'])
    val_iter = mx.io.NDArrayIter(X_val, X_val, gParameters['batch_size'])
    test_iter = mx.io.NDArrayIter(X_test, X_test, gParameters['batch_size'])

    net = mx.sym.Variable('data')
    out = mx.sym.Variable('softmax_label')
    input_dim = X_train.shape[1]
    output_dim = input_dim

    # Initialize weights and learning rule
    initializer_weights = p1_common_mxnet.build_initializer(gParameters['initialization'], kerasDefaults)
    initializer_bias = p1_common_mxnet.build_initializer('constant', kerasDefaults, 0.)
    init = mx.initializer.Mixed(['bias', '.*'], [initializer_bias, initializer_weights])

    activation = gParameters['activation']

    # Define autoencoder architecture
    layers = gParameters['dense']

    if layers is not None:
        if not isinstance(layers, list):
            layers = list(layers)
        # Encoder part
        for i, l in enumerate(layers):
            net = mx.sym.FullyConnected(data=net, num_hidden=l)
            net = mx.sym.Activation(data=net, act_type=activation)
        # Decoder part: mirror the encoder, skipping the bottleneck layer itself
        for i, l in reversed(list(enumerate(layers))):
            if i < len(layers) - 1:
                net = mx.sym.FullyConnected(data=net, num_hidden=l)
                net = mx.sym.Activation(data=net, act_type=activation)

    net = mx.sym.FullyConnected(data=net, num_hidden=output_dim)
    #net = mx.sym.Activation(data=net, act_type=activation)
    net = mx.symbol.LinearRegressionOutput(data=net, label=out)

    # Display model
    p1_common_mxnet.plot_network(net, 'net' + ext)

    # Define context
    devices = mx.cpu()
    if gParameters['gpus']:
        devices = [mx.gpu(i) for i in gParameters['gpus']]

    # Build autoencoder model
    ae = mx.mod.Module(symbol=net, context=devices)

    # Define optimizer
    optimizer = p1_common_mxnet.build_optimizer(gParameters['optimizer'],
                                                gParameters['learning_rate'],
                                                kerasDefaults)

    # Seed random generator for training
    mx.random.seed(seed)

    freq_log = 1
    ae.fit(train_iter, eval_data=val_iter,
           eval_metric=gParameters['loss'],
           optimizer=optimizer,
           num_epoch=gParameters['epochs'])#,
           #epoch_end_callback=mx.callback.Speedometer(gParameters['batch_size'], freq_log))

    # Model save
    #save_filepath = "model_ae_" + ext
    #ae.save(save_filepath)

    # Evaluate model on test set
    X_pred = ae.predict(test_iter).asnumpy()
    #print("Shape X_pred: ", X_pred.shape)

    scores = p1b1.evaluate_autoencoder(X_pred, X_test)
    print('Evaluation on test data:', scores)

    # Plot histogram of reconstruction errors
    diff = X_pred - X_test
    plt.hist(diff.ravel(), bins='auto')
    plt.title("Histogram of Errors with 'auto' bins")
    plt.savefig('histogram_mx.png')


if __name__ == '__main__':
    main()
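
Editor's note: the encoder/decoder loops above build a symmetric stack around the bottleneck. The decoder replays the encoder widths in reverse, skipping the innermost (bottleneck) size, and a final linear layer restores the input dimension. Below is a minimal standalone sketch of that construction with hypothetical layer sizes and feature count; the real values come from gParameters['dense'] and the loaded data, not from this snippet.

import mxnet as mx

# Hypothetical values for illustration only
layers = [2000, 600, 100]   # encoder widths; 100 acts as the bottleneck
input_dim = 1000            # placeholder feature count
activation = 'sigmoid'

net = mx.sym.Variable('data')
# Encoder: input_dim -> 2000 -> 600 -> 100
for l in layers:
    net = mx.sym.FullyConnected(data=net, num_hidden=l)
    net = mx.sym.Activation(data=net, act_type=activation)
# Decoder: 100 -> 600 -> 2000 (index len(layers)-1, the bottleneck, is skipped)
for i, l in reversed(list(enumerate(layers))):
    if i < len(layers) - 1:
        net = mx.sym.FullyConnected(data=net, num_hidden=l)
        net = mx.sym.Activation(data=net, act_type=activation)
# Final linear reconstruction layer: 2000 -> input_dim
net = mx.sym.FullyConnected(data=net, num_hidden=input_dim)
net = mx.symbol.LinearRegressionOutput(data=net, label=mx.sym.Variable('softmax_label'))

# Verify the mirrored shapes: one (weight, bias) pair per FullyConnected symbol
arg_shapes, _, _ = net.infer_shape(data=(32, input_dim), softmax_label=(32, input_dim))
print(list(zip(net.list_arguments(), arg_shapes)))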

Pilot1/P1B1/p1b1_baseline_neon.py

Lines changed: 175 additions & 0 deletions
@@ -0,0 +1,175 @@
from __future__ import division, print_function

import os
import sys
import argparse
import logging

import numpy as np

# For non-interactive plot
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt

import neon
from neon.util.argparser import NeonArgparser
from neon.data import ArrayIterator
from neon.callbacks.callbacks import Callbacks
from neon.layers import GeneralizedCost, Affine, Dropout, Reshape
from neon.models import Model
from neon.backends import gen_backend

#from neon import logger as neon_logger

import p1b1
import p1_common
import p1_common_neon


def get_p1b1_parser():

    # Construct the neon arg parser. It generates a large set of options by default
    parser = NeonArgparser(__doc__)
    # Specify the default config_file
    parser.add_argument("--config_file", dest='config_file', type=str,
                        default=os.path.join(p1b1.file_path, 'p1b1_default_model.txt'),
                        help="specify model configuration file")

    # Parse other options that are not included in the neon arg parser
    parser = p1_common.get_p1_common_parser(parser)

    return parser


def main():
    # Get command-line parameters
    parser = get_p1b1_parser()
    args = parser.parse_args()
    #print('Args:', args)

    # Get parameters from configuration file
    fileParameters = p1b1.read_config_file(args.config_file)
    #print('Params:', fileParameters)

    # Correct for arguments set by default by the neon parser
    # (i.e. instead of taking the neon parser default value, fall back to the config file;
    # only if the flag was actually given on the command line does the command-line value win).
    # This applies to the conflicting parameters: batch_size, epochs and rng_seed
    if not any("--batch_size" in ag or "-z" in ag for ag in sys.argv):
        args.batch_size = fileParameters['batch_size']
    if not any("--epochs" in ag or "-e" in ag for ag in sys.argv):
        args.epochs = fileParameters['epochs']
    if not any("--rng_seed" in ag or "-r" in ag for ag in sys.argv):
        args.rng_seed = fileParameters['rng_seed']

    # Consolidate parameter set. Command-line parameters overwrite file configuration
    gParameters = p1_common.args_overwrite_config(args, fileParameters)
    print('Params:', gParameters)

    # Determine verbosity level
    loggingLevel = logging.DEBUG if args.verbose else logging.INFO
    logging.basicConfig(level=loggingLevel, format='')

    # Construct extension to save model
    ext = p1b1.extension_from_parameters(gParameters, '.neon')

    # Get default parameters for initialization and optimizer functions
    kerasDefaults = p1_common.keras_default_config()
    seed = gParameters['rng_seed']

    # Load dataset
    X_train, X_val, X_test = p1b1.load_data(gParameters, seed)

    print("Shape X_train: ", X_train.shape)
    print("Shape X_val: ", X_val.shape)
    print("Shape X_test: ", X_test.shape)

    print("Range X_train --> Min: ", np.min(X_train), ", max: ", np.max(X_train))
    print("Range X_val --> Min: ", np.min(X_val), ", max: ", np.max(X_val))
    print("Range X_test --> Min: ", np.min(X_test), ", max: ", np.max(X_test))

    input_dim = X_train.shape[1]
    output_dim = input_dim

    # Re-generate the backend after consolidating command-line parsing and file config
    gen_backend(backend=args.backend,
                rng_seed=seed,
                device_id=args.device_id,
                batch_size=gParameters['batch_size'],
                datatype=gParameters['datatype'],
                max_devices=args.max_devices,
                compat_mode=args.compat_mode)

    # Set both input and target to X: the autoencoder reconstructs its input
    train = ArrayIterator(X_train)
    val = ArrayIterator(X_val)
    test = ArrayIterator(X_test)

    # Initialize weights and learning rule
    initializer_weights = p1_common_neon.build_initializer(gParameters['initialization'], kerasDefaults)
    initializer_bias = p1_common_neon.build_initializer('constant', kerasDefaults, 0.)

    activation = p1_common_neon.get_function(gParameters['activation'])()

    # Define autoencoder architecture
    layers = []
    reshape = None

    # Autoencoder
    layers_params = gParameters['dense']

    if layers_params is not None:
        if not isinstance(layers_params, list):
            layers_params = list(layers_params)
        # Encoder part
        for i, l in enumerate(layers_params):
            layers.append(Affine(nout=l, init=initializer_weights, bias=initializer_bias, activation=activation))
        # Decoder part: mirror the encoder, skipping the bottleneck layer itself
        for i, l in reversed(list(enumerate(layers_params))):
            if i < len(layers_params) - 1:
                layers.append(Affine(nout=l, init=initializer_weights, bias=initializer_bias, activation=activation))

    layers.append(Affine(nout=output_dim, init=initializer_weights, bias=initializer_bias, activation=activation))

    # Build autoencoder model
    ae = Model(layers=layers)

    # Define cost and optimizer
    cost = GeneralizedCost(p1_common_neon.get_function(gParameters['loss'])())
    optimizer = p1_common_neon.build_optimizer(gParameters['optimizer'],
                                               gParameters['learning_rate'],
                                               kerasDefaults)

    callbacks = Callbacks(ae, eval_set=val, eval_freq=1)

    # Seed random generator for training
    np.random.seed(seed)

    ae.fit(train, optimizer=optimizer, num_epochs=gParameters['epochs'], cost=cost, callbacks=callbacks)

    # Model save
    #save_fname = "model_ae_W" + ext
    #ae.save_params(save_fname)

    # Compute errors
    X_pred = ae.get_outputs(test)
    scores = p1b1.evaluate_autoencoder(X_pred, X_test)
    print('Evaluation on test data:', scores)

    # Plot histogram of errors comparing input and output of autoencoder
    diff = X_pred - X_test
    plt.hist(diff.ravel(), bins='auto')
    plt.title("Histogram of Errors with 'auto' bins")
    plt.savefig('histogram_neon.png')


if __name__ == '__main__':
    main()
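
Editor's note on the precedence logic near the top of main(): because NeonArgparser always supplies its own defaults for batch_size, epochs and rng_seed, the script scans sys.argv to distinguish "flag actually typed" from "parser default", and only in the former case does the command line beat the config file. A standalone sketch of that rule follows; the helper name and the example flags are illustrative, not part of the benchmark code.

import sys

def resolve(flags, parser_value, config_value, argv=None):
    """Return parser_value only if one of `flags` appears on the command line;
    otherwise fall back to the configuration-file value."""
    argv = sys.argv if argv is None else argv
    used_on_cli = any(flag in arg for arg in argv for flag in flags)
    return parser_value if used_on_cli else config_value

# Mirrors the batch_size case above:
# args.batch_size = resolve(("--batch_size", "-z"), args.batch_size,
#                           fileParameters['batch_size'])

Like the original, this is a substring test, so a short flag such as "-z" would also match any unrelated argument that happens to contain it; that caveat is inherited from the script.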
