'''
This script shows how to predict stock prices using a basic RNN
'''
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Fix the graph-level seed so repeated runs are reproducible.
tf.set_random_seed(777)
def MinMaxScaler(data):
    ''' Min Max Normalization

    Scales every column of `data` independently into [0, 1).

    Parameters
    ----------
    data : numpy.ndarray
        input data to be normalized
        shape: [Batch size, dimension]

    Returns
    ----------
    data : numpy.ndarray
        normalized data
        shape: [Batch size, dimension]

    References
    ----------
    .. [1] http://sebastianraschka.com/Articles/2014_about_feature_scaling.html

    '''
    # Hoist the per-column extrema: the original recomputed np.min(data, 0)
    # twice (once for the numerator and once for the denominator).
    col_min = np.min(data, 0)
    col_max = np.max(data, 0)
    numerator = data - col_min
    denominator = col_max - col_min
    # noise term prevents the zero division when a column is constant
    return numerator / (denominator + 1e-7)
# train Parameters
seq_length = 7          # sliding-window length fed to the RNN
timesteps = seq_length  # alias kept from the original script
data_dim = 5            # Open, High, Low, Volume, Close
hidden_dim = 10         # LSTM hidden state size
output_dim = 1          # single regression target (next Close)
# NOTE(review): "learing_rate" is a typo for "learning_rate"; the name is
# kept because later statements in this file reference it as-is.
learing_rate = 0.01
iterations = 500        # training steps
# Load the daily stock CSV; columns are Open, High, Low, Volume, Close.
xy = np.loadtxt('data-02-stock_daily.csv', delimiter=',')
@@ -22,6 +48,7 @@ def MinMaxScaler(data):
22
48
# NOTE(review): the diff view elides the original lines between the CSV load
# and this point — cannot tell from here what preprocessing happens to xy first.
x = xy
23
49
# last CSV column is used as the regression label
y = xy [:, [- 1 ]] # Close as label
24
50
51
+ # build a dataset
25
52
dataX = []
26
53
dataY = []
27
54
# Slide a window of seq_length rows over the series; the loop body that
# computes _x and _y is elided from this diff view (original lines 28-30).
for i in range (0 , len (y ) - seq_length ):
@@ -31,47 +58,49 @@ def MinMaxScaler(data):
31
58
dataX .append (_x )
32
59
dataY .append (_y )
# train/test split: first 70% of the windows train, the remainder tests.
train_size = int(len(dataY) * 0.7)
test_size = len(dataY) - train_size
trainX = np.array(dataX[:train_size])
testX = np.array(dataX[train_size:])
trainY = np.array(dataY[:train_size])
testY = np.array(dataY[train_size:])
# input place holders: a batch of length-seq_length windows, and their labels
X = tf.placeholder(tf.float32, [None, seq_length, data_dim])
Y = tf.placeholder(tf.float32, [None, 1])

# build a LSTM network
cell = tf.contrib.rnn.BasicLSTMCell(
    num_units=hidden_dim, state_is_tuple=True, activation=tf.tanh)
outputs, _states = tf.nn.dynamic_rnn(cell, X, dtype=tf.float32)
# We use the last cell's output, projected to output_dim with no activation
Y_pred = tf.contrib.layers.fully_connected(
    outputs[:, -1], output_dim, activation_fn=None)

# cost/loss: sum of squared errors over the batch
loss = tf.reduce_sum(tf.square(Y_pred - Y))
# optimizer (variable name keeps the file's existing "learing_rate" typo)
optimizer = tf.train.AdamOptimizer(learing_rate)
train = optimizer.minimize(loss)

# RMSE between fed-in targets and predictions, for evaluation only
targets = tf.placeholder(tf.float32, [None, 1])
predictions = tf.placeholder(tf.float32, [None, 1])
rmse = tf.sqrt(tf.reduce_mean(tf.square(targets - predictions)))
# Run training and evaluation inside a managed session.
with tf.Session() as sess:
    init = tf.global_variables_initializer()
    sess.run(init)

    # Training step
    for step in range(iterations):
        _, step_loss = sess.run([train, loss],
                                feed_dict={X: trainX, Y: trainY})
        print("[step: {}] loss: {}".format(step, step_loss))

    # Test step
    test_predict = sess.run(Y_pred, feed_dict={X: testX})
    # NOTE: this rebinding shadows the `rmse` tensor with its evaluated value,
    # matching the original script's behavior.
    rmse = sess.run(rmse, feed_dict={targets: testY,
                                     predictions: test_predict})
    print("RMSE: {}".format(rmse))

    # Plot predictions
    plt.plot(testY)
    plt.plot(test_predict)
    plt.xlabel("Time Period")
    plt.ylabel("Stock Price")
    plt.show()
0 commit comments