
Commit 21b530c

Merge pull request lisa-lab#97 from DelightRun/master
replace time.clock() by time.time()
2 parents: 3c430e9 + 9a0b657

11 files changed: +40 -40 lines changed
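
In ten of the eleven files the change actually lands as timeit.default_timer() rather than the time.time() named in the title; lstm.py is the exception and calls time.time() directly. The motivation is portability: time.clock() measured CPU time on Unix but wall-clock time on Windows, was deprecated in Python 3.3, and was removed in Python 3.8, while timeit.default_timer() picks a sensible wall-clock timer for the platform. A minimal sketch of the timing pattern the diffs converge on, in the repo's Python 2 style (train_model here is a hypothetical stand-in for the tutorials' training loops):

    import timeit


    def train_model(n_epochs=10):
        # hypothetical stand-in for a tutorial training loop
        total = 0
        for epoch in xrange(n_epochs):
            total += sum(x * x for x in xrange(100000))
        return total


    start_time = timeit.default_timer()  # platform-appropriate wall clock
    train_model()
    end_time = timeit.default_timer()

    # elapsed wall time in seconds, consistent across Unix and Windows
    print 'training took %.2f seconds' % (end_time - start_time)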

Diff for: code/DBN.py (+5 -5)

@@ -2,7 +2,7 @@
 """
 import os
 import sys
-import time
+import timeit

 import numpy

@@ -327,7 +327,7 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=100,
                                                 k=k)

     print '... pre-training the model'
-    start_time = time.clock()
+    start_time = timeit.default_timer()
     ## Pre-train layer-wise
     for i in xrange(dbn.n_layers):
         # go through pretraining epochs
@@ -340,7 +340,7 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=100,
             print 'Pre-training layer %i, epoch %d, cost ' % (i, epoch),
             print numpy.mean(c)

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     # end-snippet-2
     print >> sys.stderr, ('The pretraining code for file ' +
                           os.path.split(__file__)[1] +
@@ -372,7 +372,7 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=100,

     best_validation_loss = numpy.inf
     test_score = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     done_looping = False
     epoch = 0
@@ -424,7 +424,7 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=100,
                 done_looping = True
                 break

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print(
         (
             'Optimization complete with best validation score of %f %%, '

Diff for: code/SdA.py (+5 -5)

@@ -31,7 +31,7 @@
 """
 import os
 import sys
-import time
+import timeit

 import numpy

@@ -379,7 +379,7 @@ def test_SdA(finetune_lr=0.1, pretraining_epochs=15,
                                                 batch_size=batch_size)

     print '... pre-training the model'
-    start_time = time.clock()
+    start_time = timeit.default_timer()
     ## Pre-train layer-wise
     corruption_levels = [.1, .2, .3]
     for i in xrange(sda.n_layers):
@@ -394,7 +394,7 @@ def test_SdA(finetune_lr=0.1, pretraining_epochs=15,
             print 'Pre-training layer %i, epoch %d, cost ' % (i, epoch),
             print numpy.mean(c)

-    end_time = time.clock()
+    end_time = timeit.default_timer()

     print >> sys.stderr, ('The pretraining code for file ' +
                           os.path.split(__file__)[1] +
@@ -427,7 +427,7 @@ def test_SdA(finetune_lr=0.1, pretraining_epochs=15,

     best_validation_loss = numpy.inf
     test_score = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     done_looping = False
     epoch = 0
@@ -471,7 +471,7 @@ def test_SdA(finetune_lr=0.1, pretraining_epochs=15,
                 done_looping = True
                 break

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print(
         (
             'Optimization complete with best validation score of %f %%, '

Diff for: code/cA.py (+3 -3)

@@ -30,7 +30,7 @@
 """
 import os
 import sys
-import time
+import timeit

 import numpy

@@ -276,7 +276,7 @@ def test_cA(learning_rate=0.01, training_epochs=20,
         }
     )

-    start_time = time.clock()
+    start_time = timeit.default_timer()

     ############
     # TRAINING #
@@ -293,7 +293,7 @@ def test_cA(learning_rate=0.01, training_epochs=20,
         print 'Training epoch %d, reconstruction cost ' % epoch, numpy.mean(
             c_array[0]), ' jacobian norm ', numpy.mean(numpy.sqrt(c_array[1]))

-    end_time = time.clock()
+    end_time = timeit.default_timer()

     training_time = (end_time - start_time)

Diff for: code/convolutional_mlp.py (+3 -3)

@@ -23,7 +23,7 @@
 """
 import os
 import sys
-import time
+import timeit

 import numpy

@@ -274,7 +274,7 @@ def evaluate_lenet5(learning_rate=0.1, n_epochs=200,
     best_validation_loss = numpy.inf
     best_iter = 0
     test_score = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     epoch = 0
     done_looping = False
@@ -326,7 +326,7 @@ def evaluate_lenet5(learning_rate=0.1, n_epochs=200,
                 done_looping = True
                 break

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print('Optimization complete.')
     print('Best validation score of %f %% obtained at iteration %i, '
           'with test performance %f %%' %

Diff for: code/dA.py (+5 -5)

@@ -32,7 +32,7 @@

 import os
 import sys
-import time
+import timeit

 import numpy

@@ -321,7 +321,7 @@ def test_dA(learning_rate=0.1, training_epochs=15,
         }
     )

-    start_time = time.clock()
+    start_time = timeit.default_timer()

     ############
     # TRAINING #
@@ -336,7 +336,7 @@ def test_dA(learning_rate=0.1, training_epochs=15,

         print 'Training epoch %d, cost ' % epoch, numpy.mean(c)

-    end_time = time.clock()
+    end_time = timeit.default_timer()

     training_time = (end_time - start_time)

@@ -379,7 +379,7 @@ def test_dA(learning_rate=0.1, training_epochs=15,
         }
     )

-    start_time = time.clock()
+    start_time = timeit.default_timer()

     ############
     # TRAINING #
@@ -394,7 +394,7 @@ def test_dA(learning_rate=0.1, training_epochs=15,

         print 'Training epoch %d, cost ' % epoch, numpy.mean(c)

-    end_time = time.clock()
+    end_time = timeit.default_timer()

     training_time = (end_time - start_time)

Diff for: code/logistic_cg.py (+3 -3)

@@ -38,7 +38,7 @@

 import os
 import sys
-import time
+import timeit

 import numpy

@@ -275,7 +275,7 @@ def callback(theta_value):
     # using scipy conjugate gradient optimizer
     import scipy.optimize
     print ("Optimizing using scipy.optimize.fmin_cg...")
-    start_time = time.clock()
+    start_time = timeit.default_timer()
     best_w_b = scipy.optimize.fmin_cg(
         f=train_fn,
         x0=numpy.zeros((n_in + 1) * n_out, dtype=x.dtype),
@@ -284,7 +284,7 @@ def callback(theta_value):
         disp=0,
         maxiter=n_epochs
     )
-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print(
         (
             'Optimization complete with best validation score of %f %%, with '

Diff for: code/logistic_sgd.py (+3 -3)

@@ -38,7 +38,7 @@
 import gzip
 import os
 import sys
-import time
+import timeit

 import numpy

@@ -360,7 +360,7 @@ def sgd_optimization_mnist(learning_rate=0.13, n_epochs=1000,

     best_validation_loss = numpy.inf
     test_score = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     done_looping = False
     epoch = 0
@@ -419,7 +419,7 @@ def sgd_optimization_mnist(learning_rate=0.13, n_epochs=1000,
                 done_looping = True
                 break

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print(
         (
             'Optimization complete with best validation score of %f %%,'

Diff for: code/lstm.py (+2 -2)

@@ -543,7 +543,7 @@ def train_lstm(

     uidx = 0  # the number of update done
     estop = False  # early stop
-    start_time = time.clock()
+    start_time = time.time()
     try:
         for eidx in xrange(max_epochs):
             n_samples = 0
@@ -622,7 +622,7 @@ def train_lstm(
     except KeyboardInterrupt:
         print "Training interupted"

-    end_time = time.clock()
+    end_time = time.time()
     if best_p is not None:
         zipp(best_p, tparams)
     else:
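
Note that lstm.py alone switches to time.time(), a plain wall-clock timestamp in seconds since the epoch, instead of timeit.default_timer(). Either replacement fixes the real bug: on Unix builds of Python 2, time.clock() returned processor time, which barely advances while a program sleeps or waits on an external device, so it could badly under-report elapsed training time. A small illustrative sketch (timings approximate):

    import time

    start_wall = time.time()
    time.sleep(1.0)  # consumes wall time but almost no CPU time
    print '%.2f wall seconds' % (time.time() - start_wall)  # prints ~1.00

    # On a Unix build of Python 2, time.clock() measured across the same
    # sleep would advance by roughly 0.00 seconds, not 1.00.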

Diff for: code/mlp.py (+3 -3)

@@ -23,7 +23,7 @@

 import os
 import sys
-import time
+import timeit

 import numpy

@@ -336,7 +336,7 @@ def test_mlp(learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000,
     best_validation_loss = numpy.inf
     best_iter = 0
     test_score = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     epoch = 0
     done_looping = False
@@ -391,7 +391,7 @@ def test_mlp(learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000,
                 done_looping = True
                 break

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print(('Optimization complete. Best validation score of %f %% '
            'obtained at iteration %i, with test performance %f %%') %
           (best_validation_loss * 100., best_iter + 1, test_score * 100.))

Diff for: code/rbm.py (+5 -5)

@@ -4,7 +4,7 @@
 contain hidden variables. Restricted Boltzmann Machines further restrict BMs
 to those without visible-visible and hidden-hidden connections.
 """
-import time
+import timeit

 try:
     import PIL.Image as Image
@@ -428,7 +428,7 @@ def test_rbm(learning_rate=0.1, training_epochs=15,
     )

     plotting_time = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     # go through training epochs
     for epoch in xrange(training_epochs):
@@ -441,7 +441,7 @@ def test_rbm(learning_rate=0.1, training_epochs=15,
         print 'Training epoch %d, cost is ' % epoch, numpy.mean(mean_cost)

         # Plot filters after each training epoch
-        plotting_start = time.clock()
+        plotting_start = timeit.default_timer()
         # Construct image from the weight matrix
         image = Image.fromarray(
             tile_raster_images(
@@ -452,10 +452,10 @@ def test_rbm(learning_rate=0.1, training_epochs=15,
             )
         )
         image.save('filters_at_epoch_%i.png' % epoch)
-        plotting_stop = time.clock()
+        plotting_stop = timeit.default_timer()
         plotting_time += (plotting_stop - plotting_start)

-    end_time = time.clock()
+    end_time = timeit.default_timer()

     pretraining_time = (end_time - start_time) - plotting_time

Diff for: code/rnnslu.py (+3 -3)

@@ -8,7 +8,7 @@
 import stat
 import subprocess
 import sys
-import time
+import timeit

 import numpy

@@ -318,13 +318,13 @@ def main(param=None):
         shuffle([train_lex, train_ne, train_y], param['seed'])

         param['ce'] = e
-        tic = time.time()
+        tic = timeit.default_timer()

         for i, (x, y) in enumerate(zip(train_lex, train_y)):
             rnn.train(x, y, param['win'], param['clr'])
             print '[learning] epoch %i >> %2.2f%%' % (
                 e, (i + 1) * 100. / nsentences),
-            print 'completed in %.2f (sec) <<\r' % (time.time() - tic),
+            print 'completed in %.2f (sec) <<\r' % (timeit.default_timer() - tic),
             sys.stdout.flush()

         # evaluation // back into the real world : idx -> words
