
Commit

Fix set_learning_rate_lstm bug referenced in issue #64
josephjaspers committed May 7, 2020
1 parent ec7ac8d commit 87e9749
Showing 2 changed files with 10 additions and 9 deletions.
1 change: 0 additions & 1 deletion blackcat/neural_networks/layers/lstm.h
@@ -311,7 +311,6 @@ struct LSTM:
     virtual
     void set_learning_rate_hook(value_type lr) override final
     {
-        parent_type::set_learning_rate(lr);
         value_type batched_lr = this->get_batched_learning_rate();

         auto optimizers = enumerate(
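Why removing that call fixes the bug (a hedged reading, since issue #64 is not reproduced here): `set_learning_rate_hook` is the per-layer callback, and if the base-class `set_learning_rate` is what invokes the hook, then calling `parent_type::set_learning_rate(lr)` from inside the hook re-enters the hook and recurses. A minimal stand-alone sketch of that pattern; the `Layer`/`LSTM` classes below are illustrative only, not BlackCat's real hierarchy:

#include <iostream>

// Illustrative hierarchy only; BlackCat's actual Layer/LSTM classes differ.
struct Layer {
    using value_type = double;
    value_type learning_rate = 0.0;
    virtual ~Layer() = default;

    void set_learning_rate(value_type lr) {
        learning_rate = lr;
        set_learning_rate_hook(lr);  // base setter notifies the derived layer once
    }

    virtual void set_learning_rate_hook(value_type) {}
};

struct LSTM : Layer {
    using parent_type = Layer;

    void set_learning_rate_hook(value_type lr) override {
        // Before the fix the hook also called parent_type::set_learning_rate(lr).
        // Since the base setter calls the (virtual) hook, that call would re-enter
        // this function and recurse until the stack overflows.
        value_type batched_lr = lr / 4;  // stand-in for get_batched_learning_rate()
        std::cout << "per-batch learning rate: " << batched_lr << '\n';
    }
};

int main() {
    LSTM lstm;
    lstm.set_learning_rate(0.003);  // single, non-recursive update after the fix
}

After the fix the hook only does its own bookkeeping (computing the batched learning rate and updating the layer's optimizers), and the base class remains the single place that stores the new rate.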
18 changes: 10 additions & 8 deletions examples/mnist_test_recurrent/mnist_test.h
@@ -22,20 +22,22 @@ int percept_MNIST(System system_tag, std::string mnist_dataset,
         bc::nn::logging_output_layer(system_tag, 10, bc::nn::RMSE).skip_every(100)
     );

-    auto lstm = bc::nn::lstm(system_tag, 784 / 4, 128);
-    lstm = lstm;
-    auto ff = bc::nn::feedforward(system_tag, 64, 10);
-    ff = ff;
-    auto softmax = bc::nn::softmax(system_tag, 10);
-    softmax = softmax;

-    if (false)
-        network = network;
+    //Regression-test to ensure compilation
+    if (false) {
+        auto lstm = bc::nn::lstm(system_tag, 784 / 4, 128);
+        lstm = lstm;
+        auto ff = bc::nn::feedforward(system_tag, 64, 10);
+        ff = ff;
+        auto softmax = bc::nn::softmax(system_tag, 10);
+        softmax = softmax;
+        network = network;
+    }

     bc::print("Neural Network architecture:");
     bc::print(network.get_string_architecture());

     network.set_learning_rate(.003);
     network.set_batch_size(batch_size);

     std::pair<cube, cube> data = load_mnist(
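The example change is worth noting as an idiom: the copy-assignment checks are grouped into a dead `if (false)` block, so the compiler still has to verify that `operator=` is well-formed for each layer type, while the example no longer constructs throwaway layers at run time. A small, self-contained sketch of that compile-only regression-test idiom; `DummyLayer` is a stand-in, not BlackCat's API:

#include <vector>

// Stand-in layer type; only here to make the idiom concrete.
struct DummyLayer {
    std::vector<double> weights{1.0, 2.0, 3.0};
};

int main() {
    // Regression-test to ensure compilation: this block never executes, but the
    // compiler still type-checks every assignment inside it, so a missing or
    // broken operator= fails the build instead of surprising a user later.
    if (false) {
        DummyLayer lstm;
        lstm = lstm;          // self-assignment must at least compile
        DummyLayer softmax;
        softmax = softmax;
    }
    return 0;
}

For template-heavy layer types like BlackCat's, writing the assignments in a regular function also forces the relevant copy-assignment operators to be instantiated, which is presumably the point of keeping these lines in the example at all.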
