-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathnn.lua
68 lines (57 loc) · 1.58 KB
/
nn.lua
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
require 'torch'
require 'nn'
-- Build a small feedforward network for binary classification:
-- 2 inputs -> 3 hidden units (tanh) -> 1 output (sigmoid).
local inputSize  = 2
local hiddenSize = 3
local outputSize = 1

-- nn.Sequential:add returns the container, so the layers can be chained.
local model = nn.Sequential()
  :add(nn.Linear(inputSize, hiddenSize))  -- input layer -> hidden layer
  :add(nn.Tanh())                         -- hidden nonlinearity
  :add(nn.Linear(hiddenSize, outputSize)) -- hidden layer -> output layer
  :add(nn.Sigmoid())                      -- squash output to (0, 1)

-- Binary cross-entropy loss, matching the sigmoid output and 0/1 targets.
local criterion = nn.BCECriterion()
-- XOR truth table: each row of `input` is one 2-bit sample, and the
-- matching row of `target` is its expected 0/1 label.
local input  = torch.Tensor{ {0, 0}, {0, 1}, {1, 0}, {1, 1} }
local target = torch.Tensor{ {0}, {1}, {1}, {0} }
-- Train with plain (full-batch) gradient descent.
--
-- FIX: the original called `nn.SGD(model.parameters, learningRate)`, but
-- there is no `nn.SGD` module in the 'nn' package — SGD lives in the
-- separate 'optim' package as `optim.sgd`, with a different closure-based
-- API. It also passed `model.parameters` (a method reference) instead of
-- the parameter tensors. The canonical manual training loop in torch/nn
-- instead applies the update with `model:updateParameters(learningRate)`.
local learningRate = 0.1
local epochs = 10000

for epoch = 1, epochs do
  -- Forward pass: network prediction and scalar loss.
  local output = model:forward(input)
  local loss = criterion:forward(output, target)

  -- Backward pass: clear accumulated gradients, then backpropagate
  -- the loss gradient through the network.
  model:zeroGradParameters()
  local gradOutput = criterion:backward(output, target)
  model:backward(input, gradOutput)

  -- Gradient step: w <- w - learningRate * dL/dw for every parameter.
  model:updateParameters(learningRate)

  -- Report progress every 1000 epochs.
  if epoch % 1000 == 0 then
    print('Epoch ' .. epoch .. ', Loss: ' .. loss)
  end
end
-- Evaluate the trained network on all four XOR inputs and show the
-- raw sigmoid outputs (values near 0 or 1 indicate a learned mapping).
local testInput = torch.Tensor{ {0, 0}, {0, 1}, {1, 0}, {1, 1} }
local testOutput = model:forward(testInput)

print('Predicted Output:')
print(testOutput)