-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.cpp
More file actions
69 lines (57 loc) · 1.6 KB
/
main.cpp
File metadata and controls
69 lines (57 loc) · 1.6 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
#include<iostream>
#include "tensor.h"
#include "sgd.h"
using namespace std;
int main() {
    // XOR truth table: 4 samples, 2 input features each.
    Tensor X({4, 2}, {
        0, 0,
        0, 1,
        1, 0,
        1, 1
    });
    // Target outputs for XOR: one scalar label per sample.
    Tensor Y({4, 1}, {
        0,
        1,
        1,
        0
    });
    // Layer weights, hand-seeded with small values (no bias terms).
    // W1: input(2) -> hidden(4), W2: hidden(4) -> output(1).
    Tensor W1({2, 4}, {
        0.1f, -0.2f, 0.3f, 0.1f,
        -0.1f, 0.2f, -0.3f, 0.2f
    });
    Tensor W2({4, 1}, {
        0.2f,
        -0.1f,
        0.3f,
        -0.2f
    });
    // Plain SGD over both weight matrices, learning rate 0.01.
    SGD optimizer({&W1, &W2}, 0.01f);

    // Train for 1001 iterations (epochs 0..1000 inclusive, so the
    // final loss at epoch 1000 is reported by the periodic print).
    const int kEpochs = 1001;
    for (int epoch = 0; epoch < kEpochs; ++epoch) {
        // Forward: X @ W1 -> ReLU -> @ W2 -> MSE against Y.
        auto hidden    = matmul(X, W1);        // (4,2) @ (2,4) = (4,4)
        auto activated = relu(*hidden);        // (4,4)
        auto pred      = matmul(*activated, W2); // (4,4) @ (4,1) = (4,1)
        auto loss      = mse(*pred, Y);        // scalar (1,)

        // Report progress every 100 epochs.
        if (epoch % 100 == 0) {
            cout << "epoch " << epoch << " loss: " << loss->data[0] << endl;
        }

        // Backprop through the recorded graph, then apply and clear grads.
        Tensor::backward(loss.get());
        optimizer.step();
        optimizer.zero_grad();
    }

    // One last forward pass with the trained weights to show predictions.
    cout << "\nFinal predictions:" << endl;
    auto hidden    = matmul(X, W1);
    auto activated = relu(*hidden);
    auto pred      = matmul(*activated, W2);
    // Data-driven print: same four lines as writing them out by hand.
    const char* inputs[4]  = {"[0,0]", "[0,1]", "[1,0]", "[1,1]"};
    const int   expected[4] = {0, 1, 1, 0};
    for (int i = 0; i < 4; ++i) {
        cout << "Input " << inputs[i] << " -> " << pred->data[i]
             << " (expected " << expected[i] << ")" << endl;
    }
    return 0;
}