predict_xor.rs
use fnn::prelude::*;

fn main() {
    // Create a new feed-forward neural network.
    //
    // The const generics are in the order:
    // - Input count
    // - Hidden layer count
    // - Output count
    //
    // The number of hidden layers is something you can tune. I found that for
    // this example, anything more than 2 did not yield any accuracy improvement.
    let mut nn = FeedForward::<Sigmoid, 2, 2, 1>::new();

    // Training data: the XOR truth table.
    let training_data = [
        ([0.0, 0.0], [0.0]),
        ([0.0, 1.0], [1.0]),
        ([1.0, 0.0], [1.0]),
        ([1.0, 1.0], [0.0]),
    ];

    // Train: 50,000 passes over the data, updating on each sample with a
    // learning rate of 0.1.
    for _ in 0..50_000 {
        for (input, target) in &training_data {
            let input = SVector::from_column_slice(input);
            let target = SVector::from_column_slice(target);
            nn.train(&input, &target, 0.1);
        }
    }

    // Predict: run each input through the trained network and report how
    // close the output is to the expected value.
    for (input, expected) in &training_data {
        let output = nn.forward(&SVector::from_column_slice(input));
        let difference = (expected[0] - output[0]).abs() * 100.0;
        println!(
            "Input: {input:?}, Output: {}, Expected: {}, Accuracy: {}%",
            output[0],
            expected[0],
            100.0 - difference
        );
    }
}
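
// A minimal sketch, not part of the original example: the sigmoid keeps each
// output in (0, 1), so a hard XOR prediction can be recovered by thresholding
// at 0.5, e.g. inside the prediction loop above:
//
//     let predicted = if output[0] >= 0.5 { 1.0 } else { 0.0 };
//     println!("Classified as: {predicted}");
//
// Running the example is likewise an assumption rather than something stated
// in the file: if `predict_xor.rs` sits under the crate's `examples/`
// directory, the usual Cargo invocation would be
// `cargo run --example predict_xor`.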