Commit 6879fa6

Merge pull request #2 from daniel4x/xor-example
Added XOR example to examples/
2 parents: 7e6b434 + 4f439d5

File tree (4 files changed: +142 -5 lines)

    engine/engine.go
    engine/nn.go
    engine/nn_test.go
    examples/xor.go

engine/engine.go

Lines changed: 19 additions & 3 deletions
@@ -110,6 +110,22 @@ func (v *Value) Backward() {
     }
 }

+func (v *Value) Data() float64 {
+    return v.data
+}
+
+func (v *Value) SetData(data float64) {
+    v.data = data
+}
+
+func (v *Value) Grad() float64 {
+    return v.grad
+}
+
+func (v *Value) ZeroGrad() {
+    v.grad = 0
+}
+
 func (v Value) String() string {
     return fmt.Sprintf("Value(label=%s, data=%f, children=(%v), op=%s, grad=%f)", v.label, v.data, v.children, v.op, v.grad)
 }

@@ -126,7 +142,7 @@ func validateValue(candidate interface{}) *Value {
 }

 // Helper functions to create Value slices and matrices (Tensor like objects)
-func makeValues(data []float64) []*Value {
+func MakeValues(data []float64) []*Value {
     /**
      * Create a slice of Value pointers from a slice of float64.
      **/

@@ -137,13 +153,13 @@ func makeValues(data []float64) []*Value {
     return ans
 }

-func makeValueMatrix(data [][]float64) [][]*Value {
+func MakeValueMatrix(data [][]float64) [][]*Value {
     /**
      * Create a matrix of Value pointers from a matrix of float64.
      **/
     ans := make([][]*Value, len(data))
     for i := 0; i < len(data); i++ {
-        ans[i] = makeValues(data[i])
+        ans[i] = MakeValues(data[i])
     }
     return ans
 }
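
These accessors and the newly exported MakeValues/MakeValueMatrix helpers are what let the XOR example below drive training from outside the engine package. A minimal usage sketch (assuming the import path shown in examples/xor.go):

package main

import (
    "fmt"

    e "github.com/daniel4x/GoGrad/engine"
)

func main() {
    // Build a slice of *Value from raw floats via the newly exported helper.
    vals := e.MakeValues([]float64{1.5, -2.0})

    // Read and mutate the wrapped data through the new accessors.
    fmt.Println(vals[0].Data()) // 1.5
    vals[0].SetData(vals[0].Data() + 0.5)
    fmt.Println(vals[0].Data()) // 2

    // Reset the gradient explicitly, as a training loop would between epochs.
    vals[0].ZeroGrad()
    fmt.Println(vals[0].Grad()) // 0
}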

engine/nn.go

Lines changed: 28 additions & 1 deletion
@@ -1,6 +1,9 @@
 package engine

-import "math/rand"
+import (
+    "fmt"
+    "math/rand"
+)

 /**
  * The structs and functions in this file are used to create a simple feedforward neural network (MLP).

@@ -41,6 +44,12 @@ func NewNeuron(in int) *Neuron {
     return &Neuron{weights: weights, bias: bias}
 }

+func (n *Neuron) String() string {
+    ans := "Neuron{"
+    ans += fmt.Sprintf("Weights=%v, ", len(n.weights))
+    return ans + "Bias}"
+}
+
 type Layer struct {
     neurons []*Neuron
 }

@@ -70,6 +79,15 @@ func NewLayer(in, out int) *Layer {
     return &Layer{neurons: neurons}
 }

+func (l *Layer) String() string {
+    ans := "Layer{\n"
+    for i := 0; i < len(l.neurons); i++ {
+        ans += "\t\t" + l.neurons[i].String() + ",\n"
+    }
+    ans += "\t}"
+    return ans
+}
+
 type MLP struct {
     layers []*Layer
 }

@@ -106,3 +124,12 @@ func NewMLP(in int, outs []int) *MLP {

     return &MLP{layers: layers}
 }
+
+func (m *MLP) String() string {
+    ans := "MLP{\n"
+    for i := 0; i < len(m.layers); i++ {
+        ans += "\t" + m.layers[i].String() + ",\n"
+    }
+    ans += "}"
+    return ans
+}
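
The three String() methods compose: printing an *MLP delegates to each *Layer, which in turn delegates to each *Neuron, so fmt can render a readable summary of the architecture. A sketch of the effect (assuming NewMLP(2, []int{2, 1}) builds a 2-input network with one 2-neuron hidden layer and a single output, as the XOR example below suggests):

package main

import (
    "fmt"

    e "github.com/daniel4x/GoGrad/engine"
)

func main() {
    nn := e.NewMLP(2, []int{2, 1})

    // fmt picks up the String() methods added in this commit and prints a
    // nested summary along the lines of:
    //
    //   MLP{
    //       Layer{
    //           Neuron{Weights=2, Bias},
    //           Neuron{Weights=2, Bias},
    //       },
    //       Layer{
    //           Neuron{Weights=2, Bias},
    //       },
    //   }
    fmt.Println(nn)
}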

engine/nn_test.go

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@ import (
 )

 func TestMLPSimpleScenario(t *testing.T) {
-    x := makeValueMatrix(
+    x := MakeValueMatrix(
         [][]float64{
             {2.0, 3.0, -1.0},
             {3.0, -1.0, 0.5},

examples/xor.go

Lines changed: 94 additions & 0 deletions
@@ -0,0 +1,94 @@
+package main
+
+import (
+    "fmt"
+    "math"
+
+    e "github.com/daniel4x/GoGrad/engine"
+)
+
+func createXORData() ([][]*e.Value, []float64) {
+    x := [][]float64{
+        {0, 0},
+        {0, 1},
+        {1, 0},
+        {1, 1},
+    }
+    y := []float64{-1, 1, 1, -1} // encode false as -1 and true as 1 to get cleaner outputs from the model
+
+    return e.MakeValueMatrix(x), y
+}
+
+func printData(X [][]*e.Value, y []float64) {
+    for i := 0; i < len(X); i++ {
+        fmt.Printf("(%v, %v) -> %v\n", X[i][0].Data(), X[i][1].Data(), y[i])
+    }
+}
+
+func main() {
+    // Create the XOR dataset
+    X, y := createXORData()
+    fmt.Println("XOR dataset:")
+    printData(X, y)
+
+    // Define an MLP with 2 inputs, two hidden layers of 4 neurons each, and 1 output neuron
+    nn := e.NewMLP(2, []int{4, 4, 1})
+    fmt.Println("\nMulti-layer Perceptron Definition:\n", nn)
+
+    // Train the model
+    epochs := 2000
+    alpha := 0.01
+
+    for i := 0; i < epochs; i++ {
+        y_model := make([]*e.Value, len(X))
+
+        // Forward pass: feed in each data point
+        for j := 0; j < len(X); j++ {
+            y_model[j] = nn.Call(X[j])
+        }
+
+        // Compute the sum-of-squared-errors loss
+        loss := y_model[0].Sub(y[0]).Pow(2)
+        for j := 1; j < len(y_model); j++ {
+            loss = loss.Add(y_model[j].Sub(y[j]).Pow(2))
+        }
+
+        // Backward pass: zero the gradients first to avoid accumulation between epochs
+        params := nn.Parameters()
+        for j := 0; j < len(params); j++ {
+            params[j].ZeroGrad()
+        }
+
+        loss.Backward()
+
+        // Update the parameters (gradient descent step)
+        for j := 0; j < len(params); j++ {
+            params[j].SetData(params[j].Data() - alpha*params[j].Grad())
+        }
+
+        // Print the loss every 100 epochs
+        if (i+1)%100 == 0 {
+            fmt.Println("epoch", i, "loss", loss.Data())
+        }
+    }
+
+    // Test the model
+    predictions := make([]float64, len(X))
+    for i := 0; i < len(X); i++ {
+        predictions[i] = nn.Call(X[i]).Data()
+    }
+
+    fmt.Println("\nTesting the model:")
+    for i := 0; i < len(X); i++ {
+        fmt.Printf("(%v, %v) -> Actual: %v Prediction %v\n", X[i][0].Data(), X[i][1].Data(), y[i], predictions[i])
+    }
+
+    // Panic if the difference between the actual and predicted values is greater than 0.1
+    for i := 0; i < len(X); i++ {
+        if math.Abs(y[i]-predictions[i]) > 0.1 {
+            panic(fmt.Sprintf("\nTest failed: (%v, %v) -> Actual: %v Prediction %v\n", X[i][0].Data(), X[i][1].Data(), y[i], predictions[i]))
+        }
+    }
+}
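
The example trains by full-batch gradient descent on a sum-of-squared-errors loss over the four XOR points, loss = sum_i (y_model_i - y_i)^2, with learning rate alpha = 0.01 for 2000 epochs, then panics if any prediction is off by more than 0.1. Since the file declares package main, it should run directly with the standard toolchain (a usage sketch, assuming a local checkout with the repository root as working directory):

    go run examples/xor.go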
