forked from cdipaolo/goml
-
Notifications
You must be signed in to change notification settings - Fork 0
/
local_linear_test.go
112 lines (92 loc) · 2.68 KB
/
local_linear_test.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
package linear
import (
"fmt"
"math/rand"
"testing"
"github.com/Fabse333/goml/base"
"github.com/stretchr/testify/assert"
)
// init seeds the global math/rand source with a fixed value so that
// the randomized "junk" points generated in the tests below are
// deterministic across test runs.
func init() {
rand.Seed(42)
}
// TestLocalLinearShouldPass1 trains a locally weighted linear model
// with batch gradient ascent on a dataset that mixes far-away random
// noise with points on the plane y = 5*x1 - 5*x2 - 10, then checks
// that predictions inside the linear region stay close to that plane.
func TestLocalLinearShouldPass1(t *testing.T) {
inputs := [][]float64{}
targets := []float64{}

// Add two clusters of random "junk" points far from the region
// we later query; the local weighting should mostly ignore them.
for p := -70.0; p < -65; p += 2 {
for q := -70.0; q < -65; q += 2 {
inputs = append(inputs, []float64{p, q})
targets = append(targets, 20*(rand.Float64()-0.5))
}
}
for p := 65.0; p < 70; p += 2 {
for q := 65.0; q < 70; q += 2 {
inputs = append(inputs, []float64{p, q})
targets = append(targets, 20*(rand.Float64()-0.5))
}
}

// Fill the central region with points that lie exactly on the plane.
for p := -20.0; p < 20; p++ {
for q := -20.0; q < 20; q++ {
inputs = append(inputs, []float64{p, q})
targets = append(targets, 5*p-5*q-10)
}
}

model := NewLocalLinear(base.BatchGA, 1e-4, 0, 0.75, 500, inputs, targets)

var tested int
var totalErr float64
for p := -15.0; p < 15; p += 7 {
for q := -15.0; q < 15; q += 7 {
guess, predErr := model.Predict([]float64{p, q})
assert.Nil(t, predErr, "learning/prediction error should be nil")
tested++
totalErr += abs(guess[0] - (5*p - 5*q - 10))
}
}

avgError := totalErr / float64(tested)
assert.True(t, avgError < 0.4, "Average error should be less than 0.4 from the expected value of the linear data (currently %v)", avgError)
fmt.Printf("Average Error: %v\n\tPoints Tested: %v\n\tTotal Error: %v\n", avgError, tested, totalErr)
}
// same as above but with stochastic gradient ascent
// TestLocalLinearShouldPass2 mirrors TestLocalLinearShouldPass1 but
// trains the locally weighted model with stochastic gradient ascent
// instead of batch gradient ascent.
func TestLocalLinearShouldPass2(t *testing.T) {
inputs := [][]float64{}
targets := []float64{}

// Add two clusters of random "junk" points far from the region
// we later query; the local weighting should mostly ignore them.
for p := -70.0; p < -65; p += 2 {
for q := -70.0; q < -65; q += 2 {
inputs = append(inputs, []float64{p, q})
targets = append(targets, 20*(rand.Float64()-0.5))
}
}
for p := 65.0; p < 70; p += 2 {
for q := 65.0; q < 70; q += 2 {
inputs = append(inputs, []float64{p, q})
targets = append(targets, 20*(rand.Float64()-0.5))
}
}

// Fill the central region with points on the plane 5*p - 5*q - 10.
for p := -20.0; p < 20; p++ {
for q := -20.0; q < 20; q++ {
inputs = append(inputs, []float64{p, q})
targets = append(targets, 5*p-5*q-10)
}
}

model := NewLocalLinear(base.StochasticGA, 1e-4, 0, 0.75, 500, inputs, targets)

var tested int
var totalErr float64
for p := -15.0; p < 15; p += 7 {
for q := -15.0; q < 15; q += 7 {
guess, predErr := model.Predict([]float64{p, q})
assert.Nil(t, predErr, "learning/prediction error should be nil")
tested++
totalErr += abs(guess[0] - (5*p - 5*q - 10))
}
}

avgError := totalErr / float64(tested)
assert.True(t, avgError < 0.4, "Average error should be less than 0.4 from the expected value of the linear data (currently %v)", avgError)
fmt.Printf("Average Error: %v\n\tPoints Tested: %v\n\tTotal Error: %v\n", avgError, tested, totalErr)
}