-
Notifications
You must be signed in to change notification settings - Fork 0
/
SuperLearn.py
66 lines (55 loc) · 1.73 KB
/
SuperLearn.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
from pylab import zeros, sin, cos, normal, random
from Tilecoder import numTilings, tilecode
# initialize weights appropriately here
# initialize step size parameter appropriately here
# initialize your global list of tile indices here
def f(x,y):
    """Return the current linear-function-approximator estimate at (x, y).

    Assignment skeleton: the body (summing the weights of the active tiles
    from Tilecoder.tilecode) has not been written yet.
    """
    # write your linear function approximator here (5 lines or so)
def learn(x,y,target):
    """Update the weights toward `target` for input (x, y) by gradient descent.

    Assignment skeleton: the update rule (step toward target - f(x, y) on the
    active tiles) has not been written yet.
    """
    # write your gradient descent learning algorithm here (3 lines or so)
def test1():
for x,y,target in \
[ (0.1, 0.1, 3.0), \
(4.0, 2.0, -1.0), \
(5.99, 5.99, 2.0), \
(4.0, 2.1, -1.0) ]:
before = f(x,y)
learn(x,y,target)
after = f(x,y)
print 'Example (', x, ',', y, ',', target, '):',
print ' f before learning: ', before,
print ' f after learning : ', after
def targetFunction(x,y):
    """True target: sin(x - 3) * cos(y) corrupted by Gaussian noise (sd 0.1)."""
    noise = normal(0, 0.1)
    return sin(x - 3.0) * cos(y) + noise
def train(numSteps):
    """Do numSteps learning updates on uniformly random samples of the target.

    Inputs are drawn uniformly from [0, 6) x [0, 6); each sample's (noisy)
    target value comes from targetFunction.
    """
    for step in range(numSteps):
        sx = random() * 6.0
        sy = random() * 6.0
        learn(sx, sy, targetFunction(sx, sy))
def writeF(filename):
    """Sample f on a 50x50 grid over [0, 6) x [0, 6) and write it to a file.

    Each grid row becomes one space-separated line of repr'd values, suitable
    for plotting the learned surface.
    """
    steps = 50
    with open(filename, 'w') as out:
        for row in range(steps):
            values = [repr(f(row * 6.0/steps, col * 6.0/steps))
                      for col in range(steps)]
            for value in values:
                out.write(value + ' ')
            out.write('\n')
def MSE(sampleSize):
totalSE = 0.0
for i in range(sampleSize):
x = random() * 6.0
y = random() * 6.0
error = targetFunction(x,y) - f(x,y)
totalSE = totalSE + error * error
print 'The estimated MSE: ', (totalSE / sampleSize)
def test2():
    """Train in stages, snapshotting f early and late and reporting MSE.

    Writes the surface after 20 steps ('f20'), then trains in ten blocks of
    1000 steps with an MSE estimate after each, and writes the final surface
    ('f10000').
    """
    train(20)
    writeF('f20')
    MSE(10000)
    stage = 0
    while stage < 10:
        train(1000)
        MSE(10000)
        stage += 1
    writeF('f10000')
# Run the four hand-worked examples whenever this script is executed.
test1()