jjb_ff.wgt
# This weight file was generated by BP (Erhan Oztop, Dec '99).
# It specifies the network sizes and the weight values.
# Note that the network sizes exclude the clamped 1's for the input and hidden layers,
# so the weight matrices have one extra column for the clamped unit.
# Note: To train the network you need to load a pattern file.
# Note: You cannot specify learning parameters from this file.
# Note: To continue a learning session whose weights you saved, use
# Make net.Network from Weight followed by Load Pattern, then continue training.
# The first matrix holds the input(x)->hidden(y) weights (inputToHiddenW).
# The second matrix holds the hidden(y)->output(z) weights (hiddenToOutputW).
# The third matrix holds the recurrent output(z)->recurrent input(x) weights (recurrentOutputToInputW).
# The network computes sgn(recurrentOutputToInputW.sgn(hiddenToOutputW.sgn(inputToHiddenW.x))), where sgn(t)=1/(1+exp(-t)).
outputdim 3
hiddendim 10
inputdim 7
audioinputdim 9
rindim 0
routdim 0
#input -> hidden weights inputToHiddenW[10][7]:
-7.059137391170618 6.463050639402881 -3.511803801654052 4.774832613334538 -4.270467134273403 7.990851154837235 -6.73930955680775
3.6699464394209285 -5.336975766405665 -2.1516135088786275 -2.4701255209870068 -0.6706583366322723 -1.6176764285346532 -0.9523195429008952
-0.6940402420234912 -5.057296813937226 -4.653839919051055 -8.761087385387654 4.4052532884262625 0.3413015123426787 0.8687409320916274
-3.43948323114263 1.1003953908235968 -0.6760545418228375 -1.1272611251058238 -0.05469797766492167 -1.2385837782748383 -1.7187074407399463
0.7385213400389143 -4.703988075458534 -2.3597856290009545 -4.235798268341189 1.2995576685480639 -0.23108033069838854 -0.69037351762516
5.614706276259728 -6.683969783402472 -2.3559570862595662 -1.5026116115313393 -2.839474474990875 -4.475781114780892 1.9969946943974435
-2.4847899949361314 -0.0019727168891642347 -3.4860613623734817 -5.468054957712512 7.20455424149962 -1.1407812770231698 0.7787647537822221
-0.022386374089894433 -5.119779936527246 -1.757031818994646 -2.9767408531282387 -5.390393923294963 -3.9904209715174264 5.493407768250129
-1.09118575245548 -3.6417981293942936 -2.026319818873863 -2.962996603600014 2.118380918275659 0.797905792865741 -1.5420696951045094
-4.499021232143773 1.2121646155719596 -1.0478309537840087 -1.7891742791822263 0.6840263242755522 -0.33854316624601716 -2.489187802446381
#hidden -> output weights hiddenToOutputW[3][19]:
-0.14217260579907398 -0.4352499763798692 -0.14192330736081163 -0.1422629205870198 -0.43589812150976803 -0.4375794509113747 -0.1423444526704539 -0.4387285799779153 -0.4365174462448113 0.7605746040229626 -4.295511912221883 -3.6637372342224723 0.24626888549920842 -2.31070247374736 -7.129927507694353 -0.6485610959995216 -2.5943507394255216 -1.3285013387840146 -0.628555750865248
4.3805157834161035 -0.0026010785269022507 4.386554519715216 4.380934782256525 -0.0025984727827588344 -0.002586381317141246 4.3788612464132255 -0.0025990920347779686 -0.0026112098219474075 -7.430837661043465 -4.322220258568131 -3.770321088769684 -3.172933369422269 -3.702230098480067 -5.419155268420285 -5.2066423507047475 -4.175332858716189 -3.4481747929605704 -3.6237660105016696
-0.47731161078482187 4.562148945093228 -0.47152217292397236 -0.47680229715645556 4.561503405707473 4.559834167771484 -0.47879430091632075 4.558672327987306 4.560871343933242 -6.504772562787158 0.22794056817731104 -1.753463161081759 -1.7021203475387132 -0.2977162269171222 0.2591740751889683 -1.5554689098147387 1.602730355619052 0.8083947817262649 -2.0389000439099974
#recurrent output -> recurrent input weights recurrentOutputToInputW[0][0]:
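The header comments describe the layout (dimension lines followed by '#'-separated weight matrices) and the forward computation, so a small loader is enough to evaluate the network. The sketch below is an illustration only, not the original BP tool: load_wgt and sigmoid are hypothetical helpers, and the treatment of the 19 columns of hiddenToOutputW as 10 hidden activations concatenated with the 9-dimensional audio input is a guess from the declared dimensions, not something the file states.

```python
import numpy as np

def sigmoid(t):
    # The header calls this sgn(t) = 1/(1+exp(-t)), i.e. the logistic sigmoid.
    return 1.0 / (1.0 + np.exp(-t))

def load_wgt(path):
    """Read the dimension lines and the '#'-delimited weight matrices (assumed format)."""
    dims, matrices, rows = {}, [], []
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            if line.startswith("#"):
                if rows:                        # a comment line closes the previous matrix
                    matrices.append(np.array(rows))
                    rows = []
                continue
            parts = line.split()
            if len(parts) == 2 and parts[0].isalpha():
                dims[parts[0]] = int(parts[1])  # e.g. "hiddendim 10"
            else:
                rows.append([float(v) for v in parts])
    if rows:
        matrices.append(np.array(rows))
    return dims, matrices

dims, mats = load_wgt("jjb_ff.wgt")
W_in2hid, W_hid2out = mats[0], mats[1]          # 10x7 and 3x19 in this file

x = np.zeros(dims["inputdim"])                  # placeholder 7-dim input pattern
y = sigmoid(W_in2hid @ x)                       # 10 hidden activations
# hiddenToOutputW has 19 columns but only 10 hidden units exist; the guess here
# is that the remaining 9 columns multiply the 9-dim audio input declared by
# audioinputdim (the ordering is an assumption).
audio = np.zeros(dims["audioinputdim"])
z = sigmoid(W_hid2out @ np.concatenate([y, audio]))   # 3 outputs
# recurrentOutputToInputW is 0x0 here (rindim/routdim are 0), so the outer
# recurrent step of the header's formula is a no-op for this network.
print(z)
```

Because the recurrent matrix in this particular file is empty, the computation reduces to a plain two-layer feedforward pass; a file saved with nonzero rindim/routdim would additionally feed the outputs back through recurrentOutputToInputW as described in the header.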