From de94e834b20c699979cc81af76910260338cf13b Mon Sep 17 00:00:00 2001 From: Zihao Zhao <39960618@qq.com> Date: Tue, 7 Jun 2022 12:52:44 +0800 Subject: [PATCH] init commit --- .DS_Store | Bin 0 -> 10244 bytes BaseLayer.py | 53 + Neuron.py | 139 + README.assets/1630985347373.png | Bin 0 -> 62914 bytes README.md | 47 + STLayer.py | 97 + SensingLayer.py | 34 + Visualization.py | 194 + agent.py | 115 + base.py | 8 + cfg.py | 34 + data/.DS_Store | Bin 0 -> 6148 bytes data/10-4_best/train_result.log | 2 + data/example/UnitA_id.txt | 941 ++ data/example/UnitA_information.txt | 941 ++ data/example/UnitC_id.txt | 294 + data/example/UnitC_information.txt | 294 + data/example/train_result.log | 133 + data/test/train_result.log | 1 + data/unita_id.txt | 338 + data/unita_information.txt | 338 + data/unitc_id.txt | 108 + data/unitc_information.txt | 108 + dl_src/README.md | 0 dl_src/cnn.py | 390 + dl_src/cnn_cfg.py | 19 + dl_src/comparison.xlsx | Bin 0 -> 8908 bytes ...tfevents.1632039143.pytorch-97ba8904-hcdsd | Bin 0 -> 37920 bytes ...tfevents.1632039123.pytorch-1ab1ab1a-gjcg2 | Bin 0 -> 37920 bytes dl_src/dl_results/result_log | 10015 ++++++++++++++++ dl_src/dl_scripts/c3d.sh | 19 + dl_src/dl_scripts/c3d_train_num_1.sh | 20 + dl_src/dl_scripts/c3d_train_num_12.sh | 20 + dl_src/dl_scripts/c3d_train_num_2.sh | 20 + dl_src/dl_scripts/c3d_train_num_24.sh | 20 + dl_src/dl_scripts/c3d_train_num_3.sh | 20 + dl_src/dl_scripts/c3d_train_num_36.sh | 20 + dl_src/dl_scripts/c3d_train_num_48.sh | 20 + dl_src/dl_scripts/c3d_train_num_6.sh | 20 + dl_src/dl_scripts/c3d_train_num_60.sh | 20 + dl_src/dl_scripts/c3d_train_num_72.sh | 20 + dl_src/dl_scripts/c3d_train_num_84.sh | 20 + dl_src/dl_scripts/c3d_train_num_90.sh | 20 + dl_src/dl_scripts/c3d_train_num_96.sh | 20 + dl_src/dl_scripts/convnet.sh | 19 + dl_src/dl_scripts/epochs.sh | 26 + dl_src/experiments.md | 0 dl_src/model_cfg.py | 22 + dl_src/test | 1 + dl_src/utils.py | 21 + dvsgesture_i.py | 157 + dvsgesture_t.py | 301 + envs/env.sh | 
9 + events_timeslices.py | 141 + expert.py | 1518 +++ inference.py | 258 + knowledge.py | 440 + logger.py | 24 + main.py | 152 + main_dl.py | 105 + npy_to_txt.py | 8 + prior_knowledge.py | 421 + process_dvs_gesture.py | 163 + sys_profiling.py | 366 + train.py | 259 + visualization_utils.py | 247 + vote_infer.py | 56 + write_excel.py | 316 + 68 files changed, 19952 insertions(+) create mode 100644 .DS_Store create mode 100644 BaseLayer.py create mode 100644 Neuron.py create mode 100644 README.assets/1630985347373.png create mode 100644 README.md create mode 100644 STLayer.py create mode 100644 SensingLayer.py create mode 100644 Visualization.py create mode 100644 agent.py create mode 100644 base.py create mode 100644 cfg.py create mode 100644 data/.DS_Store create mode 100644 data/10-4_best/train_result.log create mode 100644 data/example/UnitA_id.txt create mode 100644 data/example/UnitA_information.txt create mode 100644 data/example/UnitC_id.txt create mode 100644 data/example/UnitC_information.txt create mode 100644 data/example/train_result.log create mode 100644 data/test/train_result.log create mode 100644 data/unita_id.txt create mode 100644 data/unita_information.txt create mode 100644 data/unitc_id.txt create mode 100644 data/unitc_information.txt create mode 100644 dl_src/README.md create mode 100644 dl_src/cnn.py create mode 100644 dl_src/cnn_cfg.py create mode 100644 dl_src/comparison.xlsx create mode 100644 dl_src/dl_results/c3d/20210919_080956/events.out.tfevents.1632039143.pytorch-97ba8904-hcdsd create mode 100644 dl_src/dl_results/convnet/20210919_080929/events.out.tfevents.1632039123.pytorch-1ab1ab1a-gjcg2 create mode 100644 dl_src/dl_results/result_log create mode 100644 dl_src/dl_scripts/c3d.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_1.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_12.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_2.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_24.sh create mode 100644 
dl_src/dl_scripts/c3d_train_num_3.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_36.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_48.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_6.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_60.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_72.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_84.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_90.sh create mode 100644 dl_src/dl_scripts/c3d_train_num_96.sh create mode 100644 dl_src/dl_scripts/convnet.sh create mode 100644 dl_src/dl_scripts/epochs.sh create mode 100644 dl_src/experiments.md create mode 100644 dl_src/model_cfg.py create mode 100644 dl_src/test create mode 100644 dl_src/utils.py create mode 100644 dvsgesture_i.py create mode 100644 dvsgesture_t.py create mode 100644 envs/env.sh create mode 100644 events_timeslices.py create mode 100644 expert.py create mode 100644 inference.py create mode 100644 knowledge.py create mode 100644 logger.py create mode 100644 main.py create mode 100644 main_dl.py create mode 100644 npy_to_txt.py create mode 100644 prior_knowledge.py create mode 100644 process_dvs_gesture.py create mode 100644 sys_profiling.py create mode 100644 train.py create mode 100644 visualization_utils.py create mode 100644 vote_infer.py create mode 100644 write_excel.py diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..86bbda92d8741e6fa644c8e7faa7f249d3c08cb9 GIT binary patch literal 10244 zcmeHMTWl0n7(V|{$_xxJg)N;@bQjkPLV>p2gM{sNxm1u=b_-hyEVDb)c4T&DnVH>! 
zh!%N51MwDPi2CA#qUeKBAzor40nrB^v|b=aBfe-PktYr6ga4T`TedCuqA?ifBs2dx z|9}2-`Of#B*>jc<0`a2SOh|+f!sA1wB;xBDiQ4HiCUL%IB1%Aik{xggS;NX=`Q(_| zfg(U4Kp;RMKp;RMK;T9|0KeHZ@spSg-T?vu0s#WU2+;i@f)AB3Uk)(yX2bn|*CSa~8nJf553}Ei4kNEXsz8qwdxfAdYAK>>4{s{%V-m(3N zSDiqN$>1Fz5FjuX0eW_?A}UdcOO(pNHn`OF+ZfdsYpsN~I*}hdbRcjz_6;0Lge3Q(;3%rn# z>h|wnvZyiA+_dOGV`Tq|mZrwY^5*6P2L!%0x^U&@%x)uZT8G&Tz~2SP+2gb()6s7 z+udi=(YINKWyg)8mezBYnbhys(yq4GrJ0O0ZB)I@n!59?R7Laosp(GIomRfPRr3<-7>o?tlirk1iszX^6h4?g*}!K^o|0p=4{by2jv+5 zbhl#LirJ^d_w6v0tY&nWam#hBJgrGv!-;g-8g0qzDOpf*y4e?9eVHK~jdhkYb67?q zQI^49kfCzUmyj|oQxk+}Y=f`i$vo0Z?j}jnPX@?Qa*UiHr^p#{fm|ZLKo!h@TVNh6 zhE=c{)j>{yJ!m#As{IGKzWA$w)Oh#mE&6#noC3gE9bhqt~v29ivR?ZH{HT$N_9D zBS}R^+pJ+*dPz%Qi_S2=#sDn5Hl=RX?AzupT)bjceC>vgA&;^lQQU?xWx9H=uGw7! zy6X0KD^5-`Rb0%iCEd}phL$pwyjBB^f-p2qs+lU+%MH8~<ms;H!BtXnV3+nj#I);b1oCz3wpc~8F)^&acdLfcuR>aJz`#!k;D>1IjC zZ4M)e?RGrR53@ZQo69KX;fNe>@)^gIn@9^ukd0&)vB`dNggirDA#Whd&yvr{H{?h1 zGr0l))gZt`m;@oHg*uR7Hq1rtH^Kr~2?LA!}IVWyadPLb$AEfh4L zL~G9$qhV=0MhGk3F3t%{)fgSDI4?XuEO8i=tk@K85v6e$a)Rf(Bs^b~fM}_VLy>qD& zhSP8c&cZqP2tJ0-;A{8}E{>w^#*x&8pDL-Fw-O2eZwdZbb=RQ}drr5Gw5xwmPlVq8uetvJ|2NpEgW&`S1PJ_- z2tZXLnTTVH*ZT!a@7g2y?81j8T5pg^&jnxJbv#P%Ivxx3I^Me|LS6ZMO#Nc0r*e=< V%whW1{|pGu|H1iR_ thres: + V_spike = 1 + V_internal = 0 + else: + V_spike = 0 + return V_spike + + def neuron_temporal_expert(self, syn, stim, thres): + if syn > 0.5: + syn = 0.5 + + V_internal = syn - 0.5 + stim # 0.5 is a user defined value + + if V_internal >= thres: + V_spike = 1 + V_internal = 0 + else: + V_spike = 0 + + if V_internal >= 2: ## max voltage range + V_internal = 0 + elif V_internal < 0: ## min voltage range + V_internal = 0 + else: + pass + + return V_spike, V_internal + + def neuron_ST(self, init, syn, thres, st_ions, st_leakage): + V_internal = init + syn + st_ions + st_leakage + if V_internal >= thres: + V_spike = 1 + V_internal = 0 + else: + V_spike = 0 + if self.if_st_neuron_clear: + V_internal = 0 + + if V_internal > 2: ## max voltage range + V_internal = 2 + elif 
V_internal < -0.5: ## min voltage range + V_internal = -0.5 + else: + pass + return V_spike, V_internal + + def neuron_SIMO(self, init, input_value, threshold, output_num,ions, leakage): + self.input_value = input_value + self.output_num = output_num + syn = input_value + V_spike_index = 0 + V_spike_internal = np.empty(self.output_num,int) + V_decoder = np.full((1,self.output_num),0) + V_internal = syn + init + ions - leakage + for i in range(0, self.output_num): + if V_internal >= threshold[i]: + V_spike_internal[i] = 1 + else: + V_spike_internal[i] = 0 + + if np.all(V_spike_internal == 0): + V_spike = 0 + V_spike_index = 0 + else: + V_decoder = np.transpose(V_spike_internal) * threshold + V_spike_index =np.argsort(V_decoder)[-1] + V_spike = 1 + + return V_spike, V_spike_index + + def neuron_computation(self, st_1d_spike,threshold_value,input_num,output_num): + n1 = Neuron() + self.neuron_output = np.full((output_num,1),0) + + for i in range(0, output_num): # event + decoder = np.full((output_num,1),0) + for j in range(0, input_num): #ST core number + init = 0 + input_value = st_1d_spike[j,i] + threshold = threshold_value[j,:] + V_spike, V_spike_index = n1.neuron_SIMO(init, input_value, threshold, output_num,0, 0) + if V_spike == 1: + decoder[V_spike_index] +=1 + else: + pass + self.neuron_output[i] = np.argsort(decoder[:,0])[-1] + print(self.neuron_output) + + def neuron_CANN(self, syn, init, stim, resting, thres): + V_internal = init + syn + stim + resting + if V_internal >= thres: + V_spike = 1 + V_internal = 0 + else: + V_spike = 0 + + return V_spike, V_internal + + def neuron_weight(self,neuro_num,type): + weight = np.full((neuro_num,neuro_num),0, dtype=float) + if type == 0: #random + mean = 0 + stdv = 0.1 + for i in range(0, neuro_num): + seed(1) + weight[i,:] = np.random.normal(mean, stdv, neuro_num) + return weight + elif type == 1: #sequence + np.fill_diagonal(weight , 1) + weight = np.roll(weight,1,axis = 0) + else: + pass + + return weight + + def 
neuron_location(self, init, syn1, thres): + V_internal = init + syn1 + if V_internal >= thres: + V_spike = 1 + V_internal = 0 + else: + V_spike = 0 + + return V_spike, V_internal + + + diff --git a/README.assets/1630985347373.png b/README.assets/1630985347373.png new file mode 100644 index 0000000000000000000000000000000000000000..9eebb4dbc40c90b9e092e0ccade7ee7f463c210e GIT binary patch literal 62914 zcmbTecOaI3_%?h?(NIJgWzSN!k`+mc6e%*YB9Wb$y+=aXTSzKfC^I4=g+#JPMOI|* z=eWPW_xb02p7)>k{^#4bxbExve9q5#9_Mi!=M}7a_2T}$NA?m3g#C(_U!c|&6L_W$?&ntt@5!xyVlCq zWXbumyX$V3=mSW0?(mjn-8w*$zKfM=_s)@25@Gt?_`9Lq#)A0&9kmBP8W8`%hvRXr zEb$K)S$**fRJ+4u$w-NRVDzRSBVOXn4h9P1wQP3&|G0qUlP6EET)85jO}uh7DFcP$ z(%(USH8r{7y<1#tY{SjbCnmdcLxO@>wJ(ottSwFsRJl6+9r*QwM>xLa+qZ9*WBETc zHqK9-uLxAgnC>p*;N}i6W%E=qH#e7;_lu4`+%8AuNhwSp#7xD^>@olQvr%K%%U7@d zjR{m{W^xG$&3$~rVQXtEwcgg(*VopjYHmKM|ITBMcs*tA^|dvRwS{2f^|ZBnK7`Qq z_xA_W`mDHOMHUD=JXzN=6l#5c{`{Gu$IQf(92wa`S-!<$Ydg0%`Q6;qbYU{9AT1-q z$k4F%lgdtl>dS9;TT>hy9LL+zIy*ZX!{WqcF*&3JGTVd3)QtnBRXMTCMp_sGfbqmRQyu^g)_D@kYFCHVPg zX8J4k?%lgQ)1T39O2>a|Wn=aAlPB$&mtzfvQ)N5{Dx9qAgAUHk&C$}*hS2i87o3_O zYi&)D77-F!U0ck~%E~G#`nNhimZcmocOxVsGP0_&a$<6_uCDIr(WCgw+a@Lr#=Y})=xJzKBJ<&GF3v*K~EK$H!k}W=2OwiroFeXk;{cJ?pBp`g$~a?|1k0F&;T`goOVJ-0WP`zxF#|NPOda12$*=&y2pQ(j*F{JDaxY)y0XJ?s`@ zMa+7j<;$09n_-{Q(k1K|DJf}5Ny+rol(x3^K;?tcKY#vAO}YJUiU0Z2*xh||A00mh zvlKmHc5~gy+S=N|VG%21(f8JN;@iv3?+OFnY+JDi-8*;gEEYy!BhA0(kq{iGx}QWv zMWv?7SacVdbv*y^<41H%%-Y)84ual0kNuaMo0|gy1C8iusj1o7*sk5Yc~eghr^Ad; z^WJ}-ySw{&IXQcKdmIL=;Rk=(B@#2UKUnVJCOp`#ojVQaW1c)=V`H-&Z)2mVQ@VVa zi<5I?bX169erYKtI@Ct4DseC0ySf~0Y|gl?%;K0tMn>Y%92^|1`^&NQMg|5? 
z)6?ZL+65^>!o&67STXt5np#`ySG$#^r{A`;`4 z{cbsU_^`D7xT=LkPDVz?)2BlSd?5;R3yT}7s?MwbxB?6j=LmS*AAP{7Q>U)P39?B@ z9GUpu*%|%#@i?XB{{8zOK75G%qz}-oc4Hve{QKGT?AfyyFJ81~T+Dv?QunRRaD6b1 zPN{{VAU!uX_kjZkX5S;Yj~zR%$a&S0&@SxUP8@2#Lgb`xMUV^DkkdiXo!$r^;$#Tl(g8}v{DrzUeOJ)y6 z-2uikf_SKn9IVa8Sni7#ai=Y{O-#503}3!{N$`q_Iz&+?EFwZe*xK4EaI>&DNvQeq zCGYS1wH?{D$4;JPJnQB>qCY+{p`@s2NUwnd;4txRW5aDq;0H27eUKo1Q&SVp<7_x{ zUBS|lqq=%$P|)sU&dAmTFWj#Yed+BF4{b*q3cCbNTIlWU?3_17KI|jNUb}WM!0=P7 zfRnAQ_o?n5KS&5tQc?vTr|Ea>*s=U~Ft8DeJxdUyr=kw7PpB7EK7Za$k;MC0a7 zasu&xrAizoHL!o&k$WO*Yirrr*$X%W4DBU!&z&>eTwCnc4;Qi$)X3G|wVy1&kWN7F z5EWHTzQp~XyWKu^baeRp`B76-3sM*f(zBd(8xQZAV;1H7z~SfbugFPlPCc`vdgF$& zl2W?b%YlIbGcz-SS83_#%DBgEp6{ND+x<4ZcklVLXR{g(PRJ@jAt7|MwC{h;jW&`H zEG;c3CMHtPy5GBZ4@uO=M@y$(jt`OG`^Bf`)RQZ+^9{lxTqQ;7!O~QGU|{2m zYZ;7;jODxnCr<3!E?ee|+hg z!(`Woh6cIBYx?@F27)mH`utp6ofQ(t4=K62u77)Z{Xylcv7Qoh6o6O(edM}D6u_=L z-QJ8(@yfq}5$2vHChB<7Q)oQ>=D+J@eg1GanP=JWLQuN_WIT3T8JuY!Vtr%#`ThlfW;cZMDn z)2nnoc<7MmgMUBImin{EWa@9<+TrckhXD!!FQgG zb_j}U_x0bCldiKv=R7?<5zHSaP6BORH*li&Vid78?(a7+^;X{^pfpA!pYD7 zMVpksDl8mv)TM$$QnI(J>-32emsM1(*O#V^h089+?%TcF?Y1}h>wIHjtxK0om!^Ic z78arsF3irV-n@C2k5xdRHBp*254d~a(jup7IIvtDVY+v%H1efY3$ zsYQH6g=8P0y{pT_+`Jdhm7iN9IE4&NAY8ff`C>RTe!MnI?#7L%X_KN;X^FIp

D2 zGANgSo?l5MqhwBsit;n0pB&xa7=$u555zS)Yol3RR1{Hbu+SJWnV!xLBnoiBB;(w|D z_|;xQT0%k_XhmQDU2(CfkkB(7;ggMz$Bt8oe!{`~{$6c!bI z6cRG3MCy9+`t`^+Z^W>dqGoC+6iL4ja^5InK|w(Y2|ZEAuj3d9QTXO|eEr%}U|9D# zQ4GO&wNZF3apE!~I>eVZcuT2Jmz>o;YXW7|_si`U%?Y~=71voi> z4h^a5=p;UVEa*IIKte)-put0%_5!v7gaL`%zI}U$Vik!36>wo;0hkOZcKCT^qL}s9 zKMO@g!c0s|1TTbGL4ycMvH80^3tRL@4U`^(rs1KRJ2wB#M~TKxLWp z+->1_ilb+N0_*GRQ8tM9jhx)>jjsH;_s;7_-fsh4OR%y&4G)>b=L9eBP$AaOU(U?R zDnJn%u%dK;fIFu@>$VckB2)DGH8wzii)-;u2hX{4wMcn)@7xK+GO@DCDkuafW%6ql z$SN!62t=&yj#uVCaiT5~h{Wc_8|)7o+c^|e;MyHKwxfB!bQbB9m6_{Db5*RHOZ#Ke8!2q%O?$is&QYREk2&Uq_kDqOg*XWzcoL@~=RJAyQE>1zb@e;_f>DWy>pY{F}%jQBgn33lO90%YR$rgfc2C(YpF8Ws;GTr;6JtT)$59mkakZ+@X!~kNjnA zWtAdw_rS(|w%puY1RnYh5tEjWpFUl`c{8x}T~$?8Xy_GPU0r+oVsvJY7(@(Kv49fE zDX3c-8mF!L&hYW^<%xyT2bF5Nk-v%QTsqiYw|nnicJ19e=9V>ebUrmVQ|J050~r8x z#70FGm;jDcy06FRu-MLxAmN^~>TKA4Do$~BSDoPOcQ1h*wPObK4S}uq(3>D6a&mKD z7Zn{jdK9rEB`uBTr{mME&kgtT%;l$4Z!Q;zfUqGsfUg%QyJ+KSoCD)%w8oB4WG5iv2q!e1T02 zj&FqniOdBl<+nP6yYKz>to-C7G8i^(G8~e?S(_3^di(mM?0zeckC1tfbD)Jl<<_cj zoNf$f3GVWtUt#qyaJolhf9A zF*tKvZi7Qsn!@8QAxDXxaAl>@dqzB6MppXK)(7ByxwK1{F9-Vhy)7#~-o(CsTK$NI1h z;!CpRn>(Lq)8*O8Smmebf9kw_+aW0-(bLvO#XN}a?bIne!y>{KAqc`ja2q}PEUToX z&aGRFls!hq#yji2+)!6<`t&I=A>sZ7H5RlJB!s>yS2}K$WQkpMzd`WGlK0>OsAm1u z?ym0cRhgN0E?wFkV0eSq;oFOAE-o(g0VqkAuUy%$$IQU6y0L2Klkkr+pvf5VHJTEZ zl$=aL_&ZQlCwTJgS&!+S62W6_{`=?voT+JOg1qV=~)7eeNr>h5!ZF5tM#MP#FX-A0N3}w{F?m3i-<&=jZqM_cOSx z40k@SRh7YUh7$ycgv1)y4?)ysi;Zt6j+vR60Mz6JQ!6VI;dpe>Z~t=dOb=)EZ4W0x zct8li6;!oq$LWMuukJMpW5EdoF|jwO8av6!>pqGM4-ez8*o}ShzcWPgmrer}QI_tr z>XNlP4V_DX_iQ)6ReeZ8OL z{q0j|TO1c1ii?YP@7_(d^^=v1*EITuOvK~Ith~G~f4*iS_kH^GiAB;er{gxURL8^w zaI_P=`ucA1@#P|Sym)b}QrXe*G-JvL7apr(nGT)J)AIu4lP<@@N=qHh%}+3gkr7m{ zU!O2PHxBVI&np3x9zP>H05O#6?KI#kuwoz92^ ztZ!%tIMqP7hK#Up-@dwn=O|uiCgOCT3zI^#I zmI-M01P90YV!e8TPq(Lta+l`e}D zD|4ejKPgY17=k0<5#nAtD=k9+k{5C{UG?yA6s~u*p~$ zyj4PCB2XxBu*4S|)-!%rv#=Z|jvoh-K<`siBY}TR5fP&>D4KG^8NQKw(7xZE2|H>guYa(>s`_F~Wrg 
z4KeDpOy#{I#vve}QtUS~RKZL(yl^}E27u1OXq8pJ1d8wJ-XWaxU(JW#R=jfJJHBW<`!Sj>p4p3j#(;EOohBZV>)8@sz zLx}ZUvEp2ZyhQZ-APRd9ux#Xg%>$FqJo0s%L?~pQIuR=D;_>};?eb9G~%IUKF<4AJNK6Uyu$XxI0W+93b0s?eYRNZBEJOPHD8*>-YShQ@< zx61G+$F-U5$BF=JA&u?`{gSgX8y8n5z^CWdhBz-TuYiC6 zu8TuScnp4|t!?Pnugu)s&!^>1fgu5;2Kyl+EnSsibj{Gvkf`>QnYObNYO+*PJ+?NW z22G5Ok2&ql`cuL*{GT#=T{KiC}~Z`ML9Vq|1Ppp1$pd1U}a=y zA7y0h`});q`z9DH;NDkfX38_+!3y$-~bE-90_%IME-XtdoauU zPE4TYjVOYgp$_K6L$I>W4g<8ZoUPeFmc_r}mjRHeZwn6X%>%`N0#L(Vxr6CMO(ma<2wHLtYLHm*?W<#s#<57DGyEfF(YB z`0(-uuX5M-@1z8f<&7GxIa;rSm?G|3Ti;YwJp-hdVh6|qM*sg=_bV>TDOhXkzPI2z zVrmTt1oR@fB5Gt~QwcJ`-Q-^>P@krJ3LKB4k3?QF5VR!J;J6WUgsEva<&{4)IP{K= zj)+-QHUeR<|H#-F5{r1c0~^(De&2v+YxR*lDmYZ77C(^qHHG6DnV3-RS~N_+YuigC zpUOhvFD;F+ymF|Rfti^ZXH)Q^wzdQ}cf-Jd2dWuTIPyFgXr?R)@B@^-Wz-+R=pfBI z{vF88%DSnevyO)MtX8eREbH6d|F}LJc=$H(zf2wQ?Ejvr|F_^t??p!Jx&&hP?cF<) z_(nV}HG_2}xgHFH+5+>l#QQAQp&+Dn&M0nuG z&XN(V%*m(b%_n)$!CYR)q)0gzrRxLwpxS(7%y^n zDU1-bn!L-9=8FN|tcoO)S7q)V`p{nc`7N;Z$Nik(ZYT-39o8 zkx=vPTNJ3M^a%XZ#!{~}vMSG$O8~LB_u=v{dQ`Nur!Be#SL~R@tZsp0YJa#zPDx35 z{(K!C5kHm?9}f=fVD1ZYY+qeVi!z8BQ?`xwHGd3&Fd^MRM57t)lp97Y)zrwYn9xyE z|3fn@CMLEqg+ju`#kEcye+nFdk`gZfw}{9*$^rHrAg{3%GR-wj&A0fdSdNqW`}-f- z%f-VJh!grdRoc}N#{l&nr#=;JA^P2S?_{pAv;B|K{67uQmoHz?9q!$~f7*--I19&j z_1~|{5P9*OSfPFW=cq3$DmI~PAzJ}%0g@sERGD>fr(f8VL0ZH4M9d|E`vReX)Pa8G zLh-Sy$AL7#2%J1=VQtN$y>C#q_``_s<(QN45E%6D4OIp{NlI#2OP4@iIv;qTFq%Qa z-n5aKEL!PvOUsldn{VyP()7_3aR2Ya6B7>EU%LFiy#NUWqPUXt3w45ouyYASx5(+! 
zAi9N9kU~oe3u`0MY9Z-ZxN{8Dx$p>4&`?vmEQ}vaF&2(TSCy<6R53KI+4Pw0JkHo# z-q&KcAb*vX(yA~^Ia3i-_4KYj<f#+=PF(IdHqg+%Q<=@wF3IRfeJ+cRA)z+lKQqx`!_2p!L_)P>Lg`v>0bQD|rx zTO~`a;MDS0IZj0{UYP#;_~htmiyAw6iU0`qGn;f{ zTwGETW6HC3Q`BKJHXN$d=wVMmM?>Z0&)!c%OM77N-un&?cS;!0QfIcCqQFmU?ul0( zC@}O53E3xe#`@;9Ylc?4?MO~)rXvjGw{@@^61 zoLO;(Ufp4wBkI4h5WPWM3QS zw4$Q>s;X_MQ=yL@l^4+xydW~Vtt}jvkSGU=0>98lGw>H_+QrqCc>GM+w(kHB;`Tse zfInu+(gP%gQVgQe?Dp-i(Io`JN&RXmP^5f(SG2X)AZ>2umy^FOFK3o?qzF@*BSlRA zCx1ziGuOG*%N#ToeCvxM3;x|-uIbB{-y6IaE?huchMoTL`mU4y*P5E4?m{CRQTNCu zNkIxpx-RS4HDrd6`w?IZAC1(7Q>cFSECohnm8yXvtm$t3R9nlLM zK6Gg9Rm4r=g#jETBqZeb{n@Fj+mFV{zAxp$!uU1r$oS-BSQF@}sAk_pU%GUuMu_&k zqKb;=@(j~E2U$>9z#v*TJ?ci)vo#L#>}}_HTsw3G+9WawsKZ~+1Sc!~6B4uy42B@+ z7*NbwO}r^C{u$A|^$YuqvIMzbN}2Z1Auv=2PXtF#O-(ULIu_g+nWu%!hpblx772~| za@4?1ESZYHD$libAXIb-@X+W6g(~=#4FR%(Dc5w<>>XNATz29 z*pKzS^Lb-&GBzb;0H|Ru3HJo(!7V9yw5|Z~1*xqx_k+#vCBPPGGZrxjgn&?3KhT=a zzad_r5pO7ZnvGW06ZQMp@ubSms|1irjdPFSt-TKyLau@*KgmF#H>@WBLWk{QeJ8v9@SM< z?jUuo-))`-$^qe2#J_uWb^fk}g~f#ny9fsGVgQ9V(n*3-4)F7{pW+8shfWvWW}UIJ z@}unR6UkSJJ%8nc`R?E1;4qP!;6_2${46W$jd4?io@l86x>Eek`=fcV&sYygA@;!B zZ!>9L4r7*f0j`#cX;iv>dju67*Uh?`EP;C|vl}ZS!oijrA^N@@7y8ZvFkd}q_y1`wGq?y=-9h0Hk(qr=ve+8; z#Wa{Pme#AU`cgn73#t>NGjj3)-RU{Q2|4T$TIOG4reheVtwvISOP{D!)g)+#6v zsmDG0(fy%px#!gX_z~~vAvwFp`>G+bD)OyLar!pV9b(>&YB zgX_MrQuuulT)(Ea_HA7O_tMv|N8=?k(CeZjIhWdrUS)ZH9#)=)R+k44+&9-wK||o< zBl6}*)xQI;Mte9HrKP<{)8mDXT+^kKIopLl?dZ zAcf7$qCjxUXWiEht$8M&GVcOQd*99utX(kj6MC&z`T61Nvb_Ku@7`?#DFv&4*n^t@ z`S0T@5{sIA=Ti(6EziEkN4_(#lGXL}B%FGgmv{AT>RF1qFDcRhti5x{I8dOD_Uhb& zF9vcsh{?dk@87aL~(INI?p z_VwLXc<$_PwXz0$gqzP=@-rZ>#wr2DUXN-&8LpZ-tnTR=o+LTa_&sal34*v~V>657 z^v-j;8X^gLraHH+RlSYP&BcWU35T33>`$1_y2YiYhWApK^1=53!`IcW2y_i-&fR|P zdOFGkA*$lpGtO%+n&5h$svfgBK-iJI{`~oKYMzJJSH{|ZogfRUH6Yqn(ZMiXWOi1B zK4EDI>dvl$mzAHNnTm>FNoK`&hVsWIs2TV^b_{mG1zCh|+StB%@nU0l_X>JexT3&y zWBctJe@TEGMd|XR_zh?XsAGv!kd0<~M>w-|^QTX*OG-ZE?lKSGO8}KPHa-2hmGlR) zFnkfP%CTvG^z^Q+*K{=!6y`Vgd*rzj7y>|L>~rz4RFO6cc#~kr010Lxu)A_+gU<|o 
zN9cZ25wyA}=lvY!Pa-4dM;j@>th3+BdiLxm!Wx#B8D}ICqB#@i8;fI+#jjt@NKgN9 zJyxHh4*r47H{O?#1>prvg6-1qy2MYKO3GT@6()4PguN$zPibLPzOy-g{7r4&sFO9jvZAmGZd;rzlz1y5)D_J zN$PswDgZUXP_4f^=ot<-Ac`k`B$)Q!_9J>BXn+Z)OogU&W;^2s21h0)%E5O7EK>9N z3DB#O)qVba1>V)|dtlPFHbdthfv9?Yz=>?<&YIaha4Fy63ka8IIC3N*E{;GbXAm*G z10!@%wSl~uRfqQVc37Z99z>uxVgN-UE z62Mv%Djw+Uo<1eE_LgT!S%P~CjT+Ha2^>J0z+Qhx5($p37e~VlX!*e!wt!V^d@$_=!Hs`y%TJJCoNn>&@y%F_ zN;!f?N&*Mf%*?FR#`O%-elmYK!xCC5QxP|J_aFQMyu4`L643qMJU8iVB-qp2D`wGM z;~0p{8L#|2GLnX1fOtg_g)lNuYN-PqvyvB{#8IVsaI;$)z58)V-&%tbTIb)OW|EVV zBy#;cDHPPysQa8-n}J;cS-0WU7>2(|nOlU5>%!&BCqzUdO)oIcVROUgpMvw0pJb0C zp_csv++uNA0A0b{wJqlM41yAvca$oS732hnYLAYbdg#z8PCV#nfI;}z*+E5xVetq1 zI0*q55{HjMtnnOlu^~z%Wll~}_qBzvC*Pn(p++q9TEDaHs~-SlZnG8mi9*J6HR+>) zAQUU;{cy3uZzu#`PbKUg%CLG(av_bOTe+({{KW|L9W10;si8Q;cycf=ucL8a;Xy$) zfgGC0VLQ8eHGnj@J|ZgWhL%=S4AG}jeMB4L`Otuzw?kDKKhI(pO&Vjd7lcD zXXn$e-q4?kw?_@(_hFv_(I}s_k)6x+-_Qd2|2eeK%>Z-bJ2yRW-K-LKhAjHbt%;~6 zC&fT@MdmSE5Db*lo0mUrmQx$=L%#LtFtf0m7Zuf9j zK{-dW!17o9!i5h|cEPK@&3t$Q_*>d#A&8Rcrw}~_Je6<9-QY~xj#9@I33ns|J^c#y z{G2Zt2$@jL?SmUT(n4Iy$da$><0PeeZar9>PzSU}Ng0sugDcr>Vf+a^&^M1R$0`5z z*|pDw$rb|GEXWk#E?vt$C|_zfZv6b*P=)ZE7$48kF1ZKe5rKeq7IN)I4g&a(092^C zTzdL|YsaFtH^yXOV!~;VYWo4FCKv9G243T8w+HzrUgqT3+u8#3p(8s@SF-p7i59#i z%Kc>}CF)%NWO#pRXd(fhp#McjpMqqwuwb8l!FH!_t!CN%U${XOd6<$wed7LtXfH^S z)*Gy>t^%UejKaJh{P3aU>nm?uU5o{XOv-b0-ZJH&~VVmNqzbJSskoNdpny3 zwTz%F*(A2WcI*P|n4t9qvS?TGIDfe2Ux4Z3lxuP)o2`efg~?gW815 z3__f-D>XS;7bGpD7Jk!T2=EWctRsrZMyR$}fjD~y2Q&(c4-cOR*niZ-$cXe!H7Nl- z82`yu(KKhwlqoXlHUlS^` zvy*J@Ckt6rtru!kH!3JT&=%Nh28kcfZvDP$cS#WuJ0KWruLTzd1TZN@dr}j+cGH7R z=pT;u_VtrQ_Ys&tm}0?<)pt7DU&St>cR@4IsZIip7PMyFvJ#X<*x@!{^n8pCE|Erc zD-A3ZJNs?KD*C#HEJLa)nC`A$yH;paUxl1b!}F>!38r?ipDXtL?~=U517QPu310^f z&&7{fh`xKUnmK>%@0Z4a7EoS}Z<*Jff!jzoaF#&|FwVN1C2`x_{8PD%IU+wpAr$@n z#JE=SBnTcrC;cjy!z_29(;A!Kgox@d2M~!-6wu+-)zttJD95uWldq0>f6=Rvu!afj zu9ely@UZvN#_}%s-JRz~(6mVL96zpj>C#7lKUsC@_t4DnJkB{S486EMC8bOp#pcgX-+(SC3<+d2_Akiw=w$Fo8kVN|kVU7`X=N 
zQ*#n_fAsf6sMDy*(3hE)2hGGClSNnHHzB(F6${_IflT}I#S8dYYh&qw)WMP040gf* zK=fj~Wh2PK|ARuayN{#4(@nDX5-T2Yj- z#bw`SK6w0%jg1Y)v(V7a&d!1+q2YUIN>tF85dJ)zoc1_V$RB-`4?1w{zpF2rjxt9p zZ7%$YMEesLrwOJLiM~;oiDL#q3e!=r<);&oO?|+}$XS{qAQXIjeNm!8`X@(4eFNW9 z=z$8Fp?VB5hmor@L=H?Iyko}#%E-uo-2m|dl5FEWVh@~x>w^cI|EXXUgOiO$@L*$Q zPT&HoX+B=whtmnY5pE#*D4m?VpvKj5@MSlo`m>C-Lz?>7{8pJeM=SQR9ke>uS zq@%43dJaCNo_vXlbaY#Jc?VR0bcZUM2n1L~7E zy{={-+|{(7p850l4osi0*D-Byi=S=N&jxJUl$~wtrDtb4~=osg5`t5(+Sc`gvMd_@9YAf~Q5+V1ssVL0;aM_V#OPYDK_nkfu;**g)$M#Vtw6x*%5|SIyTZ`-!w3AumGv zwMi&MPxK9oE)C+hw75!;seTmU_BIsG?X9J09pV{?E~y-b z_ZEnWh}jGc)84#+o5UCS4p?LO2f~Yd*RFFMkiQRNj$^W=wRN!AtOF*$yjx{EHuufN z0m5S16#o~Yl!;mcn?Ybez|z9s>hkg<4f>dP0$r8=;>GOT9NUJ^wv9y;51+?k1_KhKzW0gn=g7l9DI@>ZrU6X;s&8)3k6YMbU0|y+l`q%DmQzN6g zBXu+QVI->Qkb<+f4Gq2jI?XRG?jk_fe9!4GCv@u6^a4Vi9W?Af{m!X}uqliTn!!M{ zTF}8t=Pl2U(Zcww``dI7)+;}~NJ}e3>)w@YWiR3Wt2vrUm;HHG78uH-Dj@G@!zW%m zdsck^mr|EfkesTj4V0W&O=V?F8>NF$SLUY4-V2N7U5vIk--v^05b_s#bgJx!G*}=M zVL~{v0$k;q>gxWt)DAErCC63K^Ef)k^C?Mpoe6L z2D1f@1QliFzSnnEoSj!dMxqIWJHuHZ<#XF@`i2QcJ$ZR!XmrFLw1fe)2|{Ka=B2qL z&|9z7v$L~%Wn5fk8=s?S!RAYq85j*V8@2HsOc3I*z!1EkTaC!uDmBKw#s)20MY+nJ zv!@F*2b>WS7ss>*xB^nSz-Qey_wV-<-MQRI92jOedh|_c>1}#i1_p673s!woKz%kJ z9v;4Rqw^xBCZ6J~OO&<^3@{%(x{v7zzfLKINx6;h*#Gtd$mN&A-nuY1XN!1JRmEt} zJs4@vy@GTHObkf@=%@Q1oPBpcC#dV|hZ};l@4*oTHC_H{X9I@C=Zv=RO^ahpxYA*g zww-qi$@&Pv%fT5M8U)#sCwJfI+=u1{Qi6W9n_@B52uNfO(3Y0^y}=SsJo@BE9t(6H3n7!1g1u7cs&ps9l^`jmo%lLr)Mvr8&)yz zS1z^KFI2os#e1s_Sn1K%6~P?{iVk}NyZhTiC9hsRROVhzKRt8g(Z~q8Av50S9(8c& z2Y^wbVc1_+)YVP)^OM)rWqNaC@gVA`)z$*EFU)ns#?l2C!fx5w-d?LB6yE~T_9VRb zbi9%rVq)JB^NF#+$Q4g6adxXuKSbQZ?=aunVu_3bd-qDSIv+)c>+co@De^VXsb+(D z9xyC0)J*lKHq{uhwip_ln5?a?VqyXH5}&^qLnIJqYdtip#>U3p*#G&ONBbU=Dkwco zJw0m}*zKp7fnyAzPIH*@VJLzRI*r4T)Wgs|Mc}~c{t12l+xPFIS(iAe89$Erw;g~$ zF^Fak9GGsXJXlH06-=Brt{I#VR+iwgd{*R6AZY^(=3}efkJQ% zyLECL&6%eav+5H>%Dg97i;lRzKUpVY?LD|#oOmxDI5ZkvFJ!7AMGPJNfn6jaK=63q z`hLi(^4cdQn1^tvHNPiaHJ1`{sgisHjSpF3q;S8k{kMldq_V7n4MGD`O4RAJEtn*A`BT+hQ~4V1vh;I+CPB 
zK#-StI*5eXU3~BZLD!gFS~AgP+(m$MT~$MapO=@NH)7`(6s-TmEWWuUj3rA-R^VpP z96-ORJ(B}%EVJW}po;+l0BUckVI3L0J0`%K;J`zvcW${nAe*U%xw5j`ANIOw*#McN zO~k$IjkLKEm7gzA{cH@}B6JP7`d6GTW_5he(Gr1}>n3AjYg-6~c{s5Qo5FtV*crGX zl}Ck zqw9I;=|1nqpjaYVIwt*Enx7{+0#bmi$#uAZ@<7B7>xxJa`@FAAIGr3ChfhFxxm9j& z26F?hcp9+1!b9e-xci!DuWmG~GdD(#y*WN4fDj1(|OyPrJGY znm>ZqDJe!4Iqt~e1&^Co(=qo4^od5|Luwe4__hR9)l;a2XwG1np4Agh4}Z#XUgoPU z=iaRBcWGg(ZdSakXu$dc*C8mNheF=(s89lk0%?UJ3@0XBlx5ikxFC3oPcX>P@IA9bE&;E-8QxusX>HLv z&4-|Vz*L9{q|sH;Bh#31Ml!_~13zF6i_V*UkMR)wBSbw&`xjduuvdVO!PIsL3LvM+ zP5;VDH?$s@<8-;vgnWpxfmtQY^?*1=&iSE|AkHQ#TKwwO-T^~|5^NrEgGg<-AArwy zuFGMaeE?6em)d?#IEAtO@9piV2>1+FO`&EWP%+2?O$uioLoi}!A+w?}+X3_2KeUcm z7U&UY;B`anub;P6fzG+KumI*~6IQx*Z3#Ba>OkE^j@zKg%s7E}SukN13A0WqDg4Kd zy>(f$cJ}Pq9C5tf z`PPHb=oc_AU;PlH?wbZb1mUYLK4sm(N&T7d2k(S<1OW@S$1lyzyc`_9nnDmSi6`;1 z;l7xgnC`=#PE6b@-G{RO`T73$OPpFaz94>K}8B7aRvh)YNqIY16| z!^^7%Jq4fAcvRm-CN$iEeux$ z!3(ZX2mG(q0u~M*wIlx6=yY%%PM~!x+vX z21T2Ry%8U@alE8qF9FiY(2U*Ji!glcRpy>Y4R2E)0TCy`Kkp1*hug z<s%Tp1i!d5km(*h@2GA90NAX)U6-w^v71wZQ57{qF{td(zd@t4^e{#t;t(1JK*k z&m-RL@4dJi018eAY4fXMWJIq7MhYhv&#*cDC^QO$oRgJ>q=gq@@bo{00_-c#e(-2& z(qVl0KHu6nh~mgkS|YvR$ESOu6B92-@xEtZjU2GR2yAVdeWd>*EfxBhbmO)q*@pD6b1DRW!B4|)zJ zX8;jorHL6!GQ3dY{E4t3c8k_vjBUV+W%~xhIb}!@I~6$|k&INW0=$8@Mz;+3x^gDG950x_8?H*b`;k=u8qna2TZ=kffI$HQE<}%^ zZmUa|f}TI;PCDPRA_AqX=`$K*?-OZZwAE|&J~P(?G2wD{`*%Sae*-$$XXJ~GkDX+&``}M4SN;z zvV+ox7FaNSwQ-UXX7^~6l)7rXGs_TQLROX_b;sJ8g_&6ow#VUN5(3`SB;&DR(7$&} zz!2shNL}J*&aAAg#Kp%0RG;iBayGbkPlD)B@xptB5S~z5 z^px>(rVk(d8%JObhZF=&r0BuC3EEFIoFMx*PKGMLUe=hq3l{yqC_euKxCH~{(tvC* zIBtnf2{jf6?sp_B3(V^{{xUM2cr6teBK+HE_7Dvo6GSNnNWlyKTVB>LFgUlIc6~Qw zR7?~!{P@AJHL?|6B|_=^Mwe2^$;_fCzP)VgU%E|N{{8ocxQbQ^;v3@Z^*#ITW~#EptmflgzpT0qU{l*d z$xK+=|19A3K0im`o4uWNNW`X5TA_RVZ6I7yB2n}S=JaQX9}9BI|M|m7Z*mH-V2q40 zHKb&yxCahgrughdBrTJYZh>tNbOjeMYjKhmQlRlT+mlNzXGwrKkONve9|*y3<$syX zP_pmkUIG|J81cSvQOXV;%}woDBaK&1!RR#mZ=Bxo>u67qVsS}@LA=T16kaC;L(;BY z{s>irm9&`D)ORom?>~Ba;qGII7hnSMT9Mb659^p(SlBx{;%iTK$(3W?^j@vcE+~$m 
z%%p9GWCtHb7J~CM2Zr&L7%B{5iwGpKb#j7S?gpy?&|s+&{4j1K^?RP(Z-9Zs$jHcb z^6fG@0r*&14gX>Rz$!vrlu9gFQj37mG#~)gMwV4mORIJZF+prIva;ZSNY8I=YQi}7 zEmPBDEG&blRwy=jivj9+-Gg65nHcc-N!FCd9qt8NM?!!;v~0&%dS>Qa z2EQK+p__}}^)E##b35I%EO`5to*7(ZrN`zG_lG}9!|UMexqiI^ZPg6F0sqPY+I#%Ct>)F#69y3z99f}d5O@v`++#O^;o3Snj9wRc)Fpo)r(`?>h#o46DFh%H8D}@Gh|ogp|-eefG@H<3n1!=p zFX$rym?^|&z-7HOWygzyjE()yhCH|cKIa7twdkBJeiU(ui^oUnz#HiI<=Xd-j>)_O z905@G(a?cp7F^J;be;oLh`V&;NDU8*%Vk;RD_5F~?_-2L;FZeC#mj)B@iw-DOd>GY;r87y zBEwG(h+$@7@vXCy$)>EIgO_*W=g((pX|`w!peh7VbL;Nl$)^HqfS!U}*?{FB6;;rq zM;l;`J2u+H{2eN(N{>+{cdX4rUP4mbF{u$G}Pxz+Cys=FdWy zuJQ!khfD+^1U;IBq~z_CXclcVjsskHH^O zf`K?UCgFAyo~EUdh+L^6I#wY(H)!v25|ZDc0NXVF!cbQeLL4dzO3>}1x6Ookw?pso z$^)SOBJ|kM6jCyF4f(7d6NOdEeQ_chqj@=fP3%BDa5*!oZAACt1tiWONb!Ct%zuVm z+*eBYWR?C#>-x(Z&r&~?p+ux#2w5xbxW8LO-cb>~dYY4>N}S-it5^BqDnGCI81K^a z@bEy8@wbApzZx&h1xU!omKI3#c*3+Gev7*DE-(THtGj&v(2xQMVBG8Hzc=eBVTo~Z zn6MFNXWtP{yNP!)#fOLIV}1U3H$qD54!#|X9F9)T0@@?|_Jmp3>RCdwl+#&_?M z5&*3H_^r1RedhT2PR~tro*>9#RNg}E=4pP5NK5Maw+tB}*xxiJpl0ReF^i(-Mu3O{mcua2Vo5UfjtL5<5<9f2*S z+Q^@FcJngZ8#Gtr29WTeuf|mrbMx@v{!F>*33a^7m^H^rAR(HeA`yCB@54S z0RdQC`|vyP@q^q#t^_rPH(UBtKgT%K#{XjPO}u*U`~BZfMMX)XREi>Gh{%-;X)=UN zA(5e!%8-gEMWs|qrF0>eQYl>KLX<)SG9+aTQHVl{q)6&KKJLBG+GqXtS?l}($J%?X z`*xG>_w#uVuh(;Wz0Hw9r(eT{;!=-nH^YyAa_I`R%iyT{!t&DZkbbAyqlYJ+bDo01 zilyN9@3hLT?d)cz%#Hta&i*d%2a375`gTu2h_!5LX}RX{TJp(Zk5+|-symnKwqs;K zWPqyzA-}hm7t)cYdq+y>mG9h%&A^&hJcMj9I{CX1pjpfhI+ND7oVI>Uxd0BOz+ zKr8C);bGc#W!zMnK37-qFyw}IbWEk;ZuH>-V1I)0HKiqGW&OZ*1^vn1jxB*|1_?n? 
zz$)OBL(rJN?QprdjuL5yaD+gh#QTvO#tjq3si&=_Wsi!1RR`cOW%A@Y-XaDh+g%_3 z>eZD98-4`2Q7cb(b`%Q>{-4qzVT4-$`PP5x3aM^=_2FhcJZvTZM}aXFWF!4_-gat` z!bJI4EBEnE>> zw)6d`^^dLcMA*kUSLy=xMbwO%^&pxlqQqEz-Q?xdHyNZo{6M|^yQLS0^8tzoJDrRk z^$R)+aPdgkcKi}Do;;Yg{OVS~D#UO*G8IqdkCEHqMtI1m4p*A4y282ZrdK$gslHQK z_|UG8a3M+4oHNBF^LrR&cGJ`cARq&dfk+Z{z_ZGW$*QTZA3^$1%Yf)UV--fVRZZl< zVb)yZ9qu>0k>&shXQSPl<-WcmenJ3Py2^vjY%^3mp6l12;Q+z7vu&I4lL+5?;HzCa zNgm1qQw+P+XR57jIn)&bGKcfIxfRc!`w|=)zcZ!Gm43qq!Ty`97Rmsid`_>HH6p=^ z_C+hio0@tnQUSn-^NZKJXAuYj)l_xJQF41=wGKtX0vnrX)los(Lq-CPaAu!56K$$R zhKn~%9{Pre$jIv4M)n}o7@CL_6Xz|UUH~Q-Ns)p%rC#*6o zU_|MiQ%)lDx=zEJoibGS6ZayU!1Hy3N}u#-+jSV;5wl9X+=#mj#Hq#YLi;rb)Dpc^ zR1W(g%YB3VVa&mQ;L7*JyCWkMomCUpySkbVO*CbVm0sMIfI#rW$B*!Fo4w8UNBd#$ zS`Ay*`1NbIQ+N7)I5&paoA9WpRafI@9NGMA_kL*y%|3k&;-2Y0D)9S-p#IRT83VDn9ZplAZzOg z#7wD4{qS=C^cq#x1#g1CS4tQifjm)Df8Dpk;1Ia{LMg77&_}M$XCPg$8+dh(ntDDX zgTxpEn?0dIudeTa0dD!32!hR0f4oYY5BnCe(Xf=3Temp4>i3eAmZB>`E+Iz#F1gi# z(0VTz7kAAA@{$2owzRix(Q(dm&A@`f)jC23dvY+G*Yk_Jztw*D0D=Wqociy-w~&nh zlXa*WaQTz1YXV^YqXLF7GeU1qm6V6TLPl?{w~y?$rD zQYan~d=eL(ZY!M%@#tZpZ_jDwvGDQ!5i7j;iTLa|`w?};xMrZhUf!+1;bydE4qYu2 z3Jp6kzmv8!iu&L0FKF#6NxrSEHS*~CE0SZ7RG>&T%L^cGH& z)3r+%husyE z7Eb$nnFErd04GiCiFtbwbt232@{8v>&}+o!a!QUISy`xM{G!<*2z-%0_UU~wMfhTp zj=_7R7c4Y+T~pH!lQs9uuk&$TWz47p`In!0&Vjd@FmJdRZ~W;Kc#CjQzvvT85#S8r^<@Q_n3nmgbnbTL&7__$^J!{GJ&bX&m5-@?n1y< zvkpdkxs4xQTRoyR0ShDp$V^KSLOxyPuPiZfa?F^?lQv5TU*i2`ntLVtWpHJwg!#RY~eV2{pr)LF*ILg6C%f1d5imV6TI-PGw&c1pw}z2Hy*od z_Qfsp)J9M=y4C)%vImK9eEi@+fsJ*)Jk1ux&;@yv`HD(PYe>twueDBIl>!{h+OfW& zw$^l-oF%0lHxufr8y;V2`$d~`YFT;zCUCM@>J51Omniz24BjMtZZ;sot+WahuigJtA*Xs=5Co5DFJ<_kuwCKp9RbzP46 zye)%6+i^ppeRckn!GHMi!zc6;d(V!zPqZEFpFVUavGI>MUVV_yS?rM7sr_-~B9NJV zjHH^#ekOfer9(qPM*UsL;rI+D_4$iv11S1j{4fL)3R=W1LKY?`3v)JY-i!@_7A5xe zKA;Ogqo%1fT>V@KsD1_LqYIea#}?0148Kk{i#k$D;LbZX>)_3W%178}*RBPuUOfca zan9z`bh6w=vsfc=@Q_Wv<{n(Xb5xq7v>h)=yxcE4$*wfozabUF~K)cW&Mc%_oPTlc;AL zYJmhWK)Y;<;gq9{51q}Ki;vv@&L`UJt${Cg2m*+)yjqBut2S=qpx^?&8n!UXXI#Hd 
z?n;C53L1Y8AN=&egU0vw`XDIWe0-(cf}wwr)b!wi(pv3hUpOteq;`deivcDvDla^| zy)Rw5)bO~YGNsGWgTBvhBR%6^B{mj~4{lTkxV%Qw;)v1N#2c%h|m6@QYPPy_Pd%tkM zF!|vJi$R_Z)Zi%cEm4VacpY)K_-JmP$*g6J;b;lG+*#?Axw?qPYF)-)e*HWp zxC^2#j@FYnu=#c3gn4l+!~}$2RmU)|JXv{p0`=r465e=)?FbekefdKZiZk^nL?K!2 zogz^)58ZpmP31Ij8*hcpR#gz#Lqmf=$_J&T!oh=vZletiZ`xS9|7`y~;lfv-rERXN zx)K6)(=De)Ks0-JY|X+oAenB54jpoRs2mzW=+wexmlsftA}kWrs;QQCnhg7-N5nMj-*I{sFk8kGSDiwS|Y8bsuL zNOqE?4ub1czv_4Iw)ps5*63uB26jc1-z;S@h?!n{r(OgHE_FF{SH{8{<9l@_^6xu= z3AS>tRYEQE^bHMda6@KqSjAy}z-kgbYw&jU!R&mqJ~H`}mp1*~0-r`{xrN-0re$2; zp#7H1?7&Yi$QYe>>sOPF-_XY92~BJ-Wo5eAmn0@7UD8)o#G$uz`t;R#otk^#A{Cj; z9PiW#;gjER{QI8iANkOL=R_t@?W20IE(rmgE4}CA3EaGz8XD45j5n8vNCqcEQc@*7 z?Vve_azTzo|IDJFd%s~g#z5HTibC<-yT7<4KnKMupDHKi*fCn6Vvh70T0@kJq~+a2 zhX6o@3<&$B?_cJSl~DZBmdgX!FC;QdUMU@^!matb{ql(lE5021*4AN?!%s6pVUY-1 zfL?$bPI6CD+an(vC?Uj~1YBjL0@n&8J~eoU_xOxmBaZE{FJ{!iH!g+_gC$;LWy81; z@Cu6xPF`lq=NG6`mB`?3{Vb+^+`rFO?PnD7TNJdg;=%iGrgqK&l@^3AUw?PlSgGp! z{xX*8NiTsK{ebt`w)gUhfu5l2bSwz4NjJd;ZD94o9x>Kk3T;}C_&F$$e*XG}=vwqN z8Bg2Qcipe_g2F;aU|o6vr?0;1QJaaWqpqsYC|UKMCD}>Rp*VgN|NQ$CmCah6N1V({ zm)Kquywc?}uu6ODr;!r}?%EERh!_%zZNbu|2)i_U^vFQU0EdIOi74=u!-kI@y#YMt z<*X3)Da_~^SJ$oWZP{UgWvmk$Rdw~z3l4vl=-qoi>gSDHrqlTVy+NoqQF0-4a@2Dv zP31fvFrct5Yb!`%5W$VKOGr!V_;yrYe~>4;&AM8!~fxet-_>6pEAh71`O z^PpAIchDL%&)l;O8$R_?`1GZ*kv!Nc9;xMh`Wm|9tS=7E1jW8@xYjZCAI>)3C%tP` z=$hv8(UX^F4Vs-`i#m~D_Jh!=lr!-;k_1rDwdCGCJ*#$;oWFlNz!%^pmr>Nu8TAKt z^Q`96{ZCxKZe1HzFZ%zo-laK1_cna~OcP0d3q~G(wIe=?KeN5dvZ0-{ld1s0@X?>(`wyFxt*6RA2Ydw0tL6WJZQ3 zXe=n~o`Ewtd0Gt>`e?SW_AVFCXH*Et)>MK)E(52Ywat)BoY3t<2`DPIsrwQK+JwmXtlukHt|L`w?ws14-v=RVL(JDNIzW*otEbMJsb*#7-vQ3P}5b}7eYOPpl zsEguP1d0IR`ST9S-F9%uQoqr}wycF=pA-1{dx(Hv8pyHuv zTE+V;ZrKnBvVj6KZ5(htd09SIsbvoyK0MH2B?NK@69(IJ^^-@Bfa0`f!S1cMd_-Wy z-CMUz|Dk39x>Nn=@ScB%(}n*I_a9t#A(}qL#f8dWb6M}27AMQj@)rXq)W4>M@w%5qB8;g9MYOjc?U1lCOVz7sLbh|U!34WO6548>SByN`1*+#vo2 zx<^Ym!N>@KRkK43*8`6f)OX&LDXYMH;%l@eN%#5Xu9I(PWhK9?I+O6SyLZ=8tGe1< z;dzPY)6JWtMx#kB;RQX6kPrlF@ruivKC!2HI_LrGUc5EaQQEe3D^?JyID2<@0LOcq 
z6SoDLr~LT!3lW$V5CAm*Pl^|Nn7kIAKfK%I{+<*Z+}F0lTHRMVI#N&K9V~#*mRR8r*+TPg2$l*FVEeAB|4L~ z67>NuUB=w)Y;u_^4L3QdA;^AC$$bJF0Gkr~|4*ObQz?#Ma$rwauFAfJRCfqZLrQ>* zN%&a@TMJn)dP_^>-YRi#8xd3wv{*LF_WIH878wKWeSCcl344D_WiANv&R5=0QCaT5 zA1f(zl9q0sZ)=i~nF)orvi_L$@iFS%E}NuCg&jhZoAQAP*|Zd)X)d>;6iWw_H9&;n*-%zZ?|Ih_d3uUoRGyx z;d`nHB4i4YrG2S?QFujTs2ayehk9t*iC?iezW?|!VhoEVwpvuE(Ct&^bWi_sNEg#u zR8+UFUGJ2XI2+w4EF|s22Wv*FLwIMOiDU_*2u|r)X%vbwbI(sB$N}zX{L}qi&|C5s zErPU#WW4;CMJ?y^V{hG(?aO&+YipYs|+jt7hrfjhc-wXfYmS;|_`0AqdF z2k-}L8u#%{n}yxEISRrKq;&AX09$~Tt1hZ1YdG)h6v>(|DVd!%Zp|ZIA&=`L;{o(a z1TVM^I;4;BrjryI0|uODqK8*80j#W_WtY}3+aHXxkksTZFGeQ;^026o7#@Nz!_6N6 zW0Q1g-K$)+m%XSebl#7;YdYAutE{Z5swxXjQqzEg54(DJ`$m0vqK+@Xia(SKbG|dS$XUBZL_XJEbgm} zJ`j0amoQgfM(EJq*qX>}yYuYXVl2ZVOCyF5pN&G}gRNZ?2?70td5Xtu(;NI zJ!8hX%*<_6;TYtgx6e)$Q6Nxp;kIKe3?t7#T8*Tu44S+2^gxlcVe8fJ%RIi|BO98i z)%-3uu4o$?HdDkdwa!0GXF21@;ndz<1@`YwN-v7#8zd#+cmCMWP)WiC@@{42z4>qY z+*;1pD9>EG2yz{&>2(b<2e=>iK5^x6>F;q*6h-y%W{}YPCC^ZAQ@D#lENXl2GD$Jd z5kPSg&5t+rV(;G3vJEPI`yRq=1?SK4N7GRU;C`yIPmdmSjhkYA(5E7v`hL_G#Ds`Bz%ZP|b?4O85AkZ&Tg&sXoBgu=lc zANJR4Bqjinka*wqErIt@eo(#(vUL+^Bc4BRHR-1_yYXAi93hZwkq1&MrXz(3Nf~NW z_4cjBNME24nYNrejr=Qu0D58jZ{Z&8#!v}?v+*14x(Rl59tj8n67dn9?!Ad=f)Ck$ zek|=T#~=NcNTFRR8cN)A5KDs%E;Z=jH;6PO`U-*GUxB@r|iTuF1!>cZBDO@R%5B6 z`X~6`ynnwzCUOSk9`Zk!pX-r5Sy`;zbhNc4R+H()*Ns1T&{6kzclI^v)WYx~yvr~? 
zRHJ3bB>6-ucS{nkF-gF!>~K?A@9TRRnRLaeDJgTwsUL~t`D&oU-d`LT{B!3oUAl(a z{@js~1_tvv-z+So1VZYhBqha&l&}5X1;pI0rVj3-rj{Qy0*sv3FFz@8S;?%1XXmF` zSiBXl)vMd21mxXVutYIeLTookyow8};|?59TlqYe{RelTnIt?M+QJVN1W-qXE?vxC z9d;d~Uc6x1BqQjsDTCrhtuVmw=C*b%{_cqIa0vk%hq4VZ|GI@{m~RL~q2;Y|eB01) z4Sl3j(Kv68D7A~Z+zj;WaM@w>{6T(2gbV_<7rg1y93%O#%c#wJ5)=8UYaJY7X?~MD z%g=?1NzYnZ=>Z+>;^{eVW2pAoEF4V8h)e?7ow7c$9BR8%!illoS(Kmn(Yy`IC#r)0 zEYV?zH4Eg+ixG(A`tpWdzkIVpe7jY$jrls z=w5>>KchuNCl#*KaWOL!4-)YEcuWweZ2<<}DqWkRU@%$!Z-RNa4)9PTaC><_rF>=b zj1Go2|!3I|U7vO(EVTzE~p&T-3+565jKdsWZjs#MQmoW%A6vflPZm@|97eEt-7 z>H>{~?ZW?H*TmoXBi|W7Fjc`Hgia8CD257uG{$^RM%)kz(~+m{B{`I{^xDh%uD%3VgtIcN0MZmbD0ZjRrfG~@|4tyoyem6mbt9FhZbZyqh#Bz4qO zto@e8M)Qsb)Sz77)XHm^Wbw~G&)>!K2LjvG60}WB0wD1y>G_96*#3nU{(*BD8BBmn z`fKezEN_0w&2dzeEU1;I53jY<*~6F?i=j$m2?C=>UM(XN#^suP*WRMJ{w2i3u5kUj z{0(#5uWf-KD$cm<`+ex;%BN4gx!*myZ$jn8D^!%*-?2k)iz@ygq*;`;?G!7V=hT>d z1K$ajlr1!MgsXG1GRVjZ9_;<2sO?Y?nAAK7=NAIWB}q$~nyR<>Ts3aG^lT^%{0#YT z5fbxb;dwR3ZJ3Ovmgu)}U;FFp$Czqy)_J}^raNQ^s5PcXCLAE})=`v&#<`-Ih7X%_ z3L@Fq_+@diybu!~Z=|@!Z{~kY;hg%g50I0cK}7k&orF3hFl+UOQ|Y_m#IM)5e`wOS z8Cr^D_Y^xUTv|}2pxP90=Rk@SQKU46lOu;xo>2*qlCn$c+(7#ut*s~leB=!h99ORN zoghYO_i#T4iYJ)z%OvD`H?~%2Aa>R{T8bPpoe%P)S<`4n|uCJFpZ>EXPN0bdqCp6`o2>_zI#1ULaIN!wXS3+ zaS9amNJTDBw8MSv-LM=jO{Jl|eCv=Q&OCky=&JoI1;NCm16&7lzR__999_4}WpVhR z4{6E?MLjff$pzVEcjcAT)x|n0Et>%ru5}VJJ$m=9IdsAe^*5!P<@ncdFhJU%Mf6Kf zYuE15iKbiNyd(!mdn~HwYrr!oQ4<#AAV<+DoXulEzDZp-j^qN^V|&Y~*x@fA<4+T3 zcBEA$V|fRiU6~hVFy}&iK4-ft4Q=#k$zFg03RvX`gZ$ae5kH(V7oSa{XX?c0lOGm8*r8^u@ zh=P*sf=HTA@~EuLY{m5cv88|YG@dw-Q#)vZP(2a82=v9#qhbOe9Qa`AD6H$Nxn^F@ z$^q$o2S!CxrRtGwq*2W^1_=Xw?;$Z@^k}6kqluexTea%O(zjKYR?14tXBqBl&xzQ% zlQA~P4ituM`Pq0RJp49lJr2%@ED3XV!g=(fB3#H(o!>US6%=1iZWkoDNE94D9=`FT zUm91^>wsv4mWvmw6|O>;LlMGT488}3rWigMXPCnAwy{(JNJlno-h7owbX35tkN9{* zX$4)ER~5z7@pmr+$%4&)hZsW`MW6C6!8c*MXtn{d;Mf3D-WL~lo?7=R9y1~`X`=y# z9ZxFyr6>0IWKz6V)k9rknR*@aXF*VTI_{ZkG6-BkLc%(P^MZ$$m(x`(OsM3I(s4sH zyi{IAN<91noeIU)ix=mo*t$C`FC;3Nq$su4e-O%Zbd9cHgku3xbf1b>udb3PT-{J` 
z`!9J*iZ5T^@xw=~<&kd9r{(ANfeK-{zgo61ALIh;1ht?bVsX;88%E|m)o<;RusuC9 zbBZrCu^=$iXi%&r$in1(`TjM3MW)!i(h%bbJp@xbyFS)0yZh%f1S~6f19&0k;0{1w|*ZQEPg#hSIKq-0Qz)p{E!;8Nn4EyC(P$Z6}p!byMR=e z<1qLhi;g#=@pa?TKe4$WbO49%t-4%8T}KMm8{jq2Yw+RX-c(T}6*&N-a_AK|s3>ws zaT@Xd|D(xaHc{e>p}$`@b+dI+{QMP1_6b_}q8m4mzLyf62xrke|M(+LU1TZ@m#O@G z)}+(1v?^;SOg|4ER#Z&H1%0aPRUQ_BbZGeA&xgjPx&Qvzb{cuc zXpr|>>a>S4JYrj3U}eRJf93rd4#NpHAiI+8n}Tg>?qeeh)Y^OEp&`T$fbg%T&hjTK zAd0x8c#G3=X23FQ8w`fx;mDc5RHkG8K>_(~Gua#mLPMwWN!GRg`qf8ORh+gW!~n4q zi#K%d$L8*=v%)7%dV>ZHGBQ&A*icv3gi){j>D}}@Zp;vhT2fXyme&UxG2q7xC|t~T z8)!dE+BJ+NdWWUkl<6%U(fu4(EyW$RU!F56_mq^^?yAbZf%^+5ym>a0*a4KAlqmSg zs2;#7#>^cvdNi@%UAlKCK~rQhs3fag)P{lg@a(BfPni?|#P}lwqYwb$f+|6rh|64U z9AmT)^klHdn=Xg^A>|s_B|TE>fYJ~)AxdMwuLwyZk2Gs=dM7_dU5~Lzm6b`ek5LXX z{lKiBPz_MJaN{&&Qlq?os(CB z4s+XZs~@mp7K?eq>zfHYK3Zb5dgOhW_FGpn!8K!NP*4GWM>bbC?CTyR|EuFU`hrRmIqTl)!oL7e`) zGJa8he{fX@RQ}4BVN)ty-P}Uf{<1V5SQ3*)f&~yllDm_$u1O2~N7aA-hX>m;FVUfG z{OR)5{1O(ju0;J0qCveKBWeJ6lX(Muv0bzEMpts_ zcjpX+msj}sTMXo+VTX2XxUWUqj{cg@UuS(og{WVW9H?4F!5EjPzJUQ(j7Vu5JorNE zPSky(BrjYb$HZ&nM!PHz2Y>%bPa?8};azPrqF7J)1wr!Be(;7OG%K^sec8uy>#bk7 zF6ksd1$k9ytpV$C<6Y6z88wQd1yMmZ9DFqHI2{5{nv*P$Dbb#pBCnCr8 zqLl(V1=@k9;Xs|Q+pFmcp9+&nX8{`DQ7duw{d@NqE_9WBaNxk=BpI?dF$NO?@(97W zC?A8kR8{>qHMKD0h77VGp03H(AA*?8uQ7bpFSuy#As962g8>ZNre@37d%c@{eV|#x zi6wdk;HBwZQFQjWMMim-AD6^rn9pYn-Qbk=2j>;cvK?1%CUQOP7dY=4IMn}L@R8?B zhIx!Xdq~Iw+bC_hoRtN_(6=y9LQMMKFOfaw;AR^8!6wGM}SVo&_|! 
z8di5PSMLj#z$wX#ofts9j=TjaLG`Ni?@+Opt#^VYyyc1F6Iw`KUOwg!mOcI(6i8?e zTMbj#)0bM*0!-yR-KYM=e`^8M^lx{VO`A52RYZVw%pt&Q7#$w6d>=#k>0#2s@TFOb zLg1NtJn-Ud*Ps8)8;(hq6F-URTcz~qzgc4|`>(zwPVxOx42{NAR0WIa!w+LRDJlwE z&=ejW4ch&A&Xbx_nno#T*j?w(f8&Zn+!-?)&WYJk(2~iHp>!;)q4D-w@U$A^y=sxD zt(z`g*9V@E;4$hT%iUll0tvdJ#&+8(H#eqz41Y6-0eoAzA9yVrtliEkDk@@vd3MpQ z&v<7HWk~k@N9xuGw8Ot*A~Z{@Jl;;Hwi_1Eh|%m)?G$UvnV; zvV!12-^7#EF#~1#z!Lg>e0PBa2Lvz=tdD(hU>UQD>%2EkupfEK#Gv9fwR|Q#u)BK$ zNqpItFDvXjEG^ZwufDV*{&6UC$k^Q6i6k|Jp6B7UeErI>S|f}kBIXJ2ABxR?dG>}Q z*ecYoI7ps4wBuD)mrFVNxoIxW&QM)i4LyORISXvZ#Wr=-aV7)iZCQ|B`j27{J?aq< zdJh+3)|W#mK8Z+o(S5q-jWh}uh=JHchs3Ct_vd*a@gSI-J2dXTmhDujOi-tA5Fe9i z#OE%yZoRK)YxcDWu&_iDNZHI}4e6OakDorJF+i?^UKO@n3}`S#Kn%TpUbFL9qAS)j zdL77*O+9f*QxZiW`j)0PJ{ZjdqYA+8>`12p-#T#M8NqMVG7+yY=#LG&=HMY|U~qoK zGRtL4mr@0Ryx^IS;EMfSqow-9`q6_2n9isDvdD8pQ&FxHCq|qS`(i;n-;yPC%V#?* zoSbw(_=}3}-{1YVaq%16A&`C)*5C(xv%DoG$&8Hgh5t34Xi#D0ayqYR1*$NZW(?_lo8YN6g=7zqsD|8$;-lt7tuMo`isN zlq;TtRrP0MJlku?kOMp3u-g74gj|0SankgO-k*~`NHm%hMYf+O!yP%O&f<~7hg-4m zh=R3SINbb*b?iNVekeB>TID<&rT6l!f$(|~!ZI7p+SiXvH*%vA+!14`BbZWz0ScmS z2)kNEmr}cF7l@}g0Sxx!=}quQ50WTs*QK(t#*FDz_!lY5=nANmI2K4bSifFDPHqB% zN`aIm$XZbg1GK^E#zSY&;yVtItS|4b~o>X*>0{x~M0LWSgl8R0$#rtx{A$i}TEA+uM7i6rlEw6j- zo_&msg$`LXd#L4&AWF1S^%C;OH0M?(zHAl#8-sra<}ySZQ?@0 z0dyotP>33X{%8t_%<|y zd-r|30)K?@P&M$8r0-GgvxObj8p~~u_M_9GIAlknrKotDHJH&b%%H+8$=~NmAiCp( zaL|hwNTtUS)Kf#Fji#HUg=q(ri}DeV_e-qOolyV1+vah92x|FJ()l^s+`Duef&Yiy zu1Z2mhF@U=L(TJ&t&ejU6#wQU0HI!aDIv~MJX0_Y2$8nB<3Q|F1KI-qK2AB({^RR_%dL$K#o$T!`nb;-|Fz@BU`5^{3J97<~#y(}yqDy=-MvGs+ z-f2jr<)FAr42^=_17PI5NigyZ+EUC*;Ob3&D2y}Ru%VH61bY4|$qd~9vJ$lCw?`~o zm%_D%qM6gmR_=o&9wiPI^(YkxAKu@gSzEIs@I}B^K7ITc1^ilcJj-laetyn-5Sd!3 zgUuy0G$^2tZmeY*heY0n3VKbdRlX{9((vfpDpCW%ovwAX_vwp^RI%yyaJG6KF2pZC z&R%Pm6mac*pKt5F+RY9B(qnif34tTVGJtSckTbM^>*yyoU@z5|dVAlb!^_*7$-emd zfqx|ht(#kAsq_$#pj6TLWd;h(i9w23>jzOEDb6QeC_RJM4`%?0Ii$C`E-9(hO4)y&|;4PK$|%;Ot&xZB3V@oH%=0Z!@Yzi~&=hS|Mi? 
zXA08%jXRy?hq5&|*n-}}q63~H`{L|3$--=ll#TZItTC)g->4{o&SZScMe<$b&H$Q< z^ET3ZIrlR{at@DeJ{G4577TjPsPK4D7-k`jULe6g2jEpskd2ao{RQ;@p>ZbiK$Z0E zzkdASrERoq3HaLn+m8;_Yd3*~8#aj0zSqKFoI0Q(ga>LHBe&pEdY>>?rK+Ze*yOCu zpVh}ZYrq5}19|yU)50M*k?xT{J}-5uwsE?~%nchxZ$4~01*H=h4HqRI%paf5UgIE& zxomyS0Cm2;2Plu|3T=GMwd%qp$R-zuOM3;ILiNda?_QbX?M@c8k{+J@12SK~eFIRl zoITr6U*Csm-N!C;$V~1ZoATr`cpex{p_o{S;=tF}H+>5&HN0o?fUf(+XnZ_XgcGas z{Y4@l1;4aS$K-1j7O8KbNs7mO59Lb|TQ-gcip|W=DDD1ntjkUracC3aC8z8zDZ~Z5 z#Ba*=1=uvXe;Eet*QW@jM0N~3g7E$FE(R9SJu$H81EvwW48)4I)G>MKWijR{`ab^D zs>yB{I26!ko+DViD@i|maqYt{_?HiQBhfWEG79VgWxnFAb{tRtui>8Xxq zktujFC<7&OXk$!byb<}3^RR0)yV)Q{_jj-BZ)cZ{D&@xiDasoIU7PR-1} z@$bA;;PZIRmeTuOp5n+NbLc~DZ5HXD@Rg9rm~oVQ_jWuj=NP&!kMjX^SG=9a=31gA zW2Ewi_2p#oISkOr%elOC!&4`JH|zzxSIrZyq0kA~8*-UUg~4%FZl2<`N?Q z>EzpuXo`u9h=eX7*>mkO)!^S@A52S;3IgKLVqN!r%~L|bV|viqV~^Qo6On^c zb}Oo?vQ3`zXgqDVcpHr$4;!_`*H=Pu2!=uSuzg&*bFv{|#<-k3zf zcCjReE8N;QQ68}kwKW-K;}iS#?0Fu~3-@02@DOv@vCG&7TY+AbxJ_)GrB*TomGg-Z zNB-=exnY-)=KFX3)TeYYjB?MH;*?a_2KA_^{bSMFiurow4Y~gMvo@Y zZ{KSXYrBHIe27FN9R$;O2uwVAa;?36^08w@XZkNXWszETI5m}SVa5?_t~%1CiN)wL zXWsEg=m?kr-8E!1+}sb}GUlGf>6QgY_8A+Uy)P8n1muinDE~meG|Y}PL>|6QsB%H2 z+0}u^lHc~-`q<(jlR04SsCN4sgMcBa+&*P4rZf~gV-Yfx)HX=;k5$DXmY6u}*uxaZ zW^kNzmG2$AdXd`VS9UftcK6=;R`?rDJ=P@-%DJ4`OP5xZlqj4W(zV{fLnd;&;9Y@5 zApgT2?VQ@EKURq4|GNs2$>e8#?GXQjlv>v7Div&m%0>FAT9cQf4u&a_InnWl=!#QC+6n1!M|l6yFLXL zf&2s5KCmFV-X|MW`TLNW(BpR58wT$0y%iTQy*hsmxDr8MZ9_D-G=<>W_3M{kaE(O( z7TQ0fQ=#6K-^b|E&;anSyPC-V*#jnsa6o0F`q507o zJopma)1>YXI6EQ9@Fr5Ww5+hMWMV+bs>f>Uz-XyU6EW;itHB`BQGHzPDTRulZbY4q93QS7}w1pQccZ7_|sF$n`WZJz?48?=uq&UmNAw&&(qnT zU?kLmog_t(y6VU4G5MPM2$2}6D8#*G63wsV8_O|4lfKGQU)(QlnFK?g&!(i9Sy`Pj zkr10v4m>`AZRbcpcq$zhv_U7{Zdcky_|@`UE&$9qZHW!fGv%5|q@?M_oIj(hB=*8f zmk{Z_x6LCGFLd=D@@P@Y3HFYT4_}xepaMwXF|Qu;oce{%yR4tCZ7k@q^F8%ir@hQ$ zM@5a)VdK55?fgTS5O|>C0Kui1Ym_hU@vWl4p}XH+1aTk)Qpt}``Rw1%ii(;i2tfyGUORFHJ*O#+UOi?BAu^YqRbeWF zPQbw4?-^967P5Q|Hc6eT~%*yw)7Ga>u!Ijoeijz)FvofpnuAsr@IMrint3 
zYd2wmUtRequlbCW(+l6dy8%2_{(B9=6V~4Ar?Des)c#uYcS1r%UhU`3r~oIA#Csqn z?|R-IEge1?s+LjI@(-9}EGJYgDKf_4U2QaO9OGEb$P*RFz3Cb?L7T%v5bBS2x-*>` z))Y>RyxTZN5t*E;2qcFNeGb@3OuR&DNNVbUmiYAMV(Jbn#;( zwQ9|S>~BzLiH^YgUB=A+c?(Q?TzU9<*6DO&ua8|$&=qc!+ z&e`Wnis1a=0PW$!+t_$CW&Aq>$_gqfG}bOV#qDx4y$tAmN4~a{AQrrI_yuQI3j!b= zB8^dPEpPQb?0PpY7N{$;DUFB?L>Hl?x4FLJ*-I@pE8TtZu1C?&OUKywO1?5903fw>l)AjMz_(r`VNMyz4#(#weUQ6|K}`q8tX8U7Fxo6- zE($Fk%*})~QjSw$Er5c3qg5CI!LH*Aluw=H43Q(UH1!#SgGP?8t?AU)VB+AVowu9P zfmBjba#_zg_VB>A+`_^_)`SR$gikD1J0~HPPjikHgU3h197L@Gb3@D^Bd{~~y&8o| zGqfR`E#{h&Mr|Ta{`PNE{pm&aEn6k~nF)6iBJhI;4*bSo>39xJ>1)EqmMv%UF~bf} zV7M3#p2nfB1|oFkgshz41HB2_ntdnF0`n{6`!i4XxeJ;T2tYvJraEaX8-o4J_er-7 z8E0W)&!S^6U!+`5H|`Sx)tFf#S_M(E^0XXJ|J$2FmS8f?+a5?_M=<%06KNeJF2O!b zgqVV8znP1X*|3$s(9cX{yLKh&mw9UPiC@PiXJu!fUXs~$x^f-<(WHTKw+YiDEe}oY zHUf6ZYP_s*og4?o8Bd#rIW=8U-PWxIa#KGmY4Jr`e zl=mWzx%sGmu2Tmp)0rnLVLpNb+{KIx*Q7W=1%$KbuNov?d9fyErZ~8T|9zTa8O~-^ zH8o@S>bqvx0n%UEU>$V((s1t|6h(+XSWcn$0L9tqc`u9ci8Dt7LFA(V(eJ+}V|$*1 z)DhYx2zKo~+!UqHv~`qjS5%1l*z{-5o&lWRuJV|2?4g)atE?O%&ny^}qkl#JmVGJ8 zWj=fe{}*p9wBnd2hug!Fl9FioIYify{J)IW-)_2bUe#_fI=Z?G$`=!lDj|Gw(6wsT z4T+9i?Zcf`-i9b+Tg!-gi2;Uda6`PUsi`EenJo-ZLnDm`>uzfx?p>B;&D*zj7hHSz z9-Ejr+kN$FvuSc^4{zPlH8P5Kh@nyj&SNmmlP5M8|NW?^U#KSJRqcIWUysknN=FIp z4!D2cUcEYvyqJ;U@WUE$%}zs&9Uh~_X`I1kO505XZ9*|Xo2MkFcJ6eO=C*rU5uEIn zcLNZ)8QkV-FcXcK;hzvQg}~iXlJ-va#&#>^p&G0(8P(OAULz>sPM^s`uw@u zFQ!bzKYfLhJMi=pE7c0=8C1<*){YcJ!4UjD&uzvL{I*OcM1jAOs88_Uj|(?Q<;efd zTj5t#S`@-3esIjouUzU7i`4G&m*8jo(X@%T&>RqC;#xFzBqgmtp<0-8jmwz}pAZ#+ z7%5a_DVh(-1tlef`}T*dUFx>Gm-SVljV@BR7FfNod*MK$85ShrX# zuZD(Cu3z8rdPLU{r9OSg$LM+x(<~9d(^hC9>;%P@n14@+vW|Rs`9wlzLi^Ze?*O7D z-egxqgeY}Zv3WfkRE!=1?u_GzeHoFq@m)=g%c@nQmYyOOr{`$@Vbb1ts1E;-6a%0dpHfMM#DC7B8#!@eZW6{ z*Yj;D++-UamtN-(?dqp{nO`u z>?97Ez#*R9nT;Yw0WI45>Dk^e1iJxbf>B4dtLoC6C!vo8K0A5cxNArwCNi}KBKdRpE5{fZeEOyZ z49J~K)YjwwFs592L<5ZlF{Lj*m8lOl#*(0Wr`n%AJDQ?!Y>k6H4k8gO|5KbX@@E5y z4yTkI?gQ=ehdp}w?w1I*)G6>i9N&p-SZDR9LFiwT0%@<9c!LZLR+XRVH2N96qPCa@ 
znP{pdZXiSUIG@jM*)kfXOKvYh+aw)68`9^8j)F>qr7}{}uX5i&-qi0dsva>s+6@cy zC_6|h5sjh+fnWcihT-Tc%6A-iRpBUGE|nfJoj1D9zrAho+_^E@%57tvJmyTN(MWyi zsA{z7E~LEB{xXHZ5Ff)8FEV)gNV%eLD%Ih4+n0Iqh-C7|4+g5l*+V z=ei5=2Rcfq4=;kJX?K#Mq(^_Zk%dkpjaN zg0$&V@C?6LU?wRg1?t%&Kmn+qJ_@SW@nnQ@Rh`&ytYfSJjCIGMf%nQ_q|2mWl)!%~ z=6_U+Vm>M7w?e;3k@ZYjV1QS*u+z1zR-73Kk7AF)hQp$BC8niSb6&^#gHZ7{S)^$- zI0eyOnLf?Dp~09CR2FEmy^6tn7J~$P?JWGzXLA2hhK4G?6U0~mLA0VI>e`^sD0}kc z6%7UV6v|n84Jx~Il@MbK1AEX&q7R&b!MXWW0O;8TqL5E~8EN~5?w?PNL2`0nJ>|HA zZL72Z0>;P^t>(>qcK{PsYgNpQeV$%k35IU!BFc#X{V%3uEP1Z=p0(-J*pcD={_BFj zI;4`;?*IEQ>)T5r7U=$MxaW>_+0j|6Gt;K{KL0z+(#Q4j@(ZZOT#x*n_O9$`%(CWf z*XlR>2NX9Pc)I1x%(0Iie@krX*CKZQIs5C!cdI)BS74hQzjeIqdhW3$h z`r`3pWw(A8&_ztL8xhdD#T7AnNfUhoz-C z5eGst0zk~!fp3gE%tAalCLmT*6Wd#db30S%P)SlNU)z6C5Kyz6|9l;MJJfJ(FdHX) zYAV=7cWIOdKvU&cXSVj@YPaEqFIr^c^F`~4%BXv$R{yO9FqZKhul=;=kEGLwnikU` zx6>QYxPjS;si#1rPnEtN>PRjS9a+qm4{etcNVHrY3>wsU&zU6jxtJWwXxC=e#chPl|Hj;NxHp6_akriP>LBGuzKW5 zkSmuLN&8=Ih93I%VpwFf`*$Sj-KOKI18TuPb*Nl(^OM?`n*W+D%5^{9K=S>TyN~Ay z>^8xamS3Kr7>oo2p9lhj8Bn1z4DB%04l+G{F-V2^XAAA@4lW(R2dCrb)!+IscFy`v z1%`LGRpm+WAYpT(KWtg9q4D9MH6<^tYBJx{aY(D z;;jZfQ!DP!&0Q}0U;r?I3Kfq)tP}otgQCu6xl2AQV-T0J@{OXRW2vd%S}Fq=85-Li zAf=wBy1J=!(M!||Q=*x&$^qI_MWydwYaY?P_+NQwtg=f?V0Unk5R5id%WnDn3cUyd z6&jlk-1nftG`#<45&d?N_Uqi^448&Z#Xo!JxI+8+F>5d@D~n@|A|ZL~VoMwlc%?Vo z3`Zbfd#=>KC@{g{W~xxc}X7l zMqmK)9w%Pw%yIW0LYQu6S4hZ|O3$r#E&SbOX7GipdprQ@^j@mK%nZU#cI{Kxw*S?J z^5{-x_=#f@OVK+(|D_}YYK?48lb&L3#wpu)Y7_b^ zGOPjgV|R^JkAg2K*cVQ7H*MN>7CT(8=F45}I=poj=3EbP?GI%Z%Q2|Wuczn$8b=4Q z{^^N(5%%(PRin4W3xj_8_NFU76>v>6t$gY1+3Ru2Uwc||h>z~^N=$SHYY}Dn^YHR{N2BpZ^7UsJ^PabfL1XgFXKtbXaUlE+w z64&Hb>(vUM$tm;tjxd>2LL3uq_g7|6VmLZG-<%bSfT30|ZgY?pnKBFK&BG8~8n+dg zgz}9L#aF|uzwcmk7G1wSSNCtROc;A9I&?WaK`zKM(Cbm)Ze~*nrl8ua2G=o5I5?8c zg8928o_AYT6tYH1w?kp_FwTnhyuR{U1LNH7tiN|fSxhZ z!DD<`(tkZ1GZN)g>*FZf_{-cWib_gvGUw)o9-GR%cQFh5$tj5h4P6E<&!0JSEg&vH z{Q(-|y))j8zW-`T80r^=-tH~ew?6>(%e``CNUyGEHFRUQP`z`hz7E=hl#@u|*!`>K z9$!Gh0QcTX=cRu}c?_p81U|C0Lq>D&+`A{XVX$0Dtf++x 
zMVY*|MQD?m48g0jxYA+#2Ff1BxIBIO2UcOON`xJjc`9ky4#o>!R?ZT-6-FzfQ=W-cARY<{o|- zl5T58$E~okv3bHRcx&x`WQ26(RRv7>V?a(kKmZi%p8G+;ew-cp@rjA2j~;E7JK`Mn zHZHBql{RJY*>gKUqdmM{mi3NE9k6Zw2~Nw}PoGjXOQ5O=B4KvlO!1`CJjvX4rRJ=b z7sKSStfui`*!?qk;MlS#OLO=x`XWO zaf@B}u=son5Jrau{jNu#ax5(^X8#==D3Su3-)C^IOXQ~E8N;R_@>~}#JSr-x0Vw9h zx3rB_zR6Y2$-o~a4vb<12CHOkc)3^~iL|%cTsw@b{a09Q>juP0f+Nc(MYY_`Nne$h zODjNg&wbkmT^$-7^r|tH?@1Gl#D_>T!U2jhrF6Q>ib5`xd-uA1kpEYYaWS+3TlAj0 zTOm<4Q>@-LqiSkg^*!g-(PXBoL-j#}5T>E3tb7E*cx|}P{ebxnSigS!pl$ZADbYl^ z#>*@&wpu>(dNg^D=(n)K!N*k=Wp)-!acRGL)hDWux3i_qMOk`>c?x!W&hW!-5x;r?!Eu*~k=dX*E1sytaPDxE zBE#_D`B2DF$1+}wvv+&y71#OZh<4~RU?WZ4$DykMRYJYQGrxD|PLPe;wm6EXk+$jf z2tW`^7Ck>)ljkni*qr>O19QoiK@WGQVKSpp= zadNlcIGDSEOKlD@D4mau0L}BsGTq(>w6~@1nN;slbpP*&%?Y&W@bG&HL;kn*^y%R@ zRuWDCa83f3QOS9nF4DoTYinWFw~csuy=^7j7}gqfB;Uj_Qw%nTB)SHyMO1f`(R_!@ zc63ZwA6O|f9ea#|0w72y!PI>Tj0n=K1B~}MJLT=bj;|x-1yewG4s6Q3EqbyOTwlH6 zTQOfwY!;X*nK^f9zSC$LHA6~8$|WfQQ3&$A6t(_Dw=IO!%nOv@zjHS5rS$zNs%b(? zl`fy%{$7uRpJ+D}PXHs=*QWT3G(3qZkwJ7q^-s@MasqA9RoYL^iM4L8M`bW$tfzp= zQhwa%&XieC*<&4^~IFR_W(TlurZOJFo4K^qui$lkqaub*{)$6zL3AUx&b zUIa?IP z@L93nlwCbDM_J~u`;1Hik}$VuUHS}i$_A@`HLhFt1CzF029BXOvd4n7XZA4yNa8M8 zGmHqtX!+gJZICm5NDTOjsM*HUFE+M9y=(_=}6zbCrSw5{XQ7x7-xUhfp%=ii&8nXi0D1z1uM( z?C3fk025AtB!2@Du~-LxvWJ`FKt?YD@55P3X|5i4BshyFdCQq+cAxLhKmQ=u)2|lJ>qXXZ{UVY_Lc4bz#I)cHVB~9 zP4b&o9}&JbSG%^L8Mu1%=A*e@N#q$5dR>Kux7LGCeNeNE;?ul4lDo%#Th3o42cMbDl;XByA9C5I;o-M4u*B~U08bo+OLZ^bY@ zJsS2Pzk?lf4_&GCypP5fAZxI~-}t3`z=lvsE+02{Gy}?gzEsgo*iV3*H=X3mVMWX? 
zdVbG|W0|=G!imgLmx|K>U1VthcwHkEBBhQ0iME{_5BM?Iye*|~U(w}p!vFeP>Ij>N z-3JKtoq~$r0~4iF8!Q_%``FsmtF`-W-ll-RAJO7*w90|N238q!=*XP->X2ttzubQ~A36(5@pxusVZUpodR=H|%($ zLd!_m{>N#1mX#hB76z5{ZHexyZ@dsr$ua7)Y;AqSgKzWa6KBpCju{iVdv{Id(hdS3 zcbqavzKHYhD0TkXDWCqvgM(Er-_4}z=sN0YJ^`^XFWJ#$W9Oq~E1GizTZSNPd8(K- z(uTz_GAE=2?VdunZp*Sfv>#E!k$1X?JmaT_f8T6+r&JS)5QRJHKkU*(9i8e+FP>Jr zeBqOQN$eoVpHh#~jzu^gmn2)@+maERol6q7e5o>H(?cO$J*&rA%e7E7Mp~$1r?W3` z@T#k;bMCVLIL9&03jz?q{ldc3)YYTgY_;u@^T%v+@*a|AfZ8GBVy$wkPk6w7Mq6{# z0tfLgqQGU=#-TrmP}R*f9tl0ZucZ;pws4W2xz8p0c?KvZcVgMuV~T{G- z?;O#r9-L`(Z9?GHtL-EWAXJyseBLqgTfepf{ZXU1&pKE;*zV;WInz0gZmCSzCefH1 z0t$l)2+S6A`Ru#a-kr?AnMkWpz@Kw7mllf~l3F;2SX1t5K99pz@$&xg5bCouooEtV zRlvO*j~J474ID6FBm0FOHD9zFQEOal4jmsd(cSZPL1&%?mBJ=Rzpe5IAf(91QtTv= zvz9-{cH~Xw?)&r${>a?IS``g(`jYh}Q)4#y6@_*9TLNEuw z=^=hve`x2lBi7^2^r09)2U@~MfgibjD5D3LN~hhMQ@lMr$&R-0@JL7K%`a>~%o{{< z2YW>=&c*Ipxh&rWr%x0A6%!+RJ;yZPjY2|T<-xm(D-g_1N|Sb3uW+1f~?asZuIxt z?Z@wWocxzy0<+zILLgOLk4~>Ye_lp_v9hvQPD|7A<)*(pe2WVMRtV!>t=sVip9}lYRnpIpBG5? 
zMZSbMu6oQEFPfO#kC%VzQlul<25=E&EST8DaqTReCga~Ga+{N{D{M~Q!K7Y|pP$3h zL}2OL+Ptn(R7Ltrw<}BkKg**-P0q(~ZXgV}l-~9SZ0hn~6;s!VIwAZO)ieLEfA_!t z;QxtU<%uxXBKwf-|Njs8|N0ChHYB59L?l;fT!UDVRjm0I>Pj2{!af57TN0y0?Z}=SM^2^6mhaJ0x1Dpj>YWTQooFvLoh`n#3b0yW|{zn>FF_AvVVs3 zp^1!)w3@S{K@VU&gVA5K4Z~MlP0&~|Z=NT-wtCdQg9rN{bw$F=kxx+3_sasu42=!n zjG!t&^GcHx8Eoa)Lnj#;UL-`FffKj)47}psg};sVtaop55D4c-i*I}U3M=$-q%;2K zo|>96*XXFjXsqX{vWduv%v^lO$EwNJj7i#W!C4GPClN)Q&dfs<_eb_oG2 zIH77XCo~)Sk6v4&CvDdx-zLJLWd2wMqgQ8XV^7J?moOYgQ}?pu*ww5408(%cgcOik z!Y@87DH^ZDYQw6TpM>Q^LV(qyjd?bj0Y!)oJ$tWC5Xcc+vO$bH~7L_+q@+IsqH&K-zgZB zIuRc)R_bEVY~)F?ZOqwBZDhN2X`4&+?w>aRG3gQKGUq(s-fLgzj3b!g=}f_pe$RO3 zKd{4ERg7R7P*xJ3x=Zb?S~W<0Xcs1xogG+&=EUo{5GqG_%Jp7(!ltc=o} zaB*-~!e-aQds?8N>-NOqR|bK#L&^* zN^=K%Yu}das0`%Zx!B%6aW)~R1B-}7#US^4Nw)uxj(-ZV zIdPLNbLmH6g_M!|o|@@<^}(aN+Oiq%wwvPO_U-RFJu#`#W>Yxl&Hg$6^=U2H~z6_-%tw^iU>(zA%2K>{m9Ky5q#hIt@B9Jn* z+)<|DY=xqUb0lgS*xqm8w-Q}e*4?`nm;SH9&ODyWylwxNtXZO+NT@_4Az50G%1F|# zMG;D+QWQmH2`Nd7P$@F)n>1QTSw^%|DOyAuqNo%#;(1@)uixLlUeD`(X6}2&_v^Yo zpL02m<2+7F-rhou9#DhJinE`zQZhf!@Q}Q4tW610gmE0*Vz7o<>h^c4oI!f9MF!7? 
zzh5L@2!M9$*71g(bel|r$P3KYvn#^Tz@Wi!a6~?gmBH}gwbQ}(qj-o+Ul=_Ow52L~ z)|G~hj$cAx@JOR~5{C*Zjpse>9?WGYZpA_<^d>WymLnqV>XUA#jgO{=R8zc#M>PQQ zk5?_eAf@aiDjaTZLV?#jl>>zgtJuG0uf2YyIJ3mOdeFZ1X2dJ>7Yvi%?h1#%?_J!J zp$z&`iVthe(=4L6cMk!V?j-8XA9RxadjjWVxEjV?c|3Q@nF|+W1>@v<^!jZ#6+j4b zEass?Mwo3BAeG=4>Af38(Fj}CrC`}w{M8PsI?!$5f~O%tCwxb4cLEH9bs)|ZkEiUT zr1ra`gsTHiAO)+g2Q>H6-OEzkIcuYgfeW|(=fo4NkiP{m44TUseDKm;(hC!{ zw(;fTq>JfLnXmiQXp}A5q!NYIhjff{3)vK)u_v5Uit~E__k+gd08^zrhq5`U{q~6S ztqg+*EC%r==Jx)=w0o< zf9F_?!k=Ei7=)1Vd~QEB8btXp+>aK)N#$SOxrDHI@nTpXdwnSIO=v{EUK6kB_vy5A zg0V+|)K>)YW$C!ZS>=eZ6(tZ3HGbeBR3eB}x=X?7v8m(}0}ognaJ2Iu;=uz51Rhm- z4RHojTN&hs%uJT_y<+4Pio)s_R5|1og?DgJ5PdGXN3|OQRm->5)&VT$>vyJWw{C}t zNP-REGDrev1gHUw9kg65$R{6+k9Vh-2kSz$Kl!h}c1Kt9zd~AltgRIncEi}vIw{|R z9~ULK0t?DKFv4zVVC@tWFMy4@y1ElJJBR7%xiXzig94R<3#&UqdJx8d;rvQid!l!A zbauw=AzXSRFW`Y@BdGRqr{pdIGgDN6{ITCFuNBE<9A@89jbF}3c64b9yJ&qatgSEbLJ*u$b~5;mmTzk`qzKf= z4R8hij{>t`E{iL$i!y=sWMEfsJ;4#+2dWc{(PL*WCjbx^MJU5eA>mrYe=6t3PoK~% zlgXwyp-5odCF0>7T-6x=W{f9(_iiB6Uwn#1)db=S!3x3{ER#|m)fux31bVq}2DV*U zn2I2t8g(ekV|fi@zd-VVYao|=fuw>a6}ePDvn9qN*us1k8hqgOEXo5Q!wuUKiWYyW z%B6XEUR=x$wn}~^H}2s>HWl4q2NEVmfE0u|ZbGDz0_zkJ8rNI{JJZ!&LKShc2-+(A z$8f48DrJ7frN&~?bIn?waP_K%nXl~xY_V_B!qS?H$o?UVFNF7IraI#IZODx{z$WkC ze_Yg8GhROybE3ZMxK(%19dL`t(Gd4AVp6uOvbLWw;}Wx6Ova%n3)4qO3fz*0{qo+u z<5lc8Z@zKy;uk1YuDqAKdj>6w(fLPeucoOYmz@TZ+=(AB=YdMbLuIHUf)^S`=m_xP zhWdI5(KE`WEK@(0IE4i{Kuvxs5@x|29mnh5xu%gdjg1sN2WPq$K7Za%u+dCy>i0K` zi$5~OA_#;yB4M-7Quzq2Z8XL<2X$JuZEMnx1wKgom)q($`!Z<)ym-mTEgkJVDAohs zuBy^B-{P`|hYt&zYPtN6Sq7-RyYZMYZV>!1V8*mDT?LdtgXP}*PdF`vGNLhXg!_rr zKxuj(#7;L<91=_d{v$u6)w(ZPA|pV^>Bu49JjshYsX=w8EI6=^E1y!z=76V&Ovd1d z>n;d_e(a-1(;?8A?Nx<5Z(&2z;+A3}&Y^Oa&)?BH~CTol$0LhpIb zP%hje_{Z7U*n5D1?#%=cjuo{hiHnw$p_K?3r7ptRd09ipjuj36T_OWT^(8}88q~{r zn9*XXOcq03ac=G<86ktU!9o9IXXCVE%G9$AGs|5BKY#xQr#7A#z9$v(rmBh++TWU+ znG4X8!;ih6swzfq1^+g$pad>5u6zGpdOmlIithF$UghbM8*1j-xD)H7 zY$+hAX?ASeCJ0>GJ@N6zYu?zqt~`{I0)|kn58+I0-fwJ(MX}liHlz3?FIl`8D=>t$ 
z5G08p$rw5)DeWZvgli=^9cR{05Ka|}gXmxs-=~mjo|;T7d|BY>0?1TiAIvr`o5c8;G@Z~a(;AG0&*=X>lZn#b@UU9|`>+j9Q}0Rbx> ze)}o*2wS8D*oi>%EC4xu9i8xb$}~Pm+6y~rouvfIs`$rvYNaL@&~M8~OXqj;fxP}V zK{4m+;|dtzcVX_=7G2GguXd3myh>DNu0`+DylK3W+Q9U8dmA1 zk@!t9Kb3RY_5gci1Q5Q|hRIs-1zK`PG4F_Le3)!1yYQF-w7yiooATYdMU8MXhy~!C z!fqDnX*>Q>k!qX-gOjS-)?e_4Yu1|6rS0%!2Zy|>^E3!Qf5Yn_t+PPirAP@7GRFfe zsO@KsxmDQP(T7b7CuKT1IAD%4K$kVSpFe+YcA)?kw6N!psq7__y(Y83^~$Ogye5A3 z9DvE^%o(Di^QN)U$yw%;2{5ZvM8^BY2ah(% zO^Rd=1SdqP2tTvcAPB~g^OO5%c5|hITQ~a~lYax*e;3YB<={D|@x20j2-xR%3`cDK zHg<{% z>1V>H;a{evgpDflGT{#2UOg8?9vk$T@kC)kuqld3vRXG(sHVh~vMsF-fN)|4J|KXc zi^ASQg0M_ozkHO@f~$GvE)oMKFO`=x3I5@mSx|9flQkjXRovn11KCv*VMMk`VgVxGqo)a-TAVaD&S(=9ybCRX+b17Q%PD`K&|&Sf zdJKC3>dcWn!ea2%XfM;o`+Z0!q3DVB*SsYj8T*qk(^&>I*fxkNm%^O3ntCv_pWbGDOyBH&y{#TI#wBq2~@~NUYqgO|C&Ij?h%= z#U3F=;db=GZA6hC~J zNJaDbaZYe6#_(UrzKr&kB%D?c2+^XjK;66u%f;`5+ggzOVu+I)q$VHf*p7(|IwoOplJT*^wAgICwI9LcwBqV?+V$l7z9z78r zX}TMh@3c#!K}&a(Yv@s#7rkJdb;O~lqQwjnGx6a;@uoL@;>25dd0LDlAfTt+^6Vd# zb?lPW*|aomBcmHEz{4Aek#^hup+>pTBXp=p0hkR|6jb{6&-E$O8MNo{;VPn3lG6to zDFi2s8~uFsqTV5}R}|+e!Ep@r1ZPLAzP>k1YB%lv?`{H%w2aK!LxxoK^n3)>=tZSB z6)mzTJnb1v{o1AtY0msOb1`*q-%9@1-|g+EPrpgCgiaWP@o)o?aOZIDf84Se&$9#pyc$p$xgeJYCDzTFW}m-0+jsh?Ew)a>&^ zyoixCZPxSKfn1n(r4>T)^1Sle%SF27n(<037{hJs-DZ%9+t=zkHPUEXS84aVmhF9P}OKf3&m^U1+J$|#Ef5kAY zue?kTgin#t7~j-8Rn2wbLQTu*f1GVk_Py|E_o^rDBsdszB+mmXD=W?V-KU-|E9<_= z>9Nm0MouT#m_VOIi$NU^W@CkB^U0W|-t_Qzv{fqRs#^+zRZir21NF1Rd?Ovs52Z6> zBDQ;v9&7#lPExbuo=N#joxAzgVQJs#%#>VA4*s6e?cu>u*lVw(*CAhg3(HrI>9)kr^62OPl=!n#n|4xJr5O23yS@%W~SIbJN{|hSz|^ukcuHN zxcb^fqTkNl|A zj*lSwz9$Of=C{@vJ|tpw5vc6nVRTQ8XB{x^4jBRGBlM%UHw_SMnC^(Q3o{*+8!m1a z5}a$aC;%AOP3u}ZwM#dzPM7h*MHfp^v*I^+GV5rWPk+;1f9xvzp$RK~T;pmw{~eG871d4a z`JsLEcq9mkKtMBgY_20rP@*f(d1$l3nPTZlIyUnGetxCc=9-xB1Dfe8fW^zMG0GY- zL*bRAvpVusTw>E4DzcaYn60dI{NlO7&_aE;ech=${&By-CICye<$BZRjCnaGHkOml zW8jh8Uf+tuPs$cUlh4taz=(C(Pj`o%(V~l`Os-hIdFk6hXrz>(6_u4+zkQ3?pY<-i zbnUaUGSOZB7Nq6s%1r5@zB5k&t8hD`71hFrCcuavS~6)ar%>Y2(XA+`n{)Hkg4VIR 
zi%P#2T1n|R$ubSmwg1_(;d*+EK}(5 zqaGy%itDOmNEhG+!GOAa5SuzEnT@0yVw;{KApB=XgKIPhMOsbXF6@K$CMB``!$l9) zfXWpkChb;BnQj_aDdD-dJn2i{>!h;fN#B$xg>nA`r*o&x!lk*uXvUvUj-WKA*{=iIv5?kyxIRBd#WEjZ*KE zKg9X6e`ucfnUbh%1r&6IJwxdDni|%!N)1HoQDd+6{6kpLVl~WYQ>+p^bxXtJeQ%2U z73e7p!z)5}gyme`10xaE$v`qPgfwk^Ta^qkcxfoISPFrU%57GTOP-j8#cMq+GS0te z&uaAO(PbCM%GS-B9v(>yzckwB)R7X+1DEB8OFJl9JYYU3dU17g%CTdEw};xY#iJ^EidrV>TSO>JZH!Gi;X&4(rc@@_Md z3SP_C>}aX)Y1X=R#&39Q+%-yw>es<{MkI^Us4n6*QWT3H z-o7=Z-bXxf`Ti1>(tFP_pU_B#9~shXXKXA&7xtzcOHFNN)Gnyk+(c363?Krcg8hn} zt_^6CjF8$}kxrFLu(Uv3$H?gX*|TWfZ&Aq;)@UFZ8P=I>>=TnpP5G$snvGZA2kyqz z;qUQuQI#B0%Q*zK4(w>(;3F?x+pCkQ>!hfONN1D5Ih4<}v0-|ai;GQ;LH_-2OOVf; zSC}K68hB_YA5FoTUSx$}PUmKtxH62A1!Lnq!#0ymfCi&FF)Jg$*8GYc5PqouQXu;A zHRJ~T;ceqVZ9Gqk(HcG4z!f5i^l~V;KdU#wwYD*1$3P*>1B4OspS;RSwd4(CCw|&a zHPeYRX85z71Hq$l2VxhWi}#oaL$moZt5obE0s~o!`sm3MK+LV1WCNZ&e5fq=p`2dz z`DSM3qi4_NFjZieCN6LA@j-r$q6|~AJSku!(fuK$o47ioV)-G2nZsPIk<1h95U-p+ z|CzjM^?b%9SHsDZ?;7=r0$e5Jpl@hsY8uCaJ)dRW!sH9#D`79NUf>&+M)m%JECj}- zFyS4)GhC&_PoIDQv-VXzf%MsL?}*d=OSW5qtWTcIpjXG~Q>T9Z*K~8vSyzds=O|=Q ze&JPn=Z^fbu!&(#x{qJK9t-jtM*ZkTnNG90Rc~TTe&Clj=15XvsfKAEyTr8^gDK_~oAc*8d z(HwsWZQNK09PVV@?t(aNTNP5?#F1N=8u6K{Gl{s%LAtX(4lHK*ZK4Jo*M@Y31uZ;`-l^~nU3|rkNO;>%@_}(2xS>Erk0_6P~-aSPOwPzlKjb30CB2$_1? 
zwB5MhTt5Z1MEb;$9efuj8h@%pngm(_kj2QZNb9>zSx3Rkh*6{+F)D~8vVQ$#ls6kq zPU_(u%I}4Qo(%x}p@5hc+(sLNuKs8(-%g`@_Yst_yfK>MxTiHt_hw~eh!`-|emL<` zB0XHwaGK6*o$mx1@MWH!Z!uhKXkep)@8w%zllAPc+!GkeinOzpnP2i`mtunB@3ffy zuDRJBm!9k%kZWeELXpH1MnhX|VcF@1RZn|-t*EgmE^c)*7&A11{N$urz|^kjujsok z*=fnrIiQ(_ea>7;rwd1%_Oe25?bM~phuU4`!qH8z?$&kXX1&g;$fCs~R24tpy*O~e ze&6CtS5N)q988}*dts*6Uyz|-GJmWWh?&kz1vF-i=Zi+<*EQy^KRdJf$Q1L+elr;C z)zWHCZ?#jghYMF$*fB9(w0T=OIifm9L5_*Y;^Jp(CLaX62d$ekWy(2_IP_bN#~4i{ zOCcfa`@=0;wsqar+*~&Yhm*C|^|nd;2E%NCsJTS6YY)H78)KNd%1YDSKsjz#<0S8o z6Nbx;+qewZO)_1ve|E8QmdS#uWqkw7iW*v8wFj?gwJ}MFaoo(*WaPOi?-uS^l!`+J z?{Q37pVXXnNe3^H)i?h9>TTgHH>bdkSVc=@<6OD!Rdw4S%i~Q|=~N(NhMzq@{MEV+ zjqjgX<_z&rlXL=w+V&GJdcG5N#>w-I17GJDIf^T9pe^d0OuuJ@Za#mapQ6lT0|Zrw z)}e#;oLj-r8wAo7oIy0@f6Vt|wt&*O_WgU?6$T|>k8GtVSkv>~>o~GC;eyKyF>-_e z_@H}-Vz%X=$e-VVHqfz_vlVs7ko5QSk~+IDydLD?=_%PDGSfSXP(b6^OjB&%s+ib$ zf{ErQM80Ka2at>7qMYE!@{DAQHAfkloa(my%U#jnDEK1K8!nX{*MCSD#J*+*z8w%f zFIR~kkzgNe(!Bj=f50GAQuL3W#;P<6)Uiz~7S8%*@~2{{;XnDeFwpkfA-_ZPr>ZIK}3jcptSIxAhyP|e+-YiPc{zD&)!~YjL{&3@ z+PW7%mm)qQIf?3VB_$9p%i{oBIk@eor1W1nukA=ptBWd&!X7{FX*J@r1L|3@Zw3|P zTN5ah01=Mx2$8Dw-Tu=b`O6=}l)~tmCH^lD?IEqA`y-Ekt$LMo;}+72!(%szvqGA) zMcSWF!BR=fXF(YP|GapSXli>9J1 z6By2ly>2ipnUmMtwDL^N#P*a=e^nWXTiIbHYTrNXPUsPAmgY zZfx~}1HGNhYKC)jeguEm)3kl(nb%tsf4pHC+pxxN0ZxwTW$Qbw2f$wP;n@{( zdXCZ0Y~3+qQi>Mme>q7Dow>oUQM<%rHo<%8`AbL#D%*WbRAXu4H$6)jH44(6=lsA( zY4v{R6NOW3H+Gka0G}=TurevQdevy(Zdax~5JH(eU;BYgnj6CZGosPZXiDVA8-T-P zloz53{G1R-oa(E2j#QL(B0etKWhEA??>p9)U_ZkZYq{PHN@m!i8yZ%1-VvT3N{6c# ze_d>nY~)Lc6snUHLW0GH`%PJoqmNh=ZbBx(I3LHKTjJb<4K6-Q^BzX!8k-Ot%bLhRMo{Z=CQ%<5-V!m)~ zfZ5&MJT(eOuwYAS8LXE`%|29DDfBPM@@7t-jz6=w5TtP>0v8T(tcK-R9$j+_-8DgmpaihK%*Q7$e|geZ zBzHVDl&E5X&kIM5!XX18pm#^A*Fil;o7b6m);&n_g-SW4B5dX5%X-J@8qnsK{_Y7_ z^*KEav+9BZ4zv)ufB#de3nT^$=Fa`!9#`-)AQwt&*gK{wWEE9IuWNUxkl83Fqb0L2 zixUKFR^x70`)Y6KCi&pfI`e2WZ%RA=;y0iJ_=0^3BU+;N8td?oT;$h$(bAC%0S#fZ z(c7(+kxfyp{qW)FX!tFbHEzyi)J4uJNV1%Ham`HfhwjwFbb}LBvCAT~8tUoc0Es4< 
zt3{)&AS6M1Qd%&l3w=?oT^FGIz=I3mbY$#rE{NM`8`so#mWud`yn8xg;6O=%Ns67C zJ4l0&buF^8EL&hzgk?L*&2h`0oTQTo*AhX$rs;K-BXo4^u#6WnkR|a8Q)8@U3=fu! zo`6!lxrMrcrsILVc-|z0(n;DoC(f8PE5*cH{!hIL>ME|p5YD>g5IJY!em7}SwAMD> zjGDUoj8ik)aDymfb%fPM%dIHWXS2!q+QY3%24BxL04w7Xd0mA$v;IN`tLXdeD1Z9& z$H#JmI%jd_34D`NKfp6a8=y-<#Lvfox1RW=mZ62h< z%#))_{8jmQX@QS(|jp&F`4uallJ!l>3bRo%@j8hJ4* zo>yXQ6yZ`U!Yb|_9(Y%W!m+d@D2QMKY)AW%-Mgu^SaLf>1x4Queyd13rrXf#aHb>2 z9%N_JTDbpK0wACl#rG^2OO$+q$jd z^1i_=1Ye!TrL~pR0?;Uvc8{N`Ct<6?!Aps;qEkn${!V`*<^|Aub#-*J5<5 z$$NCFgd& zWshgnz1z3zlYWYFm>Dz5Z2Ac9&;*cF8KWc*YJydK)yJYdboy>-8_kg0DC3N&F;L)P zwVkJ}kKQsVN!)265F9Aq-zWW~jw!wF@X*6Qb3J1*xP%q$SryXvXASCSncVFBYZ$D$#u5qsYaA6PtTI=nIDq;V7jd85+SS zqHWP1TJ(3}_kM8yJ{;K6yZL!})jSj+#@D$|86Aam<)X+asy0Wo%2Iowt);dkIoBjv z*d7>-L3|ryR%or)gMBgwgM0`(3aS0>R1j`%DUfpWv~DXQIN*pOqQMmb3=nD{c&UC` z;!6}CsL0*PoEg0bS}j`B<-eOChc8D7b8JFo*zAInV}c`AN402=uiku#${j$UH`_8X zGQd2V0{gJsuAdM3$}}1VUf%Ogd|M# z2#Ln``CkTKo!<9WU{f+8wy}g#B%}O6bOM!RxA(kno_;doLR3r0-uOq>rn+F z959wWYAwV>E*pCySUVQs|BOsZxwHUFGbe(oIsVbW2bcBw_KRZp>3hx_f|a=OnHb{} zyJIr(j=04!&L)sNc!VCG#>#r+$nS(MMVay7R<{b5_F$pt)8ezNYha7O(reyJmLQqw z1oHhwF^cCPZ+{)aoH%{hUF7$?Q|fs$|IHImI&9$@~%&aMFXhd~Uq9PS}YFL3}JKZ(*ho z%FaMwP(lMziS_eCn~t72gPfnqAJ0tNW)R!aW5%Qn*l<}k()H%%F*}M5@SXYU5(1a1 zysQibwWgVmN@bevAzyp{uJLTgV1nN6_4U_9_wTRdC36bLwRGVtLNul)W?cH>W6?z{ z%4EpEL_4q|0*}{=JCyrSJxB!t)=%&;6a)_HRkS(`K2F+6Id>2u8kgI}H>_u`678h0 z-us0=)ppSZgI>n^^dYV;Cz(?3<&9116Cj}9aEO|n({hzIR0AdVZl5uIL6=FRXP=wr zy?XU79oT#vP~{aAZhx9bmn&YD(^cL+oqeg_HgD!m71MLgvQ3?)} z>+;<#`vaYXAov`S*=QatLCKAn`1O4zJb%N%!nYXK(!Br=)9l6%9wpBHi0jGdepB4K z&$Ffi}YqaN>t9HFGwuD0ThH3)-^+qQg4+2Sx`(SYKIX3Km!dcGO)%52{qErY*~ zd)M#rcpjj$v&Vfor&2@xShM2_<|+%492{2l`mHgssLNdMK8{|F{nQTVsNOEIc3XLE zSx3e3s@h%3ufOH`J~wR}JN5hMlxK^oF8Mc&X?^Mcu-59oKJbbmcELKUWB%$rjj?qd!M~NK8ybInI;* zOFF2mvA#a+K3aiqL8H@L9Qwtm^oX%X?*m3aofY<$iJGT1ei?=eD;#n2F<5VLH&f-j z{lA_Wdkj_NHU15wsI{N ztA!a%PC0OZYUSI5j(2_*4-kf`$T3(kpK6VD0$-F$Ysl;a@Bi0aBY8KI9{`r_S3@|U 
zjMd#MS_GX=VJ@41pm~B<%ScL*DsBZ>%`q~8&0)l|%Pc0L`+4qb9)F*Cb^ib78u;5J3uY4AD-!J8Nh{>xkRC^ZOk; zae_6XE3)Qojd#%lD31A~Uo2F`YkbX8T8K!#35JB)TT$P^2+X-_E<-%c>%rm28tUq# z_aNA-UtAu>ydB^OyU-3%ayp-zJzkQS%fptDl|43HU(s^s#KMuv)J&4Hl@Zj8FpcEA zp$T_zT7>}f#qDrDr~V~#TwPnsKCQ>17R?xccrtSh`|NhW>uU?D+l)6fJP{WRO_Cd- z7|}Cf39Y=WM@+N0bPRW=DO32(pt2xFhMZ$cWVyY))GgPc38uy0Ou)>-?W1W}hkdH4nbt+EzgNV!xWyuY9%6pM&yJ z79Mi0-L6~)J$to7EwQL*LX?L?(B9z|DG&IVU8@4EQpSzjP}gZS`p1!&cY2+PAsFY) z*4(^qyxtZabV$75s%w6FH#LsD)7v+0D^Hx@f6;XT2V&Z^FJ}&p|9R=^=HFI@#e4;7X9jQ^J(bAL^@x1TEW{Xf67l#@K#zRD%SRvcij zstd^G%>%H->-osPuPv>t+`t6#ZifI6b-6t!{``Y!7self.s_thre,1,0) + syn = sum(sum(self.dataset[j, k*self.c_space_num:(k+1)*self.c_space_num, l*self.c_space_num:(l+1)*self.c_space_num, i])) + V_spike, V = neuron1.neuron_ST(init[k,l,i] ,syn, self.s_thre,0,0) + self.spaceneuron[j,k,l,i] = V_spike + init[k,l,i] = V + self.V_space[j,k,l,i] = V + return self.spaceneuron + + def Temporalprocessing(self): + neuron1 = Neuron(self.if_st_neuron_clear) + init= np.full((np.shape(self.dataset)[1], int(np.shape(self.dataset)[2]/self.c_space_num), np.shape(self.dataset)[3]),0) + self.V_time= np.full((np.shape(self.dataset)[0], int(np.shape(self.dataset)[1]/self.c_space_num), int(np.shape(self.dataset)[2]/self.c_space_num), np.shape(self.dataset)[3]),0) + #self.timeneuron = np.full((int(np.shape(self.spaceneuron)[0]/self.t_window), np.shape(self.spaceneuron)[1], np.shape(self.spaceneuron)[2], np.shape(self.spaceneuron)[3]),0) + self.timeneuron = np.full((int(np.shape(self.spaceneuron)[0]), np.shape(self.spaceneuron)[1], np.shape(self.spaceneuron)[2], np.shape(self.spaceneuron)[3]),0) + for i in range(0, np.shape(self.timeneuron)[3]): ## event + for j in range(0, np.shape(self.timeneuron)[0]): + for k in range(0, np.shape(self.spaceneuron)[1]): + for l in range(0, np.shape(self.spaceneuron)[2]): + #syn = sum(self.spaceneuron[j*self.t_window:(j+1)*self.t_window, k, l, i]) + syn = sum(self.spaceneuron[j:j+self.t_window, k, l, i]) + V_spike, V = 
neuron1.neuron_ST(init[k,l,i] ,syn, self.t_thre,0,0) + self.timeneuron[j,k,l,i] = V_spike + init[k,l,i] = V + self.V_time[j,k,l,i] = V + return self.timeneuron + + def Stprocessing(self): + #self.stcore = np.full((np.shape(self.spaceneuron)[0], np.shape(self.spaceneuron)[1], np.shape(self.spaceneuron)[2], np.shape(self.spaceneuron)[3]),0) + #timeneuron = np.full((np.shape(self.spaceneuron)[0], np.shape(self.spaceneuron)[1], np.shape(self.spaceneuron)[2], np.shape(self.spaceneuron)[3]),0) + #for i in range(0, np.shape(self.stcore)[3]): ## event + # for j in range(0, np.shape(self.timeneuron)[0]): + # timeneuron[j*self.t_window :(j+1)*self.t_window,:,:,i] = self.timeneuron[j,:,:,i] + + #for i in range(0, np.shape(self.stcore)[3]): ## event + # for j in range(0, np.shape(self.stcore)[0]): + #self.stcore[j,:,:,i] = self.spaceneuron[j,:,:,i]*self.timeneuron[j,:,:,i] + #self.stcore= self.spaceneuron * timeneuron + self.stcore = self.spaceneuron * self.timeneuron + + + def stspike(self): + event_num = np.shape(self.dataset)[3] + self.ST_spike= np.full((np.shape(self.stcore)[1],np.shape(self.stcore)[2],np.shape(self.stcore)[3]),0) + for i in range(0,event_num): ## event number + for j in range(0, np.shape(self.stcore)[1]): + for k in range (0,np.shape(self.stcore)[2]): + self.ST_spike[j,k,i] = sum(self.stcore[:,j,k,i]) + self.ST_spike_1d = np.reshape(self.ST_spike,(np.shape(self.stcore)[1]*np.shape(self.stcore)[2],event_num)) + # stspike = cfg.code_path + '/data/stspike' + # np.save(stspike, self.ST_spike_1d) + + def stthresholdpatt(self, num): + threshold_index = np.full((num, np.shape(self.ST_spike_1d)[1]),0) + threshold_value = np.full((num, np.shape(self.ST_spike_1d)[1]),0) + for i in range(0, np.shape(self.ST_spike_1d)[1]): ## event + for j in range(0, num): + threshold_index[j,i] = np.argsort(self.ST_spike_1d[:,i])[-j-1] + threshold_value[j,i] = self.ST_spike_1d[threshold_index[j,i],i] + SIMO_thres = cfg.code_path + '/data/reference_threshold' + np.save(SIMO_thres, 
threshold_value) + return threshold_index, threshold_value + + def stfeature(self): + c_range = 60 + st_num = np.shape(self.ST_spike_1d)[0] + threshold_index = np.full((int(st_num/c_range), np.shape(self.ST_spike_1d)[1]),0) + threshold_value = np.full((int(st_num/c_range), np.shape(self.ST_spike_1d)[1]),0) + for i in range(0, np.shape(self.ST_spike_1d)[1]): ## event + for j in range(0,int(st_num/c_range)): ## + threshold_index[j,i] = np.argsort(self.ST_spike_1d[c_range*j:c_range*(j+1),i])[-1] + c_range*j + threshold_value[j,i] = self.ST_spike_1d[threshold_index[j,i],i] + return threshold_index, threshold_value \ No newline at end of file diff --git a/SensingLayer.py b/SensingLayer.py new file mode 100644 index 0000000..5ce1fc7 --- /dev/null +++ b/SensingLayer.py @@ -0,0 +1,34 @@ +import sys +import os + +import numpy as np +from BaseLayer import BaseLayer +from logger import Logger + + +class SensingLayer(BaseLayer): + def __init__(self, dataset, t_interval, thre, polarity): + #super(SensingLayer, self).__init__(input_channel, input_shape, output_channel) + self.t_interval = t_interval + self.thre = thre + self.dataset = dataset + self.polarity = polarity + + def DiffSensing(self): + dataset = self.dataset + self.sensing_output = np.full((dataset.videos[0].shape[0], dataset.videos[0].shape[1], dataset.videos[0].shape[2], dataset.event_num),0) + self.sensing_diff = np.full(( dataset.videos[0].shape[1], dataset.videos[0].shape[2]),0) + for i in range(0, dataset.event_num): ## event number + Event_register = dataset.videos[i] + for j in range(0,dataset.videos[0].shape[0]-self.t_interval-1): ## frame number + + self.sensing_diff= Event_register[j+self.t_interval] - Event_register[j] + + if self.polarity == 0: + self.sensing_output[j,:,:,i] = np.where(np.logical_and(abs(self.sensing_diff) > self.thre + , abs(self.sensing_diff) < 250), 100, 0) + elif self.polarity == 1: + self.sensing_output[j,:,:,i] = np.where(abs(self.sensing_diff) < self.thre, 
class Visualization(object):
    """Debug/plotting helpers for the SGF pipeline.

    Wraps a single array (`video`) whose expected layout varies by method:
    most methods assume a 4-D (frames, H, W, events) event tensor, but
    generate_st / generate_st_event / generate_connect / generate_stbar index
    it with fewer dims — the caller is responsible for passing the right shape.
    All plot methods call plt.show() and then plt.savefig() into the current
    working directory.
    NOTE(review): calling plt.show() before plt.savefig() can save an empty
    figure on some matplotlib backends — confirm against the environment used.
    """

    def __init__(self, video):
        # `video` is stored as-is; no copy is made.
        self.dataset = video

    def generateDVS(self):
        """Write one .avi per event channel into cfg.code_path, rendering each
        frame's spikes into the green channel (scaled x100)."""
        for id in range(0,np.shape(self.dataset)[3]):
            data_id = str(id)+'.avi'
            OUTPUT_FILE = os.path.join(cfg.code_path, data_id)
            start_frame = 0
            end_frame = np.shape(self.dataset)[0]
            # Hard-coded output resolution; must match the H/W of self.dataset.
            width = 128#int(540/2)
            height = 128 #int(420/2)
            writer = cv2.VideoWriter(OUTPUT_FILE,
                            cv2.VideoWriter_fourcc('I', '4', '2', '0'),
                            10, # fps
                            (width,height )) # resolution
            have_more_frame = True
            c = 0
            # Loop terminates via the c > end_frame break below.
            while have_more_frame:

                c += 1
                if c>= start_frame and c<= end_frame-1:
                    # cv2.waitKey(1);,
                    gray_frame = np.full(( np.shape(self.dataset)[1],np.shape(self.dataset)[2], 3),0)
                    #gray_frame[:,:,2] = self.dataset[c,:,:,id]
                    # Spikes rendered into the green channel only.
                    gray_frame[:,:,1] = self.dataset[c,:,:,id]*100
                    #gray_frame[:,:,0] = self.dataset[c,:,:,id]
                    writer.write(np.uint8(gray_frame))

                #print(str(c) + ' is ok')
                if c>end_frame:
                    print('completely!')
                    break
            # NOTE(review): writer.release() is never called, so the container
            # may not be finalized properly — verify the output files play.

    def generate_picture(self,event_id, frame_id, name):
        """Save a single frame of one event channel as an image (green channel)."""
        self.event_id = event_id
        self.frame_id = frame_id
        self.name = name
        target = np.full((np.shape(self.dataset)[1],np.shape(self.dataset)[2], 3),0)
        target[:,:,1] = self.dataset[self.frame_id,:,:,self.event_id]*100
        fig, ax = plt.subplots()
        imgplot = ax.imshow(target)
        plt.show()
        plt.savefig(str(name))

    def generate_neuronout(self, V, x_id, y_id, event_id,name):
        """Plot the membrane-potential trace V[:, x, y, event] as a line plot."""
        self.V = V
        self.x_id = x_id
        self.y_id = y_id
        self.event_id = event_id
        fig, ax = plt.subplots()
        ax.plot(self.V[:,self.x_id,self.y_id,self.event_id])
        plt.show()
        plt.savefig(str(name))

    def generate_spike(self, V, x_id, y_id, event_id,name):
        """Same trace as generate_neuronout but drawn as discrete red dots."""
        self.V = V
        self.x_id = x_id
        self.y_id = y_id
        self.event_id = event_id
        fig, ax = plt.subplots()
        ax.plot(self.V[:,self.x_id,self.y_id,self.event_id],'ro')
        plt.show()
        plt.savefig(str(name))

    def generate_st(self):
        """Save a pcolormesh heat map per event; assumes a 3-D dataset
        (frames, positions, events)."""
        for i in range(0, np.shape(self.dataset)[2]):
            name = "event" + str(i)
            self.event_id = i
            # pcolormesh needs edge coordinates, hence the +1.
            X = np.arange(0, np.shape(self.dataset)[0]+1,1)
            Y = np.arange(0, np.shape(self.dataset)[1]+1,1)
            Z = self.dataset[:,:,self.event_id]
            fig, ax = plt.subplots()
            cmap = plt.get_cmap('PiYG')
            levels = matplotlib.ticker.MaxNLocator(nbins=15).tick_values(Z.min(), Z.max())
            norm = matplotlib.colors.BoundaryNorm(levels, ncolors = cmap.N, clip=True)
            im = ax.pcolormesh(Y, X, Z, cmap=cmap, norm=norm)
            fig.colorbar(im, ax=ax)
            plt.show()
            plt.savefig(str(name))

    def generate_st_event(self):
        """Save a pcolormesh heat map per frame; assumes a 3-D dataset
        (frames, H, W)."""
        for i in range(0, np.shape(self.dataset)[0]):
            name = "single_event" + str(i)
            self.event_frame = i
            X = np.arange(0, np.shape(self.dataset)[1]+1,1)
            Y = np.arange(0, np.shape(self.dataset)[2]+1,1)
            Z = self.dataset[self.event_frame,:,:]
            fig, ax = plt.subplots()
            cmap = plt.get_cmap('PiYG')
            levels = matplotlib.ticker.MaxNLocator(nbins=15).tick_values(Z.min(), Z.max())
            norm = matplotlib.colors.BoundaryNorm(levels, ncolors = cmap.N, clip=True)
            im = ax.pcolormesh(Y, X, Z, cmap=cmap, norm=norm)
            fig.colorbar(im, ax=ax)
            plt.show()
            plt.savefig(str(name))

    def generate_connect(self):
        """Save each column of a 2-D dataset reshaped to a hard-coded 128x128
        image (connection/weight map per unit)."""
        for i in range(0, np.shape(self.dataset)[1]):
            name = "connect" + str(i)
            target = np.reshape(self.dataset[:,i],(128,128))
            fig, ax = plt.subplots()
            imgplot = ax.imshow(target)
            plt.show()
            plt.savefig(str(name))

    def generate_structure_pattern(self):
        """Per event, flatten the spatial dims and save the (positions x frames)
        pattern as an image."""
        for i in range(0, np.shape(self.dataset)[3]):
            target = self.dataset[:,:,:,i]
            target_1d = np.reshape(target, (np.shape(self.dataset)[0], np.shape(self.dataset)[1]*np.shape(self.dataset)[2]))
            name = "struct" + str(i)
            fig, ax = plt.subplots()
            imgplot = ax.imshow(np.transpose(target_1d))
            plt.show()
            plt.savefig(str(name))

    def generate_stbar(self):
        """Save one bar chart per column of a 2-D dataset."""
        st_index = [i for i in range (np.shape(self.dataset)[0])]
        for i in range(0, np.shape(self.dataset)[1]):
            name = "bar" + str(i)
            fig, ax = plt.subplots()
            ax.bar(st_index, self.dataset[:,i])
            plt.show()
            plt.savefig(str(name))

    def generate_weight_hist(self,sigma,mu):
        """Save a density histogram of row 1 of the dataset.

        NOTE(review): `sigma` and `mu` are only used by the commented-out
        Gaussian overlay below and are currently ignored.
        """
        #name = "weight"
        #fig, ax = plt.subplots()
        #ax.plot(self.dataset[1,:])
        #plt.show()
        #plt.savefig(str(name))
        name1 = "hist"
        count, bins, ignored = plt.hist(self.dataset[1,:], 10, density=True)
        #plt.plot(bins, 1/(sigma * np.sqrt(2 * np.pi)) * np.exp( - (bins - mu)**2 / (2 * sigma**2) ),linewidth=2, color='r')
        plt.show()
        plt.savefig(str(name1))

    #def generate_weight_integral(self)
    def generate_temporal_profiling(self, stim, syn, temporal_output):
        """Overlay three binary 128x128 maps on one scatter plot:
        stimulus (black circles), synapse (blue squares), temporal output
        (red diamonds)."""
        fig, ax = plt.subplots()
        for i in range(0, np.shape(stim)[0]):
            for j in range(0, np.shape(stim)[1]):
                if stim[i,j] == 1:
                    ax.plot(i, j, 'o', color='black',markersize=5)

        for i in range(0, np.shape(syn)[0]):
            for j in range(0, np.shape(syn)[1]):
                if syn[i,j] == 1:
                    ax.plot(i, j, 's', color='blue',markersize=5)

        for i in range(0, np.shape(temporal_output)[0]):
            for j in range(0, np.shape(temporal_output)[1]):
                if temporal_output[i,j] == 1:
                    ax.plot(i, j, 'd', color='red',markersize=5)

        plt.xlim(0, 127)
        plt.ylim(0, 127)
        plt.show()

        plt.savefig('temporal_profilling')

    def generate_weight_map(self,weight, start, end):
        """Scatter-plot all positions where weight[:,:,i] == 1 for every i in
        [start, end), accumulated onto a single figure."""
        fig, ax = plt.subplots()
        for i in range(start, end):
            value = weight[:,:,i]
            for j in range(0, np.shape(value)[0]):
                for k in range(0, np.shape(value)[1]):
                    if value[j,k] == 1:
                        ax.plot(k, j, '+', color='black',markersize=5)

        #plt.xlim(0, 127)
        #plt.ylim(0, 127)
        plt.show()

        plt.savefig('weight_map')
Spatiotemporal_Core +import numpy as np +from Visualization import Visualization +from knowledge import Knowledge +from logger import Logger +import cfg +import os +from prior_knowledge import SGF_prior_knowledge + +class SGF_agent(object): + def __init__(self, args, train_dataset, label, exp, st_paras, train_succ_list): + super().__init__() + self.args = args + self.resolution_s1 = [int(t) for t in args.resolution_s1.split('_')] + self.resolution_s2 = [int(t) for t in args.resolution_s2.split('_')] + self.resolution_s3 = [int(t) for t in args.resolution_s3.split('_')] + self.thres_bit_s1 = args.thres_bit_s1 + self.thres_bit_s2 = args.thres_bit_s2 + self.selected_event = [event for event in self.args.selected_events.split('_')] + self.sub_events = list() + for e in self.args.selected_events.split("_"): + self.sub_events.extend(e.split("+")) + self.hopfield_frame_para = [int(para) for para in self.args.hopfield_frame_para.split('_')] + self.code_mode = self.args.code_mode + self.event_num = len(self.selected_event) + self.sub_event_num = len(self.sub_events) + self.train_data = train_dataset + self.exp = exp + self.label = label + self.st_paras = st_paras + self.train_succ_list = train_succ_list + + def get_train_succ_list(self): + return self.train_succ_list + + def agent_binary_tree(self, batch_i, data, thres_s1, thres_s2, thres_s3, offset, + expert1_id, expert1_knowledge): + + # print("Space expert 1 domain:") + log_filename = cfg.code_path + "/data/" + self.exp + "/train_result.log" + if batch_i == 0 and os.path.exists(log_filename): + os.remove(log_filename) + logger_object = Logger(log_filename) + e1 = SGF_expert(self.args) ## experts initializations + k1 = Knowledge(self.args) ## knolwledge initializations + # Spatial SNN with feature index A and D + space_neuron1 = e1.expert_space(self.resolution_s1[0], self.resolution_s1[1], \ + data, thres_s1, offset, thres_bit=self.thres_bit_s1, \ + thres_step=self.args.thres_step_s1, thres_inc_factor=[-1,1], 
if_vote=self.args.vote_thres_step) + # Spatial SNN with feature index B and C + space_neuron2 = e1.expert_space(self.resolution_s2[0], self.resolution_s2[1], \ + data, thres_s2, offset, thres_bit=self.thres_bit_s2, \ + thres_step=self.args.thres_step_s2, thres_inc_factor=[1,-1], if_vote=self.args.vote_thres_step) ## space expert 2 computing (col, row) + space_all = np.concatenate((space_neuron1, space_neuron2), axis = 0) ## combine two experts knowledge + + similarity = Knowledge(self.args).check_similarity(space_all, self.label, self.args.selected_events.split("_")) + np.fill_diagonal(similarity, 1) ## fill diagnoal to 1 + n1, id1 = k1.knowledge_base(space_all, self.label, expert1_id, expert1_knowledge) ## generate expert knowledge + if np.all(similarity) == 1: + print('training sucessful') + logger_object.info(str(batch_i)+ ' training successful') + self.train_succ_list.append(1) + print("UnitA new knowledge:") + for id_i, id in enumerate(self.label): + print(self.label[id_i], space_all[:, id_i]) + else: + print('training failed') + logger_object.info(str(batch_i)+ ' training failed') + self.train_succ_list.append(0) + + if len(id1) != 1 and id1[0] == '-1': + n1 = n1[1:, :] + id1 = id1[1:] + + return n1, id1 + + def check_knowledge(self, id_list, knowledge_list, forbidden_id_list): + id_list_new = list() + knowledge_list_new = list() + for id_i, id in enumerate(id_list): + if id not in forbidden_id_list: + id_list_new.append(id_list[id_i]) + knowledge_list_new.append(knowledge_list[id_i]) + + if len(id_list_new) == 0: + id_list_new = [-1] + knowledge_list_new = -1*np.ones((1, len(knowledge_list[0,:]))) + + return id_list_new, np.array(knowledge_list_new) + + + # def check_similarity(self, data): + # if data.shape[0] == data.size: + # data = data.reshape(data.shape[0], 1) + # similarity = np.full((np.shape(data)[1],np.shape(data)[1]),0) + # for i in range(0, np.shape(data)[1]): ## event + # for j in range(0,np.shape(data)[1]): ## event + # if np.all(data[:,i] == 
data[:,j]): + # similarity[i,j] = 0 + # else: + # similarity[i,j] = 1 + # return similarity + +def find_movement_index(data): + max_value = max(data) + data = data.tolist() + max_index = data.index(max_value) + ''' + if len(max_index) == 1: ## if there is only one maximum value + movement_index = max_index + else: + movement_index =[] + ''' + return max_index diff --git a/base.py b/base.py new file mode 100644 index 0000000..681c244 --- /dev/null +++ b/base.py @@ -0,0 +1,8 @@ +import sys +import os +sys.path.append('..') + +class DatasetBase(object): + def __init__(self, root): + self.root = root + diff --git a/cfg.py b/cfg.py new file mode 100644 index 0000000..ad80dd7 --- /dev/null +++ b/cfg.py @@ -0,0 +1,34 @@ +import platform + + +# code_path = '/your_path/SGF_submit' +# data_path = '/your_path/dvs_data' +## make sure the dir of DvsGesture folder is /your_path/dvs_data/DvsGesture + +code_path = '/Users/zzh/Code/SGF_submit' +data_path = '/Users/zzh/Data//sgf_data_test' + +# if(platform.system()=='Linux'): +# print(platform.node()) +# if platform.node() == "fudan-bcrc": +# code_path = '/zhzhao/code/SGF_v2' +# data_path = '/zhzhao/dataset/DVS_gesture' +# elif platform.node() == "bh1llmn592poa-0": +# code_path = '/yhwang/0-Projects/1-snn' +# data_path = '/yhwang/0-Projects/Datasets/DVS_gesture' +# elif platform.node() == "2eo5djudc738b-0": +# code_path = '/yhwang/0-Projects/1-snn' +# data_path = '/yhwang/0-Projects/Datasets/DVS_gesture' +# elif platform.node() == "wyh-OptiPlex-7060": +# code_path = '/yhwang/0-Projects/1-snn' +# data_path = 'example_data' +# else: +# code_path = '/zhzhao/code/SGF_v2' +# data_path = '/zhzhao/dataset/DVS_gesture' +# # code_path = '/yhwang/0-Projects/1-snn' +# # data_path = '/yhwang/0-Projects/Datasets/DVS_gesture' +# elif(platform.system()=='Darwin'): +# code_path = '/Users/zzh/Code/SGF_submit' +# data_path = '/Users/zzh/Data/DVS_gesture' + +# if_st_neuron_clear = True \ No newline at end of file diff --git a/data/.DS_Store 
b/data/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..b71ea6ec089e9f02e12aa7a62f4195dcd8180e37 GIT binary patch literal 6148 zcmeHK&u`N(6n@^ymNJlfV1f&hC9Y*8j7cCap^O7pg5WTwk}L%xGJ9%Lid0p~75HN~ z?myYT1K+bfjkVph6Ph+Z>G^y9@x9oeWIHAz(HrFvQHO{EIAiN3iUr2~>~pr{T2_F< z(0iC2ltopC;gMKcZrxZ} z4OWA-U^n@q$4Q;mqpFtoPlz)9hU_823AOUTRgRg&GVEp~wdq^5K1O>9Td4%+?B-N|HYvn!);bJ~@YXOF|Kd>Td5X**bd@aXxQ{?~F}=^s`}frH1` zZHLG36TyOoeM^R_)ao;OnR$ywq^O{pQY^lpoHh`Dr}}lN_nZKHHokyw{4s`zXnlsK z=o4ug(mwLk$fJ>Uai-rhnNDV~cis%d{TbfL3@q=$GZ4RKGNJ12Ix66{POx?z7w^~X z74Qn2s{o%563!SqY%H3i1BH1409$A_hM4b5=17OJ!^R?dV8T*?mMYv6Ls&Z6r5hJJ zY%E$j3HSIA?#sfxP=xt9@=G(EL~PNQUIDMbX$6*TwZr%StFzz#r%Ar%74QmNDFsAp zJK64GN$zf)TO8lD68;g+#^V}`hJwOf$0p#b_#WIC`XmRy*kNN4EinHhU}W%xSKz-Y F@D~&9jk^E< literal 0 HcmV?d00001 diff --git a/data/10-4_best/train_result.log b/data/10-4_best/train_result.log new file mode 100644 index 0000000..dbaf9a9 --- /dev/null +++ b/data/10-4_best/train_result.log @@ -0,0 +1,2 @@ +[INFO] 0 training successful +[INFO] 1 training successful diff --git a/data/example/UnitA_id.txt b/data/example/UnitA_id.txt new file mode 100644 index 0000000..48d2243 --- /dev/null +++ b/data/example/UnitA_id.txt @@ -0,0 +1,941 @@ +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 
+2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 
+9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 diff --git a/data/example/UnitA_information.txt b/data/example/UnitA_information.txt new file mode 100644 index 0000000..b06abab --- /dev/null +++ b/data/example/UnitA_information.txt @@ -0,0 +1,941 @@ +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 1 1 1 1 1 1 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
+1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 
0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 
0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 
1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 
0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 
0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 
+0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 
0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 
0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 1 
0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 diff --git a/data/example/UnitC_id.txt b/data/example/UnitC_id.txt new file mode 100644 index 0000000..f7300e2 --- /dev/null +++ b/data/example/UnitC_id.txt @@ -0,0 +1,294 @@ +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 
+10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 diff --git a/data/example/UnitC_information.txt b/data/example/UnitC_information.txt new file mode 100644 index 0000000..4d21d56 --- /dev/null +++ b/data/example/UnitC_information.txt @@ -0,0 +1,294 @@ +0 0 1 1 0 1 0 1 +0 0 0 1 0 0 0 1 +0 1 1 1 1 0 1 1 +0 0 0 1 0 0 0 1 +0 1 1 1 1 1 1 1 +0 0 1 0 0 1 0 0 +0 1 1 0 1 1 0 0 +0 1 0 0 0 1 0 0 +0 0 1 1 1 0 0 1 +0 0 0 1 0 0 0 1 +0 1 0 0 0 1 0 0 +0 1 1 0 0 1 0 0 +0 1 1 1 0 1 1 1 +0 1 0 1 0 0 1 1 +0 0 0 0 0 0 0 0 +1 0 1 1 1 0 1 1 +0 0 1 1 0 1 0 1 +1 0 0 0 1 0 0 0 +0 1 1 1 0 1 0 1 +0 0 1 1 1 0 0 1 +0 1 1 1 0 1 1 1 +1 0 0 0 1 0 0 0 +1 0 0 1 1 0 0 1 +0 0 0 1 0 0 0 1 +1 1 1 1 1 1 0 1 +0 1 1 0 1 1 0 0 +0 1 0 1 0 0 1 1 +0 0 1 0 0 1 0 0 +0 1 0 1 0 0 1 1 +0 1 0 1 0 1 1 1 +0 0 0 1 0 0 0 1 +0 1 0 1 0 0 1 1 +1 0 0 0 1 0 0 0 +1 0 0 1 1 0 1 1 +1 0 0 0 0 0 1 0 +0 1 0 1 0 1 0 1 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 1 0 0 0 1 +0 0 0 0 0 0 0 0 +0 0 0 1 0 0 0 1 +0 1 0 0 0 1 0 0 +1 0 1 0 1 1 0 0 +1 1 0 1 0 1 1 1 +0 1 0 0 0 0 1 0 +1 0 0 0 0 0 1 0 +0 0 0 1 0 0 0 1 +0 0 1 0 1 0 0 0 +1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 +0 1 1 0 1 1 0 0 +1 0 1 0 1 1 0 0 +0 1 0 0 0 1 1 0 +1 0 0 0 1 0 0 0 +1 0 0 0 0 0 1 0 +0 0 1 1 1 0 0 1 +0 1 0 0 0 0 1 0 +0 0 1 1 1 0 0 1 +0 1 0 1 0 0 1 1 +0 1 1 0 0 1 1 0 +0 1 1 0 0 1 1 0 +1 0 0 0 0 0 1 0 +0 1 0 1 0 1 0 1 +0 1 0 0 0 0 1 0 +1 0 0 0 1 0 0 0 +0 1 1 0 0 1 1 0 +0 0 0 1 0 0 0 1 +0 1 0 0 0 1 0 0 +1 1 0 1 0 1 1 1 +0 0 0 1 0 0 0 1 +0 1 1 1 1 1 1 1 +0 1 0 0 0 0 1 0 +0 0 0 0 0 0 0 0 +0 1 0 1 0 1 0 1 +0 0 0 1 0 0 0 1 +1 0 0 0 1 0 0 0 +0 1 1 0 0 1 1 0 +0 1 0 0 0 0 1 0 +0 0 0 0 0 0 
0 0 +0 0 0 1 0 0 0 1 +0 0 1 0 1 0 0 0 +0 0 1 1 0 1 0 1 +0 0 0 0 0 0 0 0 +0 1 0 0 0 1 0 0 +0 0 0 1 0 0 0 1 +1 1 0 0 1 0 1 0 +1 1 1 1 1 1 1 1 +0 1 0 0 0 1 0 0 +0 0 0 0 0 0 0 0 +1 0 1 0 1 1 0 0 +0 1 0 1 0 0 1 1 +1 1 1 1 1 1 0 1 +0 1 0 1 0 1 0 1 +0 1 1 0 0 1 0 0 +1 0 0 0 1 0 1 0 +0 0 0 1 0 0 0 1 +0 0 1 0 1 0 0 0 +0 1 0 1 0 0 1 1 +0 0 0 0 0 0 0 0 +1 0 0 1 0 0 1 1 +1 0 0 0 1 0 0 0 +0 0 0 0 0 0 0 0 +1 0 1 0 1 1 0 0 +0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +1 0 1 0 1 1 0 0 +0 0 1 0 0 1 0 0 +1 0 1 0 1 1 1 0 +0 0 0 0 0 0 0 0 +0 0 0 1 0 0 0 1 +1 0 0 0 0 0 1 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +1 0 0 0 1 0 0 0 +1 0 1 0 1 1 1 0 +0 0 0 0 0 0 0 0 +1 1 0 1 1 1 0 1 +0 1 0 0 0 0 1 0 +1 1 0 0 1 1 0 0 +0 0 0 0 0 0 0 0 +0 0 0 1 0 0 0 1 +0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 +0 0 1 0 1 1 0 0 +1 1 1 0 1 0 1 0 +0 1 1 1 0 1 0 1 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 1 0 0 0 1 +1 1 0 1 1 0 1 1 +1 0 1 0 1 1 1 0 +0 1 0 1 0 1 0 1 +1 0 0 0 1 0 0 0 +0 0 0 0 0 0 0 0 +1 1 1 0 1 0 1 0 +1 0 1 0 1 1 0 0 +1 0 0 1 1 0 1 1 +1 0 0 0 0 0 1 0 +0 1 0 0 0 1 0 0 +0 0 0 1 0 0 0 1 +0 0 0 1 0 0 0 1 +0 0 0 0 0 0 0 0 +1 1 1 0 1 1 1 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +1 0 0 1 1 0 1 1 +0 1 0 0 0 0 1 0 +1 0 0 0 0 0 1 0 +1 0 0 0 1 0 1 0 +1 1 0 0 1 0 1 0 +0 1 1 0 1 1 0 0 +0 0 0 0 0 0 0 0 +1 0 0 0 0 0 1 0 +1 0 0 0 1 0 0 0 +0 0 1 0 0 1 0 0 +1 0 0 0 1 0 1 0 +1 1 0 1 1 0 1 1 +0 1 1 0 1 1 0 0 +1 0 0 0 0 0 1 0 +0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +1 0 0 0 0 0 1 0 +1 1 1 0 1 0 1 0 +1 0 1 0 1 1 0 0 +0 1 1 0 1 1 0 0 +0 1 0 1 0 1 0 1 +1 1 0 1 1 0 1 1 +1 0 0 1 0 0 1 1 +0 0 1 0 1 1 0 0 +0 0 1 0 0 1 0 0 +0 1 1 1 1 1 0 1 +0 0 0 0 0 0 0 0 +1 0 0 0 1 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +0 1 0 1 0 1 0 1 +0 0 0 0 0 0 0 0 +0 0 0 1 0 0 0 1 +1 0 0 0 1 0 0 0 +1 1 1 0 1 1 1 0 +1 0 0 0 1 0 0 0 +0 0 0 0 0 0 0 0 +1 0 0 1 1 0 1 1 +1 0 0 0 0 0 1 0 +1 1 0 0 1 1 0 0 +1 0 1 0 1 0 0 0 +1 0 0 0 1 0 0 0 +1 0 1 0 1 0 0 0 +1 0 0 1 0 0 1 1 +0 0 0 0 0 0 0 0 +1 1 0 0 1 1 0 0 +1 0 0 
0 1 0 1 0 +1 0 1 0 1 0 1 0 +1 0 0 0 1 0 1 0 +0 0 0 0 0 0 0 0 +1 0 1 0 1 1 1 0 +1 0 0 1 0 0 1 1 +1 1 1 0 1 1 0 0 +0 0 0 0 0 0 0 0 +1 1 0 0 0 0 1 0 +0 1 0 1 0 1 0 1 +0 0 0 0 0 0 0 0 +1 0 1 0 1 1 0 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 1 0 1 0 0 1 1 +1 0 0 0 1 0 1 0 +1 0 0 1 1 0 1 1 +1 0 0 0 1 0 0 0 +1 0 0 0 0 0 1 0 +1 0 0 0 1 0 1 0 +0 0 0 0 0 0 0 0 +1 0 0 1 1 0 1 1 +0 0 1 0 1 0 0 0 +1 0 0 1 1 0 1 1 +1 1 1 0 1 1 0 0 +1 0 0 0 1 0 1 0 +1 0 0 1 0 0 1 1 +0 1 0 1 0 0 1 1 +1 0 0 1 1 0 0 1 +1 0 1 1 0 1 1 1 +0 0 0 1 0 0 0 1 +1 0 0 1 1 0 1 1 +1 1 0 1 0 0 1 1 +1 0 0 1 1 0 0 1 +1 0 1 0 1 1 1 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 0 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 0 0 +1 0 1 1 0 1 1 1 +0 0 0 0 0 0 0 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 1 0 0 1 1 0 +1 0 0 1 1 0 0 1 +0 0 0 1 0 0 0 1 +1 0 1 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 1 1 0 1 1 0 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 0 1 1 0 0 1 +1 1 0 0 1 0 1 0 +1 0 0 1 1 0 0 1 +1 1 0 0 0 0 1 0 +1 1 0 1 1 0 1 1 +1 0 0 0 1 0 1 0 +1 0 0 1 1 0 1 1 +1 0 0 1 1 0 0 1 +1 0 1 0 1 1 1 0 +1 0 0 1 1 0 0 1 +1 0 0 0 1 0 1 0 +1 0 0 1 1 0 1 1 +0 1 0 1 0 1 0 1 +0 0 0 0 0 0 0 0 +1 0 0 0 1 0 1 0 +1 0 1 1 0 1 1 1 +1 0 1 0 1 1 1 0 +1 0 0 1 1 0 0 1 +1 0 0 0 1 0 0 0 +1 0 1 0 1 1 1 0 +1 0 0 0 0 0 1 0 +1 0 1 1 1 0 1 1 +1 0 0 1 1 0 1 1 +1 0 1 1 1 0 0 1 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 1 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 1 0 0 0 1 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 1 0 1 1 0 1 1 +1 0 0 1 1 0 1 1 +1 0 0 1 1 0 1 1 +1 1 0 0 1 0 1 0 +1 0 0 1 1 0 1 1 +1 0 0 0 0 0 1 0 +1 0 1 0 1 0 1 0 +0 0 0 1 0 0 0 1 diff --git a/data/example/train_result.log b/data/example/train_result.log new file mode 100644 index 0000000..22a19c4 --- /dev/null +++ b/data/example/train_result.log @@ -0,0 +1,133 @@ +[INFO] 0 training successful +[INFO] 1 training successful +[INFO] 1 training successful +[INFO] 2 training failed +[INFO] 2 training failed +[INFO] 3 
training successful +[INFO] 3 training successful +[INFO] 4 training successful +[INFO] 4 training successful +[INFO] 5 training failed +[INFO] 5 training failed +[INFO] 6 training successful +[INFO] 6 training successful +[INFO] 7 training successful +[INFO] 7 training successful +[INFO] 8 training failed +[INFO] 8 training failed +[INFO] 9 training successful +[INFO] 9 training successful +[INFO] 10 training successful +[INFO] 10 training successful +[INFO] 11 training failed +[INFO] 11 training failed +[INFO] 12 training successful +[INFO] 12 training successful +[INFO] 13 training failed +[INFO] 13 training failed +[INFO] 14 training successful +[INFO] 14 training successful +[INFO] 15 training failed +[INFO] 15 training failed +[INFO] 16 training successful +[INFO] 16 training successful +[INFO] 17 training successful +[INFO] 17 training successful +[INFO] 18 training failed +[INFO] 18 training failed +[INFO] 19 training successful +[INFO] 19 training successful +[INFO] 20 training successful +[INFO] 20 training successful +[INFO] 21 training successful +[INFO] 21 training successful +[INFO] 22 training failed +[INFO] 22 training failed +[INFO] 23 training successful +[INFO] 23 training successful +[INFO] 24 training successful +[INFO] 24 training successful +[INFO] 25 training successful +[INFO] 25 training successful +[INFO] 26 training successful +[INFO] 26 training successful +[INFO] 27 training failed +[INFO] 27 training failed +[INFO] 28 training successful +[INFO] 28 training successful +[INFO] 29 training successful +[INFO] 29 training successful +[INFO] 30 training successful +[INFO] 30 training successful +[INFO] 31 training successful +[INFO] 31 training successful +[INFO] 32 training successful +[INFO] 32 training successful +[INFO] 33 training successful +[INFO] 33 training successful +[INFO] 34 training successful +[INFO] 34 training successful +[INFO] 35 training successful +[INFO] 35 training successful +[INFO] 36 training successful +[INFO] 37 
training successful +[INFO] 38 training successful +[INFO] 39 training successful +[INFO] 40 training successful +[INFO] 41 training successful +[INFO] 42 training successful +[INFO] 43 training successful +[INFO] 44 training successful +[INFO] 45 training successful +[INFO] 46 training successful +[INFO] 47 training successful +[INFO] 48 training successful +[INFO] 49 training failed +[INFO] 50 training successful +[INFO] 51 training successful +[INFO] 52 training successful +[INFO] 53 training successful +[INFO] 54 training successful +[INFO] 55 training successful +[INFO] 56 training successful +[INFO] 57 training failed +[INFO] 58 training successful +[INFO] 59 training successful +[INFO] 60 training successful +[INFO] 61 training successful +[INFO] 62 training successful +[INFO] 63 training successful +[INFO] 64 training successful +[INFO] 65 training successful +[INFO] 66 training successful +[INFO] 67 training failed +[INFO] 68 training successful +[INFO] 69 training successful +[INFO] 70 training successful +[INFO] 71 training successful +[INFO] 72 training successful +[INFO] 73 training successful +[INFO] 74 training failed +[INFO] 75 training successful +[INFO] 76 training successful +[INFO] 77 training successful +[INFO] 78 training successful +[INFO] 79 training successful +[INFO] 80 training successful +[INFO] 81 training successful +[INFO] 82 training successful +[INFO] 83 training successful +[INFO] 84 training successful +[INFO] 85 training successful +[INFO] 86 training successful +[INFO] 87 training successful +[INFO] 88 training successful +[INFO] 89 training successful +[INFO] 90 training successful +[INFO] 91 training successful +[INFO] 92 training successful +[INFO] 93 training failed +[INFO] 94 training successful +[INFO] 95 training failed +[INFO] 96 training successful +[INFO] 97 training successful diff --git a/data/test/train_result.log b/data/test/train_result.log new file mode 100644 index 0000000..7445fc5 --- /dev/null +++ 
b/data/test/train_result.log @@ -0,0 +1 @@ +[INFO] 0 training successful diff --git a/data/unita_id.txt b/data/unita_id.txt new file mode 100644 index 0000000..5f39881 --- /dev/null +++ b/data/unita_id.txt @@ -0,0 +1,338 @@ +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +4 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +5 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +6 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +7 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +8 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 diff --git a/data/unita_information.txt b/data/unita_information.txt new file mode 100644 index 0000000..776dfef --- /dev/null +++ b/data/unita_information.txt @@ -0,0 +1,338 @@ +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
+1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 1 1 1 1 1 1 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 
0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 
1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 
0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 
1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 0 0 1 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 0 1 1 0 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 
0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 1 1 1 1 1 1 1 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 1 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 0 1 0 0 1 1 0 1 1 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 diff --git a/data/unitc_id.txt b/data/unitc_id.txt new file mode 100644 index 0000000..0c843cc --- /dev/null +++ b/data/unitc_id.txt @@ -0,0 +1,108 @@ +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 
+1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +10 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 +9 diff --git a/data/unitc_information.txt b/data/unitc_information.txt new file mode 100644 index 0000000..a61c247 --- /dev/null +++ b/data/unitc_information.txt @@ -0,0 +1,108 @@ +0 0 1 1 0 1 0 1 +0 0 0 1 0 0 0 1 +0 1 1 1 1 0 1 1 +0 0 0 1 0 0 0 1 +0 1 1 1 1 1 1 1 +0 0 1 0 0 1 0 0 +0 1 1 0 1 1 0 0 +0 1 0 0 0 1 0 0 +0 0 1 1 1 0 0 1 +0 0 0 1 0 0 0 1 +0 1 0 0 0 1 0 0 +0 1 1 0 0 1 0 0 +0 1 1 1 0 1 1 1 +0 1 0 1 0 0 1 1 +0 0 0 0 0 0 0 0 +1 0 1 1 1 0 1 1 +0 0 1 1 0 1 0 1 +1 0 0 0 1 0 0 0 +0 1 1 1 0 1 0 1 +0 0 1 1 1 0 0 1 +0 1 1 1 0 1 1 1 +1 0 0 0 1 0 0 0 +1 0 0 1 1 0 0 1 +0 0 0 1 0 0 0 1 +1 1 1 1 1 1 0 1 +0 1 1 0 1 1 0 0 +0 1 0 1 0 0 1 1 +0 0 1 0 0 1 0 0 +0 1 0 1 0 0 1 1 +0 1 0 1 0 1 1 1 +0 0 0 1 0 0 0 1 +0 1 0 1 0 0 1 1 +1 0 0 0 1 0 0 0 +1 0 0 1 1 0 1 1 +1 0 0 0 0 0 1 0 +0 1 0 1 0 1 0 1 +0 0 0 0 0 0 0 0 +1 0 0 1 0 0 1 1 +1 0 0 0 1 0 0 0 +0 0 0 0 0 0 0 0 +1 0 1 0 1 1 0 0 +0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +1 0 1 0 1 1 0 0 +0 0 1 0 0 1 0 0 +1 0 1 0 1 1 1 0 +0 0 0 0 0 0 0 0 +0 0 0 1 0 0 0 1 +1 0 0 0 0 0 1 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +1 0 0 0 1 0 0 0 +1 0 1 0 1 1 1 0 +0 0 0 0 0 0 0 0 +1 1 0 1 1 1 0 1 +0 1 0 0 0 0 1 0 +1 1 0 0 1 1 0 0 +0 0 0 0 0 0 0 0 +0 0 0 1 0 0 0 1 +0 0 1 0 0 1 0 0 +0 0 0 0 0 0 0 0 +0 0 1 0 1 1 0 0 +1 1 1 0 1 0 1 0 +0 1 1 1 0 1 0 1 +0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 +0 0 0 1 0 0 0 1 +1 1 0 1 1 0 1 1 +1 0 0 0 1 0 1 0 +1 0 1 0 1 0 1 0 +1 0 0 0 1 0 1 0 +0 0 0 0 0 0 0 0 +1 0 1 0 1 1 1 0 +1 0 0 1 0 0 1 1 +1 1 1 0 1 1 0 0 +0 0 0 0 0 0 0 0 +1 1 0 0 0 0 1 0 +0 1 0 1 0 1 0 1 +0 0 0 0 0 0 0 0 +1 0 1 0 1 1 0 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 0 0 0 1 0 1 0 +1 1 0 1 0 0 1 1 +1 0 0 0 1 0 1 0 +1 0 0 1 1 0 1 1 +1 0 0 
0 1 0 0 0 +1 0 0 0 0 0 1 0 +1 0 0 0 1 0 1 0 +0 0 0 0 0 0 0 0 +1 0 0 1 1 0 1 1 +0 0 1 0 1 0 0 0 +1 0 0 1 1 0 1 1 +1 1 1 0 1 1 0 0 +1 0 0 0 1 0 1 0 +1 0 0 1 0 0 1 1 +0 1 0 1 0 0 1 1 +1 0 0 1 1 0 0 1 +1 0 1 1 0 1 1 1 +0 0 0 1 0 0 0 1 +1 0 0 1 1 0 1 1 +1 1 0 1 0 0 1 1 +1 0 0 1 1 0 0 1 +1 0 1 0 1 1 1 0 diff --git a/dl_src/README.md b/dl_src/README.md new file mode 100644 index 0000000..e69de29 diff --git a/dl_src/cnn.py b/dl_src/cnn.py new file mode 100644 index 0000000..e24876d --- /dev/null +++ b/dl_src/cnn.py @@ -0,0 +1,390 @@ +# reference:https://github.com/shanglianlm0525/PyTorch-Networks/blob/f8b0376ba6a0dcfd5d461fcbd1cbd7c236a944b4/SemanticSegmentation/SegNet.py + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim +from tensorboardX import SummaryWriter + +import numpy as np +import random + +from collections import OrderedDict +from visualization_utils import save_visualize_gif + + +def Conv2DBNReLU(in_channels, out_channels, kernel_size, stride, pad, groups=1): + return nn.Sequential( + nn.Conv2d(in_channels=in_channels, out_channels=out_channels, \ + kernel_size=kernel_size, stride=stride, padding=pad, groups=groups), + nn.BatchNorm2d(out_channels), + nn.Sigmoid() + ) + + +def Conv2D(in_channels, out_channels, kernel_size, stride, pad, groups=1): + return nn.Sequential( + nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=1, groups=groups), + ) + + +# class ConvClassifier(nn.Module): +# def __init__(self, in_channels, out_channels, kernels, strides, pads, groups, class_num, device): +# super().__init__() +# convnet = OrderedDict() +# for l in range(len(in_channels)): +# convnet[f"conv_{l}"] = Conv2DBNReLU(in_channels[l], out_channels[l], kernels[l], strides[l], pads[l], groups[l]) +# convnet["flatten"] = nn.Flatten() +# convnet["fc"] = nn.Linear(39600*15, class_num) +# convnet["sigmoid"] = nn.Sigmoid() +# self.ConvNet = nn.Sequential(convnet) +# 
self.ConvNet = self.ConvNet.float() +# self.class_num = class_num +# self.sgd_optimizer = optim.SGD(self.ConvNet.parameters(), lr = 0.00001, momentum = 0.9) +# self.criterion = nn.MSELoss() +# self.device = device + + +# def forward(self, x): +# # print(self.ConvNet) +# nn.Conv2d(in_channels=in_channels, out_channels=out_channels, \ +# kernel_size=kernel_size, stride=stride, padding=pad, groups=groups), +# ) + + +class I3DNet64(nn.Module): + def __init__(self, args): + super().__init__() + + +class C3DNet128(nn.Module): + def __init__(self, args): + super().__init__() + # 3Dkernel: IC,OC,k1,k2,k3 + # 4D Input: B,IC,F,H,W + self.conv1 = nn.Conv3d(1, 64, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool1 = nn.MaxPool3d(kernel_size=(1, 2, 2), stride=(1, 2, 2)) + + self.conv2 = nn.Conv3d(64, 128, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool2 = nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2)) + + self.conv3a = nn.Conv3d(128, 256, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.conv3b = nn.Conv3d(256, 256, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool3 = nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2)) + + self.conv4a = nn.Conv3d(256, 512, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.conv4b = nn.Conv3d(512, 512, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool4 = nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2)) + + self.conv5a = nn.Conv3d(512, 512, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.conv5b = nn.Conv3d(512, 512, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool5 = nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2), padding=(0, 1, 1)) + + + self.conv6a = nn.Conv3d(512, 1024, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.conv6b = nn.Conv3d(1024, 1024, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool6 = nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2), padding=(1, 1, 1)) + + # self.fc6 = nn.Linear(8192, 4096) + self.fc6 = nn.Linear(1024*3*3*3, 4096) + self.fc7 = nn.Linear(4096, 4096) + self.fc8 = 
nn.Linear(4096, args.class_num) + + self.dropout = nn.Dropout(p=0.5) + + self.relu = nn.ReLU() + self.softmax = nn.Softmax() + + + + def forward(self, x): + if x.dim()==4: + x = x.unsqueeze(1) + # 这里输入为[1,1,80,128,128] + h = self.relu(self.conv1(x)) + h = self.pool1(h) + + h = self.relu(self.conv2(h)) + h = self.pool2(h) + + h = self.relu(self.conv3a(h)) + h = self.relu(self.conv3b(h)) + h = self.pool3(h) + + h = self.relu(self.conv4a(h)) + h = self.relu(self.conv4b(h)) + h = self.pool4(h) + + h = self.relu(self.conv5a(h)) + h = self.relu(self.conv5b(h)) + h = self.pool5(h) + + # 这里输出为[1,512,5,5,5] + # add by wyh + # h = self.relu(self.conv6a(h)) + # h = self.relu(self.conv6b(h)) + # h = self.pool6(h) + # 这里输出为[1,1024,2,3,3] + + h = h.view(-1, 1024*3*3*3) + h = self.relu(self.fc6(h)) + h = self.dropout(h) + h = self.relu(self.fc7(h)) + h = self.dropout(h) + + logits = self.fc8(h) + probs = self.softmax(logits) + + return probs + + + +class C3DNet64(nn.Module): + def __init__(self, args): + super().__init__() + # 3Dkernel: IC,OC,k1,k2,k3 + # 4D Input: B,IC,F,H,W + self.conv1 = nn.Conv3d(1, 64, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool1 = nn.MaxPool3d(kernel_size=(1, 2, 2), stride=(1, 2, 2)) + + self.conv2 = nn.Conv3d(64, 128, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool2 = nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2)) + + self.conv3a = nn.Conv3d(128, 256, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.conv3b = nn.Conv3d(256, 256, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool3 = nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2)) + + self.conv4a = nn.Conv3d(256, 512, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.conv4b = nn.Conv3d(512, 512, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool4 = nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2)) + + self.conv5a = nn.Conv3d(512, 512, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.conv5b = nn.Conv3d(512, 512, kernel_size=(3, 3, 3), padding=(1, 1, 1)) + self.pool5 = 
nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2), padding=(0, 1, 1)) + + self.fc6 = nn.Linear(512*1*3*3, 4096) + self.fc7 = nn.Linear(4096, 4096) + self.fc8 = nn.Linear(4096, args.class_num) + + self.dropout = nn.Dropout(p=0.5) + + self.relu = nn.ReLU() + self.softmax = nn.Softmax() + + + + def forward(self, x): + if x.dim()==4: + x = x.unsqueeze(1) + # 这里输入为[1,1,80,128,128] + h = self.relu(self.conv1(x)) + h = self.pool1(h) + + h = self.relu(self.conv2(h)) + h = self.pool2(h) + + h = self.relu(self.conv3a(h)) + h = self.relu(self.conv3b(h)) + h = self.pool3(h) + + h = self.relu(self.conv4a(h)) + h = self.relu(self.conv4b(h)) + h = self.pool4(h) + + h = self.relu(self.conv5a(h)) + h = self.relu(self.conv5b(h)) + h = self.pool5(h) + + h = h.view(-1, 512*1*3*3) + h = self.relu(self.fc6(h)) + h = self.dropout(h) + h = self.relu(self.fc7(h)) + h = self.dropout(h) + + logits = self.fc8(h) + probs = self.softmax(logits) + + return probs + +class ConvNet(nn.Module): + def __init__(self, args): + super().__init__() + in_channels = args.in_channels + out_channels = args.out_channels + kernels = args.kernels + strides = args.strides + pads = args.pads + groups = args.groups + class_num = args.class_num + convnet = OrderedDict() + for l in range(len(in_channels)-4): + convnet[f"conv_{l}"] = Conv2DBNReLU(in_channels[l], out_channels[l], kernels[l], strides[l], pads[l], groups[l]) + convnet["pooling"] = nn.MaxPool2d(kernel_size=3, stride=3) + convnet["flatten"] = nn.Flatten() + convnet["conv14"] = nn.Linear(1024, 1024) + convnet["conv14"] = nn.Linear(1024, 1024) + convnet["conv15"] = nn.Linear(1024, 968) + convnet["conv16"] = nn.Linear(968, 2640) + convnet["fc"] = nn.Linear(2640, class_num) + convnet["act"] = nn.Softmax() + self.convnet = nn.Sequential(convnet) + + def forward(self, x): + return self.convnet(x) + + +class ConvClassifier(nn.Module): + def __init__(self, cnn_cfg, train_cfg): + super().__init__() + self.classifierNet = self.create_model(cnn_cfg, train_cfg) + 
self.classifierNet = self.classifierNet.float() + self.class_num = cnn_cfg.class_num + self.sgd_optimizer = optim.SGD(self.classifierNet.parameters(), \ + lr = train_cfg.lr, \ + momentum = train_cfg.momentum) + self.criterion = nn.MSELoss() + self.device = train_cfg.device + + + def create_model(self, cnn_cfg, train_cfg): + if train_cfg.net == "convnet": + return ConvNet(cnn_cfg) + elif train_cfg.net == "c3d": + return C3DNet64(cnn_cfg) + elif train_cfg.net == "i3d": + return I3DNet64(cnn_cfg) + else: + NotImplementedError + + + + def forward(self, x): + # print(self.classifierNet) + # cuda_used = torch.cuda.memory_allocated() + # cuda_reserved = torch.cuda.memory_reserved() + # print(f"\tCuda used {cuda_used}/{cuda_reserved}") + x = x.to(self.device) + y = self.classifierNet(x) + # for layer in range(len(self.classifierNet)): + # x = self.classifierNet[layer](x) + # print(layer, x.shape) + return y + + + def cal_loss(self, y_pred, y_b): + self.sgd_optimizer.zero_grad() + y_b = y_b.to(self.device) + loss = self.criterion(y_pred, y_b) + return loss + + + def train(self, x, y): + # x:b,ic,h,w y:b,1,class_num + shuffle_batch_idx = random.sample(range(len(x)), len(x)) + for b_i, ori_i in enumerate(shuffle_batch_idx): + x_b = x[ori_i] + y_b = y[ori_i] + y_pred = self.forward(x_b) + if (b_i+1)%100==0: + print(f"\tTraining iteration:{b_i+1}/{len(x)}") + # print(f"x_b:{x_b.sum()}") + # print(f"y_pred_shape:{y_pred.shape}, y_shape:{y_b.shape}") + # print(f"y_pred:{y_pred}, \n y:{y_b}") + loss = self.cal_loss(y_pred, y_b) + loss.backward() + # print(self.classifierNet[17].weight.grad.sum()) + self.sgd_optimizer.step() + # for b_cnt in range(x_b.shape[0]): + # input_list = list() + # input_tensor = x_b[b_cnt].detach().cpu() + # for frame_cnt in range(input_tensor.shape[0]): + # input_list.append(input_tensor[frame_cnt]) + # input_shape = tuple(input_list[0].shape) + # label = int(torch.where(y_b.detach().cpu()[b_cnt]==1)[0]) + # image_name = 
f"/yhwang/0-Projects/1-snn/dl_src/dl_visualize/label{label}_train_batch{b_i}_data{b_cnt}.gif" + # save_visualize_gif(input_list, input_shape, image_name) + + train_acc, train_loss = self.test(x, y) + return train_acc, train_loss + + def test(self, x, y): + y_pred_list = list() + loss_list = list() + acc_list = list() + for b_i in range(len(x)): + x_b = x[b_i] + y_b = y[b_i] + y_pred = self.forward(x_b) + loss = self.cal_loss(y_pred, y_b) + loss_list.append(loss.to("cpu").detach().numpy()) + y_label_class = y_b.argmax(dim=1) + y_pred_class = y_pred.cpu().argmax(dim=1) + acc_list.append(torch.sum(y_label_class==y_pred_class)/len(y_pred_class)) + test_acc = np.mean(acc_list) + test_loss = np.mean(loss_list) + return test_acc, test_loss + + + def train_test(self, train_x, train_y, test_x, test_y, epochs, log_dir): + print("log directory", log_dir) + writer = SummaryWriter(log_dir) + train_acc_list = list() + train_loss_list = list() + test_acc_list = list() + test_loss_list = list() + for e in range(epochs): + print(f"-------------------------Epoch {e+1}/{epochs} ---------------------") + train_acc, train_loss = self.train(train_x, train_y) + test_acc, test_loss = self.test(test_x, test_y) + writer.add_scalar("train/acc", train_acc, e) + writer.add_scalar("train/loss", train_loss, e) + writer.add_scalar("test/acc", test_acc, e) + writer.add_scalar("test/loss", test_loss, e) + print(f"Train: acc={train_acc}|loss={train_loss}") + print(f"test: acc={test_acc}|loss={test_loss}") + with open(f"{log_dir}/{self.class_num}_epoch{epochs}.txt", "a") as f: + f.write(f"-------------------------Epoch {e+1}/{epochs} ---------------------\n") + f.write(f"Train: acc={train_acc}|loss={train_loss}\n") + f.write(f"test: acc={test_acc}|loss={test_loss}\n") + train_acc_list.append(train_acc) + train_loss_list.append(train_loss) + test_acc_list.append(test_acc) + test_loss_list.append(test_loss) + model_name = f"{log_dir}/{self.class_num}_epoch{epochs}.pkl" + torch.save(self.classifierNet, 
model_name) + + + def get_batch_data(self, x, y, batch_size=1): + if isinstance(x, np.ndarray): + x = torch.from_numpy(x).float() + if isinstance(y, np.ndarray): + y = torch.from_numpy(np.array(y)).float() + + x_batch = list() + y_batch = list() + if x.dim()==4: + shuffle_idx = torch.randperm(x.shape[0]) + x = x[shuffle_idx, :, :, :] + assert x.shape[0]%batch_size == 0 + for b_idx in range(0, x.shape[0], batch_size): + x_batch.append(x[b_idx:b_idx+batch_size,:,:,:]) + # size, batch_size, class_num 98,1,10 + if y.dim()==1: + assert y.shape[0]%batch_size == 0 + for b_idx in range(int(x.shape[0]/batch_size)): + y_batch.append(torch.zeros(batch_size, self.class_num)) + for idx in range(batch_size): + y_idx = int(shuffle_idx[b_idx*batch_size+idx]) + y_batch[b_idx][idx][int(y[y_idx])] = 1 + # print(y_batch[b_idx].shape) + assert len(x_batch) == len(y_batch) + return x_batch, y_batch + + def resize_data(self, x, frame_scale=1, feature_scale=1): + x_new = list() + frame_interval = int(1/frame_scale) + for x_b in x: + # print(x_b.shape) + x_b_resize = F.interpolate(x_b, None, feature_scale, mode='bilinear') + x_new.append(x_b_resize[:, ::frame_interval, :, :]) + return x_new diff --git a/dl_src/cnn_cfg.py b/dl_src/cnn_cfg.py new file mode 100644 index 0000000..6714183 --- /dev/null +++ b/dl_src/cnn_cfg.py @@ -0,0 +1,19 @@ +import argparse + +parser = argparse.ArgumentParser(description='CNN configuration.') +parser.add_argument('--in_channels', type=list, help='in_channels', default=[]) +parser.add_argument('--out_channels', type=list, help='out_channels', default=[]) +parser.add_argument('--kernels', type=list, help='kernels', default=[]) +parser.add_argument('--strides', type=list, help='strides', default=[]) +parser.add_argument('--pads', type=list, help='pads', default=[]) +parser.add_argument('--groups', type=list, help='groups', default=[]) +parser.add_argument('--class_num', type=int, help='class_num', default=10) + +cnn_cfg = parser.parse_args() + 
+cnn_cfg.in_channels = [80, 12, 252, 256, 256, 512, 512, 512, 512, 512, 1024, 1024, 1024, 1024, 1024, 968] +cnn_cfg.out_channels = [12, 252, 256, 256, 512, 512, 512, 512, 512, 1024, 1024, 1024, 1024, 1024, 968, 2640] +cnn_cfg.kernels = [3, 4, 1, 2, 3, 1, 1, 1, 2, 3, 1, 1, 2, 1, 1, 1] +cnn_cfg.strides = [2, 2, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1] +cnn_cfg.pads = [0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0] +cnn_cfg.groups = [1, 2, 2, 2, 32, 4, 4, 4, 16, 64, 8, 8, 32, 8, 8, 8] \ No newline at end of file diff --git a/dl_src/comparison.xlsx b/dl_src/comparison.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..834152ec2788daf74e458f422332584de9953191 GIT binary patch literal 8908 zcmaKSWk4L;vM%oK8l2z^?g4@YA0Q#P!$6S1gL`ndAi>?;-GYY@+@0Vq!NMJ~_j@}h z_nuw-qo;eiYE^$FtJbOp%EQ9pK_MX_LCLCt)S!MN63A~CD|UT*Q!5Ad_h5TVBQV&K z&BfY^?deWgjG|=^04?zN0blyMPZrTQoWCtxyyD>p^k#3{R5UZ#?0=}jK71xS@dvO-b<>h{m6s(?;60$h%cksiY}BRg zAJWiVutZTV4Ul{tz1}>^c{;{YWC)>Zqv^y@lyr&fL%T@~4^{N~ zj5`7*y9Euw0Mxn0JU1jSXflfZ0e!kr@}ahb0=&>`a84)IqYK5|XsKe6FLl)slwgTX z@+omN2Bk;BO^mBz@|Bx-7aQM z?_1zJZ*6ZVOhG5CiRD4n7b_|r@J|j#P$n2p0dcN6qr*V zyhcUy=H9Bwk04$hq2=J`!--#-S8SD^d2fgMT!ZJvxxr(!cdD37#&nn`opQ#fBlY3b zDSH2!zM~@Ho3Z-^e*G~q^30oU%*QXzuAW8>x3^Hbt%FBlS|(LL&IGhU@1^RkrtdCZ zmuAI`tx$Aq?rU{&w&)9&p-#b@v#q@Oo{`mm97LKo!xKExP#j`cuUBG6-_w-_qc09? 
z2q3#p_#TyUOkfGbJ%9Lot*uLu@dl+$(_<{7R&Pi@q0VFQTy&C=Zsslbt|0?Fm~g0i z$Oa7&mH+M&c{3M==6P?{k-;OyXvl5VwnB)FSNUwR`95^Bys6I~C|Q-oQ85AoPtTM6 z%_j;+*EkW7rPw|}>UXh(Anfo+5`IzGy2H!I1elqyLp{6KC~fZZgf|<$tJ`W~!VaL<|*=cTN@x zBQG1}zNkn=*S-)E{88w<=%~W-Wz6}ep-U2O9r3L4c=to~s6P^3s~+|cO$ zU`DBc{@^+2F4Eu~B0DMUs${rvX5?&++6W7C<4}Vnc$6*{AY3d*77|63fjK!%k9>0I6`*WTr)Rk|OFAN5+Ow5^%)YO=$)~7#SI%RfN$-a)Mwlk6z z0n+!Kxp@9LmADXi4$AR-T_YBVL)z>P^7hOfW_}GL*kt*Qxl9&O3!+wNSM9HNsa1u` z=dl3Mo_-GbSez5_mp+2y#j4mMXT@9;Y(_0Us<23E`+*8*SPJFPesio;>SjsDU74*H zzWm-kF6XHvvTP$RW{AfqP1;M(rnvEc)|MhhW zX=>G#@YnAv+oGerjD<(vnJD{YscDL_8a^@Gxm{8RanXAcEU=_sr^uz8UcE*xw8(wA zw1aglC-tcli+b%`BT(GO8Ba%mR*#t0J|j)CJy>G5M+9u5qEK;dPT1<&;xIcEtsywQ zkKPx~2O3U9C&n{sI^B9uif*EnGabl+L=cL35wbt1pB}9_BsmF?xJ!5Z-fd7wkzFWD z6*lyMwvjtzonx91{)0>4*aFB?wCn4#_{U}OFj02^%p2)-@`0<>2Do%kV;o# zKz-8WF*Pw?sV=c_*|1kEq?EK)L;#$LwJ&DPLR>szC*%hT z{Pxk*Ck8-0HwIJwC{GW*6G4vU3m}sn%UU}t-i43~ZeT9uoQOP!Mi~SV0cY@tgG4^L zpfGOaVotPJB%G0Bk$q&AGVix`ISj1QFjLfVj^Q>XPIk#x#`M)Ym@vvWk%*)S>8qF4 zWXmKx^mXVTN0V(gk>N+MrQC~8A93sy#4F+kP_dmdGkX%7tDpnGWoF$m`MB8TMavnV zUzKloS9q7M>lHL{ji*-&nz;F?eN{@^J2mYd_$GxYfc2oZ%?dY!ywgh!#18? 
z0K+;iITkYJA$sD<(#U=?dY&M$o*+&w3UOV({t;(Xyo;2d%}5@5UL)UXrqe{%X@9Pb z2?4(G3GecV4QyZ_ASopz0r=k-byH#{rvS|5_)t?wo#oIXx9QPR$n{eJJA3Ej0kV-C zduw?l0esLfomfoi%X^{=H!lVda)R};Y5eifM2~Z zQ7WS6btaewxP!#a+w2xY3}DP*sMYE5pN8wy)~H(BPBUia6+R2X6(9!`LHo}fp<$gx zC;duGs3+14)LTR7O>9)W4vo|FlzXniyeC)l6m_Ss>o}PCBIo($576qO4$FA@P6@6E z9D`H1tHcgRFJ`Hi@eQD28g~N}7Nd)b_-0paQF^fW(xUfqjQIGW5ePNV%iDN~Xo++T zcp9(f4WbQf>*pP0tSQN}E8TKAeHh5Z@XBSnha8Za!rlJu@Yt=mG`@dE`_Xe=+- zqlH7J&Qt2e7E_4aQ2R42&A72ht2t(O`gO&Mpe4;Jj;w8M1}4#y;rWk^k@}PdoRF*B z1FQW6wIzjf+OA}*MV$_@+@;+QNv&N2=lcm}QgYo;!*VQ`NJ52J^dr$JZwDY^jH+-s z?IiN^*^I+_ELbI2oL`p92QRJDYUr5PJ?R`{V3#$uK5ywS<<~d&Zj8Ln*D1J}uoZLvI1{>glpd>{Cxq=|?JO)(9(iHhi@t7&DdCL{^;hL#*p?9$RtHjtY(s1e6 zzM#q2YydKKq;8Hb88>1%wy&k+DoKoLD2Y~%{}&56%P-fgI{r5NnG<+DN=kbqx`Puz4z_A1Bx4uh`ApvV7<5FtP;g(O@@+N;%s#-Biro@) z*uMT*I8sm^{?M(WIk%EfcDATO%y8|af{AqQgAG{52jC_9}Bl zj^>>~#Lp(Pqw4Q{ZK$#zhXyV-k&G|HFfu2hx+fqbtp_SBMJJo|?hLMU*gv@fILLz* z%j9AHRk0|%&x|Em0=U*`e5hY~>RK-2pz_=E-J#^JLP1DD5e%n&ZInUE$s_iv(uw`5 ziE5_Wa~v%kt7~*i|D8KF{NKbZ> zJe2sC^|9XS3C0KJNvsn%(tTMafY=mTa9i^NP%s@S|xy4e8~_|=*x5a-9D`SEq|Wj zhd2dsH22yfZO5mIC)uy@@uV1%jU>!s%k^U!gK_Oy$>l%IBki?0&a&E?uCMUf3>4r{ zr{JN4xn8y@G3*Lb&CM~LW-nBsbX9j2z?>98c@*nm(+O4gOJZc+!+lpUz1t(aK!ADK zc8lMyLV41&(ttti;QnCDd#fiAM_%tXd{ws0-kjA;9@lJ!K(;Ty35TGk?w`%D&VTN? 
zAI;G)BWjj;Tt_lzSkTamN|EGS`2qSG;iSp2lw<(uf`VuMr0R@(A1-mIKjqlz?pmk5 zAa1Ju(6v)JYaen328%sRIS4y9WNKqM-cbk3QVC%4(_oT@ldvnz9;v6UPpEgef=;4Y zCz<`PF2y?9Tdxo@Za(2(wveAK?NxZFu}p7RshPBVPNht*(XDFE5e>&+KTT$L)_?o; zM5}|WUo>QBW-C4;%%en9eD*Guuv6mlP`txI#xpLwuf6n<6yxZ;74XPitWS1POKwHD zV0GbyJMg%p$Rjg1 zkM`?N`jyV60#Dwqwvv&CZOWzc)$CGfRmR7y5#4rO(|KiaVnjNn+IEEDsRt3PT1H1$ zcF$rZZ!I?V4E_7{dlr2*Oz;aB_jewFQHUWy6PMr3j8E#kPA6}?p`SJiC{gYxH!z`~ zTHpQKMgio3r#-sgcjqG17VPH%Sf0mkT`n-%Z5aVcXgn^l$;vI zgJ)^cY|~D!6)@4F5O%Au8e!@+Tf5_}$lP`=Bn9Fz))fiFT3oeTqn&iCLuts1qWBf* zJ7j4DRRSLUvxZeSE5~oD5I&FxLl4HxF352wmL=vzpUm=Xp61_-SZ1f$CUbdT%H5Tm za;8&6D$31qFgMkTMX29i(A{AOsLJTbZR3+seL~!Lv4x2g5ie^xFems7{?oy+i=|+% zwsmiq`oI<&pVm-PmVjOfwhG=&2X%`x5L39O?fjnhb(S(m4!-X4qVmx#ElR1m@5H#( z;+>870dx2q%O{x;iS(rg|Ij(wOi&(xKo>ixyC@En=~gC%eghV?7ur`=Zt`vZXegiX zrNxg)D2?(%`Um`E+RtWG3uH=_MEbSZks$JcrNOC!aQXpf1FY)C`X*g z=D9y+&Z$YX%|tx?N2^%Z+c?K9AmN3);7=j1!W8SZ+?UR_YS6`tCi`mt;<7=ahnB7JkOQ4l*?}yEMn{9Hi}2% zEmFDPC^9LgT!~p(1cMCu>`P+cW8IDPoy#`o2ba$mB%jOP;eL1kHUJsJA=PW+U=PXl_ia}z3G zZVOB%6Ug`_T6kT+teI|`T0-uv)cSyq0iNsxV~$e4)16WvlE3em*&Kr!PJqYf_(TByJ@TQI>Uf<8Gh z+V^N9z(DKSjcag}s22%~G;tPcm~&`5BaUj+{5SkT>rnMhI#dF3lXJHeG}; zV|)&l1>B!thX<{aJm(7St!YmP3GE)i&8kf=FgEw?9E(xasEjH1IdiQZxHRE5ksw*SZMX%jF*&_;q|%eHp6pe4OGK6E+JKg+F$PhAPbap%goZ9Bgy(GKm@+XljO`lrqKDvc>A-xH z8bEOrm+j_UL}|s*Sp(v?=tCpED25Rqf_MEoxlN}uwDj33F*E?7+CNBat*Ln%MU$Xx zS1;2eyKCO1JDa-7Ca#F`z`0u{*Jhe`s}4=f)g>%P&P0xSsSbO0WFt}S=-YGf+YGrh z>zowZiK4wJR{=%iT@r~#WiWyFZX|Zu4;4CMte3azMIJ?WyzR;yY^~L1nip8dI~tl=p=LhW*I4Qp zPLk?V@B$5v21Xu|2EwoA8+XHjQW~15AXUuGZ$=S1)+tnDoUf}~+VE30SS>mUuPqg) z^t1NB;yk!8HMS~TX{7+C29VacQ**a-wZH;CL00-{%8u2C!iIG78vg5KeucCPHaquE}cvH4|G z!4gdNQOa@`BWvPAx$ch9awt3Jit^*o-;EDUphU!i>~-I~=lp3Byd9u<5RPL9QqP|C zhHDK_w6^52k)rL_;LiL;Ub=QW9=+)3&{>(FMY2D^36aIf8)j<1u1Jh^)Tze}zSe2r#+ z$1QYF+56G`Z78>Yy+sMB=GD^Mz@2fEiG_)e_uhwbyeZz(2@ctETwTVi=vcN4K8 zSpG}Shf2OV=-UG2a0MLZg$U=HjBamUs+tl_MJlceBII${BV^Ijl~p~PHMMz*lJ(B& z07#z6fFf;9KTwswC`OURK`Ub|c{n6bwsU6o{Cs9WC;H0Kt;0HZR6g6mWdpCy&=sw{ 
znsT%n3ckDpMQui6`5SuBTCbE^zY`Cj~1Bj3wpK^gDM;oj{Kx_w`D?}uL+9FbM z$TU#$uhRhbe^>x)Z~BD4r>cr_Hvr4)`Yns+N;8JIPJK+_nk)`3CCSRW_u+ft?UAP&TVdu0hgMtb4EYHnEm!aN2wfAy+xm8lL9@v7^@ zS7XKr7^PL5nz7o@cV1n$=P*|q3Oy7%LCG;8#=4n~8{Fb7awh2Jp*5;P@WJFD2v?M> zVt||e`gG??q^o0@&rerpw^V#AB+LPz;9$ChE_5Q(wMCcE_k~p$cDLdqt0@Cd0ZUO8 z>iY}yp!>BQ?()goklv!JioD<-0YBDsjTuw;-x4Oe`PSD2ihzIWNzdm?%qv(sQ)F~n zfh=C>ST(U`F<-A`JSKUe*`8B?wvTMk^S6f$FYIyEbdpI&WHg-Gbmg%9D$R7l zWWI4t?z*3`&r`%HSCNPQOwis>p?*EEBOi+@%U1{AZ;V`k@-up#CVA)JBHjWa2yIZ7 zh4avYNG4~Mv2f5=rrD%44?yFB-W&;{yrHp){GnfnvG=|GemTQv4SCr=Or07@iQSX} zIKRw~HT>-b>QAg39yudf$%7G)-^UgFT5Ao5l~MwsjRMyn8VwTeZ*|epwmz@}UlBW4 zo$?cx?Bf^A@GG%5g@B)c>YQ$0+=sy60Rij3_hz4xinavU#?jQqQUA58t*L|VudZ4A zC?p4C1sr>Z4!n)$-}1qd`v!|6Qz$7wV+*ax~z&Es-j-76ukz` zQLSy>k9OY~*17qkbl5lq9qcE{6psdRUrydxdUfmiY>c;5%X+fJlqw&gSgJAsMdxxt zF};hn(Vy@S1^{|RA=ppCJ>d@xg9r7`cLTro&7Zy-_)q%pUlIH%=lAyLQ!1Cf;S7|AM|c_n1#a=YBrQ-jeLCVWdQgBAe{3Wrbw+wIU;%Xd#u7Rmd#9 zWG4OYy}z$>9`Exx{(ha$_u1*5`?;TUZ=bKZ#{d8ATiDOiaGO=S(?$1u*9O%m&6+sX zzeA5;Z~q`?+d=tXzBK-?o6zxDbHnCS76(XOo0>+ZMutIB+i~LL$#udfITTGHr9S1+qGExKWncEea?1h0rqxLBxmirJ(7iWdjw;!KZzzi+MhmoB6RsD z?m1w3#*&(`uO5^vOh2XB0rHbmuj-~q^RCU>*LFBB_#Iy}9PDEQNzU4f#Ft;avPn7E zh1sM>dqzfv@Tc>fH-LE>L~6#~+dWw@O|m-+`I<3tU1YlsYwxoEhVZz-kqclinnZHe zUSFJdqsd91!M?$Z^k^?DD--T}m97WOFK<#a_ABBsqcJt)G2~M+5~j*_UDhtm`{iGg z`-6Sb1Cq0LE%A8&b>OTk*h_LrkM=3zO1)P40}q0&i8_Wt5IW};oiA+SG)Bt6>g>`n<87RG^qQACrPv2PY9=F30k zyCFYa;hZ7c^;x^LR#QWMwgS7pmdoFqwYL@z$vY-%BEf!MoAhXx*6Oi&U%LawN|)4( z{k*uCb|ie*2Kkk?1OLc&1D?HQh*u2Q-`J3xwMU8e`sP9WhPW2<#owNsspK z-F1}y+l{^eCM1K@jD5Q}G3S>Zss_36{n13(Zp7MO)W0j#ySQXI*c~fL&f2AU&lURM zycho=J=%M8zaf-d6Al4pfJPx=$a|BaI5GPIuJ3|;c-yiqvfY@q``O$V>JExM1ol6j zNY2{Fi*~2OBm2U0En-pj>vr0#=~P{?C{KDsH}tFd-Naw&fVsW`7=6|ZXs_Mn-hM|(_k zw6Hk1#a_Vln?-8I?jpW;RIMHOc2I3gPKs-{`z5mbJpHU ze0iNM3(dfOz>xH4uW7bPxaL%}6EMq-NzK@A4Hr*h`K~UIkIY$VaYb6IwOIS`xMRY4 zQ|m8aU-yLMto@c~zdZcO9BESnH6Q zv4@Lqd>1+h@sQsdXK_}x*JbU}G1IJmcr~yWd6As8uM_8e?}4rtz`ojt^k|o^BX`Y8 zDFni(iGkO#K!(BP`HR?S)a*WM3=mTCJpg8jA~$yvLp 
z`0_u^LSBG9*n#wDkBP3YOf6hG1u!|CNzK@I_DvR^c-`0pdBfC%{<6J3YY(kT62>pC z_zCvWnIvcJR-)bKb)!yTKXI4zXqS$e=;&3a0TXtg)QtUz_)CrIY-JDmg)S!Hvb_Oo zzjEQ0V4jrn1nl-IlC$!$%2 zodQxb_DlVfg+k*qzaigklU4hgv{oCl_SSjNgy!wS@LDXxk>srXn>aOOs-5us#+ll0?y@5FI zPmT>53ikOfq({5--jrYK*)za+38ZH1x#9t-vD>Ey9h8? zwF6&(y?O!3S$m>5HN$398wd7?N2Eu)be=gsq8@(sxK~JO#y(MeUoGe~;vm?!2WUQ( z?G~(k!-fK3w(c=Eu*b|HIcu*J?M;*WM1#F$29$M6hQ@k({;95|>j&qH{jj z7sQYr?b7~CmyF+dVfa3l)QtU#c>V5TH#83Hg*IjTWP1zNE?vJ*N^G?T>`xp?&f1mY z^^8dy`;lPxbRj+3rFl2ER`dsqqd;oLE`8P>(c?EB??Y01<;nJztld1JQ0N+Hc^T~S znIvcJMdG|4j8ot~LEv4|qg~pMG@ab_8`z)SBQ;|$72j8D_bWaQ`H=LL`ZuJt+KRRR z`}&ly_j;`mu!m=poV7OnRHGp|vl|y>8 zdy41xW#{+f=cc)Nq-N}H;>7&2T<8P#6z`%m+1{45OV=}N_Q&A8ve@Y)XYHp&yQ}@) zaIkNiL3(xjnJq^6S-VC6sTq5kcz(Z`eB2xC=l+ELmhJ6WyR;8H+2*AM%zFd9u)jHL z-z!ed{a&whz`nzP^k|p%XU-UXuL4ZC5vdvbMsZ>`9;`D8^5k2u+ufATGwoTsbdGfF zfBgv9bu&rM+M9`MRa3ZZ3HIbH(xW{yqna{gx;F0VHNQt{#y(Iy-unJZ?>9!UoQl%s{W7iPvL)=@Q zggoYV=z7`Sk+nZh{zv#x*DMa~FSWz|=B$0N_-lP&U-|^>HS|c2cIor#jvSp*z<3&v znz26@C+4z`)iB7fj9YwJws&IfahIM5#+@&A1N(a~lC$=a;#;5gqkVW!KE{XiXqWb| z>%9B)3or&#NX^)fiT78(>ZasE-q3daC)sYp+Q&`)AiS|@)&%UmoJh{vQ^l$I{55_F z%=yjSr(mk7_?hk8${h2wb8T%dafIRu`*>%W&R-7Lq+wEApbRB7J zbG8x8`){oae{U~wM+LXzpd$C33dZ}lCyTDIPXtqSL}deCfR}XXkR6M=6h_Pt^*U5k64<|Flbp5Nh#$+UuHB6VducA|(LOiiyHL*{3h#UT zct~o-eonM|UAFK5yUw3SFJ*gY)-LT6Z2sXf2<$_33;*Vv|^>?2U3s&e}6Ydy@Oc zJ7D*GNP4tO*Y8VAp8f=ka{;Lt`z!H!rv4b_ZTR^1+C62vBWstg-`74`yanvpJ4w#k z2Z?Webv@q>0Q>LVq({5-nXfE*%nz`)+e>Q3ZayqoNVnUBw|K6nwVEs2omjiHANelw za~0UPW|N$?FBIp!h1mbj_Sxhe*duL8k9O%Z-^K@@w85@nPin^gP`qbz zc=jz_$RqyzGP*6TRRwGRap<$KbU@h{u;13L_?xr#-=aOIX*+z5qlW?M(f(XK-a90P z;G0cbBT_T=IpTR{+t<{UV1J}jp^)tYYnMKI=p@&}=fy@=BROmDBc2MJvfh1#d0%Ei zdbCUDcWdLey8$!9l+=uUka)2)Gd>kB-9k6l_LuES)-GMYcbOEfgn8c@MRL|&C{B%A z)n|O3pid0x(Jo!jl+Luv0Q;#}QZx1@;xBb`yvAF|huRL@BimIxyOV2UycXN)L~_=C zNt~MCCp{cs-s>qyk9KLF;BAkHOMv+!keadUiW3t#-Ur_fLMLC%mhD|x`|Hp$A?Zh- zhcNFqr;?nt>x;+Cg4K89!9I67>Crw+{M^***qYv8FY+TbV-FW!{MfSZ3dsFJTy^hA 
zYgO&3eQB*0`K+r0_PgOEXYB*Tmyhn_h*aL6*PLu83Si7`O@GH)<6znH5NY2`4i-+WxTtj?@JTi;)XqV>Q zUSl+Fs4MP~nz4@*CuZ`v>x;quW~{{y+1{PC_qTr|^au(@dp$3bvvvdVn)F0Qj7_!HRst{^?y zeSBIfqgt9R0L)ujGxlpmqj``w*);UFZ12n3PnJ$pTJ?>I2m6FblCyS&U9ym| z{J~?eSC1w=+7BOYqdZb=FaEJrNDQeN`{pjmg0j`r;b32Nv(+!z-jB71Pw-T#Uihv9 z`;l~#v-U8{WWn%zfD_m~GD(m2r%%Qz`>(ll7cdvINX^(o#1}v3FdeVoTR51sOP9_w z{aO3|rVErm^#|ee1dRofv-Xo;&I%bR*5|;UtRg+yBO-b$J*$`D@owTqYR3LtOT3O8 z(@qP1)Z>+*<7E2));_fBdgWVfjjJ&4B|2e$bJp&Xc1CdX**6O8-uk3RySN{ztY#a8 z_dNy}lA5s}`Fch$+b7^9bL?&X&9Z$UYwtPhkn&rnC!N8bnMHEeJ|}Ct(6KNI@5wK` zM|!kJM@K8y8dt9d6Zz&osTq5dnK8npHAQ03RwJq&iOc#^aB zZ*ArYC#?@;!Mw*LkRI*gex$O^zB46Y|8R)ZjQx;TgmC5c@G{6-w@v&i+Xu7upmsG? z@g?3Lz~0k_7H`Xt%S|R$Z!ldlq2E+mo8H&rO^sSmkfQZH0SY zn${W8T6Jgb^OrPLdAnwx0Q=2pBxmg=dVPiE8+{&vJ;0CjXz$*=wkkVD0~ep8KdBje zlb^$dqPLDw@S`rj)oYY&AHv$}-f5vqwc7Ou?8zA(LbEeQfKpkFwp9 zwY&KZQ%%?Sv;*e-Nk@{icI`W76;9*p`~&t4Hl#ABy=Jq|r5lC$={ z`&TM_I^K-}`+OJDqg_K|uqq*Y=QhBM6iCh3e+1_!A|Fn`do}|%2c4DeBU$^jL|;|Y z{1kllCo+oUtbOy&Wr}`Ujc$UyUkvHh?YHB<%>?`DSW+|g7S~@Ybcc8bL%y+1)?3*= zinZJC3RDd+*T{!srnnQyS^E?lGsRkmtA=1-ZA*HzKY21%bu?_y1;Bi;BQ;~M-uZwc z#p4n_#%i$Xbi*uZt&V2x5!Sw{@^>rxgWWok2D#>3&4IFg3~OJ0W|C@U`|37e-x^MG);{LuR>heiHCKbZ z-y+hZ-P5y&YD^Q)O2C|1OlroSZEGT=nQg&muDVr)E|l$KS^JGfKB@`#Uf}-wF1@h7 zIcsE)QtU>@}A<$@*m0Yqgrn&KOx)4vG%tg zW~t_m+}<7LeM}_DS^MEH)fHVfE>(eDFPik~_VYcoYXW9g45=A=`++%%KVuGdu3Bc(aZeD~6Z1&U+8>teRO}dE(+lRkUjgaS9uwV1 zb+BI9Fu?36BsF8ddaRaEU9ST7u+zs|^p@=tS^E(4Kvl0v6Ni9Z+#CIyv-UUBEfv#> zb2Y&p;!Aq82Zsz-3U79z6{B`A{P}x3-wM%O?!TR$^uv_aE z{>@qYDznv!_4)?*{QEwA(xY8^UoA1)90ZtEhNNcfL-uJ46Tju-wnFOtl}Ba!WY#|X z_dHd{DQD3x;ui?6q~hg8-YD@*XeGu-G{Zi=z6J|hqhS($NPW-Bxmgv zJ7X2@K6`7xF*D~N>Cx`v(^KU(V)kUf3_eV1#(rdDU7`1ap0yz#Sovsny;G6G+Y2Tg<(oIIH;?_c*NDl^Ncb*6K9Yp3vDrm1}IV8thAKNzU3W5(f)Mk2~TX zwxd1i(Jt=4tA@G1!xP6r2U0WkhR9R0ZkL{;{lsvdyI>j{)PRBsF6n`NdjUdG;JG z?eDEk_Q>`btX+C8b?mTNU0~jSbR;=z-`w9(x%+Di{64*-Hl#`}(?QChS2Jq~`!%n7vJ;IOVJiGOcclaEKK>+F1?Omq7zz_bV zGfB;{5A#}vkKo?&c6E^LGkNwdJ8mrn`=J>m=h+*D*y8=B*8!wgw`W@)ssR(38c1r6 
zJ;p035Ax|Ze0^nmAkXggam;?OFGwRf&t5lk7yf;dbq49x?Ln6HanoXbCaF30F@Xzl zdDuslN6GeCJp221FLB?S#DPw48XJWzM#B{<%T<4CV=e;v{87))gPK2me+Gkd+; z2f60vp=q*xHqY)i{W{*WnG;2F)-IiACN5oC0`?X$q*u4MHf|LFm{YN&=GX@=IfZ+A zZ@1@E$o3$v-EX2k95bJHkep|?sFv*ucGKOYSGQY~_9+FsOB|^=cGaRccng2Et$+Im z(s^bM&wl!Q#Q?CMb0Rs2@l#2_NGcwbL`*VI!%Q9sBPkS*&fWZ z$7nwL3idlrBh=jnUGcMrg_6`9`{NI80@ypY?XX$4hw$u!GAb;< z{<Q8c> zedde^+|zqJll1ELshYiTPw&bsQgiHA8hc)bdC!ir|0dh#@$9=MEyx0UlRYHo*-I=o zyoY&@+e>D+HfJ*d(@`@PG35IrrE9UKwN77!{PiaLQL;UZXRn=wHVRo>@HJb&YJF&$C-?(@%n9=A<{tdG=}LWkp~g z?Mr%fd;a$r+~~SIh148-P>W~(!jG!2S(qZ*7x3(Ttj^>9`%(jv^Xy-3Z|Z~nh!N@4 z?X_zh!%gWG)kw{;_iTC*_h;O4T78u5;XHew>7}VK@9PUm&a>A~--FK+Bt0R$y1nU& z6>VW6w?88_$38OR+6c%UZ2X($N^5l?&pz&74cwoJawIv=uCbv3RBjiRSoW z-a>~CQ0M0~oOA*_e{^Zi@mKqvH4l2r&g+-T{>5CsVnE7T@S7@W&-y2eSCj9*toaGQ zTt!zpK;1tgx8)`9n7h%O@h6Gr%H$r?u0ubsqkod@U&8h8`sy(Q{P_;FXZ_OE@cr6* zY{0+13mu^DkJqoC37&FCnlpasTwn8x7XG|iQ9-X||5C1hTiCwU;1AqQd!D~vayVW* z_1a4ZsQaD$ovVR=?0%Ya{Le!_wt>DwWVv~sw04(q{lz&?4Z!cei}pN!rA8S(XHu|- z4p8^I|1Nn4o@;w)&hZz&>^uSbmU=FIW&d)ne|&ZGYT$oSo%THcy3tDq!0}Vplnzk$ zcd8fG06d?~XwLCpelYeU^f9ws=ga;TT>tichX;cH^IY2V{4@7jz6Ss4P&z=}Z)|)A zZ|pkEr#Z*J@xsCy;6GX!nkf5Ma{VVhWc>jDEuApZ^ZaXr#;ymyN}mo;_p1)qNdS+% zAs zj{j$4*A~#{YE;aT{cE`XHn~Z9;14sPJi=lIJ* z0>^_tb7N3~>|e|E+r_6Rg8$og+VlJw2b1wx>?2WhfV#h8@lIdxn8(nZ<4;PDoB)0P z=B)*?e;wC9-gtBm@PCe?J>)&nTF&q4+oN3SVhsD)Q0e^^s4p8@dbUZX1=Cf3wImbWega=+fzi6{wA^SIQ z{VrCkhl9VW6YY6^gQ{uT;6G?f2dMj(hHd%`ep7pzbNm-|Ya2knzI6wG*}swN-`ZnI z6!>*J(VpkmKjVuJaGtfH1JwQFA4K#8Pa`{;bNo|tF5(Te>`k?I%l=JVzuTA*;owh? zq&?4n=lok|nE!dvbbz|QLG2R!qW6**nsfZy%q#H^EbHG$y(jxObNwwZt#<&wM;h&U z{;Gufc>Vk*oeogrZNpp^W`sG&3;P;icai~^-w0^g6{pq^qq2T|vo%THc zhbxyR!TcYNq65_ZTbj-22A*0mH0Sw;^ofGL&O?2B*}s+R@A|IiSn!8Dr9IEDSoaU! 
zgWdg{4p8?O+#F>Np83Tz=lG``xz!B%l%U1lvVR-bU;X6ne&DYNr9ICdwrl8YnEylb z=>Twi?au{HP;_S2r{FV8u20Q}hp=m2&9@oMGxg}X-&(wyTjtTh<- zi_Yyhy-D`Ra{ULIy>SJ9+J4&e{MQGV;{JnA0v(|4R~7EogZVsrh~^xBO}$%vpkJYJ z;hOBicq3=75gnlJpU|~6{trt$s?nU|uk+_YA^2mx;>ub>$+M|Etq!&+{)^Xz~q?pBMggfV%(T^_bV-F%P6U$KT@89S!L3-%My(D6QXJ zTz}H?t&PENpFw+`f5q%cc>Q0JNe8I=qpW)4ADwyJr8&nRzH_G$_@`vN9wGa8bNx3q zKHLKSwmG!t`MalokAV44%%cO;{Y?UzR)8lapXMC@Q14IMq1X8Cyhir#;reYa#XJN5 zrwZEh{1wsf@$VaVRMG+Jev@$1HsH~%qB+OEwBH+ixclLE=M>o=$MxrjRlWlMmrC06 z{C8@l#l!rct)c_e{XL_vSb*PB(}nTCz0wWcl3GH4rc+s&?BC1xFRvU8er-qEvwj1lIzh=JG5b#G&r9JDH z_OH(Lm|+I~l<9PUy8r3Ov-se)hCj_Y{-}Q!)CPaR_^U%?|9-At-!1zf_|v>;&+~UY zbO*n~%+Hq&Q1>_8WOM^O_omRC<3Al$j1Mw?xtO|4_Q!Mm*FLw)1ApVIwCDMI*PD-j zu+Zrm9iZ;FnD34c{nx%ubBW%DA;QIR(WfXzG`F+~+{1^TW#xG|m%%%g> z{Y#au_rRl-OLLCD+M$#9P@~zV^7>Dt^?Q)(?_{~AKKRE)(w^t<-R+(O%zuq&IzZk3 zvDw;H;9nR+bB;eMw*D9B2YD6sll_Od{_fHBbihAlI_-J>x99bI!CyFo4p8@ptQvC^ zJUIb0=lB!zKf6MI?0Uii*?*Yp|8+qBH2A;Vp*_!UrGEmyPh)R79iZ+{EgiBBJT)_E z&hf`QeeeSM7I$0yBm0kV{cRdF!t4LiJlga8q4DKOF#l@`=m2%UdxPRe;CWt1bB@2j zaW3A!EAEt7B>RtY{mQlT<6-_oI@6x@uMywB3E#JM#21lQPBbF{dY*q!fq!@KAJnG<)cy4=SJVg3T0@$1{H5-u z>Cpd};u<3R|Ka){G+%2Dej$MNJpabxR``&5ULYNy?k`F&PXJGsAewXh!4uD=K_6_F zbx`&n=lUNF@m~f0LTB3Z{87_h;qyqf1Uf+7-#`EQOz?bD(wyU8Ft!7JURyoIwNUn- z;QCGHn`{ODgqgJG`Sbpqz-Pg}&!Pj={Si(LM}xor9GY|dL&CH051JhwrdE32Se z|FvH_xPC+4(4OZ%U7UUmj{mLi=m2&9uG036!Qc7=%{l%{9ZTv$e{HgbQud$X`qNt7 zHiY?qHI4Q>e@xvMy}@rXgAP#l7hhVn2t3^bXwLCFRBLAsy_MF5nX>;h*RR`g?OX8M z8_}NU|Jh*?UjJlNqXX3a;T}nN{8-kYImaJ1VABEc&&lewSN11y{oPCTYl6R49_@L4 z!;-D~F#nGEbbz}5(Q;S3|6cbI%{l%;-3PeeIOhA%9NB+{>#t#x^auP+s%X#iuY8+w z9Q*?{J;{GS-EULYqchCsdTp9>{LR{~#4j^mxp}MJb7}pa<@!D9U9$)O!zkMG{Kh|j z;{*81W9R^Nf54rN_{U*ou{7uS>l*LJJ}xuCQT8Wu{jCNR;`Q^VT-x*eIz=7rVE!BA z(*f%KsliEjJ^!_U<{bZpm|oz27(fT8 z`@QNMa?`A}Mo_|l>KKQpP?~UmIb^p1R zW4FM3j;Ky^j(>0P^IOp0t$dUz`!8_)qwmbx1^%wOg{0^Chc%lt9_BwypAJy>`?R$y z08g?Z%{l(}JF`1NKQVG^mF&OB^{08{odf^&U9@NY;o|jk+h&2w!EYQ#2dMkM4EToo z=M{Tt&hfW9yha=Ri*5aFilp;T3fI42_3}F4U+74C)-PTE$FFuD3;tMVIzZjOyS(~E 
z@R%rQ&hf9`b+{Sy;{M?z*?)=aSG`qr0{`!+v}gTR-5r(dU0zQH|JmtufVzLd3}>AG zW;1Bc@#oh)aUT4Yk>y)u|7EWK#j{O!!0)@8_N-r;|8`ctR)PO(937zU_pa9i_fHD< z(VXKSUHyzH_+Q#3-je-Sxc)gWy>r0d-i7u&|MlW)zq&dgm zZopeSUrqe^>xb;W%Jn}FeboT`8>?u~^VhFz+zsZxhh_!&52*V+-&W%1$3!ifGyZh( z`T4Tl`=-PEf9a6brdV3P*SP-toMT}+L;be_gi$Gi2g~AG-v$M z^Z!$pdf?~xSz57UWdC)pzp3elyWk&fM0=h;Hq9Krv*TMeIzZj;Wxu~a%xA9}H0SuQ zjvG-6`r_-o*314IT))tNfiL(C(rC}~*WI%mpPxxirvudeiSFa?fTvj|%{l(m;kQem zA7htwS@z%L`U?VFHNpSUnf5%t*_>a4;P{y=&;jaxyFLYX!P8qsbB=$MW9?4Ruc{pN zuk63Y^#}DSSPcFuozbM{`Bm;2N6@cN2dMi^*4+FKo}q>`=lJ*kN!kQ`M4E5&7t;Do s<@!Iq&FcpK()+aM`8zI}&wQtF4jrKGkB;rS5IlYJXwLCRX|G%JKNYg9KL7v# literal 0 HcmV?d00001 diff --git a/dl_src/dl_results/convnet/20210919_080929/events.out.tfevents.1632039123.pytorch-1ab1ab1a-gjcg2 b/dl_src/dl_results/convnet/20210919_080929/events.out.tfevents.1632039123.pytorch-1ab1ab1a-gjcg2 new file mode 100644 index 0000000000000000000000000000000000000000..67896c12189ca3e2c2b72ebcbb7ac355c179fcbb GIT binary patch literal 37920 zcmZwQcU(^IY}Zqe051(vlrTkv&pmWt5Z|-O4Jf>>`zf zj8H0(5b|@M-`90{U+41o>$)G$uFidR&UMaxYo+r4{|>IO(N#Y`_qOQ;yWGKT8cp+d zo$b+csQ+{i-vK7__@&3&|F4^B^t`jWb$?-!vg_7NLsLWDSJ@i5y18jju6^y z&i~@Pr_D^%8(IG^Zm!W(L*xJBd7eH#+IiKkK6sPPEXy@UcHgc|LUe1O*GVe zr~CMdul%f_Kv?!6Z67cm{B8fAp=`w$p7Ti|%+1i7-Qj;PY-eNHNzb{eL-^8?leXroUXF11$L9qq(}SRKlasDYB-U7DoM@Q zAFoshH!Uk;!5(5$a6`1$;n}agn|vDVx5kj1wHuC82zER6js$zE9qFa^{Y^a10%q%2 zQZx3Wn-#)juW}#A9dA7;7VULed#jjy!G2lEaIlv>AUSIbPEzZ(A$?B)+ik9K?e zY+-SaUj={}kWFgFK75Ko=$~|<59If{ZESi`dA!$S?W=EE$tGVq^$6@|1d_A%e(e=P z;HX#`*xL^xJ=)94%7hbBEM$O5wIDTP-{+waX6t%3hCI$My02)j&)Vnp$`lMVaLcg!E`v9`9|c>eK)xc`2zG`!8pOaAHTNJCGl`_|i$VtFrc| z6EtP}7Voejz1Aa9@A>8DPF;keaa<{F(P}1D7;} zykCj`O3|*yvv=DO5d-!|pGnTziwlzkukoHoz@A%4da3(Aq?u} zX$5(c1ILeuc6HXS{Hv7tN&W)t*+)pu+RZYPgwl_5HNbu;mh@;>uJ0Bp*@pnLFpkuW z-TKcllQ#B)9CF`g;dey42G9OG&2a+Qd$lAvYp?e%Nr=(5=?3;Otw@h{v6qD?grE{z708?Z>hVdmH(>2q#kYjp^SAc{`zF<%e{}MX6UTR;K9lI7VVL7B`?436$1e3H24Iq!+;xSOPH)8G0XRVP%4QR6p?9F$QoV5pU 
zOcK^5L|y~?^$60V-K6&!p=f=@RKN_~MQX-wyW-FBzSH_7c56?~34j^bh}4X|^B#pTqilgH z817sPru>`x%ygCGxlxI z|D3;XsR#!9F|(`Mmz8T|bJqT<^?li@k$d`r-F-00S$ji^bAqh@;qhRfZ$Wyr>*_`e zOH_6G0mf(usTupJjQp}o6MhMF50zNyYg?7M$NCMV2}4AIcx9T<&V8-pL<~U zTR?iWD~}ny(5;;S)5)9EjJ;8cLP*Uv&4avN;)ZFWU7NKl*GS*o$SkmXB$J%A2X{Cp zEG+mI4fabHNH4WVcdQr$n3I=D&DeYDofkevOb>)SykE^4(cXf!8$5n46bzr}2=<_X zBxmjQN1YW6qAqm-dna?!qkX95HDO8O7j3{?r!`|gQlb#{y!Gk{dBAbkIMLpcwQt&# zCzQ;y$9;k;S4qy=mu@~Q%)ML34D3s8kRI*IW2W@Q$Gd<@NGCO8uXXRd(51~C9ms2^ zZ+{@#Td{WKG1D^QdkV~Z=PZ)5_Tn99g_vn3lfeG)5$UCNolj>T0>(6l)QmkKC0Y3T zI|i4fz}1_oM0;!2uIo8pwt0+xd$2zWB{^%KU36M#(AC-!?EO-T7#5eDk%q zNNUFJU!@Qpwn{gFAGI|4Rl6(7HL?wBzmhUc*2*KhIoQv|kesz!G&>`NRsC`W`^8w& zOYMuD63PLy^C+nqd;Oc|g+-A8&msTMLv65VZ_C=1$Bf?7PCdb1971x|{^tB?!FshU z2<#t1NiVez-KXsdn5%0^&Db3xlZ9)>&)p%{UwUwcXm7`}U(fGR2KK&zBxmh8FHQ*A zZ(16H-Ebx8rS=9YF(&{sWfiFz`?x>Hd&%ZbBOq_)QMq2Uw`c9Ax9*VDYW1WG*jM_J zoVCBRiWl-bE_woX4}a35UAeckGuH4bU_1jz&Dh7bJue*HZT|^!_mad@qFslzEByD$ zybLmGgI&LZ?;PChdHp6#q3Xia`QHx2K&tUq-N~RT+RrSe{XC8 z`QrJtw~O}wSi9Aj=Q7jseyd^LO}t6Y+7+`l3YEtS0>SR&LwdCL?%i89uS;XR)^_(L zHDgz3o)QXn?!Z0ltBz$!qFs-*?|Ts=YiblC1G~{wlC$=QYa;}WWuxQ3e#({fXrD1- zhRmz|EpwR2KsQn|_F)u-VH)rkj^T!DzOj5MKzV9~a(cY}-DVa_9mvMkm z+#xk%Z=Mz@Tz(t83UZBx#a^Pl8*4w-y|e6cuiRf?PhVB~H)rjy`l-pjhJDEe`=?;i zqkX7lUD->A4-)~iZw;v#`=T}tWWQCm&47Hm{N)bO-kr72dt6uE!zE=W*tc1eoV8C_ zyFfVPIl2ef@7a(Z?Yg?FWQrWw4Zz$RNovMEq*bC&x8{dEsqsiKe}4#&NJ4tsyyUuk+en_+>ll9oUUR zNRM{qG1KYBw?|;t2qiURH_|#ORBZH3fqcYT+fkz3n6*!CZX&n)@4yYPTWuvdYj0c9 zT?kwCRtM~f+ewf1U6FaR69zhi0aF-GYR2xrGE!(gI|8@78=hR{BiehhcE$Am^2#5- zwt;{>k}`Af_=eR(zEtSGHab2{9_|OM{35dbN`AkVS|9TG@T~Qi4g53 zti6xPAbHxGkF&u3I%NLeoV8cFHxbr1eS&Lb##+*&eO+j*Y}Z7$>oD&QVWej4M?S_2 zkH3u5hukG>@deS|o3)?&Y%0%|9b5+XL0d`A+D(IW1#OMiOJUxPwv!(1DJi>UI`O4v z08_Ao)Qr8>>=QzVswzC~d^SF#9Hh6wL47tCnTMyB0#@Zuijgjv+ z4q6WOytyQ2?Xz>AnOB7t7J$8I9_i6OW5x~HBmEE=U{dFknz4@!ju&#?byx$ry-lc{ zXz$C~_r1`UTZfi61pDnVBxmjEE<=S*>kC?gz1>*Sqy6rmzfCuv>fwIm&~c<@?1QpW zgbOE~7lJ+SajCy(@5kDw-*S<^yBnwl_KISXv-WvokDI4X8hZfj0i~ozdrC^aY@J;Z 
zUI@jPlbW%&es@5y`7dQI*qufh?Gf$$S$l`ZqvRbLX+Ht`K_`;4_HF9L=K3w0r-1#G zGwIQO_s)9R8?SG8@x0B2)QtV{kUZgL$Du>P9;H7dO|%bS?b$YSFTwz2*kVS^G!VGV|3hx8U(^oKAYQ&zP}Bw(MuWXMi!wAT?tT zAMiv->+FZ?%*yPheMP&#+M8b;D<87F^98W)%p*B#zZv`3yib$;i(%fkJ|R8Y|2(HB zi~n`3CD`ZZlbW%|uDmP^dRWH`@+aX{PNH4L+P4K;$z|@}4}m=~zWQ&@+FQR>6MWnT zb^v?U1Ja}Y;-7PZ#HFeDa6{BXQZx1|yRwDGa}VPoR&G^0NVLmY``(e0u%}p%oV9mru*v-H&jxsp^3o8}qdoM`d8DiFM?5hsv?MiSw_B4TteljW z0YB>DTd(emcF9xwAL?|MM?A{GXWcfmfBQFQ?Z3|r7y7)~f#>hp9Y~LM<@rqG%jsHx z@$N)w#y)LZuF!Oo&u7SO#$K%w?H263ySUAh8x@cL2=ji}iR7&PwYjBv)JEGHIA&s< zNsspOKiAp|P1?5x%nBD$Gxo^lw*{la?)cVjJjSK%4dog+gtb3h-de6{p4<}bm1n2^ z%~|`>91r2P+Jcr~k5iBy?Ut6ovhBe`7J>a~GN~E6bN^!D+? zxk|66(k=WzwTvMPHAaD zVBS>{NssnsO*3Wo#zRfO{)yI%ea4_vL8ZtA?Q4=oXNmUVtUW`;Q@(q`L)_z-@{;7N z{qYlB^VyM?)`EThE7GIAckhR?;`pBxU?2XP)Qr8c#UxwA{$EVG!7pmy(>dt2`(-Ka}7+7wq4bkzQ(7Vi=f8ws(Ng(k*vLT&}{jw=5Y~VKYW_xtiAI1Ec5NJ`+9=C z=~>dFeW>MY*@wvs-vMS|5~&%xLF@@(T+;*hA&++n3Ks3Qti8s>NbYN zSHCn`Sf(+{4eXcPNRRgNvW>D1a{EHSY;-3zW503io1kNOXb0pGo{Nu)_ED_;zfTVG zC1=bUgZ-`_$ys~%Q4h>(XD%HDcH4!dm)gJN_4EVG-T+cF_H(~J3l;r#;Ht`u*XG{oV5omJS7xfaSH`|yF;XB?LB1;wg=)fwWAM{nz2VkHI^CZ zuFi!Yb>ljnO3^-swJXmflMKe=9>=rwBxmg_K3bXw9eITJwf#4c9_`BY-CSiR?s0sj zHDg~Nby3h;`3xVou|G1fb-Hqmv}5g|U1!QajhPVz^Bx;da@JmIM!{ zc~OLV5gaqkdXgUPf1U@HCGNM_2lhHfq-N~ZC({MZWg2rJH_mo&7wzL%`$6Mra>X#O z9$*j6BROkd`6J3aILZ`1ziRV@^k}#IV{aZS{{fgvS~K=$jcy9gi~U+a?vj5iM6{1* z?GqyP7xY(OygCiY%p_MKRc` zY0cP69P)%N3s>T{f=cw-N21+K zhK9-xC6?gB4PR)@*oPUa$v)j`ya#?%W4|^(MEeBRzWd2|`OGYnc(CtSPIA_+di#a> zmZ^mgz+M$ddbBI&eYGM1H%C>2NX^)@#y%B%I;oEYyUBI?jv2}|aw2P2KFb{XY-R}9 zOS?P#%~`wi$bK@DI|q!wzFtmxv@4(8534C{1DGv?NX^(k*!7dG>E0s-a=oT&hl%z{ zJbSDAo2$XTTASpoeaj!a#oW&W!5-X#^iunk=biD7{PdQjX6&VZ>|rrp0@$y0NuMv; zC-dxwn~iq{`$c1t^XzT4cE1PvdlS-2?Ix~0@jl-qQ&My6TN*}MgFW-`z3rmik!PQF z@*VCiCB%`OXV2I*A3yJ*8c%wuy`R%}OTZWofqv+JiFlE;0?_C zwSFY$+0T@=#(OrN14u8mZ}_N<4+!X*lbU1C43HaveQwi*&qcd4&+dD8$4{`IY(;XO zy(%E)5*#zttw}Gnk2_+FH>z{nkeXxPDXa?yyK8jUI=7VTj0?}c_nzlFu$LYoInQo9 
zBXk+q>&B8^YOk3$9M{MST664yw}bJV;Gy?@ebGLJXKyp1elFNmgGtV_cc^KD=fDQ5 zNiVgV7X?ScL~5-eHOIcr?=0>U3~RZ}Mzl}m*(bV%S%Q6P8qeM7t}`?l^AU7O=?b6`%ca_aVL1e)@a@?h`cgB{j#s>0qE7>^UQIMvC^CJiB(! z*8^bQ7mp!1&;IysGCqfGVMltY{hDD{ym+plHOD^5g)YA59Gx?X!9I-R(!U27BRglJo3u zCOzy6$9v*Q(o5}H)@`nUed1|SbL?w+SN(!~Q{b!KwdB|Umzo&Lbxkk?8*_U~*#%qtSlS$6AA0FRv4cK=&kzQ(V zqQ+471I--mT0?FSQG~b8G-JwlAqU_H8kntRVMWS^Je} z_vYD?TG$1E-Dfq)d3Mzs)p)OL$r{p2?R93II0hK^5K?pO8EQN65kvgHRl8ArS`tBx8w2txRlf!`<-PoaG$`zqvvkXzK~~+F|Yjr?B?Di=h-dPSK)Wk zeefZ@)UK;)@D?Vrs~@R3cFWgw^dO&@UV2%y2k`8@HJ&X8yT=2P^XykPmixdlGb4-i zQhQva2VvNsW4_-rhslB%M zz~+Ey6+&u`J##>K3FM`%wluh>Tq76r?6J3YIe@)kdy@0)#o=R1VBTwJFSS3 z7dv{~X{l&m#WBG{9XNzJkQKQ`D0`MKPi z`$YS4p55?34SpZTlYEl%>=CEe;rgEWl=M>jo`s$8!zXVFNX@ak|IWrA)iC_ab!N16a4v_k96hvx*r@)lv9RKI{zjr}@@55Cy z(Z7o8pRjGm2k;;MNPC_?tIYT__;*y$0aE|-CaFWfGvgD@IsVM`CYzwwNG){|{Xty+ zEK|2Z;6Hbb_B?-}^_HgKUvZrdkoxayEI0+89ye&t@mHqypAG$j3+X|kKbY%x)q8CM ze%~v!=lSnBE}skj8>w`F)W6)#BoI7_X*B2fGgBu&g1+{_-Lay7HP`Pmx;Pp9Pma=_ z=huqujW?u2kI?~AzvB2I+`v^ypgG4s^y$CXx0$|68l43*h)E zI!gyg{qoV1dxJmi9L+iYhvBpE!&vpd=KuFVxqgOn{b~1HO2OaYC+&Iu!Di0*If?7P z=m4p|=l7qjz;E@N<{baATCVoc_v&e8E&A7T{o{Kn48h;N7wvg||HK~sVgARP&;e5a zLhEk$1^Pz4Y0mL)n;2#be($e+y+r>yuD{{drf%RLTTOeO-#2%BH}HS>XMofn)@wd~ zA&bWk`uZGy;Oo5m;BUXJ?GDi&#`T+-ug?a5>VDeu{6B(paC3BHG#w!I`!!ju51y)n zH0Stt)r+13eYJ{qis)a@_4mqitq=YU^=QxYR}34C@881ubb!>~c6J3`P^hZWoa5JQ zYmx!}_zgXZMgIn_f7IlhCE!1}oAx~aQq{+c;P_b+MF&Xz;r+gi22cMzH0Ss=zSg@9 zeWNh5rkTq1XCv3&zp};${L44dp6CDcQRguDS8S#Or2cN@tFMA*@)nwN{C;Yu@I%C| zqdS|4{!Lte_TurX;IC_}PkNp|Ff?dB_)Sdc0I7d>y2)zrm-VJO$M4|q1wT07xZgKN z(Z8ANKYOjr8~h^%(w^sU@oa1+_+S4sK(T)#o4H@<(hPtu;}&%PkT z>ld|Cbb!=v6}1ZeXaC7Le)X1}@%-~x-^Vi1AI|kxeA;{!=6}LK+VlKdl8nM({_V`^ z0I7d|&-@lJpIro+bNqS^dBdS^u)@<_^zY>QmsoZ60Dr|Q+VlJ>DmSiz|9uc0AoVY< zn2BG=krqsIj(>fKTMzKB-}y9D^ha?0t*e^j^;hpG+VlLi)CS%Izx^IMKo$tkqF#mD7wCDMsXg8_?|B%jffYh&*cMrduF72P3 z<9}ke0QXzQuf3Hm`gd{t73W6a{==*dwCDL7UU^UrewU4OfYd)VKlujC|A0+2=lB<_ zY>^Fpp8c$9(Z8GPZ}xGL9?ZX!Bkg(qg@Gz~{?pl+4v_kPtn!=+o?k9B=lEaEs{Rjp 
zkJ~3YJXEfqQC$DEz!?p|zwsI5Erwc5}DQh&zr2Ib&c(U#^M|CNLcd`Q&jY@6Al ze=pbXbMHhi@E5(KJ5V@?Row|?|w}Kf8SknfYjf7Vyi0f6#bKP{I8-?@qD}0v{z?D|9-Cjs7F5y z@E6XYJn-rFnniPtzk9umrO-R6G{_VE2e^JOua6nvcV69y z^gMslJy-D=i<2RAfYdKIH8TZI#9Eqj{GXm!R6@VIJnfh0kLLP)_b%QF{*=$O=lQ3f zaEgKB=X50p@^8Na~a(2@2$zy3Z4{O<7I{|u1&-PJ3{!+d5r(bwnr z6%S4fgWlKjWWMN+<@yiBEGPm0JzLuI{3CY<;CB*hkER2pe*KrV@bBr*e{zn$?Ky=i z_`3~vu9dA^e~xnfCGLsu!GB>S?RoyO<*K9L{hMY>2T1+hpM6>gp53Ep&hgh=x`OL} zQh8T>(I3b4hxSau`|p!~(w^s!YQA_p_#gbD1EhYPpKI`oNq*MQoa49qZi@5oc&Exn z^dIB;r>M>ifcf`$KzpA5kjasz;GdXD2T1+CXSd_~v0WC;IsOjw0}bH!wXUb5x9E@O z`qz0jj{yJYKD6ig^Y1>w{~txa9~~g|d$m&Y22bk&H0Suc*_moUpLJ@>PSKyh_3vEx z-3t6i6KT)$yYy>^AJ80hmJX2m-OEO$g6HZ#ImiD?<0*as;^S-mi=zKH*Dt$ecpd!F zWwht{9|q{Wf#b)%f)0@S`;3~5|9Qyy&ot-w3;e!Mgr7J3P;sH?Kf(1c*ftWcAFX0( z&-1TrYm4vS!AI!;ssHWvd7Z%1Hjd^TfB6_E{Ck(UJy!Lxa{WBX_5afISBLq3u#5IQ z|LDLSIWYe!QFMURU-|8ABk&K{LvxP5X{SCnp&w^x-9z-B;`(c*w6FkwS7X}q{4-kh zZ4CZiz32d`|N6yiq2Os^LUWG)km)15-}Jqt&`$K9=KALaXzT<3=})xh`5(7mPzL^K zm2`mAAJ;u^GWcJ9p*hDd+~2Pa{lbS+{YC#7uHSCWtM%Zon@@Y5e{fmROz>ZSN(V^& zTm1*&{YUvTnsfX+7Te?c`C*ggZqc8}^;^{na{<4~4%+klk$tDRfnOt>4v_jk4rqA< z=Cksjoa29eWY_`dGu#TUi2k!&|M^B5c>V4-llDA++QYZD;CGot2T1*GJ*#?wr^jrX zbNr{X&y+y_MV3+|`pU=lJvg8 zKlq1jqCL;Qr($gn@Kb--=JaQSnx0E zLwlaTOJ)=NeB4DdIzZ|__oE)}|2*zXbB@1J#IAbq^ZE#NHFK5g=OwN`yp~=O_%{!x zJ_O1><|7vJ!#ML zuZ~t<4Sq8tIzZ~zZPa@bc!n9%oa3*3DnTFmO2b=|MgJA9Kd54TIrvqLY0vXp2Q|n2 ztK0t!kot#rA8ZRA-(K|fIsWZo4F*H+7u#rs=uhSP1%ueW;BRo6_B_8=#eV#->bgWa zK(_fUDHr@r7So>R56P(N3jSaJ43PTmGj8sK z`M0d;V-KEZD`?K~w_W18=>GuB C+RCv2 literal 0 HcmV?d00001 diff --git a/dl_src/dl_results/result_log b/dl_src/dl_results/result_log new file mode 100644 index 0000000..f0b10b3 --- /dev/null +++ b/dl_src/dl_results/result_log @@ -0,0 +1,10015 @@ +bh1llmn592poa-0 +<<<<<<< Updated upstream +Namespace(device='cuda', epochs=20, selected_events=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], test_data_num=24, train_data_num=98) +======= +Namespace(device='cuda', epochs=1000, lr=0.0001, selected_events=[0, 1, 2, 3, 4, 5, 6, 7, 8, 
9], test_data_num=24, train_data_num=98) +>>>>>>> Stashed changes +CNN Configration: Namespace(class_num=10, groups=[1, 2, 2, 2, 32, 4, 4, 4, 16, 64, 8, 8, 32, 8, 8, 8], in_channels=[80, 12, 252, 256, 256, 512, 512, 512, 512, 512, 1024, 1024, 1024, 1024, 1024, 968], kernels=[3, 4, 1, 2, 3, 1, 1, 1, 2, 3, 1, 1, 2, 1, 1, 1], out_channels=[12, 252, 256, 256, 512, 512, 512, 512, 512, 1024, 1024, 1024, 1024, 1024, 968, 2640], pads=[0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0], strides=[2, 2, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1]) +============Initing CNN model +============Loading train_data +============Training + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +<<<<<<< Updated upstream +======= +Training epochs:1/1000, acc:0.10827375203371048, loss:0.24787607789039612 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:2/1000, acc:0.10520939528942108, loss:0.24373038113117218 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:3/1000, acc:0.11031664907932281, loss:0.2396993190050125 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:4/1000, acc:0.10520939528942108, loss:0.23577880859375 + 
Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:5/1000, acc:0.08988764137029648, loss:0.23196791112422943 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:6/1000, acc:0.10725229978561401, loss:0.22826266288757324 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:7/1000, acc:0.10520939528942108, loss:0.22466227412223816 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:8/1000, acc:0.10725229978561401, loss:0.2211627960205078 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:9/1000, acc:0.10827375203371048, loss:0.2177629917860031 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:10/1000, acc:0.09703779220581055, 
loss:0.21446552872657776 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:11/1000, acc:0.09499489516019821, loss:0.2112637311220169 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:12/1000, acc:0.10112359374761581, loss:0.20815348625183105 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:13/1000, acc:0.10623084753751755, loss:0.20513826608657837 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:14/1000, acc:0.09908069670200348, loss:0.20220884680747986 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:15/1000, acc:0.10214504599571228, loss:0.19936758279800415 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:16/1000, 
acc:0.10520939528942108, loss:0.19661293923854828 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:17/1000, acc:0.10418795049190521, loss:0.19393974542617798 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:18/1000, acc:0.11746679991483688, loss:0.19134752452373505 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:19/1000, acc:0.10010214149951935, loss:0.18883295357227325 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:20/1000, acc:0.12053115665912628, loss:0.18639406561851501 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:21/1000, acc:0.10520939528942108, loss:0.18402619659900665 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 
+Training epochs:22/1000, acc:0.11644535511732101, loss:0.18173407018184662 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:23/1000, acc:0.10214504599571228, loss:0.17951107025146484 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:24/1000, acc:0.10623084753751755, loss:0.17735373973846436 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:25/1000, acc:0.11235955357551575, loss:0.17525896430015564 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:26/1000, acc:0.12768130004405975, loss:0.17323079705238342 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:27/1000, acc:0.10520939528942108, loss:0.1712632179260254 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + 
Training iteration:900/979 +Training epochs:28/1000, acc:0.10725229978561401, loss:0.16935357451438904 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:29/1000, acc:0.10929519683122635, loss:0.16750043630599976 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:30/1000, acc:0.09908069670200348, loss:0.16570498049259186 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:31/1000, acc:0.11440245062112808, loss:0.16396135091781616 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:32/1000, acc:0.11133810132741928, loss:0.16226767003536224 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:33/1000, acc:0.10010214149951935, loss:0.16062608361244202 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + 
Training iteration:800/979 + Training iteration:900/979 +Training epochs:34/1000, acc:0.09805924445390701, loss:0.1590348482131958 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:35/1000, acc:0.11338099837303162, loss:0.15748758614063263 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:36/1000, acc:0.12053115665912628, loss:0.15598733723163605 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:37/1000, acc:0.10623084753751755, loss:0.15452925860881805 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:38/1000, acc:0.10214504599571228, loss:0.1531142294406891 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:39/1000, acc:0.09397344291210175, loss:0.15174007415771484 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training 
iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:40/1000, acc:0.11235955357551575, loss:0.15040655434131622 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:41/1000, acc:0.11133810132741928, loss:0.1491103321313858 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:42/1000, acc:0.10418795049190521, loss:0.14784972369670868 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:43/1000, acc:0.10010214149951935, loss:0.14662663638591766 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:44/1000, acc:0.11848825216293335, loss:0.14543911814689636 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:45/1000, acc:0.10214504599571228, loss:0.14428280293941498 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training 
iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:46/1000, acc:0.10520939528942108, loss:0.14315979182720184 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:47/1000, acc:0.10316649824380875, loss:0.14206503331661224 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:48/1000, acc:0.09805924445390701, loss:0.14100296795368195 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:49/1000, acc:0.11338099837303162, loss:0.13997049629688263 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:50/1000, acc:0.11338099837303162, loss:0.13896776735782623 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:51/1000, acc:0.12359550595283508, loss:0.13799111545085907 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training 
iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:52/1000, acc:0.10520939528942108, loss:0.13703955709934235 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:53/1000, acc:0.09908069670200348, loss:0.13611511886119843 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:54/1000, acc:0.10725229978561401, loss:0.13521254062652588 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:55/1000, acc:0.11338099837303162, loss:0.1343366652727127 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:56/1000, acc:0.11235955357551575, loss:0.13348430395126343 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:57/1000, acc:0.11133810132741928, loss:0.13265325129032135 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training 
iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:58/1000, acc:0.11950970441102982, loss:0.13184213638305664 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:59/1000, acc:0.11235955357551575, loss:0.13105647265911102 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:60/1000, acc:0.11950970441102982, loss:0.13028672337532043 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:61/1000, acc:0.11338099837303162, loss:0.12954013049602509 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:62/1000, acc:0.11542390286922455, loss:0.12881049513816833 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:63/1000, acc:0.10418795049190521, loss:0.12809784710407257 + Training iteration:100/979 + Training iteration:200/979 + Training 
iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:64/1000, acc:0.10214504599571228, loss:0.12740689516067505 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:65/1000, acc:0.12972420454025269, loss:0.12672902643680573 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:66/1000, acc:0.11031664907932281, loss:0.12607114017009735 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:67/1000, acc:0.10418795049190521, loss:0.12542937695980072 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:68/1000, acc:0.10725229978561401, loss:0.1248025968670845 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:69/1000, acc:0.10827375203371048, loss:0.1241917610168457 + Training iteration:100/979 + Training 
iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:70/1000, acc:0.12155260145664215, loss:0.12359664589166641 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:71/1000, acc:0.09397344291210175, loss:0.12301278859376907 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:72/1000, acc:0.11338099837303162, loss:0.12244394421577454 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:73/1000, acc:0.1327885538339615, loss:0.12188985198736191 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:74/1000, acc:0.11644535511732101, loss:0.12134910374879837 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:75/1000, acc:0.11031664907932281, loss:0.12082021683454514 + Training 
iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:76/1000, acc:0.11950970441102982, loss:0.12030389159917831 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:77/1000, acc:0.11950970441102982, loss:0.1197982057929039 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:78/1000, acc:0.13176710903644562, loss:0.11930670589208603 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:79/1000, acc:0.10520939528942108, loss:0.11882572621107101 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:80/1000, acc:0.11542390286922455, loss:0.11835594475269318 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:81/1000, acc:0.11440245062112808, 
loss:0.11789283156394958 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:82/1000, acc:0.12972420454025269, loss:0.11744598299264908 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:83/1000, acc:0.12359550595283508, loss:0.11700475215911865 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:84/1000, acc:0.12768130004405975, loss:0.11657508462667465 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:85/1000, acc:0.11644535511732101, loss:0.11615630239248276 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:86/1000, acc:0.10520939528942108, loss:0.11574476212263107 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:87/1000, 
acc:0.12257405370473862, loss:0.11534319818019867 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:88/1000, acc:0.10623084753751755, loss:0.11495066434144974 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:89/1000, acc:0.10214504599571228, loss:0.11456575244665146 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:90/1000, acc:0.13176710903644562, loss:0.11418846994638443 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:91/1000, acc:0.11338099837303162, loss:0.11382027715444565 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:92/1000, acc:0.11133810132741928, loss:0.11345919221639633 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 
+Training epochs:93/1000, acc:0.12870275974273682, loss:0.11310677230358124 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:94/1000, acc:0.11746679991483688, loss:0.11276009678840637 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:95/1000, acc:0.11746679991483688, loss:0.11242131888866425 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:96/1000, acc:0.11644535511732101, loss:0.11209073662757874 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:97/1000, acc:0.12053115665912628, loss:0.11176594346761703 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:98/1000, acc:0.10520939528942108, loss:0.11144746094942093 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + 
Training iteration:900/979 +Training epochs:99/1000, acc:0.12665985524654388, loss:0.111136294901371 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:100/1000, acc:0.11338099837303162, loss:0.11083003878593445 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:101/1000, acc:0.12768130004405975, loss:0.11053206026554108 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:102/1000, acc:0.11133810132741928, loss:0.11023829877376556 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:103/1000, acc:0.11950970441102982, loss:0.10995199531316757 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:104/1000, acc:0.09908069670200348, loss:0.10966736078262329 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + 
Training iteration:800/979 + Training iteration:900/979 +Training epochs:105/1000, acc:0.12870275974273682, loss:0.10939297080039978 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:106/1000, acc:0.13074566423892975, loss:0.10912124067544937 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:107/1000, acc:0.11542390286922455, loss:0.10885623842477798 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:108/1000, acc:0.12461695820093155, loss:0.10859561711549759 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:109/1000, acc:0.11746679991483688, loss:0.10834117233753204 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:110/1000, acc:0.1327885538339615, loss:0.10809047520160675 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + 
Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:111/1000, acc:0.10725229978561401, loss:0.10784502327442169 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:112/1000, acc:0.11950970441102982, loss:0.10760512202978134 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:113/1000, acc:0.11950970441102982, loss:0.10736730694770813 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:114/1000, acc:0.13074566423892975, loss:0.10713674128055573 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:115/1000, acc:0.12563841044902802, loss:0.10690803080797195 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:116/1000, acc:0.12359550595283508, loss:0.10668371617794037 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + 
Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:117/1000, acc:0.14096015691757202, loss:0.10646510869264603 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:118/1000, acc:0.12972420454025269, loss:0.10624880343675613 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:119/1000, acc:0.15219612419605255, loss:0.10603680461645126 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:120/1000, acc:0.13074566423892975, loss:0.10582879930734634 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:121/1000, acc:0.13687436282634735, loss:0.10562534630298615 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:122/1000, acc:0.13074566423892975, loss:0.10542438179254532 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + 
Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:123/1000, acc:0.11950970441102982, loss:0.1052294597029686 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:124/1000, acc:0.1327885538339615, loss:0.1050347238779068 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:125/1000, acc:0.14198161661624908, loss:0.10484439879655838 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:126/1000, acc:0.13687436282634735, loss:0.10465772449970245 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:127/1000, acc:0.14402452111244202, loss:0.10447495430707932 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:128/1000, acc:0.13891726732254028, loss:0.10429403185844421 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + 
Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:129/1000, acc:0.13074566423892975, loss:0.10411849617958069 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:130/1000, acc:0.13176710903644562, loss:0.10394419729709625 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:131/1000, acc:0.11848825216293335, loss:0.10377316921949387 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:132/1000, acc:0.12972420454025269, loss:0.10360530763864517 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:133/1000, acc:0.14708887040615082, loss:0.10343970358371735 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:134/1000, acc:0.13074566423892975, loss:0.10327726602554321 + Training iteration:100/979 + Training iteration:200/979 + 
Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:135/1000, acc:0.13585291802883148, loss:0.10311708599328995 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:136/1000, acc:0.13789580762386322, loss:0.10295838862657547 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:137/1000, acc:0.12870275974273682, loss:0.10280511528253555 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:138/1000, acc:0.15730337798595428, loss:0.10265259444713593 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:139/1000, acc:0.1511746644973755, loss:0.10250374674797058 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:140/1000, acc:0.12665985524654388, loss:0.10235588252544403 + Training iteration:100/979 + 
Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:141/1000, acc:0.13176710903644562, loss:0.10221118479967117 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:142/1000, acc:0.12257405370473862, loss:0.10206928104162216 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:143/1000, acc:0.13891726732254028, loss:0.10192941129207611 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:144/1000, acc:0.14402452111244202, loss:0.10179158300161362 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:145/1000, acc:0.13789580762386322, loss:0.101654551923275 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:146/1000, acc:0.13891726732254028, loss:0.10152250528335571 + 
Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:147/1000, acc:0.14606741070747375, loss:0.10138999670743942 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:148/1000, acc:0.14504596590995789, loss:0.10126097500324249 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:149/1000, acc:0.14708887040615082, loss:0.1011333242058754 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:150/1000, acc:0.1542390137910843, loss:0.10100765526294708 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:151/1000, acc:0.15015321969985962, loss:0.1008833795785904 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:152/1000, acc:0.14913176000118256, 
loss:0.10076338797807693 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:153/1000, acc:0.14402452111244202, loss:0.1006431132555008 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:154/1000, acc:0.15934626758098602, loss:0.1005258709192276 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:155/1000, acc:0.12257405370473862, loss:0.10041040927171707 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:156/1000, acc:0.14504596590995789, loss:0.10029543191194534 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:157/1000, acc:0.13585291802883148, loss:0.10018295049667358 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training 
epochs:158/1000, acc:0.14708887040615082, loss:0.10007079690694809 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:159/1000, acc:0.1634320765733719, loss:0.09996208548545837 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:160/1000, acc:0.1511746644973755, loss:0.09985443204641342 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:161/1000, acc:0.16138917207717896, loss:0.09974832832813263 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:162/1000, acc:0.14913176000118256, loss:0.09964419156312943 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:163/1000, acc:0.15934626758098602, loss:0.09954025596380234 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training 
iteration:900/979 +Training epochs:164/1000, acc:0.16241061687469482, loss:0.09943867474794388 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:165/1000, acc:0.16138917207717896, loss:0.09933951497077942 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:166/1000, acc:0.1481103152036667, loss:0.09924163669347763 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:167/1000, acc:0.12461695820093155, loss:0.09914445877075195 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:168/1000, acc:0.16036772727966309, loss:0.09904889762401581 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:169/1000, acc:0.14402452111244202, loss:0.09895478934049606 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training 
iteration:800/979 + Training iteration:900/979 +Training epochs:170/1000, acc:0.1787538230419159, loss:0.09886225312948227 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:171/1000, acc:0.15526047348976135, loss:0.09877009689807892 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:172/1000, acc:0.17671093344688416, loss:0.09867917746305466 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:173/1000, acc:0.1756894737482071, loss:0.09858953207731247 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:174/1000, acc:0.15628191828727722, loss:0.09850252419710159 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:175/1000, acc:0.15321756899356842, loss:0.09841492772102356 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training 
iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:176/1000, acc:0.1511746644973755, loss:0.09832966327667236 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:177/1000, acc:0.16445352137088776, loss:0.09824580699205399 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:178/1000, acc:0.15321756899356842, loss:0.09816243499517441 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:179/1000, acc:0.1664964258670807, loss:0.09807971864938736 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:180/1000, acc:0.1664964258670807, loss:0.09799876064062119 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:181/1000, acc:0.14913176000118256, loss:0.09792035818099976 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training 
iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:182/1000, acc:0.15628191828727722, loss:0.09784078598022461 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:183/1000, acc:0.16241061687469482, loss:0.0977630764245987 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:184/1000, acc:0.14708887040615082, loss:0.09768672287464142 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:185/1000, acc:0.1481103152036667, loss:0.0976119413971901 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:186/1000, acc:0.1664964258670807, loss:0.09753746539354324 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:187/1000, acc:0.14606741070747375, loss:0.09746455401182175 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training 
iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:188/1000, acc:0.1327885538339615, loss:0.0973912701010704 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:189/1000, acc:0.15219612419605255, loss:0.09731955081224442 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:190/1000, acc:0.15015321969985962, loss:0.09724985808134079 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:191/1000, acc:0.1481103152036667, loss:0.0971798300743103 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:192/1000, acc:0.14913176000118256, loss:0.09711107611656189 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:193/1000, acc:0.14913176000118256, loss:0.09704294055700302 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training 
iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:194/1000, acc:0.15526047348976135, loss:0.09697560220956802 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:195/1000, acc:0.17364658415317535, loss:0.0969085767865181 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:196/1000, acc:0.13993871212005615, loss:0.09684330224990845 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:197/1000, acc:0.16036772727966309, loss:0.09677845984697342 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:198/1000, acc:0.14300306141376495, loss:0.09671562910079956 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:199/1000, acc:0.15832482278347015, loss:0.09665238112211227 + Training iteration:100/979 + Training iteration:200/979 + Training 
iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:200/1000, acc:0.16241061687469482, loss:0.09658949077129364 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:201/1000, acc:0.1542390137910843, loss:0.09652874618768692 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:202/1000, acc:0.16853933036327362, loss:0.09646773338317871 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:203/1000, acc:0.15730337798595428, loss:0.09640775620937347 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:204/1000, acc:0.1787538230419159, loss:0.09634851664304733 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:205/1000, acc:0.18079672753810883, loss:0.09628892689943314 + Training iteration:100/979 + Training 
iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:206/1000, acc:0.16853933036327362, loss:0.0962328389286995 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:207/1000, acc:0.1726251244544983, loss:0.09617503732442856 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:208/1000, acc:0.1664964258670807, loss:0.09611918777227402 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:209/1000, acc:0.15832482278347015, loss:0.0960642620921135 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:210/1000, acc:0.18079672753810883, loss:0.09600891172885895 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:211/1000, acc:0.17466802895069122, loss:0.09595532715320587 + Training 
iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:212/1000, acc:0.1726251244544983, loss:0.09590110182762146 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:213/1000, acc:0.18896833062171936, loss:0.09584876149892807 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:214/1000, acc:0.1695607751607895, loss:0.09579578787088394 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:215/1000, acc:0.16751787066459656, loss:0.09574467688798904 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:216/1000, acc:0.1756894737482071, loss:0.09569399803876877 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:217/1000, acc:0.15321756899356842, 
loss:0.09564366191625595 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:218/1000, acc:0.17671093344688416, loss:0.0955919399857521 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:219/1000, acc:0.17671093344688416, loss:0.09554331004619598 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:220/1000, acc:0.1695607751607895, loss:0.09549536556005478 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:221/1000, acc:0.1634320765733719, loss:0.0954471081495285 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:222/1000, acc:0.1879468858242035, loss:0.09539897739887238 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:223/1000, 
acc:0.17058221995830536, loss:0.09535187482833862 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:224/1000, acc:0.17466802895069122, loss:0.09530524909496307 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:225/1000, acc:0.18692544102668762, loss:0.09525985270738602 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:226/1000, acc:0.17058221995830536, loss:0.09521375596523285 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:227/1000, acc:0.17466802895069122, loss:0.09516984224319458 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:228/1000, acc:0.17058221995830536, loss:0.09512534737586975 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training 
iteration:900/979 +Training epochs:229/1000, acc:0.17058221995830536, loss:0.09508208185434341 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:230/1000, acc:0.17977528274059296, loss:0.0950385183095932 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:231/1000, acc:0.1634320765733719, loss:0.0949953943490982 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:232/1000, acc:0.1940755844116211, loss:0.09495244920253754 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:233/1000, acc:0.18283963203430176, loss:0.09491124004125595 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:234/1000, acc:0.16547498106956482, loss:0.0948687344789505 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training 
iteration:800/979 + Training iteration:900/979 +Training epochs:235/1000, acc:0.18998979032039642, loss:0.0948277935385704 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:236/1000, acc:0.20122574269771576, loss:0.09478811174631119 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:237/1000, acc:0.1818181872367859, loss:0.09474756568670273 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:238/1000, acc:0.17058221995830536, loss:0.0947074219584465 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:239/1000, acc:0.20837590098381042, loss:0.0946686714887619 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:240/1000, acc:0.18692544102668762, loss:0.09462997317314148 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training 
iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:241/1000, acc:0.1940755844116211, loss:0.09459194540977478 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:242/1000, acc:0.14300306141376495, loss:0.09455649554729462 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:243/1000, acc:0.15832482278347015, loss:0.09451838582754135 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:244/1000, acc:0.13789580762386322, loss:0.09448093175888062 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:245/1000, acc:0.15730337798595428, loss:0.09444376826286316 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:246/1000, acc:0.15730337798595428, loss:0.09440809488296509 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training 
iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:247/1000, acc:0.16241061687469482, loss:0.09437161684036255 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:248/1000, acc:0.14606741070747375, loss:0.09433624148368835 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:249/1000, acc:0.16547498106956482, loss:0.09430032223463058 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:250/1000, acc:0.1695607751607895, loss:0.09426611661911011 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:251/1000, acc:0.14504596590995789, loss:0.0942324697971344 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:252/1000, acc:0.16853933036327362, loss:0.09419828653335571 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training 
iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:253/1000, acc:0.17058221995830536, loss:0.09416327625513077 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:254/1000, acc:0.18386107683181763, loss:0.09413056075572968 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:255/1000, acc:0.1634320765733719, loss:0.09409692883491516 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:256/1000, acc:0.15219612419605255, loss:0.09406503289937973 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:257/1000, acc:0.16751787066459656, loss:0.09403301030397415 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:258/1000, acc:0.1848825365304947, loss:0.09400084614753723 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training 
iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:259/1000, acc:0.1940755844116211, loss:0.09396917372941971 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:260/1000, acc:0.16138917207717896, loss:0.0939369723200798 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:261/1000, acc:0.14708887040615082, loss:0.09390709549188614 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:262/1000, acc:0.18079672753810883, loss:0.09387581050395966 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:263/1000, acc:0.17058221995830536, loss:0.0938456654548645 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:264/1000, acc:0.2002042829990387, loss:0.09381495416164398 + Training iteration:100/979 + Training iteration:200/979 + Training 
iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:265/1000, acc:0.16036772727966309, loss:0.09378586709499359 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:266/1000, acc:0.1664964258670807, loss:0.09375662356615067 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:267/1000, acc:0.17058221995830536, loss:0.09372706711292267 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:268/1000, acc:0.18590398132801056, loss:0.09369781613349915 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:269/1000, acc:0.16445352137088776, loss:0.0936693549156189 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:270/1000, acc:0.17364658415317535, loss:0.09364070743322372 + Training iteration:100/979 + Training 
iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:271/1000, acc:0.20224718749523163, loss:0.09361307322978973 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:272/1000, acc:0.17773237824440002, loss:0.09358556568622589 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:273/1000, acc:0.17160367965698242, loss:0.09355813264846802 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:274/1000, acc:0.1542390137910843, loss:0.09353076666593552 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:275/1000, acc:0.1787538230419159, loss:0.09350398182868958 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:276/1000, acc:0.17364658415317535, loss:0.09347725659608841 + Training 
iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:277/1000, acc:0.17773237824440002, loss:0.09345103055238724 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:278/1000, acc:0.2002042829990387, loss:0.09342508018016815 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:279/1000, acc:0.1971399337053299, loss:0.09339878708124161 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:280/1000, acc:0.17466802895069122, loss:0.09337374567985535 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:281/1000, acc:0.1695607751607895, loss:0.09334871917963028 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:282/1000, acc:0.1910112351179123, 
loss:0.09332268685102463 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:283/1000, acc:0.1879468858242035, loss:0.09329795092344284 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:284/1000, acc:0.18998979032039642, loss:0.09327362477779388 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:285/1000, acc:0.16547498106956482, loss:0.09324868768453598 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:286/1000, acc:0.1818181872367859, loss:0.09322520345449448 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:287/1000, acc:0.1879468858242035, loss:0.09320089966058731 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:288/1000, 
acc:0.17671093344688416, loss:0.09317786246538162 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:289/1000, acc:0.1848825365304947, loss:0.09315472096204758 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:290/1000, acc:0.20429009199142456, loss:0.09313172847032547 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:291/1000, acc:0.2063329964876175, loss:0.09310842305421829 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:292/1000, acc:0.16853933036327362, loss:0.09308542311191559 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:293/1000, acc:0.12665985524654388, loss:0.09306341409683228 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 
+Training epochs:294/1000, acc:0.1664964258670807, loss:0.09304127097129822 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:295/1000, acc:0.1695607751607895, loss:0.09301894903182983 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:296/1000, acc:0.1664964258670807, loss:0.09299613535404205 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:297/1000, acc:0.19611848890781403, loss:0.09297456592321396 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:298/1000, acc:0.18692544102668762, loss:0.09295273572206497 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:299/1000, acc:0.18079672753810883, loss:0.09293121099472046 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + 
Training iteration:900/979 +Training epochs:300/1000, acc:0.1756894737482071, loss:0.09291011095046997 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:301/1000, acc:0.18283963203430176, loss:0.09288833290338516 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:302/1000, acc:0.1848825365304947, loss:0.09286800771951675 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:303/1000, acc:0.1848825365304947, loss:0.09284770488739014 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:304/1000, acc:0.1879468858242035, loss:0.09282699227333069 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:305/1000, acc:0.17364658415317535, loss:0.09280653297901154 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + 
Training iteration:800/979 + Training iteration:900/979 +Training epochs:306/1000, acc:0.19816139340400696, loss:0.09278734028339386 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:307/1000, acc:0.1818181872367859, loss:0.09276650100946426 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:308/1000, acc:0.18590398132801056, loss:0.09274774044752121 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:309/1000, acc:0.2002042829990387, loss:0.09272783249616623 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:310/1000, acc:0.1879468858242035, loss:0.09270856529474258 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:311/1000, acc:0.1787538230419159, loss:0.09268967807292938 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + 
Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:312/1000, acc:0.18692544102668762, loss:0.09267088770866394 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:313/1000, acc:0.1756894737482071, loss:0.09265188127756119 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:314/1000, acc:0.20429009199142456, loss:0.09263405203819275 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:315/1000, acc:0.1726251244544983, loss:0.09261531382799149 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:316/1000, acc:0.15321756899356842, loss:0.09259718656539917 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:317/1000, acc:0.20531153678894043, loss:0.09257882833480835 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + 
Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:318/1000, acc:0.1940755844116211, loss:0.09256062656641006 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:319/1000, acc:0.20122574269771576, loss:0.09254250675439835 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:320/1000, acc:0.19816139340400696, loss:0.09252466261386871 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:321/1000, acc:0.1971399337053299, loss:0.09250764548778534 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:322/1000, acc:0.17977528274059296, loss:0.09249045699834824 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:323/1000, acc:0.2185903936624527, loss:0.09247307479381561 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + 
Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:324/1000, acc:0.1879468858242035, loss:0.09245527535676956 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:325/1000, acc:0.19203267991542816, loss:0.09243931621313095 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:326/1000, acc:0.17466802895069122, loss:0.09242281317710876 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:327/1000, acc:0.17977528274059296, loss:0.09240613877773285 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:328/1000, acc:0.18998979032039642, loss:0.09238972514867783 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:329/1000, acc:0.19611848890781403, loss:0.09237305819988251 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + 
Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:330/1000, acc:0.18692544102668762, loss:0.09235762804746628 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:331/1000, acc:0.21041879057884216, loss:0.0923418253660202 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:332/1000, acc:0.2002042829990387, loss:0.09232450276613235 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:333/1000, acc:0.1910112351179123, loss:0.09230951964855194 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:334/1000, acc:0.19816139340400696, loss:0.09229398518800735 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:335/1000, acc:0.20429009199142456, loss:0.09227859228849411 + Training iteration:100/979 + Training iteration:200/979 + 
Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:336/1000, acc:0.2155260443687439, loss:0.09226228296756744 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:337/1000, acc:0.20735444128513336, loss:0.09224691987037659 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:338/1000, acc:0.16547498106956482, loss:0.09223172813653946 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:339/1000, acc:0.1940755844116211, loss:0.09221772104501724 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:340/1000, acc:0.22574055194854736, loss:0.09220282733440399 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:341/1000, acc:0.22676199674606323, loss:0.0921877771615982 + Training iteration:100/979 + 
Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:342/1000, acc:0.22267620265483856, loss:0.09217323362827301 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:343/1000, acc:0.2032686471939087, loss:0.0921587347984314 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:344/1000, acc:0.24719101190567017, loss:0.09214455634355545 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:345/1000, acc:0.20429009199142456, loss:0.09213148057460785 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:346/1000, acc:0.2216547429561615, loss:0.09211718291044235 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:347/1000, acc:0.19305413961410522, loss:0.09210202842950821 + 
Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:348/1000, acc:0.20122574269771576, loss:0.0920887216925621 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:349/1000, acc:0.2032686471939087, loss:0.09207434207201004 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:350/1000, acc:0.2247191071510315, loss:0.0920606255531311 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:351/1000, acc:0.2492339164018631, loss:0.09204642474651337 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:352/1000, acc:0.2553626000881195, loss:0.09203247725963593 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:353/1000, acc:0.23595505952835083, 
loss:0.09201966226100922 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:354/1000, acc:0.20837590098381042, loss:0.09200650453567505 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:355/1000, acc:0.24208375811576843, loss:0.091993547976017 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:356/1000, acc:0.24106231331825256, loss:0.09198050945997238 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:357/1000, acc:0.21756894886493683, loss:0.09196855872869492 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:358/1000, acc:0.21961185336112976, loss:0.09195538610219955 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training 
epochs:359/1000, acc:0.2124616950750351, loss:0.09194249659776688 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:360/1000, acc:0.2247191071510315, loss:0.0919303148984909 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:361/1000, acc:0.1940755844116211, loss:0.09191783517599106 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:362/1000, acc:0.23186925053596497, loss:0.09190542250871658 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:363/1000, acc:0.20735444128513336, loss:0.09189383685588837 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:364/1000, acc:0.20837590098381042, loss:0.09188100695610046 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training 
iteration:900/979 +Training epochs:365/1000, acc:0.22982634603977203, loss:0.09186947345733643 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:366/1000, acc:0.2216547429561615, loss:0.0918571949005127 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:367/1000, acc:0.18283963203430176, loss:0.09184478968381882 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:368/1000, acc:0.20429009199142456, loss:0.09183390438556671 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:369/1000, acc:0.21450459957122803, loss:0.09182234853506088 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:370/1000, acc:0.19611848890781403, loss:0.09181029349565506 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training 
iteration:800/979 + Training iteration:900/979 +Training epochs:371/1000, acc:0.19918283820152283, loss:0.09179886430501938 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:372/1000, acc:0.20735444128513336, loss:0.09178713709115982 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:373/1000, acc:0.20224718749523163, loss:0.09177594631910324 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:374/1000, acc:0.21961185336112976, loss:0.09176428616046906 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:375/1000, acc:0.23901940882205963, loss:0.0917525514960289 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:376/1000, acc:0.23289071023464203, loss:0.09174171090126038 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training 
iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:377/1000, acc:0.23493359982967377, loss:0.09172999858856201 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:378/1000, acc:0.22880490124225616, loss:0.09172026067972183 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:379/1000, acc:0.22063329815864563, loss:0.09170851111412048 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:380/1000, acc:0.2093973457813263, loss:0.09169827401638031 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:381/1000, acc:0.2124616950750351, loss:0.09168853610754013 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:382/1000, acc:0.21756894886493683, loss:0.09167734533548355 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training 
iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:383/1000, acc:0.22982634603977203, loss:0.09166694432497025 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:384/1000, acc:0.24106231331825256, loss:0.09165605902671814 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:385/1000, acc:0.21756894886493683, loss:0.09164637327194214 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:386/1000, acc:0.24719101190567017, loss:0.09163552522659302 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:387/1000, acc:0.21348313987255096, loss:0.09162610024213791 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:388/1000, acc:0.23289071023464203, loss:0.09161587804555893 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training 
iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:389/1000, acc:0.2277834564447403, loss:0.09160582721233368 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:390/1000, acc:0.24208375811576843, loss:0.09159576892852783 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:391/1000, acc:0.26251277327537537, loss:0.09158547967672348 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:392/1000, acc:0.19611848890781403, loss:0.09157633036375046 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:393/1000, acc:0.25331971049308777, loss:0.09156710654497147 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:394/1000, acc:0.2339121550321579, loss:0.09155680239200592 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training 
iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:395/1000, acc:0.21961185336112976, loss:0.0915471538901329 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:396/1000, acc:0.22267620265483856, loss:0.09153763204813004 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:397/1000, acc:0.2400408536195755, loss:0.0915277972817421 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:398/1000, acc:0.24106231331825256, loss:0.09151861816644669 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:399/1000, acc:0.24412666261196136, loss:0.09150948375463486 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:400/1000, acc:0.21961185336112976, loss:0.09150169044733047 + Training iteration:100/979 + Training iteration:200/979 + Training 
iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:401/1000, acc:0.25127682089805603, loss:0.09149257093667984 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:402/1000, acc:0.24106231331825256, loss:0.09148254245519638 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:403/1000, acc:0.22267620265483856, loss:0.09147341549396515 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:404/1000, acc:0.2369765043258667, loss:0.09146520495414734 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:405/1000, acc:0.23289071023464203, loss:0.09145641326904297 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:406/1000, acc:0.24719101190567017, loss:0.09144662320613861 + Training iteration:100/979 + Training 
iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:407/1000, acc:0.24719101190567017, loss:0.09143775701522827 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:408/1000, acc:0.2277834564447403, loss:0.0914292186498642 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:409/1000, acc:0.22574055194854736, loss:0.09142075479030609 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:410/1000, acc:0.19509704411029816, loss:0.09141257405281067 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:411/1000, acc:0.21961185336112976, loss:0.0914047509431839 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:412/1000, acc:0.2093973457813263, loss:0.0913960188627243 + Training 
iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:413/1000, acc:0.2185903936624527, loss:0.09138691425323486 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:414/1000, acc:0.25434115529060364, loss:0.09137827903032303 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:415/1000, acc:0.25127682089805603, loss:0.0913705825805664 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:416/1000, acc:0.22574055194854736, loss:0.09136217087507248 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:417/1000, acc:0.24106231331825256, loss:0.09135416895151138 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:418/1000, acc:0.25331971049308777, 
loss:0.09134581685066223 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:419/1000, acc:0.21348313987255096, loss:0.09133805334568024 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:420/1000, acc:0.23186925053596497, loss:0.09133077412843704 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:421/1000, acc:0.26966291666030884, loss:0.09132205694913864 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:422/1000, acc:0.2706843614578247, loss:0.09131386131048203 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:423/1000, acc:0.24821245670318604, loss:0.09130655229091644 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training 
epochs:424/1000, acc:0.25025537610054016, loss:0.0912984311580658 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:425/1000, acc:0.24514810740947723, loss:0.09129086136817932 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:426/1000, acc:0.2339121550321579, loss:0.09128335863351822 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:427/1000, acc:0.2584269642829895, loss:0.0912756621837616 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:428/1000, acc:0.25944840908050537, loss:0.09126783907413483 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:429/1000, acc:0.23493359982967377, loss:0.09126046299934387 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training 
iteration:900/979 +Training epochs:430/1000, acc:0.2492339164018631, loss:0.09125419706106186 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:431/1000, acc:0.2808988690376282, loss:0.09124620258808136 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:432/1000, acc:0.23289071023464203, loss:0.09123846888542175 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:433/1000, acc:0.25434115529060364, loss:0.09123189747333527 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:434/1000, acc:0.23493359982967377, loss:0.09122443199157715 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:435/1000, acc:0.25331971049308777, loss:0.0912177562713623 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training 
iteration:800/979 + Training iteration:900/979 +Training epochs:436/1000, acc:0.24106231331825256, loss:0.09121094644069672 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:437/1000, acc:0.26046985387802124, loss:0.0912037119269371 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:438/1000, acc:0.24412666261196136, loss:0.09119679778814316 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:439/1000, acc:0.265577107667923, loss:0.09118970483541489 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:440/1000, acc:0.2461695671081543, loss:0.09118283540010452 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:441/1000, acc:0.2247191071510315, loss:0.09117614477872849 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training 
iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:442/1000, acc:0.24821245670318604, loss:0.0911690965294838 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:443/1000, acc:0.25331971049308777, loss:0.09116242825984955 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:444/1000, acc:0.2890704870223999, loss:0.0911550223827362 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:445/1000, acc:0.29009193181991577, loss:0.09114822745323181 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:446/1000, acc:0.27272728085517883, loss:0.09114174544811249 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:447/1000, acc:0.2553626000881195, loss:0.09113558381795883 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training 
iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:448/1000, acc:0.2522982656955719, loss:0.09112903475761414 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:449/1000, acc:0.25740551948547363, loss:0.09112317860126495 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:450/1000, acc:0.29111337661743164, loss:0.09111565351486206 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:451/1000, acc:0.3043922483921051, loss:0.09110882878303528 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:452/1000, acc:0.24106231331825256, loss:0.09110279381275177 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:453/1000, acc:0.2860061228275299, loss:0.09109726548194885 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training 
iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:454/1000, acc:0.25740551948547363, loss:0.09109039604663849 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:455/1000, acc:0.28192031383514404, loss:0.09108425676822662 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:456/1000, acc:0.29928499460220337, loss:0.09107745438814163 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:457/1000, acc:0.29009193181991577, loss:0.09107096493244171 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:458/1000, acc:0.27170583605766296, loss:0.09106490015983582 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:459/1000, acc:0.26353421807289124, loss:0.09105978161096573 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training 
iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:460/1000, acc:0.22676199674606323, loss:0.09105491638183594 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:461/1000, acc:0.26046985387802124, loss:0.09104897826910019 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:462/1000, acc:0.26966291666030884, loss:0.09104275703430176 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:463/1000, acc:0.2553626000881195, loss:0.09103671461343765 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:464/1000, acc:0.26659858226776123, loss:0.09103071689605713 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:465/1000, acc:0.27477017045021057, loss:0.09102477878332138 + Training iteration:100/979 + Training iteration:200/979 + Training 
iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:466/1000, acc:0.26353421807289124, loss:0.0910182073712349 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:467/1000, acc:0.3043922483921051, loss:0.09101278334856033 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:468/1000, acc:0.29111337661743164, loss:0.09100732952356339 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:469/1000, acc:0.27885597944259644, loss:0.09100160747766495 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:470/1000, acc:0.26353421807289124, loss:0.09099593758583069 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:471/1000, acc:0.2808988690376282, loss:0.09099087119102478 + Training iteration:100/979 + Training 
iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:472/1000, acc:0.2808988690376282, loss:0.09098463505506516 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:473/1000, acc:0.2870275676250458, loss:0.09097929298877716 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:474/1000, acc:0.2706843614578247, loss:0.09097369760274887 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:475/1000, acc:0.2829417884349823, loss:0.09096850454807281 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:476/1000, acc:0.25740551948547363, loss:0.0909627303481102 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:477/1000, acc:0.265577107667923, loss:0.09095747768878937 + Training 
iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:478/1000, acc:0.27783453464508057, loss:0.09095291793346405 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:479/1000, acc:0.27170583605766296, loss:0.09094692021608353 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:480/1000, acc:0.28804904222488403, loss:0.0909413993358612 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:481/1000, acc:0.2921348214149475, loss:0.09093593806028366 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:482/1000, acc:0.2962206304073334, loss:0.0909305289387703 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:483/1000, acc:0.27170583605766296, 
loss:0.09092570841312408 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:484/1000, acc:0.2870275676250458, loss:0.09092094004154205 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:485/1000, acc:0.28396323323249817, loss:0.09091557562351227 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:486/1000, acc:0.28804904222488403, loss:0.09091096371412277 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:487/1000, acc:0.28192031383514404, loss:0.09090634435415268 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:488/1000, acc:0.29724207520484924, loss:0.09090133011341095 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training 
epochs:489/1000, acc:0.3074565827846527, loss:0.09089556336402893 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:490/1000, acc:0.3074565827846527, loss:0.09089043736457825 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:491/1000, acc:0.29928499460220337, loss:0.0908859446644783 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:492/1000, acc:0.2798774242401123, loss:0.09088148176670074 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:493/1000, acc:0.2645556628704071, loss:0.09087686240673065 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:494/1000, acc:0.28804904222488403, loss:0.09087250381708145 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training 
iteration:900/979 +Training epochs:495/1000, acc:0.2962206304073334, loss:0.09086713194847107 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:496/1000, acc:0.2676200270652771, loss:0.09086239337921143 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:497/1000, acc:0.26659858226776123, loss:0.09085802733898163 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:498/1000, acc:0.27579161524772644, loss:0.09085312485694885 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:499/1000, acc:0.24208375811576843, loss:0.0908493772149086 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:500/1000, acc:0.25127682089805603, loss:0.09084440022706985 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training 
iteration:800/979 + Training iteration:900/979 +Training epochs:501/1000, acc:0.2614913284778595, loss:0.09083977341651917 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:502/1000, acc:0.2890704870223999, loss:0.09083519876003265 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:503/1000, acc:0.26353421807289124, loss:0.09083054959774017 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:504/1000, acc:0.24412666261196136, loss:0.09082675725221634 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:505/1000, acc:0.27477017045021057, loss:0.0908215269446373 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:506/1000, acc:0.28498467803001404, loss:0.09081698209047318 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training 
iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:507/1000, acc:0.2870275676250458, loss:0.09081205725669861 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:508/1000, acc:0.29724207520484924, loss:0.09080742299556732 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:509/1000, acc:0.3166496455669403, loss:0.09080313891172409 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:510/1000, acc:0.27783453464508057, loss:0.09079901874065399 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:511/1000, acc:0.2768130600452423, loss:0.0907956212759018 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:512/1000, acc:0.2798774242401123, loss:0.0907914787530899 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training 
iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:513/1000, acc:0.26353421807289124, loss:0.09078723937273026 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:514/1000, acc:0.2645556628704071, loss:0.090783029794693 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:515/1000, acc:0.27783453464508057, loss:0.09077955782413483 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:516/1000, acc:0.26353421807289124, loss:0.09077472984790802 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:517/1000, acc:0.26251277327537537, loss:0.09077049046754837 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:518/1000, acc:0.2829417884349823, loss:0.09076612442731857 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training 
iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:519/1000, acc:0.3043922483921051, loss:0.09076134115457535 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:520/1000, acc:0.29315629601478577, loss:0.09075780212879181 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:521/1000, acc:0.28192031383514404, loss:0.09075389802455902 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:522/1000, acc:0.27885597944259644, loss:0.09074998646974564 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:523/1000, acc:0.26966291666030884, loss:0.09074541181325912 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:524/1000, acc:0.26864147186279297, loss:0.09074220806360245 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training 
iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:525/1000, acc:0.27272728085517883, loss:0.0907379761338234 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:526/1000, acc:0.29009193181991577, loss:0.0907345563173294 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:527/1000, acc:0.2870275676250458, loss:0.09072981774806976 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:528/1000, acc:0.29928499460220337, loss:0.0907258614897728 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:529/1000, acc:0.2951991856098175, loss:0.09072186797857285 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:530/1000, acc:0.2951991856098175, loss:0.09071772545576096 + Training iteration:100/979 + Training iteration:200/979 + Training 
iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:531/1000, acc:0.30337077379226685, loss:0.09071413427591324 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:532/1000, acc:0.2870275676250458, loss:0.09071027487516403 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:533/1000, acc:0.2645556628704071, loss:0.09070713073015213 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:534/1000, acc:0.26864147186279297, loss:0.09070371091365814 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:535/1000, acc:0.28498467803001404, loss:0.09069998562335968 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:536/1000, acc:0.3084780275821686, loss:0.09069591760635376 + Training iteration:100/979 + Training 
iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:537/1000, acc:0.3299284875392914, loss:0.09069184213876724 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:538/1000, acc:0.28192031383514404, loss:0.09068845957517624 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:539/1000, acc:0.30030643939971924, loss:0.09068553149700165 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:540/1000, acc:0.2921348214149475, loss:0.09068108350038528 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:541/1000, acc:0.302349328994751, loss:0.09067784249782562 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:542/1000, acc:0.3135852813720703, loss:0.09067387133836746 + Training 
iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:543/1000, acc:0.29111337661743164, loss:0.09067026525735855 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:544/1000, acc:0.30337077379226685, loss:0.09066716581583023 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:545/1000, acc:0.31256383657455444, loss:0.09066344052553177 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:546/1000, acc:0.30337077379226685, loss:0.09065991640090942 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:547/1000, acc:0.3074565827846527, loss:0.09065650403499603 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:548/1000, acc:0.30643513798713684, 
loss:0.0906536802649498 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:549/1000, acc:0.29009193181991577, loss:0.09065043926239014 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:550/1000, acc:0.3084780275821686, loss:0.09064645320177078 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:551/1000, acc:0.29111337661743164, loss:0.09064263105392456 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:552/1000, acc:0.30030643939971924, loss:0.0906396135687828 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:553/1000, acc:0.33605721592903137, loss:0.0906360000371933 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:554/1000, 
acc:0.2951991856098175, loss:0.09063296765089035 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:555/1000, acc:0.3084780275821686, loss:0.09062930941581726 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:556/1000, acc:0.3176710903644562, loss:0.09062635898590088 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:557/1000, acc:0.33605721592903137, loss:0.09062257409095764 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:558/1000, acc:0.32175689935684204, loss:0.09061870723962784 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:559/1000, acc:0.3483146131038666, loss:0.09061583876609802 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 
+Training epochs:560/1000, acc:0.339121550321579, loss:0.09061245620250702 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:561/1000, acc:0.3472931683063507, loss:0.09060898423194885 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:562/1000, acc:0.3442288041114807, loss:0.09060540795326233 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:563/1000, acc:0.3779366612434387, loss:0.09060254693031311 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:564/1000, acc:0.34627169370651245, loss:0.09059912711381912 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:565/1000, acc:0.3319714069366455, loss:0.09059641510248184 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + 
Training iteration:900/979 +Training epochs:566/1000, acc:0.32788559794425964, loss:0.09059394150972366 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:567/1000, acc:0.3452502489089966, loss:0.09059110283851624 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:568/1000, acc:0.34014299511909485, loss:0.09058833867311478 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:569/1000, acc:0.34320735931396484, loss:0.09058503806591034 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:570/1000, acc:0.3503575026988983, loss:0.09058158099651337 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:571/1000, acc:0.37691521644592285, loss:0.09057765454053879 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + 
Training iteration:800/979 + Training iteration:900/979 +Training epochs:572/1000, acc:0.3503575026988983, loss:0.09057532995939255 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:573/1000, acc:0.30337077379226685, loss:0.09057337045669556 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:574/1000, acc:0.3289070427417755, loss:0.09057068824768066 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:575/1000, acc:0.3411644399166107, loss:0.09056727588176727 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:576/1000, acc:0.32482123374938965, loss:0.09056492149829865 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:577/1000, acc:0.29417774081230164, loss:0.09056264907121658 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + 
Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:578/1000, acc:0.29315629601478577, loss:0.09055966138839722 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:579/1000, acc:0.342185914516449, loss:0.09055687487125397 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:580/1000, acc:0.30337077379226685, loss:0.09055374562740326 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:581/1000, acc:0.32788559794425964, loss:0.09055130928754807 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:582/1000, acc:0.33401429653167725, loss:0.09054824709892273 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:583/1000, acc:0.3329928517341614, loss:0.09054501354694366 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + 
Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:584/1000, acc:0.33094996213912964, loss:0.09054258465766907 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:585/1000, acc:0.3289070427417755, loss:0.09053894877433777 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:586/1000, acc:0.3319714069366455, loss:0.09053672850131989 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:587/1000, acc:0.2921348214149475, loss:0.09053416550159454 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:588/1000, acc:0.3319714069366455, loss:0.09053129702806473 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:589/1000, acc:0.2890704870223999, loss:0.0905287116765976 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + 
Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:590/1000, acc:0.2982635200023651, loss:0.09052625298500061 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:591/1000, acc:0.2951991856098175, loss:0.09052412956953049 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:592/1000, acc:0.31460675597190857, loss:0.0905209332704544 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:593/1000, acc:0.2951991856098175, loss:0.09051795303821564 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:594/1000, acc:0.2951991856098175, loss:0.09051638096570969 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:595/1000, acc:0.2962206304073334, loss:0.09051245450973511 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + 
Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:596/1000, acc:0.3268641531467438, loss:0.09051027148962021 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:597/1000, acc:0.33401429653167725, loss:0.09050824493169785 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:598/1000, acc:0.3237997889518738, loss:0.09050546586513519 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:599/1000, acc:0.3227783441543579, loss:0.09050275385379791 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:600/1000, acc:0.3074565827846527, loss:0.09050014615058899 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:601/1000, acc:0.302349328994751, loss:0.09049738198518753 + Training iteration:100/979 + Training iteration:200/979 + 
Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:602/1000, acc:0.27885597944259644, loss:0.09049646556377411 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:603/1000, acc:0.31869253516197205, loss:0.09049361199140549 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:604/1000, acc:0.30337077379226685, loss:0.09049040079116821 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:605/1000, acc:0.31562820076942444, loss:0.09048811346292496 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:606/1000, acc:0.31562820076942444, loss:0.09048493951559067 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:607/1000, acc:0.33605721592903137, loss:0.09048230946063995 + Training iteration:100/979 + 
Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:608/1000, acc:0.35240042209625244, loss:0.09048005938529968 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:609/1000, acc:0.35240042209625244, loss:0.09047829359769821 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:610/1000, acc:0.3697650730609894, loss:0.090475894510746 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:611/1000, acc:0.3452502489089966, loss:0.09047277271747589 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:612/1000, acc:0.31869253516197205, loss:0.09047102183103561 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:613/1000, acc:0.30541369318962097, loss:0.09046869724988937 + 
Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:614/1000, acc:0.3268641531467438, loss:0.09046649932861328 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:615/1000, acc:0.3299284875392914, loss:0.09046389162540436 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:616/1000, acc:0.34933605790138245, loss:0.09046174585819244 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:617/1000, acc:0.3789581060409546, loss:0.09045826643705368 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:618/1000, acc:0.3381001055240631, loss:0.09045583754777908 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:619/1000, acc:0.3667007088661194, 
loss:0.09045437723398209 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:620/1000, acc:0.3595505654811859, loss:0.09045194834470749 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:621/1000, acc:0.36159345507621765, loss:0.09044922888278961 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:622/1000, acc:0.33707866072654724, loss:0.09044701606035233 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:623/1000, acc:0.35546475648880005, loss:0.09044432640075684 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:624/1000, acc:0.36465781927108765, loss:0.09044225513935089 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training 
epochs:625/1000, acc:0.37078651785850525, loss:0.09044013917446136 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:626/1000, acc:0.3544433116912842, loss:0.09043805301189423 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:627/1000, acc:0.33401429653167725, loss:0.09043662995100021 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:628/1000, acc:0.34627169370651245, loss:0.09043486416339874 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:629/1000, acc:0.33707866072654724, loss:0.0904325619339943 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:630/1000, acc:0.342185914516449, loss:0.0904303565621376 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training 
iteration:900/979 +Training epochs:631/1000, acc:0.3105209469795227, loss:0.09042835235595703 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:632/1000, acc:0.3258427083492279, loss:0.09042630344629288 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:633/1000, acc:0.3442288041114807, loss:0.090423583984375 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:634/1000, acc:0.33605721592903137, loss:0.09042131155729294 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:635/1000, acc:0.3258427083492279, loss:0.09041979908943176 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:636/1000, acc:0.3503575026988983, loss:0.09041678160429001 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training 
iteration:800/979 + Training iteration:900/979 +Training epochs:637/1000, acc:0.342185914516449, loss:0.0904146358370781 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:638/1000, acc:0.3258427083492279, loss:0.09041351079940796 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:639/1000, acc:0.3636363744735718, loss:0.09041185677051544 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:640/1000, acc:0.3411644399166107, loss:0.0904093086719513 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:641/1000, acc:0.35852912068367004, loss:0.09040693938732147 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:642/1000, acc:0.3748723268508911, loss:0.09040475636720657 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training 
iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:643/1000, acc:0.35546475648880005, loss:0.09040255099534988 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:644/1000, acc:0.38610827922821045, loss:0.0903998464345932 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:645/1000, acc:0.38610827922821045, loss:0.0903971791267395 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:646/1000, acc:0.3595505654811859, loss:0.09039571136236191 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:647/1000, acc:0.3881511688232422, loss:0.09039407968521118 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:648/1000, acc:0.3840653598308563, loss:0.09039183706045151 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training 
iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:649/1000, acc:0.3575076460838318, loss:0.09039013832807541 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:650/1000, acc:0.3820224702358246, loss:0.09038915485143661 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:651/1000, acc:0.3810010254383087, loss:0.09038641303777695 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:652/1000, acc:0.3840653598308563, loss:0.09038454294204712 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:653/1000, acc:0.3605720102787018, loss:0.09038203209638596 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:654/1000, acc:0.3513789474964142, loss:0.09038097411394119 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training 
iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:655/1000, acc:0.3850868344306946, loss:0.09037885814905167 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:656/1000, acc:0.3850868344306946, loss:0.09037648886442184 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:657/1000, acc:0.3605720102787018, loss:0.09037595242261887 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:658/1000, acc:0.37385088205337524, loss:0.09037361294031143 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:659/1000, acc:0.39530134201049805, loss:0.0903715193271637 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:660/1000, acc:0.3626148998737335, loss:0.09036953002214432 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training 
iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:661/1000, acc:0.3881511688232422, loss:0.09036848694086075 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:662/1000, acc:0.38917261362075806, loss:0.09036608785390854 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:663/1000, acc:0.38917261362075806, loss:0.09036434441804886 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:664/1000, acc:0.3973442316055298, loss:0.09036259353160858 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:665/1000, acc:0.38917261362075806, loss:0.09036140143871307 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:666/1000, acc:0.3789581060409546, loss:0.09035947918891907 + Training iteration:100/979 + Training iteration:200/979 + Training 
iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:667/1000, acc:0.3687436282634735, loss:0.09035719931125641 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:668/1000, acc:0.38610827922821045, loss:0.09035541117191315 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:669/1000, acc:0.37385088205337524, loss:0.09035427123308182 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:670/1000, acc:0.3850868344306946, loss:0.09035227447748184 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:671/1000, acc:0.36465781927108765, loss:0.09035028517246246 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:672/1000, acc:0.35240042209625244, loss:0.09034945070743561 + Training iteration:100/979 + Training 
iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:673/1000, acc:0.33401429653167725, loss:0.0903484895825386 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:674/1000, acc:0.339121550321579, loss:0.09034660458564758 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:675/1000, acc:0.3452502489089966, loss:0.09034442156553268 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:676/1000, acc:0.3779366612434387, loss:0.09034280478954315 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:677/1000, acc:0.37385088205337524, loss:0.09034063667058945 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:678/1000, acc:0.3534218668937683, loss:0.09033948183059692 + Training 
iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:679/1000, acc:0.339121550321579, loss:0.09033849835395813 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:680/1000, acc:0.32175689935684204, loss:0.09033682942390442 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:681/1000, acc:0.3626148998737335, loss:0.09033515304327011 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:682/1000, acc:0.35852912068367004, loss:0.09033337235450745 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:683/1000, acc:0.3534218668937683, loss:0.0903322622179985 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:684/1000, acc:0.339121550321579, 
loss:0.0903305634856224 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:685/1000, acc:0.36465781927108765, loss:0.09032858163118362 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:686/1000, acc:0.3544433116912842, loss:0.09032630920410156 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:687/1000, acc:0.37997958064079285, loss:0.0903257355093956 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:688/1000, acc:0.3748723268508911, loss:0.09032291173934937 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:689/1000, acc:0.37691521644592285, loss:0.09032155573368073 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:690/1000, 
acc:0.36772215366363525, loss:0.09031978249549866 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:691/1000, acc:0.3789581060409546, loss:0.09031829982995987 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:692/1000, acc:0.3687436282634735, loss:0.09031713753938675 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:693/1000, acc:0.3850868344306946, loss:0.09031502902507782 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:694/1000, acc:0.41062307357788086, loss:0.09031356126070023 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:695/1000, acc:0.3973442316055298, loss:0.09031113237142563 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 
+Training epochs:696/1000, acc:0.372829407453537, loss:0.09030995517969131 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:697/1000, acc:0.3779366612434387, loss:0.09030959010124207 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:698/1000, acc:0.409601628780365, loss:0.0903075635433197 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:699/1000, acc:0.4310520887374878, loss:0.09030544757843018 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:700/1000, acc:0.3973442316055298, loss:0.09030391275882721 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:701/1000, acc:0.3871297240257263, loss:0.09030357003211975 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + 
Training iteration:900/979 +Training epochs:702/1000, acc:0.39836567640304565, loss:0.09030169993638992 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:703/1000, acc:0.4085801839828491, loss:0.09029980003833771 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:704/1000, acc:0.41368743777275085, loss:0.09029758721590042 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:705/1000, acc:0.4361593425273895, loss:0.09029632061719894 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:706/1000, acc:0.4330950081348419, loss:0.09029488265514374 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:707/1000, acc:0.39530134201049805, loss:0.09029331803321838 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + 
Training iteration:800/979 + Training iteration:900/979 +Training epochs:708/1000, acc:0.41675180196762085, loss:0.09029263257980347 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:709/1000, acc:0.3850868344306946, loss:0.09029172360897064 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:710/1000, acc:0.3912155330181122, loss:0.0902906283736229 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:711/1000, acc:0.4310520887374878, loss:0.09028865396976471 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:712/1000, acc:0.4392237067222595, loss:0.09028634428977966 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:713/1000, acc:0.4157303273677826, loss:0.09028477221727371 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + 
Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:714/1000, acc:0.41675180196762085, loss:0.09028296172618866 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:715/1000, acc:0.44841673970222473, loss:0.09028134495019913 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:716/1000, acc:0.4208375811576843, loss:0.09027975797653198 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:717/1000, acc:0.45352399349212646, loss:0.09027853608131409 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:718/1000, acc:0.4678243100643158, loss:0.09027652442455292 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:719/1000, acc:0.43513789772987366, loss:0.09027565270662308 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + 
Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:720/1000, acc:0.4218590259552002, loss:0.090274378657341 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:721/1000, acc:0.41062307357788086, loss:0.0902731716632843 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:722/1000, acc:0.3840653598308563, loss:0.0902731791138649 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:723/1000, acc:0.3718079626560211, loss:0.09027284383773804 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:724/1000, acc:0.3840653598308563, loss:0.09027115255594254 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:725/1000, acc:0.43207353353500366, loss:0.0902695283293724 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training 
iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:726/1000, acc:0.41675180196762085, loss:0.09026742726564407 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:727/1000, acc:0.44433096051216125, loss:0.09026601165533066 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:728/1000, acc:0.4055158197879791, loss:0.09026593714952469 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:729/1000, acc:0.4034729301929474, loss:0.09026499837636948 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:730/1000, acc:0.4055158197879791, loss:0.09026387333869934 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:731/1000, acc:0.41062307357788086, loss:0.0902622789144516 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training 
iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:732/1000, acc:0.38304391503334045, loss:0.0902603268623352 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:733/1000, acc:0.37997958064079285, loss:0.09026049077510834 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:734/1000, acc:0.3820224702358246, loss:0.0902586430311203 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:735/1000, acc:0.3840653598308563, loss:0.09025765210390091 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:736/1000, acc:0.3850868344306946, loss:0.09025686234235764 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:737/1000, acc:0.41675180196762085, loss:0.09025522321462631 + Training iteration:100/979 + Training iteration:200/979 + Training 
iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:738/1000, acc:0.42900919914245605, loss:0.09025340527296066 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:739/1000, acc:0.4208375811576843, loss:0.09025222808122635 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:740/1000, acc:0.3973442316055298, loss:0.09025054425001144 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:741/1000, acc:0.3901940882205963, loss:0.09025054425001144 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:742/1000, acc:0.41062307357788086, loss:0.09024891257286072 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:743/1000, acc:0.3901940882205963, loss:0.0902474895119667 + Training iteration:100/979 + Training 
iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:744/1000, acc:0.3912155330181122, loss:0.09024707973003387 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:745/1000, acc:0.38304391503334045, loss:0.0902455672621727 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:746/1000, acc:0.40449437499046326, loss:0.09024501591920853 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:747/1000, acc:0.4024514853954315, loss:0.09024301916360855 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:748/1000, acc:0.3840653598308563, loss:0.09024170786142349 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:749/1000, acc:0.37691521644592285, loss:0.09024251252412796 + Training 
iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:750/1000, acc:0.3697650730609894, loss:0.09024093300104141 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:751/1000, acc:0.3697650730609894, loss:0.09024064242839813 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:752/1000, acc:0.3789581060409546, loss:0.0902387946844101 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:753/1000, acc:0.3850868344306946, loss:0.09023670107126236 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:754/1000, acc:0.3718079626560211, loss:0.09023679047822952 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:755/1000, acc:0.3748723268508911, 
loss:0.09023565798997879 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:756/1000, acc:0.3789581060409546, loss:0.09023436903953552 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:757/1000, acc:0.3881511688232422, loss:0.09023284167051315 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:758/1000, acc:0.39836567640304565, loss:0.09023125469684601 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:759/1000, acc:0.3973442316055298, loss:0.0902300700545311 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:760/1000, acc:0.3963227868080139, loss:0.09022805839776993 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:761/1000, 
acc:0.3748723268508911, loss:0.09022730588912964 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:762/1000, acc:0.372829407453537, loss:0.09022687375545502 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:763/1000, acc:0.3718079626560211, loss:0.09022583812475204 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:764/1000, acc:0.3656792640686035, loss:0.09022483229637146 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:765/1000, acc:0.34933605790138245, loss:0.09022467583417892 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:766/1000, acc:0.37691521644592285, loss:0.0902240052819252 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 
+Training epochs:767/1000, acc:0.3942798674106598, loss:0.09022166579961777 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:768/1000, acc:0.342185914516449, loss:0.09022077918052673 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:769/1000, acc:0.36159345507621765, loss:0.0902196541428566 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:770/1000, acc:0.3687436282634735, loss:0.09021885693073273 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:771/1000, acc:0.40143004059791565, loss:0.09021778404712677 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:772/1000, acc:0.38917261362075806, loss:0.09021565318107605 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + 
Training iteration:900/979 +Training epochs:773/1000, acc:0.4116445481777191, loss:0.09021525830030441 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:774/1000, acc:0.3881511688232422, loss:0.09021413326263428 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:775/1000, acc:0.4004085659980774, loss:0.09021365642547607 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:776/1000, acc:0.3963227868080139, loss:0.09021236002445221 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:777/1000, acc:0.38917261362075806, loss:0.09021120518445969 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:778/1000, acc:0.4157303273677826, loss:0.09021025896072388 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + 
Training iteration:800/979 + Training iteration:900/979 +Training epochs:779/1000, acc:0.3993871212005615, loss:0.09020857512950897 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:780/1000, acc:0.41062307357788086, loss:0.09020748734474182 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:781/1000, acc:0.4085801839828491, loss:0.09020629525184631 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:782/1000, acc:0.4024514853954315, loss:0.09020449966192245 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:783/1000, acc:0.43207353353500366, loss:0.09020362049341202 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:784/1000, acc:0.4341164529323578, loss:0.09020240604877472 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + 
Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:785/1000, acc:0.43513789772987366, loss:0.09020192176103592 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:786/1000, acc:0.4310520887374878, loss:0.09020080417394638 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:787/1000, acc:0.41981613636016846, loss:0.09019932895898819 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:788/1000, acc:0.42594483494758606, loss:0.0901990756392479 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:789/1000, acc:0.4218590259552002, loss:0.0901976227760315 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:790/1000, acc:0.42696627974510193, loss:0.09019728749990463 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + 
Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:791/1000, acc:0.46271705627441406, loss:0.09019584208726883 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:792/1000, acc:0.4279877543449402, loss:0.09019418805837631 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:793/1000, acc:0.45658835768699646, loss:0.09019342064857483 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:794/1000, acc:0.4218590259552002, loss:0.09019321948289871 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:795/1000, acc:0.4300306439399719, loss:0.09019308537244797 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:796/1000, acc:0.4514811038970947, loss:0.090191550552845 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + 
Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:797/1000, acc:0.4279877543449402, loss:0.09019062668085098 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:798/1000, acc:0.42696627974510193, loss:0.09019023925065994 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:799/1000, acc:0.4218590259552002, loss:0.09018916636705399 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:800/1000, acc:0.42288050055503845, loss:0.09018870443105698 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:801/1000, acc:0.3810010254383087, loss:0.09018868952989578 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:802/1000, acc:0.3912155330181122, loss:0.09018873423337936 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + 
Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:803/1000, acc:0.3575076460838318, loss:0.09018931537866592 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:804/1000, acc:0.3779366612434387, loss:0.09018758684396744 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:805/1000, acc:0.3881511688232422, loss:0.09018610417842865 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:806/1000, acc:0.4024514853954315, loss:0.09018522500991821 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:807/1000, acc:0.412665992975235, loss:0.09018398821353912 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:808/1000, acc:0.3820224702358246, loss:0.09018202871084213 + Training iteration:100/979 + Training iteration:200/979 + 
Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:809/1000, acc:0.41368743777275085, loss:0.09018125385046005 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:810/1000, acc:0.41368743777275085, loss:0.09018104523420334 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:811/1000, acc:0.3963227868080139, loss:0.09017948806285858 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:812/1000, acc:0.4218590259552002, loss:0.09017911553382874 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:813/1000, acc:0.39836567640304565, loss:0.09017805755138397 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:814/1000, acc:0.3881511688232422, loss:0.09017828851938248 + Training iteration:100/979 + 
Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:815/1000, acc:0.3871297240257263, loss:0.09017747640609741 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:816/1000, acc:0.4055158197879791, loss:0.09017559140920639 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:817/1000, acc:0.3850868344306946, loss:0.09017506241798401 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:818/1000, acc:0.37078651785850525, loss:0.09017510712146759 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:819/1000, acc:0.4024514853954315, loss:0.09017296135425568 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:820/1000, acc:0.4024514853954315, loss:0.09017197787761688 + 
Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:821/1000, acc:0.40143004059791565, loss:0.09017129242420197 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:822/1000, acc:0.40755873918533325, loss:0.09017017483711243 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:823/1000, acc:0.4330950081348419, loss:0.09016953408718109 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:824/1000, acc:0.4310520887374878, loss:0.09016793966293335 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:825/1000, acc:0.4177732467651367, loss:0.09016798436641693 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:826/1000, acc:0.4249233901500702, 
loss:0.09016680717468262 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:827/1000, acc:0.3973442316055298, loss:0.09016638249158859 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:828/1000, acc:0.4055158197879791, loss:0.09016551822423935 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:829/1000, acc:0.4361593425273895, loss:0.09016430377960205 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:830/1000, acc:0.41368743777275085, loss:0.09016343951225281 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:831/1000, acc:0.4116445481777191, loss:0.09016352891921997 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:832/1000, 
acc:0.4004085659980774, loss:0.0901627391576767 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:833/1000, acc:0.4055158197879791, loss:0.09016214311122894 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:834/1000, acc:0.40143004059791565, loss:0.09016067534685135 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:835/1000, acc:0.4249233901500702, loss:0.0901605561375618 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:836/1000, acc:0.3912155330181122, loss:0.09016009420156479 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:837/1000, acc:0.41368743777275085, loss:0.09015905857086182 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 
+Training epochs:838/1000, acc:0.42696627974510193, loss:0.09015762805938721 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:839/1000, acc:0.4065372943878174, loss:0.09015774726867676 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:840/1000, acc:0.3667007088661194, loss:0.09015733003616333 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:841/1000, acc:0.39223697781562805, loss:0.09015699476003647 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:842/1000, acc:0.39223697781562805, loss:0.0901554599404335 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:843/1000, acc:0.4034729301929474, loss:0.09015427529811859 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + 
Training iteration:900/979 +Training epochs:844/1000, acc:0.412665992975235, loss:0.09015404433012009 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:845/1000, acc:0.4218590259552002, loss:0.09015238285064697 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:846/1000, acc:0.3901940882205963, loss:0.09015139192342758 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:847/1000, acc:0.41981613636016846, loss:0.09015117585659027 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:848/1000, acc:0.4187946915626526, loss:0.09015017002820969 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:849/1000, acc:0.43207353353500366, loss:0.09014932811260223 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + 
Training iteration:800/979 + Training iteration:900/979 +Training epochs:850/1000, acc:0.43207353353500366, loss:0.09014879167079926 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:851/1000, acc:0.43513789772987366, loss:0.09014788269996643 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:852/1000, acc:0.4453524053096771, loss:0.09014762192964554 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:853/1000, acc:0.3881511688232422, loss:0.09014692902565002 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:854/1000, acc:0.39530134201049805, loss:0.09014664590358734 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:855/1000, acc:0.412665992975235, loss:0.09014610201120377 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + 
Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:856/1000, acc:0.39530134201049805, loss:0.09014470130205154 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:857/1000, acc:0.42288050055503845, loss:0.09014403820037842 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:858/1000, acc:0.39530134201049805, loss:0.09014339745044708 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:859/1000, acc:0.3503575026988983, loss:0.09014416486024857 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:860/1000, acc:0.37691521644592285, loss:0.09014513343572617 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:861/1000, acc:0.372829407453537, loss:0.0901443138718605 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + 
Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:862/1000, acc:0.38610827922821045, loss:0.09014204889535904 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:863/1000, acc:0.4024514853954315, loss:0.09014172852039337 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:864/1000, acc:0.3871297240257263, loss:0.0901411920785904 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:865/1000, acc:0.38610827922821045, loss:0.09014067053794861 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:866/1000, acc:0.39223697781562805, loss:0.09013935923576355 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:867/1000, acc:0.4024514853954315, loss:0.09013821929693222 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + 
Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:868/1000, acc:0.3871297240257263, loss:0.09013786166906357 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:869/1000, acc:0.3748723268508911, loss:0.09013757854700089 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:870/1000, acc:0.3667007088661194, loss:0.09013748914003372 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:871/1000, acc:0.39530134201049805, loss:0.09013639390468597 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:872/1000, acc:0.39223697781562805, loss:0.09013567864894867 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:873/1000, acc:0.36465781927108765, loss:0.09013597667217255 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + 
Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:874/1000, acc:0.41981613636016846, loss:0.09013505280017853 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:875/1000, acc:0.41981613636016846, loss:0.0901331827044487 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:876/1000, acc:0.3942798674106598, loss:0.09013213217258453 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:877/1000, acc:0.39223697781562805, loss:0.09013187140226364 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:878/1000, acc:0.4034729301929474, loss:0.09013094007968903 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:879/1000, acc:0.4085801839828491, loss:0.09013045579195023 + Training iteration:100/979 + Training iteration:200/979 + 
Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:880/1000, acc:0.3973442316055298, loss:0.09012936800718307 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:881/1000, acc:0.4034729301929474, loss:0.09012909233570099 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:882/1000, acc:0.3993871212005615, loss:0.09012842923402786 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:883/1000, acc:0.3779366612434387, loss:0.09012869000434875 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:884/1000, acc:0.3932584226131439, loss:0.09012776613235474 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:885/1000, acc:0.4034729301929474, loss:0.09012696146965027 + Training iteration:100/979 + 
Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:886/1000, acc:0.3942798674106598, loss:0.09012553840875626 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:887/1000, acc:0.4085801839828491, loss:0.09012515842914581 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:888/1000, acc:0.4157303273677826, loss:0.09012453258037567 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:889/1000, acc:0.4514811038970947, loss:0.0901237204670906 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:890/1000, acc:0.4341164529323578, loss:0.09012244641780853 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:891/1000, acc:0.375893771648407, loss:0.09012418985366821 + Training 
iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:892/1000, acc:0.3881511688232422, loss:0.09012413769960403 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:893/1000, acc:0.3718079626560211, loss:0.09012296795845032 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:894/1000, acc:0.38917261362075806, loss:0.09012305736541748 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:895/1000, acc:0.409601628780365, loss:0.09012246876955032 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:896/1000, acc:0.41675180196762085, loss:0.09012124687433243 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:897/1000, acc:0.3810010254383087, 
loss:0.0901208445429802 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:898/1000, acc:0.38917261362075806, loss:0.09012032300233841 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:899/1000, acc:0.4024514853954315, loss:0.09011931717395782 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:900/1000, acc:0.39223697781562805, loss:0.09011884778738022 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:901/1000, acc:0.4034729301929474, loss:0.09011814743280411 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:902/1000, acc:0.4004085659980774, loss:0.09011786431074142 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:903/1000, 
acc:0.4055158197879791, loss:0.09011673927307129 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:904/1000, acc:0.4310520887374878, loss:0.09011641144752502 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:905/1000, acc:0.40449437499046326, loss:0.09011456370353699 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:906/1000, acc:0.40143004059791565, loss:0.09011443704366684 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:907/1000, acc:0.4004085659980774, loss:0.09011389315128326 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:908/1000, acc:0.41981613636016846, loss:0.09011382609605789 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 
+Training epochs:909/1000, acc:0.412665992975235, loss:0.09011217951774597 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:910/1000, acc:0.40755873918533325, loss:0.09011241793632507 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:911/1000, acc:0.4177732467651367, loss:0.09011119604110718 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:912/1000, acc:0.4330950081348419, loss:0.09011083096265793 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:913/1000, acc:0.41675180196762085, loss:0.09010979533195496 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:914/1000, acc:0.42900919914245605, loss:0.09010996669530869 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + 
Training iteration:900/979 +Training epochs:915/1000, acc:0.443309485912323, loss:0.09010972082614899 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:916/1000, acc:0.42594483494758606, loss:0.09010892361402512 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:917/1000, acc:0.3932584226131439, loss:0.09010870009660721 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:918/1000, acc:0.4116445481777191, loss:0.09010773152112961 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:919/1000, acc:0.42288050055503845, loss:0.09010721743106842 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:920/1000, acc:0.4218590259552002, loss:0.09010636061429977 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + 
Training iteration:800/979 + Training iteration:900/979 +Training epochs:921/1000, acc:0.42900919914245605, loss:0.09010586142539978 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:922/1000, acc:0.4392237067222595, loss:0.09010487794876099 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:923/1000, acc:0.4525025486946106, loss:0.09010449051856995 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:924/1000, acc:0.41368743777275085, loss:0.0901031568646431 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:925/1000, acc:0.4453524053096771, loss:0.09010341018438339 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:926/1000, acc:0.4310520887374878, loss:0.09010251611471176 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + 
Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:927/1000, acc:0.4361593425273895, loss:0.09010208398103714 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:928/1000, acc:0.43513789772987366, loss:0.09010166674852371 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:929/1000, acc:0.4361593425273895, loss:0.09010091423988342 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:930/1000, acc:0.4177732467651367, loss:0.09009983390569687 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:931/1000, acc:0.4586312472820282, loss:0.09010006487369537 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:932/1000, acc:0.41062307357788086, loss:0.09009929746389389 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + 
Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:933/1000, acc:0.4300306439399719, loss:0.0900997668504715 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:934/1000, acc:0.4116445481777191, loss:0.09009966999292374 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:935/1000, acc:0.449438214302063, loss:0.09009919315576553 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:936/1000, acc:0.44433096051216125, loss:0.09009742736816406 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:937/1000, acc:0.44841673970222473, loss:0.09009716659784317 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:938/1000, acc:0.43513789772987366, loss:0.09009674936532974 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + 
Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:939/1000, acc:0.37691521644592285, loss:0.0900973528623581 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:940/1000, acc:0.4024514853954315, loss:0.09009850770235062 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:941/1000, acc:0.4004085659980774, loss:0.09009690582752228 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:942/1000, acc:0.4279877543449402, loss:0.09009658545255661 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:943/1000, acc:0.4116445481777191, loss:0.0900954008102417 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:944/1000, acc:0.409601628780365, loss:0.0900946855545044 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training 
iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:945/1000, acc:0.3973442316055298, loss:0.09009482711553574 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:946/1000, acc:0.43513789772987366, loss:0.09009455889463425 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:947/1000, acc:0.412665992975235, loss:0.09009382128715515 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:948/1000, acc:0.4392237067222595, loss:0.09009294956922531 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:949/1000, acc:0.4300306439399719, loss:0.09009277075529099 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:950/1000, acc:0.409601628780365, loss:0.09009265154600143 + Training iteration:100/979 + Training iteration:200/979 + Training 
iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:951/1000, acc:0.41981613636016846, loss:0.09009211510419846 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:952/1000, acc:0.42900919914245605, loss:0.09009180217981339 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:953/1000, acc:0.4034729301929474, loss:0.09009075164794922 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:954/1000, acc:0.3942798674106598, loss:0.09009130299091339 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:955/1000, acc:0.3993871212005615, loss:0.09009113907814026 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:956/1000, acc:0.4085801839828491, loss:0.09008997678756714 + Training iteration:100/979 + Training 
iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:957/1000, acc:0.40755873918533325, loss:0.09008888900279999 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:958/1000, acc:0.4310520887374878, loss:0.09008895605802536 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:959/1000, acc:0.4279877543449402, loss:0.09008756279945374 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:960/1000, acc:0.409601628780365, loss:0.09008778631687164 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:961/1000, acc:0.4187946915626526, loss:0.09008752554655075 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:962/1000, acc:0.43207353353500366, loss:0.09008726477622986 + Training 
iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:963/1000, acc:0.443309485912323, loss:0.09008609503507614 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:964/1000, acc:0.4034729301929474, loss:0.09008572995662689 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:965/1000, acc:0.4330950081348419, loss:0.090085968375206 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:966/1000, acc:0.3779366612434387, loss:0.09008494764566422 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:967/1000, acc:0.4024514853954315, loss:0.09008580446243286 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:968/1000, acc:0.4157303273677826, 
loss:0.09008549898862839 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:969/1000, acc:0.4300306439399719, loss:0.09008453786373138 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:970/1000, acc:0.4147088825702667, loss:0.09008382260799408 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:971/1000, acc:0.42900919914245605, loss:0.09008368104696274 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:972/1000, acc:0.4453524053096771, loss:0.0900825709104538 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:973/1000, acc:0.42594483494758606, loss:0.0900832861661911 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:974/1000, 
acc:0.40449437499046326, loss:0.09008251130580902 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:975/1000, acc:0.4085801839828491, loss:0.09008202701807022 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:976/1000, acc:0.4065372943878174, loss:0.09008125215768814 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:977/1000, acc:0.42594483494758606, loss:0.09008059650659561 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:978/1000, acc:0.3850868344306946, loss:0.09008152782917023 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:979/1000, acc:0.4065372943878174, loss:0.09008105844259262 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 
+Training epochs:980/1000, acc:0.4187946915626526, loss:0.09007972478866577 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:981/1000, acc:0.44228804111480713, loss:0.09007934480905533 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:982/1000, acc:0.44126659631729126, loss:0.09007883816957474 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:983/1000, acc:0.4300306439399719, loss:0.09007862955331802 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:984/1000, acc:0.43820226192474365, loss:0.09007799625396729 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:985/1000, acc:0.4177732467651367, loss:0.09007740765810013 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + 
Training iteration:900/979 +Training epochs:986/1000, acc:0.41675180196762085, loss:0.09007757902145386 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:987/1000, acc:0.42696627974510193, loss:0.090077243745327 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:988/1000, acc:0.4330950081348419, loss:0.0900762528181076 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:989/1000, acc:0.4279877543449402, loss:0.09007611125707626 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:990/1000, acc:0.4239019453525543, loss:0.09007520228624344 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:991/1000, acc:0.4555669128894806, loss:0.09007437527179718 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + 
Training iteration:800/979 + Training iteration:900/979 +Training epochs:992/1000, acc:0.4525025486946106, loss:0.09007372707128525 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:993/1000, acc:0.4361593425273895, loss:0.09007428586483002 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:994/1000, acc:0.43513789772987366, loss:0.09007386118173599 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:995/1000, acc:0.41675180196762085, loss:0.09007427096366882 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:996/1000, acc:0.43820226192474365, loss:0.09007324278354645 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:997/1000, acc:0.43513789772987366, loss:0.09007290750741959 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + 
Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:998/1000, acc:0.4239019453525543, loss:0.09007211029529572 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:999/1000, acc:0.4147088825702667, loss:0.09007202088832855 + Training iteration:100/979 + Training iteration:200/979 + Training iteration:300/979 + Training iteration:400/979 + Training iteration:500/979 + Training iteration:600/979 + Training iteration:700/979 + Training iteration:800/979 + Training iteration:900/979 +Training epochs:1000/1000, acc:0.4300306439399719, loss:0.09007173776626587 +============Testing +test_acc 0.1375 test_loss 0.09008622 +>>>>>>> Stashed changes diff --git a/dl_src/dl_scripts/c3d.sh b/dl_src/dl_scripts/c3d.sh new file mode 100644 index 0000000..2ddd2d8 --- /dev/null +++ b/dl_src/dl_scripts/c3d.sh @@ -0,0 +1,19 @@ +#! /bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-06 21:26:08 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 200 \ +--net "c3d" diff --git a/dl_src/dl_scripts/c3d_train_num_1.sh b/dl_src/dl_scripts/c3d_train_num_1.sh new file mode 100644 index 0000000..5d378df --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_1.sh @@ -0,0 +1,20 @@ +#! 
/bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:20:27 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_1.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 10000 \ +--net "c3d" \ +--train_data_num 1 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_12.sh b/dl_src/dl_scripts/c3d_train_num_12.sh new file mode 100644 index 0000000..d378c9e --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_12.sh @@ -0,0 +1,20 @@ +#! /bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:21:53 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_12.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 300 \ +--net "c3d" \ +--train_data_num 12 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_2.sh b/dl_src/dl_scripts/c3d_train_num_2.sh new file mode 100644 index 0000000..b1937e8 --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_2.sh @@ -0,0 +1,20 @@ +#! 
/bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:20:37 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_2.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 10000 \ +--net "c3d" \ +--train_data_num 2 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_24.sh b/dl_src/dl_scripts/c3d_train_num_24.sh new file mode 100644 index 0000000..0ae1fc4 --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_24.sh @@ -0,0 +1,20 @@ +#! /bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:22:00 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_24.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 300 \ +--net "c3d" \ +--train_data_num 24 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_3.sh b/dl_src/dl_scripts/c3d_train_num_3.sh new file mode 100644 index 0000000..f7fb130 --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_3.sh @@ -0,0 +1,20 @@ +#! 
/bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:20:32 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_3.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 10000 \ +--net "c3d" \ +--train_data_num 3 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_36.sh b/dl_src/dl_scripts/c3d_train_num_36.sh new file mode 100644 index 0000000..7923c66 --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_36.sh @@ -0,0 +1,20 @@ +#! /bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:22:06 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_36.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 300 \ +--net "c3d" \ +--train_data_num 36 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_48.sh b/dl_src/dl_scripts/c3d_train_num_48.sh new file mode 100644 index 0000000..41c7653 --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_48.sh @@ -0,0 +1,20 @@ +#! 
/bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:22:27 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_48.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 300 \ +--net "c3d" \ +--train_data_num 36 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_6.sh b/dl_src/dl_scripts/c3d_train_num_6.sh new file mode 100644 index 0000000..999e9cf --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_6.sh @@ -0,0 +1,20 @@ +#! /bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:20:43 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_6.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 10000 \ +--net "c3d" \ +--train_data_num 6 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_60.sh b/dl_src/dl_scripts/c3d_train_num_60.sh new file mode 100644 index 0000000..bb4279e --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_60.sh @@ -0,0 +1,20 @@ +#! 
/bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:22:31 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_60.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 300 \ +--net "c3d" \ +--train_data_num 60 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_72.sh b/dl_src/dl_scripts/c3d_train_num_72.sh new file mode 100644 index 0000000..d9bef85 --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_72.sh @@ -0,0 +1,20 @@ +#! /bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:22:36 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_72.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 300 \ +--net "c3d" \ +--train_data_num 72 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_84.sh b/dl_src/dl_scripts/c3d_train_num_84.sh new file mode 100644 index 0000000..a56769b --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_84.sh @@ -0,0 +1,20 @@ +#! 
/bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:22:41 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_84.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 300 \ +--net "c3d" \ +--train_data_num 84 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_90.sh b/dl_src/dl_scripts/c3d_train_num_90.sh new file mode 100644 index 0000000..29338dc --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_90.sh @@ -0,0 +1,20 @@ +#! /bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:22:46 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_90.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 300 \ +--net "c3d" \ +--train_data_num 90 \ No newline at end of file diff --git a/dl_src/dl_scripts/c3d_train_num_96.sh b/dl_src/dl_scripts/c3d_train_num_96.sh new file mode 100644 index 0000000..4ab5da3 --- /dev/null +++ b/dl_src/dl_scripts/c3d_train_num_96.sh @@ -0,0 +1,20 @@ +#! 
/bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-07 12:22:51 + # @FilePath : /SGF_v2/dl_src/dl_scripts/c3d_train_num_96.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 300 \ +--net "c3d" \ +--train_data_num 96 \ No newline at end of file diff --git a/dl_src/dl_scripts/convnet.sh b/dl_src/dl_scripts/convnet.sh new file mode 100644 index 0000000..6d2ca40 --- /dev/null +++ b/dl_src/dl_scripts/convnet.sh @@ -0,0 +1,19 @@ +#! /bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Yanhong Wang + # @LastEditTime : 2021-11-06 21:10:13 + # @FilePath : /SGF_v2/dl_src/dl_scripts/convnet.sh + # @Description : +### +python="/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir="/yhwang/0-Projects/1-snn" +dl_main="$dl_dir/main_dl.py" +result_dir="/yhwang/0-Projects/1-snn/dl_src/dl_results" + +${python} ${dl_main} \ +--epochs 100 \ +--net "convnet" \ No newline at end of file diff --git a/dl_src/dl_scripts/epochs.sh b/dl_src/dl_scripts/epochs.sh new file mode 100644 index 0000000..d04815e --- /dev/null +++ b/dl_src/dl_scripts/epochs.sh @@ -0,0 +1,26 @@ +#! 
/bin/bash +### + # @Author : Zihao Zhao + # @E-mail : zhzhao18@fudan.edu.cn + # @Company : IBICAS, Fudan University + # @Date : 2021-08-07 20:36:58 + # @LastEditors : Zihao Zhao + # @LastEditTime : 2021-09-05 22:30:58 + # @FilePath : /SGF_v2/script/10_2122.sh + # @Description : +### +python = "/yhwang/anaconda3/envs/sgf/bin/python" +dl_dir = "/yhwang/0-Projects/1-snn/dl_src" +dl_main = "$dl_dir/main_dl.py" +result_dir = "/yhwang/0-Projects/1-snn/dl_src/dl_results" + +cd $dl_main + +event_list=("1_2_3_4_5_6_7_8_9_10") + +for event in ${event_list[*]} +do + ${python} ${main} \ + --selected_events ${event} \ + --epochs 1 +done \ No newline at end of file diff --git a/dl_src/experiments.md b/dl_src/experiments.md new file mode 100644 index 0000000..e69de29 diff --git a/dl_src/model_cfg.py b/dl_src/model_cfg.py new file mode 100644 index 0000000..49c3d5a --- /dev/null +++ b/dl_src/model_cfg.py @@ -0,0 +1,22 @@ +import argparse + +cnn_parser = argparse.ArgumentParser(description='CNN configuration.') +cnn_parser.add_argument('--in_channels', type=list, help='in_channels', default=[]) +cnn_parser.add_argument('--out_channels', type=list, help='out_channels', default=[]) +cnn_parser.add_argument('--kernels', type=list, help='kernels', default=[]) +cnn_parser.add_argument('--strides', type=list, help='strides', default=[]) +cnn_parser.add_argument('--pads', type=list, help='pads', default=[]) +cnn_parser.add_argument('--groups', type=list, help='groups', default=[]) +cnn_parser.add_argument('--class_num', type=int, help='class_num', default=10) +cnn_parser.add_argument('--frame_scale', type=int, help='class_num', default=0.25) +cnn_parser.add_argument('--feature_scale', type=int, help='class_num', default=0.5) + +cnn_cfg = cnn_parser.parse_known_args()[0] + +cnn_cfg.in_channels = [80, 6, 12, 252, 256, 256, 512, 512, 512, 512, 512, 1024, 1024, 1024, 1024, 1024, 968] +cnn_cfg.out_channels = [6, 12, 252, 256, 256, 512, 512, 512, 512, 512, 1024, 1024, 1024, 1024, 1024, 968, 
2640] +cnn_cfg.kernels = [2, 3, 4, 1, 2, 3, 1, 1, 1, 2, 3, 1, 1, 3, 1, 1, 1] +cnn_cfg.strides = [2, 2, 2, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 3, 1, 1, 1] +cnn_cfg.pads = [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0] +cnn_cfg.groups = [1, 1, 2, 2, 2, 32, 4, 4, 4, 16, 64, 8, 8, 32, 8, 8, 8] +cnn_cfg.in_channels[0] = int(cnn_cfg.in_channels[0]*cnn_cfg.frame_scale) diff --git a/dl_src/test b/dl_src/test new file mode 100644 index 0000000..56a6051 --- /dev/null +++ b/dl_src/test @@ -0,0 +1 @@ +1 \ No newline at end of file diff --git a/dl_src/utils.py b/dl_src/utils.py new file mode 100644 index 0000000..ddc98ad --- /dev/null +++ b/dl_src/utils.py @@ -0,0 +1,21 @@ +import torch +import numpy as np + +from visualization_utils import save_visualize_gif + + +def visualize_batch_data(x, y, save_dir, git_end_str=""): + # x:[b_size, frame, h, w] + for b_cnt in range(x.shape[0]): + input_list = list() + input_tensor = x[b_cnt] + for frame_cnt in range(input_tensor.shape[0]): + input_list.append(input_tensor[frame_cnt]) + input_shape = tuple(input_list[0].shape) + if type(y[b_cnt])==torch.Tensor: + label = int(torch.where(y[b_cnt]==1)[0]) + elif type(y[b_cnt])==np.int64: + label = int(y[b_cnt]) + # image_name = f"/yhwang/0-Projects/1-snn/dl_src/dl_visualize/label{label}_train_batch{b_i}_data{b_cnt}.gif" + image_name = f"{save_dir}/label{label}_data{b_cnt}{git_end_str}.gif" + save_visualize_gif(input_list, input_shape, image_name) \ No newline at end of file diff --git a/dvsgesture_i.py b/dvsgesture_i.py new file mode 100644 index 0000000..04e2300 --- /dev/null +++ b/dvsgesture_i.py @@ -0,0 +1,157 @@ +import numpy as np +import os +import cv2 +import h5py +import cfg +from base import DatasetBase +from visualization_utils import save_visualize, save_curve, visualize, save_vis_formatted + +class DatasetGesture_i(DatasetBase): + def __init__(self, root): + super(DatasetGesture_i, self).__init__(root) + + self.input_shape = (128, 128) + self.input_channel = self.input_shape[0] * 
self.input_shape[1] #? + self.event_num = 11 + + self.root = root + self.if_save_png = False + # self.preloaded = self.check_npy_files(self.root) + self.preloaded = True + self.if_dvs = True + + self.test_np_folder = os.path.join(self.root, 'test_npy') + self.test_data_filenames = os.listdir(self.test_np_folder) + self.test_data_filenames.sort() + if ".DS_Store" in self.test_data_filenames: + self.test_data_filenames.remove(".DS_Store") + self.test_label = self.get_labels(self.root) #? + + def test_len(self): + return len(self.test_data_filenames) + + + def get_test_sample(self, i, reverse=False): + assert i < self.test_len() + self.test_data_filenames.sort(reverse=reverse) + data_filename = self.test_data_filenames[i] + np_name = os.path.join(self.test_np_folder, data_filename) + # print(np_name) + video = np.load(np_name) + test_label = int(data_filename.split('_')[1]) + # class_i = int(data_filename.split('_')[2][:-4]) + # print(data_filename, video.shape) + return video, test_label + + def get_test_data_file_name(self,i): + return self.test_data_filenames[i] + + + def read_from_npy(self, file_name): + video = np.load(file_name) + return video + + def collect_data(self, dir, file_names): + data = list() + for data_filename in file_names: + # print(data_filename.split(".")[0]) + save_dir = os.path.join(self.save_folder, data_filename.split(".")[0]) + if not os.path.exists(save_dir): + os.mkdir(save_dir) + f = h5py.File(os.path.join(dir, data_filename),'r') + + + step = 1000 + video = list() + image = np.zeros((128, 128)) + for i, addr in enumerate(f['addrs']): + if addr[2] == 0: + image[addr[1]][addr[0]] = -1 + elif addr[2] == 1: + image[addr[1]][addr[0]] = 1 + if i % step == step - 1: + video.append(image) + image = np.zeros((128, 128)) + video = np.array(video) + data.append(video) + print(data_filename, len(video)) + + # if self.if_save_png: + # for i, image in enumerate(video): + # vis.save_visualize(image, (128, 128), os.path.join(save_dir, str(i)+".png")) + 
+ return data + + def collect_data_npy(self, dir, file_names): + data = list() + for data_filename in file_names: + np_name = os.path.join(dir, data_filename) + print(np_name) + video = np.load(np_name) + data.append(video) + + # print(video.shape) + return data + + def save_data(self, data, dir, data_filenames): + for i, video in enumerate(data): + np_name = os.path.join(dir, data_filenames[i].replace('.hdf5', '')) + np.save(np_name, video) + print('saved in', np_name + '.npy') + + + + def h5pt2npy(self, root): + + test_folder = os.path.join(root, 'test') + + test_np_folder = os.path.join(root, 'test_npy') + + if not os.path.exists(test_np_folder): + os.mkdir(test_np_folder) + + if self.preloaded == False: + test_data_filenames = os.listdir(test_folder) + test_data_filenames.sort() + test_data = self.collect_data(test_folder, test_data_filenames) + self.save_data(test_data, test_np_folder, test_data_filenames) + test_data_filenames = os.listdir(test_np_folder) + test_data_filenames.sort() + test_np_data = self.collect_data_npy(test_np_folder, test_data_filenames) + else: + test_data_filenames = os.listdir(test_np_folder) + test_data_filenames.sort() + test_np_data = self.collect_data_npy(test_np_folder, test_data_filenames) + + return test_np_data, test_data_filenames + + + def get_labels(self, root): + test_folder = os.path.join(root, 'test_npy') + test_data_filenames = os.listdir(test_folder) + test_data_filenames.sort() + test_label = list() + + for t in test_data_filenames: + if '.npy' in t: + test_label.append(int(t.split('_')[1])) + + return test_label + + + def check_npy_files(self, root): + test_folder = os.path.join(root, 'test') + test_np_folder = os.path.join(root, 'test_npy') + if len(os.listdir(test_folder)) != len(os.listdir(test_np_folder)): + return False + else: + return True + + def dataconvert(self, event_number): + self.train_dataset = np.full((80,128,128,event_number),0) + for i in range(0,event_number): + sample = self.train_np_data[i] + 
self.train_dataset[:,:,:,i] = sample[0:80,:,:] + + + \ No newline at end of file diff --git a/dvsgesture_t.py b/dvsgesture_t.py new file mode 100644 index 0000000..78cfcaa --- /dev/null +++ b/dvsgesture_t.py @@ -0,0 +1,301 @@ +import numpy as np +import os +import cv2 +import h5py +import shutil +import random + +import cfg +from base import DatasetBase +import visualization_utils as vis + +class DatasetGesture(DatasetBase): + def __init__(self, root): + super(DatasetGesture, self).__init__(root) + self.save_folder = cfg.code_path + "/output/dvsframe" + if not os.path.exists(self.save_folder): + os.makedirs(self.save_folder) + + self.input_shape = (128, 128) + self.input_channel = self.input_shape[0] * self.input_shape[1] #? + self.event_num = 11 + self.root = root + self.if_save_png = False + self.batch_true = True + self.if_dvs = True + self.train_num = 98 + + def train_len(self): + return 131#len(self.train_np_data) + + def get_train_sample(self, i): + return self.train_np_data[i], self.train_label[i] + + def get_train_label(self, i): + return self.train_label[i] + + def read_from_npy(self, file_name): + video = np.load(file_name) + return video + + def collect_save_data(self, dir, file_names): + data = list() + if not os.path.exists(dir.replace("train_label", "train_npy")): + os.mkdir(dir.replace("train_label", "train_npy")) + if not os.path.exists(dir.replace("test_label", "test_npy")): + os.mkdir(dir.replace("test_label", "test_npy")) + + for data_filename in file_names: + # print(data_filename.split(".")[0]) + # save_dir = os.path.join(self.save_folder, data_filename.split(".")[0]) + # if not os.path.exists(save_dir): + # os.mkdir(save_dir) + try: + f = h5py.File(os.path.join(dir, data_filename),'r') + step = 1000 + video = list() + image = np.zeros((128, 128)) + for i, addr in enumerate(f['addrs']): + if addr[2] == 0: + image[addr[1]][addr[0]] = -1 + elif addr[2] == 1: + image[addr[1]][addr[0]] = 1 + if i % step == step - 1: + video.append(image) + image = 
np.zeros((128, 128)) + video = np.array(video) + print(data_filename, len(video)) + + np_name = os.path.join(dir.replace("train_label", "train_npy") + .replace("test_label", "test_npy")\ + , data_filename.replace('.hdf5', '')) + np.save(np_name, video) + print('saved in', np_name + '.npy') + + # if self.if_save_png: + # for i, image in enumerate(video): + # vis.save_visualize(image, (128, 128), os.path.join(save_dir, str(i)+".png")) + except: + print(os.path.join(dir, data_filename)) + + def collect_data_npy(self, dir, file_names): + data = list() + file_names.sort() + for data_filename in file_names: + np_name = os.path.join(dir, data_filename) + video = np.load(np_name) + data.append(video) + return data + + def save_data(self, data, dir, data_filenames): + for i, video in enumerate(data): + np_name = os.path.join(dir, data_filenames[i].replace('.hdf5', '')) + np.save(np_name, video) + print('saved in', np_name + '.npy') + + + + def h5pt2npy(self, generate_npy=False, dir_name='train_label'): + root = self.root + train_folder = os.path.join(root, dir_name) + + if generate_npy == True: + train_data_filenames = os.listdir(train_folder) + train_data_filenames.sort(reverse=True) + + # train_data_filenames = train_data_filenames[520:] + self.collect_save_data(train_folder, train_data_filenames) + + + + def get_labels(self, train_np_folder): + train_data_filenames = os.listdir(train_np_folder) + train_data_filenames.sort() + + train_label = list() + for t in train_data_filenames: + train_label.append(int(t.split('_')[1].replace("10", "0"))) + + return train_label + + + def check_npy_files(self, root): + train_folder = os.path.join(root, 'train') + + train_np_folder = os.path.join(root, 'train_npy') + if len(os.listdir(train_folder)) != len(os.listdir(train_np_folder)): + return False + else: + return True + + # def dataconvert(self, event_number,frame): + # self.train_dataset = np.full((80,128,128,event_number),0) + # for i in range(0,event_number): + # sample = 
self.train_np_data[i] + # self.train_dataset[:,:,:,i] = sample[0:frame,:,:] + + def dataconvert(self, event_number, frame, data): + self.train_dataset = np.full((80, 128, 128, event_number),0) + for i in range(0,event_number): + sample = data[i] + if np.shape(sample)[0] >= frame: + self.train_dataset[:,:,:,i] = sample[0:frame,:,:] + else: + self.train_dataset[0:np.shape(sample)[0],:,:,i] = sample + return self.train_dataset + + # def batch_generations(self,train_np_folder,times): + # if self.batch_true == False: + # for i in range(0,times): + # batch_folder = os.path.join(self.root, 'batch') + # batch_np_folder = os.path.join(batch_folder, str(i)) + # if not os.path.exists(batch_np_folder): + # os.mkdir(batch_np_folder) + + # batch_data_filenames = os.listdir(train_np_folder) + # batch_data_filenames.sort() + # for j in range(1,11): + # k = 0 + # while(j != int(batch_data_filenames[k].split('_')[1])): + # k = k+1 + # path = os.path.join(train_np_folder, batch_data_filenames[k]) + # shutil.move(path, batch_np_folder) + # else: + # pass + + + def get_train_data(self, train_data_num, selected_event): + self.event_num = len(selected_event) + random.seed(0) + train_data_folder = os.path.join(cfg.data_path, 'train_npy') + train_filenames_all = os.listdir(train_data_folder) + + all_data_list = range(0, 98) + selected_sample = random.sample(all_data_list, train_data_num) + train_filenames = list() + + for filename in train_filenames_all: + for event in selected_event: + for sample in selected_sample: + match_str = "train_" + str(int(event)+1) + "_" + str(sample) + ".npy" + if match_str in filename: + train_filenames.append(filename) + train_filenames.sort() + + cut_frame = 80 + + train_data = list() + train_label = list() + + # load np data and trancate 80 frame + for filename in train_filenames: + np_name = os.path.join(train_data_folder, filename) + sample = np.load(np_name) + event = int(filename.split("_")[-2]) + event_i = selected_event.index(event-1) + + if 
np.shape(sample)[0] >= cut_frame: + train_data.append(sample[0:cut_frame, :, :] ) + else: + data = np.zeros((cut_frame, 128, 128)) + data[0:np.shape(sample)[0], :, :] = sample + train_data.append(data) + train_label.append(event_i) + train_data = np.array(train_data) + train_label = np.array(train_label) + return train_data, train_label + + + + def get_test_data(self, test_data_num, selected_event): + self.event_num = len(selected_event) + random.seed(0) + test_data_folder = os.path.join(cfg.data_path, 'test_npy') + test_filenames_all = os.listdir(test_data_folder) + + all_data_list = range(0, 24) + selected_sample = random.sample(all_data_list, test_data_num) + test_filenames = list() + + for filename in test_filenames_all: + for event in selected_event: + for sample in selected_sample: + match_str = "test_" + str(event+1) + "_" + str(sample) + ".npy" + if match_str in filename: + test_filenames.append(filename) + + test_filenames.sort() + + cut_frame = 80 + test_data = list() + test_label = list() + + # load np data and trancate 80 frame + for filename in test_filenames: + np_name = os.path.join(test_data_folder, filename) + sample = np.load(np_name) + event = int(filename.split("_")[-2]) + event_i = selected_event.index(event-1) + + if np.shape(sample)[0] >= cut_frame: + test_data.append(sample[0:cut_frame, :, :] ) + else: + data = np.zeros((cut_frame, 128, 128)) + data[0:np.shape(sample)[0], :, :] = sample + test_data.append(data) + test_label.append(event_i) + test_data = np.array(test_data) + test_label = np.array(test_label) + return test_data, test_label + + + + def get_batch(self, train_data_num, batch_size, selected_event): + self.event_num = len(selected_event) + random.seed(0) + train_data_folder = os.path.join(cfg.data_path, 'train_npy') + train_filenames_all = os.listdir(train_data_folder) + + all_data_list = range(0, train_data_num) + assert batch_size <= train_data_num + selected_sample = random.sample(all_data_list, train_data_num) + 
train_filenames = list() + for filename in train_filenames_all: + for event in selected_event: + for sample in selected_sample: + match_str = "train_" + str(event) + "_" + str(sample) + ".npy" + if match_str in filename: + train_filenames.append(filename) + + selected_batch_sample = random.sample(selected_sample, batch_size) + batch_filenames = list() + cut_frame = 80 + + batch_data = np.full((cut_frame, 128, 128, self.event_num), 0) + for filename in train_filenames: + for event_i, event in enumerate(selected_event): + match_str = "train_" + str(event) + "_" + str(selected_batch_sample[0]) + ".npy" + if match_str in filename: + # print(match_str, filename) + batch_filenames.append(filename) + batch_filenames.sort() + + # load np data and trancate 80 frame + for filename in batch_filenames: + np_name = os.path.join(train_data_folder, filename) + sample = np.load(np_name) + event = int(filename.split("_")[-2]) + event_i = selected_event.index(event) + if np.shape(sample)[0] >= cut_frame: + batch_data[:, :, :, event_i] = sample[0:cut_frame, :, :] + else: + batch_data[0:np.shape(sample)[0], :, :, event_i] = sample + + return batch_data, selected_event + +if __name__ == "__main__": + dataset = DatasetGesture(cfg.data_path) + dataset.h5pt2npy(generate_npy=True) + dataset.h5pt2npy(generate_npy=True, dir_name='test_label') + + \ No newline at end of file diff --git a/envs/env.sh b/envs/env.sh new file mode 100644 index 0000000..74f1456 --- /dev/null +++ b/envs/env.sh @@ -0,0 +1,9 @@ +conda install numpy=1.20 -y +conda install opencv=3.4.2 -y +conda install h5py=2.8.0 -y +conda install matplotlib=3.3.4 -y +conda install imageio=2.9.0 -y +conda install scikit-learn=0.24 -y +conda install xlwt=1.3.0 -y +conda install xlrd=2.0.1 -y +conda install xlutils=2.0.0 -y \ No newline at end of file diff --git a/events_timeslices.py b/events_timeslices.py new file mode 100644 index 0000000..b9bfa13 --- /dev/null +++ b/events_timeslices.py @@ -0,0 +1,141 @@ +from __future__ import 
print_function +import bisect +import numpy as np + +def expand_targets(targets, T=500, burnin=0): + y = np.tile(targets.copy(), [T, 1, 1]) + y[:burnin] = 0 + return y + + +def one_hot(mbt, num_classes): + out = np.zeros([mbt.shape[0], num_classes]) + out[np.arange(mbt.shape[0], dtype='int'), mbt.astype('int')] = 1 + return out + + +def find_first(a, tgt): + return bisect.bisect_left(a, tgt) + + +def cast_evs(evs): + ts = (evs[:, 0] * 1e6).astype('uint64') + ad = (evs[:, 1:]).astype('uint64') + return ts, ad + +# def get_binary_frame(evs, size = (346,260), ds=1): +# tr = sparse_matrix((2*evs[:,3]-1,(evs[:,1]//ds,evs[:,2]//ds)), dtype=np.int8, shape=size) +# return tr.toarray() + +def get_subsampled_coordinates(evs, ds_h, ds_w): + x_coords = evs[:, 1] // ds_w + y_coords = evs[:, 2] // ds_h + if x_coords.dtype != np.int: + x_coords = x_coords.astype(int) + if y_coords.dtype != np.int: + y_coords = y_coords.astype(int) + return x_coords, y_coords + + +def get_binary_frame_np(arr, evs, ds_w=1, ds_h=1): + x_coords, y_coords = get_subsampled_coordinates(evs, ds_h, ds_w) + arr[x_coords, y_coords] = 2 * evs[:, 3] - 1 + + +def get_binary_frame(arr, evs, ds_w=1, ds_h=1): + x_coords, y_coords = get_subsampled_coordinates(evs, ds_h, ds_w) + arr[x_coords, y_coords] = 1 + +def get_slice(times, addrs, start_time, end_time): + try: + idx_beg = find_first(times, start_time) + idx_end = find_first(times[idx_beg:], end_time)+idx_beg + return times[idx_beg:idx_end]-times[idx_beg], addrs[idx_beg:idx_end] + except IndexError: + raise IndexError("Empty batch found") + +def get_event_slice(times, addrs, start_time, T, size = [128,128], ds = 1, dt = 1000): + try: + idx_beg = find_first(times, start_time) + idx_end = find_first(times[idx_beg:], start_time+T*dt)+idx_beg + return chunk_evs_pol_dvs(times[idx_beg:idx_end], addrs[idx_beg:idx_end], deltat=dt, chunk_size=T, size = size, ds_w=ds, ds_h=ds) + except IndexError: + raise IndexError("Empty batch found") + +def get_tmad_slice(times, 
addrs, start_time, T): + try: + idx_beg = find_first(times, start_time) + idx_end = find_first(times[idx_beg:], start_time+T)+idx_beg + return np.column_stack([times[idx_beg:idx_end], addrs[idx_beg:idx_end]]) + except IndexError: + raise IndexError("Empty batch found") + +def get_time_surface(evs, invtau=1e-6, size=(346, 260, 2)): + tr = np.zeros(size, 'int64') - np.inf + + for ev in evs: + tr[ev[2], ev[1], ev[3]] = ev[0] + + a = np.exp(tr[:, :, 0] * invtau) - np.exp(tr[:, :, 1] * invtau) + + return a + + +def chunk_evs_dvs(evs, deltat=1000, chunk_size=500, size=[304, 240], ds_w=1, ds_h=1): + t_start = evs[0, 0] + ts = range(t_start + chunk_size, t_start + chunk_size * deltat, deltat) + chunks = np.zeros([len(ts)] + size, dtype='int8') + idx_start = 0 + idx_end = 0 + for i, t in enumerate(ts): + idx_end += find_first(evs[idx_end:, 0], t) + if idx_end > idx_start: + get_binary_frame_np(chunks[i, ...], evs[idx_start:idx_end], ds_h=ds_h, ds_w=ds_w) + idx_start = idx_end + return chunks + +def frame_evs(times, addrs, deltat=1000, duration=500, size=[240], downsample = [1]): + t_start = times[0] + ts = range(t_start, t_start + duration * deltat, deltat) + chunks = np.zeros([len(ts)] + size, dtype='int8') + idx_start = 0 + idx_end = 0 + for i, t in enumerate(ts): + idx_end += find_first(times[idx_end:], t) + if idx_end > idx_start: + ee = addrs[idx_start:idx_end] + ev = [(ee[:, i] // d).astype(np.int) for i,d in enumerate(downsample)] + np.add.at(chunks, tuple([i]+ev), 1) + idx_start = idx_end + return chunks + + +def chunk_evs_pol_dvs(times, addrs, deltat=1000, chunk_size=500, size=[2, 304, 240], ds_w=1, ds_h=1): + t_start = times[0] + ts = range(t_start, t_start + chunk_size * deltat, deltat) + chunks = np.zeros([len(ts)] + size, dtype='int8') + idx_start = 0 + idx_end = 0 + for i, t in enumerate(ts): + idx_end += find_first(times[idx_end:], t) + if idx_end > idx_start: + ee = addrs[idx_start:idx_end] + pol, x, y = ee[:, 2], (ee[:, 0] // ds_w).astype(np.int), (ee[:, 1] 
// ds_h).astype(np.int) + np.add.at(chunks, (i, pol, x, y), 1) + idx_start = idx_end + return chunks + +def my_chunk_evs_pol_dvs(data, dt=1000, T=500, size=[2, 304, 240], ds=[4,4]): + t_start = data[0][0] + ts = range(t_start, t_start + T * dt, dt) + chunks = np.zeros([len(ts)] + size, dtype='int8') + idx_start = 0 + idx_end = 0 + for i, t in enumerate(ts): + idx_end += find_first(data[idx_end:,0], t+dt) + if idx_end > idx_start: + ee = data[idx_start:idx_end,1:] + pol, x, y = ee[:, 0], (ee[:, 1] // ds[0]).astype(np.int), (ee[:, 2] // ds[1]).astype(np.int) + np.add.at(chunks, (i, pol, x, y), 1) + idx_start = idx_end + return chunks \ No newline at end of file diff --git a/expert.py b/expert.py new file mode 100644 index 0000000..01a711f --- /dev/null +++ b/expert.py @@ -0,0 +1,1518 @@ +import numpy as np +from Neuron import Neuron +from Visualization import Visualization +from math import pow +import operator +from statistics import mean +from scipy.signal import convolve + + +class SGF_expert(Neuron): + def __init__(self, args): + self.args = args + super().__init__() + + + def expert_space(self, resolution_col, resolution_row, data, thres, offset, \ + thres_bit=1, thres_step=0.1, thres_inc_factor=[-1,1], if_vote=False, if_imbalance=False): + # resolution: active location resolution (>= 2), equal to space neuron power of 2 + # data: input data format:[row, coloum, event number] + # thres: ST core neuron threshold and space neuron threshold: [ST_core threshold, space_threshold] + # offset: active scope offset either from row or coloum: [offset_row, offset_coloum] + + row_dimension = int(np.shape(data)[0])-offset[0] + col_dimension = int(np.shape(data)[1])-offset[1] + space_neuron = np.full((resolution_row*resolution_col*thres_bit*thres_bit,np.shape(data)[2]),0) + row_unit = int(row_dimension/resolution_row) + col_unit = int(col_dimension/resolution_col) + weight = np.full((row_dimension, col_dimension, np.shape(space_neuron)[0]), 0) + + j = 0 # space neuron + for k 
in range(0, resolution_row): # row + for l in range(0, resolution_col): # col + weight[row_unit*k:row_unit*(k+1), col_unit*l:col_unit*(l+1),j]=1 + j = j+1 + + # thres_step = 0.1 + for t1 in range(thres_bit): + for t2 in range(thres_bit): + # print(int(thres[0]*pow((1+thres_inc_factor[0]*thres_step),t)),\ + # int(thres[1]*pow((1+thres_inc_factor[1]*thres_step),t))) + n1 = Neuron() + syn = np.full((resolution_row*resolution_col,np.shape(data)[2]),0) + for i in range (0, np.shape(data)[2]): # event + for j in range(0, resolution_row*resolution_col): # space neuron + if if_imbalance: + syn[j,i] = sum(sum(np.where(data[:,:,i]>(int(thres[0][j]*pow((1+thres_step),t1))), 1, 0) * weight[:,:,j])) # thres0调节每个点的阈值 + space_neuron[thres_bit*thres_bit*j+t1*thres_bit+t2,i]= n1.neuron_space_expert(syn[j,i],(int(thres[1][j]*pow((1+thres_step),t2)))) # thres1调节范围 + else: + syn[j,i] = sum(sum(np.where(data[:,:,i]>(int(thres[0]*pow((1+thres_step),t1))), 1, 0) * weight[:,:,j])) # thres0调节每个点的阈值 + space_neuron[thres_bit*thres_bit*j+t1*thres_bit+t2,i]= n1.neuron_space_expert(syn[j,i],(int(thres[1]*pow((1+thres_step),t2)))) # thres1调节范围 + if if_vote: + space_neuron_vote = np.full((resolution_row*resolution_col, np.shape(data)[2]), 0) + for i in range (0, np.shape(data)[2]): # event + for j in range(0, resolution_row*resolution_col): # space neuron + # print(sum(space_neuron[thres_bit*thres_bit*j:thres_bit*thres_bit*(j+1),i])) + if sum(space_neuron[thres_bit*thres_bit*j:thres_bit*thres_bit*(j+1),i]) >= thres_bit*thres_bit/2: + space_neuron_vote[j,i] = 1 + else: + space_neuron_vote[j,i] = 0 + return space_neuron_vote + else: + return space_neuron + + def attention_expert(self, data, threshold): + # data: data: input data format:[row, coloum, event number] + result_row_max = [0 for i in range (np.shape(data)[0])] + result_row_min = [0 for i in range (np.shape(data)[0])] + result_col_max = [0 for i in range (np.shape(data)[1])] + result_col_min = [0 for i in range (np.shape(data)[1])] + 
neuron_valid = np.full((np.shape(data)[0],np.shape(data)[1],np.shape(data)[2]),0) + result = np.full((4,np.shape(data)[2]),0) + for i in range (0, np.shape(data)[2]): # event + for j in range(0,np.shape(data)[0]): # Row + for k in range(0, np.shape(data)[1]): + neuron_valid[j,k,i] = np.where(data[j,k,i]>threshold,1,0) ## the absolute value 5 is user defined. + + for i in range (0, np.shape(data)[2]): # event + for j in range(0,np.shape(data)[0]): # Row + try: + result_row_max[j] = np.amax(np.where(neuron_valid[j,:,i] == 1)) + result_row_min[j] = np.amin(np.where(neuron_valid[j,:,i] == 1)) + except ValueError: + pass + try: + result[0,i] = np.amax(result_row_max) + result[1,i] = np.min(np.nonzero(result_row_min)) + except ValueError: + pass + for k in range(0, np.shape(data)[1]): + try: + result_col_max[k] = np.amax(np.where(neuron_valid[:,k,i] == 1)) + result_col_min[k] = np.amin(np.where(neuron_valid[:,k,i] == 1)) + except ValueError: + pass + try: + result[2,i] = np.amax(result_col_max) + result[3,i] = np.min(np.nonzero(result_col_min)) + except ValueError: + pass + + return result + + def expert_temporal(self, resolution, data, start, end, type, scale): + # resolution: temporal resolution + # data: input data format:[frame, row, coloum, event number] + # start: offset of the start points format: [row, coloum] + # end: offset of the end points format: [row, coloum] + # type: temporal sequences: 0-(top, down), 1-(left, right) + n1 = Neuron() + row_length = end[0] - start[0] # target zone row length + col_length = end[1] - start[1] # target zone coloum length + temporal_neuron = np.full((int(pow(resolution,2)),np.shape(data)[3]),0) # temporal neuron number + row_unit = int(row_length/resolution) # target zone row computational unit length + col_unit = int(col_length/resolution) # target zone col computational unit length + #weight = np.full((row_length, col_length, row_length*col_length),0) # target zone neuron weight information + weight = 
np.full((np.shape(data)[1], np.shape(data)[2], np.shape(data)[1]*np.shape(data)[2]),0) + modular_index = 0 + index = 0 + location = [0 for i in range (4)] + if type == 0: ## top -> down (left) + bottom - up (right) + for k in range(0,resolution): # row + for l in range(0,resolution): # col + location[0] = row_unit*k + location[1] = row_unit*(k +1) + location[2] = col_unit*l + start[1] + location[3] = col_unit*(l+1) + start[1] + if modular_index == 0 or modular_index == 2: + for i in range(location[0],location[1]): # row number + for j in range(location[2],location[3]): # coloum number + x_top = i-scale # connection range + if x_top < location[0]: + x_top = location[0] + weight[x_top:i, j, index] = 1 + weight[i,j,index] = 0 + index = index +1 + else: + for i in range(location[0],location[1]): #row + for j in range(location[2],location[3]): #coloum + x_bottom = i+scale + if x_bottom > location[1]: + x_bottom = location[1] + weight[i:x_bottom, j, index] = 1 + weight[i,j,index] = 0 + index = index +1 + modular_index = modular_index +1 + + elif type == 1: ## bottom->up (left) + top -> down )right + for k in range(0,resolution): # row + for l in range(0,resolution): # col + location[0] = row_unit*k + location[1] = row_unit*(k +1) + location[2] = col_unit*l + start[1] + location[3] = col_unit*(l +1) + start[1] + if modular_index == 1 or modular_index == 3: + for i in range(location[0],location[1]): # row + for j in range(location[2],location[3]): # coloum + x_top = i-scale # connection range + if x_top < location[0]: + x_top = location[0] + weight[x_top:i, j, index] = 1 + weight[i,j,index] = 0 + index = index +1 + else: + for i in range(location[0],location[1]): #row + for j in range(location[2],location[3]): #coloum + x_bottom = i+scale + if x_bottom > location[1]: + x_bottom = location[1] + weight[i:x_bottom, j, index] = 1 + weight[i,j,index] = 0 + index = index +1 + modular_index = modular_index +1 + + elif type == 2: # left->right(left) + right->left (right) + for k 
in range(0,resolution): # row + for l in range(0,resolution): # col + location[0] = row_unit*k + location[1] = row_unit*(k +1) + location[2] = col_unit*l + start[1] + location[3] = col_unit*(l +1) + start[1] + if modular_index == 1 or modular_index == 3: + for i in range(location[0],location[1]): # row + for j in range(location[2],location[3]): # coloum + y_top = j-scale # connection range + if y_top < location[2]: + y_top = location[2] + weight[i, y_top:j, index] = 1 + weight[i,j,index] = 0 + index = index +1 + + else: + for i in range(location[0],location[1]): #row + for j in range(location[2],location[3]): #coloum + y_bottom = j+scale + if y_bottom > location[3]: + y_bottom = location[3] + weight[i, j:y_bottom, index] = 1 + weight[i,j,index] = 0 + index = index +1 + modular_index = modular_index +1 + + else: + pass + spike_info = np.full((np.shape(data)[0],np.shape(data)[1],np.shape(data)[2],np.shape(data)[3]),0) + temporal_index = np.full((np.shape(data)[0],np.shape(data)[1],np.shape(data)[2],np.shape(data)[3]),0) + temporal_neuron = np.full((resolution,resolution,np.shape(data)[3]),0) + temporal_neuron_spike = np.full((resolution,resolution,np.shape(data)[3]),0) + temporal_neuron_final = [0 for i in range (np.shape(data)[3])] + + for i in range(0, np.shape(data)[3]): # event + for j in range(1, np.shape(data)[0]): # frame + index = 0 + spike_info[0,:,:,i] = data[0,:,:,i] # given the initial neuron states + for k in range(0,resolution): # row + for l in range(0,resolution): # col + location[0] = row_unit*k + location[1] = row_unit*(k +1) + location[2] = col_unit*l + start[1] + location[3] = col_unit*(l+1) + start[1] + for m in range(location[0],location[1]): # row-element + for n in range(location[2],location[3]): # coloum-element + stim = data[j,m,n,i] + syn = sum(sum(weight[:,:,index]*spike_info[j-1,:,:,i])) + V_spike, V_internal = n1.neuron_temporal_expert(syn, stim, 0.5) + if syn > 0 and stim == 1 : # Hebbian learning + temporal_index[j,m,n,i] = 1 + else: + 
temporal_index[j,m,n,i] = 0 + spike_info[j,m,n,i] = V_spike + index = index +1 + #v2 = Visualization(data) + #v2.generate_temporal_profiling( data[1,:,:,0], spike_info[0,:,:,0], temporal_index[1,:,:,0]) + #v2.generate_weight_map(weight, location[0]*row_length +location[2], location[0]*row_length +location[3]) + for i in range(0, np.shape(data)[3]): # event + for k in range(0,resolution): # row + for l in range(0, resolution): + #temporal_neuron[k,l,i] = int(np.where(sum(sum(sum(temporal_index[:,row_unit*k:row_unit*(k+1), col_unit*l:col_unit*(l+1),i])))>thres,1,0)) + temporal_neuron_spike[k,l,i] = sum(sum(sum(temporal_index[:,row_unit*k:row_unit*(k+1), col_unit*l:col_unit*(l+1),i]))) + temporal_neuron_final[i] = sum(sum(temporal_neuron_spike[:,:,i])) + return temporal_neuron_final + + + def coord_transformation(self, element_location, col_length): ## transfer modular local coord to global coord + # location: modular location, format[ row, coloum] + # modular_size: modular size, format:[row unit length, col unit length] + # modular_index: modular index + + + #new_location = modular_index*(modular_size[0]*modular_size[1]) + element_location[0]*modular_size[1] + element_location[1] + new_location = element_location[0]*col_length + element_location[1] + return new_location + + + # def expert_topology(self,stcore,sense_scope): + # n1 = Neuron() + # frame = np.shape(stcore)[0] + # event_num = np.shape(stcore)[3] + # resolution = 6 + # space_neuron_number = int(pow(resolution,2)) + # stim = np.full((frame,int(pow(resolution,2)),event_num),0) + # for i in range (0,frame): + # space_neuron = self.expert_space(resolution, stcore[i,:,:,:], [0,10], [0,0]) ## find the most active location as the stimulus + # stim[i,:,:] = space_neuron ## generate active patterns + + # weight = np.full((space_neuron_number,space_neuron_number,event_num),0) ## define the weight matrix + # for i in range(0,event_num): + # for j in range(1,frame): ## the weight has to be generated by frame + # for k 
in range(0,space_neuron_number): + # if stim[j,k,i] == 1: + # ### This require further optimizations + # for l in range (-sense_scope[1],sense_scope[1]): ## sense range-coloum + # for m in range (-sense_scope[0],sense_scope[0]): + # if stim[j-1,k+l*resolution+m,i] == 1: + # weight[k,k+l*resolution+m,i] = 1 ## only generate weight when both frame has the connections. + + # v1 = Visualization(stcore) + # v1.generate_weight_map(weight, 0, 9) + # spike_info = np.full((frame,space_neuron_number,event_num),0) + # temporal_index = np.full((frame,space_neuron_number,event_num),0) + # topology_index = np.full((frame,event_num),0) + # for i in range(0,event_num): + # spike_info[0,:,i] = stim[0,:,i] + # for j in range(1,frame): + # for k in range(0,space_neuron_number): + # syn = sum(weight[k,:,5]*spike_info[j-1,:,i]) # background computing + # V_spike, V_internal = n1.neuron_temporal_expert(syn, stim[j,k,i], 0.5) + # if syn > 0 and stim[j,k,i] == 1 : # Hebbian learning + # temporal_index[j,k,i] = 1 + # else: + # temporal_index[j,k,i] = 0 + # spike_info[j,k,i] = V_spike + # if sum(temporal_index[j,:,i]) > sum(stim[j,:,i]) * 0.8: + # topology_index[j,i] = 1 + # topology_neuron = [0 for i in range (event_num)] + # topology_spike = [0 for i in range (event_num)] + # ref1 = sum(sum(temporal_index[:,:,0]))-6 + # for i in range(0, event_num): + # topology_spike[i] = sum(sum(temporal_index[:,:,i])) + # topology_neuron[i] = np.where(topology_spike[i]>ref1,1,0) + # print(topology_neuron) + # print(topology_spike) + # print(topology_index) + # return stim,weight,temporal_index + + + def expert_knn_train(self, train_data, train_id,resolution,index, id_start, id_end): + + pattern_num = 0 + res = np.power(resolution,2) + event0_code = np.full((np.shape(train_id)[0],np.shape(train_data)[1]),0) + event_knowledge_clock = np.full((resolution, resolution),0) + event_knowledge_anticlock = np.full((resolution, resolution),0) + index0 = 0 + for i in range (0, np.shape(train_id)[0]): + if 
train_id[i] == index: + event0_code[index0] = train_data[i,:] + index0 = index0 + 1 + pattern_num = pattern_num + 1 + event0_code = event0_code[~np.all(event0_code == 0, axis=1)] + xdata = np.full((np.shape(event0_code)[0],res),0) + ydata = np.full((np.shape(event0_code)[0],res),0) + for j in range (0, np.shape(event0_code)[0]): + zdata = event0_code[j,8:10] ## temporal domain + xydata = event0_code[j,id_start:id_end] + if zdata[0] == 1 and zdata[1] == 0: + for k in range (0, res): + if xydata[k] == 1: + xdata[j,k] = k//resolution + ydata[j,k] = k%resolution + event_knowledge_clock[xdata[j,k],ydata[j,k]] = 1 + #v1 = Visualization(event0_code) + #v1.generate_spike_code(xdata,ydata,1,index) + + if zdata[0] == 0 and zdata[1] == 1: + for k in range (0, res): + if xydata[k] == 1: + xdata[j,k] = k//resolution + ydata[j,k] = k%resolution + event_knowledge_anticlock[xdata[j,k],ydata[j,k]] = 1 + + #v1 = Visualization(event0_code) + #v1.generate_spike_code(xdata,ydata,0,index) + # print(pattern_num) + return event_knowledge_clock, event_knowledge_anticlock + + + + def expert_knn_test(self,test_data, resolution, event_knowledge,event_knowledge_anticlock,id_start, id_end): + res = np.power(resolution,2) + event0_code = test_data + test_code= np.full((resolution, resolution),0) + xdata = [0 for x in range(np.power(resolution,2))] + ydata = [0 for x in range(np.power(resolution,2))] + + for j in range (0, np.shape(event0_code)[0]): + zdata = event0_code[8:10] ## temporal domain + xydata = event0_code[id_start:id_end] + for k in range (0, res): + if xydata[k] == 1: + xdata[k] = k//resolution + ydata[k] = k%resolution + test_code[xdata[k],ydata[k]] = 1 + + score = np.full((resolution,resolution,10),0) + score_p = [0 for x in range(10)] + score_n = [0 for x in range(10)] + result = [0 for x in range(10)] + + if zdata[0] == 1 and zdata[1] == 0: ## clockwise + for i in range(0,np.shape(event_knowledge)[2]): + for j in range (0, resolution): + for k in range(0,resolution): + if 
test_code[j,k] == event_knowledge[j,k,i] and test_code[j,k] == 1: + score[j,k,i] = 1 + score_p[i] = score_p[i]+1 + elif test_code[j,k] != event_knowledge[j,k,i] and test_code[j,k] == 1: + score[j,k,i] = -1 + score_n[i] = score_n[i]-1 + else: + pass + result[i] = score_p[i] + score_n[i] + #print(score_p,score_n) + + if zdata[0] == 0 and zdata[1] == 1: ## clockwise + for i in range(0,np.shape(event_knowledge_anticlock)[2]): + for j in range (0, resolution): + for k in range(0,resolution): + if test_code[j,k] == event_knowledge_anticlock[j,k,i] and test_code[j,k] == 1: + score[j,k,i] = 1 + score_p[i] = score_p[i]+1 + elif test_code[j,k] != event_knowledge_anticlock[j,k,i] and test_code[j,k] == 1: + score[j,k,i] = -1 + score_n[i] = score_n[i]-1 + else: + pass + result[i] = score_p[i] + score_n[i] + #print(score_p,score_n) + + #print(score_total) + + #print(score_p) + #print(score_n) + #result_p = np.argmax(score_p) + #result_n = np.argmin(score_n) + #result = [result_p, result_n] + #result = np.argmax(result) + result_return = np.where(result ==np.amax(result)) + return result_return + + def expert_overlap(self,data, type): + # data: input data format:[ frame,row, coloum, event number] + # type: 1: to check if there any active areas are overlapped. 
+ frame = np.shape(data)[0] + event_num = np.shape(data)[3] + + + + def expert_temporal_poc(self, resolution, start, end, type, scale, data): + n1 = Neuron() + row_length = end[0] - start[0] # target zone row length + col_length = end[1] - start[1] # target zone coloum length + #temporal_neuron = np.full((int(pow(resolution,2)),np.shape(data)[3]),0) # temporal neuron number + row_unit = int(row_length/resolution) # target zone row computational unit length + col_unit = int(col_length/resolution) # target zone col computational unit length + weight = np.full((row_length, col_length, row_length*col_length),0) # target zone neuron weight information + #weight = np.full((np.shape(data)[1], np.shape(data)[2], np.shape(data)[1]*np.shape(data)[2]),0) + modular_index = 0 + index = 0 + location = [0 for i in range (4)] + if type == 0: ## pre: left + for k in range(0,resolution): # define the boundary of the area + for l in range(0,resolution): + location[0] = row_unit*k # row start address + location[1] = row_unit*(k +1) # row end address + location[2] = col_unit*l + start[1] # coloum start address + location[3] = col_unit*(l+1) + start[1] # coloum end address + print(location) + if modular_index == 0: + for i in range(location[0],location[1]): # row + for j in range(location[2],location[3]): # coloum + y_top = j-scale # connection range + if y_top < location[2]: + y_top = location[2] + weight[i, y_top:j, index] = 1 + weight[i,j,index] = 0 + index = index +1 # neuron index + modular_index = modular_index +1 + + elif type == 1: ## pre: right + for k in range(0,resolution): # define the boundary of the area + for l in range(0,resolution): + location[0] = row_unit*k # row start address + location[1] = row_unit*(k +1) # row end address + location[2] = col_unit*l + start[1] # coloum start address + location[3] = col_unit*(l+1) + start[1] # coloum end address + print(location) + if modular_index == 0: + for i in range(location[0],location[1]): # row + for j in 
range(location[2],location[3]): # coloum + y_top = j+scale # connection range + if y_top > location[3]: + y_top = location[3] + weight[i, j:y_top, index] = 1 + weight[i,j,index] = 0 + index = index +1 # neuron index + modular_index = modular_index +1 + else: + pass + v1 = Visualization(weight) + v1.generate_weight_map(weight,21,25) + + + ## start neuroal computation + spike_info = np.full((np.shape(data)[0],np.shape(data)[1],np.shape(data)[2],np.shape(data)[3]),0) + temporal_index = np.full((np.shape(data)[0],np.shape(data)[1],np.shape(data)[2],np.shape(data)[3]),0) + temporal_neuron = np.full((resolution,resolution,np.shape(data)[3]),0) + temporal_neuron_spike = np.full((resolution,resolution,np.shape(data)[3]),0) + temporal_neuron_final = [0 for i in range (np.shape(data)[3])] + + for i in range(0, np.shape(data)[3]): # event + for j in range(1, np.shape(data)[0]): # frame + index = 0 + spike_info[0,:,:,i] = data[0,:,:,i] # given the initial neuron states + for k in range(0,resolution): # row + for l in range(0,resolution): # col + location[0] = row_unit*k + location[1] = row_unit*(k +1) + location[2] = col_unit*l + start[1] + location[3] = col_unit*(l+1) + start[1] + for m in range(location[0],location[1]): # row-element + for n in range(location[2],location[3]): # coloum-element + stim = data[j,m,n,i] + syn = sum(sum(weight[:,:,index]*spike_info[j-1,:,:,i])) + V_spike, V_internal = n1.neuron_temporal_expert(syn, stim, 0.5) + if syn > 0 and stim == 1 : # Hebbian learning + temporal_index[j,m,n,i] = 1 + else: + temporal_index[j,m,n,i] = 0 + spike_info[j,m,n,i] = V_spike + index = index +1 + #v2 = Visualization(data) + #v2.generate_temporal_profiling( data[1,:,:,0], spike_info[0,:,:,0], temporal_index[1,:,:,0]) + #v2.generate_weight_map(weight, location[0]*row_length +location[2], location[0]*row_length +location[3]) + for i in range(0, np.shape(data)[3]): # event + for k in range(0,resolution): # row + for l in range(0, resolution): + #temporal_neuron[k,l,i] = 
int(np.where(sum(sum(sum(temporal_index[:,row_unit*k:row_unit*(k+1), col_unit*l:col_unit*(l+1),i])))>thres,1,0)) + temporal_neuron_spike[k,l,i] = sum(sum(sum(temporal_index[:,row_unit*k:row_unit*(k+1), col_unit*l:col_unit*(l+1),i]))) + temporal_neuron_final[i] = sum(sum(temporal_neuron_spike[:,:,i])) + print(temporal_neuron_spike) + return temporal_neuron_final,temporal_neuron_spike + + def expert_space_event10(self, space_neuron,resolution): + event_num = np.shape(space_neuron)[1] + resp = [0 for i in range (event_num)] + index = [0 for i in range (event_num)] + + for i in range(0, event_num): + test_data = space_neuron[:,i].reshape(resolution, resolution) + if np.any(test_data[:,5] == 1): + resp[i] = 1 + + + return resp + + def expert_hopfield(self, data, type,factor): + # data: input data format:[frame, row, coloum, event number] + # type: detect temporal movements: 0-(top, down), 1-(bottom, up) , 2-(left,right), 3(right, left) + # factor: the threshold scale factor (default = 0.7) + + frame_num = np.shape(data)[0] + #frame_num = 10 + row_num = np.shape(data)[1] + col_num = np.shape(data)[2] + target = np.full((frame_num,row_num,col_num,2),0) + target_spike = np.full((frame_num,row_num,col_num,2),0) + target_all = np.full((frame_num,1),0) + + if type == 0: + for i in range(1, frame_num): + if i == 0: + previous_event = data[i,:,:] #self checking + else: + previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if j < active_pixels[0][l]: ##detect the movement of row + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + 
#print(target) + elif type == 1: + for i in range(1, frame_num): + if i == 0: + previous_event = data[i,:,:] #self checking + else: + previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if j > active_pixels[0][l]: ##detect the movement of row + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + elif type == 2: + for i in range(1, frame_num): + if i == 0: + previous_event = data[i,:,:] #self checking + else: + previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if k > active_pixels[1][l]: ##detect the movement of col + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + elif type == 3: + for i in range(1, frame_num): + if i == 0: + previous_event = data[i,:,:] #self checking + else: + previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if k < active_pixels[1][l]: ##detect the movement of col + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > 
len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + else: + pass + ### filtering sparse events that caused by the devices + + res = 5 + distrubtion_resolution = list(i for i in range (0, row_num,res)) + target_spike_hist = np.full((frame_num,len(distrubtion_resolution)-1),0) + if type == 0 or type == 1: + #threshold = 40 ## if below 40, there are background noises + for i in range(1,frame_num): + active_pixels = np.nonzero(target_spike[i,:,:,0]) + target_spike_hist[i,:] = np.histogram(active_pixels[0], bins=distrubtion_resolution)[0] + + elif type == 2 or type == 3: + #threshold = 30 ## if below 40, there are background noises + for i in range(1,frame_num): + active_pixels = np.nonzero(target_spike[i,:,:,0]) + target_spike_hist[i,:] = np.histogram(active_pixels[1], bins=distrubtion_resolution)[0] + + #position_spike = sum(target_spike_hist[:,0:-1]) + + + ### check the movement among timing + threshold = 15 ## #25 + target_spike_hist [target_spike_hist < threshold] = 0 + + ## find active area index + target_areas_index = np.full((frame_num,len(distrubtion_resolution)-1),-1) + for i in range(1,frame_num): ## experts sequence + for j in range(0, len(distrubtion_resolution)-1): ## active area index + if target_spike_hist[i,j] > 0: + target_areas_index[i,j] =j + else: + target_areas_index[i,j] =-1 ## there is no active areas + + + ## detect movement directions + areas_index = [] + for i in range(0, frame_num): + temporal = max(target_areas_index[i,:]) + if temporal == -1: + areas_index.append(-1) + #pass + else: + areas_index.append(max(target_areas_index[i,:])) + + + # print(areas_index) + location_bit = [0 for i in range(0,4)] + ## start to decode areas_index(neural activities) + active_index_1 = [i for i in areas_index if i >= 0] ## delete non active area -1 + time_bit = 100 + number = 
len(set(active_index_1)) + spike_flag = 0 + if number != 0: ## if there is an activity or activities + if type == 0 or type == 1: + ##calculat the decoding values: + max_value = max(active_index_1) + min_value = min(active_index_1) + decoding_value = max_value - min_value + ##check the spike intensities distributions + position_flag = 0 + frame_index = [i for i, j in enumerate(areas_index) if j == max_value] ## find the max_position frame index. This can be either max_position or min_position + while position_flag == 0: + + active_pixels = np.nonzero(target[frame_index[0]-1 ,:,:,0]) ## check the individual neuron intensitiy + + spike_intensity_distribution = np.histogram(active_pixels[0], bins=distrubtion_resolution)[0] ## check the spike intensity disutrbutions + required_position = [i for i, j in enumerate(spike_intensity_distribution) if j > 5] ##10 ## handcraft number 10, is the number less than 10, this indicates noises. + if len(required_position) > 1: + position_flag = 1 + else: + frame_index[0] = frame_index[0] +1 + + if frame_index[0] >= np.shape(target)[0]: + position_flag = 1 + spike_flag = 1 + + + if spike_flag == 0: + required_position_mean_spike = np.zeros_like(required_position) + for i in range(0, len(required_position)): + target_area = target[frame_index[0]-1,required_position[i]*res:(required_position[i]+1)*res,:,0] + required_position_mean_spike[i] = np.true_divide(target_area.sum(),(target_area!=0).sum()) + + if required_position_mean_spike[0] > required_position_mean_spike[-1]: ## if the bottom area spike intensities over the top spike intensities + spike_flow = 'top->down' #the spike flow is defined as the spike intensities flow from small values to the big values. 
+ elif required_position_mean_spike[0] < required_position_mean_spike[-1]: # if the bottom area spike intensities smaller the top spike intensities + spike_flow = 'bottom->up' + else: + spike_flow = 'unknown' + else: + spike_flow = 'unknown' + # print('spike_flow:',spike_flow) + + elif type == 2 or type == 3: + ##calculat the decoding values: + max_value = max(active_index_1) + min_value = min(active_index_1) + decoding_value = max_value - min_value + ##check the spike intensities distributions + position_flag = 0 + frame_index = [i for i, j in enumerate(areas_index) if j == max_value] ## find the max_position frame index. This can be either max_position or min_position + while position_flag == 0: + + #frame_index = 3 + active_pixels = np.nonzero(target[frame_index[0]-1 ,:,:,0]) ## check the individual neuron intensitiy + + spike_intensity_distribution = np.histogram(active_pixels[1], bins=distrubtion_resolution)[0] ## check the spike intensity disutrbutions + required_position = [i for i, j in enumerate(spike_intensity_distribution) if j > 5] ## handcraft number 10, is the number less than 10, this indicates noises. + if len(required_position) > 1: + position_flag = 1 + else: + frame_index[0] = frame_index[0] +1 + + if frame_index[0] >= np.shape(target)[0]: + position_flag = 1 + spike_flag = 1 + + + if spike_flag == 0: + required_position_mean_spike = np.zeros_like(required_position) + for i in range(0, len(required_position)): + target_area = target[frame_index[0]-1,:,required_position[i]*res:(required_position[i]+1)*res,0] + required_position_mean_spike[i] = np.true_divide(target_area.sum(),(target_area!=0).sum()) + + if required_position_mean_spike[0] > required_position_mean_spike[-1]: ## if the bottom area spike intensities over the top spike intensities + spike_flow = 'right->left' #the spike flow is defined as the spike intensities flow from small values to the big values. 
+ elif required_position_mean_spike[0] < required_position_mean_spike[-1]: # if the bottom area spike intensities smaller the top spike intensities + spike_flow = 'left->right' + else: + spike_flow = 'unknown' + else: + spike_flow = 'unknown' + # print('spike_flow:',spike_flow) + else: + print('hopfield network type error') + + #print('test') + max_position = [i for i, j in enumerate(areas_index) if j == max_value] + #print(max_position) + min_position = [i for i, j in enumerate(areas_index) if j == min_value] + #print(min_position) + v1 = Visualization(target[:,:,:,0]) + ''' + if decoding_value == number-1 and v1.areConsecutive(max_position,len(max_position))== True and decoding_value != 0 : ## if there is a pattern format : 3-2-1 or 3-2 + ## the pattern format: 3-2-1 or 3-2 + max_position = [i for i, j in enumerate(areas_index) if j == max_value] + if type == 0 or type == 1: + if max_position[0] < min_position[0]: ## if the movement is from top-> down + direction_flow = 'top->down' + elif max_position[0] > min_position[0]: + direction_flow = 'bottom->up' + else: + direction_flow = 'unknwon' + elif type == 2 or type == 3: + if max_position[0] < min_position[0]: ## if the movement is from top-> down + direction_flow = 'right->left' + elif max_position[0] > min_position[0]: + direction_flow = 'left->right' + else: + direction_flow = 'unknwon' + print('direction_flow:',direction_flow) + if direction_flow == spike_flow: + hopfield_bit = 1 ## there is an required pattern activities + + ## dectect lift right location + if type == 2 or type == 3: + if mean(active_index_1) > 7: ## check whether the active areas are at left + location_bit[0] = 1 + + if mean(active_index_1) <= 7: ## check whether the active areas are at right + location_bit[1] = 1 + + ## detect start timing + frame = [i for i, j in enumerate(areas_index) if j >=0] + time_bit = frame[0] + + else: + hopfield_bit = 0 + ''' + #if decoding_value == number-1 and v1.areConsecutive([abs(x) for x in 
max_position],len(max_position)) == False and decoding_value != 0: ## if there is a pattern format : 3-2-3 or 2-3-2 + if 1 == 1: ## if there is a pattern format : 3-2-3 or 2-3-2 + ## the pattern format: 3-2-3 or 1-2-1 there is a local minial points + # max_position = [i for i, j in enumerate(areas_index) if j == max_value] + i = 1 + pit_point_flap = 0 + sense_flag = 0 + sense_range = 4 + j = 1 + + if type == 0 or type == 1: + while pit_point_flap ==0: + while sense_flag == 0: + try: + if areas_index[i] != -1 and areas_index[i+j] != -1 : + if areas_index[i] > areas_index[i+j] and abs(areas_index[i] - areas_index[i+j])<3: #5 + direction_flow = 'top->down' + if direction_flow == spike_flow: + pit_point_flap = 1 + hopfield_bit = 1 + time_bit = i + sense_flag = 1 + elif areas_index[i] < areas_index[i+j] and abs(areas_index[i] - areas_index[i+j])<3: #5 + direction_flow = 'bottom->up' + if direction_flow == spike_flow: + pit_point_flap = 1 + hopfield_bit = 1 + time_bit = i + sense_flag = 1 + else: + pass + except: + return [0], 0, np.array([0,0,0,0]), 0 + + if j >= sense_range: + sense_flag = 1 + j = 1 + else: + j = j+1 + sense_flag = 0 + + if i == len(areas_index) -sense_range -1: ## if match the last second + hopfield_bit = 0 + pit_point_flap = 1 + if pit_point_flap == 0: + i = i+1 + + + + elif type == 2 or type == 3: + while pit_point_flap ==0: ## loop in area index + while sense_flag == 0: ## loop in the sense range + try: + if areas_index[i] != -1 and areas_index[i+j] != -1: + if areas_index[i] > areas_index[i+j] and abs(areas_index[i] - areas_index[i+j])<=3: + direction_flow = 'right->left' + if direction_flow == spike_flow: + pit_point_flap = 1 + hopfield_bit = 1 + time_bit = i + sense_flag = 1 + elif areas_index[i] < areas_index[i+j] and abs(areas_index[i] - areas_index[i+j])<=3: + + direction_flow = 'left->right' + if direction_flow == spike_flow: + pit_point_flap = 1 + hopfield_bit = 1 + time_bit = i + sense_flag = 1 + else: + pass + except: + return [0], 0, 
np.array([0,0,0,0]), 0 + + if j >= sense_range: + sense_flag = 1 + j=1 + else: + j = j+1 + sense_flag = 0 + + if i == len(areas_index) - sense_range-1: ## if match the last second + hopfield_bit = 0 + pit_point_flap = 1 + if pit_point_flap == 0: + i = i+1 + + + if hopfield_bit == 1: + + if type == 2 or type == 3: + if areas_index[time_bit] <19: ## left areas + location_bit[0] = 1 + + if areas_index[time_bit] >=19: ## right areas + location_bit[1] = 1 + + if type == 0 : + max_value = max(active_index_1) + min_value = min(active_index_1) + if abs(max_value-min_value) >= 5: + if areas_index[time_bit]>22: ## top areas + location_bit[2] = 1 + if areas_index[time_bit]<=22: ## bottom areas + location_bit[3] = 1 + else: + if areas_index[time_bit]>19: ## top areas + location_bit[2] = 1 + if areas_index[time_bit]<=19: ## bottom areas + location_bit[3] = 1 + if type == 1: + if areas_index[time_bit]<=17: ## bottom areas + location_bit[3] = 1 + if areas_index[time_bit]>17: ## top areas + location_bit[2] = 1 + # print('direction_flow:',direction_flow) + #elif decoding_value != number-1: + # hopfield_bit = 0 + + #elif decoding_value == 0: ## if there is only one activities + # hopfield_bit = 0 + + else: + hopfield_bit = -1 + else: + hopfield_bit = 0 + # print('no active patterns') + + # print(hopfield_bit) + return areas_index, hopfield_bit,location_bit, time_bit + + + def expert_hopfield_v1(self, data, type,factor): + # data: input data format:[frame, row, coloum, event number] + # type: detect temporal movements: 0-(top, down), 1-(bottom, up) , 2-(left,right), 3(right, left) + # factor: the threshold scale factor (default = 0.7) + + frame_num = 3 + row_num = np.shape(data)[1] + col_num = np.shape(data)[2] + target = np.full((frame_num,row_num,col_num,2),0) + target_spike = np.full((frame_num,row_num,col_num,2),0) + target_all = np.full((frame_num,1),0) + + if type == 0: + for i in range(1, frame_num): + previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) 
+ #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if j < active_pixels[0][l]: ##detect the movement of row + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + #print(target) + elif type == 1: + for i in range(1, frame_num): + previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if j > active_pixels[0][l]: ##detect the movement of row + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + elif type == 2: + for i in range(1, frame_num): + previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if k > active_pixels[1][l]: ##detect the movement of col + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + elif type == 3: + for i in range(1, frame_num): + previous_event = data[i-1,:,:] + active_pixels = 
np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if k < active_pixels[1][l]: ##detect the movement of col + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + + return target,target_all + + def expert_hopfield2(self, data, type,factor,hist_threshold): + # data: input data format:[frame, row, coloum, event number] + # type: detect temporal movements: 0-(top, down), 1-(bottom, up) + # factor: the threshold scale factor (default = 0.7) + # hist_threshold: spike histgram threshold + + frame_num = np.shape(data)[0] + row_num = np.shape(data)[1] + col_num = np.shape(data)[2] + target = np.full((frame_num,row_num,col_num,2),0) + target_spike = np.full((frame_num,row_num,col_num,2),0) + target_all = np.full((frame_num,1),0) + + if type == 0: + for i in range(1, frame_num): + if i == 0: + previous_event = data[i,:,:] #self checking + else: + previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if j < active_pixels[0][l]: ##detect the movement of row + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + # print("target pixels:", target_all[i]) + #print(target) + elif type == 1: + for i in range(1, frame_num): + if i == 0: + previous_event = data[i,:,:] #self checking + else: + 
previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if j > active_pixels[0][l]: ##detect the movement of row + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + elif type == 2: + for i in range(1, frame_num): + if i == 0: + previous_event = data[i,:,:] #self checking + else: + previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if k > active_pixels[1][l]: ##detect the movement of col + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + elif type == 3: + for i in range(1, frame_num): + if i == 0: + previous_event = data[i,:,:] #self checking + else: + previous_event = data[i-1,:,:] + active_pixels = np.nonzero(previous_event) + #print("activel pixels:", len(active_pixels[0])) + for j in range(0,row_num): + for k in range(0, col_num): + if data[i,j,k] == 1: # if there is an event + for l in range(0, len(active_pixels[0])): + if k < active_pixels[1][l]: ##detect the movement of col + target[i,j,k,0] = target[i,j,k,0]+1 + if target[i,j,k,0] > len(active_pixels[0]) *factor: + target_spike [i,j,k,0] = 1 + + #target[i,j,k,0] = target[i,j,k,0]/len(active_pixels[0]) + target_all[i] = 
sum(sum(target_spike [i,:,:,0])) + #print("target pixels:", target_all[i]) + else: + pass + ### filtering sparse events that caused by the devices + + res = 5 + distrubtion_resolution = list(i for i in range (0, row_num,res)) + target_spike_hist = np.full((frame_num,len(distrubtion_resolution)-1),0) + if type == 0 or type == 1: + #threshold = 40 ## if below 40, there are background noises + for i in range(1,frame_num): + active_pixels = np.nonzero(target_spike[i,:,:,0]) + target_spike_hist[i,:] = np.histogram(active_pixels[0], bins=distrubtion_resolution)[0] + + elif type == 2 or type == 3: + #threshold = 30 ## if below 40, there are background noises + for i in range(1,frame_num): + active_pixels = np.nonzero(target_spike[i,:,:,0]) + target_spike_hist[i,:] = np.histogram(active_pixels[1], bins=distrubtion_resolution)[0] + + #position_spike = sum(target_spike_hist[:,0:-1]) + + + ### check the movement among timing + #hist_threshold = 15 ## #25 + target_spike_hist [target_spike_hist < hist_threshold] = 0 + + ## find active area index + target_areas_index = np.full((frame_num,len(distrubtion_resolution)-1),-1) + for i in range(1,frame_num): ## experts sequence + for j in range(0, len(distrubtion_resolution)-1): ## active area index + if target_spike_hist[i,j] > 0: + target_areas_index[i,j] =j + else: + target_areas_index[i,j] =-1 ## there is no active areas + + + ## detect movement directions + areas_index = [] + for i in range(0, frame_num): + temporal = max(target_areas_index[i,:]) + if temporal == -1: + areas_index.append(-1) + #pass + else: + areas_index.append(max(target_areas_index[i,:])) + + + # print(areas_index) + location_bit = [0 for i in range(0,4)] + ## start to decode areas_index(neural activities) + active_index_1 = [i for i in areas_index if i >= 0] ## delete non active area -1 + time_bit = 100 + number = len(set(active_index_1)) + spike_flag = 0 + feature_ascent = 0 + feature_swing = 0 + if number != 0: ## if there is an activity or activities + if 
type == 0 or type == 1: + ##calculat the decoding values: + max_value = max(active_index_1) + min_value = min(active_index_1) + decoding_value = max_value - min_value + ##check the spike intensities distributions + position_flag = 0 + frame_index = [i for i, j in enumerate(areas_index) if j == max_value] ## find the max_position frame index. This can be either max_position or min_position + while position_flag == 0: + + try: + active_pixels = np.nonzero(target[frame_index[0]-1 ,:,:,0]) ## check the individual neuron intensitiy + except: + return areas_index, 0, 0 + + spike_intensity_distribution = np.histogram(active_pixels[0], bins=distrubtion_resolution)[0] ## check the spike intensity disutrbutions + required_position = [i for i, j in enumerate(spike_intensity_distribution) if j > 5] ##10 ## handcraft number 10, is the number less than 10, this indicates noises. + if len(required_position) > 1: + position_flag = 1 + else: + frame_index[0] = frame_index[0] +1 + + if frame_index[0] >= np.shape(target)[0]: + position_flag = 1 + spike_flag = 1 + + if spike_flag == 0: + required_position_mean_spike = np.zeros_like(required_position) + for i in range(0, len(required_position)): + target_area = target[frame_index[0]-1,required_position[i]*res:(required_position[i]+1)*res,:,0] + required_position_mean_spike[i] = np.true_divide(target_area.sum(),(target_area!=0).sum()) + + if required_position_mean_spike[0] > required_position_mean_spike[-1]: ## if the bottom area spike intensities over the top spike intensities + spike_flow = 'top->down' #the spike flow is defined as the spike intensities flow from small values to the big values. 
+ elif required_position_mean_spike[0] < required_position_mean_spike[-1]: # if the bottom area spike intensities smaller the top spike intensities + spike_flow = 'bottom->up' + else: + spike_flow = 'unknown' + else: + spike_flow = 'unknown' + # print('spike_flow:',spike_flow) + elif type == 2 or type == 3: + ##calculat the decoding values: + max_value = max(active_index_1) + min_value = min(active_index_1) + decoding_value = max_value - min_value + ##check the spike intensities distributions + position_flag = 0 + frame_index = [i for i, j in enumerate(areas_index) if j == max_value] ## find the max_position frame index. This can be either max_position or min_position + while position_flag == 0: + + #frame_index = 3 + try: + active_pixels = np.nonzero(target[frame_index[0]-1 ,:,:,0]) ## check the individual neuron intensitiy + except: + return areas_index, 0, 0 + + spike_intensity_distribution = np.histogram(active_pixels[1], bins=distrubtion_resolution)[0] ## check the spike intensity disutrbutions + required_position = [i for i, j in enumerate(spike_intensity_distribution) if j > 5] ## handcraft number 10, is the number less than 10, this indicates noises. + if len(required_position) > 1: + position_flag = 1 + else: + frame_index[0] = frame_index[0] +1 + + required_position_mean_spike = np.zeros_like(required_position) + for i in range(0, len(required_position)): + target_area = target[frame_index[0]-1,:,required_position[i]*res:(required_position[i]+1)*res,0] + required_position_mean_spike[i] = np.true_divide(target_area.sum(),(target_area!=0).sum()) + + if required_position_mean_spike[0] > required_position_mean_spike[-1]: ## if the bottom area spike intensities over the top spike intensities + spike_flow = 'right->left' #the spike flow is defined as the spike intensities flow from small values to the big values. 
+ elif required_position_mean_spike[0] < required_position_mean_spike[-1]: # if the bottom area spike intensities smaller the top spike intensities + spike_flow = 'left->right' + else: + spike_flow = 'unknown' + # print('spike_flow:',spike_flow) + else: + print('hopfield network type error') + + #print('test') + max_position = [i for i, j in enumerate(areas_index) if j == max_value] + #print(max_position) + min_position = [i for i, j in enumerate(areas_index) if j == min_value] + #print(min_position) + v1 = Visualization(target[:,:,:,0]) + + #if decoding_value == number-1 and v1.areConsecutive([abs(x) for x in max_position],len(max_position)) == False and decoding_value != 0: ## if there is a pattern format : 3-2-3 or 2-3-2 + if 1 == 1: ## if there is a pattern format : 3-2-3 or 2-3-2 + ## the pattern format: 3-2-3 or 1-2-1 there is a local minial points + # max_position = [i for i, j in enumerate(areas_index) if j == max_value] + hopfield_bit = 0 + + if type == 0 or type == 1: + ascent_bit = 0 + swing_bit = 0 + sense_range = 1 + sense_flag = 0 + + ascent_threshold = 1 + swing_threshold = 2 + + + for i in range(0,len(areas_index)): ## loop in frame + sense_flag_as = 0 + sense_flag_sw = 0 + j = 1 + max_value = 100 + min_value = 0 + while sense_flag_as == 0 and sense_flag_sw == 0: ## loop in sense range + if i+j +1 > len(areas_index)-1: + sense_flag_as = 1 + sense_flag_sw = 1 + else: + if areas_index[i] != -1 and areas_index[i+j] != -1 and areas_index[i+j+1] != -1: + #if areas_index[i] > areas_index[i+j] and areas_index[i] < max_value: ## detect pattern 3-2-X-X-1 + if areas_index[i] > areas_index[i+j] and areas_index[i+j] > areas_index[i+j+1]: + direction_flow = 'top->down' + if direction_flow == spike_flow: + ascent_bit = ascent_bit +1 + sense_flag_as = 1 + #max_value = areas_index[i+j] + elif areas_index[i] < areas_index[i+j] and areas_index[i+j] < areas_index[i+j+1]: + direction_flow = 'bottom->up' + if direction_flow == spike_flow: + ascent_bit = ascent_bit +1 + 
sense_flag_as = 1 + min_value = areas_index[i] + else: + pass + ## detect swing bit + if (areas_index[i] > areas_index[i+j]) and (areas_index[i+j]< areas_index[i+j+1]): #5 + direction_flow = 'left->right' + #if direction_flow == spike_flow: + swing_bit = swing_bit +1 + sense_flag_sw = 1 + max_value = areas_index[i] + elif (areas_index[i] < areas_index[i+j]) and (areas_index[i+j]> areas_index[i+j+1]): #5 + direction_flow = 'right->left' + #if direction_flow == spike_flow: + swing_bit = swing_bit +1 + sense_flag_sw = 1 + min_value = areas_index[i] + else: + pass + + + if j >= sense_range: + sense_flag_sw = 1 + sense_flag_as = 1 + else: + j= j+1 + # print('ascent_bit:', ascent_bit) + # print('swing_bit:',swing_bit) + + + if ascent_bit >= ascent_threshold: + feature_ascent = 1 + else: + feature_ascent = 0 + + if swing_bit >= swing_threshold: + feature_swing = 1 + else: + feature_swing = 0 + + elif type == 2 or type == 3: + ascent_bit = 0 + swing_bit = 0 + sense_range = 1 + sense_flag = 0 + swing_threshold = 2 + ascent_threshold = 1 + for i in range(0,len(areas_index)): ## loop in frame + sense_flag_as = 0 + sense_flag_sw = 0 + j = 1 + max_value = 100 + min_value = 0 + while sense_flag_as == 0 and sense_flag_sw == 0: ## loop in sense range + if i+j+1 > len(areas_index)-1: + sense_flag_as = 1 + sense_flag_sw = 1 + else: + if areas_index[i] != -1 and areas_index[i+j] != -1 and areas_index[i+j+1] != -1: + if (areas_index[i] > areas_index[i+j]) and (areas_index[i+j]< areas_index[i+j+1]): #5 + direction_flow = 'left->right' + #if direction_flow == spike_flow: + swing_bit = swing_bit +1 + sense_flag_sw = 1 + #max_value = areas_index[i] + elif (areas_index[i] < areas_index[i+j]) and (areas_index[i+j]> areas_index[i+j+1]): #5 + direction_flow = 'right->left' + #if direction_flow == spike_flow: + swing_bit = swing_bit +1 + sense_flag_sw = 1 + #min_value = areas_index[i] + else: + pass + + if areas_index[i] != -1 and areas_index[i+j] != -1 and areas_index[i+j+1] != -1: + #if 
areas_index[i] > areas_index[i+j] and areas_index[i] < max_value: ## detect pattern 3-2-X-X-1 + if areas_index[i] > areas_index[i+j] and areas_index[i+j] > areas_index[i+j+1]: ## detect pattern 3-2-X-X-1 + direction_flow = 'top->down' + #if direction_flow == spike_flow: + ascent_bit = ascent_bit +1 + sense_flag_as = 1 + #max_value = areas_index[i+j] + elif areas_index[i] < areas_index[i+j] and areas_index[i+j] < areas_index[i+j+1]: + direction_flow = 'bottom->up' + #if direction_flow == spike_flow: + ascent_bit = ascent_bit +1 + sense_flag_as = 1 + #min_value = areas_index[i] + else: + pass + if j >= sense_range: + sense_flag_sw = 1 + sense_flag_as = 1 + else: + j= j+1 + # print('ascent_bit:', ascent_bit) + # print('swing_bit:',swing_bit) + + if ascent_bit >= ascent_threshold: + feature_ascent = 1 + else: + feature_ascent = 0 + + if swing_bit >= swing_threshold: + feature_swing = 1 + else: + feature_swing = 0 + else: + pass + #if hopfield_bit == 1: + # print('direction_flow:',direction_flow) + + # print(feature_ascent,feature_swing) + return areas_index, feature_ascent, feature_swing + + + + def unitC_space_expert1(self, testdata): + # e1 = SGF_expert() + self.thres_s3 = [int(t) for t in self.args.thres_s3.split('_')] + self.thres_s4 = [int(t) for t in self.args.thres_s4.split('_')] + self.thres_s5 = [int(t) for t in self.args.thres_s5.split('_')] + + space_neuron3 = self.expert_space(int(self.args.resolution_s3.split('_')[0]), int(self.args.resolution_s3.split('_')[1]), \ + testdata, self.thres_s3, [0,0], thres_bit=self.args.thres_bit_s3, \ + thres_step=self.args.thres_step_s3, thres_inc_factor=[1,-1], if_vote=self.args.vote_thres_step) + + if space_neuron3[1][0] == 0: + space_neuron4_1 = self.expert_space(int(self.args.resolution_s4.split('_')[0]), int(self.args.resolution_s4.split('_')[1]), \ + testdata[27:,:,:], self.thres_s4, [0,0], thres_bit=self.args.thres_bit_s4, \ + thres_step=self.args.thres_step_s4, thres_inc_factor=[1,-1], 
if_vote=self.args.vote_thres_step) + space_neuron4_2 = self.expert_space(int(self.args.resolution_s4.split('_')[0]), int(self.args.resolution_s4.split('_')[1]), \ + testdata[0:27,:,:], self.thres_s4, [0,0], thres_bit=self.args.thres_bit_s4, \ + thres_step=self.args.thres_step_s4, thres_inc_factor=[1,-1], if_vote=self.args.vote_thres_step) + + if space_neuron4_2[0][0] == 1: + predict_event = "2" + elif space_neuron4_1[0][0] == 1: + predict_event = "10" + else: + predict_event = "10" + else: + predict_event = "1+8+9+10" + # print(space_neuron3.flatten(), space_neuron4_1.flatten(), space_neuron4_2.flatten()) + return predict_event + + def unitC_space_expert2(self, testdata): + testdata_sum = np.sum(testdata,0) + + x, y = self.cal_center(testdata_sum[60:110,:,:]) + x, y = int(x), int(y)+60 + active_zone = testdata_sum[y-20:y+20, x-30:x+30, :] + active_zone = np.where((active_zone>5)&(active_zone<40), 1, 0) + + thres1 = self.args.test5 + testdata_sum = testdata_sum[self.args.test1:self.args.test2,:,:] + num_list = np.zeros(testdata_sum.shape[1]) + for x in range(testdata_sum.shape[1]): + cnt = 0 + for y in range(testdata_sum.shape[0]): + if testdata_sum[y][x] >= thres1: + cnt += testdata_sum[y][x] + num_list[x] = cnt + + smooth_factor = self.args.test6 + num_list = convolve(num_list, np.ones(smooth_factor))/smooth_factor + + increase_list = list() + skip = 1 + increase_flag = 0 + thres = self.args.test7 + for num_i in range(int(len(num_list[:-1]))): + if num_list[(num_i+1)*skip] > num_list[num_i*skip]+thres: + increase_flag = 1 + elif num_list[(num_i+1)*skip] < num_list[num_i*skip]-thres: + increase_flag = -1 + increase_list.append(increase_flag) + + edge_list = list() + for i_i in range(len(increase_list[:-2])): + if increase_list[i_i+1] == 1 and increase_list[i_i] == -1: + edge_list.append(1) + elif increase_list[i_i+1] == -1 and increase_list[i_i] == 1: + edge_list.append(1) + else: + edge_list.append(0) + edge_num = sum(edge_list[self.args.test3:self.args.test4]) + 
+ if edge_num != 1: + predict_event = "1+9+10" + elif active_zone.sum()<867: + predict_event = "1+9+10" + else: + predict_event = "8" + + return predict_event + + def unitC_space_expert3(self, testdata): + testdata[testdata < 0] = 1 + testdata_sum = np.sum(testdata,0) + x_c, y_c = self.cal_center(np.where(testdata_sum>10,1,0)) + x_c, y_c = int(x_c), int(y_c) + x1, y1 = self.cal_center(np.where(testdata_sum[:,0:x_c,:]>10,1,0)) + x2, y2 = self.cal_center(np.where(testdata_sum[:,x_c:128,:]>10,1,0)) + delta_h = abs(y2-y1) + # print(y1, y2, abs(y2-y1)) + if delta_h < 9: + predict_event = "1+9" + else: + predict_event = "10" + + return predict_event + + def cal_center(self, input_img): + cnt = 0 + x_acc = 0 + y_acc = 0 + for y in range(input_img.shape[0]): + for x in range(input_img.shape[1]): + if input_img[y][x] > 0: + cnt += 1 + x_acc += x + y_acc += y + x_center = x_acc / cnt + y_center = y_acc / cnt + return [x_center, y_center] diff --git a/inference.py b/inference.py new file mode 100644 index 0000000..4d296dc --- /dev/null +++ b/inference.py @@ -0,0 +1,258 @@ +from os import terminal_size +from dvsgesture_i import DatasetGesture_i +from expert import SGF_expert +import numpy as np +from STLayer import Spatiotemporal_Core +from logger import Logger +from agent import SGF_agent +import agent +from write_excel import write_excel +import cfg +from prior_knowledge import SGF_prior_knowledge +import knowledge +import train +import copy +from visualization_utils import save_visualize, save_curve, visualize, save_vis_formatted +from train import SGF_train + +class SGF_inference(object): + def __init__(self, args): + super().__init__() + self.args = args + self.exp = args.exp + self.save_excel = args.save_excel + self.save_excel_path = args.save_excel_path + self.selected_event = [event for event in args.selected_events.split("_")] + self.sub_events = list() + for e in self.args.selected_events.split("_"): + self.sub_events.extend(e.split("+")) + self.event_num = 
len(self.selected_event) + self.sub_event_num = len(self.sub_events) + self.st_paras = [int(st_para) for st_para in args.st_paras.split("_")] + self.thres_s1 = [int(t) for t in args.thres_s1.split("_")] + self.thres_s2 = [int(t) for t in args.thres_s2.split("_")] + # self.thres_s3 = list() + # self.thres_s3.append([int(t) for t in args.thres_s3.split("_")][0:2]) + # self.thres_s3.append([int(t) for t in args.thres_s3.split("_")][2:4]) + self.thres_s3 = [int(t) for t in args.thres_s3.split("_")] + self.thres_s4 = [int(t) for t in args.thres_s4.split("_")] + self.thres_s5 = [int(t) for t in args.thres_s5.split("_")] + self.thres_bit_s1 = args.thres_bit_s1 + self.thres_bit_s2 = args.thres_bit_s2 + self.thres_bit_s3 = args.thres_bit_s3 + self.thres_bit_s4 = args.thres_bit_s4 + self.thres_bit_s5 = args.thres_bit_s5 + self.thres_step_s1 = args.thres_step_s1 + self.thres_step_s2 = args.thres_step_s2 + self.thres_step_s3 = args.thres_step_s3 + self.thres_step_s4 = args.thres_step_s4 + self.thres_step_s5 = args.thres_step_s5 + self.resolution_s1 = [int(t) for t in args.resolution_s1.split("_")] + self.resolution_s2 = [int(t) for t in args.resolution_s2.split("_")] + self.resolution_s3 = [int(t) for t in args.resolution_s3.split("_")] + self.resolution_s4 = [int(t) for t in args.resolution_s4.split("_")] + self.resolution_s5 = [int(t) for t in args.resolution_s5.split("_")] + self.hopfield_frame_para = [int(para) for para in self.args.hopfield_frame_para.split("_")] + self.code_mode = self.args.code_mode + + self.cnt_s3_2 = 0 + self.cnt_s3_10 = 0 + self.cnt_s3 = 0 + self.cnt_str = "" + self.hop_hist_2 = np.zeros((1,4)) + self.hop_hist_10 = np.zeros((1,4)) + self.hop2_hist_dict = dict() + for event in self.sub_events: + self.hop2_hist_dict[event] = np.zeros((1,8)) + + def init_save_dict(self): + self.cnt = 0 ## total number + self.correct = 0 ## correct one + self.correct_dict = dict() + self.cnt_dict = dict() + self.missing_dict = dict() + self.incorrect_dict = dict() + 
self.unique_pre_dict = dict() + self.unique_cor_dict = dict() + self.knowledge_pre_dict = dict() + self.knowledge_cor_dict = dict() + self.expert1_cnt = 0 + self.train_fail_cnt = 0 + self.train_succ_cnt = 0 + + def load_knowledge(self): + self.unita_id = self.ReadTxtName(cfg.code_path + "/data/{:}/UnitA_id.txt".format(self.exp)) + self.unita_knowledge = np.loadtxt(cfg.code_path + "/data/{:}/UnitA_information.txt".format(self.exp)) + self.unitc_id = self.ReadTxtName(cfg.code_path + "/data/{:}/UnitC_id.txt".format(self.exp)) + self.unitc_knowledge = np.loadtxt(cfg.code_path + "/data/{:}/UnitC_information.txt".format(self.exp)) + + def sgf_inference(self): + dataset = DatasetGesture_i(cfg.data_path) ## Read the inference dataset + batchsize = 1 + testdata = np.full((80, 128, 128, 1),0) ## data initializations + test_label = -1 + + self.init_save_dict() + self.load_knowledge() + for event in self.sub_events: + self.cnt_dict[event] = 0 + self.correct_dict[event] = 0 + self.incorrect_dict[event] = 0 + + ##-----------------------------------------------------------## + ## Inference Phase ## + ##-----------------------------------------------------------## + for i in range(0, dataset.test_len(), batchsize): + # data preparing + video, label = dataset.get_test_sample(i, reverse=False) + if np.shape(video)[0] < 80: + testdata[0:np.shape(video)[0],:,:,0] = video[:,:,:] + else: + testdata[:,:,:,0] = video[0:80,:,:] + test_label = label + + # SGF UnitA class 1+2+8+9+10/3/4+5/6+7 + ##-----------------------------------------------------------## + ## SGF UnitA feature vector score calculation ## + ##-----------------------------------------------------------## + # ST core 1 + stlayer1 = Spatiotemporal_Core(testdata, 3, 2, 3, 2) + stlayer1.Spaceprocessing() + stlayer1.Temporalprocessing() + stlayer1.Stprocessing() + stlayer1.stspike() + e1 = SGF_expert(self.args) + # Spatial SNN with feature index A and D + space_neuron1 = e1.expert_space(self.resolution_s1[0], 
self.resolution_s1[1], \ + stlayer1.ST_spike, self.thres_s1, [0,0], thres_bit=self.thres_bit_s1, \ + thres_step=self.args.thres_step_s1, thres_inc_factor=[-1,1], if_vote=self.args.vote_thres_step) + # Spatial SNN with feature index B and C + space_neuron2 = e1.expert_space(self.resolution_s2[0], self.resolution_s2[1], \ + stlayer1.ST_spike, self.thres_s2, [0,0], thres_bit=self.thres_bit_s2, \ + thres_step=self.args.thres_step_s2, thres_inc_factor=[1,-1], if_vote=self.args.vote_thres_step) + space_all = np.concatenate((space_neuron1,space_neuron2), axis = 0) ## jow results along rows, generate unique code + + knowledge_weight_dict_a = knowledge.knowledge_weight_dict_gen(self.args, self.unita_id, self.unita_knowledge) + predict_event_a = knowledge.knowledge_weight_dict_infer(self.args, knowledge_weight_dict_a, np.array(space_all)) + print(predict_event_a) + + testdata [testdata < 0] = 1 + stlayer2 = Spatiotemporal_Core(testdata, 1, 1, 2, 2, if_st_neuron_clear=True) + stlayer2.Spaceprocessing() ## space integration + stlayer2.Temporalprocessing() ## temporal integration + stlayer2.Stprocessing() ## space-temporal integration + stlayer2.stspike() ## generate acculumation spike + + # SGF UnitB class 4/5 6/7 + ##-----------------------------------------------------------## + ## SGF UnitB inference process ## + ##-----------------------------------------------------------## + if predict_event_a in ["4+5", "6+7"]: + final_results = np.full((1,2),0) + for j in range(4,7): ## threshold + threshold = j*0.15 + for k in range (3,8): ## frameskip + # Component E/F + area_index1, bit1,location_bit1, time1= e1.expert_hopfield(stlayer2.stcore[0:10*k:k,:,:,0],0,threshold) + area_index3, bit3,location_bit3, time3= e1.expert_hopfield(stlayer2.stcore[0:10*k:k,:,:,0],1,threshold) + area_index2, bit2,location_bit2,time2= e1.expert_hopfield(stlayer2.stcore[0:10*k:k,:,:,0],2,threshold) + area_index4, bit4,location_bit4,time4= e1.expert_hopfield(stlayer2.stcore[0:10*k:k,:,:,0],3,threshold) 
+ tagert13= np.stack((bit1,bit3),axis = 0) + tagert24= np.stack((bit2,bit4),axis = 0) + target1234 = np.concatenate((tagert13,tagert24),axis = 0) + location_bit = np.concatenate((location_bit1,location_bit2,location_bit3,location_bit4),axis = 0) + time_bit = [time1,time3,time2,time4] + pk1 = SGF_prior_knowledge() + final = pk1.clockwise_knowledge(target1234,location_bit, time_bit) + if final[0] == 1: + final_results[0,0] = final_results[0,0] +1 + elif final[1] == 1: + final_results[0,1] = final_results[0,1] +1 + print(final_results.flatten()) + if final_results[0,0] > final_results[0,1]: + predict_event = predict_event_a.split("+")[1] + else: + predict_event = predict_event_a.split("+")[0] + + + # SGF UnitC class 1/2/8/9/10 + ##-----------------------------------------------------------## + ## SGF UnitC feature vector score calculation ## + ##-----------------------------------------------------------## + elif predict_event_a in ["1+2+8+9+10"]: + # Spatial SNN with feature index G + predict_event = e1.unitC_space_expert1(stlayer1.ST_spike) + print(predict_event) + if "+" in predict_event: + predict_event = e1.unitC_space_expert2(testdata) + print(predict_event) + if "+" in predict_event: + predict_event = e1.unitC_space_expert3(testdata) + print(predict_event) + if "+" in predict_event: + # Temporal SNNs with feature index H, I, J and K. 
+ area_index1, ascent_bit1, swing_bit1= e1.expert_hopfield2(stlayer2.stcore[0:30:3,:,0:60,0], 0, 0.5, 14) + area_index3, ascent_bit3, swing_bit3= e1.expert_hopfield2(stlayer2.stcore[0:30:3,:,0:60,0], 1, 0.5, 14) + area_index2, ascent_bit2, swing_bit2= e1.expert_hopfield2(stlayer2.stcore[0:30:3,:,60:120,0], 0, 0.5, 14) + area_index4, ascent_bit4, swing_bit4= e1.expert_hopfield2(stlayer2.stcore[0:30:3,:,60:120,0], 1, 0.5, 14) + ascent_activites_td_left = ascent_bit1 + ascent_bit3 ## This for detection top->down and bottom-> up on the left area + ascent_activites_td_right = ascent_bit2 + ascent_bit4 ## This for detection top->down and bottom-> up on the right area + swing_activities_td_left = swing_bit1 + swing_bit3 + swing_activities_td_right = swing_bit2 + swing_bit4 + + area_index1, ascent_bit5, swing_bit5= e1.expert_hopfield2(stlayer2.stcore[0:30:3,:,0:60,0], 2, 0.5, 14) + area_index3, ascent_bit7, swing_bit7= e1.expert_hopfield2(stlayer2.stcore[0:30:3,:,0:60,0], 3, 0.5, 14) + area_index2, ascent_bit6, swing_bit6= e1.expert_hopfield2(stlayer2.stcore[0:30:3,:,60:120,0], 2, 0.5, 14) + area_index4, ascent_bit8, swing_bit8= e1.expert_hopfield2(stlayer2.stcore[0:30:3,:,60:120,0], 3, 0.5, 14) + ascent_activites_lr_left = ascent_bit5 + ascent_bit7 + ascent_activites_lr_right = ascent_bit6 + ascent_bit8 + swing_activities_lr_left = swing_bit5 + swing_bit7 + swing_activities_lr_right = swing_bit6 + swing_bit8 + + feature_bit = [np.where(ascent_activites_td_left+ascent_activites_td_right>0,1,0) , + np.where(swing_activities_td_left+swing_activities_td_right>0,1,0) , + np.where(ascent_activites_lr_left +swing_activities_lr_left>0,1,0) , + np.where( ascent_activites_lr_right + swing_activities_lr_right>0,1,0) ] + location_bit = [np.where(ascent_activites_td_left + ascent_activites_lr_left>0,1,0) , + np.where(swing_activities_td_left+swing_activities_lr_left>0,1,0) , + np.where(ascent_activites_td_right+ swing_activities_td_right>0,1,0) , + np.where( 
ascent_activites_lr_right+swing_activities_lr_right>0,1,0) ] + knowledge_bit = np.concatenate((feature_bit,location_bit),axis = 0) + # print(knowledge_bit) + knowledge_weight_dict_c = knowledge.knowledge_weight_dict_gen(self.args, self.unitc_id, self.unitc_knowledge) + predict_event_c = knowledge.knowledge_weight_dict_infer(self.args, knowledge_weight_dict_c, np.array(knowledge_bit)) + # print(knowledge_weight_dict_c.keys()) + # print(predict_event_c) + if predict_event_c == "9": + predict_event = "9" + else: + predict_event = "1" + + # Bypass class 3 + else: + predict_event = predict_event_a + + self.cnt += 1 + self.cnt_dict[str(test_label)] += 1 + if str(test_label) in predict_event: + self.correct += 1 + self.correct_dict[str(test_label)] += 1 + + info = "ID:{:} {:}/{:} label:{:} perdict:{:}".format(i, self.correct, self.cnt, test_label, predict_event) + print(info) + + print("Total test sample number:",self.cnt) + print("the accurate rate:", self.correct/self.cnt) + for i in self.correct_dict.keys(): + print("the",i,"event type accurate rate:", self.correct_dict[i]/self.cnt_dict[i]) + + def ReadTxtName(self, rootdir): + lines = [] + with open(rootdir, "r") as file_to_read: + while True: + line = file_to_read.readline() + if not line: + break + line = line.strip("\n") + lines.append(line) + return lines diff --git a/knowledge.py b/knowledge.py new file mode 100644 index 0000000..084ee5e --- /dev/null +++ b/knowledge.py @@ -0,0 +1,440 @@ +from os import remove +import numpy as np +import cfg +import copy +import scipy +import matplotlib.pyplot as plt +from sklearn import preprocessing +class Knowledge(object): + def __init__(self, args): + super().__init__() + self.args = args + + def knowledge_base(self, information, label, event_pre_id, expert1_pre_knowledge, ignore_list=[]): + self.events = list() + for e in self.args.selected_events.split("_"): + self.events.extend(e.split("+")) + # print("start decoding from active location areas:") + target = information # 
the outputs from space expert1 + s_results = self.check_similarity(target, label, self.args.selected_events.split("_")) + np.fill_diagonal(s_results, 1) + expert1_index= np.linspace(0,0,np.shape(s_results)[0]) + for i in range(0,np.shape(s_results)[0]): + expert1_index[i] = 1 + for j in range(0,np.shape(s_results)[1]): + if not label[j] in ignore_list and s_results[i][j] == 0: + expert1_index[i] = 0 + # if np.all(s_results[:,i] == 1): + # expert1_index[i] = 1 + # else: + # expert1_index[i] = 0 + useful_id = np.where(expert1_index ==1)[0] + expert1_knowledge = [0 for i in range (np.shape(target)[0])] + event_post_id = event_pre_id + expert1_post_knowledge = expert1_pre_knowledge + similarity = self.check_similarity(information, label, self.args.selected_events.split("_")) + np.fill_diagonal(similarity, 1) ## fill diagnoal to 1 + # print(similarity) + if self.args.strict_knowledge: + if np.all(similarity) == 1: + for i in range(0,np.shape(useful_id)[0]): + event_id = label[useful_id[i]] # get event id + expert1_knowledge = target[:,useful_id[i]] + event_post_id = np.append(event_post_id, event_id) + expert1_post_knowledge = np.vstack([expert1_post_knowledge, expert1_knowledge]) + else: + k1, id1 = expert1_pre_knowledge, event_pre_id + elif self.args.all_knowledge: + for id_i, event_id in enumerate(label): + event_post_id = np.append(event_post_id, event_id) + expert1_post_knowledge = np.vstack([expert1_post_knowledge, information[:,id_i]]) + else: + for i in range(0,np.shape(useful_id)[0]): + event_id = label[useful_id[i]] # get event id + expert1_knowledge = target[:,useful_id[i]] + event_post_id = np.append(event_post_id, event_id) + expert1_post_knowledge = np.vstack([expert1_post_knowledge, expert1_knowledge]) + + if len(event_post_id) == 1 and event_post_id[0] == -1: + k1, id1 = expert1_pre_knowledge, event_pre_id + else: + k1, id1 = self.knowledge_distillation(event_post_id, expert1_post_knowledge) + # k1, id1 = expert1_post_knowledge, event_post_id + + # for 
knowledge in k1: + # if np.all(knowledge == np.array([0,0,0,0,0,1])): + # print(knowledge) + return k1, id1 + + def check_similarity(self, data, label, select_event=None): + if data.shape[0] == data.size: + data = data.reshape(data.shape[0], 1) + similarity = np.full((np.shape(data)[1],np.shape(data)[1]),0) + + macro_class_dict = dict() + for l in label: + for macro_class in select_event: + if l in macro_class.split("+"): + macro_class_dict[l] = macro_class + break + + for i in range(0, np.shape(data)[1]): ## event + for j in range(0, np.shape(data)[1]): ## event + if np.all(data[:,i] == data[:,j]) and macro_class_dict[label[i]] != macro_class_dict[label[j]]: + similarity[i,j] = 0 + else: + similarity[i,j] = 1 + return similarity + + def knowledge_distillation(self, event_post_id, expert1_post_knowledge): + + if self.args.knowledge_distillation: + s_results = self.check_similarity(np.transpose(expert1_post_knowledge), self.args.selected_events.split("_")) + np.fill_diagonal(s_results, 1) + il = np.tril_indices(np.shape(s_results)[0]) + s_results[il] = 1 + for i in range(0,np.shape(s_results)[0]): + if np.any(s_results[i,:] == 0): + print("start knowledge distillation") + index = np.where(s_results[i,:] == 0)[0] + for j in range(0,np.shape(index)[0]): + expert1_post_knowledge[index[j],:] = -1 + event_post_id[index[j]] = -1 + + expert1_post_knowledge = expert1_post_knowledge[(expert1_post_knowledge >=0).all(axis = 1)] + event_post_id = [id for id in event_post_id if id not in ['-1',-1]] + # event_post_id = event_post_id.reshape((len(event_post_id), 1)) + # event_post_id = event_post_id[event_post_id >=0] + else: + return expert1_post_knowledge, event_post_id + return expert1_post_knowledge, event_post_id + + # def knowledge_classifications(self,k): + # raw_data = np.load(k) + # number = np.shape(raw_data)[0] + # pattern_length = np.shape(raw_data)[1] + # pattern =[] + # pattern_id = [-1 for i in range(0,number)] + # index = 0 + # for i in range (0, number): + # flag 
= 0 + # j = 0 + # while flag == 0: + # if i == j or np.all(raw_data[i,:] == 0): + # pass + + # elif np.array_equal(raw_data[i,:], raw_data[j,:]) == True: + # if pattern_id[j] != -1: + # pattern_id[i] = pattern_id[j] + # flag = 1 + # elif pattern_id[j] == -1 and pattern_id[i] == -1: + # pattern_id[i] = index + # flag = 1 + # index = index +1 + # else: + # pass + + # else: + # pass + # j = j+1 + # if j == number and flag == 0: + # if np.all(raw_data[i,:] == 0): + # flag = 1 + # else: + # flag = 1 + # pattern_id[i] = index + # index = index +1 + + # ## record pattern Id and the weights. + # pattern_num = np.amax(pattern_id) + # pattern_weight = [0 for i in range(0,pattern_num)] + # for i in range(0, pattern_num): + # for j in range(0, number): + # if i == pattern_id[j]: + # pattern_weight[i] = pattern_weight[i] +1 + + # ## record pattern information + # pattern_information = np.full((pattern_num,pattern_length),0) + # for i in range(0,pattern_num): + # flag = 0 + # j = 0 + # while flag == 0: + # if i == pattern_id[j]: + # pattern_information[i,:] = raw_data[j,:] + # flag = 1 + # else: + # pass + # j = j +1 + + # return pattern_weight,pattern_information + + + + + # def knowledge_inference(self, test_data, k2,kw2): + # k1 = np.load(test_data) + # test_num = np.shape(k1)[0] + + # know_leng = np.shape(k1)[1] + + # test_result = [0 for i in range (0, test_num)] + # know_num = np.shape(k2)[0] + # kw2 = preprocessing.normalize([kw2]) + # test_sim = np.full((test_num,know_num),0,float) + # for i in range(0,test_num): + # for j in range(0,know_num): + # if np.array_equal(k1[i,:], k2[j,:]) == True: + # test_result[i] = know_leng + # else: + # for k in range(0, know_leng): + # if k1[i,k] == k2[j,k]: + # test_sim[i,j] = test_sim[i,j]+1 + # test_sim[i,j]= test_sim[i,j]/ know_leng*kw2[0][j] + # if test_result[i] == know_leng: + # pass + # else: + # test_result[i] = sum(test_sim[i,:]) + # #print(test_sim) + # print('test_result:', test_result) + # return test_result + + + # def 
# knowledge_dis(self, k1, k2, kw1, kw2):  (orphaned header of commented-out
# legacy code; its ~40-line commented-out body was removed in this revision)

def knowledge_weight_dict_gen(args, id_list, k_list):
    """Build {event_id: {code_string: count}} from training codes.

    `id_list[i]` labels the code vector `k_list[i]`.  Codes are keyed by
    their int32 string repr, e.g. '[1 0 0 0 0 0 0 0]'.  Only ids that are
    valid sub-events of --selected_events are counted.
    """
    knowledge_weight_dict = dict()
    for id in id_list:
        knowledge_weight_dict[id] = dict()

    sub_events = list()
    for e in args.selected_events.split("_"):
        sub_events.extend(e.split("+"))
    for k_i, k in enumerate(k_list):
        if id_list[k_i] in sub_events:
            key = str(k.astype(np.int32))
            # NOTE(review): this all-zero filter is hard-coded for length-8
            # codes; all-zero codes of any other length are never filtered —
            # confirm intended.
            if key != '[0 0 0 0 0 0 0 0]':
                knowledge_weight_dict[id_list[k_i]][key] = \
                    knowledge_weight_dict[id_list[k_i]].get(key, 0) + 1
    return knowledge_weight_dict

def knowledge_weight_dict_dis(args, knowledge_weight_dict):
    """Cross-event 'distillation': drop every code string that occurs under
    two DIFFERENT event ids, since such a code cannot discriminate them.

    BUG FIX: the original inner loop iterated knowledge_weight_dict[id1]
    twice (`for k2 in knowledge_weight_dict[id1]`), so every key matched
    itself and ALL knowledge was discarded whenever at least two event ids
    existed.  The comparison must be against id2's keys.
    """
    knowledge_weight_dict_dis = dict()
    remove_list = list()
    for id1 in knowledge_weight_dict.keys():
        for id2 in knowledge_weight_dict.keys():
            if id1 != id2:
                for k1 in knowledge_weight_dict[id1].keys():
                    for k2 in knowledge_weight_dict[id2].keys():  # was [id1] (bug)
                        if k1 == k2:
                            remove_list.append(k1)
    remove_list = list(set(remove_list))
    for id in knowledge_weight_dict.keys():
        knowledge_weight_dict_dis[id] = dict()
        for k in knowledge_weight_dict[id].keys():
            if k not in remove_list:
                knowledge_weight_dict_dis[id][k] = knowledge_weight_dict[id][k]
    return knowledge_weight_dict_dis

def knowledge_score_dict_gen(knowledge_weight_dict):
    """Normalise per-event code counts into per-event probabilities."""
    knowledge_score_dict = dict()
    for id in knowledge_weight_dict.keys():
        knowledge_score_dict[id] = dict()
        # Hoisted out of the inner loop (the original recomputed it per key).
        total = sum(knowledge_weight_dict[id].values())
        for k in knowledge_weight_dict[id].keys():
            knowledge_score_dict[id][k] = knowledge_weight_dict[id][k] / total
    return knowledge_score_dict

def knowledge_weight_dict_infer(args, knowledge_weight_dict, knowledge_bit):
    """Predict an event (or macro-class) label for one observed code vector.

    Exact code match wins when unique; otherwise events are ranked by a
    per-bit weighted similarity score.  The prediction is then mapped back
    to UnitA's macro classes, except for the UnitC sub-problem {1, 9, 10}
    which stays fine-grained.
    """
    # Exact-match lookup of the observed code in every event's knowledge.
    predict_result = dict()
    for event_id in knowledge_weight_dict.keys():
        for k in knowledge_weight_dict[event_id].keys():
            if str(knowledge_bit.astype(np.int32)) == k:
                predict_result[event_id] = knowledge_weight_dict[event_id][k]

    knowledge_score_dict = knowledge_score_dict_gen(knowledge_weight_dict)

    if len(predict_result) == 1:
        # Exactly one event matched verbatim.
        predict_event = list(predict_result.keys())[0]
    else:
        # Zero or several exact matches: per-bit similarity fallback.
        score_dict = dict()
        for event_id in knowledge_score_dict.keys():
            id_score = 0
            for k in knowledge_score_dict[event_id].keys():
                # Parse '[1 0 ...]' back into a bit vector (bits are 0/1).
                k_np = np.array([int(b) for b in k[1:-1].split(" ")])
                score = 0
                for bit_i, bit in enumerate(k_np):
                    if bit == knowledge_bit[bit_i]:
                        score += knowledge_score_dict[event_id][k] / len(k_np)
                id_score += score
            score_dict[event_id] = id_score

        # On ties the LAST max-scoring id (dict insertion order) wins,
        # matching the original loop's behaviour.
        max_score = max(score_dict.values())
        for event_id, score in score_dict.items():
            if score == max_score:
                predict_event = event_id

    if sorted(list(knowledge_weight_dict.keys())) != sorted(['1', '9', '10']):
        if predict_event in ['1', '2', '8', '9', '10']:
            predict_event = '1+2+8+9+10'
        elif predict_event in ['3']:
            predict_event = '3'
        elif predict_event in ['4', '5']:
            predict_event = '4+5'
        elif predict_event in ['6', '7']:
            predict_event = '6+7'

    return predict_event

def knowledge_hist(args, id_list, n_list):
    """Accumulate per-event bit histograms and normalise them to [-1, 1].

    Side effects: prints each histogram and writes hist_raw.txt /
    hist_norm.txt under the experiment's data directory.
    Returns {event_id: normalised_histogram_column_vector}.
    """
    # Zero activations are treated as negative evidence (-1).
    n_neg = np.where(n_list == 0, -1, n_list)

    # The original shadowed the function name with this local dict; renamed.
    hist_dict = dict()
    for row_i, event_id in enumerate(id_list):
        column = n_neg[row_i].reshape((len(n_neg[row_i]), 1))
        if event_id not in hist_dict.keys():
            hist_dict[event_id] = copy.deepcopy(column)
        else:
            hist_dict[event_id] += column

    value_list = list()
    for key, value in hist_dict.items():
        print(key, value.flatten())
        value_list.append(value.flatten())
    np.savetxt(cfg.code_path + "/data/" + args.exp + "/hist_raw.txt", np.array(value_list), fmt='%d')

    print("knowledge_hist:")
    for key, value in hist_dict.items():
        print(key, value.flatten())

    # Normalise positive and negative mass independently by their sums.
    # NOTE(review): divides by zero when an event has no positive (or no
    # negative) bits at all — confirm inputs guarantee both signs occur.
    for event_id, hist in hist_dict.items():
        hist_pos = np.where(hist > 0, hist, 0)
        hist_neg = np.where(hist < 0, hist, 0)
        pos_max = np.sum(hist_pos)
        neg_max = -np.sum(hist_neg)
        # TODO: add a sigmoid here (translated from the original Chinese note)
        hist_norm = np.zeros_like(hist)
        hist_norm = hist_norm + hist_pos / float(pos_max)
        hist_norm = hist_norm + hist_neg / float(neg_max)
        hist_dict[event_id] = hist_norm

    print("knowledge_hist:")
    for key, value in hist_dict.items():
        print(key, value.flatten())

    value_list = list()
    for key, value in hist_dict.items():
        print(key, value.flatten())
        value_list.append(value.flatten())
    np.savetxt(cfg.code_path + "/data/" + args.exp + "/hist_norm.txt", np.array(value_list), fmt='%f')

    # (commented-out matplotlib bar-chart debug code removed in this revision)
    return hist_dict
# 0)   -- orphaned closing fragment of knowledge_hist's collapsed source line;
# the remainder of that function's tail is preserved below as comments so the
# live definition (earlier in this file) is the single source of truth:
#         # hist_pos = hist[hist>0]
#         # hist_neg = hist[hist<0]
#         pos_max = np.sum(hist_pos)
#         neg_max = -np.sum(hist_neg)
#         # TODO: add a sigmoid here (translated from the Chinese note)
#         hist_norm = np.zeros_like(hist)
#         hist_norm = hist_norm + hist_pos / float(pos_max)
#         hist_norm = hist_norm + hist_neg / float(neg_max)
#         knowledge_hist[id] = hist_norm
#     print("knowledge_hist:") ... np.savetxt(.../hist_norm.txt, fmt='%f')
#     (commented-out matplotlib bar-chart debug code)
#     return knowledge_hist

def gen_knowledge_hist_nobias(knowledge_hist):
    """Remove the shared per-bit bias from a set of event histograms.

    For each bit position, the minimum value across all events is treated as
    a bias and subtracted, so at least one event is zero at every bit.
    Returns a new dict; the input histograms are not modified.
    """
    knowledge_hist_nobias = dict()
    # Seed the per-bit minima from an arbitrary first histogram.
    hist0 = list(knowledge_hist.values())[0]
    bias_list = list()
    for bit_i, bit in enumerate(hist0):
        min_bit = bit
        for id, hist in knowledge_hist.items():
            if hist[bit_i] <= min_bit:
                min_bit = hist[bit_i]
        bias_list.append(min_bit)
    bias_tensor = np.array(bias_list)
    for id, hist in knowledge_hist.items():
        knowledge_hist_nobias[id] = knowledge_hist[id] - bias_tensor

    return knowledge_hist_nobias

# ----------------------------------------------------------------------
# patch metadata: new file logger.py follows
# (diff --git a/logger.py b/logger.py, new file mode 0644)
# ----------------------------------------------------------------------
class Logger(object):
    """Minimal append-only file logger: each call appends '[LEVEL] msg'."""

    def __init__(self, file_name):
        # Path of the log file; the file is opened lazily on each write.
        self.file_name = file_name

# (Logger's level methods continue on the next collapsed source line)
_write_log(self,level,msg): + with open(self.file_name,"a") as log_file: + log_file.write("[{0}] {1}\n".format(level,msg)) + + def critial(self,msg): + self._write_log("CRITIAL",msg) + + def error(self,msg): + self._write_log("ERROR",msg) + + def warn(self,msg): + self._write_log("WARN",msg) + + def info(self,msg): + self._write_log("INFO",msg) + + def debug(self,msg): + self._write_log("DEBUG",msg) \ No newline at end of file diff --git a/main.py b/main.py new file mode 100644 index 0000000..dbc7d1e --- /dev/null +++ b/main.py @@ -0,0 +1,152 @@ +from train import SGF_train +from inference import SGF_inference +import os +import shutil +from expert import SGF_expert +import numpy as np +import cfg +import matplotlib.pyplot as plt +from mpl_toolkits.mplot3d import Axes3D +import matplotlib +from sklearn.decomposition import PCA +import argparse +import time + +### This is a demonstration version of the Spike Gating Flow ### + +def parse_args(): + ''' + Parse input arguments + ''' + parser = argparse.ArgumentParser(description='SGF 2.0.') + parser.add_argument('--train', action='store_true', help='train SGF', default=False) + parser.add_argument('--test', action='store_true', help='test SGF', default=False) + parser.add_argument('--train_test', action='store_true', help='train and test SGF', default=False) + parser.add_argument('--exp', type=str, help='event types', default='example') + parser.add_argument('--train_data_num', type=int, help='train_data_num', default=98) + parser.add_argument('--iter', type=int, help='iter', default=36) + parser.add_argument('--save_excel', action='store_true', help='save_excel', default=True) + parser.add_argument('--save_excel_path', type=str, help='save excel path', default='exp.xls') + parser.add_argument('--save_train_curve', action='store_true', help='train knowledge curve', default=True) + parser.add_argument('--each_sample_train_once', action='store_true', help='each_sample_train_once', default=False) + 
parser.add_argument('--inner_batch_random', action='store_true', help='each_sample_train_once', default=False) + parser.add_argument('--test_batch_list', type=str, help='test_batch_list', default='0') + parser.add_argument('--if_print', action='store_true', help='if_print', default=False) + + parser.add_argument('--selected_events', type=str, help='event types', default='1+2+8+9+10_3_4+5_6+7') + + parser.add_argument('--st_paras', type=str, help='c_space_num, s_thre, t_window, t_thre', default='3_2_3_2') + parser.add_argument('--save_st_core', action='store_true', help='save_excel', default=False) + + ## space expert parameters ## + parser.add_argument('--resolution_s1', type=str, help='col, row', default='2_1') + parser.add_argument('--thres_s1', type=str, help='space expert 1', default='20_10') + parser.add_argument('--thres_bit_s1', type=int, help='col, row', default=4) + parser.add_argument('--thres_step_s1', type=float, help='col, row', default=0.025) + + parser.add_argument('--resolution_s2', type=str, help='col, row', default='2_2') + parser.add_argument('--thres_s2', type=str, help='space expert 2', default='3_120') + parser.add_argument('--thres_bit_s2', type=int, help='col, row', default=3) + parser.add_argument('--thres_step_s2', type=float, help='col, row', default=0.2) + + parser.add_argument('--resolution_s3', type=str, help='col, row', default='2_1') + parser.add_argument('--thres_s3', type=str, help='space expert 3', default='17_8') + parser.add_argument('--thres_bit_s3', type=int, help='col, row', default=1) + parser.add_argument('--thres_step_s3', type=float, help='col, row', default=0.1) + + parser.add_argument('--resolution_s4', type=str, help='col, row', default='2_1') + parser.add_argument('--thres_s4', type=str, help='space expert 2', default='18_10') + parser.add_argument('--thres_bit_s4', type=int, help='col, row', default=1) + parser.add_argument('--thres_step_s4', type=float, help='col, row', default=0.1) + + 
parser.add_argument('--resolution_s5', type=str, help='col, row', default='2_1') + parser.add_argument('--thres_s5', type=str, help='space expert 2', default='22_8') + parser.add_argument('--thres_bit_s5', type=int, help='col, row', default=1) + parser.add_argument('--thres_step_s5', type=float, help='col, row', default=0.1) + + + parser.add_argument('--vote_thres_step', action='store_true', help='vote_thres_step', default=False) + + parser.add_argument('--only_s1s2', action='store_true', help='only_s1s2', default=False) + parser.add_argument('--no_s4', action='store_true', help='no_s4', default=False) + + parser.add_argument('--t_expert_skip', type=int, help='t_expert_skip', default=5) + parser.add_argument('--t_expert_scale', type=int, help='t_expert_scale', default=50) + + parser.add_argument('--hopfield', action='store_true', help='hopfield', default=True) + parser.add_argument('--hopfield_frame_para', type=str, help='40:3, 40:4, 50:5', default="40_3") + parser.add_argument('--hf_skip', type=int, help='hf_skip', default=16) + parser.add_argument('--hf_v_thres', type=float, help='hf_v_thres', default=0.5) + parser.add_argument('--hf_h_thres', type=float, help='hf_h_thres', default=0.5) + + + parser.add_argument('--reverse_inference', action='store_true', help='reverse_inference', default=False) + parser.add_argument('--logic_inference', action='store_true', help='logic_inference', default=False) + parser.add_argument('--pre_defined_logic', action='store_true', help='logic_inference', default=False) + parser.add_argument('--use_unique_code', action='store_true', help='use_unique_code', default=False) + parser.add_argument('--hist_predict', action='store_true', help='hist_predict', default=False) + parser.add_argument('--know_weight_predict', action='store_true', help='hist_predict', default=False) + parser.add_argument('--detailed_predict', action='store_true', help='detailed_predict', default=False) + + parser.add_argument('--code_mode', type=int, 
help='0:s1+s2+final; 1:s1+s2+act+final', default=3) + parser.add_argument('--strict_knowledge', action='store_true', help='strict_knowledge', default=False) + parser.add_argument('--all_knowledge', action='store_true', help='all_knowledge', default=False) + parser.add_argument('--knowledge_distillation', action='store_true', help='knowledge_distillation', default=False) + parser.add_argument('--sigmoid', action='store_true', help='sigmoid', default=False) + + parser.add_argument('--frame_skip_test', type=int, help='frame_skip', default=4) + parser.add_argument('--threshold_test', type=float, help='hf_v_thres', default=0.7) + parser.add_argument('--hist_threshold_test', type=float, help='hf_v_thres', default=5) + parser.add_argument('--offset1_test', type=str, help='hf_v_thres', default="0_42") + parser.add_argument('--offset2_test', type=str, help='hf_v_thres', default="0_42") + + parser.add_argument('--hop2_frame_skip', type=int, help='hop2_frame_skip', default=3) + parser.add_argument('--hop2_threshold', type=float, help='hop2_threshold', default=0.5) + parser.add_argument('--hop2_hist_threshold', type=int, help='hop2_hist_threshold', default=14) + + parser.add_argument('--test1', type=int, help='test1', default=60) + parser.add_argument('--test2', type=int, help='test1', default=105) + parser.add_argument('--test3', type=int, help='test1', default=35) + parser.add_argument('--test4', type=int, help='test1', default=105) + parser.add_argument('--test5', type=int, help='test1', default=6) + parser.add_argument('--test6', type=int, help='test1', default=13) + parser.add_argument('--test7', type=int, help='test1', default=7) + + # parser.add_argument() + + # if len(sys.argv) == 1: + # parser.print_help() + # sys.exit(1) + + localtime = time.asctime( time.localtime(time.time()) ) + parser.add_argument('--exp_date', type=str, help='localtime', default=str(localtime)) + print(str(localtime)) + + + args = parser.parse_args() + print(str(args)) + return args + + +if 
def parse_args(argv=None):
    """Parse command-line arguments for the deep-learning counterpart (main_dl).

    Args:
        argv: optional list of argument strings. Defaults to ``sys.argv[1:]``
            via :meth:`argparse.ArgumentParser.parse_args`; passing an explicit
            list keeps this function unit-testable.

    Returns:
        argparse.Namespace holding the training configuration (also printed
        to stdout, matching the original behavior).
    """
    def _int_list(text):
        # Parse a comma-separated list such as "0,3,5" into [0, 3, 5].
        # BUG FIX: the original used ``type=list``, which makes argparse call
        # list() on the raw string and split it into single characters
        # (e.g. "10" -> ['1', '0']), so --selected_events never worked from
        # the CLI.  The default value below is unchanged.
        return [int(tok) for tok in text.split(',') if tok != '']

    parser = argparse.ArgumentParser(description='SGF DL 1.0.')
    parser.add_argument('--train_data_num', type=int,
                        help='train_data_num,1,2,3,6,12,24,36,48,60,72,84,90,96',
                        default=1)
    parser.add_argument('--test_data_num', type=int, help='test_data_num', default=24)
    parser.add_argument('--epochs', type=int, help='epochs', default=1)
    parser.add_argument('--selected_events', type=_int_list, help='event types',
                        default=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
    parser.add_argument('--device', type=str, help='cpu or cuda', default='cuda')
    parser.add_argument('--lr', type=float, help='lr', default=0.01)
    parser.add_argument('--log_dir', type=str, help='log directory', default=None)
    parser.add_argument('--net', type=str, help='"convnet", "c3d", "i3d"', default="convnet")
    parser.add_argument('--momentum', type=float, help='momentum', default=0.9)

    train_cfg = parser.parse_args(argv)
    print(str(train_cfg))
    return train_cfg
import numpy as np


class SGF_prior_knowledge(object):
    """Hand-coded prior knowledge that decodes detected movement events into a
    clockwise / anti-clockwise decision.

    Movement-type indices used throughout (from the original author's notes):
        0: top->down   1: bottom->up   2: left->right   3: right->left
    Ordered (first, second) event pairs implying clockwise motion:
        (3,1), (1,2), (2,0), (0,3)
    Ordered pairs implying anti-clockwise motion:
        (1,3), (3,0), (0,2), (2,1)
    """

    def __init__(self):
        super().__init__()

    def prior_knowledge(self, actions, location, timings):
        """Classify a gesture as clockwise or anti-clockwise.

        Parameters
        ----------
        actions : 0/1 flags, one per movement type (indices 0-3 above).
        location : area-activation flags; only indices 2 and 11 are read here.
            # NOTE(review): assumes len(location) >= 12 -- confirm at caller.
        timings : frame index at which each movement type was first seen.
            WARNING: this list is sorted and overwritten in place, so the
            caller's copy is mutated.

        Returns
        -------
        final : [anti_clockwise, clockwise] as 0/1 flags (possibly [0, 0]
            when no rule fires).
        """
        if np.all(actions) == 1:  # all four movements seen -> clock or anti-clock [1111]
            print('all events happen.')
            timing_origion = timings.copy()
            timings.sort()
            min_frame = min(timings)
            # indices (movement types) that fired at the earliest frame
            first = [i for i, j in enumerate(timing_origion) if j == min_frame]

            # mask out the earliest frame(s) with a sentinel, then take the
            # next-earliest; 100 assumed to exceed any real frame index
            for i in range(0, len(timings)):
                if timings[i] == min_frame:
                    timings[i] = 100
            min_second_frame = min(timings)
            second = [i for i, j in enumerate(timing_origion) if j == min_second_frame]

            final = [0 for i in range(0, 2)]

            if len(first) == 1 and len(second) == 1:  # unique first and second event
                test_list = [first, second]
                if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                    # ordered pair matches the clockwise sequence table
                    final[1] = 1
                elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                    final[0] = 1
                else:
                    pass

            elif len(first) != 1 and len(second) == 1 and second != 100:  # tied first events, e.g. [1,1,2,3]
                # NOTE(review): `second` is a list, so `second != 100` is
                # always True -- this probably meant a timing sentinel check.
                finish_flag = 0
                for i in range(0, len(first)):
                    if finish_flag == 0:  # stop at the first tied event that matches a rule
                        test_list = [[first[i]], second]
                        if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                            final[1] = 1
                            finish_flag = 1
                        elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                            final[0] = 1
                            finish_flag = 1
                        else:
                            pass

            elif len(first) == 1 and len(second) != 1:  # tied second events, e.g. [1,2,2,3]
                for i in range(0, len(second)):
                    test_list = [first, [second[i]]]
                    if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                        final[1] = 1
                    elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                        final[0] = 1
                    else:
                        pass

            else:
                pass

        else:  # not all movements fired, e.g. [X000]
            final = [0 for i in range(0, 2)]
            timing_origion = timings.copy()
            timings.sort()
            min_frame = min(timings)
            first = [i for i, j in enumerate(timing_origion) if j == min_frame]
            if len(first) == 2:  # two events happened at the same time [1,1,0,X]
                '''
                print('two actions happened in the same time points')

                if (first[0] ==0 and first[1] == 3) or (first[0] ==3 and first[1] == 0):
                    if location[2] == 1:
                        final[0] = 1
                    else:
                        final[1] = 1
                elif (first[0] == 0 and first[1] == 2) or (first[0] ==2 and first[1] == 0):
                    if location[2] == 1:
                        final[1] = 1
                    else:
                        final[0] = 1
                elif (first[0] ==1 and first[1] == 3) or (first[0] ==3 and first[1] == 1):
                    if location[11] == 1:
                        final[1] = 1
                    else:
                        final[0] = 1
                elif (first[0] ==1 and first[1] == 2) or (first[0] ==2 and first[1] == 1):
                    if location[11] == 1:
                        final[0] = 1
                    else:
                        final[1] = 0
                '''
                location_flag = 1
                finish_flag = 0
                for i in range(0, len(timings)):
                    if timings[i] == min_frame:
                        timings[i] = 100
                min_second_frame = min(timings)
                second = [i for i, j in enumerate(timing_origion) if j == min_second_frame]  # check the second event
                # try pairing each of the two tied first events with the
                # second event against the sequence tables
                test_list = [[first[0]], second]
                if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                    final[1] = 1
                    location_flag = 0
                    finish_flag = 1
                elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                    final[0] = 1
                    location_flag = 0
                    finish_flag = 1
                else:
                    pass

                if finish_flag == 0:
                    test_list = [[first[1]], second]
                    if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                        final[1] = 1
                        location_flag = 0
                    elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                        final[0] = 1
                        location_flag = 0
                    else:
                        # sequence rules failed for both pairings: fall back
                        # to the location-based disambiguation
                        if location_flag == 1:
                            if (first[0] == 0 and first[1] == 3) or (first[0] == 3 and first[1] == 0):
                                if location[2] == 1:
                                    final[0] = 1
                                else:
                                    final[1] = 1
                            elif (first[0] == 0 and first[1] == 2) or (first[0] == 2 and first[1] == 0):
                                if location[2] == 1:
                                    final[1] = 1
                                else:
                                    final[0] = 1
                            elif (first[0] == 1 and first[1] == 3) or (first[0] == 3 and first[1] == 1):
                                if location[11] == 1:
                                    final[1] = 1
                                else:
                                    final[0] = 1
                            elif (first[0] == 1 and first[1] == 2) or (first[0] == 2 and first[1] == 1):
                                if location[11] == 1:
                                    final[0] = 1
                                else:
                                    # NOTE(review): writes 0, not 1, unlike the
                                    # sibling branches -- possibly intentional,
                                    # possibly a typo; preserved as-is.
                                    final[1] = 0
                        else:
                            print('unknown issue1')
            elif len(first) == 1 and first != 100:  # events arrived in sequence [1,2,0,X]
                # NOTE(review): `first` is a list, so `first != 100` is always True.
                for i in range(0, len(timings)):
                    if timings[i] == min_frame:
                        timings[i] = 100
                min_second_frame = min(timings)
                second = [i for i, j in enumerate(timing_origion) if j == min_second_frame]  # check the second event
                if len(second) == 1 and timing_origion[second[0]] != 100:  # [1,0,2,X]
                    '''
                    if (first ==[0] and second == [3]) or (first ==[3 ]and second == [0]):
                        if location[2] == 1:
                            final[0] = 1
                        else:
                            final[1] = 1
                    elif (first ==[0] and second == [2]) or (first ==[2] and second == [0]):
                        if location[2] == 1:
                            final[1] = 1
                        else:
                            final[0] = 1
                    elif (first ==[1] and second == [3]) or (first ==[3] and second == [1]):
                        if location[11] == 1:
                            final[1] = 1
                        else:
                            final[0] = 1
                    elif (first ==[1] and second == [2]) or (first ==[2] and second == [1]):
                        if location[11] == 1:
                            final[0] = 1
                        else:
                            final[1] = 1
                    '''
                    test_list = [first, second]
                    if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                        final[1] = 1
                    elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                        final[0] = 1
                    else:  # [1,2,0,X] -- (first, second) matched neither table; try the third event
                        print('system info: the two events are 0 and 1 or 2 and 3')
                        for i in range(0, len(timings)):
                            if timings[i] == min_second_frame:
                                timings[i] = 100
                        min_third_frame = min(timings)
                        third = [i for i, j in enumerate(timing_origion) if j == min_third_frame]
                        if len(third) == 1 and third != 100:
                            '''
                            if (first ==[0] and third == [3]) or (first ==[3 ]and third == [0]):
                                if location[2] == 1:
                                    final[0] = 1
                                else:
                                    final[1] = 1
                            elif (first ==[0] and third == [2]) or (first ==[2] and third == [0]):
                                if location[2] == 1:
                                    final[1] = 1
                                else:
                                    final[0] = 1
                            elif (first ==[1] and third == [3]) or (first ==[3] and third == [1]):
                                if location[11] == 1:
                                    final[1] = 1
                                else:
                                    final[0] = 1
                            elif (first ==[1] and third == [2]) or (first ==[2] and third == [1]):
                                if location[11] == 1:
                                    final[0] = 1
                                else:
                                    final[1] = 1
                            '''
                            test_list = [first, third]
                            if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                                final[1] = 1
                            elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                                final[0] = 1
                            # NOTE(review): original indentation lost in the
                            # patch; this else is reconstructed as the
                            # direction-test fallback -- verify.
                            else:
                                print('system error2: complex situations')
                elif len(second) > 1 and timing_origion[second[0]] != 100:  # [1,2,2,0]
                    finish_flag1 = 0
                    test_list = [first, [second[0]]]
                    if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                        final[1] = 1
                        finish_flag1 = 1
                    elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                        final[0] = 1
                        finish_flag1 = 1
                    else:
                        pass

                    if finish_flag1 == 0:  # first pairing inconclusive; try the other tied second event
                        test_list = [first, [second[1]]]
                        if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                            final[1] = 1
                        elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                            final[0] = 1
                        else:
                            pass
                    '''
                    if (first ==[0] and second[0] == 3) or (first ==[3] and second[0] == 0):
                        if location[2] == 1:
                            final[0] = 1
                        else:
                            final[1] = 1
                    elif (first ==[0] and second[0] == 2) or (first ==[2] and second[0] == 0):
                        if location[2] == 1:
                            final[1] = 1
                        else:
                            final[0] = 1
                    elif (first ==[1] and second[0] == 3) or (first ==[3] and second[0] == 1):
                        if location[11] == 1:
                            final[1] = 1
                        else:
                            final[0] = 1
                    elif (first ==[1] and second[0] == 2) or (first ==[2] and second[0] == 1):
                        if location[10] == 1:
                            final[0] = 1
                        else:
                            final[1]= 1

                    if (first ==[0] and second[1] == 3) or (first ==[3] and second[1] == 0):
                        if location[2] == 1:
                            final[0] = 1
                        else:
                            final[1] = 1
                    elif (first ==[0] and second[1] == 2) or (first ==[2] and second[1] == 0):
                        if location[2] == 1:
                            final[1] = 1
                        else:
                            final[0] = 1
                    elif (first ==[1] and second[1] == 3) or (first ==[3] and second[1] == 1):
                        if location[11] == 1:
                            final[1] = 1
                        else:
                            final[0] = 1
                    elif (first ==[1] and second[1] == 2) or (first ==[2] and second[1] == 1):
                        if location[10] == 1:
                            final[0] = 1
                        else:
                            final[1]= 1
                    '''
                else:
                    # print('system error4: unknown')
                    a = 1
                    #if np.shape(np.nonzero(actions))[0] == 1: ## special case
                    #    final[0] = 1
                    pass
            elif len(first) == 3:
                # three simultaneous first events: no rule implemented
                #if location[5] == 1:
                #    final[1] = 1
                #else:
                #    final[0] = 1
                pass
        return final

    def clockwise_knowledge(self, actions, location, timings):
        """Reduced variant of :meth:`prior_knowledge`: same sequence tables,
        no location fallback and no tie-breaking -- silently returns [0, 0]
        for the ambiguous cases the full method tries to resolve.

        Parameters and return value are the same as :meth:`prior_knowledge`;
        ``timings`` is likewise mutated in place. ``location`` is accepted but
        never read here.
        """
        if np.all(actions) == 1:  # all four movements seen [1111]
            # print('all events happen.')
            timing_origion = timings.copy()
            timings.sort()
            min_frame = min(timings)
            first = [i for i, j in enumerate(timing_origion) if j == min_frame]

            for i in range(0, len(timings)):
                if timings[i] == min_frame:
                    timings[i] = 100
            min_second_frame = min(timings)
            second = [i for i, j in enumerate(timing_origion) if j == min_second_frame]

            final = [0 for i in range(0, 2)]

            if len(first) == 1 and len(second) == 1:  # unique first and second event
                test_list = [first, second]
                if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                    final[1] = 1
                elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                    final[0] = 1
                else:
                    pass

        else:  # [XX00]
            final = [0 for i in range(0, 2)]
            timing_origion = timings.copy()
            timings.sort()
            min_frame = min(timings)
            first = [i for i, j in enumerate(timing_origion) if j == min_frame]
            if len(first) == 2:  # two simultaneous first events: not handled here
                pass
            elif len(first) == 1 and first != 100:  # events in sequence [1,2,0,X]
                # NOTE(review): `first` is a list, so `first != 100` is always True.
                for i in range(0, len(timings)):
                    if timings[i] == min_frame:
                        timings[i] = 100
                min_second_frame = min(timings)
                second = [i for i, j in enumerate(timing_origion) if j == min_second_frame]  # check the second event
                if len(second) == 1 and timing_origion[second[0]] != 100:  # [1,0,2,X]
                    test_list = [first, second]
                    if test_list == [[3],[1]] or test_list == [[1],[2]] or test_list == [[2],[0]] or test_list == [[0],[3]]:
                        final[1] = 1
                    elif test_list == [[1],[3]] or test_list == [[3],[0]] or test_list == [[0],[2]] or test_list == [[2],[1]]:
                        final[0] = 1
                    else:  # [1,2,0,X]
                        pass
                else:
                    # print('system error4: unknown')
                    #if np.shape(np.nonzero(actions))[0] == 1: ## special case
                    #    final[0] = 1
                    pass
            elif len(first) == 3:
                #if location[5] == 1:
                #    final[1] = 1
                #else:
                #    final[0] = 1
                pass
        return final
def gather_aedat(directory, start_id, end_id, filename_prefix='user'):
    """Collect .aedat recording paths for users start_id .. end_id-1.

    Matches files named like ``user01*.aedat`` inside *directory* and returns
    the list of matching paths (empty list when nothing matches).
    """
    import glob
    fns = []
    for i in range(start_id, end_id):
        search_mask = directory + os.sep + \
            filename_prefix + "{0:02d}".format(i) + '*.aedat'
        glob_out = glob.glob(search_mask)
        if len(glob_out) > 0:
            fns += glob_out
    return fns


def aedat_to_events(filename):
    """Parse one DVS-Gesture .aedat recording into labeled event streams.

    Reads the sibling ``*_labels.csv`` (label, start_us, end_us per row),
    then walks the binary event packets and keeps only polarity events
    (eventtype == 1), decoding x / y / polarity from the packed 32-bit word.

    Returns:
        (events, labels) where events is an (N, 4) uint32 array of
        (t, x, y, p) rows clipped to the labeled time windows, and labels is
        the raw uint32 label table.
    """
    label_filename = filename[:-6] + '_labels.csv'
    labels = np.loadtxt(label_filename,
                        skiprows=1,
                        delimiter=',',
                        dtype='uint32')

    events = []
    with open(filename, 'rb') as f:
        # skip the 5 ASCII header lines of the aedat container
        for i in range(5):
            _ = f.readline()

        while True:
            data_ev_head = f.read(28)
            if len(data_ev_head) == 0:
                break

            # 28-byte packet header (little-endian assumed by struct defaults)
            eventtype = struct.unpack('H', data_ev_head[0:2])[0]
            eventsource = struct.unpack('H', data_ev_head[2:4])[0]
            eventsize = struct.unpack('I', data_ev_head[4:8])[0]
            eventoffset = struct.unpack('I', data_ev_head[8:12])[0]
            eventtsoverflow = struct.unpack('I', data_ev_head[12:16])[0]
            eventcapacity = struct.unpack('I', data_ev_head[16:20])[0]
            eventnumber = struct.unpack('I', data_ev_head[20:24])[0]
            eventvalid = struct.unpack('I', data_ev_head[24:28])[0]

            if (eventtype == 1):  # polarity event packet
                event_bytes = np.frombuffer(f.read(eventnumber * eventsize),
                                            'uint32')
                event_bytes = event_bytes.reshape(-1, 2)

                # word 0 packs (x, y, polarity); word 1 is the timestamp
                x = (event_bytes[:, 0] >> 17) & 0x00001FFF
                y = (event_bytes[:, 0] >> 2) & 0x00001FFF
                p = (event_bytes[:, 0] >> 1) & 0x00000001
                t = event_bytes[:, 1]
                events.append([t, x, y, p])
            else:
                # non-polarity packet: skip its payload
                f.read(eventnumber * eventsize)

    events = np.column_stack(events)
    events = events.astype('uint32')

    # keep only events inside the labeled [start, end) windows; relies on
    # timestamps being sorted within the recording
    clipped_events = np.zeros([4, 0], 'uint32')
    for l in labels:
        start = np.searchsorted(events[0, :], l[1])
        end = np.searchsorted(events[0, :], l[2])
        clipped_events = np.column_stack([clipped_events,
                                          events[:, start:end]])

    return clipped_events.T, labels


def create_hdf5(path, save_path):
    """Convert the raw DVS-Gesture .aedat recordings under *path* into
    per-gesture HDF5 files in ``save_path/train_label`` and
    ``save_path/test_label``.

    Users 1-23 become training data, users 24-29 test data.  Gesture label 11
    ("other") is dropped; stored labels are shifted to 0-based.
    """
    print('processing train data...')
    save_path_train = os.path.join(save_path, 'train_label')
    if not os.path.exists(save_path_train):
        os.makedirs(save_path_train)

    fns_train = gather_aedat(path, 1, 24)

    for i in range(len(fns_train)):
        print('strat processing ' + str(i + 1) + ' train data')
        data, labels_starttime = aedat_to_events(fns_train[i])
        tms = data[:, 0]
        ads = data[:, 1:]
        lbls = labels_starttime[:, 0]
        start_tms = labels_starttime[:, 1]
        end_tms = labels_starttime[:, 2]

        for lbls_idx in range(len(lbls)):
            s_ = get_slice(tms, ads, start_tms[lbls_idx], end_tms[lbls_idx])
            times = s_[0]
            addrs = s_[1]
            file_name = save_path_train + os.sep + 'DVS-Gesture-train_' + \
                str(lbls[lbls_idx]) + '_' + str(i) + '.hdf5'
            # (an earlier revision appended '_2' for duplicate names; that
            # handling is disabled, so an existing file is overwritten)
            if lbls[lbls_idx] != 11:  # drop the "other" gesture class
                print(file_name)
                with h5py.File(file_name, 'w') as f:
                    tm_dset = f.create_dataset('times', data=times, dtype=np.uint32)
                    ad_dset = f.create_dataset('addrs', data=addrs, dtype=np.uint8)
                    lbl_dset = f.create_dataset('labels', data=lbls[lbls_idx] - 1,
                                                dtype=np.uint8)

    print('trainset process finish')

    print('processing test data...')
    save_path_test = os.path.join(save_path, 'test_label')
    if not os.path.exists(save_path_test):
        os.makedirs(save_path_test)

    fns_test = gather_aedat(path, 24, 30)

    for i in range(len(fns_test)):
        print('strat processing ' + str(i + 1) + ' test data')
        data, labels_starttime = aedat_to_events(fns_test[i])
        tms = data[:, 0]
        ads = data[:, 1:]
        lbls = labels_starttime[:, 0]
        start_tms = labels_starttime[:, 1]
        end_tms = labels_starttime[:, 2]

        for lbls_idx in range(len(lbls)):
            s_ = get_slice(tms, ads, start_tms[lbls_idx], end_tms[lbls_idx])
            times = s_[0]
            addrs = s_[1]
            file_name = save_path_test + os.sep + 'DVS-Gesture-test_' + \
                str(lbls[lbls_idx]) + '_' + str(i) + '.hdf5'

            print(file_name)
            if lbls[lbls_idx] != 11:
                print(file_name)
                with h5py.File(file_name, 'w') as f:
                    tm_dset = f.create_dataset('times', data=times, dtype=np.uint32)
                    ad_dset = f.create_dataset('addrs', data=addrs, dtype=np.uint8)
                    lbl_dset = f.create_dataset('labels', data=lbls[lbls_idx] - 1,
                                                dtype=np.uint8)

    # clean up any label-11 test files left over from earlier runs
    test_data_filenames = os.listdir(save_path_test)
    for data_filename in test_data_filenames:
        if 'DVS-Gesture-test_11' in data_filename:
            # BUG FIX: os.listdir returns bare filenames; the original passed
            # the bare name to os.remove, which deletes from the current
            # working directory (or raises FileNotFoundError) instead of
            # removing the file inside save_path_test.
            os.remove(os.path.join(save_path_test, data_filename))
    print('testset process finish')


def datasets_process(path=None):
    """Entry point: expects the extracted 'DvsGesture' folder under *path*."""
    create_hdf5(os.path.join(path, 'DvsGesture'), path)


if __name__ == '__main__':
    datasets_process(path=cfg.data_path)
pattern0 = [0 for i in range (500)] + pattern1 = [0 for i in range (500)] ## 30 is the user defined type for a single event + pattern2 = [0 for i in range (500)] + pattern3 = [0 for i in range (500)] + pattern4 = [0 for i in range (500)] + pattern5 = [0 for i in range (500)] ## 30 is the user defined type for a single event + pattern6 = [0 for i in range (500)] + pattern7 = [0 for i in range (500)] + pattern8 = [0 for i in range (500)] + pattern9 = [0 for i in range (500)] + + num_his_9 = 0 + num_his_8 = 0 + num_his_7 = 0 + num_his_6 = 0 + num_his_5 = 0 + num_his_4 = 0 + num_his_3 = 0 + num_his_2 = 0 + num_his_1 = 0 + num_his_0 = 0 + + + dataset = DatasetGesture_i(cfg.data_path) + batchsize = 10 + cnt = 0 + testdata = np.full((80,128,128,batchsize),0) ## data initializations + test_label = [0 for i in range (batchsize)] + for i in range(0,dataset.test_len(),batchsize): ## read dataset by a block 10 + for j in range(0, batchsize): + index = j+ i + if index >= dataset.test_len(): + index = dataset.test_len()-1 + else: + index = index + video, label = dataset.get_test_sample(index) + if np.shape(video)[0] < 80: + testdata[0:np.shape(video)[0],:,:,j] = video[:,:,:] + else: + testdata[:,:,:,j] = video[0:80,:,:] + test_label[j] = label + + stlayer = Spatiotemporal_Core(testdata, 2, 1, 2, 1) + stlayer.Spaceprocessing() + stlayer.Temporalprocessing() + stlayer.Stprocessing() + stlayer.stspike() + e1 = SGF_expert() + space_neuron = e1.expert_space( 2, stlayer.ST_spike, [20,10], [0,0]) + space_neuron1 = e1.expert_space( 2, stlayer.ST_spike, [5,200], [0,0]) + space_all = np.concatenate((space_neuron,space_neuron1), axis = 0) ## jow results along rows, generate unique code + data_ds = testdata[0:80:5,:,:,:] + #data_ds[data_ds < 0] = 0 + stlayer = Spatiotemporal_Core(data_ds, 2, 1, 2, 1) ##: user defined (this should done by agent) + stlayer.Spaceprocessing() ## space integration + stlayer.Temporalprocessing() ## temporal integration + stlayer.Stprocessing() ## space-temporal 
integration + stlayer.stspike() + if np.any(space_neuron1[2,:] == 1): ## simple attention mechanisms + start = [0,0] + end = [64,40] + elif np.any(space_neuron1[3,:] == 1): + start = [0,20] + end = [64,64] + else: + start = [0,0] + end = [64,64] + temporal_neuron_final1 = e1.expert_temporal(2, stlayer.stcore, start, end, 0, int(50/64*stlayer.stcore.shape[1])) #--: expert_temporal(self, resolution, data, start, end, type, scale): + temporal_neuron_final2 = e1.expert_temporal(2, stlayer.stcore, start, end ,1, int(50/64*stlayer.stcore.shape[1])) #--: expert_temporal(self, resolution, data, start, end, type, scale): + temporal_neuron1 = [0 for i in range (len(temporal_neuron_final1))] + temporal_neuron2 = [0 for i in range (len(temporal_neuron_final2))] + for i in range(0, len(temporal_neuron_final1)): + if temporal_neuron_final1[i] > temporal_neuron_final2[i]: + temporal_neuron1[i] = 1 + else: + temporal_neuron2[i] = 1 + temporal_td = np.stack((temporal_neuron1,temporal_neuron2),axis = 0) + temporal_all = np.concatenate((space_all,temporal_td),axis = 0) + space_neuron3 = e1.expert_space(6, stlayer.ST_spike, [8,5], [0,0]) + sp_all= np.concatenate((temporal_all,space_neuron3), axis = 0) ## combine two experts knowledge + + + for i in range(0, batchsize): + sample = sp_all[:,i] + file_name_id = dataset.get_test_data_file_name(cnt) + print(cnt,file_name_id) + if test_label[i] == 9: ## event is 9 + pattern_num = np.shape(sample_code_9)[0] ## calculate the current pattern num + pattern_index = [0 for i in range (pattern_num)] + for j in range(0, pattern_num): + if np.all(sample == sample_code_9[j]): ## if it is the same pattern, record the number + pattern9[j] = pattern9[j] +1 + pattern_index[j] = 1 + logger_object_9.debug(pattern9) + if np.all(np.asarray(pattern_index) == 0): + sample_code_9 = np.vstack([sample_code_9,sp_all[:,i]]) + logger_object_9.critial(sample_code_9) + logger_object_9.warn(file_name_id) + logger_object_9.debug(cnt) + + num_his_9 = 
self.expert_profiling(sample_code_9, num_his_9) + + + if test_label[i] == 0: ## event is 0 + pattern_num = np.shape(sample_code_0)[0] ## calculate the current pattern num + pattern_index = [0 for i in range (pattern_num)] + for j in range(0, pattern_num): + if np.all(sample == sample_code_0[j]): ## if it is the same pattern, record the number + pattern0[j] = pattern0[j] +1 + pattern_index[j] = 1 + logger_object_0.debug(pattern0) + if np.all(np.asarray(pattern_index) == 0): + sample_code_0 = np.vstack([sample_code_0,sp_all[:,i]]) + logger_object_0.critial(sample_code_0) + logger_object_0.warn(file_name_id) + logger_object_0.debug(cnt) + num_his_0 = self.expert_profiling(sample_code_0, num_his_0) + + if test_label[i] == 1: ## event is 1 + pattern_num = np.shape(sample_code_1)[0] ## calculate the current pattern num + pattern_index = [0 for i in range (pattern_num)] + for j in range(0, pattern_num): + if np.all(sample == sample_code_1[j]): ## if it is the same pattern, record the number + pattern1[j] = pattern1[j] +1 + pattern_index[j] = 1 + logger_object_1.debug(pattern1) + if np.all(np.asarray(pattern_index) == 0): + sample_code_1 = np.vstack([sample_code_1,sp_all[:,i]]) + logger_object_1.critial(sample_code_1) + logger_object_1.warn(file_name_id) + logger_object_1.debug(cnt) + num_his_1 = self.expert_profiling(sample_code_1, num_his_1) + + if test_label[i] == 2: ## event is 2 + pattern_num = np.shape(sample_code_2)[0] ## calculate the current pattern num + pattern_index = [0 for i in range (pattern_num)] + for j in range(0, pattern_num): + if np.all(sample == sample_code_2[j]): ## if it is the same pattern, record the number + pattern2[j] = pattern2[j] +1 + pattern_index[j] = 1 + logger_object_2.debug(pattern2) + if np.all(np.asarray(pattern_index) == 0): + sample_code_2 = np.vstack([sample_code_2,sp_all[:,i]]) + logger_object_2.critial(sample_code_2) + logger_object_2.warn(file_name_id) + logger_object_2.debug(cnt) + num_his_2 = 
self.expert_profiling(sample_code_2, num_his_2) + + if test_label[i] == 3: ## event is 3 + pattern_num = np.shape(sample_code_3)[0] ## calculate the current pattern num + pattern_index = [0 for i in range (pattern_num)] + for j in range(0, pattern_num): + if np.all(sample == sample_code_3[j]): ## if it is the same pattern, record the number + pattern3[j] = pattern3[j] +1 + pattern_index[j] = 1 + logger_object_3.debug(pattern3) + if np.all(np.asarray(pattern_index) == 0): + sample_code_3 = np.vstack([sample_code_3,sp_all[:,i]]) + logger_object_3.critial(sample_code_3) + logger_object_3.warn(file_name_id) + logger_object_3.debug(cnt) + num_his_3 = self.expert_profiling(sample_code_3, num_his_3) + + if test_label[i] == 4: ## event is 4 + pattern_num = np.shape(sample_code_4)[0] ## calculate the current pattern num + pattern_index = [0 for i in range (pattern_num)] + for j in range(0, pattern_num): + if np.all(sample == sample_code_4[j]): ## if it is the same pattern, record the number + pattern4[j] = pattern4[j] +1 + pattern_index[j] = 1 + logger_object_4.debug(pattern4) + if np.all(np.asarray(pattern_index) == 0): + sample_code_4 = np.vstack([sample_code_4,sp_all[:,i]]) + logger_object_4.critial(sample_code_4) + logger_object_4.warn(file_name_id) + logger_object_4.debug(cnt) + num_his_4 = self.expert_profiling(sample_code_4, num_his_4) + + if test_label[i] == 5: ## event is 5 + pattern_num = np.shape(sample_code_5)[0] ## calculate the current pattern num + pattern_index = [0 for i in range (pattern_num)] + for j in range(0, pattern_num): + if np.all(sample == sample_code_5[j]): ## if it is the same pattern, record the number + pattern5[j] = pattern5[j] +1 + pattern_index[j] = 1 + logger_object_5.debug(pattern5) + if np.all(np.asarray(pattern_index) == 0): + sample_code_5 = np.vstack([sample_code_5,sp_all[:,i]]) + logger_object_5.critial(sample_code_5) + logger_object_5.warn(file_name_id) + logger_object_5.debug(cnt) + num_his_5 = self.expert_profiling(sample_code_5, 
num_his_5) + + if test_label[i] == 6: ## event is 6 + pattern_num = np.shape(sample_code_6)[0] ## calculate the current pattern num + pattern_index = [0 for i in range (pattern_num)] + for j in range(0, pattern_num): + if np.all(sample == sample_code_6[j]): ## if it is the same pattern, record the number + pattern6[j] = pattern6[j] +1 + pattern_index[j] = 1 + logger_object_6.debug(pattern6) + if np.all(np.asarray(pattern_index) == 0): + sample_code_6 = np.vstack([sample_code_6,sp_all[:,i]]) + logger_object_6.critial(sample_code_6) + logger_object_6.warn(file_name_id) + logger_object_6.debug(cnt) + + if test_label[i] == 7: ## event is 7 + pattern_num = np.shape(sample_code_7)[0] ## calculate the current pattern num + pattern_index = [0 for i in range (pattern_num)] + for j in range(0, pattern_num): + if np.all(sample == sample_code_7[j]): ## if it is the same pattern, record the number + pattern7[j] = pattern7[j] +1 + pattern_index[j] = 1 + logger_object_7.debug(pattern7) + if np.all(np.asarray(pattern_index) == 0): + sample_code_7 = np.vstack([sample_code_7,sp_all[:,i]]) + logger_object_7.critial(sample_code_7) + logger_object_7.warn(file_name_id) + logger_object_7.debug(cnt) + + if test_label[i] == 8: ## event is 8 + pattern_num = np.shape(sample_code_8)[0] ## calculate the current pattern num + pattern_index = [0 for i in range (pattern_num)] + for j in range(0, pattern_num): + if np.all(sample == sample_code_8[j]): ## if it is the same pattern, record the number + pattern8[j] = pattern8[j] +1 + pattern_index[j] = 1 + logger_object_8.debug(pattern8) + if np.all(np.asarray(pattern_index) == 0): + sample_code_8 = np.vstack([sample_code_8,sp_all[:,i]]) + logger_object_8.critial(sample_code_8) + logger_object_8.warn(file_name_id) + logger_object_8.debug(cnt) + cnt +=1 + + + + logger_object_9.debug(np.shape(sample_code_9)[0]-1) + logger_object_9.debug(pattern9) + event9_id_p = "expert4_id_p" + np.save(event9_id_p, pattern9) + event9_pattern_p = 'event9_pattern_p' + 
np.save(event9_pattern_p, sample_code_9) + event9_expert_p = "event9_expert_p" + np.save(event9_expert_p, num_his_9) + + logger_object_8.debug(np.shape(sample_code_8)[0]-1) + logger_object_8.debug(pattern8) + event8_id_p = "expert8_id_p" + np.save(event8_id_p, pattern8) + event8_pattern_p = 'event8_pattern_p' + np.save(event8_pattern_p, sample_code_8) + + logger_object_7.debug(np.shape(sample_code_7)[0]-1) + logger_object_7.debug(pattern7) + event7_id_p = "expert7_id_p" + np.save(event7_id_p, pattern7) + event7_pattern_p = 'event7_pattern_p' + np.save(event7_pattern_p, sample_code_7) + + logger_object_6.debug(np.shape(sample_code_6)[0]-1) + logger_object_6.debug(pattern6) + event6_id_p = "expert6_id_p" + np.save(event6_id_p, pattern6) + event6_pattern_p = 'event6_pattern_p' + np.save(event6_pattern_p, sample_code_6) + + logger_object_5.debug(np.shape(sample_code_5)[0]-1) + logger_object_5.debug(pattern5) + event5_id_p = "expert5_id_p" + np.save(event5_id_p, pattern5) + event5_pattern_p = 'event5_pattern_p' + np.save(event5_pattern_p, sample_code_5) + event5_expert_p = "event5_expert_p" + np.save(event5_expert_p, num_his_5) + + logger_object_4.debug(np.shape(sample_code_4)[0]-1) + logger_object_4.debug(pattern4) + event4_id_p = "expert4_id_p" + np.save(event4_id_p, pattern4) + event4_pattern_p = 'event4_pattern_p' + np.save(event4_pattern_p, sample_code_4) + event4_expert_p = "event4_expert_p" + np.save(event4_expert_p, num_his_4) + + logger_object_3.debug(pattern3) + logger_object_3.debug(np.shape(sample_code_3)[0]-1) + event3_id_p = "expert3_id_p" + np.save(event3_id_p, pattern3) + event3_pattern_p = 'event3_pattern_p' + np.save(event3_pattern_p, sample_code_3) + event3_expert_p = "event3_expert_p" + np.save(event3_expert_p, num_his_3) + + logger_object_2.debug(pattern2) + logger_object_2.debug(np.shape(sample_code_2)[0]-1) + event2_id_p = "expert2_id_p" + np.save(event2_id_p, pattern2) + event2_pattern_p = 'event2_pattern_p' + np.save(event2_pattern_p, 
def expert_profiling(self, sample_code, num_his):
    """Append the current distinct-pattern count to the running history.

    ``sample_code`` holds one initial placeholder row plus one row per
    discovered pattern, so the number of patterns is its row count minus
    one.  That count is stacked under ``num_his`` and the grown history
    array is returned.
    """
    pattern_count = sample_code.shape[0] - 1
    return np.vstack([num_his, pattern_count])
def save_knowledge(self, expert_name, id, knowledge, if_save=True):
    """Persist one expert's learned feature vectors as text files.

    Sorts the knowledge rows by their event ``id`` and writes
    ``<code_path>/data/<exp>/<expert_name>_id.txt`` and
    ``<expert_name>_information.txt`` via ``np.savetxt``.

    NOTE(review): layout reconstructed from a flattened patch.  If ``id``
    is an int, ``sorted_id``/``sorted_knowledge`` are never bound and the
    save below would raise NameError — presumably this method is only
    ever called with a list of ids; TODO confirm against callers.
    """
    if (not isinstance(id,int)):  # ``id`` shadows the builtin here
        # print("expert2:")
        # Sort knowledge rows so they line up with the ascending id order.
        sort_idx = sorted(range(len(id)), key=lambda k: id[k])
        sorted_knowledge = list(map(lambda x:knowledge[x], sort_idx))
        sorted_id = sorted(id)
        for event_i, event in enumerate(id):
            if not if_save:
                # Dry-run mode: print raw vs. sorted pairs instead of saving.
                print(event, knowledge[event_i], sorted_id[event_i], sorted_knowledge[event_i])
    expert_id = cfg.code_path + "/data/" + self.exp + "/"+expert_name+"_id.txt"
    expert_information = cfg.code_path + "/data/" + self.exp + "/"+expert_name+"_information.txt"
    if (isinstance(knowledge, list)):
        len_n = len(knowledge)
        # NOTE(review): ``n`` is computed but never used afterwards — dead code?
        n = np.array(knowledge).reshape(1, len_n)
    if if_save:
        np.savetxt(expert_id, sorted_id, fmt='%s')
        np.savetxt(expert_information, sorted_knowledge, fmt='%d')
clock-wise + # 6: right counter clock-wise + # 7: right clock-wise + # 8: arm roll + # 9: arm drum + # 10: air guitar + # 11: random + + train_data_folder = os.path.join(cfg.data_path, 'train_npy') + train_filenames_all = os.listdir(train_data_folder) + + random.seed(0) + hop2_knowledge = list() + hop2_id = list() + all_data_list = range(0, dataset.train_num) + assert data_num <= dataset.train_num + selected_sample = random.sample(all_data_list, data_num) + print(selected_sample) + + ##-----------------------------------------------------------## + ## Data preprocessing ## + ##-----------------------------------------------------------## + print("Preparing training data...") + event_mix = False + train_filenames = list() + for filename in train_filenames_all: + for event in selected_event: + if not "+" in event: + event_mix = True + for sample in selected_sample: + match_str = "train_" + str(event) + "_" + str(sample) + ".npy" + if match_str in filename: + train_filenames.append(filename) + else: + event_list = event.split("+") + for event in event_list: + for sample in selected_sample: + match_str = "train_" + str(event) + "_" + str(sample) + ".npy" + if match_str in filename: + train_filenames.append(filename) + + if self.args.test_batch_list == '0': + test_batch_list = list() + else: + test_batch_list = [int(t) for t in self.args.test_batch_list.split('_')] + + + + ##-----------------------------------------------------------## + ## Training Phase ## + ##-----------------------------------------------------------## + for i in range(0, iter): + print("----------------------------------------------") + print(self.exp, ", start the", i, "/", iter, "training iteration:") + + if self.args.each_sample_train_once: + assert (iter == data_num) + selected_batch_sample = selected_sample[i] + else: + if self.args.inner_batch_random: + selected_batch_sample = random.sample(selected_sample, self.sub_event_num) + else: + if self.args.test_batch_list == '0': + 
selected_batch_sample = random.sample(selected_sample, 1)[0] + else: + selected_batch_sample = test_batch_list[i] + if self.args.test_batch_list == '0': + test_batch_list.append(selected_batch_sample) + batch_filenames = list() + cut_frame = 80 + batch_data = np.full((cut_frame, 128, 128, self.sub_event_num), 0) + + for filename in train_filenames: + for event_i, event in enumerate(selected_event): + if self.args.inner_batch_random: + match_str = "train_" + str(event) + "_" + str(selected_batch_sample[event_i]) + ".npy" + else: + match_str = "train_" + str(event) + "_" + str(selected_batch_sample) + ".npy" + if match_str in filename: + # print(match_str, filename) + batch_filenames.append(filename) + batch_filenames.sort() + + # load np data and trancate 80 frame + for filename in batch_filenames: + np_name = os.path.join(train_data_folder, filename) + # print(np_name) + sample = np.load(np_name) + event = str(filename.split("_")[-2]) + event_i = selected_event.index(event) + if np.shape(sample)[0] >= cut_frame: + batch_data[:, :, :, event_i] = sample[0:cut_frame, :, :] + else: + batch_data[0:np.shape(sample)[0], :, :, event_i] = sample + + + e1 = SGF_expert(self.args) + stlayer1 = Spatiotemporal_Core(batch_data, 3, 2, 3, 2) ## ST layer processing + stlayer1.Spaceprocessing() ## space integration + stlayer1.Temporalprocessing() ## temporal integration + stlayer1.Stprocessing() ## space-temporal integration + stlayer1.stspike() ## generate acculumation spike + + testdata = copy.deepcopy(batch_data) + testdata [testdata < 0] = 1 ## generate acculumation spike + + stlayer2 = Spatiotemporal_Core(testdata[:,:,:,2:5], 1, 1, 2, 2, if_st_neuron_clear=True) + stlayer2.Spaceprocessing() ## space integration + stlayer2.Temporalprocessing() ## temporal integration + stlayer2.Stprocessing() ## space-temporal integration + stlayer2.stspike() ## generate acculumation spike + + ##-----------------------------------------------------------## + ## SGF Unit A ## + 
##-----------------------------------------------------------## + a1 = SGF_agent(self.args, batch_data, selected_event, exp=self.exp, st_paras=self.st_paras, train_succ_list=self.train_succ_list) + + if i == 0: ## expert knowledge initializations + event_pre_id = [-1] # pre id + k1_len = self.resolution_s1[0]*self.resolution_s1[1]*self.thres_bit_s1*self.thres_bit_s1 \ + + self.resolution_s2[0]*self.resolution_s2[1]*self.thres_bit_s2*self.thres_bit_s2 + expert1_pre_knowledge = np.array([-1 for x in range(k1_len)]) ## user defined at this version + else: + event_pre_id = id1 + expert1_pre_knowledge = n1 + + # generate feature vectors + n1, id1 = a1.agent_binary_tree(i, stlayer1.ST_spike, \ + thres_s1=self.thres_s1, \ + thres_s2=self.thres_s2, \ + thres_s3=self.thres_s3, \ + offset=[0,0] , \ + expert1_id=event_pre_id, expert1_knowledge=expert1_pre_knowledge) ## agent binary_tree_search_policy + + + # SGF Unit B doesn't require trainings since it is designed based on the human prior knowledge. + + ##-----------------------------------------------------------## + ## SGF Unit C ## + ##-----------------------------------------------------------## + frame_skip = self.args.hop2_frame_skip #3 + threshold = self.args.hop2_threshold #0.2 + hist_threshold = self.args.hop2_hist_threshold #15 + print("UnitC new knowledge:") + for l in range(stlayer2.stcore.shape[3]): + # Temporal SNNs with feature index H, I, J and K. 
+ area_index1, ascent_bit1, swing_bit1= e1.expert_hopfield2(stlayer2.stcore[0:10*frame_skip:frame_skip,:,0:60,l],0,threshold,hist_threshold) + area_index3, ascent_bit3, swing_bit3= e1.expert_hopfield2(stlayer2.stcore[0:10*frame_skip:frame_skip,:,0:60,l],1,threshold,hist_threshold) + area_index2, ascent_bit2, swing_bit2= e1.expert_hopfield2(stlayer2.stcore[0:10*frame_skip:frame_skip,:,60:120,l],0,threshold,hist_threshold) + area_index4, ascent_bit4, swing_bit4= e1.expert_hopfield2(stlayer2.stcore[0:10*frame_skip:frame_skip,:,60:120,l],1,threshold,hist_threshold) + ascent_activites_td_left = ascent_bit1 + ascent_bit3 ## This for detection top->down and bottom-> up on the left area + ascent_activites_td_right = ascent_bit2 + ascent_bit4 ## This for detection top->down and bottom-> up on the right area + swing_activities_td_left = swing_bit1 + swing_bit3 + swing_activities_td_right = swing_bit2 + swing_bit4 + + area_index1, ascent_bit5, swing_bit5= e1.expert_hopfield2(stlayer2.stcore[0:10*frame_skip:frame_skip,:,0:60,l],2,threshold,hist_threshold) + area_index3, ascent_bit7, swing_bit7= e1.expert_hopfield2(stlayer2.stcore[0:10*frame_skip:frame_skip,:,0:60,l],3,threshold,hist_threshold) + area_index2, ascent_bit6, swing_bit6= e1.expert_hopfield2(stlayer2.stcore[0:10*frame_skip:frame_skip,:,60:120,l],2,threshold,hist_threshold) + area_index4, ascent_bit8, swing_bit8= e1.expert_hopfield2(stlayer2.stcore[0:10*frame_skip:frame_skip,:,60:120,l],3,threshold,hist_threshold) + ascent_activites_lr_left = ascent_bit5 + ascent_bit7 + ascent_activites_lr_right = ascent_bit6 + ascent_bit8 + swing_activities_lr_left = swing_bit5 + swing_bit7 + swing_activities_lr_right = swing_bit6 + swing_bit8 + + feature_bit = [np.where(ascent_activites_td_left+ascent_activites_td_right>0,1,0) , + np.where(swing_activities_td_left+swing_activities_td_right>0,1,0) , + np.where(ascent_activites_lr_left +swing_activities_lr_left>0,1,0) , + np.where( ascent_activites_lr_right + 
def visualize(input, input_shape):
    """Display *input* reshaped to *input_shape* as a 'hot' heat map.

    Opens an interactive matplotlib window (blocking) with a colorbar.
    """
    heat_map = np.reshape(input, input_shape)
    plt.matshow(heat_map, cmap='hot')
    plt.colorbar()
    plt.show()
def save_curve(x, y, image_name):
    """Plot ``y`` against ``x`` and write the figure to *image_name*.

    The current figure is cleared afterwards so successive calls do not
    stack curves on top of each other.
    """
    plt.plot(x, y)
    plt.savefig(image_name)
    plt.clf()
def save_visualize_img_gif(input_list, image_name):
    """Write a sequence of already-rendered image frames to an animated GIF.

    Parameters
    ----------
    input_list : iterable of image arrays accepted by ``imageio.mimsave``
    image_name : destination path of the GIF (encoded at 25 fps)
    """
    print("Saving", image_name)
    # list() replaces the original element-by-element copy loop, which also
    # shadowed the builtin `input`; behavior is identical.
    imageio.mimsave(image_name, list(input_list), fps=25)
def save_visualize_3dsurface(input, input_shape, image_name):
    """Render *input* (a 2-D array of shape *input_shape*) as a rainbow
    3-D surface plot and save it to *image_name* at 300 dpi."""
    fig = plt.figure()
    axes = Axes3D(fig, azim=-75, elev=30)
    # Build the (row, col) coordinate grid matching the data shape.
    cols = np.arange(0, input_shape[1], 1)
    rows = np.arange(0, input_shape[0], 1)
    grid_x, grid_y = np.meshgrid(cols, rows)
    axes.plot_surface(grid_x, grid_y, input, rstride=1, cstride=1, cmap='rainbow')
    plt.savefig(image_name, dpi=300)
dist + channel_num / 2 + plt.plot(x, y) + i = i + 1 + + plt.xlabel("Sample Data Point", size=14) + plt.ylabel("Recording Channel", size=14) + + plt.title(title, + fontdict={'family': 'serif', + 'color': 'darkgreen', + 'weight': 'bold', + 'size': 18}) + + plt.savefig(image_name) \ No newline at end of file diff --git a/vote_infer.py b/vote_infer.py new file mode 100644 index 0000000..b0fe380 --- /dev/null +++ b/vote_infer.py @@ -0,0 +1,56 @@ +import numpy as np +import cfg + +infer_list = list() +ref_result = [10]*24 + [1]*24 +[8]*24 + [9]*24 +for fs in [1,2,3,4,5]: + for thres in [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9]: + for hist_thres in range(2,17): +# for fs in [1,2,3,4,5]: +# for thres in [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9]: +# for hist_thres in range(4,15): + exp_name = "8_1910_hop_train36_{:}_{:}_{:}".format(fs, thres, hist_thres) + # if exp_name not in ["8_1910_hop_train36_2_0.6_7", + # "8_1910_hop_train36_2_0.2_7", + # "8_1910_hop_train36_4_0.1_9", + # "8_1910_hop_train36_4_0.3_3", + # "8_1910_hop_train36_2_0.4_7", + # "8_1910_hop_train36_3_0.6_6", + # "8_1910_hop_train36_5_0.3_6", + # "8_1910_hop_train36_3_0.5_11", + # "8_1910_hop_train36_3_0.6_7"]: + # continue + try: + infer_result = np.loadtxt("/Users/zzh/Code/SGF_v2/data/" + exp_name + "/predict_list.txt") + # print(cfg.code_path + "/data/" + exp_name + "/predict_list.txt") + infer_list.append(infer_result) + except: + print("miss ", exp_name) +infer_result_array = np.array(infer_list) +print(infer_result_array.shape) + +correct_cnt = 0 +for i in range(0,97): + cnt_1 = 0 + cnt_8 = 0 + cnt_9 = 0 + cnt_10 = 0 + for pre_i, pre in enumerate(infer_result_array[:,i]): + if pre == 1: + cnt_1 += 1 + elif pre == 8: + cnt_8 += 1 + elif pre == 9: + cnt_9 += 1 + elif pre == 10: + cnt_10 += 1 + cnt_max = max(cnt_1, cnt_8, cnt_9, cnt_10) + if cnt_1 == cnt_max and ref_result[i] == 1: + correct_cnt += 1 + elif cnt_8 == cnt_max and ref_result[i] == 8: + correct_cnt += 1 + elif cnt_9 == cnt_max and ref_result[i] 
def write_excel(excel_name, exp_name, args, acc_list):
    """Append one result row (experiment name, args string, accuracies)
    to *excel_name*, creating the workbook on first use."""
    if os.path.exists(excel_name):
        # Re-open the existing workbook and append below its last row.
        base_row = blank_raw(excel_name)
        data = xlrd.open_workbook(excel_name, formatting_info=True)
        wb = copy(wb=data)
        ws = wb.get_sheet(0)
    else:
        base_row = 0
        wb = xlwt.Workbook(encoding='ascii')
        ws = wb.add_sheet('sheet1')

    ws.write(base_row, 0, exp_name)
    ws.write(base_row, 1, args)
    # Accuracies go in consecutive columns starting at column 2.
    for col, acc in enumerate(acc_list, start=2):
        ws.write(base_row, col, acc)

    wb.save(excel_name)
    print("results saved in", excel_name)
def write_pattern_curve_analyse_lstm(excel_name, sheet_name, exp_name, patterns, pattern_match_num, pattern_coo_nnz, pattern_nnz, pattern_inner_nnz,
                                     pattern_num_memory_dict, pattern_num_cal_num_dict, pattern_num_coo_nnz_dict):
    """Dump per-pattern statistics and per-pattern-count cost curves into
    one sheet of an .xls workbook, appending below any existing rows.

    Rows written (relative to the sheet's current last row):
      +1 experiment name; +2..+6 pattern id / nnz / match counters;
      +8..+11 pattern-count vs. memory / cal_num / left_coo curves, with
      the minimum memory and cal_num cells highlighted in red.

    NOTE(review): indentation reconstructed from a flattened patch —
    verify the loop/branch nesting against the original file.
    """
    # print(pattern_num_memory_dict)
    # Only the first 200 patterns are reported to keep the sheet bounded.
    if len(patterns) > 200:
        patterns = patterns[:200]
        pattern_match_num = pattern_match_num[:200]
        pattern_coo_nnz = pattern_coo_nnz[:200]
        pattern_nnz = pattern_nnz[:200]
        pattern_inner_nnz = pattern_inner_nnz[:200]

    if not os.path.exists(os.path.dirname(excel_name)):
        os.makedirs(os.path.dirname(excel_name))

    if not os.path.exists(excel_name):
        base_row = 0
        wb = xlwt.Workbook(encoding='ascii')
        ws = wb.add_sheet(sheet_name)
    else:
        # Append mode: copy the existing workbook, reuse or add the sheet.
        data = xlrd.open_workbook(excel_name, formatting_info=True)
        wb = copy(wb=data)
        if sheet_name in data.sheet_names():
            ws = wb.get_sheet(sheet_name)
        else:
            ws = wb.add_sheet(sheet_name)
        base_row = blank_row(excel_name,sheet_name)

    # Row layout for this record (one blank row left between the two tables).
    name_row = base_row + 1
    ptid_row = base_row + 2
    match_num_row = base_row + 3
    pt_nnz_row = base_row + 4
    coo_nnz_row = base_row + 5
    nnz_num_row = base_row + 6

    pattern_num_row = base_row + 8
    pattern_num_memory_row = base_row + 9
    pattern_num_cal_num_row = base_row + 10
    pattern_num_coo_nnz_row = base_row + 11

    # Column-0 labels for every row.
    ws.write(name_row, 0, exp_name)
    ws.write(ptid_row, 0, 'pattern id')
    ws.write(pt_nnz_row, 0, 'pattern nnz')
    ws.write(match_num_row, 0, 'match_num')
    ws.write(coo_nnz_row, 0, 'match_coo_nnz')
    ws.write(nnz_num_row, 0, 'match_nnz_num')

    ws.write(pattern_num_row, 0, 'pattern_num')
    ws.write(pattern_num_memory_row, 0, 'memory')
    ws.write(pattern_num_cal_num_row, 0, 'cal_num')
    ws.write(pattern_num_coo_nnz_row, 0, 'left_coo')

    # First table: one column per pattern.
    ptid_list = range(len(patterns))
    for i, e in enumerate(ptid_list):
        ws.write(ptid_row, i+1, int(e))
    for i, t in enumerate(pattern_inner_nnz):
        ws.write(pt_nnz_row, i+1, int(t))
    for i, t in enumerate(pattern_match_num):
        ws.write(match_num_row, i+1, int(t))
    for i, t in enumerate(pattern_coo_nnz):
        ws.write(coo_nnz_row, i+1, int(t))
    for i, t in enumerate(pattern_nnz):
        ws.write(nnz_num_row, i+1, int(t))

    # Second table: one column per candidate pattern count; the minimum
    # memory/cal_num entries are written with a red font for visibility.
    # ptnum_list = range(len(pattern_num_memory_dict))
    for i, p_num in enumerate(pattern_num_memory_dict.keys()):
        ws.write(pattern_num_row, i+1, int(p_num))

        if int(pattern_num_memory_dict[p_num]) == int(np.array([m for m in pattern_num_memory_dict.values()]).min()):
            style = xlwt.XFStyle()
            font = xlwt.Font()
            font.colour_index = 2  # red
            style.font = font
            ws.write(pattern_num_memory_row, i+1, int(pattern_num_memory_dict[p_num]), style)
        else:
            ws.write(pattern_num_memory_row, i+1, int(pattern_num_memory_dict[p_num]))

        if int(pattern_num_cal_num_dict[p_num]) == int(np.array([m for m in pattern_num_cal_num_dict.values()]).min()):
            style = xlwt.XFStyle()
            font = xlwt.Font()
            font.colour_index = 2  # red
            style.font = font
            ws.write(pattern_num_cal_num_row, i+1, int(pattern_num_cal_num_dict[p_num]), style)
        else:
            ws.write(pattern_num_cal_num_row, i+1, int(pattern_num_cal_num_dict[p_num]))

        ws.write(pattern_num_coo_nnz_row, i+1, int(pattern_num_coo_nnz_dict[p_num]))

    wb.save(excel_name)
    print("results saved in", excel_name)
pattern_inner_nnz[:200] + + if not os.path.exists(excel_name): + base_row = 0 + wb = xlwt.Workbook(encoding='ascii') + ws = wb.add_sheet('sheet1') + else: + base_row = blank_raw(excel_name) + data = xlrd.open_workbook(excel_name, formatting_info=True) + + wb = copy(wb=data) + ws = wb.get_sheet(0) + + name_row = base_row + 1 + ptid_row = base_row + 2 + match_num_row = base_row + 3 + pt_nnz_row = base_row + 4 + coo_nnz_row = base_row + 5 + nnz_num_row = base_row + 6 + + pattern_num_row = base_row + 8 + pattern_num_memory_row = base_row + 9 + pattern_num_cal_num_row = base_row + 10 + pattern_num_coo_nnz_row = base_row + 11 + + + + ws.write(name_row, 0, exp_name) + ws.write(ptid_row, 0, 'pattern id') + ws.write(pt_nnz_row, 0, 'pattern nnz') + ws.write(match_num_row, 0, 'match_num') + ws.write(coo_nnz_row, 0, 'match_coo_nnz') + ws.write(nnz_num_row, 0, 'match_nnz_num') + + ws.write(pattern_num_row, 0, 'pattern_num') + ws.write(pattern_num_memory_row, 0, 'memory') + ws.write(pattern_num_cal_num_row, 0, 'cal_num') + ws.write(pattern_num_coo_nnz_row, 0, 'left_coo') + + ptid_list = range(len(patterns)) + for i, e in enumerate(ptid_list): + ws.write(ptid_row, i+1, int(e)) + for i, t in enumerate(pattern_inner_nnz): + ws.write(pt_nnz_row, i+1, int(t)) + for i, t in enumerate(pattern_match_num): + ws.write(match_num_row, i+1, int(t)) + for i, t in enumerate(pattern_coo_nnz): + ws.write(coo_nnz_row, i+1, int(t)) + for i, t in enumerate(pattern_nnz): + ws.write(nnz_num_row, i+1, int(t)) + + # ptnum_list = range(len(pattern_num_memory_dict)) + for i, p_num in enumerate(pattern_num_memory_dict.keys()): + ws.write(pattern_num_row, i+1, int(p_num)) + + if int(pattern_num_memory_dict[p_num]) == int(np.array([m for m in pattern_num_memory_dict.values()]).min()): + style = xlwt.XFStyle() + font = xlwt.Font() + font.colour_index = 2 + style.font = font + ws.write(pattern_num_memory_row, i+1, int(pattern_num_memory_dict[p_num]), style) + else: + ws.write(pattern_num_memory_row, i+1, 
int(pattern_num_memory_dict[p_num])) + + if int(pattern_num_cal_num_dict[p_num]) == int(np.array([m for m in pattern_num_cal_num_dict.values()]).min()): + style = xlwt.XFStyle() + font = xlwt.Font() + font.colour_index = 2 + style.font = font + ws.write(pattern_num_cal_num_row, i+1, int(pattern_num_cal_num_dict[p_num]), style) + else: + ws.write(pattern_num_cal_num_row, i+1, int(pattern_num_cal_num_dict[p_num])) + + ws.write(pattern_num_coo_nnz_row, i+1, int(pattern_num_coo_nnz_dict[p_num])) + + wb.save(excel_name) + print("results saved in", excel_name) + + +def write_test_acc(excel_name, exp_name, + f1, val_loss, tps, preds, poses): + # train_loss_list = [1.32, 1.543, 1.111, 1.098] + # val_loss_list = [1.32, 1.543, 1.111, 1.098] + + # print(pattern_num_memory_dict) + if not os.path.exists(excel_name): + base_row = 0 + wb = xlwt.Workbook(encoding='ascii') + ws = wb.add_sheet('sheet1') + ws.write(base_row, 0, 'exp_name') + ws.write(base_row, 1, 'f1') + ws.write(base_row, 2, 'val_loss') + ws.write(base_row, 3, 'tps') + ws.write(base_row, 4, 'preds') + ws.write(base_row, 5, 'poses') + ws.write(base_row+1, 0, exp_name) + ws.write(base_row+1, 1, float(f1)) + ws.write(base_row+1, 2, float(val_loss)) + ws.write(base_row+1, 3, int(tps)) + ws.write(base_row+1, 4, int(preds)) + ws.write(base_row+1, 5, int(poses)) + else: + base_row = blank_raw(excel_name) + data = xlrd.open_workbook(excel_name, formatting_info=True) + wb = copy(wb=data) + ws = wb.get_sheet(0) + ws.write(base_row, 0, exp_name) + ws.write(base_row, 1, float(f1)) + ws.write(base_row, 2, float(val_loss)) + ws.write(base_row, 3, int(tps)) + ws.write(base_row, 4, int(preds)) + ws.write(base_row, 5, int(poses)) + + wb.save(excel_name) + print("results saved in", excel_name) + + + +def blank_raw(excel_name): + wb = xlrd.open_workbook(excel_name) + sheet1 = wb.sheet_by_index(0) + rowNum = sheet1.nrows + return rowNum + +def blank_row(excel_name, sheet_name): + wb = xlrd.open_workbook(excel_name) + if sheet_name not 
in wb.sheet_names(): + rowNum = 0 + else: + sheet1 = wb.sheet_by_name(sheet_name) + rowNum = sheet1.nrows + return rowNum + + +if __name__ == "__main__": + + train_loss_list = [1.32, 1.543, 1.111, 1.098] + val_loss_list = [1.32, 1.543, 1.111, 1.098] + for i in range(4): + write_excel("test.xls", str(i), train_loss_list, val_loss_list) \ No newline at end of file