Resolved one issue with variables reuse
hkaggarwal committed Aug 30, 2018
1 parent 2338d76 commit 44a046a
Showing 26 changed files with 19 additions and 29 deletions.
23 changes: 11 additions & 12 deletions model.py
@@ -179,18 +179,17 @@ def makeModel(atb,csm,mask,training,nLayers,K,gradientMethod):
     out={}
     out['dc0']=atb
     with tf.name_scope('myModel'):
-        for i in range(1,K+1):
-            j=str(i)
-            with tf.variable_scope('Wts'):
+        with tf.variable_scope('Wts',reuse=tf.AUTO_REUSE):
+            for i in range(1,K+1):
+                j=str(i)
                 out['dw'+j]=dw(out['dc'+str(i-1)],training,nLayers)
 
-            lam1=getLambda()
-            rhs=atb + lam1*out['dw'+j]
-            if gradientMethod=='AG':
-                out['dc'+j]=dc(rhs,csm,mask,lam1)
-            elif gradientMethod=='MG':
-                if training:
-                    out['dc'+j]=dcManualGradient(rhs)
-                else:
-                    out['dc'+j]=dc(rhs,csm,mask,lam1)
+                lam1=getLambda()
+                rhs=atb + lam1*out['dw'+j]
+                if gradientMethod=='AG':
+                    out['dc'+j]=dc(rhs,csm,mask,lam1)
+                elif gradientMethod=='MG':
+                    if training:
+                        out['dc'+j]=dcManualGradient(rhs)
+                    else:
+                        out['dc'+j]=dc(rhs,csm,mask,lam1)
     return out
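
This is the variable-reuse fix named in the commit message: the 'Wts' scope is hoisted out of the unrolling loop and opened once with reuse=tf.AUTO_REUSE, so all K unrolled iterations share a single set of CNN weights. With the old per-iteration `with tf.variable_scope('Wts'):` and no reuse flag, any K>1 makes the second iteration try to create variables that already exist, which TF1 rejects with a ValueError. A minimal sketch of the reuse behavior, assuming TensorFlow 1.x; `block` is a hypothetical stand-in for the repository's dw, not its actual code:

import tensorflow as tf  # TensorFlow 1.x

def block(x):
    # First call creates 'Wts/w'; with AUTO_REUSE, later calls return the same variable.
    w = tf.get_variable('w', shape=[1], initializer=tf.ones_initializer())
    return w * x

x = tf.constant([2.0])
outs = []
with tf.variable_scope('Wts', reuse=tf.AUTO_REUSE):
    for i in range(3):            # analogous to the K unrolled iterations
        outs.append(block(x))     # without AUTO_REUSE this raises ValueError at i=1

print(len(tf.global_variables()))  # 1: a single weight shared by every iteration

Sharing one set of weights across the unroll is presumably what allows K to move from 1 to 2 in trn.py below.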
3 changes: 0 additions & 3 deletions savedModels/09Aug_0333pm_5L_1K_100E_MG/checkpoint

This file was deleted.

2 changes: 0 additions & 2 deletions savedModels/09Aug_0333pm_5L_1K_100E_MG/checkpointTst

This file was deleted.

Binary file removed savedModels/09Aug_0333pm_5L_1K_100E_MG/model.index
Binary file removed savedModels/09Aug_0333pm_5L_1K_100E_MG/model.meta
Binary file removed savedModels/09Aug_0333pm_5L_1K_100E_MG/modelTst.index
Binary file removed savedModels/09Aug_0333pm_5L_1K_100E_MG/modelTst.meta
3 changes: 0 additions & 3 deletions savedModels/09Aug_0335pm_5L_1K_100E_AG/checkpoint

This file was deleted.

2 changes: 0 additions & 2 deletions savedModels/09Aug_0335pm_5L_1K_100E_AG/checkpointTst

This file was deleted.

Binary file removed savedModels/09Aug_0335pm_5L_1K_100E_AG/model.index
Binary file removed savedModels/09Aug_0335pm_5L_1K_100E_AG/model.meta
Binary file removed savedModels/09Aug_0335pm_5L_1K_100E_AG/modelTst.index
Binary file removed savedModels/09Aug_0335pm_5L_1K_100E_AG/modelTst.meta
15 changes: 8 additions & 7 deletions trn.py
@@ -63,11 +63,11 @@

 #--------------------------------------------------------------
 #% SET THESE PARAMETERS CAREFULLY
-nLayers=5
-epochs=100
+nLayers=3
+epochs=5
 batchSize=4
 gradientMethod='AG'
-K=1
+K=2
 sigma=0.01
 
 #--------------------------------------------------------------------------
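
As a reading aid, the new parameter block with editorial glosses; the comments are inferred from how each name is used in model.py above and are not part of the commit:

nLayers=3            # depth of the CNN denoiser dw at each unrolled iteration
epochs=5             # number of training epochs
batchSize=4          # mini-batch size
gradientMethod='AG'  # 'AG': autodiff through dc(); 'MG': dcManualGradient while training
K=2                  # unrolled iterations (the range(1,K+1) loop); K>1 needs the reuse fix
sigma=0.01           # noise level used elsewhere in trn.py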
@@ -136,10 +136,11 @@
 tf.summary.scalar('loss', loss)
 update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
 
-optimizer = tf.train.AdamOptimizer()
-gvs = optimizer.compute_gradients(loss)
-capped_gvs = [(tf.clip_by_value(grad, -1., 1.), var) for grad, var in gvs]
-opToRun=optimizer.apply_gradients(capped_gvs)
+with tf.name_scope('optimizer'):
+    optimizer = tf.train.AdamOptimizer()
+    gvs = optimizer.compute_gradients(loss)
+    capped_gvs = [(tf.clip_by_value(grad, -1., 1.), var) for grad, var in gvs]
+    opToRun=optimizer.apply_gradients(capped_gvs)


#%% training code
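The optimizer hunk only wraps the existing ops in a tf.name_scope, which groups them under one node in the TensorBoard graph; the training step itself is unchanged: Adam with elementwise gradient clipping, i.e. compute_gradients, clip each gradient to [-1, 1], then apply_gradients instead of a single minimize() call. A self-contained sketch of that pattern, assuming TensorFlow 1.x; the variable and loss are toys, not from trn.py:

import tensorflow as tf  # TensorFlow 1.x

x = tf.Variable(3.0)
loss = 100.0 * tf.square(x)   # toy loss; the raw gradient at x=3 is 600

with tf.name_scope('optimizer'):
    optimizer = tf.train.AdamOptimizer()
    gvs = optimizer.compute_gradients(loss)                           # list of (grad, var) pairs
    capped_gvs = [(tf.clip_by_value(g, -1., 1.), v) for g, v in gvs]  # clip each entry to [-1, 1]
    opToRun = optimizer.apply_gradients(capped_gvs)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(opToRun)   # one Adam step taken with the clipped gradient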
