Skip to content

Commit

Permalink
test
Browse files Browse the repository at this point in the history
  • Loading branch information
ManfeiBai committed Apr 10, 2024
1 parent e0a4b92 commit c1c86cd
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion test/test_fori_loop_simple_linear_model_test_code.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def body_fun(l_in_i):
# bias_0 = linear_0.bias
# one_value_, lower_, upper_, add_res_x_, bias_, weight_, l_in_i_plus_1_, l_out_= fori_loop(one_value, lower, upper, body_fun, init_val, l_in_0, weight_0=weight_0, bias_0=bias_0) # , placeholder_func, placeholder_input) # , linear_0, l_in_0)
# one_value_, lower_, upper_, add_res_x_, bias_, weight_, l_in_i_plus_1_, l_out_= fori_loop(one_value, lower, upper, linear_0, init_val, l_in_0) #, weight_0=weight_0, bias_0=bias_0) # , placeholder_func, placeholder_input) # , linear_0, l_in_0)
upper_, lower_, one_value_, add_res_x_, bias_, weight_, l_out_, l_in_i_plus_1_= fori_loop(one_value, upper, lower, linear_0, init_val, l_in_0) #, weight_0=weight_0, bias_0=bias_0) # , placeholder_func, placeholder_input) # , linear_0, l_in_0)
upper_, lower_, one_value_, add_res_x_, bias_, weight_, l_in_i_plus_1_, l_out_= fori_loop(one_value, upper, lower, linear_0, init_val, l_in_0) #, weight_0=weight_0, bias_0=bias_0) # , placeholder_func, placeholder_input) # , linear_0, l_in_0)
#one_value, [upper],[lower],x, [bias],[new_weight], [l_in_i+1], l_out

print("one_value_: ", one_value_)
Expand Down
2 changes: 1 addition & 1 deletion torch_xla/experimental/fori_loop.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ def body_fn(upper, lower, one_value, x, bias_0, weight_0, output_value, *input_v
else:
weight = body_fun.weight
l_in_i_plus_1 = torch.ones([10], dtype=torch.float32, device=device)
new_lower = torch.add(one_value, lower)
new_lower = torch.add(one_value, lower) ### NOTE: this matters — torch.add appears to modify the second argument's value, even when we capture the result in a new variable!
return upper, new_lower, one_value, torch.add(one_value, x), bias_0, weight, *input_value, body_fun(*input_value) # , *input_value
# ---
# return torch.sub(upper, one_value), lower, body_fun(one_value, x, input_value)
Expand Down

0 comments on commit c1c86cd

Please sign in to comment.