At first I got the same issue as https://github.com/wouterkool/attention-learn-to-route/issues/16, and the same error kept occurring even on Python 3.8, so I tried using `return self[key]` to see what would happen next. Then I ran into a problem with `radius` in the input, shown in the log and traceback below. I have traced it all the way back but could not figure out where the issue begins. Any help will be appreciated.
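Before the full log, here is a minimal, standalone illustration of what I think the mismatch is, judging only from the traceback: `state_csp_cover.py` reads `input['radius']`, so it seems to expect each batch to be a dict carrying a radius, while a TSP-style dataset item is just a bare coordinate tensor. The `'loc'` key name and the radius value below are guesses for illustration, not necessarily the repo's actual data format:

```python
import torch

# TSP-style dataset item: just node coordinates, no dict keys at all.
tsp_style_item = torch.rand(20, 2)

# What StateCSP.initialize() apparently expects, judging from the traceback
# below: a dict with the coordinates plus a covering radius.
# The 'loc' key name and the 0.3 value are guesses for illustration only.
csp_style_item = {
    'loc': torch.rand(20, 2),
    'radius': torch.tensor(0.3),
}

print('radius' in csp_style_item)        # True
print(isinstance(tsp_style_item, dict))  # False -> input['radius'] raises KeyError
```

If that reading is right, the question becomes why the baseline evaluation dataset is not producing dict samples with a `'radius'` entry.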
(env_pytorch) /@ CSP_Attention-master % python3 run.py --graph_size 20 --baseline rollout --run_name 'tsp20_rollout'
{'baseline': 'rollout',
'batch_size': 256,
'bl_alpha': 0.05,
'bl_warmup_epochs': 1,
'checkpoint_encoder': False,
'checkpoint_epochs': 1,
'data_distribution': 'const',
'embedding_dim': 128,
'epoch_size': 320000,
'epoch_start': 0,
'eval_batch_size': 1024,
'eval_only': False,
'exp_beta': 0.8,
'graph_size': 20,
'head': 8,
'hidden_dim': 128,
'load_path': None,
'log_dir': 'logs',
'log_step': 50,
'lr_critic': 0.0001,
'lr_decay': 1.0,
'lr_model': 0.0001,
'max_grad_norm': 0,
'model': 'attention',
'n_encode_layers': 3,
'n_epochs': 50,
'no_cuda': False,
'no_progress_bar': False,
'no_tensorboard': False,
'normalization': 'instance',
'output_dir': 'outputs',
'problem': 'csp',
'resume': None,
'run_name': 'tsp20_rollout_20240724T195320',
'save_dir': 'outputs/csp_20/tsp20_rollout_20240724T195320',
'seed': 1234,
'shrink_size': 1,
'tanh_clipping': 10.0,
'test_instance': None,
'use_cuda': False,
'val_dataset': None,
'val_size': 10000}
outputs/csp_20/tsp20_rollout_20240724T195320
Evaluating baseline model on evaluation dataset
0%| | 0/10 [00:00<?, ?it/s]Traceback (most recent call last):
File "run.py", line 175, in
run(get_options())
File "run.py", line 105, in run
baseline = RolloutBaseline(model, problem, opts)
File "/Users/Documents/Academic/cobot/CSP_Attention-master/reinforce_baselines.py", line 157, in init
self._update_model(model, epoch)
File "/Users/Documents/Academic/cobot/CSP_Attention-master/reinforce_baselines.py", line 177, in _update_model
self.bl_vals = rollout(self.model, self.dataset, self.opts).cpu().numpy()
File "/Users/Documents/Academic/cobot/CSP_Attention-master/train.py", line 47, in rollout
return torch.cat([
File "/Users/Documents/Academic/cobot/CSP_Attention-master/train.py", line 48, in
eval_model_bat(bat)
File "/Users/Documents/Academic/cobot/CSP_Attention-master/train.py", line 44, in eval_model_bat
cost, _ = model(move_to(bat, opts.device))
File "/Users/anaconda3/envs/env_pytorch/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/Users/anaconda3/envs/env_pytorch/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
File "/Users/Documents/Academic/cobot/CSP_Attention-master/nets/attention_model.py", line 151, in forward
_log_p, pi = self._inner_rnn(input, embeddings)
File "/Users/Documents/Academic/cobot/CSP_Attention-master/nets/attention_model.py", line 229, in _inner_rnn
state = self.problem.make_state(input)
File "/Users/Documents/Academic/cobot/CSP_Attention-master/problems/csp/problem_csp.py", line 53, in make_state
return StateCSP.initialize(*args, **kwargs)
File "/Users/Documents/Academic/cobot/CSP_Attention-master/problems/csp/state_csp_cover.py", line 56, in initialize
radius = input['radius']
KeyError: 'radius'
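What I plan to try next is to inspect what the baseline evaluation dataset actually yields, right where `rollout()` batches it. This sketch assumes the fork keeps the upstream attention-learn-to-route problem interface (a `CSP` class in `problems/csp/problem_csp.py` with a `make_dataset(size=..., num_samples=..., distribution=...)` classmethod); if the fork builds its dataset differently, the same check applies to whatever object `RolloutBaseline` passes to `rollout()`:

```python
from torch.utils.data import DataLoader

# Assumption: the fork exposes an upstream-style problem class with make_dataset().
from problems.csp.problem_csp import CSP

# Build a tiny dataset the way the rollout baseline presumably does.
dataset = CSP.make_dataset(size=20, num_samples=8, distribution='const')
loader = DataLoader(dataset, batch_size=4)

for bat in loader:
    # A dict batch should list 'radius' among its keys; a plain tensor
    # batch would explain the KeyError above.
    if isinstance(bat, dict):
        print({k: tuple(v.shape) for k, v in bat.items()})
    else:
        print(type(bat), getattr(bat, 'shape', None))
    break
```

If the samples turn out to be plain coordinate tensors, the fix presumably belongs in the CSP dataset generation (adding the `'radius'` entry) rather than in the `return self[key]` change I made earlier.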