diff --git a/mcli/mcli-llama2-finetune.yaml b/mcli/mcli-llama2-finetune.yaml
index 50a6035645..41a4fcd35e 100644
--- a/mcli/mcli-llama2-finetune.yaml
+++ b/mcli/mcli-llama2-finetune.yaml
@@ -152,4 +152,4 @@ parameters:
   # Load from local filesystem or remote object store
   # load_path: ./gpt-1b/checkpoints/latest-rank{rank}.pt
   # load_path: s3://my-bucket/my-folder/gpt-1b/checkpoints/latest-rank{rank}.pt
-#  load_weights_only: true  # Only load the weights, not the optimizer state, LR schedule, etc
+  load_weights_only: true  # Only load the weights, not the optimizer state, LR schedule, etc
diff --git a/scripts/train/yamls/finetune/mpt-7b_domain_adapt.yaml b/scripts/train/yamls/finetune/mpt-7b_domain_adapt.yaml
index 233d052353..e533b9118c 100644
--- a/scripts/train/yamls/finetune/mpt-7b_domain_adapt.yaml
+++ b/scripts/train/yamls/finetune/mpt-7b_domain_adapt.yaml
@@ -115,4 +115,4 @@ save_folder: ./{run_name}/checkpoints
 # Load from local filesystem or remote object store
 # load_path: ./gpt-7b/checkpoints/latest-rank{rank}.pt
 # load_path: s3://my-bucket/my-folder/gpt-7b/checkpoints/latest-rank{rank}.pt
-# load_weights_only: true  # Only load the weights, not the optimizer state, LR schedule, etc
+load_weights_only: true  # Only load the weights, not the optimizer state, LR schedule, etc
diff --git a/scripts/train/yamls/finetune/t5-small_dolly_sft.yaml b/scripts/train/yamls/finetune/t5-small_dolly_sft.yaml
index 60feef6a9f..b4d24ef0ff 100644
--- a/scripts/train/yamls/finetune/t5-small_dolly_sft.yaml
+++ b/scripts/train/yamls/finetune/t5-small_dolly_sft.yaml
@@ -101,4 +101,4 @@ callbacks:
 # Load from remote object store
 # REPLACE THE BELOW with you own checkpoint!
 # load_path: oci://my-bucket/my-folder/checkpoints/some_checkpoint.pt
-# load_weights_only: true  # Only load the weights, not the optimizer state, LR schedule, etc
+load_weights_only: true  # Only load the weights, not the optimizer state, LR schedule, etc