From 4989dce0f2487a40fc4cc9bd73e254a3201acfed Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Thu, 14 Sep 2023 10:31:07 -0400
Subject: [PATCH 1/3] add phi full ft example

---
 examples/phi/phi-ft.yml | 75 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 75 insertions(+)
 create mode 100644 examples/phi/phi-ft.yml

diff --git a/examples/phi/phi-ft.yml b/examples/phi/phi-ft.yml
new file mode 100644
index 0000000000..b5cde8139c
--- /dev/null
+++ b/examples/phi/phi-ft.yml
@@ -0,0 +1,75 @@
+base_model: microsoft/phi-1_5
+base_model_config: microsoft/phi-1_5
+model_type: AutoModelForCausalLM
+tokenizer_type: AutoTokenizer
+is_llama_derived_model: false
+trust_remote_code: true
+
+load_in_8bit: false
+load_in_4bit: false
+strict: false
+
+datasets:
+  - path: garage-bAInd/Open-Platypus
+    type: alpaca
+
+dataset_prepared_path: last_run_prepared
+val_set_size: 0.05
+output_dir: ./phi-sft-out
+
+sequence_len: 2048
+sample_packing: false # does not work with phi
+pad_to_sequence_len:
+
+adapter:
+lora_model_dir:
+lora_r:
+lora_alpha:
+lora_dropout:
+lora_target_linear:
+lora_fan_in_fan_out:
+
+wandb_project:
+wandb_entity:
+wandb_watch:
+wandb_run_id:
+wandb_log_model:
+
+gradient_accumulation_steps: 2
+micro_batch_size: 1
+num_epochs: 4
+optimizer: adamw_bnb_8bit
+adam_beta2: 0.95
+adam_epsilon: 0.00001
+max_grad_norm: 1.0
+lr_scheduler: cosine
+learning_rate: 0.000003
+
+train_on_inputs: false
+group_by_length: true
+bf16: true
+fp16: false
+tf32: true
+
+gradient_checkpointing:
+early_stopping_patience:
+resume_from_checkpoint:
+local_rank:
+logging_steps: 1
+xformers_attention:
+flash_attention:
+
+warmup_steps: 100
+eval_steps: 0.05
+save_steps:
+debug:
+deepspeed:
+weight_decay: 0.1
+fsdp:
+fsdp_config:
+resize_token_embeddings_to_32x: true
+special_tokens:
+  bos_token: "<|endoftext|>"
+  eos_token: "<|endoftext|>"
+  unk_token: "<|endoftext|>"
+  pad_token: "<|endoftext|>"

From 3fafebb912773cd139fa516dcd840e8064757925 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Thu, 14 Sep 2023 10:33:14 -0400
Subject: [PATCH 2/3] Add readme to point out that deepspeed should be used

---
 examples/phi/README.md | 7 +++++++
 1 file changed, 7 insertions(+)
 create mode 100644 examples/phi/README.md

diff --git a/examples/phi/README.md b/examples/phi/README.md
new file mode 100644
index 0000000000..fcbbe1eb2f
--- /dev/null
+++ b/examples/phi/README.md
@@ -0,0 +1,7 @@
+# Phi
+
+Due to some nuances with the phi code, please use deepspeed when training phi.
+
+```shell
+accelerate launch scripts/finetune.py examples/phi/phi-ft.yml --deepspeed deepspeed/zero2.json
+```

From fed4fdd9b725391d447ab86d8044f33f355b942f Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Thu, 14 Sep 2023 11:05:25 -0400
Subject: [PATCH 3/3] zero1 is better than zero2 for phi

---
 examples/phi/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/phi/README.md b/examples/phi/README.md
index fcbbe1eb2f..6e12eec184 100644
--- a/examples/phi/README.md
+++ b/examples/phi/README.md
@@ -3,5 +3,5 @@
 Due to some nuances with the phi code, please use deepspeed when training phi.
 
 ```shell
-accelerate launch scripts/finetune.py examples/phi/phi-ft.yml --deepspeed deepspeed/zero2.json
+accelerate launch scripts/finetune.py examples/phi/phi-ft.yml --deepspeed deepspeed/zero1.json
 ```
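
The `deepspeed/zero1.json` passed to the launch command ships with the axolotl repository and is not reproduced in this patch series. A minimal ZeRO stage-1 configuration in that spirit might look like the sketch below; the `auto` values are an assumption that defers batch size, gradient accumulation, clipping, and precision to the Hugging Face trainer (which resolves them from phi-ft.yml, e.g. `bf16: true`), and the exact fields in axolotl's own file may differ:

```json
{
  "zero_optimization": {
    "stage": 1,
    "overlap_comm": true
  },
  "bf16": {
    "enabled": "auto"
  },
  "fp16": {
    "enabled": "auto"
  },
  "gradient_accumulation_steps": "auto",
  "gradient_clipping": "auto",
  "train_batch_size": "auto",
  "train_micro_batch_size_per_gpu": "auto",
  "wall_clock_breakdown": false
}
```

ZeRO stage 1 partitions only the optimizer states across GPUs, while stage 2 also partitions gradients; the lighter stage is the one the final patch settles on for phi.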