reduce context len to 16k for tests
winglian committed Jan 17, 2024
1 parent 9292665 · commit 5e0890d
Showing 1 changed file with 3 additions and 3 deletions.

tests/e2e/patched/test_llama_s2_attention.py (+3 −3)
```diff
@@ -31,7 +31,7 @@ def test_lora_s2_attn(self, temp_dir):
             {
                 "base_model": "JackFram/llama-68m",
                 "tokenizer_type": "LlamaTokenizer",
-                "sequence_len": 65536,
+                "sequence_len": 32768,
                 "sample_packing": False,
                 "flash_attention": True,
                 "s2_attention": True,
```
```diff
@@ -77,15 +77,15 @@ def test_fft_s2_attn(self, temp_dir):
             {
                 "base_model": "JackFram/llama-68m",
                 "tokenizer_type": "LlamaTokenizer",
-                "sequence_len": 65536,
+                "sequence_len": 32768,
                 "sample_packing": False,
                 "flash_attention": True,
                 "s2_attention": True,
                 "val_set_size": 0.1,
                 "special_tokens": {},
                 "datasets": [
                     {
-                        "path": "mhenrichsen/alpaca_2k_test",
+                        "path": "Yukang/LongAlpaca-12k",
                         "type": "alpaca",
                     },
                 ],
```
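
Taken together, the two hunks leave the s2_attention e2e tests with a config along the lines of the sketch below. Only the keys visible in the diff are confirmed by this commit; the `DictDefault` wrapper and its import path are assumptions based on the pattern axolotl's other e2e tests follow, not part of this diff.

```python
# Sketch of the test config after this commit. Keys shown are taken
# from the diff hunks above; the DictDefault wrapper and import are
# assumed from axolotl's usual e2e test structure.
from axolotl.utils.dict import DictDefault

cfg = DictDefault(
    {
        "base_model": "JackFram/llama-68m",
        "tokenizer_type": "LlamaTokenizer",
        "sequence_len": 32768,  # halved from 65536 in this commit
        "sample_packing": False,
        "flash_attention": True,
        "s2_attention": True,  # shifted sparse attention (LongLoRA-style)
        "val_set_size": 0.1,
        "special_tokens": {},
        "datasets": [
            {
                # replaces mhenrichsen/alpaca_2k_test in this commit
                "path": "Yukang/LongAlpaca-12k",
                "type": "alpaca",
            },
        ],
    }
)
```

The dataset swap presumably goes hand in hand with the shorter context: LongAlpaca-12k contains long-context samples, so the shifted-attention code path is still exercised even at the reduced sequence length.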
