
Commit 2bf128f
#5337: Updated Mistral CI weight and cache path location
mtairum committed Jul 20, 2024
1 parent 3540d17 commit 2bf128f
Showing 13 changed files with 43 additions and 43 deletions.
10 changes: 5 additions & 5 deletions models/demos/wormhole/mistral7b/demo/demo.py
@@ -88,9 +88,9 @@ def preprocess_inputs(input_prompts, tokenizer, model_args, dtype, embd, instruc
 def run_mistral_demo(user_input, batch_size, device, instruct_mode, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
@@ -278,8 +278,8 @@ def run_mistral_demo(user_input, batch_size, device, instruct_mode, is_ci_env):
     ids=["general_weights", "instruct_weights"],
 )
 def test_mistral7B_demo(device, use_program_cache, input_prompts, instruct_weights, is_ci_env):
-    if is_ci_env and instruct_weights == False:
-        pytest.skip("CI demo test only runs instruct weights (to reduce CI pipeline load)")
+    if is_ci_env and instruct_weights == True:
+        pytest.skip("CI demo test only runs general weights (to reduce CI pipeline load)")

     return run_mistral_demo(
         user_input=input_prompts, batch_size=32, device=device, instruct_mode=instruct_weights, is_ci_env=is_ci_env
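The same three-line CI export block recurs in every file touched by this commit. A minimal sketch of how it could be shared instead, assuming a hypothetical conftest.py fixture named mistral_ci_env (not part of this commit; the existing is_ci_env fixture is assumed to be available):

    import os

    import pytest

    # Hypothetical single source of truth for the CI weight/cache location updated in this commit.
    CI_MODEL_DIR = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

    @pytest.fixture
    def mistral_ci_env(is_ci_env):
        # Export the Mistral CI paths once, instead of repeating the block in every test.
        if is_ci_env:
            os.environ["MISTRAL_CKPT_DIR"] = CI_MODEL_DIR
            os.environ["MISTRAL_TOKENIZER_PATH"] = CI_MODEL_DIR
            os.environ["MISTRAL_CACHE_PATH"] = CI_MODEL_DIR
        return is_ci_env

With such a fixture, a future path move would touch one constant rather than thirteen files.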
10 changes: 5 additions & 5 deletions models/demos/wormhole/mistral7b/demo/demo_with_prefill.py
@@ -129,9 +129,9 @@ def preprocess_inputs_prefill(input_prompts, tokenizer, model_args, dtype, embd,
 def run_mistral_demo(user_input, batch_size, device, instruct_mode, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
@@ -357,8 +357,8 @@ def run_mistral_demo(user_input, batch_size, device, instruct_mode, is_ci_env):
     ids=["general_weights", "instruct_weights"],
 )
 def test_mistral7B_demo(device, use_program_cache, input_prompts, instruct_weights, is_ci_env):
-    if is_ci_env and instruct_weights == False:
-        pytest.skip("CI demo test only runs instruct weights (to reduce CI pipeline load)")
+    if is_ci_env and instruct_weights == True:
+        pytest.skip("CI demo test only runs general weights (to reduce CI pipeline load)")

     return run_mistral_demo(
         user_input=input_prompts, batch_size=32, device=device, instruct_mode=instruct_weights, is_ci_env=is_ci_env
@@ -24,9 +24,9 @@
 def test_mistral_attention_inference(device, use_program_cache, reset_seeds, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
@@ -34,9 +34,9 @@
 def test_mistral_attention_inference(seq_len, device, use_program_cache, reset_seeds, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
6 changes: 3 additions & 3 deletions models/demos/wormhole/mistral7b/tests/test_mistral_decoder.py
@@ -24,9 +24,9 @@
 def test_mistral_decoder_inference(device, use_program_cache, reset_seeds, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
@@ -34,9 +34,9 @@
 def test_mistral_decoder_inference(device, seq_len, use_program_cache, reset_seeds, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
@@ -28,9 +28,9 @@ def forward(self, x):
 def test_mistral_embedding(device, use_program_cache, reset_seeds, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
6 changes: 3 additions & 3 deletions models/demos/wormhole/mistral7b/tests/test_mistral_mlp.py
@@ -31,9 +31,9 @@
 def test_mistral_mlp_inference(device, seq_len, use_program_cache, reset_seeds, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
6 changes: 3 additions & 3 deletions models/demos/wormhole/mistral7b/tests/test_mistral_model.py
@@ -40,9 +40,9 @@ def forward(self, x):
 def test_mistral_model_inference(device, iterations, use_program_cache, reset_seeds, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
@@ -46,9 +46,9 @@ def forward(self, x):
 def test_mistral_model_inference(device, seq_len, use_program_cache, reset_seeds, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
         os.environ["MISTRAL_REF_OUTPUT_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/Mixtral-8x7B-v0.1/prefill/"

     # This module requires the env paths above for CI runs
6 changes: 3 additions & 3 deletions models/demos/wormhole/mistral7b/tests/test_mistral_perf.py
@@ -51,9 +51,9 @@ def test_mistral_model_perf(
 ):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
@@ -19,9 +19,9 @@
 def test_mistral_rms_norm_inference(device, use_program_cache, reset_seeds, is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
6 changes: 3 additions & 3 deletions models/demos/wormhole/mistral7b/tests/test_mistral_torch.py
@@ -29,9 +29,9 @@ def forward(self, x):
 def test_mistral_torch_inference(is_ci_env):
     # Set Mistral flags for CI
     if is_ci_env:
-        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
-        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/ttnn/models/demos/mistral7b/"
+        os.environ["MISTRAL_CKPT_DIR"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_TOKENIZER_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"
+        os.environ["MISTRAL_CACHE_PATH"] = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

     # This module requires the env paths above for CI runs
     from models.demos.wormhole.mistral7b.tt.model_config import TtModelArgs
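Each hunk's trailing comment notes that the imported module needs these paths set before import in CI. A hedged sketch of what such a consumer might look like (names are hypothetical; this is not the actual TtModelArgs implementation):

    import os

    class MistralPaths:
        # Hypothetical illustration of a module that reads the CI flags set above;
        # the real TtModelArgs may resolve and validate these paths differently.
        DEFAULT_DIR = "/mnt/MLPerf/tt_dnn-models/Mistral/mistral-7B-v0.1/"

        def __init__(self):
            self.ckpt_dir = os.environ.get("MISTRAL_CKPT_DIR", self.DEFAULT_DIR)
            self.tokenizer_path = os.environ.get("MISTRAL_TOKENIZER_PATH", self.DEFAULT_DIR)
            self.cache_path = os.environ.get("MISTRAL_CACHE_PATH", self.DEFAULT_DIR)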
