From ba2dd5d02dedd7778d6d9c27cfc213ea8d2677de Mon Sep 17 00:00:00 2001
From: Daniel King
Date: Fri, 15 Dec 2023 11:39:05 -0800
Subject: [PATCH] precommit

---
 llmfoundry/__init__.py               | 1 +
 llmfoundry/models/hf/hf_causal_lm.py | 3 ++-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/llmfoundry/__init__.py b/llmfoundry/__init__.py
index 23ffa4e6b4..87504d26b3 100644
--- a/llmfoundry/__init__.py
+++ b/llmfoundry/__init__.py
@@ -5,6 +5,7 @@
 try:
     import warnings
+
     # bitsandbytes is a very noisy library. A lot of it is print statements that we can't easily suppress,
     # but we can at least suppress a bunch of spurious warnings.
     warnings.filterwarnings('ignore',
diff --git a/llmfoundry/models/hf/hf_causal_lm.py b/llmfoundry/models/hf/hf_causal_lm.py
index dd1bb72697..fcac57d817 100644
--- a/llmfoundry/models/hf/hf_causal_lm.py
+++ b/llmfoundry/models/hf/hf_causal_lm.py
@@ -108,7 +108,8 @@ def __init__(self, om_model_config: Union[DictConfig,
             trust_remote_code=trust_remote_code,
             use_auth_token=use_auth_token,
             attn_implementation=requested_attention_implementation,
-            use_cache=False,  # Necessary due to https://github.com/huggingface/transformers/issues/28056
+            use_cache=
+            False,  # Necessary due to https://github.com/huggingface/transformers/issues/28056
         )
 
         # This is not ideal, however Hugging Face's _autoset_attn_implementation function
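
Note on the hunks above: the first hunk's warnings.filterwarnings call is cut
off at the hunk boundary, so its arguments are not visible here. As a rough
sketch of the suppression pattern it relies on, with the message, category,
and module values below being placeholders assumed for illustration rather
than the repo's actual filters:

    import warnings

    # Register the filter before the noisy library is imported, so warnings
    # raised at import time are already suppressed.
    warnings.filterwarnings(
        'ignore',
        message='.*placeholder warning text.*',  # placeholder regex, not the real pattern
        category=UserWarning,                    # assumed category, for illustration
        module='bitsandbytes',                   # matches warnings issued from bitsandbytes modules
    )

The second hunk does not change behavior: it only reflows the existing
use_cache=False keyword argument across two lines, consistent with the patch
subject ("precommit", i.e. an autoformatter pass), and preserves the comment
pointing at the upstream workaround.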