
Commit

precommit
dakinggg committed Dec 15, 2023
1 parent 004a023 commit ba2dd5d
Showing 2 changed files with 3 additions and 1 deletion.
1 change: 1 addition & 0 deletions llmfoundry/__init__.py
@@ -5,6 +5,7 @@

 try:
     import warnings
+
     # bitsandbytes is a very noisy library. A lot of it is print statements that we can't easily suppress,
     # but we can at least suppress a bunch of spurious warnings.
     warnings.filterwarnings('ignore',
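The filterwarnings call is truncated in this view. For context, here is a minimal, self-contained sketch of the suppression pattern being applied; the message regex and the guarded import are placeholder assumptions, not the exact arguments from the repository:

import warnings

try:
    # Silence a specific spurious warning emitted at import time by a noisy
    # dependency (bitsandbytes in the original code). The message pattern
    # below is a placeholder, not the one used in llmfoundry.
    warnings.filterwarnings('ignore',
                            message='.*compiled without GPU support.*')
    import bitsandbytes  # noqa: F401
except ImportError:
    # bitsandbytes is optional; ignore if it is not installed.
    pass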
3 changes: 2 additions & 1 deletion llmfoundry/models/hf/hf_causal_lm.py
@@ -108,7 +108,8 @@ def __init__(self, om_model_config: Union[DictConfig,
             trust_remote_code=trust_remote_code,
             use_auth_token=use_auth_token,
             attn_implementation=requested_attention_implementation,
-            use_cache=False,  # Necessary due to https://github.com/huggingface/transformers/issues/28056
+            use_cache=
+            False,  # Necessary due to https://github.com/huggingface/transformers/issues/28056
         )

         # This is not ideal, however Hugging Face's _autoset_attn_implementation function
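For context, a minimal sketch of the config construction that the hunk above reformats. The diff only shows the keyword arguments; the use of AutoConfig.from_pretrained, the model name, and the attention implementation value are assumptions, not confirmed by the diff:

from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    'gpt2',  # placeholder model name, not the one from the caller
    trust_remote_code=False,
    use_auth_token=False,
    attn_implementation='eager',  # placeholder; the diff passes requested_attention_implementation here
    use_cache=False,  # works around https://github.com/huggingface/transformers/issues/28056
)
print(config.use_cache)  # False

Setting use_cache=False on the config, rather than on the generate call, is the workaround referenced in the linked transformers issue.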

