From 935bce873fda0d61fbd18228edfecaa5cbb22a3d Mon Sep 17 00:00:00 2001
From: Daniel King
Date: Thu, 18 Jan 2024 15:30:17 -0800
Subject: [PATCH] remove args overriding

---
 llmfoundry/models/hf/model_wrapper.py | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/llmfoundry/models/hf/model_wrapper.py b/llmfoundry/models/hf/model_wrapper.py
index a7c993ff1b..d84b96b02f 100644
--- a/llmfoundry/models/hf/model_wrapper.py
+++ b/llmfoundry/models/hf/model_wrapper.py
@@ -58,13 +58,6 @@ def __init__(self,
         if self.z_loss < 0.0:
             raise ValueError(f'z_loss(={z_loss}) cannot be negative.')
 
-        self.model_forward_args = inspect.getfullargspec(
-            self.model.forward).args
-        # inspect.getfullargspec HuggingFace quantized model could not return args correctly
-        if not self.model_forward_args:
-            self.model_forward_args = inspect.signature(
-                self.model.forward).parameters.keys()
-
         # Note: We need to add the FSDP related attributes to the model AFTER the super init,
         # so that the (possible) embedding resizing doesn't destroy them
         prepare_hf_model_for_fsdp(self.model, init_device)
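
For context, the deleted block enumerated the parameter names of `model.forward` so the wrapper could decide which batch keys to pass through. Below is a minimal, hypothetical sketch of that pattern outside the wrapper class; the helper name `get_forward_arg_names` and the `torch.nn.Linear` stand-in model are assumptions for illustration, not part of llm-foundry.

```python
import inspect

import torch


def get_forward_arg_names(model: torch.nn.Module) -> list:
    """Return the parameter names of ``model.forward``.

    Per the comment in the removed code, ``inspect.getfullargspec`` can come
    back empty for some wrapped forwards (it mentions quantized HuggingFace
    models), so fall back to ``inspect.signature``, which resolves wrapped
    callables.
    """
    arg_names = inspect.getfullargspec(model.forward).args
    if not arg_names:
        arg_names = list(inspect.signature(model.forward).parameters.keys())
    return arg_names


# Hypothetical usage: keep only the batch keys that forward actually accepts.
model = torch.nn.Linear(4, 4)  # stand-in for a HuggingFace model
batch = {'input': torch.randn(2, 4), 'labels': torch.zeros(2)}
allowed = get_forward_arg_names(model)
filtered = {k: v for k, v in batch.items() if k in allowed}
output = model(**filtered)
```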