remove skip on test now #48 is complete
Signed-off-by: Yu Chin Fabian Lim <[email protected]>
fabianlim committed Nov 5, 2024
1 parent 9c3529c commit c06913e
Showing 1 changed file with 12 additions and 3 deletions.
15 changes: 12 additions & 3 deletions plugins/fused-ops-and-kernels/tests/test_fused_ops.py
@@ -240,12 +240,21 @@ def loaded_models(device: torch.device = "cuda"):
     class TrainArgs:
         gradient_checkpointing = False
         gradient_checkpointing_kwargs = {}
+        fp16 = False
+        bf16 = False

-    args = TrainArgs()
+        def __init__(self, **kwargs):
+            for k, v in kwargs.items():
+                setattr(self, k, v)

     all_models = {}
     for dtype in DTYPES:
         for base_type in [BNB, GPTQ]:
+
+            args = TrainArgs(
+                fp16=dtype==FLOAT16
+            )
+
             for r, lora_alpha in LORA_PARAMS:
                 model_name, _, target_modules = TEST_MODELS[base_type]
                 peft_config = LoraConfig(
@@ -389,8 +398,8 @@ def test_adapter_gradients_match_with_attention_layer(


 @pytest.mark.skipif(
-    not _is_package_available("bitsandbytes") or not _is_package_available("auto_gptq"),
-    reason="Only runs if both bitsandbytes and auto_gptq are installed",
+    not _is_package_available("bitsandbytes"),
+    reason="Only runs if bitsandbytes is installed",
 )
 def test_adapter_gradients_match_with_model(
     model_inputs, loaded_models, dropout_masks  # pylint: disable=redefined-outer-name
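For context, the first hunk replaces a single shared TrainArgs() instance with per-dtype construction: the class attributes act as defaults, and the new __init__ copies keyword arguments onto the instance, so the fixture enables fp16 only when the test dtype is float16. A minimal, self-contained sketch of that pattern (FLOAT16 and DTYPES below are assumed stand-ins for the constants defined elsewhere in test_fused_ops.py):

    # sketch of the kwargs-override pattern used by the fixture
    FLOAT16 = "float16"            # assumed stand-in
    DTYPES = [FLOAT16, "bfloat16"]  # assumed stand-in

    class TrainArgs:
        # class attributes act as defaults
        gradient_checkpointing = False
        gradient_checkpointing_kwargs = {}
        fp16 = False
        bf16 = False

        def __init__(self, **kwargs):
            # any keyword argument overrides the matching default on this instance
            for k, v in kwargs.items():
                setattr(self, k, v)

    for dtype in DTYPES:
        args = TrainArgs(fp16=(dtype == FLOAT16))
        print(dtype, args.fp16, args.bf16)  # float16 True False, then bfloat16 False False

The second hunk relaxes the skip marker so the test only requires bitsandbytes, in line with the commit message about #48 being complete. The test module imports a helper named _is_package_available; a rough stand-in based on importlib (not necessarily the module's actual implementation) shows how the relaxed guard behaves:

    import importlib.util
    import pytest

    def _is_package_available(name: str) -> bool:
        # stand-in for the availability helper used by the test module
        return importlib.util.find_spec(name) is not None

    @pytest.mark.skipif(
        not _is_package_available("bitsandbytes"),
        reason="Only runs if bitsandbytes is installed",
    )
    def test_requires_bitsandbytes():
        import bitsandbytes  # the marker guarantees this import succeeds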
