diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py
index b74077d4d9..523fd76feb 100644
--- a/src/axolotl/utils/models.py
+++ b/src/axolotl/utils/models.py
@@ -380,19 +380,6 @@ def apply_patches(self) -> None:
         plugin_manager = PluginManager.get_instance()
         plugin_manager.pre_model_load(self.cfg)
 
-        # if self.cfg.fsdp:
-        #     from axolotl.monkeypatch.trainer_fsdp_optim import (
-        #         patch_training_loop_for_fsdp,
-        #     )
-        #
-        #     patch_training_loop_for_fsdp()
-        # elif self.cfg.deepspeed and self.cfg.gradient_accumulation_steps > 1:
-        #     from axolotl.monkeypatch.trainer_grad_accum import (
-        #         patch_training_loop_for_deepspeed_0_16_x,
-        #     )
-        #
-        #     patch_training_loop_for_deepspeed_0_16_x()
-
         if self.cfg.gradient_checkpointing == "unsloth":
             transformers.modeling_utils.checkpoint = hf_grad_checkpoint_unsloth_wrapper
 
diff --git a/tests/patched/test_llama_trainer_ga.py b/tests/patched/test_llama_trainer_ga.py
index 13acfc990d..58c229cf34 100644
--- a/tests/patched/test_llama_trainer_ga.py
+++ b/tests/patched/test_llama_trainer_ga.py
@@ -1,15 +1,12 @@
 """"Test module for checking whether the Hugging Face Transformers is working as expected."""
 
 import unittest
 
-import pytest
-
 from axolotl.monkeypatch.trainer_grad_accum import (
     check_forward_is_patchable,
     check_training_step_is_patchable,
 )
 
 
-@pytest.mark.skip("should be fixed upstream")
 class TestTrainerGAIntegration(unittest.TestCase):
     """llama monkeypatch integration tests."""
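Removing the @pytest.mark.skip decorator re-enables the grad-accum patchability checks that were parked pending an upstream fix. For context, here is a minimal sketch of what the re-enabled test class could exercise, assuming check_forward_is_patchable() and check_training_step_is_patchable() take no arguments and return a boolean; their actual signatures and the test method bodies are not shown in this diff, so the methods below are hypothetical:

import unittest

from axolotl.monkeypatch.trainer_grad_accum import (
    check_forward_is_patchable,
    check_training_step_is_patchable,
)


class TestTrainerGAIntegration(unittest.TestCase):
    """llama monkeypatch integration tests (illustrative sketch)."""

    def test_forward_is_patchable(self):
        # Checks that the upstream forward source still matches the pattern
        # the grad-accum monkeypatch expects to rewrite (hypothetical method).
        self.assertTrue(check_forward_is_patchable())

    def test_training_step_is_patchable(self):
        # Same check for Trainer.training_step, so the patch fails loudly
        # if transformers changes the method body (hypothetical method).
        self.assertTrue(check_training_step_is_patchable())

The value of tests like these is that a monkeypatch which rewrites upstream source breaks silently when transformers changes; a patchability check turns that silent drift into an explicit CI failure.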