From 4a7e02309b026b61bcfbb05302f8a32fdb629170 Mon Sep 17 00:00:00 2001
From: anw90
Date: Fri, 22 Dec 2023 16:58:53 +0800
Subject: [PATCH] bring back custom_sampler_check

---
 src/accelerate/test_utils/scripts/test_script.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/accelerate/test_utils/scripts/test_script.py b/src/accelerate/test_utils/scripts/test_script.py
index 0d43274a633..6bf5d803f94 100644
--- a/src/accelerate/test_utils/scripts/test_script.py
+++ b/src/accelerate/test_utils/scripts/test_script.py
@@ -653,9 +653,7 @@ def main():
     dl_preparation_check()
     if state.distributed_type != DistributedType.XLA:
         central_dl_preparation_check()
-    # Skip this test because the TorchXLA's MpDeviceLoaderWrapper does not
-    # have the 'batch_sampler' attribute.
-    custom_sampler_check()
+        custom_sampler_check()
 
     # Trainings are not exactly the same in DeepSpeed and CPU mode
     if state.distributed_type == DistributedType.DEEPSPEED: