From b0b16370330dce9579708b6c22093aefed4a2ad4 Mon Sep 17 00:00:00 2001
From: Daniel King
Date: Sun, 19 Nov 2023 00:26:54 +0000
Subject: [PATCH] fix auto packing on 1.13

---
 llmfoundry/data/packing.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/llmfoundry/data/packing.py b/llmfoundry/data/packing.py
index 45322c9b2f..3fca0ade5e 100644
--- a/llmfoundry/data/packing.py
+++ b/llmfoundry/data/packing.py
@@ -5,6 +5,7 @@
 
 import numpy as np
 import torch
+from composer.utils import using_torch_2
 from omegaconf import DictConfig
 from transformers import PreTrainedTokenizerBase
 
@@ -347,7 +348,7 @@ def profile_packing(
     dataloader_cfg.dataset.packing_ratio = None
     dataloader_cfg.drop_last = False
     dataloader_cfg.num_workers = 0
-    dataloader_cfg.prefetch_factor = None
+    dataloader_cfg.prefetch_factor = None if using_torch_2() else 2
     dataloader_cfg.persistent_workers = False
 
     # Determine the packing_ratio values we'll try
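For reference, the version dependence this patch works around: PyTorch 2.0
changed the default of DataLoader's prefetch_factor from 2 to None, and with
num_workers == 0 each version accepts only its own default (1.13 raises a
ValueError for None; 2.x raises for any non-None value). Since profile_packing
forces num_workers = 0, the hard-coded None broke profiling on 1.13. Below is
a minimal standalone sketch of the same guard, assuming only the documented
DataLoader validation; the inline version check stands in for composer's
using_torch_2(), and _EmptyDataset / make_profiling_loader are hypothetical
names, not part of llm-foundry.

    import torch
    from torch.utils.data import DataLoader, Dataset

    class _EmptyDataset(Dataset):
        """Tiny stand-in dataset so the sketch is self-contained."""

        def __len__(self):
            return 0

        def __getitem__(self, idx):
            raise IndexError(idx)

    def make_profiling_loader() -> DataLoader:
        # torch >= 2.0: prefetch_factor defaults to None, and None is the
        # only value allowed when num_workers == 0.
        # torch 1.13: prefetch_factor defaults to 2, and 2 is the only value
        # allowed when num_workers == 0; passing None raises a ValueError.
        is_torch_2 = int(torch.__version__.split('.')[0]) >= 2
        return DataLoader(
            _EmptyDataset(),
            num_workers=0,
            prefetch_factor=None if is_torch_2 else 2,
        )

Picking each version's own default keeps the constructed loader's behavior
identical on both sides of the check, which is why the patch is safe to apply
regardless of the installed torch version.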