Commit

fix auto packing on 1.13

dakinggg committed Nov 19, 2023
1 parent 959a8de commit b0b1637
Showing 1 changed file with 2 additions and 1 deletion.
llmfoundry/data/packing.py (2 additions, 1 deletion)
@@ -5,6 +5,7 @@

 import numpy as np
 import torch
+from composer.utils import using_torch_2
 from omegaconf import DictConfig
 from transformers import PreTrainedTokenizerBase

@@ -347,7 +348,7 @@ def profile_packing(
     dataloader_cfg.dataset.packing_ratio = None
     dataloader_cfg.drop_last = False
     dataloader_cfg.num_workers = 0
-    dataloader_cfg.prefetch_factor = None
+    dataloader_cfg.prefetch_factor = None if using_torch_2() else 2
     dataloader_cfg.persistent_workers = False

     # Determine the packing_ratio values we'll try
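
For context on the fix: in torch 1.x, DataLoader raises a ValueError when num_workers == 0 unless prefetch_factor is left at its default of 2, so passing None breaks on 1.13; torch 2.0 flipped the default to None and instead requires None in that case. composer.utils.using_torch_2 gates on the installed torch version. Below is a minimal sketch of an equivalent check and the resulting assignment, assuming the packaging library for version parsing (the sketch itself is not part of the commit):

import torch
from packaging import version

def using_torch_2() -> bool:
    # Sketch of the version gate; composer.utils.using_torch_2 performs
    # a check along these lines, though its implementation may differ.
    return version.parse(torch.__version__) >= version.parse('2.0.0')

# torch < 2.0 requires prefetch_factor == 2 when num_workers == 0,
# while torch >= 2.0 requires None, hence the version-dependent value.
prefetch_factor = None if using_torch_2() else 2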
