Commit d81e497: use TPConfig
eitanturok committed Sep 30, 2024
1 parent 21868c7 commit d81e497
Showing 1 changed file with 2 additions and 5 deletions.
7 changes: 2 additions & 5 deletions llmfoundry/command_utils/train.py
@@ -18,7 +18,7 @@
     TraceHandler,
     cyclic_schedule,
 )
-from composer.utils import dist, get_device, reproducibility
+from composer.utils import TPConfig, dist, get_device, reproducibility
 from omegaconf import DictConfig
 from omegaconf import OmegaConf as om
 
@@ -518,10 +518,7 @@ def train(cfg: DictConfig) -> Trainer:
     if tp_config is not None:
         strategy = tp_config.pop('strategy', None)
         layer_plan = build_tp_strategies(strategy, model)
-        tp_config = {
-            'layer_plan': layer_plan,
-            'tensor_parallel_degree': tp_config['tensor_parallel_degree'],
-        }
+        tp_config = TPConfig(**tp_config, layer_plan=layer_plan)
 
     # Parallelism config
     parallelism_config = {'fsdp': fsdp_config, 'tp': tp_config}
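What the change does: instead of assembling the tensor-parallel settings as a hand-built dict, train.py now wraps them in Composer's TPConfig, so the remaining keys are validated by the dataclass constructor. Below is a minimal sketch of the new construction path, not llm-foundry code: the raw config values and the layer_plan stand-in are illustrative (in train.py the plan comes from build_tp_strategies(strategy, model)), and it assumes only the TPConfig fields the diff itself uses.

from composer.utils import TPConfig

# Illustrative raw TP section, as it might arrive from the training YAML.
tp_config = {'strategy': 'ffn', 'tensor_parallel_degree': 2}

strategy = tp_config.pop('strategy', None)
layer_plan = None  # stand-in for build_tp_strategies(strategy, model)

# After the pop, every remaining key must be a valid TPConfig field:
# an unexpected key now raises a TypeError, where the old hand-built
# dict silently kept only 'tensor_parallel_degree'.
tp_config = TPConfig(**tp_config, layer_plan=layer_plan)
print(tp_config.tensor_parallel_degree)  # 2

A side effect of the unpacking is stricter config hygiene: typos or stale keys in the tp_config section fail loudly at construction time instead of being dropped.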
