Commit

warn checkpointing does not work
eitanturok committed Sep 26, 2024
1 parent 2683c6d commit d5779c7
Showing 1 changed file with 6 additions and 7 deletions.
llmfoundry/utils/builders.py (13 changes: 6 additions & 7 deletions)
@@ -7,14 +7,9 @@
 import logging
 import os
 import re
+import warnings
 from collections import OrderedDict
-from typing import (
-    Any,
-    ContextManager,
-    Iterable,
-    Optional,
-    Union,
-)
+from typing import Any, ContextManager, Iterable, Optional, Union
 
 import torch
 from composer.core import Algorithm, Callback, Evaluator
@@ -711,6 +706,10 @@ def build_tp_strategies(
     name: str,
     model: ComposerModel,
 ) -> dict[str, ParallelStyle]:
+
+    warnings.warn(
+        'Checkpointing is not currently supported for tensor parallelism due to this pytorch bug: https://github.com/pytorch/pytorch/issues/134095#issuecomment-2345018244'
+    )
     return construct_from_registry(
         name=name,
         registry=registry.tp_strategies,
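Note: the warning added above is emitted on every call to build_tp_strategies, whether or not checkpointing is actually used. The sketch below is illustrative only and not part of this commit; it assumes the warning is raised with the default UserWarning category, and the build_tp_strategies('ffn', model) call is a hypothetical usage.

import warnings

# Minimal sketch (assumption, not from this commit): silence the new
# tensor-parallelism checkpointing warning once the limitation is understood.
with warnings.catch_warnings():
    warnings.filterwarnings(
        'ignore',
        message='Checkpointing is not currently supported for tensor parallelism.*',
    )
    # Hypothetical call into llmfoundry.utils.builders:
    # tp_strategies = build_tp_strategies('ffn', model)

Filtering on a message prefix keeps other warnings from llmfoundry visible while acknowledging this one known limitation.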
