
move moe tensor
oahzxl committed Aug 14, 2023
1 parent 00cb06e commit da1789f
Showing 3 changed files with 2 additions and 2 deletions.
colossalai/nn/layer/moe/experts.py (1 addition, 1 deletion)

@@ -8,7 +8,7 @@

 from colossalai.context import ParallelMode, seed
 from colossalai.context.moe_context import MOE_CONTEXT
-from colossalai.nn.layer.moe.moe_param import set_moe_param_info
+from colossalai.tensor.moe_tensor.api import set_moe_param_info
 from colossalai.utils import get_current_device
 from colossalai.zero.legacy.init_ctx import no_shard_zero_decrator
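
For context, set_moe_param_info is the helper that tags a parameter as belonging to an MoE expert, now relocated to colossalai.tensor.moe_tensor.api. Below is a minimal sketch of how such a tag might work; the moe_info attribute name, the ep_info parameter, and the function body are assumptions for illustration, not the actual library implementation.

import torch

def set_moe_param_info(param: torch.nn.Parameter, ep_info) -> None:
    # Hypothetical sketch: mark an expert parameter with its
    # expert-parallel info so optimizers and ZeRO can treat it
    # differently from ordinary data-parallel parameters.
    # The real API in colossalai.tensor.moe_tensor.api may differ.
    param.moe_info = ep_info  # assumed attribute name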
File renamed without changes (evidently colossalai/nn/layer/moe/moe_param.py → colossalai/tensor/moe_tensor/api.py, matching the updated imports in the other two files).
colossalai/zero/low_level/low_level_optim.py (1 addition, 1 deletion)

@@ -16,7 +16,7 @@
 )
 from colossalai.interface import OptimizerWrapper
 from colossalai.logging import get_dist_logger
-from colossalai.nn.layer.moe.moe_param import is_moe_param
+from colossalai.tensor.moe_tensor.api import is_moe_param
 # from colossalai.tensor import ColoParameter, ProcessGroup
 from colossalai.utils.cuda import get_current_device
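
Similarly, is_moe_param lets the low-level (ZeRO) optimizer separate expert parameters from ordinary data-parallel ones. A sketch under the same assumption as above (the hypothetical moe_info tag) follows; the real check in colossalai.tensor.moe_tensor.api may be implemented differently.

import torch

def is_moe_param(param: torch.Tensor) -> bool:
    # Hypothetical counterpart to the sketch above: a parameter is an
    # MoE parameter if it carries the assumed moe_info tag.
    return hasattr(param, "moe_info")

# Assumed usage: split parameters so expert weights can be reduced over
# the expert-parallel group rather than the global data-parallel group.
def split_params(params):
    moe = [p for p in params if is_moe_param(p)]
    dense = [p for p in params if not is_moe_param(p)]
    return moe, dense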
