
Commit

[fix] rm unused comments
duanjunwen committed Nov 18, 2024
1 parent dafda0f commit 41fdd21
Showing 1 changed file with 0 additions and 5 deletions.
5 changes: 0 additions & 5 deletions colossalai/pipeline/schedule/zero_bubble_pp.py
@@ -49,7 +49,6 @@ def __init__(
         overlap_p2p: bool = True,
     ):
         super().__init__(stage_manager)
-        # Not support overlap_p2p so far
         # batch info
         self.num_microbatch = num_microbatch
         self.microbatch_size = microbatch_size
@@ -543,8 +542,6 @@ def backward_b_step(
         output_obj_grad_ = []

         # For chunk 0 stage 0, use micro_batch as input_obj_; and we don't have to cal microbatch dx.
-        # if model_chunk_id == 0 and self.stage_manager.is_first_stage(ignore_chunk=True):
-        #     return None

         # For loss backward; output_obj is loss; output_obj_grad should be None
         if model_chunk_id == 1 and self.stage_manager.is_first_stage(ignore_chunk=True):
@@ -718,10 +715,8 @@ def schedule_f(
         # Do not release_tensor_data loss, release_tensor_data other output_obj;
         if model_chunk_id == 1 and self.stage_manager.is_first_stage(ignore_chunk=True):
             self.output_tensors[model_chunk_id].append(output_obj)
-            # self.output_tensors_dw[model_chunk_id].append(output_obj)
         else:
             self.output_tensors[model_chunk_id].append(output_obj)
-            # self.output_tensors_dw[model_chunk_id].append(output_obj)

         # add output to send_fwd_buffer
         if model_chunk_id == 0:  # chunk 0
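The backward_b_step and schedule_f hunks above both touch the loss-stage special case: the chunk that produces the loss (model_chunk_id == 1 on the first stage) runs backward with no incoming gradient, while every other stage backpropagates its output using the gradient received from the downstream stage. Below is a minimal standalone sketch of that pattern in plain PyTorch, not ColossalAI's actual schedule code; the names backward_step, output_obj, and output_obj_grad only mirror the diff for readability.

```python
import torch
from typing import Optional


def backward_step(output_obj: torch.Tensor, output_obj_grad: Optional[torch.Tensor]) -> None:
    """Run backward for one stage's output.

    On the loss-holding stage, output_obj is the scalar loss and
    output_obj_grad is None; on other stages it is the gradient
    received from the downstream stage.
    """
    if output_obj_grad is None:
        # Loss backward: no upstream gradient is required.
        torch.autograd.backward(output_obj)
    else:
        torch.autograd.backward(output_obj, grad_tensors=output_obj_grad)


# Toy usage: two "stages" chained on one device via a detach boundary.
w0 = torch.randn(8, 8, requires_grad=True)
x = torch.randn(4, 8)
hidden = x @ w0                                  # "stage 0" output
hidden_recv = hidden.detach().requires_grad_()   # tensor handed to "stage 1"
loss = (hidden_recv ** 2).mean()                 # "stage 1" output: the loss

backward_step(loss, None)                        # loss stage: grad is None
backward_step(hidden, hidden_recv.grad)          # stage 0: use received grad
assert w0.grad is not None
```

In the real interleaved zero-bubble schedule the received gradient arrives via p2p communication from the next pipeline rank; the detach boundary above only stands in for that hand-off.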
