Skip to content

Commit

Permalink
add trainer_state
Browse files — browse the repository at this point in the history
  • Loading branch information
DesmonDay committed Dec 13, 2024
1 parent 6655e12 commit 805cc67
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 6 deletions.
12 changes: 7 additions & 5 deletions paddlenlp/trainer/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -973,7 +973,7 @@ def _inner_training_loop(
self.state.num_train_epochs = num_train_epochs
self.state.is_local_process_zero = self.is_local_process_zero()
self.state.is_world_process_zero = self.is_world_process_zero()
self.state.trained_samples = 0
self.state.consumed_samples = 0

self.control = self.callback_handler.on_train_begin(args, self.state, self.control)

Expand Down Expand Up @@ -1049,9 +1049,10 @@ def _inner_training_loop(
self._skip_steps_since_last_logged += 1

self.state.epoch = epoch + (step + 1) / steps_in_epoch
self.state.trained_samples = (
(epoch * steps_in_epoch + step + 1)
self.state.consumed_samples = (
self.state.global_step
* args.per_device_train_batch_size
* args.gradient_accumulation_steps
* args.dataset_world_size
)

Expand Down Expand Up @@ -1236,9 +1237,10 @@ def fused_allreduce_gradients_no_sync(paramlist, hcg):

self.state.global_step += 1
self.state.epoch = epoch + (step + 1) / steps_in_epoch
self.state.trained_samples = (
(epoch * steps_in_epoch + step + 1)
self.state.consumed_samples = (
self.state.global_step
* args.per_device_train_batch_size
* args.gradient_accumulation_steps
* args.dataset_world_size
)
self.control = self.callback_handler.on_step_end(args, self.state, self.control)
Expand Down
2 changes: 1 addition & 1 deletion paddlenlp/trainer/trainer_callback.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ class TrainerState:

epoch: Optional[float] = None
global_step: int = 0
trained_samples: int = 0
consumed_samples: int = 0
max_steps: int = 0
num_train_epochs: int = 0
total_flos: float = 0
Expand Down

0 comments on commit 805cc67

Please sign in to comment.