fighting with docstrings
aspfohl committed Jan 4, 2024
1 parent 50ec026 commit d1d75cb
Showing 1 changed file with 7 additions and 9 deletions.
16 changes: 7 additions & 9 deletions llmfoundry/callbacks/async_eval_callback.py
@@ -250,14 +250,13 @@ def __init__(
         log.info('Initialized AsyncEval callback. Will generate runs at ' +
                  f'interval {interval}, checking at {self.check_interval}')
 
+    @staticmethod
     def _get_ready_sharded_checkpoints(
-        self,
         checkpointer_checkpoints: List[str],
         remote_files: List[str],
     ):
-        """
-        Determine which checkpoints from the checkpointer are ready to be evaled,
-        based on which shards have been uploaded to the remote checkpoint folder.
+        """Identify checkpoints that are ready to be evaled based on remote files.
+
         This has special logic for sharded checkpoints to consider checkpoints composed
         of multiple shards (one per gpu) and metadata
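The body of _get_ready_sharded_checkpoints is collapsed in this view. As a rough illustration of the readiness check the new docstring describes, the sketch below assumes a folder-per-checkpoint layout with one ".distcp" shard per GPU plus a ".metadata" file, and adds a world_size argument purely for the example; it is not the actual implementation in async_eval_callback.py.

# Editor's illustrative sketch, not part of the commit. A sharded checkpoint
# is treated as eval-ready only once every per-GPU shard and the metadata
# file have been uploaded. The ".distcp" / ".metadata" names, the
# folder-per-checkpoint layout, and the world_size argument are assumptions
# made for this example.
import os
from typing import Dict, List, Set

def get_ready_sharded_checkpoints(
    checkpointer_checkpoints: List[str],
    remote_files: List[str],
    world_size: int,
) -> List[str]:
    # Group uploaded files by the checkpoint folder they belong to
    files_per_checkpoint: Dict[str, Set[str]] = {}
    for path in remote_files:
        folder = os.path.dirname(path)
        files_per_checkpoint.setdefault(folder, set()).add(os.path.basename(path))

    checkpoints_to_eval = []
    for checkpoint in checkpointer_checkpoints:
        uploaded = files_per_checkpoint.get(checkpoint, set())
        shards = [name for name in uploaded if name.endswith('.distcp')]
        # Ready only when one shard per GPU plus the metadata file are present
        if len(shards) == world_size and '.metadata' in uploaded:
            checkpoints_to_eval.append(checkpoint)
    return checkpoints_to_eval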
@@ -294,15 +293,14 @@ def _get_ready_sharded_checkpoints(
 
         return checkpoints_to_eval
 
+    @staticmethod
     def _get_ready_single_checkpoints(
-        self,
         checkpointer_checkpoints: List[str],
         remote_checkpoints: List[str],
     ):
-        """
-        Determine which checkpoints from the checkpointer are ready to be evaled,
-        based on which checkpoints have been uploaded. This is much simpler than
-        the sharded case, because there is only one file
+        """Identify checkpoints that are ready to be evaled based on remote checkpoints.
+
+        This is much simpler than the sharded case, because there is only one file.
 
         Args:
             checkpointer_checkpoints: The checkpoints from the checkpointer state
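The non-sharded helper's body is likewise collapsed here. As the docstring notes, there is only one file per checkpoint, so a minimal sketch of this case, assuming exact name matching between the checkpointer state and the remote listing (again not the actual implementation), is:

# Editor's illustrative sketch, not part of the commit: readiness for a
# non-sharded checkpoint reduces to whether its single file has appeared
# in the remote listing.
from typing import List

def get_ready_single_checkpoints(
    checkpointer_checkpoints: List[str],
    remote_checkpoints: List[str],
) -> List[str]:
    remote = set(remote_checkpoints)
    # Preserve checkpointer order; keep only checkpoints already uploaded
    return [c for c in checkpointer_checkpoints if c in remote]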
