Error if metadata matches existing keys (#1313)
dakinggg authored Jul 1, 2024
1 parent 62165de · commit 1993e7f
Showing 2 changed files with 16 additions and 3 deletions.
llmfoundry/utils/config_utils.py (1 change: 1 addition & 0 deletions)
@@ -173,6 +173,7 @@ class TrainConfig:
 
     # Metadata
     metadata: Optional[Dict[str, Any]] = None
+    flatten_metadata: bool = True
     run_name: Optional[str] = None
 
     # Resumption
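
For illustration, a minimal sketch of where the new field sits (a hypothetical TrainConfigSketch dataclass standing in for the real TrainConfig, showing only the fields around the change); the default of True keeps flattening enabled unless a config opts out:

from dataclasses import dataclass
from typing import Any, Dict, Optional


@dataclass
class TrainConfigSketch:
    # Hypothetical stand-in for TrainConfig; only the fields near the change are shown.
    metadata: Optional[Dict[str, Any]] = None
    # New in this commit: whether metadata keys are flattened into the logged config.
    flatten_metadata: bool = True
    run_name: Optional[str] = None
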
scripts/train/train.py (18 changes: 15 additions & 3 deletions)
@@ -320,9 +320,21 @@ def main(cfg: DictConfig) -> Trainer:
         loggers.append(mosaicml_logger)
 
     if train_cfg.metadata is not None:
-        # Flatten the metadata for logging
-        logged_cfg.pop('metadata', None)
-        logged_cfg.update(train_cfg.metadata, merge=True)
+        # Optionally flatten the metadata for logging
+        if train_cfg.flatten_metadata:
+            logged_cfg.pop('metadata', None)
+            common_keys = set(
+                logged_cfg.keys(),
+            ) & set(train_cfg.metadata.keys())
+            if len(common_keys) > 0:
+                raise ValueError(
+                    f'Keys {common_keys} are already present in the config. Please rename them in metadata '
+                    +
+                    'or set flatten_metadata=False to avoid flattening the metadata in the logged config.',
+                )
+
+            logged_cfg.update(train_cfg.metadata, merge=True)
+
         if mosaicml_logger is not None:
             mosaicml_logger.log_metrics(train_cfg.metadata)
             mosaicml_logger._flush_metadata(force_flush=True)
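
As a standalone illustration of the new behavior, here is a sketch using plain dicts and a hypothetical flatten_metadata_into_logged_cfg helper (rather than the logged_cfg object and merge=True update used in train.py): flattening now raises instead of silently overwriting keys that already exist in the logged config.

from typing import Any, Dict


def flatten_metadata_into_logged_cfg(
    logged_cfg: Dict[str, Any],
    metadata: Dict[str, Any],
    flatten_metadata: bool = True,
) -> None:
    # Mirrors the logic added in train.py, but on plain dicts.
    if not flatten_metadata:
        return
    logged_cfg.pop('metadata', None)
    # Refuse to overwrite keys that the logged config already contains.
    common_keys = set(logged_cfg.keys()) & set(metadata.keys())
    if len(common_keys) > 0:
        raise ValueError(
            f'Keys {common_keys} are already present in the config. Please rename them in metadata '
            'or set flatten_metadata=False to avoid flattening the metadata in the logged config.',
        )
    logged_cfg.update(metadata)


# Usage: a colliding key now raises instead of being overwritten.
logged_cfg = {'run_name': 'my-run', 'max_duration': '1ep'}
flatten_metadata_into_logged_cfg(logged_cfg, {'team': 'nlp'})  # fine, no collision
try:
    flatten_metadata_into_logged_cfg(logged_cfg, {'run_name': 'other'})
except ValueError as err:
    print(err)  # Keys {'run_name'} are already present in the config. ...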
