From 76699541e796d15831415b4134e7996fc0eb1107 Mon Sep 17 00:00:00 2001
From: Milo Cress
Date: Wed, 10 Apr 2024 03:32:37 +0000
Subject: [PATCH] fix

---
 llmfoundry/utils/builders.py | 2 +-
 scripts/train/train.py       | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/llmfoundry/utils/builders.py b/llmfoundry/utils/builders.py
index 900f5ed384..5cdbb4ee62 100644
--- a/llmfoundry/utils/builders.py
+++ b/llmfoundry/utils/builders.py
@@ -388,7 +388,7 @@ def build_optimizer(
             'optimizer config. Please remove it from the optimizer config kwargs.'
         )
 
-    kwargs['params'] = list(params)
+    kwargs['params'] = params
     return construct_from_registry(name=name,
                                    registry=registry.optimizers,
                                    partial_function=True,
diff --git a/scripts/train/train.py b/scripts/train/train.py
index 4255bd3276..860c0f2854 100644
--- a/scripts/train/train.py
+++ b/scripts/train/train.py
@@ -259,9 +259,9 @@ def main(cfg: DictConfig) -> Trainer:
 
     # Mandatory model training configs
     model_config: DictConfig = DictConfig(scfg.model)
-    tokenizer_config: Dict[str, Any] = scfg.tokenizer
-    optimizer_config: Dict[str, Any] = scfg.optimizer
-    scheduler_config: Dict[str, Any] = scfg.scheduler
+    tokenizer_config: Dict[str, Any] = {**scfg.tokenizer}
+    optimizer_config: Dict[str, Any] = {**scfg.optimizer}
+    scheduler_config: Dict[str, Any] = {**scfg.scheduler}
     train_loader_config: DictConfig = DictConfig(scfg.train_loader)
 
     # Optional fsdp data, fine-tuning, and eval configs
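
Notes on the two hunks (not part of the patch itself). The train.py hunk replaces
direct assignment of config sub-sections with `{**...}`: presumably `scfg.tokenizer`,
`scfg.optimizer`, and `scfg.scheduler` are omegaconf `DictConfig` nodes (or similar
Mapping objects), and unpacking them into a dict literal yields a plain `dict`, so the
runtime value actually matches the `Dict[str, Any]` annotations. The builders.py hunk
stops materializing `params` into a list before handing it to the optimizer registry;
torch optimizers accept any iterable of parameters or param-group dicts. A minimal
runnable sketch of both behaviors, where `cfg`, `model`, and `params` are hypothetical
stand-ins mirroring the patch, not llm-foundry's actual objects:

# Sketch only: `cfg`, `model`, and `tokenizer_config` are illustrative
# stand-ins, not llm-foundry's real config or model objects.
import torch
from omegaconf import DictConfig

cfg = DictConfig({'tokenizer': {'name': 'gpt2', 'kwargs': {'padding': 'max_length'}}})

# {**mapping} shallow-copies any Mapping into a plain dict, so the assigned
# value matches a Dict[str, Any] annotation at runtime.
tokenizer_config = {**cfg.tokenizer}
assert isinstance(tokenizer_config, dict)      # plain dict, not DictConfig
# Caveat: the copy is shallow; nested values such as
# tokenizer_config['kwargs'] may still be DictConfig nodes.

# Optimizer side: torch optimizers take any iterable of parameters (or of
# param-group dicts), so a generator can be passed through unchanged
# instead of being wrapped in list() first.
model = torch.nn.Linear(4, 2)
params = model.parameters()                    # a generator, not a list
opt = torch.optim.SGD(params, lr=0.1)          # the optimizer consumes the iterable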