From 53889fd1a4ee63d53cc8dbd4e453a0399f7b9bd4 Mon Sep 17 00:00:00 2001
From: Milo Cress <milo.cress@databricks.com>
Date: Wed, 10 Apr 2024 03:10:27 +0000
Subject: [PATCH] fix

---
 llmfoundry/utils/builders.py | 6 ++++--
 scripts/train/train.py       | 2 +-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/llmfoundry/utils/builders.py b/llmfoundry/utils/builders.py
index a8c660df70..900f5ed384 100644
--- a/llmfoundry/utils/builders.py
+++ b/llmfoundry/utils/builders.py
@@ -388,7 +388,7 @@ def build_optimizer(
             'optimizer config. Please remove it from the optimizer config kwargs.'
         )
 
-    kwargs['params'] = params
+    kwargs['params'] = list(params)
     return construct_from_registry(name=name,
                                    registry=registry.optimizers,
                                    partial_function=True,
@@ -438,7 +438,9 @@ def build_tokenizer(
             int(1e30),
         )
 
-    if not hasattr(tokenizer, 'eos_token') or tokenizer.eos_token is None:
+    if not hasattr(
+            tokenizer, 'eos_token'
+    ) or tokenizer.eos_token is None:  # type: ignore (sometimes it's not none but that's ok too)
         raise ValueError(
             f'The tokenizer {tokenizer_name} must have an eos_token.')
 
diff --git a/scripts/train/train.py b/scripts/train/train.py
index a1d6741d7c..4255bd3276 100644
--- a/scripts/train/train.py
+++ b/scripts/train/train.py
@@ -113,7 +113,7 @@ def validate_config(cfg: TrainConfig):
     loaders = [cfg.train_loader]
     if cfg.eval_loader is not None or cfg.eval_loaders is not None:
         eval_loader = cfg.eval_loader
-        if isinstance(cfg.eval_loaders, ListConfig):
+        if isinstance(cfg.eval_loaders, list):
             for loader in cfg.eval_loaders:
                 if 'label' not in loader:
                     raise ValueError(