From 919bfe3ded48603ad61c6402e1d435dbd349c43b Mon Sep 17 00:00:00 2001
From: Igor Gitman
Date: Mon, 16 Dec 2024 10:16:31 -0800
Subject: [PATCH] Fix for judge pipeline when data is filled

Signed-off-by: Igor Gitman
---
 nemo_skills/inference/llm_math_judge.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/nemo_skills/inference/llm_math_judge.py b/nemo_skills/inference/llm_math_judge.py
index be17f1f56..6ade05b75 100644
--- a/nemo_skills/inference/llm_math_judge.py
+++ b/nemo_skills/inference/llm_math_judge.py
@@ -142,6 +142,9 @@ def llm_math_judge(cfg: LlmMathJudgeConfig):
     # additionally, skipping whatever is pre-filled, assuming offset didn't change
     data = data[starting_idx:]
 
+    if len(data) == 0:  # we might not have any examples if skip_filled=True
+        return
+
     prompt = get_prompt(cfg.prompt_config, cfg.prompt_template, examples_type=cfg.examples_type)
     LOG.info("Prompt used: %s", prompt)
     LOG.info("Example prompt:\nData dictionary: %s\nPrompt: %s", data[0], prompt.fill(data[0]))
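
The sketch below illustrates why the early return matters; it is not the actual nemo_skills pipeline, and the names (judge, starting_idx) are simplified stand-ins. When skip_filled leaves nothing after the slice, the later data[0] access used to log an example prompt would raise IndexError on an empty list; the guard exits before that point.

# Minimal, self-contained sketch of the guarded path (assumed simplification of the patched function).
def judge(data, starting_idx, skip_filled=True):
    if skip_filled:
        # skip whatever is pre-filled, assuming the offset didn't change
        data = data[starting_idx:]

    if len(data) == 0:  # nothing left to judge -> return early instead of crashing
        return

    # without the guard above, this access fails with IndexError when data is empty
    print("Example prompt built from:", data[0])


# Everything is already filled, so the slice is empty and the function returns early.
judge(data=[{"problem": "1+1", "expected_answer": "2"}], starting_idx=1)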