Skip to content

Commit

Permalink
merge
Browse files Browse the repository at this point in the history
  • Loading branch information
bmosaicml committed Apr 1, 2024
1 parent 414467a commit 65fbbed
Showing 1 changed file with 3 additions and 5 deletions.
8 changes: 3 additions & 5 deletions llmfoundry/eval/datasets/in_context_learning_evaluation.py
Original file line number Diff line number Diff line change
Expand Up @@ -576,10 +576,8 @@ def __init__(self,
'pad_token_id': self.pad_tok_id,
'use_cache': True,
'eos_token_id': self.tokenizer.eos_token_id,
'max_new_tokens': max(self.max_answer_length, 1)
},
'generation_length': max(
self.max_answer_length,
1), # TODO: deprecate with next composer update
}
self.batch_mapping = {
'input_ids': self.context_key,
Expand Down Expand Up @@ -1286,6 +1284,7 @@ def __init__(
'temperature': 0.2, # good default for code
'use_cache': True,
'eos_token_id': self.tokenizer.eos_token_id,
'max_new_tokens': max(generation_length, 1)
},
'sample_id': [],
'pass_at_k':
Expand All @@ -1294,8 +1293,7 @@ def __init__(
generations_per_sample,
'dataset_size':
dataset_size,
'generation_length': # TODO: deprecate with next composer release
max(generation_length, 1)

}
if 'generation_kwargs' in kwargs:
self.update_generation_kwargs(kwargs['generation_kwargs'])
Expand Down

0 comments on commit 65fbbed

Please sign in to comment.