fix: train_on_inputs: true ignored for sharegpt #1045
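This PR makes the sharegpt prompt-tokenization strategy honor the train_on_inputs setting. Before the fix, the flag was ignored for sharegpt conversations: user, system, and role-prefix tokens were always masked out of the labels with IGNORE_TOKEN_ID, so setting train_on_inputs: true had no effect. The diff below branches on self.train_on_inputs inside tokenize_prompt and updates the sharegpt tests to cover both settings.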

Merged

Changes from all commits
24 changes: 16 additions & 8 deletions src/axolotl/prompt_tokenizers.py
```diff
@@ -379,10 +379,12 @@ def tokenize_prompt(self, prompt):
                     add_eos_token=False,
                     strip_bos_token=True,
                 )
-                # everything from this is masked out from the labels
-                labels = [IGNORE_TOKEN_ID] * len(res["input_ids"])
+                if self.train_on_inputs:
+                    labels = copy.deepcopy(res["input_ids"])
+                else:
+                    # everything from this is masked out from the labels
+                    labels = [IGNORE_TOKEN_ID] * len(res["input_ids"])
             elif assistant in role:
-                # TODO label assistant token/tokens w/ IGNORE_TOKEN_ID
                 role = (
                     role.replace(role_remap[1]["from"], role_remap[1]["to"])
                     if role_remap
@@ -406,18 +408,24 @@ def tokenize_prompt(self, prompt):
                     add_eos_token=False,
                     strip_bos_token=True,
                 )
-                # not masked out from labels
                 labels = copy.deepcopy(res["input_ids"])
-                len_role = len(role_res["input_ids"])
-                labels[:len_role] = [IGNORE_TOKEN_ID] * min(len_role, len(labels))
+                if not self.train_on_inputs:
+                    # mask out role tokens from the labels
+                    len_role = len(role_res["input_ids"])
+                    labels[:len_role] = [IGNORE_TOKEN_ID] * min(
+                        len_role, len(labels)
+                    )
             elif role == "":
                 turn = content
                 # this is only ever the first part, should include the bos token and the user query
                 res = self._tokenize(
                     turn, add_eos_token=False, strip_bos_token=False
                 )
-                # everything from this is masked out from the labels
-                labels = [IGNORE_TOKEN_ID] * len(res["input_ids"])
+                if self.train_on_inputs:
+                    labels = copy.deepcopy(res["input_ids"])
+                else:
+                    # everything from this is masked out from the labels
+                    labels = [IGNORE_TOKEN_ID] * len(res["input_ids"])
             else:
                 LOG.warning(f"unhandled role: {role}")
                 continue
```
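Distilled out of the two hunks above, the per-turn labeling rule is small. The sketch below is an illustration rather than code from the PR: turn_labels and role_len are hypothetical names, and it assumes axolotl's IGNORE_TOKEN_ID constant is -100.

```python
from copy import deepcopy
from typing import List

IGNORE_TOKEN_ID = -100  # assumed value of axolotl's masking constant


def turn_labels(
    input_ids: List[int],
    is_assistant_turn: bool,
    role_len: int,
    train_on_inputs: bool,
) -> List[int]:
    """Hypothetical distillation of the patched tokenize_prompt branches."""
    if is_assistant_turn:
        # Assistant content always contributes to the loss...
        labels = deepcopy(input_ids)
        if not train_on_inputs:
            # ...but its role prefix is masked unless inputs are trained on.
            labels[:role_len] = [IGNORE_TOKEN_ID] * min(role_len, len(labels))
        return labels
    if train_on_inputs:
        # User/system turns keep their token ids as labels when requested.
        return deepcopy(input_ids)
    # Default: everything in a non-assistant turn is masked out.
    return [IGNORE_TOKEN_ID] * len(input_ids)
```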
56 changes: 28 additions & 28 deletions tests/prompt_strategies/test_sharegpt.py
```diff
@@ -104,7 +104,7 @@ def test_w_train_on_input(self, sharegpt_dataset, tokenizer):
                 role_key_human=None,
             ),
             tokenizer,
-            True,  # train_on_inputs
+            False,  # train_on_inputs
             2048,  # sequence_len
         )
 
@@ -124,30 +124,30 @@ def test_w_train_on_input(self, sharegpt_dataset, tokenizer):
         ]
         # fmt: on
 
-    # def test_no_train_on_input(self, sharegpt_dataset, tokenizer):
-    #     strategy = SimpleShareGPTPromptTokenizingStrategy(
-    #         ShareGPTPrompterV2(
-    #             conversation="chatml",
-    #             role_key_model=None,
-    #             role_key_human=None,
-    #         ),
-    #         tokenizer,
-    #         False,  # train_on_inputs
-    #         2048,  # sequence_len
-    #     )
-    #
-    #     dataset_wrapper = TokenizedPromptDataset(
-    #         strategy, sharegpt_dataset, process_count=1
-    #     )
-    #
-    #     labels = dataset_wrapper[0]["labels"]
-    #     # fmt: off
-    #     assert labels == [
-    #         1,  # bos
-    #         32001, 1587, 13, 25997, 32000, 28705, 13,  # system
-    #         32001, 2188, 13, 21558, 32000, 28705, 13,  # human
-    #         32001, 13892, 13, 21558, 32000, 28705, 13,  # gpt
-    #         32001, 2188, 13, 12684, 17664, 32000, 28705, 13,  # human
-    #         32001, 13892, 13, 12684, 17664, 32000, 28705, 13,  # gpt
-    #     ]
-    #     # fmt: on
+    def test_no_train_on_input(self, sharegpt_dataset, tokenizer):
+        strategy = SimpleShareGPTPromptTokenizingStrategy(
+            ShareGPTPrompterV2(
+                conversation="chatml",
+                role_key_model=None,
+                role_key_human=None,
+            ),
+            tokenizer,
+            True,  # train_on_inputs
+            2048,  # sequence_len
+        )
+
+        dataset_wrapper = TokenizedPromptDataset(
+            strategy, sharegpt_dataset, process_count=1
+        )
+
+        labels = dataset_wrapper[0]["labels"]
+        # fmt: off
+        assert labels == [
+            1,  # bos
+            32001, 1587, 13, 25997, 32000, 28705, 13,  # system
+            32001, 2188, 13, 21558, 32000, 28705, 13,  # human
+            32001, 13892, 13, 21558, 32000, 28705, 13,  # gpt
+            32001, 2188, 13, 12684, 17664, 32000, 28705, 13,  # human
+            32001, 13892, 13, 12684, 17664, 32000, 28705, 13,  # gpt
+        ]
+        # fmt: on
```
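For context on why masking uses IGNORE_TOKEN_ID rather than an ordinary token id: PyTorch's cross-entropy skips label positions equal to its ignore_index, which defaults to -100. A minimal sketch of that behavior (toy logits and labels, not taken from the PR or the tests above):

```python
import torch
import torch.nn.functional as F

IGNORE_TOKEN_ID = -100  # default ignore_index for F.cross_entropy

# Toy logits for four token positions over a ten-token vocabulary.
logits = torch.randn(4, 10)

# Labels as the strategy would emit them with train_on_inputs disabled:
# two user-turn positions masked, two assistant positions kept.
labels = torch.tensor([IGNORE_TOKEN_ID, IGNORE_TOKEN_ID, 7, 3])

# Only the unmasked positions contribute to the loss...
loss = F.cross_entropy(logits, labels)

# ...so it matches the loss computed over the assistant positions alone.
assert torch.isclose(loss, F.cross_entropy(logits[2:], labels[2:]))
```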