diff --git a/chatproto/conversation/models/aquila.py b/chatproto/conversation/models/aquila.py
new file mode 100644
index 0000000..c653431
--- /dev/null
+++ b/chatproto/conversation/models/aquila.py
@@ -0,0 +1,55 @@
+from ..settings import ConversationSettings, SeparatorStyle
+
+# AquilaChat default template
+# source: https://github.com/FlagAI-Open/FlagAI/blob/master/examples/Aquila/Aquila-chat/cyg_conversation.py
+aquila_chat = ConversationSettings(
+    name="aquila-chat",
+    system_message="A chat between a curious human and an artificial intelligence assistant. "
+    "The assistant gives helpful, detailed, and polite answers to the human's questions.",
+    roles=("Human", "Assistant"),
+    sep_style=SeparatorStyle.ADD_COLON_SINGLE,
+    sep="###",
+    sep2="",
+    stop_str=["###", "", "[UNK]"],
+)
+
+# AquilaChat2-34B default template
+# source: https://huggingface.co/BAAI/AquilaChat2-34B/blob/4608b75855334b93329a771aee03869dbf7d88cc/predict.py#L212
+aquila_legacy = ConversationSettings(
+    name="aquila-legacy",
+    system_message="A chat between a curious human and an artificial intelligence assistant. "
+    "The assistant gives helpful, detailed, and polite answers to the human's questions.\n\n",
+    roles=("### Human: ", "### Assistant: "),
+    offset=0,
+    sep_style=SeparatorStyle.NO_COLON_TWO,
+    sep="\n",
+    sep2="",
+    stop_str=["", "[UNK]"],
+)
+
+# AquilaChat2-7B-16K and AquilaChat2-34B-16K default template
+# source: https://huggingface.co/BAAI/AquilaChat2-34B/blob/4608b75855334b93329a771aee03869dbf7d88cc/predict.py#L227
+aquila = ConversationSettings(
+    name="aquila",
+    system_message="A chat between a curious human and an artificial intelligence assistant. "
+    "The assistant gives helpful, detailed, and polite answers to the human's questions.",
+    roles=("Human", "Assistant"),
+    offset=0,
+    sep_style=SeparatorStyle.ADD_COLON_TWO,
+    sep="###",
+    sep2="",
+    stop_str=["", "[UNK]"],
+)
+
+
+# AquilaChat2-7B default template
+# source: https://huggingface.co/BAAI/AquilaChat2-34B/blob/4608b75855334b93329a771aee03869dbf7d88cc/predict.py#L242
+aquila_v1 = ConversationSettings(
+    name="aquila-v1",
+    roles=("<|startofpiece|>", "<|endofpiece|>"),
+    offset=0,
+    sep_style=SeparatorStyle.NO_COLON_TWO,
+    sep="",
+    sep2="",
+    stop_str=["", "<|endoftext|>"],
+)
diff --git a/chatproto/conversation/models/dolphin.py b/chatproto/conversation/models/dolphin.py
new file mode 100644
index 0000000..1e38488
--- /dev/null
+++ b/chatproto/conversation/models/dolphin.py
@@ -0,0 +1,15 @@
+
+from ..settings import ConversationSettings, SeparatorStyle
+
+# ehartford/dolphin-2.2.1-mistral-7b template
+# reference: https://huggingface.co/ehartford/dolphin-2.2.1-mistral-7b#training
+
+dolphin = ConversationSettings(
+    name="dolphin",
+    system_template="<|im_start|>system\n{system_message}",
+    system_message="You are Dolphin, a helpful AI assistant.",
+    roles=("<|im_start|>user", "<|im_start|>assistant"),
+    sep_style=SeparatorStyle.CHATML,
+    sep="<|im_end|>",
+    stop_token_ids=[32000, 32001],
+)
diff --git a/chatproto/conversation/models/llama.py b/chatproto/conversation/models/llama.py
index bdaeb04..1eb73be 100644
--- a/chatproto/conversation/models/llama.py
+++ b/chatproto/conversation/models/llama.py
@@ -12,4 +12,5 @@
 )
 
 llama1 = llama.alias("llama1")
-llama2 = llama.alias("llama2")
\ No newline at end of file
+llama2 = llama.alias("llama2")
+llama3 = llama.alias("llama3")
\ No newline at end of file
diff --git a/chatproto/conversation/models/phi.py b/chatproto/conversation/models/phi.py
new file mode 100644
index 0000000..88a06cc
--- /dev/null
+++ b/chatproto/conversation/models/phi.py
@@ -0,0 +1,11 @@
+from ..settings import ConversationSettings, SeparatorStyle
+
+# Phi default template
+phi = ConversationSettings(
+    name="phi",
+    roles=("<|user|>", "<|assistant|>"),
+    sep_style=SeparatorStyle.ADD_NEW_LINE_SINGLE,
+    system_template="<|system|>\n{system_message}",
+    sep="<|end|>\n",
+    stop_str="<|end|>",
+)
diff --git a/chatproto/conversation/models/qwen.py b/chatproto/conversation/models/qwen.py
index 7751d83..e4d74ca 100644
--- a/chatproto/conversation/models/qwen.py
+++ b/chatproto/conversation/models/qwen.py
@@ -22,5 +22,8 @@
 
 qwen1 = qwen.alias("qwen1")
 Qwen1 = qwen.alias("Qwen1")
-qwen15 = qwen.alias("qwen1.5")
-Qwen15 = qwen.alias("Qwen1.5")
\ No newline at end of file
+qwen1_5 = qwen.alias("qwen1.5")
+Qwen1_5 = qwen.alias("Qwen1.5")
+
+qwen2 = qwen.alias("qwen2")
+Qwen2 = qwen.alias("Qwen2")
\ No newline at end of file