Merge pull request #58 from YangZhiBoGreenHand/yzb/feat/update-run-assistant-model

feat: update run and assistant model
Showing 9 changed files with 225 additions and 3 deletions.
@@ -0,0 +1,56 @@
"""update models

Revision ID: aa4bda3363e3
Revises: 8dbb8f38ef77
Create Date: 2024-04-22 17:19:59.829072

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import sqlmodel


# revision identifiers, used by Alembic.
revision: str = "aa4bda3363e3"
down_revision: Union[str, None] = "8dbb8f38ef77"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("assistant", sa.Column("response_format", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
    op.add_column("assistant", sa.Column("tool_resources", sa.JSON(), nullable=True))
    op.add_column("assistant", sa.Column("temperature", sa.Float(), nullable=True))
    op.add_column("assistant", sa.Column("top_p", sa.Float(), nullable=True))
    op.add_column("run", sa.Column("incomplete_details", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
    op.add_column("run", sa.Column("max_completion_tokens", sa.Integer(), nullable=True))
    op.add_column("run", sa.Column("max_prompt_tokens", sa.Integer(), nullable=True))
    op.add_column("run", sa.Column("response_format", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
    op.add_column("run", sa.Column("tool_choice", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
    op.add_column("run", sa.Column("truncation_strategy", sa.JSON(), nullable=True))
    op.add_column("run", sa.Column("usage", sa.JSON(), nullable=True))
    op.add_column("run", sa.Column("temperature", sa.Float(), nullable=True))
    op.add_column("run", sa.Column("top_p", sa.Float(), nullable=True))
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("run", "top_p")
    op.drop_column("run", "temperature")
    op.drop_column("run", "usage")
    op.drop_column("run", "truncation_strategy")
    op.drop_column("run", "tool_choice")
    op.drop_column("run", "response_format")
    op.drop_column("run", "max_prompt_tokens")
    op.drop_column("run", "max_completion_tokens")
    op.drop_column("run", "incomplete_details")
    op.drop_column("assistant", "top_p")
    op.drop_column("assistant", "temperature")
    op.drop_column("assistant", "tool_resources")
    op.drop_column("assistant", "response_format")
    # ### end Alembic commands ###
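The migration only alters the database schema; the matching ORM attributes live in the changed model files, which are not shown in this hunk. The sketch below is a guess at how the new nullable fields could be declared with SQLModel: only the field names and types mirror the columns added above, while the class bodies, primary keys, and any pre-existing fields are assumptions. Applying the revision locally with "alembic upgrade head" walks the chain 8dbb8f38ef77 -> aa4bda3363e3.

from typing import Optional

from sqlalchemy import JSON, Column
from sqlmodel import Field, SQLModel


# Hypothetical sketch only: the real Assistant/Run models in app/models are not
# part of this hunk, so primary keys and existing fields are assumptions.
class Assistant(SQLModel, table=True):
    id: Optional[str] = Field(default=None, primary_key=True)
    # Columns added by revision aa4bda3363e3
    response_format: Optional[str] = None
    tool_resources: Optional[dict] = Field(default=None, sa_column=Column(JSON))
    temperature: Optional[float] = None
    top_p: Optional[float] = None


class Run(SQLModel, table=True):
    id: Optional[str] = Field(default=None, primary_key=True)
    # Columns added by revision aa4bda3363e3
    incomplete_details: Optional[str] = None
    max_completion_tokens: Optional[int] = None
    max_prompt_tokens: Optional[int] = None
    response_format: Optional[str] = None
    tool_choice: Optional[str] = None
    truncation_strategy: Optional[dict] = Field(default=None, sa_column=Column(JSON))
    usage: Optional[dict] = Field(default=None, sa_column=Column(JSON))
    temperature: Optional[float] = None
    top_p: Optional[float] = None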
@@ -0,0 +1,48 @@
import openai

from app.models.message import Message
from app.models.run import Run
from app.providers.database import session


# Test creating a run with additional messages and extra parameters
def test_create_run_with_additional_messages_and_other_params():
    client = openai.OpenAI(base_url="http://localhost:8086/api/v1", api_key="xxx")
    assistant = client.beta.assistants.create(
        name="Assistant Demo",
        instructions="你是一个有用的助手",
        model="gpt-3.5-turbo-1106",
    )
    thread = client.beta.threads.create()
    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant.id,
        instructions="",
        additional_messages=[
            {"role": "user", "content": "100 + 100 等于多少"},
            {"role": "assistant", "content": "100 + 100 等于200"},
            {"role": "user", "content": "如果是乘是多少呢?"},
        ],
        max_completion_tokens=100,
        max_prompt_tokens=100,
        temperature=0.5,
        top_p=0.5,
    )
    query = session.query(Run).filter(Run.id == run.id)
    run = query.one()
    # An empty run instruction falls back to the assistant's instructions.
    assert run.instructions == "你是一个有用的助手"
    assert run.model == "gpt-3.5-turbo-1106"
    # additional_messages should be stored on the run in creation order.
    query = session.query(Message).filter(Message.run_id == run.id).order_by(Message.created_at)
    messages = query.all()
    [message1, message2, message3] = messages
    assert message1.content == [{"text": {"value": "100 + 100 等于多少", "annotations": []}, "type": "text"}]
    assert message1.role == "user"
    assert message2.content == [{"text": {"value": "100 + 100 等于200", "annotations": []}, "type": "text"}]
    assert message2.role == "assistant"
    assert message3.content == [{"text": {"value": "如果是乘是多少呢?", "annotations": []}, "type": "text"}]
    assert message3.role == "user"
    assert run.max_completion_tokens == 100
    assert run.max_prompt_tokens == 100
    assert run.temperature == 0.5
    assert run.top_p == 0.5
    session.close()
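The test above only exercises the new run columns. A companion check for the new assistant columns could look like the sketch below; it reuses the openai client setup and session import from the existing test, the app.models.assistant import path is assumed by analogy with app.models.run, and whether this server persists temperature, top_p, and response_format exactly as asserted is an assumption rather than documented behavior.

# Hypothetical companion test (not part of this diff): verify that the new
# assistant columns from revision aa4bda3363e3 are persisted.
def test_create_assistant_with_sampling_params():
    client = openai.OpenAI(base_url="http://localhost:8086/api/v1", api_key="xxx")
    assistant = client.beta.assistants.create(
        name="Assistant Demo",
        instructions="You are a helpful assistant",
        model="gpt-3.5-turbo-1106",
        temperature=0.3,
        top_p=0.9,
        response_format="auto",
    )
    # app.models.assistant is an assumed module path, mirroring app.models.run above.
    from app.models.assistant import Assistant

    db_assistant = session.query(Assistant).filter(Assistant.id == assistant.id).one()
    assert db_assistant.temperature == 0.3
    assert db_assistant.top_p == 0.9
    assert db_assistant.response_format == "auto"
    session.close()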