Skip to content

Commit

Permalink
test: Add llm model converter tests
Browse files Browse the repository at this point in the history
  • Loading branch information
Ramimashkouk committed Dec 31, 2024
1 parent be8de5e commit 4fa8bde
Show file tree
Hide file tree
Showing 6 changed files with 70 additions and 11 deletions.
6 changes: 4 additions & 2 deletions backend/chatsky_ui/schemas/front_graph_components/pipeline.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
from typing import List, Optional

from pydantic import Field

from .base_component import BaseComponent


class Pipeline(BaseComponent):
    """Front-graph representation of a whole pipeline.

    Mirrors the JSON the front end sends: the flow graphs, the messenger
    interface selection, and (optionally) the configured LLM models.
    """

    # Flow graphs as raw front-end dicts; converted downstream.
    flows: List[dict]
    # Messenger interface selection (e.g. telegram) as a raw dict.
    interface: dict
    # LLM model configurations are optional: a project without LLM
    # features sends no `llmConfigurations` key at all, so the field
    # defaults to None rather than an empty list.
    llmConfigurations: Optional[List[dict]] = Field(default=None)
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def _convert(self):
return {
"script": script_converter(slots_conf=slots_conf),
"messenger_interface": InterfaceConverter(self.pipeline.interface)(),
"models": LLMModelsConverter(self.pipeline.llmConfigurations)(),
"models": LLMModelsConverter(self.pipeline.llmConfigurations)() if self.pipeline.llmConfigurations else {},
"slots": slots_converter(),
"start_label": start_label,
"fallback_label": fallback_label,
Expand Down
20 changes: 14 additions & 6 deletions backend/chatsky_ui/services/json_converter/slots_converter.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import List
from typing import Dict, List

from ...schemas.front_graph_components.node import SlotsNode
from ...schemas.front_graph_components.slot import GroupSlot, RegexpSlot
Expand All @@ -10,23 +10,31 @@ def __init__(self, flows: List[dict]):
def _get_slots_node(flows):
return next(
iter([node for flow in flows for node in flow["data"]["nodes"] if node["type"] == "slots_node"]),
{"id": "999999", "data": {"groups": []}},
{},
)

slots_node = _get_slots_node(flows)
self.slots_node = SlotsNode(
id=slots_node["id"],
groups=slots_node["data"]["groups"],
self.slots_node = (
SlotsNode(
id=slots_node["id"],
groups=slots_node["data"]["groups"],
)
if slots_node
else None
)

def map_slots(self) -> Dict[str, str]:
    """Map each slot's id to its dotted "group.slot" name.

    Returns:
        Dict keyed by slot id with ``"<group name>.<slot name>"`` values;
        an empty dict when the flows contained no slots node.
    """
    if self.slots_node is None:
        return {}
    mapped_slots = {}
    # Iterate over a copy so the pass is unaffected if the underlying
    # groups list is mutated elsewhere while mapping.
    for group in self.slots_node.groups.copy():
        for slot in group["slots"]:
            mapped_slots[slot["id"]] = ".".join([group["name"], slot["name"]])
    return mapped_slots

def _convert(self):
if self.slots_node is None:
return {}
return {key: value for group in self.slots_node.groups for key, value in GroupSlotConverter(group)().items()}


Expand Down
29 changes: 29 additions & 0 deletions backend/chatsky_ui/tests/services/test_json_converter/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,3 +144,32 @@ def telegram_interface():
@pytest.fixture
def chatsky_telegram_interface():
    """Expected chatsky-format Telegram interface definition."""
    token_ref = {"external:os.getenv": "TG_BOT_TOKEN"}
    return {"chatsky.messengers.TelegramInterface": {"token": token_ref}}


@pytest.fixture
def llm_models_config():
    """Front-end style LLM model configuration used as converter input."""
    model_entry = {
        "name": "test_model",
        "modelName": "gpt-4o-mini",
        "systemPrompt": "test_system_prompt",
    }
    return [model_entry]


@pytest.fixture
def chatsky_llm_models():
    """Expected chatsky-format LLM model spec for `llm_models_config`."""
    # Langchain model constructor spec; credentials are resolved from
    # the environment at pipeline build time.
    openai_model = {
        "external:langchain_openai.ChatOpenAI": {
            "model": "gpt-4o-mini",
            "api_key": {"external:os.getenv": "OPENAI_API_KEY"},
            "base_url": {"external:os.getenv": "LLM_API_BASE_URL"},
        }
    }
    llm_api_spec = {
        "chatsky.llm.LLM_API": {
            "model": openai_model,
            "system_prompt": "test_system_prompt",
        }
    }
    return {"test_model": llm_api_spec}
Original file line number Diff line number Diff line change
Expand Up @@ -90,13 +90,21 @@ def test_interface_fail_multiple_interfaces(self, telegram_interface):

class TestPipelineConverter:
def test_pipeline_converter(
self, flow, telegram_interface, chatsky_telegram_interface, converted_group_slot, chatsky_flow
self,
flow,
telegram_interface,
chatsky_telegram_interface,
converted_group_slot,
chatsky_flow,
llm_models_config,
chatsky_llm_models,
):
pipeline = {"flows": [flow], "interface": telegram_interface}
pipeline = {"flows": [flow], "interface": telegram_interface, "llmConfigurations": llm_models_config}
pipeline_path = Path(__file__).parent / "test_pipeline.yaml"
with open(pipeline_path, "w") as file:
yaml.dump(pipeline, file)
os.environ["TG_BOT_TOKEN"] = "some_token"
os.environ["OPENAI_API_KEY"] = "some_token"

PipelineConverter()(pipeline_path, Path(__file__).parent)

Expand All @@ -112,4 +120,5 @@ def test_pipeline_converter(
"slots": converted_group_slot,
"start_label": ["test_flow", "test_node"],
"fallback_label": ["test_flow", "test_node"],
"models": chatsky_llm_models,
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import os

from chatsky_ui.services.json_converter.llm_converter import LLMModelsConverter


class TestLLMModelsConverter:
    def test_llm_models_converter(self, llm_models_config, chatsky_llm_models, monkeypatch):
        """Converter turns front-end LLM configs into chatsky model specs."""
        # Use monkeypatch so the variable is restored after the test,
        # unlike a raw os.environ assignment that leaks into every
        # subsequently-run test in the session.
        monkeypatch.setenv("OPENAI_API_KEY", "some_token")

        converted_models = LLMModelsConverter(llm_models_config)()

        assert converted_models == chatsky_llm_models

0 comments on commit 4fa8bde

Please sign in to comment.