From 8bbdf1a7cb94b53961ce9631b277d69cefa0e342 Mon Sep 17 00:00:00 2001
From: alekst23
Date: Wed, 4 Sep 2024 14:37:25 -0400
Subject: [PATCH] fixes to test imports

---
 tests/e2e/test_text_backend_full.py               |  2 +-
 tests/integration/api/test_assistants.py          |  4 ++--
 tests/integration/api/test_vector_stores.py       |  2 +-
 tests/pytest/leapfrogai_api/test_api.py           | 13 +++++++------
 .../leapfrogai_api/routers/openai/test_threads.py |  5 +----
 5 files changed, 12 insertions(+), 14 deletions(-)

diff --git a/tests/e2e/test_text_backend_full.py b/tests/e2e/test_text_backend_full.py
index 3da7cdd0a..fdee17172 100644
--- a/tests/e2e/test_text_backend_full.py
+++ b/tests/e2e/test_text_backend_full.py
@@ -5,7 +5,7 @@
 from openai import OpenAI
 from openai.types.beta.vector_store import VectorStore
 
-from leapfrogai_api.backend.types import VectorStoreStatus
+from leapfrogai_api.typedef.vectorstores import VectorStoreStatus
 
 
 def download_arxiv_pdf():
diff --git a/tests/integration/api/test_assistants.py b/tests/integration/api/test_assistants.py
index e07fdd8dd..deb341904 100644
--- a/tests/integration/api/test_assistants.py
+++ b/tests/integration/api/test_assistants.py
@@ -13,11 +13,11 @@
 from openai.types.beta.vector_store import ExpiresAfter
 
 import leapfrogai_api.backend.rag.index
-from leapfrogai_api.backend.types import CreateVectorStoreRequest
 from leapfrogai_api.routers.openai.vector_stores import router as vector_store_router
 from leapfrogai_api.routers.openai.files import router as files_router
 from leapfrogai_api.routers.openai.assistants import router as assistants_router
-from leapfrogai_api.routers.openai.requests.create_modify_assistant_request import (
+from leapfrogai_api.typedef.vectorstores import CreateVectorStoreRequest
+from leapfrogai_api.typedef.assistants import (
     CreateAssistantRequest,
     ModifyAssistantRequest,
 )
diff --git a/tests/integration/api/test_vector_stores.py b/tests/integration/api/test_vector_stores.py
index 4a939cb21..f9c69f8e8 100644
--- a/tests/integration/api/test_vector_stores.py
+++ b/tests/integration/api/test_vector_stores.py
@@ -13,7 +13,7 @@
 from langchain_core.embeddings.fake import FakeEmbeddings
 
 import leapfrogai_api.backend.rag.index
-from leapfrogai_api.backend.types import (
+from leapfrogai_api.typedef.vectorstores import (
     CreateVectorStoreRequest,
     ModifyVectorStoreRequest,
 )
diff --git a/tests/pytest/leapfrogai_api/test_api.py b/tests/pytest/leapfrogai_api/test_api.py
index a80df6b6c..4eacd1085 100644
--- a/tests/pytest/leapfrogai_api/test_api.py
+++ b/tests/pytest/leapfrogai_api/test_api.py
@@ -10,7 +10,8 @@
 from fastapi.testclient import TestClient
 from starlette.middleware.base import _CachedRequest
 from supabase import ClientOptions
-import leapfrogai_api.backend.types as lfai_types
+from leapfrogai_api.typedef.chat import ChatCompletionRequest, ChatMessage
+from leapfrogai_api.typedef.embeddings import CreateEmbeddingRequest
 from leapfrogai_api.main import app
 from leapfrogai_api.routers.supabase_session import init_supabase_client
 
@@ -209,7 +210,7 @@ def test_embedding(dummy_auth_middleware):
 
     with TestClient(app) as client:
         # Send request to client
-        embedding_request = lfai_types.CreateEmbeddingRequest(
+        embedding_request = CreateEmbeddingRequest(
             model="repeater",
             input="This is the embedding input text.",
         )
@@ -237,9 +238,9 @@ def test_chat_completion(dummy_auth_middleware):
     """Test the chat completion endpoint."""
     with TestClient(app) as client:
         input_content = "this is the chat completion input."
-        chat_completion_request = lfai_types.ChatCompletionRequest(
+        chat_completion_request = ChatCompletionRequest(
             model="repeater",
-            messages=[lfai_types.ChatMessage(role="user", content=input_content)],
+            messages=[ChatMessage(role="user", content=input_content)],
         )
         response = client.post(
             "/openai/v1/chat/completions", json=chat_completion_request.model_dump()
         )
@@ -284,9 +285,9 @@ def test_stream_chat_completion(dummy_auth_middleware):
     with TestClient(app) as client:
         input_content = "this is the stream chat completion input."
 
-        chat_completion_request = lfai_types.ChatCompletionRequest(
+        chat_completion_request = ChatCompletionRequest(
             model="repeater",
-            messages=[lfai_types.ChatMessage(role="user", content=input_content)],
+            messages=[ChatMessage(role="user", content=input_content)],
             stream=True,
         )
diff --git a/tests/unit/leapfrogai_api/routers/openai/test_threads.py b/tests/unit/leapfrogai_api/routers/openai/test_threads.py
index c39f1ff84..c02853c87 100644
--- a/tests/unit/leapfrogai_api/routers/openai/test_threads.py
+++ b/tests/unit/leapfrogai_api/routers/openai/test_threads.py
@@ -9,10 +9,7 @@
     ToolResourcesFileSearch,
 )
 
-from leapfrogai_api.routers.openai.requests.create_thread_request import (
-    CreateThreadRequest,
-)
-from leapfrogai_api.backend.types import ModifyThreadRequest
+from leapfrogai_api.typedef.threads import ModifyThreadRequest, CreateThreadRequest
 from leapfrogai_api.data.crud_thread import CRUDThread
 from leapfrogai_api.data.crud_message import CRUDMessage
 from leapfrogai_api.routers.openai.threads import (