From ded6324dc14e2dba29cde1b234aaab8352ec7fea Mon Sep 17 00:00:00 2001
From: csgulati09
Date: Tue, 4 Jun 2024 19:00:17 +0530
Subject: [PATCH] fix: file structure for callbackhanders

---
 portkey_ai/api_resources/global_constants.py              | 3 ++-
 portkey_ai/llms/callback/__init__.py                       | 4 ++++
 .../{langchain => callback}/portkey_langchain_callback.py  | 2 +-
 .../{llama_index => callback}/portkey_llama_callback.py    | 8 ++++++--
 4 files changed, 13 insertions(+), 4 deletions(-)
 create mode 100644 portkey_ai/llms/callback/__init__.py
 rename portkey_ai/llms/{langchain => callback}/portkey_langchain_callback.py (99%)
 rename portkey_ai/llms/{llama_index => callback}/portkey_llama_callback.py (96%)

diff --git a/portkey_ai/api_resources/global_constants.py b/portkey_ai/api_resources/global_constants.py
index ad6fdf15..ea3308fc 100644
--- a/portkey_ai/api_resources/global_constants.py
+++ b/portkey_ai/api_resources/global_constants.py
@@ -29,7 +29,8 @@
 VERSION = "0.1.0"
 DEFAULT_TIMEOUT = 60
 PORTKEY_HEADER_PREFIX = "x-portkey-"
-PORTKEY_BASE_URL = "https://api.portkey.ai/v1"
+# PORTKEY_BASE_URL = "https://api.portkey.ai/v1"
+PORTKEY_BASE_URL = "https://api.portkeydev.com/v1"
 PORTKEY_GATEWAY_URL = PORTKEY_BASE_URL
 PORTKEY_API_KEY_ENV = "PORTKEY_API_KEY"
 PORTKEY_PROXY_ENV = "PORTKEY_PROXY"
diff --git a/portkey_ai/llms/callback/__init__.py b/portkey_ai/llms/callback/__init__.py
new file mode 100644
index 00000000..3da41082
--- /dev/null
+++ b/portkey_ai/llms/callback/__init__.py
@@ -0,0 +1,4 @@
+from .portkey_langchain_callback import PortkeyLangchain
+from .portkey_llama_callback import PortkeyLlamaindex
+
+__all__ = ["PortkeyLangchain", "PortkeyLlamaindex"]
diff --git a/portkey_ai/llms/langchain/portkey_langchain_callback.py b/portkey_ai/llms/callback/portkey_langchain_callback.py
similarity index 99%
rename from portkey_ai/llms/langchain/portkey_langchain_callback.py
rename to portkey_ai/llms/callback/portkey_langchain_callback.py
index 1ffc6b64..7a172f90 100644
--- a/portkey_ai/llms/langchain/portkey_langchain_callback.py
+++ b/portkey_ai/llms/callback/portkey_langchain_callback.py
@@ -8,7 +8,7 @@
 from portkey_ai.api_resources.apis.logger import Logger
 
 
-class PortkeyCallbackHandler(BaseCallbackHandler):
+class PortkeyLangchain(BaseCallbackHandler):
     def __init__(
         self,
         api_key: str,
diff --git a/portkey_ai/llms/llama_index/portkey_llama_callback.py b/portkey_ai/llms/callback/portkey_llama_callback.py
similarity index 96%
rename from portkey_ai/llms/llama_index/portkey_llama_callback.py
rename to portkey_ai/llms/callback/portkey_llama_callback.py
index a33a576d..75b5928d 100644
--- a/portkey_ai/llms/llama_index/portkey_llama_callback.py
+++ b/portkey_ai/llms/callback/portkey_llama_callback.py
@@ -10,7 +10,7 @@
 from llama_index.core.utilities.token_counting import TokenCounter
 
 
-class PortkeyCallbackHandler(LlamaIndexBaseCallbackHandler):
+class PortkeyLlamaindex(LlamaIndexBaseCallbackHandler):
     startTimestamp: int = 0
     endTimestamp: float = 0
 
@@ -113,8 +113,10 @@ def llm_event_stop(self, payload: Any, event_id) -> None:
         data = payload.get(EventPayload.RESPONSE, {})
         chunks = payload.get(EventPayload.MESSAGES, {})
 
-        self.token_llm = self._token_counter.estimate_tokens_in_messages(chunks)
+        print("chunks", chunks)
 
+        self.token_llm = self._token_counter.estimate_tokens_in_messages(chunks)
+        print("token_llm", self.token_llm)
         self.response["status"] = 200
         self.response["body"] = {
             "choices": [
@@ -137,6 +139,8 @@ def llm_event_stop(self, payload: Any, event_id) -> None:
         self.response["headers"] = {}
         self.response["streamingMode"] = self.streamingMode
+        print("response", self.response)
+
         self.log_object.update(
             {
                 "request": self.request,