diff --git a/portkey_ai/api_resources/utils.py b/portkey_ai/api_resources/utils.py index 665a5e77..86f9a596 100644 --- a/portkey_ai/api_resources/utils.py +++ b/portkey_ai/api_resources/utils.py @@ -422,6 +422,10 @@ class GenericResponse(BaseModel, extra="allow"): warning: Optional[str] _headers: Optional[httpx.Headers] = None + def __str__(self): + d = {k: v for k, v in self.dict().items() if k != "_headers"} + return json.dumps(d, indent=4) + def get_headers(self) -> Optional[Dict[str, str]]: return parse_headers(self._headers) diff --git a/portkey_ai/llms/langchain/chat.py b/portkey_ai/llms/langchain/chat.py index 70b2f859..314168de 100644 --- a/portkey_ai/llms/langchain/chat.py +++ b/portkey_ai/llms/langchain/chat.py @@ -39,7 +39,7 @@ logger = logging.getLogger(__name__) if TYPE_CHECKING: - from portkey_ai import LLMOptions, Modes, ModesLiteral + pass IMPORT_ERROR_MESSAGE = ( @@ -106,10 +106,6 @@ class ChatPortkey(SimpleChatModel): response = client("What are the biggest risks facing humanity?") """ - mode: Optional[Union["Modes", "ModesLiteral"]] = Field( description="The mode for using the Portkey integration", default=None ) - model: Optional[str] = Field(default="gpt-3.5-turbo") _client: Any = PrivateAttr() @@ -119,7 +115,7 @@ class ChatPortkey(SimpleChatModel): config: Optional[Union[Mapping, str]] = None provider: Optional[str] = None trace_id: Optional[str] = None - metadata: Optional[str] = None + custom_metadata: Optional[str] = None def __init__( self, @@ -130,7 +126,7 @@ def __init__( config: Optional[Union[Mapping, str]] = None, provider: Optional[str] = None, trace_id: Optional[str] = None, - metadata: Optional[str] = None, + custom_metadata: Optional[str] = None, **kwargs, ) -> None: super().__init__() @@ -142,49 +138,11 @@ def __init__( config=config, provider=provider, trace_id=trace_id, - metadata=metadata, + metadata=custom_metadata, **kwargs, ) self.model = None - def add_llms( self, llm_params: Union[LLMOptions, List[LLMOptions]] ) -> "ChatPortkey": """ Adds the 
specified LLM parameters to the list of LLMs. This may be used for - fallbacks or load-balancing as specified in the mode. - Args: - llm_params (Union[LLMOptions, List[LLMOptions]]): A single LLM parameter \ - set or a list of LLM parameter sets. Each set should be an instance of \ - LLMOptions with - the specified attributes. - > provider: Optional[ProviderTypes] - > model: str - > temperature: float - > max_tokens: Optional[int] - > max_retries: int - > trace_id: Optional[str] - > cache_status: Optional[CacheType] - > cache: Optional[bool] - > metadata: Dict[str, Any] - > weight: Optional[float] - > **kwargs : Other additional parameters that are supported by \ - LLMOptions in portkey-ai - NOTE: User may choose to pass additional params as well. - Returns: - self - """ - try: - from portkey_ai import LLMOptions - except ImportError as exc: - raise ImportError(IMPORT_ERROR_MESSAGE) from exc - if isinstance(llm_params, LLMOptions): - llm_params = [llm_params] - self.llms.extend(llm_params) - if self.model is None: - self.model = self.llms[0].model - return self - def _call( self, messages: List[BaseMessage], diff --git a/portkey_ai/llms/langchain/completion.py b/portkey_ai/llms/langchain/completion.py index f4268724..22b712cc 100644 --- a/portkey_ai/llms/langchain/completion.py +++ b/portkey_ai/llms/langchain/completion.py @@ -45,7 +45,6 @@ class PortkeyLLM(LLM): """ model: Optional[str] = Field(default="gpt-3.5-turbo") - streaming: bool = False _client: Any = PrivateAttr() api_key: Optional[str] = None @@ -54,7 +53,7 @@ class PortkeyLLM(LLM): config: Optional[Union[Mapping, str]] = None provider: Optional[str] = None trace_id: Optional[str] = None - metadata: Optional[str] = None + custom_metadata: Optional[str] = None def __init__( self, @@ -65,7 +64,7 @@ def __init__( config: Optional[Union[Mapping, str]] = None, provider: Optional[str] = None, trace_id: Optional[str] = None, - metadata: Optional[str] = None, + custom_metadata: Optional[str] = None, **kwargs, ) 
-> None: super().__init__() @@ -77,7 +76,7 @@ def __init__( config=config, provider=provider, trace_id=trace_id, - metadata=metadata, + metadata=custom_metadata, **kwargs, ) self.model = None