
Commit

fix: removed the unused methods on the integrations and removed headers from the generic response
noble-varghese committed Dec 7, 2023
1 parent 0d3f1fe commit 559ed0a
Showing 3 changed files with 11 additions and 50 deletions.
4 changes: 4 additions & 0 deletions portkey_ai/api_resources/utils.py
@@ -422,6 +422,10 @@ class GenericResponse(BaseModel, extra="allow"):
     warning: Optional[str]
     _headers: Optional[httpx.Headers] = None
 
+    def __str__(self):
+        del self._headers
+        return json.dumps(self.dict(), indent=4)
+
     def get_headers(self) -> Optional[Dict[str, str]]:
         return parse_headers(self._headers)
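With this change, str(response) serializes only the model's payload fields; the committed __str__ deletes the private _headers attribute before dumping so headers never leak into the JSON string. Below is a minimal standalone sketch of the same pattern, with hypothetical class and field names (ToyResponse, status) rather than the actual Portkey GenericResponse; it keeps headers out of the string form by relying on pydantic excluding private attributes from .dict() instead of deleting them.

import json
from typing import Dict, Optional

from pydantic import BaseModel, PrivateAttr


class ToyResponse(BaseModel):
    """Illustrative stand-in, not the actual Portkey GenericResponse."""

    status: Optional[str] = None
    warning: Optional[str] = None
    _headers: Optional[Dict[str, str]] = PrivateAttr(default=None)

    def __str__(self) -> str:
        # Private attributes are not part of .dict(), so the string form
        # carries only the payload fields, never the headers.
        return json.dumps(self.dict(), indent=4)

    def get_headers(self) -> Optional[Dict[str, str]]:
        return self._headers


resp = ToyResponse(status="success", warning=None)
resp._headers = {"x-trace-id": "abc123"}
print(str(resp))           # JSON payload only; headers are not included
print(resp.get_headers())  # {'x-trace-id': 'abc123'}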
50 changes: 4 additions & 46 deletions portkey_ai/llms/langchain/chat.py
@@ -39,7 +39,7 @@
 logger = logging.getLogger(__name__)
 
 if TYPE_CHECKING:
-    from portkey_ai import LLMOptions, Modes, ModesLiteral
+    pass
 
 
 IMPORT_ERROR_MESSAGE = (
@@ -106,10 +106,6 @@ class ChatPortkey(SimpleChatModel):
     response = client("What are the biggest risks facing humanity?")
     """
 
-    mode: Optional[Union["Modes", "ModesLiteral"]] = Field(
-        description="The mode for using the Portkey integration", default=None
-    )
-
     model: Optional[str] = Field(default="gpt-3.5-turbo")
 
     _client: Any = PrivateAttr()
@@ -119,7 +115,7 @@ class ChatPortkey(SimpleChatModel):
     config: Optional[Union[Mapping, str]] = None
     provider: Optional[str] = None
     trace_id: Optional[str] = None
-    metadata: Optional[str] = None
+    custom_metadata: Optional[str] = None
 
     def __init__(
         self,
@@ -130,7 +126,7 @@ def __init__(
         config: Optional[Union[Mapping, str]] = None,
         provider: Optional[str] = None,
         trace_id: Optional[str] = None,
-        metadata: Optional[str] = None,
+        custom_metadata: Optional[str] = None,
         **kwargs,
     ) -> None:
         super().__init__()
@@ -142,49 +138,11 @@ def __init__(
             config=config,
             provider=provider,
             trace_id=trace_id,
-            metadata=metadata,
+            metadata=custom_metadata,
             **kwargs,
         )
         self.model = None
 
-    def add_llms(
-        self, llm_params: Union[LLMOptions, List[LLMOptions]]
-    ) -> "ChatPortkey":
-        """
-        Adds the specified LLM parameters to the list of LLMs. This may be used for
-        fallbacks or load-balancing as specified in the mode.
-        Args:
-            llm_params (Union[LLMOptions, List[LLMOptions]]): A single LLM parameter \
-        set or a list of LLM parameter sets. Each set should be an instance of \
-        LLMOptions with
-            the specified attributes.
-                > provider: Optional[ProviderTypes]
-                > model: str
-                > temperature: float
-                > max_tokens: Optional[int]
-                > max_retries: int
-                > trace_id: Optional[str]
-                > cache_status: Optional[CacheType]
-                > cache: Optional[bool]
-                > metadata: Dict[str, Any]
-                > weight: Optional[float]
-                > **kwargs : Other additional parameters that are supported by \
-        LLMOptions in portkey-ai
-        NOTE: User may choose to pass additional params as well.
-        Returns:
-            self
-        """
-        try:
-            from portkey_ai import LLMOptions
-        except ImportError as exc:
-            raise ImportError(IMPORT_ERROR_MESSAGE) from exc
-        if isinstance(llm_params, LLMOptions):
-            llm_params = [llm_params]
-        self.llms.extend(llm_params)
-        if self.model is None:
-            self.model = self.llms[0].model
-        return self
-
     def _call(
         self,
         messages: List[BaseMessage],
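For callers of the LangChain chat integration, the visible change is the keyword rename: the constructor now accepts custom_metadata and forwards it to the underlying Portkey client as metadata, while the mode field and add_llms helper are gone. A hedged usage sketch follows; the import path is inferred from the file location, the provider and metadata values are placeholders, and only parameters visible in this diff are shown. The call style matches the class docstring quoted in the hunk above.

from portkey_ai.llms.langchain.chat import ChatPortkey  # path assumed from chat.py's location

chat = ChatPortkey(
    provider="openai",                    # illustrative provider name
    trace_id="langchain-demo",            # optional trace id, unchanged by this commit
    custom_metadata='{"_user": "demo"}',  # was `metadata=` before this commit
)
response = chat("What are the biggest risks facing humanity?")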
7 changes: 3 additions & 4 deletions portkey_ai/llms/langchain/completion.py
@@ -45,7 +45,6 @@ class PortkeyLLM(LLM):
     """
 
     model: Optional[str] = Field(default="gpt-3.5-turbo")
-    streaming: bool = False
    _client: Any = PrivateAttr()
 
    api_key: Optional[str] = None
@@ -54,7 +53,7 @@ class PortkeyLLM(LLM):
     config: Optional[Union[Mapping, str]] = None
     provider: Optional[str] = None
     trace_id: Optional[str] = None
-    metadata: Optional[str] = None
+    custom_metadata: Optional[str] = None
 
     def __init__(
         self,
@@ -65,7 +64,7 @@ def __init__(
         config: Optional[Union[Mapping, str]] = None,
         provider: Optional[str] = None,
         trace_id: Optional[str] = None,
-        metadata: Optional[str] = None,
+        custom_metadata: Optional[str] = None,
         **kwargs,
     ) -> None:
         super().__init__()
@@ -77,7 +76,7 @@ def __init__(
             config=config,
             provider=provider,
             trace_id=trace_id,
-            metadata=metadata,
+            metadata=custom_metadata,
             **kwargs,
         )
         self.model = None
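completion.py gets the same treatment as chat.py: the unused streaming flag is dropped and the metadata keyword becomes custom_metadata, still forwarded to the client as metadata. A matching sketch under the same assumptions as above; the import path and the callable-with-a-prompt-string usage follow the standard LangChain LLM interface of that era and are not shown in this diff.

from portkey_ai.llms.langchain.completion import PortkeyLLM  # path assumed from completion.py's location

llm = PortkeyLLM(
    provider="openai",                    # illustrative
    custom_metadata='{"_user": "demo"}',  # renamed from `metadata=`
)
print(llm("Tell me a joke"))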

0 comments on commit 559ed0a
