Skip to content

Commit

Permalink
fix: correct llama_index imports
Browse files · Browse the repository at this point in the history
  • Loading branch information
noble-varghese committed Dec 7, 2023
1 parent daf3f14 commit 9441629
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 11 deletions.
2 changes: 1 addition & 1 deletion portkey_ai/api_resources/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -416,7 +416,7 @@ def get(self, key: str, default: Optional[Any] = None):

class GenericResponse(BaseModel, extra="allow"):
success: Optional[bool]
data: Optional[Mapping[str, Any]]
data: Optional[Any]
warning: Optional[str]
_headers: Optional[httpx.Headers] = None

Expand Down
16 changes: 6 additions & 10 deletions portkey_ai/llms/llama_index/completions.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from portkey_ai import Message, Portkey
from typing import TYPE_CHECKING, Optional, Union, List, Any, Mapping, cast, Sequence
from typing import Optional, Union, List, Any, Mapping, cast, Sequence
from portkey_ai.api_resources.utils import PortkeyResponse

from portkey_ai.llms.llama_index.utils import (
Expand All @@ -8,7 +8,9 @@
modelname_to_contextsize,
)

if TYPE_CHECKING:
try:
from llama_index.llms.custom import CustomLLM
from llama_index.bridge.pydantic import PrivateAttr
from llama_index.llms.base import (
ChatMessage,
ChatResponse,
Expand All @@ -19,10 +21,6 @@
llm_chat_callback,
llm_completion_callback,
)

try:
from llama_index.llms.custom import CustomLLM
from llama_index.bridge.pydantic import PrivateAttr
except ImportError as exc:
raise ImportError(IMPORT_ERROR_MESSAGE) from exc

Expand Down Expand Up @@ -106,8 +104,7 @@ def _chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse:
List[Message],
[{"role": i.role.value, "content": i.content} for i in messages],
)
response = self._client.chat.completions.create(
messages=_messages, **kwargs)
response = self._client.chat.completions.create(messages=_messages, **kwargs)
self.model = self._get_model(response)

message = response.choices[0].message
Expand Down Expand Up @@ -170,8 +167,7 @@ def gen() -> ChatResponseGen:
return gen()

def _stream_complete(self, prompt: str, **kwargs: Any) -> CompletionResponseGen:
response = self._client.completions.create(
prompt=prompt, stream=True, **kwargs)
response = self._client.completions.create(prompt=prompt, stream=True, **kwargs)

def gen() -> CompletionResponseGen:
text = ""
Expand Down

0 comments on commit 9441629

Please sign in to comment.