Commit 4aa9c23

mistral with openai
jayeshp19 committed Nov 28, 2024
1 parent 57834ec commit 4aa9c23
Showing 3 changed files with 55 additions and 3 deletions.
livekit-agents/livekit/agents/llm/llm.py (1 addition, 0 deletions)
@@ -50,6 +50,7 @@ class Choice:
 @dataclass
 class LLMCapabilities:
     supports_choices_on_int: bool = True
+    supports_stream_options: bool = True


 @dataclass
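The new flag lets OpenAI-compatible backends declare whether they accept the `stream_options` request parameter. A minimal standalone sketch of how the dataclass behaves (mirroring the two fields above; the `caps` variable is illustrative, not from the commit):

    from dataclasses import dataclass

    @dataclass
    class LLMCapabilities:
        supports_choices_on_int: bool = True
        supports_stream_options: bool = True

    # A provider that rejects stream_options (e.g. the Mistral path added
    # below) simply overrides the default:
    caps = LLMCapabilities(supports_stream_options=False)
    assert caps.supports_choices_on_int and not caps.supports_stream_options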
(second changed file; its path is not shown in this view)
@@ -46,6 +46,7 @@
     ChatModels,
     DeepSeekChatModels,
     GroqChatModels,
+    MistralChatModels,
     OctoChatModels,
     PerplexityChatModels,
     TelnyxChatModels,
@@ -85,7 +86,10 @@ def __init__(
         ``OPENAI_API_KEY`` environmental variable.
         """
         super().__init__()
-        self._capabilities = llm.LLMCapabilities(supports_choices_on_int=True)
+        self._capabilities = llm.LLMCapabilities(
+            supports_choices_on_int=True,
+            supports_stream_options=True,
+        )

         self._opts = LLMOptions(
             model=model,
@@ -196,6 +200,42 @@ def with_cerebras(
             tool_choice=tool_choice,
         )

+    @staticmethod
+    def with_mistral(
+        *,
+        model: str | MistralChatModels = "mistral-large-latest",
+        api_key: str | None = None,
+        base_url: str | None = "https://api.mistral.ai/v1",
+        user: str | None = None,
+        temperature: float | None = None,
+        parallel_tool_calls: bool | None = None,
+        tool_choice: Union[ToolChoice, Literal["auto", "required", "none"]] = "auto",
+    ) -> LLM:
+        """
+        Create a new instance of Mistral LLM.
+        ``api_key`` must be set to your Mistral API key, either using the argument or by setting
+        the ``MISTRAL_API_KEY`` environmental variable.
+        """
+
+        api_key = api_key or os.environ.get("MISTRAL_API_KEY")
+        if api_key is None:
+            raise ValueError(
+                "Mistral API key is required, either as argument or set MISTRAL_API_KEY environmental variable"
+            )
+
+        mistral_llm = LLM(
+            model=model,
+            api_key=api_key,
+            base_url=base_url,
+            user=user,
+            temperature=temperature,
+            parallel_tool_calls=parallel_tool_calls,
+            tool_choice=tool_choice,
+        )
+        mistral_llm._capabilities = llm.LLMCapabilities(supports_stream_options=False)
+        return mistral_llm
+
     @staticmethod
     def with_vertex(
         *,
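A usage sketch, not part of the commit: how the new constructor is expected to be called. The import path is an assumption based on the plugin's package naming, and the private `_capabilities` access only illustrates what the hunk above sets.

    # Hypothetical caller; import path assumed, not shown in this diff.
    from livekit.plugins.openai import LLM

    mistral_llm = LLM.with_mistral(
        model="ministral-8b-latest",  # any MistralChatModels name, or a raw str
        temperature=0.3,              # forwarded to LLMOptions like the other kwargs
        # api_key omitted: falls back to the MISTRAL_API_KEY environment variable
    )

    # Per the hunk above, the returned instance opts out of stream_options:
    assert mistral_llm._capabilities.supports_stream_options is False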
@@ -660,7 +700,8 @@ def chat(
                 }
             else:
                 opts["tool_choice"] = tool_choice
-
+        if self._capabilities.supports_stream_options:
+            opts["stream_options"] = {"include_usage": True}
         user = self._opts.user or openai.NOT_GIVEN
         if temperature is None:
             temperature = self._opts.temperature
@@ -672,7 +713,6 @@
             model=self._opts.model,
             n=n,
             temperature=temperature,
-            stream_options={"include_usage": True},
             stream=True,
             user=user,
             **opts,
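Net effect of the two hunks above: `stream_options={"include_usage": True}` is no longer passed unconditionally to `chat.completions.create`; it is merged into `opts` only when the capability flag is set, which keeps Mistral-backed instances (created with `supports_stream_options=False`) from sending a parameter their endpoint presumably rejects. A condensed sketch of the pattern; `build_stream_kwargs` is a hypothetical name, not from the diff:

    # Condensed from the diff: collect optional kwargs in a dict, then splat
    # it into the create() call so unsupported parameters are simply absent.
    def build_stream_kwargs(supports_stream_options: bool) -> dict:
        opts: dict = {"stream": True}
        if supports_stream_options:
            # asks OpenAI-compatible servers to append a final usage chunk
            opts["stream_options"] = {"include_usage": True}
        return opts

    assert "stream_options" not in build_stream_kwargs(False)  # Mistral path
    assert build_stream_kwargs(True)["stream_options"] == {"include_usage": True}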
(third changed file; its path is not shown in this view)
@@ -37,6 +37,17 @@

 # adapters for OpenAI-compatible LLMs

+MistralChatModels = Literal[
+    "mistral-large-latest",
+    "ministral-3b-latest",
+    "ministral-8b-latest",
+    "mistral-small-latest",
+    "mistral-large-2411",
+    "ministral-3b-2410",
+    "ministral-8b-2410",
+    "mistral-small-2409",
+]
+
 TelnyxChatModels = Literal[
     "meta-llama/Meta-Llama-3.1-8B-Instruct",
     "meta-llama/Meta-Llama-3.1-70B-Instruct",
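Because `with_mistral` types its parameter as `str | MistralChatModels`, arbitrary strings still pass a type checker; the `Literal` alias mainly documents the known model names and drives editor autocompletion. A small standalone sketch of the checker-side behavior, using a trimmed copy of the alias:

    from typing import Literal

    # Trimmed copy of the alias for illustration.
    MistralChatModels = Literal["mistral-large-latest", "ministral-8b-latest"]

    def pick(model: MistralChatModels) -> str:
        return model

    pick("mistral-large-latest")  # accepted by mypy/pyright
    # pick("mistral-huge-2412")   # would be flagged: not a member of the Literal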
