feat: upgrade ollama client to 0.4.5 (#345)
Co-authored-by: danran <[email protected]>
JamesGuthrie and CoolDarran authored Jan 7, 2025
1 parent 2f2ab9b commit c579238
Showing 5 changed files with 13 additions and 14 deletions.
2 changes: 1 addition & 1 deletion projects/extension/requirements.txt
@@ -1,6 +1,6 @@
 openai==1.44.0
 tiktoken==0.7.0
-ollama==0.2.1
+ollama==0.4.5
 anthropic==0.29.0
 cohere==5.5.8
 backoff==2.2.1
4 changes: 2 additions & 2 deletions projects/extension/sql/idempotent/002-ollama.sql
@@ -162,7 +162,7 @@ as $python$
 args["images"] = images_1

 resp = client.generate(model, prompt, stream=False, **args)
-return json.dumps(resp)
+return resp.model_dump_json()
 $python$
 language plpython3u volatile parallel safe security invoker
 set search_path to pg_catalog, pg_temp
@@ -206,7 +206,7 @@ as $python$

 resp = client.chat(model, messages_1, stream=False, **args)

-return json.dumps(resp)
+return resp.model_dump_json()
 $python$
 language plpython3u volatile parallel safe security invoker
 set search_path to pg_catalog, pg_temp
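The serialization change above follows from the 0.4.x ollama client returning Pydantic response models (GenerateResponse, ChatResponse) instead of plain dicts, so json.dumps(resp) is replaced by resp.model_dump_json(). A minimal sketch of the difference, assuming a local Ollama server and a model name ("llama3") chosen purely for illustration:

import ollama

client = ollama.Client()

# With ollama >= 0.4 this returns a typed GenerateResponse (a Pydantic model),
# not a plain dict, so json.dumps(resp) no longer applies.
resp = client.generate(model="llama3", prompt="Say hello", stream=False)

# Pydantic models serialize themselves to JSON.
print(resp.model_dump_json())

# The same applies to chat responses.
chat_resp = client.chat(
    model="llama3",
    messages=[{"role": "user", "content": "Say hello"}],
    stream=False,
)
print(chat_resp.model_dump_json())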
10 changes: 4 additions & 6 deletions projects/pgai/pgai/vectorizer/embedders/ollama.py
@@ -1,12 +1,10 @@
 import os
-from collections.abc import Mapping, Sequence
+from collections.abc import Sequence
 from functools import cached_property
-from typing import (
-    Any,
-    Literal,
-)
+from typing import Literal

 import ollama
+from ollama import ShowResponse
 from pydantic import BaseModel
 from typing_extensions import TypedDict, override

@@ -130,7 +128,7 @@ async def call_embed_api(self, documents: str | list[str]) -> EmbeddingResponse:
 )
 return EmbeddingResponse(embeddings=response["embeddings"], usage=usage)

-async def _model(self) -> Mapping[str, Any]:
+async def _model(self) -> ShowResponse:
 """
 Gets the model details from the Ollama API
 :return:
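The new return annotation reflects that ollama 0.4.x exposes typed responses: fetching model details (presumably via client.show()) yields an ollama.ShowResponse Pydantic model rather than an untyped mapping. A minimal sketch, assuming a local Ollama server and an illustrative model name ("nomic-embed-text"):

import asyncio

import ollama
from ollama import ShowResponse


async def fetch_model_details(name: str) -> ShowResponse:
    # ollama >= 0.4 returns a typed ShowResponse instead of a dict-like mapping.
    client = ollama.AsyncClient()
    return await client.show(name)


info = asyncio.run(fetch_model_details("nomic-embed-text"))
print(info.details)            # e.g. model family and parameter size
print(info.model_dump_json())  # the full response as JSON, as with the SQL functions above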
2 changes: 1 addition & 1 deletion projects/pgai/pyproject.toml
@@ -15,7 +15,7 @@ dependencies = [
 "langchain-openai>=0.1,<1.0",
 "langchain-text-splitters>=0.2,<1.0",
 "pydantic>=2.0,<3.0",
-"ollama>=0.3.3,<0.4.0",
+"ollama>=0.4.5,<0.5.0",
 "openai>=1.44,<2.0",
 "python-dotenv>=1.0,<2.0",
 "structlog>=24.0,<25.0",
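Both the extension (requirements.txt, pinned to ==0.4.5) and the pgai library (pyproject.toml, ">=0.4.5,<0.5.0") now require the 0.4.x client, so a quick check of the installed version can confirm an environment picks up the upgrade. A small sketch using only the standard library:

from importlib.metadata import version

# For the extension this should report exactly 0.4.5; for the pgai library any
# 0.4.x release satisfying ">=0.4.5,<0.5.0" meets the new constraint.
print(version("ollama"))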
9 changes: 5 additions & 4 deletions projects/pgai/uv.lock

Some generated files are not rendered by default.
