Skip to content
This repository has been archived by the owner on Apr 18, 2024. It is now read-only.

Commit

Permalink
Merge pull request #6 from dokulabs/mistral
Browse files Browse the repository at this point in the history
Feat: Add support for Mistral AI and Azure OpenAI monitoring
  • Loading branch information
patcher9 authored Mar 17, 2024
2 parents afc2061 + 9955bd7 commit 021aa16
Show file tree
Hide file tree
Showing 16 changed files with 1,352 additions and 40 deletions.
24 changes: 21 additions & 3 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,14 @@ on:
branches: [ "main" ]
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
- cron: '0 0 * * 0'

env:
OPENAI_API_TOKEN: ${{ secrets.OPENAI_API_TOKEN }}
COHERE_API_TOKEN: ${{ secrets.COHERE_API_TOKEN }}
ANTHROPIC_API_TOKEN: ${{ secrets.ANTHROPIC_API_TOKEN }}
DOKU_URL: ${{ secrets.DOKU_URL }}
DOKU_TOKEN: ${{ secrets.DOKU_TOKEN }}
MISTRAL_API_TOKEN: ${{ secrets.MISTRAL_API_TOKEN }}
DOKU_URL: http://127.0.0.1:9044

jobs:
build:
Expand All @@ -27,6 +27,24 @@ jobs:
steps:
- uses: actions/checkout@v4

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@0d103c3126aa41d772a8362f6aa67afac040f80c # v3.1.0

- name: Setup Doku Stack
run: docker-compose up -d

- name: Sleep for 30 seconds
run: sleep 30

- name: Make API Request and Set DOKU_TOKEN
run: |
RESPONSE=$(curl -X POST $DOKU_URL/api/keys \
-H 'Authorization: ""' \
-H 'Content-Type: application/json' \
-d '{"Name": "GITHUBACTION"}')
MESSAGE=$(echo $RESPONSE | jq -r '.message')
echo "DOKU_TOKEN=${MESSAGE}" >> $GITHUB_ENV
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
- ✅ OpenAI
- ✅ Anthropic
- ✅ Cohere
- ✅ Mistral

Deployed as the backbone for all your LLM monitoring needs, `dokumetry` channels crucial usage data directly to Doku, streamlining the tracking process. Unlock efficient and effective observability for your LLM applications with DokuMetry.

Expand Down
52 changes: 52 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# Local Doku observability stack used by CI (tests.yml starts it with
# `docker-compose up -d`, then requests an API key from the ingester on
# port 9044 — matching DOKU_URL=http://127.0.0.1:9044 in the workflow env).
version: '3.8'  # ignored by Compose V2+; kept for legacy docker-compose v1 compatibility

services:
  # ClickHouse backs Doku's usage-data storage.
  clickhouse:
    image: clickhouse/clickhouse-server:24.1.5
    container_name: clickhouse
    environment:
      CLICKHOUSE_PASSWORD: ${DOKU_DB_PASSWORD:-DOKU}
      CLICKHOUSE_USER: ${DOKU_DB_USER:-default}
    volumes:
      - clickhouse-data:/var/lib/clickhouse
    ports:
      - "9000:9000"   # native TCP protocol (consumed by doku-ingester)
      - "8123:8123"   # HTTP interface (consumed by doku-client init)
    restart: always

  # Ingestion API — receives telemetry from the dokumetry SDK and issues
  # API keys (POST /api/keys, as used in the CI workflow).
  doku-ingester:
    image: ghcr.io/dokulabs/doku-ingester:latest
    container_name: doku-ingester
    environment:
      DOKU_DB_HOST: clickhouse
      # quoted so the value stays a string through YAML parsing rather
      # than being typed as an integer
      DOKU_DB_PORT: "9000"
      DOKU_DB_NAME: ${DOKU_DB_NAME:-default}
      DOKU_DB_USER: ${DOKU_DB_USER:-default}
      DOKU_DB_PASSWORD: ${DOKU_DB_PASSWORD:-DOKU}
    ports:
      - "9044:9044"
    depends_on:
      - clickhouse   # NOTE(review): no healthcheck gating — CI compensates with a 30s sleep
    restart: always

  # Web UI; initializes its schema against ClickHouse over HTTP (8123)
  # and keeps its own state in a local SQLite file.
  doku-client:
    image: ghcr.io/dokulabs/doku-client:latest
    container_name: doku-client
    environment:
      INIT_DB_HOST: clickhouse
      INIT_DB_PORT: "8123"  # quoted: keep as string, not int
      INIT_DB_DATABASE: ${DOKU_DB_NAME:-default}
      INIT_DB_USERNAME: ${DOKU_DB_USER:-default}
      INIT_DB_PASSWORD: ${DOKU_DB_PASSWORD:-DOKU}
      SQLITE_DATABASE_URL: file:/app/client/data/data.db
    ports:
      - "3000:3000"
    depends_on:
      - clickhouse
    volumes:
      - doku-client-data:/app/client/data
    restart: always

# Named volumes so ClickHouse data and the client's SQLite DB survive restarts.
volumes:
  clickhouse-data:
  doku-client-data:
5 changes: 3 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "dokumetry"
version = "0.1.0"
version = "0.1.1"
description = "A Python library for tracking LLM and GenAI usage and sending the usage data to Doku"
authors = ["Doku Labs"]
repository = "https://github.com/dokulabs/dokumetry-python"
Expand All @@ -11,8 +11,9 @@ keywords = ["openai", "anthropic", "claude", "cohere", "llm monitoring", "observ
[tool.poetry.dependencies]
python = "^3.7.1"
requests = "^2.26.0"
openai = "^1.13.0"
openai = "^1.1.0"
anthropic = "^0.19.0"
mistralai = "^0.1.5"

[build-system]
requires = ["poetry-core>=1.1.0"]
Expand Down
38 changes: 27 additions & 11 deletions src/dokumetry/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,19 @@
__init__ module for dokumetry package.
"""
from anthropic import AsyncAnthropic, Anthropic

from openai import AsyncOpenAI, OpenAI
from openai import AsyncOpenAI, OpenAI, AzureOpenAI, AsyncAzureOpenAI
from mistralai.async_client import MistralAsyncClient
from mistralai.client import MistralClient

from .openai import init as init_openai
from .async_openai import init as init_async_openai
from .azure_openai import init as init_azure_openai
from .async_azure_openai import init as init_async_azure_openai
from .anthropic import init as init_anthropic
from .async_anthropic import init as init_async_anthropic
from .cohere import init as init_cohere
from .mistral import init as init_mistral
from .async_mistral import init as init_async_mistral

# pylint: disable=too-few-public-methods
class DokuConfig:
Expand All @@ -24,7 +29,7 @@ class DokuConfig:
application_name = None
skip_resp = None

# pylint: disable=too-many-arguments, line-too-long
# pylint: disable=too-many-arguments, line-too-long, too-many-return-statements
def init(llm, doku_url, api_key, environment="default", application_name="default", skip_resp=False):
"""
Initialize Doku configuration based on the provided function.
Expand Down Expand Up @@ -52,14 +57,25 @@ def init(llm, doku_url, api_key, environment="default", application_name="defaul
elif isinstance(llm, AsyncOpenAI):
init_async_openai(llm, doku_url, api_key, environment, application_name, skip_resp)
return
# pylint: disable=no-else-return
# pylint: disable=no-else-return, line-too-long
if hasattr(llm, 'moderations') and callable(llm.chat.completions.create) and ('.openai.azure.com/' in str(llm.base_url)):
if isinstance(llm, AzureOpenAI):
init_azure_openai(llm, doku_url, api_key, environment, application_name, skip_resp)
elif isinstance(llm, AsyncAzureOpenAI):
init_async_azure_openai(llm, doku_url, api_key, environment, application_name, skip_resp)
return
if isinstance(llm, MistralClient):
init_mistral(llm, doku_url, api_key, environment, application_name, skip_resp)
return
elif isinstance(llm, MistralAsyncClient):
init_async_mistral(llm, doku_url, api_key, environment, application_name, skip_resp)
return
elif isinstance(llm, AsyncAnthropic):
init_async_anthropic(llm, doku_url, api_key, environment, application_name, skip_resp)
return
elif isinstance(llm, Anthropic):
init_anthropic(llm, doku_url, api_key, environment, application_name, skip_resp)
return
elif hasattr(llm, 'generate') and callable(llm.generate):
init_cohere(llm, doku_url, api_key, environment, application_name, skip_resp)
return
elif hasattr(llm, 'messages') and callable(llm.messages.create):
if isinstance(llm, AsyncAnthropic):
init_async_anthropic(llm, doku_url, api_key, environment, application_name, skip_resp)
elif isinstance(llm, Anthropic):
init_anthropic(llm, doku_url, api_key, environment, application_name, skip_resp)

return
Loading

0 comments on commit 021aa16

Please sign in to comment.