
Commit

wip
RogerHYang committed Jan 11, 2024
1 parent 6bd830c commit 50be6c8
Showing 14 changed files with 461 additions and 376 deletions.
@@ -0,0 +1,25 @@
import openai
from openinference.instrumentation.openai import OpenAIInstrumentor
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

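# Configure an OpenTelemetry tracer provider that exports spans over OTLP/HTTP
# to a collector listening on localhost.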
resource = Resource(attributes={})
tracer_provider = trace_sdk.TracerProvider(resource=resource)
span_exporter = OTLPSpanExporter(endpoint="http://127.0.0.1:6006/v1/traces")
span_processor = SimpleSpanProcessor(span_exporter=span_exporter)
tracer_provider.add_span_processor(span_processor=span_processor)
trace_api.set_tracer_provider(tracer_provider=tracer_provider)

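# Patch the OpenAI client so its API calls emit spans.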
OpenAIInstrumentor().instrument()


if __name__ == "__main__":
    response = openai.OpenAI().chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Write a haiku."}],
        max_tokens=20,
    )
    print(response.choices[0].message.content)
@@ -0,0 +1,24 @@
import openai
from openinference.instrumentation.openai import OpenAIInstrumentor
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

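# Export spans over OTLP/HTTP to a local collector.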
resource = Resource(attributes={})
tracer_provider = trace_sdk.TracerProvider(resource=resource)
span_exporter = OTLPSpanExporter(endpoint="http://127.0.0.1:6006/v1/traces")
span_processor = SimpleSpanProcessor(span_exporter=span_exporter)
tracer_provider.add_span_processor(span_processor=span_processor)
trace_api.set_tracer_provider(tracer_provider=tracer_provider)

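# Instrument the OpenAI client before issuing the embeddings request below.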
OpenAIInstrumentor().instrument()


if __name__ == "__main__":
response = openai.OpenAI().embeddings.create(
model="text-embedding-ada-002",
input="hello world",
)
print(response.data[0].embedding)
@@ -0,0 +1,29 @@
from importlib import import_module

from openinference.instrumentation.openai import OpenAIInstrumentor
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

resource = Resource(attributes={})
tracer_provider = trace_sdk.TracerProvider(resource=resource)
span_exporter = OTLPSpanExporter(endpoint="http://127.0.0.1:6006/v1/traces")
span_processor = SimpleSpanProcessor(span_exporter=span_exporter)
tracer_provider.add_span_processor(span_processor=span_processor)
trace_api.set_tracer_provider(tracer_provider=tracer_provider)

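# Instrument both the underlying httpx client and the OpenAI client,
# so HTTP-level spans are captured alongside the OpenAI spans.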
HTTPXClientInstrumentor().instrument()
OpenAIInstrumentor().instrument()


if __name__ == "__main__":
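    # Import openai only after the instrumentors above have been applied.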
    openai = import_module("openai")
    response = openai.OpenAI().chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Write a haiku."}],
        max_tokens=20,
    )
    print(response.choices[0].message.content)
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ instruments = [
test = [
"openai == 1.0.0",
"opentelemetry-sdk",
"opentelemetry-instrumentation-httpx",
"respx",
"numpy",
]
@@ -41,12 +41,12 @@ def _instrument(self, **kwargs: Any) -> None:
         wrap_function_wrapper(
             module=_MODULE,
             name="OpenAI.request",
-            wrapper=_Request(tracer=tracer),
+            wrapper=_Request(tracer=tracer, openai=openai),
         )
         wrap_function_wrapper(
             module=_MODULE,
             name="AsyncOpenAI.request",
-            wrapper=_AsyncRequest(tracer=tracer),
+            wrapper=_AsyncRequest(tracer=tracer, openai=openai),
         )
 
     def _uninstrument(self, **kwargs: Any) -> None:
