@@ -25,9 +25,10 @@ classifiers = [
"Programming Language :: Python :: 3.13",
]
dependencies = [
"openai>=1.109.1",
@xrmx (Contributor) commented on Dec 19, 2025:
Why? Even if this is the first version that has the Responses API, the instrumentation should continue to work with older versions.

Author replied:
+1, this is not a dependency. There is a lot of junk in this PR; it's a very rough draft and I am still getting my bearings, but it's clearly doing far too much. I expect to cut out a majority of this.

"opentelemetry-api ~= 1.37",
"opentelemetry-instrumentation ~= 0.58b0",
"opentelemetry-semantic-conventions ~= 0.58b0"
"opentelemetry-semantic-conventions ~= 0.58b0",
]

[project.optional-dependencies]
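
On the review point above about supporting older openai releases: a minimal, hypothetical sketch of runtime feature detection that would let the instrumentation skip the Responses API rather than raising the minimum openai version in pyproject.toml (the helper name _openai_has_responses is illustrative and not part of this PR):

    import importlib.util

    def _openai_has_responses() -> bool:
        # Illustrative helper, not from this PR: True when the installed
        # openai package ships the Responses resource module.
        try:
            return importlib.util.find_spec("openai.resources.responses") is not None
        except ModuleNotFoundError:
            # openai itself (or a parent package) is not installed
            return False

The _instrument hook could then wrap the Responses methods only when this returns True, in the same spirit as the hasattr/AttributeError guards used for compact further down.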
@@ -40,6 +40,7 @@
---
"""

import importlib
from typing import Collection

from wrapt import wrap_function_wrapper
@@ -59,6 +60,10 @@
async_embeddings_create,
chat_completions_create,
embeddings_create,
async_responses_compact,
async_responses_create,
responses_compact,
responses_create,
)


@@ -128,10 +133,56 @@ def _instrument(self, **kwargs):
),
)

def _uninstrument(self, **kwargs):
import openai # pylint: disable=import-outside-toplevel # noqa: PLC0415
# Add instrumentation for the Responses API
wrap_function_wrapper(
module="openai.resources.responses",
name="Responses.create",
wrapper=responses_create(
tracer, logger, instruments, is_content_enabled()
),
)

wrap_function_wrapper(
module="openai.resources.responses",
name="AsyncResponses.create",
wrapper=async_responses_create(
tracer, logger, instruments, is_content_enabled()
),
)

# `Responses.compact` was added later in openai-python; guard so older
# supported versions don't fail instrumentation.
try:
wrap_function_wrapper(
module="openai.resources.responses",
name="Responses.compact",
wrapper=responses_compact(
tracer, logger, instruments, is_content_enabled()
),
)
wrap_function_wrapper(
module="openai.resources.responses",
name="AsyncResponses.compact",
wrapper=async_responses_compact(
tracer, logger, instruments, is_content_enabled()
),
)
except AttributeError:
pass

unwrap(openai.resources.chat.completions.Completions, "create")
unwrap(openai.resources.chat.completions.AsyncCompletions, "create")
unwrap(openai.resources.embeddings.Embeddings, "create")
unwrap(openai.resources.embeddings.AsyncEmbeddings, "create")
def _uninstrument(self, **kwargs):
chat_mod = importlib.import_module("openai.resources.chat.completions")
unwrap(chat_mod.Completions, "create")
@xrmx (Contributor) commented on Dec 19, 2025:
unwrap also takes strings, just like the wrap functions, so you shouldn't need the importlib usage.

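As a minimal sketch of this suggestion (assuming the dotted-path form of unwrap provided by opentelemetry-instrumentation; this is not code from the PR), the importlib calls could be replaced with:

    from opentelemetry.instrumentation.utils import unwrap

    # Dotted-path strings mirror the module/name pairs passed to wrap_function_wrapper
    unwrap("openai.resources.chat.completions.Completions", "create")
    unwrap("openai.resources.chat.completions.AsyncCompletions", "create")
    unwrap("openai.resources.embeddings.Embeddings", "create")
    unwrap("openai.resources.embeddings.AsyncEmbeddings", "create")
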
unwrap(chat_mod.AsyncCompletions, "create")

embeddings_mod = importlib.import_module("openai.resources.embeddings")
unwrap(embeddings_mod.Embeddings, "create")
unwrap(embeddings_mod.AsyncEmbeddings, "create")

responses_mod = importlib.import_module("openai.resources.responses")
unwrap(responses_mod.Responses, "create")
unwrap(responses_mod.AsyncResponses, "create")
if hasattr(responses_mod.Responses, "compact"):
unwrap(responses_mod.Responses, "compact")
if hasattr(responses_mod.AsyncResponses, "compact"):
unwrap(responses_mod.AsyncResponses, "compact")
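
For orientation, a hypothetical end-to-end sketch of what the new wrappers would intercept, assuming the package's usual entry points (module opentelemetry.instrumentation.openai_v2 and class OpenAIInstrumentor; the model name is only an example):

    import openai
    from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor

    OpenAIInstrumentor().instrument()   # wraps chat, embeddings, and now responses
    client = openai.OpenAI()
    response = client.responses.create(  # traced by the new Responses.create wrapper
        model="gpt-4o-mini",
        input="Say hello",
    )
    print(response.output_text)
    OpenAIInstrumentor().uninstrument()  # unwraps everything, including compact when present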