Skip to content

Added events and SDK compatibility (tested with Cohere for now; can easily be extended) #2490

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from opentelemetry.instrumentation.alephalpha.config import Config
from opentelemetry.instrumentation.alephalpha.utils import dont_throw
from wrapt import wrap_function_wrapper

from opentelemetry.sdk.trace import Event
from opentelemetry import context as context_api
from opentelemetry.trace import get_tracer, SpanKind
from opentelemetry.trace.status import Status, StatusCode
Expand Down Expand Up @@ -35,6 +35,11 @@
},
]

def _set_span_attribute_with_config(span, name, value):
if value is not None:
if value != "":
span.set_attribute(name, value)
return

def should_send_prompts():
return (
Expand All @@ -55,26 +60,49 @@ def _set_input_attributes(span, llm_request_type, args, kwargs):

if should_send_prompts():
if llm_request_type == LLMRequestTypeValues.COMPLETION:
_set_span_attribute(span, f"{SpanAttributes.LLM_PROMPTS}.0.role", "user")
_set_span_attribute(
span,
f"{SpanAttributes.LLM_PROMPTS}.0.content",
args[0].prompt.items[0].text,
)
if Config.use_legacy_attributes:
_set_span_attribute_with_config(span, f"{SpanAttributes.LLM_PROMPTS}.0.role", "user")
_set_span_attribute_with_config(
span,
f"{SpanAttributes.LLM_PROMPTS}.0.content",
args[0].prompt.items[0].text,
)
# Emit events if not using legacy attributes
else:
span.add_event(
"prompt",
{
"messaging.role": "user",
"messaging.content": args[0].prompt.items[0].text,
"messaging.index": 0,
},
)


@dont_throw
def _set_response_attributes(span, llm_request_type, response):
if should_send_prompts():
if llm_request_type == LLMRequestTypeValues.COMPLETION:
_set_span_attribute(
span,
f"{SpanAttributes.LLM_COMPLETIONS}.0.content",
response.completions[0].completion,
)
_set_span_attribute(
span, f"{SpanAttributes.LLM_COMPLETIONS}.0.role", "assistant"
)
# Use legacy attributes if configured
if Config.use_legacy_attributes:
_set_span_attribute_with_config(
span,
f"{SpanAttributes.LLM_COMPLETIONS}.0.content",
response.completions[0].completion,
)
_set_span_attribute_with_config(
span, f"{SpanAttributes.LLM_COMPLETIONS}.0.role", "assistant"
)
# Emit events if not using legacy attributes
else:
span.add_event(
"completion",
{
"messaging.content": response.completions[0].completion,
"messaging.role": "assistant",
"messaging.index": 0,
},
)

input_tokens = getattr(response, "num_tokens_prompt_total", 0)
output_tokens = getattr(response, "num_tokens_generated", 0)
Expand Down Expand Up @@ -161,6 +189,11 @@ def instrumentation_dependencies(self) -> Collection[str]:
def _instrument(self, **kwargs):
tracer_provider = kwargs.get("tracer_provider")
tracer = get_tracer(__name__, __version__, tracer_provider)
config = kwargs.get("config", Config())
if config is None:
config = Config()
# Set the global configuration for legacy attribute usage
Config.use_legacy_attributes = config.use_legacy_attributes
for wrapped_method in WRAPPED_METHODS:
wrap_method = wrapped_method.get("method")
wrap_function_wrapper(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
class Config:
    """Module-level configuration flags for the Aleph Alpha instrumentation."""

    # Optional callable that receives exceptions swallowed by instrumentation
    # helpers; presumably consumed by the `dont_throw` utility imported
    # alongside this config — TODO confirm against utils.
    exception_logger = None
    # When True (default), record prompts/completions as span attributes
    # (legacy behavior); when False, emit them as span events instead.
    use_legacy_attributes = True
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
import os
import pytest
from opentelemetry.semconv_ai import SpanAttributes
import aleph_alpha_client
from opentelemetry.instrumentation.alephalpha.config import Config

@pytest.fixture
def reset_config():
    """Snapshot ``Config.use_legacy_attributes`` and restore it after the test.

    The original implementation read the flag from a throwaway ``Config()``
    instance and, after the yield, assigned it back onto another throwaway
    instance — which creates an *instance* attribute and never restores the
    class-level flag that the instrumentation actually reads (the pre-yield
    re-assignment was also a no-op). Read and restore on the class itself.
    """
    original_value = Config.use_legacy_attributes
    yield
    Config.use_legacy_attributes = original_value

def _create_client():
    """Build an Aleph Alpha client from environment variables.

    Skips the calling test when ``ALEPH_ALPHA_API_KEY`` is not set, so the
    suite can run without live credentials.
    """
    token = os.environ.get("ALEPH_ALPHA_API_KEY")
    if not token:
        pytest.skip("ALEPH_ALPHA_API_KEY environment variable not set.")
    host = os.environ.get("ALEPH_ALPHA_API_HOST", "https://api.aleph-alpha.com")
    return aleph_alpha_client.Client(token=token, host=host)

def test_legacy_attributes(exporter, reset_config):
    """Legacy mode: prompt/completion appear as span attributes, not events.

    Fix: the original wrote ``Config().use_legacy_attributes = True``, which
    sets an attribute on a throwaway instance; the instrumentation reads the
    class-level flag, so the assignment had no effect. Assign on the class.
    """
    Config.use_legacy_attributes = True
    client = _create_client()

    # Perform a simple completion request.
    prompt = "Tell me a joke"
    response = client.complete(
        prompt=aleph_alpha_client.Prompt.from_text(prompt), model="luminous-base"
    )

    # Get the span and verify legacy attribute behavior.
    spans = exporter.get_finished_spans()
    completion_span = spans[0]

    # Legacy prompt/completion attributes are present.
    assert (
        completion_span.attributes.get(f"{SpanAttributes.LLM_PROMPTS}.0.content")
        == prompt
    )
    assert (
        completion_span.attributes.get(f"{SpanAttributes.LLM_COMPLETIONS}.0.content")
        == response.completions[0].completion
    )

    # No prompt/completion events were emitted in legacy mode.
    assert not any(event.name == "prompt" for event in completion_span.events)
    assert not any(event.name == "completion" for event in completion_span.events)

def test_event_based_attributes(exporter, reset_config):
    """Event mode: prompt/completion emitted as span events, no attributes.

    Fix: the original wrote ``Config().use_legacy_attributes = False``, which
    only sets an attribute on a throwaway instance — the class-level flag the
    instrumentation reads stayed ``True``, so no events would ever be emitted
    and this test could not pass. Assign on the class instead.
    """
    Config.use_legacy_attributes = False
    client = _create_client()

    # Perform a simple completion request.
    prompt = "Tell me a joke"
    response = client.complete(
        prompt=aleph_alpha_client.Prompt.from_text(prompt), model="luminous-base"
    )

    # Get the span and verify event-based behavior.
    spans = exporter.get_finished_spans()
    completion_span = spans[0]

    # Legacy attributes must NOT be present in event mode.
    assert (
        completion_span.attributes.get(f"{SpanAttributes.LLM_PROMPTS}.0.content")
        is None
    )
    assert (
        completion_span.attributes.get(f"{SpanAttributes.LLM_COMPLETIONS}.0.content")
        is None
    )

    # Exactly one prompt event and one completion event were emitted.
    prompt_events = [e for e in completion_span.events if e.name == "prompt"]
    completion_events = [e for e in completion_span.events if e.name == "completion"]

    # Prompt event carries the user message.
    assert len(prompt_events) == 1
    assert prompt_events[0].attributes["messaging.role"] == "user"
    assert prompt_events[0].attributes["messaging.content"] == prompt
    assert prompt_events[0].attributes["messaging.index"] == 0

    # Completion event carries the assistant message.
    assert len(completion_events) == 1
    assert (
        completion_events[0].attributes["messaging.content"]
        == response.completions[0].completion
    )
    assert completion_events[0].attributes["messaging.role"] == "assistant"
    assert completion_events[0].attributes["messaging.index"] == 0

    # Token usage is still recorded as span attributes in event mode.
    assert (
        completion_span.attributes["llm.usage.total_tokens"]
        == response.num_tokens_prompt_total + response.num_tokens_generated
    )
    assert (
        completion_span.attributes["llm.usage.prompt_tokens"]
        == response.num_tokens_prompt_total
    )
    assert (
        completion_span.attributes["llm.usage.completion_tokens"]
        == response.num_tokens_generated
    )

# NOTE(review): this is an exact duplicate of the `_create_client` defined
# earlier in this module; as the later definition it silently shadows the
# first. One of the two copies should be deleted.
def _create_client():
    """Return an Aleph Alpha client configured from environment variables.

    Skips the calling test when ``ALEPH_ALPHA_API_KEY`` is not set.
    """
    api_key = os.environ.get("ALEPH_ALPHA_API_KEY")
    if not api_key:
        pytest.skip("ALEPH_ALPHA_API_KEY environment variable not set.")
    return aleph_alpha_client.Client(
        token=api_key,
        host=os.environ.get("ALEPH_ALPHA_API_HOST", "https://api.aleph-alpha.com")
    )
Loading
Loading