Commit

update token metric name and tests accordingly
ncybul committed Feb 5, 2025
1 parent 42289dd commit 8931ffa
Showing 21 changed files with 40 additions and 53 deletions.
8 changes: 4 additions & 4 deletions ddtrace/_trace/trace_handlers.py
@@ -596,9 +596,9 @@ def _on_botocore_patched_bedrock_api_call_success(ctx, reqid, latency, input_tok
     span.set_tag_str("bedrock.response.id", reqid)
     span.set_tag_str("bedrock.response.duration", latency)
     if input_token_count:
-        span.set_metric("bedrock.usage.prompt_tokens", int(input_token_count))
+        span.set_metric("bedrock.response.usage.prompt_tokens", int(input_token_count))
     if output_token_count:
-        span.set_metric("bedrock.usage.completion_tokens", int(output_token_count))
+        span.set_metric("bedrock.response.usage.completion_tokens", int(output_token_count))


 def _propagate_context(ctx, headers):
@@ -643,9 +643,9 @@ def _on_botocore_bedrock_process_response(
     if metadata is not None:
         for k, v in metadata.items():
             if k in ["usage.completion_tokens", "usage.prompt_tokens"] and v:
-                span.set_metric("bedrock.{}".format(k), int(v))
+                span.set_metric("bedrock.response.{}".format(k), int(v))
             else:
-                span.set_tag_str("bedrock.{}".format(k), str(v))
+                span.set_tag_str("bedrock.response.{}".format(k), str(v))
     if "embed" in model_name:
         span.set_metric("bedrock.response.embedding_length", len(formatted_response["text"][0]))
     span.finish()
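
A minimal sketch of what the renamed handler metrics look like on a span, using a dict-backed stand-in rather than ddtrace's real span class; RecordingSpan and record_bedrock_usage below are illustrative names, not part of the library.

# Illustrative stand-in for a span: records metrics in a plain dict so the
# renamed keys from the hunk above are easy to inspect.
class RecordingSpan:
    def __init__(self):
        self.metrics = {}

    def set_metric(self, key, value):
        self.metrics[key] = value


def record_bedrock_usage(span, input_token_count, output_token_count):
    # Mirrors the renamed metric keys from the diff above; the helper name is hypothetical.
    if input_token_count:
        span.set_metric("bedrock.response.usage.prompt_tokens", int(input_token_count))
    if output_token_count:
        span.set_metric("bedrock.response.usage.completion_tokens", int(output_token_count))


span = RecordingSpan()
record_bedrock_usage(span, input_token_count=25, output_token_count=4)
assert span.metrics == {
    "bedrock.response.usage.prompt_tokens": 25,
    "bedrock.response.usage.completion_tokens": 4,
}
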
13 changes: 0 additions & 13 deletions ddtrace/llmobs/_integrations/utils.py
@@ -121,19 +121,6 @@ def extract_message_from_part_google(part, role=None):
 def get_llmobs_metrics_tags(integration_name, span):
     usage = {}

-    # bedrock integration tags usage under meta instead of metrics
-    if integration_name == "bedrock":
-        input_tokens = int(span.get_tag("bedrock.usage.prompt_tokens") or 0)
-        output_tokens = int(span.get_tag("bedrock.usage.completion_tokens") or 0)
-        total_tokens = input_tokens + output_tokens
-        if input_tokens:
-            usage[INPUT_TOKENS_METRIC_KEY] = input_tokens
-        if output_tokens:
-            usage[OUTPUT_TOKENS_METRIC_KEY] = output_tokens
-        if total_tokens:
-            usage[TOTAL_TOKENS_METRIC_KEY] = total_tokens
-        return usage
-
     # check for both prompt / completion or input / output tokens
     input_tokens = span.get_metric("%s.response.usage.prompt_tokens" % integration_name) or span.get_metric(
         "%s.response.usage.input_tokens" % integration_name
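
With the bedrock-specific branch deleted, bedrock token counts are expected to flow through the same generic lookup the function already applies to other integrations, keyed on the new metric names. The sketch below approximates that lookup with a dict-backed stub; FakeSpan, resolve_usage, and the output-token half of the lookup are assumptions, not code from this file.

# Rough approximation of the generic lookup in get_llmobs_metrics_tags once the
# bedrock special case is gone. FakeSpan stands in for a real ddtrace span.
class FakeSpan:
    def __init__(self, metrics):
        self._metrics = metrics

    def get_metric(self, key):
        return self._metrics.get(key)


def resolve_usage(integration_name, span):
    # Prompt/completion names are checked first, then input/output, as the
    # surviving comment in the diff describes; the output-token branch here is assumed.
    input_tokens = span.get_metric("%s.response.usage.prompt_tokens" % integration_name) or span.get_metric(
        "%s.response.usage.input_tokens" % integration_name
    )
    output_tokens = span.get_metric("%s.response.usage.completion_tokens" % integration_name) or span.get_metric(
        "%s.response.usage.output_tokens" % integration_name
    )
    return input_tokens, output_tokens


span = FakeSpan({
    "bedrock.response.usage.prompt_tokens": 18,
    "bedrock.response.usage.completion_tokens": 50,
})
assert resolve_usage("bedrock", span) == (18, 50)
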
4 changes: 2 additions & 2 deletions tests/contrib/botocore/test_bedrock_llmobs.py
@@ -80,8 +80,8 @@ def mock_llmobs_span_writer():
 class TestLLMObsBedrock:
     @staticmethod
     def expected_llmobs_span_event(span, n_output, message=False):
-        prompt_tokens = int(span.get_metric("bedrock.usage.prompt_tokens"))
-        completion_tokens = int(span.get_metric("bedrock.usage.completion_tokens"))
+        prompt_tokens = int(span.get_metric("bedrock.response.usage.prompt_tokens"))
+        completion_tokens = int(span.get_metric("bedrock.response.usage.completion_tokens"))
         expected_parameters = {"temperature": float(span.get_tag("bedrock.request.temperature"))}
         if span.get_tag("bedrock.request.max_tokens"):
             expected_parameters["max_tokens"] = int(span.get_tag("bedrock.request.max_tokens"))
@@ -30,8 +30,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 10,
-"bedrock.usage.prompt_tokens": 10,
+"bedrock.response.usage.completion_tokens": 10,
+"bedrock.response.usage.prompt_tokens": 10,
 "process_id": 7458
 },
 "duration": 2112000,
@@ -25,7 +25,7 @@
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
 "bedrock.response.embedding_length": 1536,
-"bedrock.usage.prompt_tokens": 3,
+"bedrock.response.usage.prompt_tokens": 3,
 "process_id": 60939
 },
 "duration": 6739000,
@@ -30,8 +30,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 50,
-"bedrock.usage.prompt_tokens": 18,
+"bedrock.response.usage.completion_tokens": 50,
+"bedrock.response.usage.prompt_tokens": 18,
 "process_id": 14088
 },
 "duration": 2147082000,
@@ -30,8 +30,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 51,
-"bedrock.usage.prompt_tokens": 18,
+"bedrock.response.usage.completion_tokens": 51,
+"bedrock.response.usage.prompt_tokens": 18,
 "process_id": 14088
 },
 "duration": 2185710000,
@@ -31,8 +31,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 32,
-"bedrock.usage.prompt_tokens": 23,
+"bedrock.response.usage.completion_tokens": 32,
+"bedrock.response.usage.prompt_tokens": 23,
 "process_id": 7272
 },
 "duration": 2434000,
@@ -31,8 +31,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 4,
-"bedrock.usage.prompt_tokens": 25,
+"bedrock.response.usage.completion_tokens": 4,
+"bedrock.response.usage.prompt_tokens": 25,
 "process_id": 13707
 },
 "duration": 624710000,
@@ -31,8 +31,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 22,
-"bedrock.usage.prompt_tokens": 21,
+"bedrock.response.usage.completion_tokens": 22,
+"bedrock.response.usage.prompt_tokens": 21,
 "process_id": 40705
 },
 "duration": 2160000,
@@ -31,8 +31,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 22,
-"bedrock.usage.prompt_tokens": 21,
+"bedrock.response.usage.completion_tokens": 22,
+"bedrock.response.usage.prompt_tokens": 21,
 "process_id": 40896
 },
 "duration": 2950000,
@@ -27,7 +27,7 @@
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
 "bedrock.response.embedding_length": 1024,
-"bedrock.usage.prompt_tokens": 7,
+"bedrock.response.usage.prompt_tokens": 7,
 "process_id": 61336
 },
 "duration": 630192000,
@@ -37,8 +37,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 20,
-"bedrock.usage.prompt_tokens": 40,
+"bedrock.response.usage.completion_tokens": 20,
+"bedrock.response.usage.prompt_tokens": 40,
 "process_id": 3568
 },
 "duration": 810213000,
@@ -34,8 +34,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 10,
-"bedrock.usage.prompt_tokens": 20,
+"bedrock.response.usage.completion_tokens": 10,
+"bedrock.response.usage.prompt_tokens": 20,
 "process_id": 13549
 },
 "duration": 659370000,
@@ -35,8 +35,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 20,
-"bedrock.usage.prompt_tokens": 40,
+"bedrock.response.usage.completion_tokens": 20,
+"bedrock.response.usage.prompt_tokens": 40,
 "process_id": 21816
 },
 "duration": 980170000,
@@ -33,8 +33,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 10,
-"bedrock.usage.prompt_tokens": 20,
+"bedrock.response.usage.completion_tokens": 10,
+"bedrock.response.usage.prompt_tokens": 20,
 "process_id": 21816
 },
 "duration": 630536000,
@@ -30,8 +30,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 50,
-"bedrock.usage.prompt_tokens": 18,
+"bedrock.response.usage.completion_tokens": 50,
+"bedrock.response.usage.prompt_tokens": 18,
 "process_id": 96028
 },
 "duration": 2318000,
@@ -29,8 +29,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 60,
-"bedrock.usage.prompt_tokens": 10,
+"bedrock.response.usage.completion_tokens": 60,
+"bedrock.response.usage.prompt_tokens": 10,
 "process_id": 10703
 },
 "duration": 2120703000,
@@ -29,8 +29,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 60,
-"bedrock.usage.prompt_tokens": 10,
+"bedrock.response.usage.completion_tokens": 60,
+"bedrock.response.usage.prompt_tokens": 10,
 "process_id": 2664
 },
 "duration": 3795000,
@@ -30,8 +30,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 60,
-"bedrock.usage.prompt_tokens": 10,
+"bedrock.response.usage.completion_tokens": 60,
+"bedrock.response.usage.prompt_tokens": 10,
 "process_id": 42139
 },
 "duration": 2505000,
@@ -30,8 +30,8 @@
 "_dd.top_level": 1,
 "_dd.tracer_kr": 1.0,
 "_sampling_priority_v1": 1,
-"bedrock.usage.completion_tokens": 60,
-"bedrock.usage.prompt_tokens": 10,
+"bedrock.response.usage.completion_tokens": 60,
+"bedrock.response.usage.prompt_tokens": 10,
 "process_id": 42139
 },
 "duration": 3064000,