diff --git a/.github/COMMIT_TEMPLATE.txt b/.github/COMMIT_TEMPLATE.txt index f6e418cc2e8..55dc46d5378 100644 --- a/.github/COMMIT_TEMPLATE.txt +++ b/.github/COMMIT_TEMPLATE.txt @@ -29,5 +29,5 @@ feat/fix/docs/refactor/ci(xxx): commit title here # mysqlpython, openai, opentelemetry, opentracer, profile, psycopg, pylibmc, pymemcache, # pymongo, pymysql, pynamodb, pyodbc, pyramid, pytest, redis, rediscluster, requests, rq, # sanic, snowflake, sourcecode, sqlalchemy, starlette, stdlib, structlog, subprocess, -# telemetry, test_logging, tornado, tracer, unittest, urllib3, vendor, vertica, wsgi, +# telemetry, test_logging, tornado, tracer, unittest, urllib3, valkey, vendor, vertica, wsgi, # yaaredis diff --git a/.github/workflows/build_deploy.yml b/.github/workflows/build_deploy.yml index 179ed367141..47584911df4 100644 --- a/.github/workflows/build_deploy.yml +++ b/.github/workflows/build_deploy.yml @@ -27,7 +27,7 @@ jobs: build_wheels: uses: ./.github/workflows/build_python_3.yml with: - cibw_build: 'cp37* cp38* cp39* cp310* cp311* cp312* cp313*' + cibw_build: 'cp38* cp39* cp310* cp311* cp312* cp313*' build_sdist: name: Build source distribution diff --git a/.gitlab/services.yml b/.gitlab/services.yml index 0282dd9e9d0..3eb29af107a 100644 --- a/.gitlab/services.yml +++ b/.gitlab/services.yml @@ -28,6 +28,9 @@ redis: name: registry.ddbuild.io/redis:7.0.7 alias: redis + valkey: + name: registry.ddbuild.io/images/mirror/valkey:8.0-alpine + alias: valkey kafka: name: registry.ddbuild.io/images/mirror/apache/kafka:3.8.0 alias: kafka @@ -54,6 +57,9 @@ rediscluster: name: registry.ddbuild.io/images/mirror/grokzen/redis-cluster:6.2.0 alias: rediscluster + valkeycluster: + name: registry.ddbuild.io/images/mirror/grokzen/redis-cluster:6.2.0 + alias: valkeycluster elasticsearch: name: registry.ddbuild.io/images/mirror/library/elasticsearch:7.17.23 alias: elasticsearch diff --git a/.riot/requirements/11ac941.txt b/.riot/requirements/11ac941.txt new file mode 100644 index 00000000000..92df617ba6e --- /dev/null +++ b/.riot/requirements/11ac941.txt @@ -0,0 +1,26 @@ +# +# This file is autogenerated by pip-compile with Python 3.8 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/11ac941.in +# +async-timeout==5.0.1 +attrs==24.3.0 +coverage[toml]==7.6.1 +exceptiongroup==1.2.2 +hypothesis==6.45.0 +importlib-metadata==8.5.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +tomli==2.2.1 +valkey==6.0.2 +zipp==3.20.2 diff --git a/.riot/requirements/1e98e9b.txt b/.riot/requirements/1e98e9b.txt new file mode 100644 index 00000000000..6e2d11413c3 --- /dev/null +++ b/.riot/requirements/1e98e9b.txt @@ -0,0 +1,26 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e98e9b.in +# +async-timeout==5.0.1 +attrs==24.3.0 +coverage[toml]==7.6.10 +exceptiongroup==1.2.2 +hypothesis==6.45.0 +importlib-metadata==8.5.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.23.7 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +sortedcontainers==2.4.0 +tomli==2.2.1 +valkey==6.0.2 +zipp==3.21.0 diff --git a/.riot/requirements/4aa2a2a.txt b/.riot/requirements/4aa2a2a.txt new file mode 100644 index 00000000000..6bc72515b3f --- /dev/null +++ 
b/.riot/requirements/4aa2a2a.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/4aa2a2a.in +# +async-timeout==5.0.1 +attrs==24.3.0 +coverage[toml]==7.6.10 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.23.7 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +sortedcontainers==2.4.0 +valkey==6.0.2 diff --git a/.riot/requirements/7219cf4.txt b/.riot/requirements/7219cf4.txt new file mode 100644 index 00000000000..ffb631b7bcb --- /dev/null +++ b/.riot/requirements/7219cf4.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/7219cf4.in +# +attrs==24.3.0 +coverage[toml]==7.6.10 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.23.7 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +sortedcontainers==2.4.0 +valkey==6.0.2 diff --git a/.riot/requirements/b96b665.txt b/.riot/requirements/b96b665.txt new file mode 100644 index 00000000000..8b14d5cb8ec --- /dev/null +++ b/.riot/requirements/b96b665.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/b96b665.in +# +attrs==24.3.0 +coverage[toml]==7.6.10 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.23.7 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +sortedcontainers==2.4.0 +valkey==6.0.2 diff --git a/.riot/requirements/dd68acc.txt b/.riot/requirements/dd68acc.txt new file mode 100644 index 00000000000..8eda9971324 --- /dev/null +++ b/.riot/requirements/dd68acc.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/dd68acc.in +# +async-timeout==5.0.1 +attrs==24.3.0 +coverage[toml]==7.6.10 +exceptiongroup==1.2.2 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.23.7 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +sortedcontainers==2.4.0 +tomli==2.2.1 +valkey==6.0.2 diff --git a/benchmarks/bm/utils.py b/benchmarks/bm/utils.py index dd7b4991c57..13e99e8be74 100644 --- a/benchmarks/bm/utils.py +++ b/benchmarks/bm/utils.py @@ -65,7 +65,7 @@ def process_trace(self, trace): def drop_traces(tracer): - tracer.configure(settings={"FILTERS": [_DropTraces()]}) + tracer.configure(trace_processors=[_DropTraces()]) def drop_telemetry_events(): diff --git a/benchmarks/rate_limiter/scenario.py b/benchmarks/rate_limiter/scenario.py index 5210647ef89..3388af1cfb8 100644 --- a/benchmarks/rate_limiter/scenario.py +++ b/benchmarks/rate_limiter/scenario.py @@ -23,8 +23,8 @@ def _(loops): windows = [start + (i * self.time_window) for i in range(self.num_windows)] per_window = math.floor(loops / self.num_windows) - for window in windows: + for _ in windows: for _ in range(per_window): - rate_limiter.is_allowed(window) + rate_limiter.is_allowed() yield _ diff --git a/ddtrace/_monkey.py b/ddtrace/_monkey.py index fad8c8f4d2b..9261af555d7 100644 --- 
a/ddtrace/_monkey.py +++ b/ddtrace/_monkey.py @@ -105,6 +105,7 @@ "unittest": True, "coverage": False, "selenium": True, + "valkey": True, } diff --git a/ddtrace/_trace/pin.py b/ddtrace/_trace/pin.py index dd41a1040a1..e27640a993e 100644 --- a/ddtrace/_trace/pin.py +++ b/ddtrace/_trace/pin.py @@ -6,7 +6,6 @@ import wrapt import ddtrace -from ddtrace.vendor.debtcollector import deprecate from ..internal.logger import get_logger @@ -32,25 +31,17 @@ class Pin(object): >>> conn = sqlite.connect('/tmp/image.db') """ - __slots__ = ["tags", "tracer", "_target", "_config", "_initialized"] + __slots__ = ["tags", "_tracer", "_target", "_config", "_initialized"] def __init__( self, service=None, # type: Optional[str] tags=None, # type: Optional[Dict[str, str]] - tracer=None, _config=None, # type: Optional[Dict[str, Any]] ): # type: (...) -> None - if tracer is not None and tracer is not ddtrace.tracer: - deprecate( - "Initializing ddtrace.trace.Pin with `tracer` argument is deprecated", - message="All Pin instances should use the global tracer instance", - removal_version="3.0.0", - ) - tracer = tracer or ddtrace.tracer self.tags = tags - self.tracer = tracer + self._tracer = ddtrace.tracer self._target = None # type: Optional[int] # keep the configuration attribute internal because the # public API to access it is not the Pin class @@ -68,10 +59,14 @@ def service(self): return self._config["service_name"] def __setattr__(self, name, value): - if getattr(self, "_initialized", False) and name != "_target": + if getattr(self, "_initialized", False) and name not in ("_target", "_tracer"): raise AttributeError("can't mutate a pin, use override() or clone() instead") super(Pin, self).__setattr__(name, value) + @property + def tracer(self): + return self._tracer + def __repr__(self): return "Pin(service=%s, tags=%s, tracer=%s)" % (self.service, self.tags, self.tracer) @@ -127,7 +122,6 @@ def override( obj, # type: Any service=None, # type: Optional[str] tags=None, # type: Optional[Dict[str, str]] - tracer=None, ): # type: (...) -> None """Override an object with the given attributes. @@ -139,20 +133,32 @@ def override( >>> # Override a pin for a specific connection >>> Pin.override(conn, service='user-db') """ - if tracer is not None: - deprecate( - "Calling ddtrace.trace.Pin.override(...) with the `tracer` argument is deprecated", - message="All Pin instances should use the global tracer instance", - removal_version="3.0.0", - ) + Pin._override(obj, service=service, tags=tags) + + @classmethod + def _override( + cls, + obj, # type: Any + service=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + tracer=None, + ): + # type: (...) -> None + """ + Internal method that allows overriding the global tracer in tests + """ if not obj: return pin = cls.get_from(obj) if pin is None: - Pin(service=service, tags=tags, tracer=tracer).onto(obj) + pin = Pin(service=service, tags=tags) else: - pin.clone(service=service, tags=tags, tracer=tracer).onto(obj) + pin = pin.clone(service=service, tags=tags) + + if tracer: + pin._tracer = tracer + pin.onto(obj) def enabled(self): # type: () -> bool @@ -198,21 +204,22 @@ def clone( self, service=None, # type: Optional[str] tags=None, # type: Optional[Dict[str, str]] - tracer=None, ): # type: (...) 
-> Pin """Return a clone of the pin with the given attributes replaced.""" + return self._clone(service=service, tags=tags) + + def _clone( + self, + service=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + tracer=None, + ): + """Internal method that can clone the tracer from an existing Pin. This is used in tests""" # do a shallow copy of Pin dicts if not tags and self.tags: tags = self.tags.copy() - if tracer is not None: - deprecate( - "Initializing ddtrace.trace.Pin with `tracer` argument is deprecated", - message="All Pin instances should use the global tracer instance", - removal_version="3.0.0", - ) - # we use a copy instead of a deepcopy because we expect configurations # to have only a root level dictionary without nested objects. Using # deepcopy introduces a big overhead: @@ -221,9 +228,10 @@ def clone( # deepcopy: 0.2787208557128906 config = self._config.copy() - return Pin( + pin = Pin( service=service or self.service, tags=tags, - tracer=tracer or self.tracer, # do not clone the Tracer _config=config, ) + pin._tracer = tracer or self.tracer + return pin diff --git a/ddtrace/_trace/sampling_rule.py b/ddtrace/_trace/sampling_rule.py index 532a0b71f51..482a95d403a 100644 --- a/ddtrace/_trace/sampling_rule.py +++ b/ddtrace/_trace/sampling_rule.py @@ -8,8 +8,6 @@ from ddtrace.internal.glob_matching import GlobMatcher from ddtrace.internal.logger import get_logger from ddtrace.internal.utils.cache import cachedmethod -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning -from ddtrace.vendor.debtcollector import deprecate if TYPE_CHECKING: # pragma: no cover @@ -210,14 +208,12 @@ def choose_matcher(self, prop): # We currently support the ability to pass in a function, a regular expression, or a string # If a string is passed in we create a GlobMatcher to handle the matching if callable(prop) or isinstance(prop, pattern_type): - # deprecated: passing a function or a regular expression' - deprecate( - "Using methods or regular expressions for SamplingRule matching is deprecated. ", - message="Please move to passing in a string for Glob matching.", - removal_version="3.0.0", - category=DDTraceDeprecationWarning, + log.error( + "Using methods or regular expressions for SamplingRule matching is not supported: %s ." + "Please move to passing in a string for Glob matching.", + str(prop), ) - return prop + return "None" # Name and Resource will never be None, but service can be, since we str() # whatever we pass into the GlobMatcher, we can just use its matching elif prop is None: diff --git a/ddtrace/_trace/span.py b/ddtrace/_trace/span.py index 446239a8091..c6eb4d4b72a 100644 --- a/ddtrace/_trace/span.py +++ b/ddtrace/_trace/span.py @@ -52,8 +52,6 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.sampling import SamplingMechanism from ddtrace.internal.sampling import set_sampling_decision_maker -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning -from ddtrace.vendor.debtcollector import deprecate _NUMERIC_TAGS = (_ANALYTICS_SAMPLE_RATE_KEY,) @@ -279,29 +277,6 @@ def duration(self) -> Optional[float]: def duration(self, value: float) -> None: self.duration_ns = int(value * 1e9) - @property - def sampled(self) -> Optional[bool]: - deprecate( - "span.sampled is deprecated and will be removed in a future version of the tracer.", - message="""span.sampled references the state of span.context.sampling_priority. 
- Please use span.context.sampling_priority instead to check if a span is sampled.""", - category=DDTraceDeprecationWarning, - ) - if self.context.sampling_priority is None: - # this maintains original span.sampled behavior, where all spans would start - # with span.sampled = True until sampling runs - return True - return self.context.sampling_priority > 0 - - @sampled.setter - def sampled(self, value: bool) -> None: - deprecate( - "span.sampled is deprecated and will be removed in a future version of the tracer.", - message="""span.sampled has a no-op setter. - Please use span.set_tag('manual.keep'/'manual.drop') to keep or drop spans.""", - category=DDTraceDeprecationWarning, - ) - def finish(self, finish_time: Optional[float] = None) -> None: """Mark the end time of the span and submit it to the tracer. If the span has already been finished don't do anything. diff --git a/ddtrace/_trace/trace_handlers.py b/ddtrace/_trace/trace_handlers.py index e0d99c0d020..d636a89c187 100644 --- a/ddtrace/_trace/trace_handlers.py +++ b/ddtrace/_trace/trace_handlers.py @@ -688,6 +688,11 @@ def _on_redis_command_post(ctx: core.ExecutionContext, rowcount): ctx.span.set_metric(db.ROWCOUNT, rowcount) +def _on_valkey_command_post(ctx: core.ExecutionContext, rowcount): + if rowcount is not None: + ctx.span.set_metric(db.ROWCOUNT, rowcount) + + def _on_test_visibility_enable(config) -> None: from ddtrace.internal.ci_visibility import CIVisibility @@ -797,6 +802,8 @@ def listen(): core.on("botocore.kinesis.GetRecords.post", _on_botocore_kinesis_getrecords_post) core.on("redis.async_command.post", _on_redis_command_post) core.on("redis.command.post", _on_redis_command_post) + core.on("valkey.async_command.post", _on_valkey_command_post) + core.on("valkey.command.post", _on_valkey_command_post) core.on("azure.functions.request_call_modifier", _on_azure_functions_request_span_modifier) core.on("azure.functions.start_response", _on_azure_functions_start_response) @@ -838,6 +845,7 @@ def listen(): "botocore.patched_stepfunctions_api_call", "botocore.patched_bedrock_api_call", "redis.command", + "valkey.command", "rq.queue.enqueue_job", "rq.traced_queue_fetch_job", "rq.worker.perform_job", diff --git a/ddtrace/_trace/tracer.py b/ddtrace/_trace/tracer.py index 87f312bb18c..9dac36c175e 100644 --- a/ddtrace/_trace/tracer.py +++ b/ddtrace/_trace/tracer.py @@ -24,6 +24,7 @@ from ddtrace._trace.processor import TraceProcessor from ddtrace._trace.processor import TraceSamplingProcessor from ddtrace._trace.processor import TraceTagsProcessor +from ddtrace._trace.provider import BaseContextProvider from ddtrace._trace.provider import DefaultContextProvider from ddtrace._trace.sampler import BasePrioritySampler from ddtrace._trace.sampler import BaseSampler @@ -58,7 +59,6 @@ from ddtrace.internal.serverless import in_gcp_function from ddtrace.internal.service import ServiceStatusError from ddtrace.internal.utils import _get_metas_to_propagate -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning from ddtrace.internal.utils.formats import format_trace_id from ddtrace.internal.utils.http import verify_url from ddtrace.internal.writer import AgentResponse @@ -68,7 +68,6 @@ from ddtrace.settings import Config from ddtrace.settings.asm import config as asm_config from ddtrace.settings.peer_service import _ps_config -from ddtrace.vendor.debtcollector import deprecate log = get_logger(__name__) @@ -200,7 +199,7 @@ def __init__( self, url: Optional[str] = None, dogstatsd_url: Optional[str] = None, - 
context_provider: Optional[DefaultContextProvider] = None, + context_provider: Optional[BaseContextProvider] = None, ) -> None: """ Create a new ``Tracer`` instance. A global tracer is already initialized @@ -216,16 +215,8 @@ def __init__( if Tracer._instance is None: Tracer._instance = self else: - # ddtrace library does not support context propagation for multiple tracers. - # All instances of ddtrace ContextProviders share the same ContextVars. This means that - # if you create multiple instances of Tracer, spans will be shared between them creating a - # broken experience. - # TODO(mabdinur): Convert this warning to an ValueError in 3.0.0 - deprecate( - "Support for multiple Tracer instances is deprecated", - ". Use ddtrace.tracer instead.", - category=DDTraceDeprecationWarning, - removal_version="3.0.0", + log.error( + "Multiple Tracer instances can not be initialized. Use ``ddtrace.trace.tracer`` instead.", ) self._user_trace_processors: List[TraceProcessor] = [] @@ -328,28 +319,6 @@ def sample(self, span): else: log.error("No sampler available to sample span") - @property - def sampler(self): - deprecate( - "tracer.sampler is deprecated and will be removed.", - message="To manually sample call tracer.sample(span) instead.", - category=DDTraceDeprecationWarning, - ) - return self._sampler - - @sampler.setter - def sampler(self, value): - deprecate( - "Setting a custom sampler is deprecated and will be removed.", - message="""Please use DD_TRACE_SAMPLING_RULES to configure the sampler instead: - https://ddtrace.readthedocs.io/en/stable/configuration.html#DD_TRACE_SAMPLING_RULES""", - category=DDTraceDeprecationWarning, - ) - if asm_config._apm_opt_out: - log.warning("Cannot set a custom sampler with Standalone ASM mode") - return - self._sampler = value - def on_start_span(self, func: Callable) -> Callable: """Register a function to execute when a span start. @@ -441,21 +410,7 @@ def get_log_correlation_context(self, active: Optional[Union[Context, Span]] = N def configure( self, - enabled: Optional[bool] = None, - hostname: Optional[str] = None, - port: Optional[int] = None, - uds_path: Optional[str] = None, - https: Optional[bool] = None, - sampler: Optional[BaseSampler] = None, - context_provider: Optional[DefaultContextProvider] = None, - wrap_executor: Optional[Callable] = None, - priority_sampling: Optional[bool] = None, - settings: Optional[Dict[str, Any]] = None, - dogstatsd_url: Optional[str] = None, - writer: Optional[TraceWriter] = None, - partial_flush_enabled: Optional[bool] = None, - partial_flush_min_spans: Optional[int] = None, - api_version: Optional[str] = None, + context_provider: Optional[BaseContextProvider] = None, compute_stats_enabled: Optional[bool] = None, appsec_enabled: Optional[bool] = None, iast_enabled: Optional[bool] = None, @@ -472,58 +427,14 @@ def configure( :param bool appsec_standalone_enabled: When tracing is disabled ensures ASM support is still enabled. :param List[TraceProcessor] trace_processors: This parameter sets TraceProcessor (ex: TraceFilters). Trace processors are used to modify and filter traces based on certain criteria. - - :param bool enabled: If True, finished traces will be submitted to the API, else they'll be dropped. - This parameter is deprecated and will be removed. - :param str hostname: Hostname running the Trace Agent. This parameter is deprecated and will be removed. - :param int port: Port of the Trace Agent. This parameter is deprecated and will be removed. 
- :param str uds_path: The Unix Domain Socket path of the agent. This parameter is deprecated and will be removed. - :param bool https: Whether to use HTTPS or HTTP. This parameter is deprecated and will be removed. - :param object sampler: A custom Sampler instance, locally deciding to totally drop the trace or not. - This parameter is deprecated and will be removed. - :param object wrap_executor: callable that is used when a function is decorated with - ``Tracer.wrap()``. This is an advanced option that usually doesn't need to be changed - from the default value. This parameter is deprecated and will be removed. - :param priority_sampling: This parameter is deprecated and will be removed in a future version. - :param bool settings: This parameter is deprecated and will be removed. - :param str dogstatsd_url: URL for UDP or Unix socket connection to DogStatsD - This parameter is deprecated and will be removed. - :param TraceWriter writer: This parameter is deprecated and will be removed. - :param bool partial_flush_enabled: This parameter is deprecated and will be removed. - :param bool partial_flush_min_spans: This parameter is deprecated and will be removed. - :param str api_version: This parameter is deprecated and will be removed. - :param bool compute_stats_enabled: This parameter is deprecated and will be removed. """ - if settings is not None: - deprecate( - "Support for ``tracer.configure(...)`` with the settings parameter is deprecated", - message="Please use the trace_processors parameter instead of settings['FILTERS'].", - version="3.0.0", - category=DDTraceDeprecationWarning, - ) - trace_processors = (trace_processors or []) + (settings.get("FILTERS") or []) - return self._configure( - enabled, - hostname, - port, - uds_path, - https, - sampler, - context_provider, - wrap_executor, - priority_sampling, - trace_processors, - dogstatsd_url, - writer, - partial_flush_enabled, - partial_flush_min_spans, - api_version, - compute_stats_enabled, - appsec_enabled, - iast_enabled, - appsec_standalone_enabled, - True, + context_provider=context_provider, + trace_processors=trace_processors, + compute_stats_enabled=compute_stats_enabled, + appsec_enabled=appsec_enabled, + iast_enabled=iast_enabled, + appsec_standalone_enabled=appsec_standalone_enabled, ) def _configure( @@ -534,7 +445,7 @@ def _configure( uds_path: Optional[str] = None, https: Optional[bool] = None, sampler: Optional[BaseSampler] = None, - context_provider: Optional[DefaultContextProvider] = None, + context_provider: Optional[BaseContextProvider] = None, wrap_executor: Optional[Callable] = None, priority_sampling: Optional[bool] = None, trace_processors: Optional[List[TraceProcessor]] = None, @@ -547,48 +458,18 @@ def _configure( appsec_enabled: Optional[bool] = None, iast_enabled: Optional[bool] = None, appsec_standalone_enabled: Optional[bool] = None, - log_deprecations: bool = False, ) -> None: if enabled is not None: self.enabled = enabled - if log_deprecations: - deprecate( - "Enabling/Disabling tracing after application start is deprecated", - message="Please use DD_TRACE_ENABLED instead.", - version="3.0.0", - category=DDTraceDeprecationWarning, - ) - - if priority_sampling is not None and log_deprecations: - deprecate( - "Disabling priority sampling is deprecated", - message="Calling `tracer.configure(priority_sampling=....) 
has no effect", - version="3.0.0", - category=DDTraceDeprecationWarning, - ) if trace_processors is not None: self._user_trace_processors = trace_processors if partial_flush_enabled is not None: self._partial_flush_enabled = partial_flush_enabled - if log_deprecations: - deprecate( - "Configuring partial flushing after application start is deprecated", - message="Please use DD_TRACE_PARTIAL_FLUSH_ENABLED to enable/disable the partial flushing instead.", - version="3.0.0", - category=DDTraceDeprecationWarning, - ) if partial_flush_min_spans is not None: self._partial_flush_min_spans = partial_flush_min_spans - if log_deprecations: - deprecate( - "Configuring partial flushing after application start is deprecated", - message="Please use DD_TRACE_PARTIAL_FLUSH_MIN_SPANS to set the flushing threshold instead.", - version="3.0.0", - category=DDTraceDeprecationWarning, - ) if appsec_enabled is not None: asm_config._asm_enabled = appsec_enabled @@ -620,33 +501,11 @@ def _configure( if sampler is not None: self._sampler = sampler self._user_sampler = self._sampler - if log_deprecations: - deprecate( - "Configuring custom samplers is deprecated", - message="Please use DD_TRACE_SAMPLING_RULES to configure the sample rates instead", - category=DDTraceDeprecationWarning, - removal_version="3.0.0", - ) if dogstatsd_url is not None: - if log_deprecations: - deprecate( - "Configuring dogstatsd_url after application start is deprecated", - message="Please use DD_DOGSTATSD_URL instead.", - version="3.0.0", - category=DDTraceDeprecationWarning, - ) self._dogstatsd_url = dogstatsd_url if any(x is not None for x in [hostname, port, uds_path, https]): - if log_deprecations: - deprecate( - "Configuring tracer agent connection after application start is deprecated", - message="Please use DD_TRACE_AGENT_URL instead.", - version="3.0.0", - category=DDTraceDeprecationWarning, - ) - # If any of the parts of the URL have updated, merge them with # the previous writer values. prev_url_parsed = compat.parse.urlparse(self._agent_url) @@ -670,13 +529,6 @@ def _configure( new_url = None if compute_stats_enabled is not None: - if log_deprecations: - deprecate( - "Configuring tracer stats computation after application start is deprecated", - message="Please use DD_TRACE_STATS_COMPUTATION_ENABLED instead.", - version="3.0.0", - category=DDTraceDeprecationWarning, - ) self._compute_stats = compute_stats_enabled try: @@ -685,14 +537,6 @@ def _configure( # It's possible the writer never got started pass - if api_version is not None and log_deprecations: - deprecate( - "Configuring Tracer API version after application start is deprecated", - message="Please use DD_TRACE_API_VERSION instead.", - version="3.0.0", - category=DDTraceDeprecationWarning, - ) - if writer is not None: self._writer = writer elif any(x is not None for x in [new_url, api_version, sampler, dogstatsd_url, appsec_enabled]): @@ -754,12 +598,6 @@ def _configure( if wrap_executor is not None: self._wrap_executor = wrap_executor - if log_deprecations: - deprecate( - "Support for tracer.configure(...) 
with the wrap_executor parameter is deprecated",
-                    version="3.0.0",
-                    category=DDTraceDeprecationWarning,
-                )

         self._generate_diagnostic_logs()

@@ -1344,7 +1182,7 @@ def _handle_sampler_update(self, cfg: Config) -> None:
             and self._user_sampler
         ):
             # if we get empty configs from rc for both sample rate and rules, we should revert to the user sampler
-            self.sampler = self._user_sampler
+            self._sampler = self._user_sampler
             return

         if cfg._get_source("_trace_sample_rate") != "remote_config" and self._user_sampler:
diff --git a/ddtrace/_trace/utils_valkey.py b/ddtrace/_trace/utils_valkey.py
new file mode 100644
index 00000000000..ed996e885a3
--- /dev/null
+++ b/ddtrace/_trace/utils_valkey.py
@@ -0,0 +1,96 @@
+"""
+Some utils used by the ddtrace valkey integration
+"""
+
+from contextlib import contextmanager
+from typing import List
+from typing import Optional
+
+from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY
+from ddtrace.constants import _SPAN_MEASURED_KEY
+from ddtrace.constants import SPAN_KIND
+from ddtrace.contrib import trace_utils
+from ddtrace.contrib.internal.valkey_utils import _extract_conn_tags
+from ddtrace.ext import SpanKind
+from ddtrace.ext import SpanTypes
+from ddtrace.ext import db
+from ddtrace.ext import valkey as valkeyx
+from ddtrace.internal import core
+from ddtrace.internal.constants import COMPONENT
+from ddtrace.internal.schema import schematize_cache_operation
+from ddtrace.internal.utils.formats import stringify_cache_args
+
+
+format_command_args = stringify_cache_args
+
+
+def _set_span_tags(
+    span, pin, config_integration, args: Optional[List], instance, query: Optional[str], is_cluster: bool = False
+):
+    span.set_tag_str(SPAN_KIND, SpanKind.CLIENT)
+    span.set_tag_str(COMPONENT, config_integration.integration_name)
+    span.set_tag_str(db.SYSTEM, valkeyx.APP)
+    span.set_tag(_SPAN_MEASURED_KEY)
+    if query is not None:
+        span_name = schematize_cache_operation(valkeyx.RAWCMD, cache_provider=valkeyx.APP)  # type: ignore[operator]
+        span.set_tag_str(span_name, query)
+    if pin.tags:
+        span.set_tags(pin.tags)
+    # some valkey clients do not have a connection_pool attribute (ex.
aiovalkey v1.3) + if not is_cluster and hasattr(instance, "connection_pool"): + span.set_tags(_extract_conn_tags(instance.connection_pool.connection_kwargs)) + if args is not None: + span.set_metric(valkeyx.ARGS_LEN, len(args)) + else: + for attr in ("command_stack", "_command_stack"): + if hasattr(instance, attr): + span.set_metric(valkeyx.PIPELINE_LEN, len(getattr(instance, attr))) + # set analytics sample rate if enabled + span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, config_integration.get_analytics_sample_rate()) + + +@contextmanager +def _instrument_valkey_cmd(pin, config_integration, instance, args): + query = stringify_cache_args(args, cmd_max_len=config_integration.cmd_max_length) + with core.context_with_data( + "valkey.command", + span_name=schematize_cache_operation(valkeyx.CMD, cache_provider=valkeyx.APP), + pin=pin, + service=trace_utils.ext_service(pin, config_integration), + span_type=SpanTypes.VALKEY, + resource=query.split(" ")[0] if config_integration.resource_only_command else query, + ) as ctx, ctx.span as span: + _set_span_tags(span, pin, config_integration, args, instance, query) + yield ctx + + +@contextmanager +def _instrument_valkey_execute_pipeline(pin, config_integration, cmds, instance, is_cluster=False): + cmd_string = resource = "\n".join(cmds) + if config_integration.resource_only_command: + resource = "\n".join([cmd.split(" ")[0] for cmd in cmds]) + + with pin.tracer.trace( + schematize_cache_operation(valkeyx.CMD, cache_provider=valkeyx.APP), + resource=resource, + service=trace_utils.ext_service(pin, config_integration), + span_type=SpanTypes.VALKEY, + ) as span: + _set_span_tags(span, pin, config_integration, None, instance, cmd_string) + yield span + + +@contextmanager +def _instrument_valkey_execute_async_cluster_pipeline(pin, config_integration, cmds, instance): + cmd_string = resource = "\n".join(cmds) + if config_integration.resource_only_command: + resource = "\n".join([cmd.split(" ")[0] for cmd in cmds]) + + with pin.tracer.trace( + schematize_cache_operation(valkeyx.CMD, cache_provider=valkeyx.APP), + resource=resource, + service=trace_utils.ext_service(pin, config_integration), + span_type=SpanTypes.VALKEY, + ) as span: + _set_span_tags(span, pin, config_integration, None, instance, cmd_string) + yield span diff --git a/ddtrace/appsec/_iast/_handlers.py b/ddtrace/appsec/_iast/_handlers.py index cf60fc610be..bcd913085f4 100644 --- a/ddtrace/appsec/_iast/_handlers.py +++ b/ddtrace/appsec/_iast/_handlers.py @@ -82,23 +82,28 @@ def _on_flask_patch(flask_version): "Headers.items", functools.partial(if_iast_taint_yield_tuple_for, (OriginType.HEADER_NAME, OriginType.HEADER)), ) - _set_metric_iast_instrumented_source(OriginType.HEADER_NAME) - _set_metric_iast_instrumented_source(OriginType.HEADER) try_wrap_function_wrapper( "werkzeug.datastructures", - "ImmutableMultiDict.__getitem__", - functools.partial(if_iast_taint_returned_object_for, OriginType.PARAMETER), + "EnvironHeaders.__getitem__", + functools.partial(if_iast_taint_returned_object_for, OriginType.HEADER), ) - _set_metric_iast_instrumented_source(OriginType.PARAMETER) - + # Since werkzeug 3.1.0 get doesn't call to __getitem__ try_wrap_function_wrapper( "werkzeug.datastructures", - "EnvironHeaders.__getitem__", + "EnvironHeaders.get", functools.partial(if_iast_taint_returned_object_for, OriginType.HEADER), ) + _set_metric_iast_instrumented_source(OriginType.HEADER_NAME) _set_metric_iast_instrumented_source(OriginType.HEADER) + try_wrap_function_wrapper( + "werkzeug.datastructures", + 
"ImmutableMultiDict.__getitem__", + functools.partial(if_iast_taint_returned_object_for, OriginType.PARAMETER), + ) + _set_metric_iast_instrumented_source(OriginType.PARAMETER) + if flask_version >= (2, 0, 0): # instance.query_string: raising an error on werkzeug/_internal.py "AttributeError: read only property" try_wrap_function_wrapper("werkzeug.wrappers.request", "Request.__init__", _on_request_init) diff --git a/ddtrace/appsec/_iast/taint_sinks/xss.py b/ddtrace/appsec/_iast/taint_sinks/xss.py index 425affac77a..6f7d263f7c2 100644 --- a/ddtrace/appsec/_iast/taint_sinks/xss.py +++ b/ddtrace/appsec/_iast/taint_sinks/xss.py @@ -52,6 +52,18 @@ def patch(): _iast_django_xss, ) + try_wrap_function_wrapper( + "jinja2.filters", + "do_mark_safe", + _iast_jinja2_xss, + ) + try_wrap_function_wrapper( + "flask", + "render_template_string", + _iast_jinja2_xss, + ) + + _set_metric_iast_instrumented_sink(VULN_XSS) _set_metric_iast_instrumented_sink(VULN_XSS) @@ -70,6 +82,12 @@ def _iast_django_xss(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) +def _iast_jinja2_xss(wrapped, instance, args, kwargs): + if args and len(args) >= 1: + _iast_report_xss(args[0]) + return wrapped(*args, **kwargs) + + def _iast_report_xss(code_string: Text): increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, XSS.vulnerability_type) _set_metric_iast_executed_sink(XSS.vulnerability_type) diff --git a/ddtrace/appsec/_python_info/stdlib/__init__.py b/ddtrace/appsec/_python_info/stdlib/__init__.py index e745c392f55..8b220b0af85 100644 --- a/ddtrace/appsec/_python_info/stdlib/__init__.py +++ b/ddtrace/appsec/_python_info/stdlib/__init__.py @@ -3,11 +3,7 @@ from sys import version_info -if version_info < (3, 7, 0): - from .module_names_py36 import STDLIB_MODULE_NAMES -elif version_info < (3, 8, 0): - from .module_names_py37 import STDLIB_MODULE_NAMES -elif version_info < (3, 9, 0): +if version_info < (3, 9, 0): from .module_names_py38 import STDLIB_MODULE_NAMES elif version_info < (3, 10, 0): from .module_names_py39 import STDLIB_MODULE_NAMES diff --git a/ddtrace/contrib/_langchain.py b/ddtrace/contrib/_langchain.py index d36cd76f3f1..4d419cc5d5c 100644 --- a/ddtrace/contrib/_langchain.py +++ b/ddtrace/contrib/_langchain.py @@ -1,9 +1,8 @@ """ -The LangChain integration instruments the LangChain Python library to emit metrics, -traces, and logs (logs are disabled by default) for requests made to the LLMs, +The LangChain integration instruments the LangChain Python library to emit traces for requests made to the LLMs, chat models, embeddings, chains, and vector store interfaces. -All metrics, logs, and traces submitted from the LangChain integration are tagged by: +All traces submitted from the LangChain integration are tagged by: - ``service``, ``env``, ``version``: see the `Unified Service Tagging docs `_. - ``langchain.request.provider``: LLM provider used in the request. @@ -26,58 +25,6 @@ - Total cost metrics for OpenAI requests -Metrics -~~~~~~~ - -The following metrics are collected by default by the LangChain integration. - -.. important:: - If the Agent is configured to use a non-default Statsd hostname or port, use ``DD_DOGSTATSD_URL`` to configure - ``ddtrace`` to use it. - - -.. py:data:: langchain.request.duration - - The duration of the LangChain request in seconds. - - Type: ``distribution`` - - -.. py:data:: langchain.request.error - - The number of errors from requests made with LangChain. - - Type: ``count`` - - -.. 
py:data:: langchain.tokens.prompt - - The number of tokens used in the prompt of a LangChain request. - - Type: ``distribution`` - - -.. py:data:: langchain.tokens.completion - - The number of tokens used in the completion of a LangChain response. - - Type: ``distribution`` - - -.. py:data:: langchain.tokens.total - - The total number of tokens used in the prompt and completion of a LangChain request/response. - - Type: ``distribution`` - - -.. py:data:: langchain.tokens.total_cost - - The estimated cost in USD based on token usage. - - Type: ``count`` - - (beta) Prompt and Completion Sampling ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -89,18 +36,6 @@ - Prompt inputs, chain inputs, and outputs for the ``Chain`` interface. - Query inputs and document outputs for the ``VectorStore`` interface. -Prompt and message inputs and completions can also be emitted as log data. -Logs are **not** emitted by default. When logs are enabled they are sampled at ``0.1``. - -Read the **Global Configuration** section for information about enabling logs and configuring sampling -rates. - -.. important:: - - To submit logs, you must set the ``DD_API_KEY`` environment variable. - - Set ``DD_SITE`` to send logs to a Datadog site such as ``datadoghq.eu``. The default is ``datadoghq.com``. - Enabling ~~~~~~~~ @@ -143,32 +78,6 @@ Default: ``DD_SERVICE`` -.. py:data:: ddtrace.config.langchain["logs_enabled"] - - Enable collection of prompts and completions as logs. You can adjust the rate of prompts and completions collected - using the sample rate configuration described below. - - Alternatively, you can set this option with the ``DD_LANGCHAIN_LOGS_ENABLED`` environment - variable. - - Note that you must set the ``DD_API_KEY`` environment variable to enable sending logs. - - Default: ``False`` - - -.. py:data:: ddtrace.config.langchain["metrics_enabled"] - - Enable collection of LangChain metrics. - - If the Datadog Agent is configured to use a non-default Statsd hostname - or port, use ``DD_DOGSTATSD_URL`` to configure ``ddtrace`` to use it. - - Alternatively, you can set this option with the ``DD_LANGCHAIN_METRICS_ENABLED`` environment - variable. - - Default: ``True`` - - .. py:data:: (beta) ddtrace.config.langchain["span_char_limit"] Configure the maximum number of characters for the following data within span tags: @@ -195,14 +104,4 @@ Default: ``1.0`` - -.. py:data:: (beta) ddtrace.config.langchain["log_prompt_completion_sample_rate"] - - Configure the sample rate for the collection of prompts and completions as logs. - - Alternatively, you can set this option with the ``DD_LANGCHAIN_LOG_PROMPT_COMPLETION_SAMPLE_RATE`` environment - variable. - - Default: ``0.1`` - """ # noqa: E501 diff --git a/ddtrace/contrib/_openai.py b/ddtrace/contrib/_openai.py index 8e2eb87aeb5..0642bbb0881 100644 --- a/ddtrace/contrib/_openai.py +++ b/ddtrace/contrib/_openai.py @@ -1,10 +1,8 @@ """ -The OpenAI integration instruments the OpenAI Python library to emit metrics, -traces, and logs (logs are disabled by default) for requests made to the models, -completions, chat completions, edits, images, embeddings, audio, files, fine-tunes, -and moderations endpoints. +The OpenAI integration instruments the OpenAI Python library to emit traces for requests made to the models, +completions, chat completions, images, embeddings, audio, files, and moderations endpoints. 
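The rewritten introduction above narrows this module's documented output to traces only. A minimal sketch of what enabling the trace-only integration looks like after this change (``patch(openai=True)`` is the standard ddtrace entry point; ``span_char_limit`` is the beta knob retained further down in this file):

from ddtrace import config, patch

patch(openai=True)  # instrument the installed openai package; emits traces only after this change
config.openai["span_char_limit"] = 256  # beta: cap prompt/completion text stored in span tags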
-All metrics, logs, and traces submitted from the OpenAI integration are tagged by: +All traces submitted from the OpenAI integration are tagged by: - ``service``, ``env``, ``version``: see the `Unified Service Tagging docs `_. - ``openai.request.endpoint``: OpenAI API endpoint used in the request. @@ -15,84 +13,6 @@ - ``openai.user.api_key``: OpenAI API key used to make the request (obfuscated to match the OpenAI UI representation ``sk-...XXXX`` where ``XXXX`` is the last 4 digits of the key). -Metrics -~~~~~~~ - -The following metrics are collected by default by the OpenAI integration. - -.. important:: - If the Agent is configured to use a non-default Statsd hostname or port, use ``DD_DOGSTATSD_URL`` to configure - ``ddtrace`` to use it. - - -.. important:: - Ratelimit and token metrics only reflect usage of the supported completions, chat completions, and embedding - endpoints. Usage of other OpenAI endpoints will not be recorded as they are not provided. - - -.. py:data:: openai.request.duration - - The duration of the OpenAI request in seconds. - - Type: ``distribution`` - - -.. py:data:: openai.request.error - - The number of errors from requests made to OpenAI. - - Type: ``count`` - - -.. py:data:: openai.ratelimit.requests - - The maximum number of OpenAI requests permitted before exhausting the rate limit. - - Type: ``gauge`` - - -.. py:data:: openai.ratelimit.tokens - - The maximum number of OpenAI tokens permitted before exhausting the rate limit. - - Type: ``gauge`` - - -.. py:data:: openai.ratelimit.remaining.requests - - The remaining number of OpenAI requests permitted before exhausting the rate limit. - - Type: ``gauge`` - - -.. py:data:: openai.ratelimit.remaining.tokens - - The remaining number of OpenAI tokens permitted before exhausting the rate limit. - - Type: ``gauge`` - - -.. py:data:: openai.tokens.prompt - - The number of tokens used in the prompt of an OpenAI request. - - Type: ``distribution`` - - -.. py:data:: openai.tokens.completion - - The number of tokens used in the completion of a OpenAI response. - - Type: ``distribution`` - - -.. py:data:: openai.tokens.total - - The total number of tokens used in the prompt and completion of a OpenAI request/response. - - Type: ``distribution`` - - (beta) Prompt and Completion Sampling ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -101,22 +21,9 @@ - Prompt inputs and completions for the ``completions`` endpoint. - Message inputs and completions for the ``chat.completions`` endpoint. - Embedding inputs for the ``embeddings`` endpoint. -- Edit inputs, instructions, and completions for the ``edits`` endpoint. - Image input filenames and completion URLs for the ``images`` endpoint. - Audio input filenames and completions for the ``audio`` endpoint. -Prompt and message inputs and completions can also be emitted as log data. -Logs are **not** emitted by default. When logs are enabled they are sampled at ``0.1``. - -Read the **Global Configuration** section for information about enabling logs and configuring sampling -rates. - -.. important:: - - To submit logs, you must set the ``DD_API_KEY`` environment variable. - - Set ``DD_SITE`` to send logs to a Datadog site such as ``datadoghq.eu``. The default is ``datadoghq.com``. - (beta) Streamed Responses Support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -172,32 +79,6 @@ Default: ``DD_SERVICE`` -.. py:data:: ddtrace.config.openai["logs_enabled"] - - Enable collection of prompts and completions as logs. 
You can adjust the rate of prompts and completions collected
-    using the sample rate configuration described below.
-
-    Alternatively, you can set this option with the ``DD_OPENAI_LOGS_ENABLED`` environment
-    variable.
-
-    Note that you must set the ``DD_API_KEY`` environment variable to enable sending logs.
-
-    Default: ``False``
-
-
-.. py:data:: ddtrace.config.openai["metrics_enabled"]
-
-    Enable collection of OpenAI metrics.
-
-    If the Datadog Agent is configured to use a non-default Statsd hostname
-    or port, use ``DD_DOGSTATSD_URL`` to configure ``ddtrace`` to use it.
-
-    Alternatively, you can set this option with the ``DD_OPENAI_METRICS_ENABLED`` environment
-    variable.
-
-    Default: ``True``
-
-
 .. py:data:: (beta) ddtrace.config.openai["span_char_limit"]

     Configure the maximum number of characters for the following data within span tags:
@@ -225,16 +106,6 @@

     Default: ``1.0``

-.. py:data:: (beta) ddtrace.config.openai["log_prompt_completion_sample_rate"]
-
-    Configure the sample rate for the collection of prompts and completions as logs.
-
-    Alternatively, you can set this option with the ``DD_OPENAI_LOG_PROMPT_COMPLETION_SAMPLE_RATE`` environment
-    variable.
-
-    Default: ``0.1``
-
-
 Instance Configuration
 ~~~~~~~~~~~~~~~~~~~~~~
diff --git a/ddtrace/contrib/aiohttp.py b/ddtrace/contrib/aiohttp.py
index dbb5def90d1..d001139dde8 100644
--- a/ddtrace/contrib/aiohttp.py
+++ b/ddtrace/contrib/aiohttp.py
@@ -36,6 +36,13 @@
   Default: ``False``

+.. py:data:: ddtrace.config.aiohttp['disable_stream_timing_for_mem_leak']
+
+    Whether or not to apply a workaround for a potential memory leak in the aiohttp integration.
+    When set to ``True``, this flag may cause streamed response span timing to be inaccurate.
+
+    Default: ``False``
+
 Server
 ******
diff --git a/ddtrace/contrib/internal/aiohttp/middlewares.py b/ddtrace/contrib/internal/aiohttp/middlewares.py
index b3dde240d44..c1a5b8e4f3b 100644
--- a/ddtrace/contrib/internal/aiohttp/middlewares.py
+++ b/ddtrace/contrib/internal/aiohttp/middlewares.py
@@ -59,8 +59,9 @@ async def attach_context(request):
         request[REQUEST_CONFIG_KEY] = app[CONFIG_KEY]
         try:
             response = await handler(request)
-            if isinstance(response, web.StreamResponse):
-                request.task.add_done_callback(lambda _: finish_request_span(request, response))
+            if not config.aiohttp["disable_stream_timing_for_mem_leak"]:
+                if isinstance(response, web.StreamResponse):
+                    request.task.add_done_callback(lambda _: finish_request_span(request, response))
             return response
         except Exception:
             req_span.set_traceback()
@@ -134,9 +135,13 @@ async def on_prepare(request, response):
     the trace middleware execution.
     """
     # NB isinstance is not appropriate here because StreamResponse is a parent of the other
-    # aiohttp response types
-    if type(response) is web.StreamResponse and not response.task.done():
-        return
+    # aiohttp response types. However, in some cases this can also lead to spans never being
+    # finished, resulting in a memory leak, which is why we have this flag.
+    # TODO: this is a temporary fix for a memory leak in aiohttp. We should find a way to
+    # consistently close spans with the correct timing.
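The guard added next consumes this flag at request time; it is wired to an environment variable in ``patch.py`` just below. A minimal sketch of opting in from an application (the variable must be set before ``ddtrace`` patches aiohttp; ``patch()`` and ``config`` are the standard public entry points):

import os

os.environ["DD_AIOHTTP_CLIENT_DISABLE_STREAM_TIMING_FOR_MEM_LEAK"] = "true"  # read once when the integration is patched

from ddtrace import config, patch

patch(aiohttp=True)
assert config.aiohttp["disable_stream_timing_for_mem_leak"] is True  # trades streamed-span timing accuracy for leak safety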
+ if not config.aiohttp["disable_stream_timing_for_mem_leak"]: + if type(response) is web.StreamResponse and not response.task.done(): + return finish_request_span(request, response) diff --git a/ddtrace/contrib/internal/aiohttp/patch.py b/ddtrace/contrib/internal/aiohttp/patch.py index 900a8d26e41..4643ba2ae43 100644 --- a/ddtrace/contrib/internal/aiohttp/patch.py +++ b/ddtrace/contrib/internal/aiohttp/patch.py @@ -22,6 +22,7 @@ from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool from ddtrace.propagation.http import HTTPPropagator +from ddtrace.settings._core import get_config as _get_config from ddtrace.trace import Pin @@ -31,7 +32,12 @@ # Server config config._add( "aiohttp", - dict(distributed_tracing=True), + dict( + distributed_tracing=True, + disable_stream_timing_for_mem_leak=asbool( + _get_config("DD_AIOHTTP_CLIENT_DISABLE_STREAM_TIMING_FOR_MEM_LEAK", default=False) + ), + ), ) config._add( diff --git a/ddtrace/contrib/internal/asgi/middleware.py b/ddtrace/contrib/internal/asgi/middleware.py index 7da2b11b929..70b284c8882 100644 --- a/ddtrace/contrib/internal/asgi/middleware.py +++ b/ddtrace/contrib/internal/asgi/middleware.py @@ -150,12 +150,8 @@ async def __call__(self, scope, receive, send): if scope["type"] == "http": operation_name = schematize_url_operation(operation_name, direction=SpanDirection.INBOUND, protocol="http") - # Calling ddtrace.trace.Pin(...) with the `tracer` argument is deprecated - # Remove this if statement when the `tracer` argument is removed - if self.tracer is ddtrace.tracer: - pin = ddtrace.trace.Pin(service="asgi") - else: - pin = ddtrace.trace.Pin(service="asgi", tracer=self.tracer) + pin = ddtrace.trace.Pin(service="asgi") + pin._tracer = self.tracer with core.context_with_data( "asgi.__call__", diff --git a/ddtrace/contrib/internal/django/patch.py b/ddtrace/contrib/internal/django/patch.py index ba96675fef3..3fea1e9b6a0 100644 --- a/ddtrace/contrib/internal/django/patch.py +++ b/ddtrace/contrib/internal/django/patch.py @@ -17,7 +17,6 @@ import wrapt from wrapt.importer import when_imported -import ddtrace from ddtrace import config from ddtrace.appsec._utils import _UserInfoRetriever from ddtrace.constants import SPAN_KIND @@ -149,12 +148,9 @@ def cursor(django, pin, func, instance, args, kwargs): tags = {"django.db.vendor": vendor, "django.db.alias": alias} tags.update(getattr(conn, "_datadog_tags", {})) - # Calling ddtrace.pin.Pin(...) with the `tracer` argument generates a deprecation warning. 
- # Remove this if statement when the `tracer` argument is removed - if pin.tracer is ddtrace.tracer: - pin = Pin(service, tags=tags) - else: - pin = Pin(service, tags=tags, tracer=pin.tracer) + tracer = pin.tracer + pin = Pin(service, tags=tags) + pin._tracer = tracer cursor = func(*args, **kwargs) diff --git a/ddtrace/contrib/internal/graphql/patch.py b/ddtrace/contrib/internal/graphql/patch.py index fe538303a52..0b3c7a4d790 100644 --- a/ddtrace/contrib/internal/graphql/patch.py +++ b/ddtrace/contrib/internal/graphql/patch.py @@ -1,20 +1,14 @@ import os import re import sys -from typing import TYPE_CHECKING -from typing import List - -from ddtrace.internal.schema.span_attribute_schema import SpanDirection -from ddtrace.trace import Span - - -if TYPE_CHECKING: # pragma: no cover - from typing import Callable # noqa:F401 - from typing import Dict # noqa:F401 - from typing import Iterable # noqa:F401 - from typing import Tuple # noqa:F401 - from typing import Union # noqa:F401 - +import traceback +from typing import Callable # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Iterable # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 import graphql from graphql import MiddlewareManager @@ -26,12 +20,14 @@ from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import ERROR_MSG +from ddtrace.constants import ERROR_STACK from ddtrace.constants import ERROR_TYPE from ddtrace.contrib import trace_utils from ddtrace.ext import SpanTypes from ddtrace.internal.constants import COMPONENT from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.schema import schematize_url_operation +from ddtrace.internal.schema.span_attribute_schema import SpanDirection from ddtrace.internal.utils import ArgumentError from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils import set_argument_value @@ -40,6 +36,7 @@ from ddtrace.internal.wrapping import unwrap from ddtrace.internal.wrapping import wrap from ddtrace.trace import Pin +from ddtrace.trace import Span _graphql_version_str = graphql.__version__ @@ -289,12 +286,33 @@ def _get_source_str(obj): return re.sub(r"\s+", " ", source_str).strip() +def _validate_error_extensions(error: GraphQLError, extensions: Optional[str], attributes: Dict) -> Tuple[Dict, Dict]: + """Validate user-provided extensions + All extensions values MUST be stringified, EXCEPT for numeric values and + boolean values, which remain in their original type. + """ + + if not extensions: + return {}, attributes + + fields = [e.strip() for e in extensions.split(",")] + error_extensions = {} + for field in fields: + if field in error.extensions: + if isinstance(error.extensions[field], (int, float, bool)): + error_extensions[field] = error.extensions[field] + else: + error_extensions[field] = str(error.extensions[field]) + + return error_extensions, attributes + + def _set_span_errors(errors: List[GraphQLError], span: Span) -> None: if not errors: # do nothing if the list of graphql errors is empty return - span.error = 1 + exc_type_str = "%s.%s" % (GraphQLError.__module__, GraphQLError.__name__) span.set_tag_str(ERROR_TYPE, exc_type_str) error_msgs = "\n".join([str(error) for error in errors]) @@ -302,6 +320,37 @@ def _set_span_errors(errors: List[GraphQLError], span: Span) -> None: # we will not set the error.stack tag on graphql spans. 
Setting only one traceback # could be misleading and might obfuscate errors. span.set_tag_str(ERROR_MSG, error_msgs) + for error in errors: + locations = " ".join(f"{loc.formatted['line']}:{loc.formatted['column']}" for loc in error.locations) + attributes = { + "message": error.message, + "type": span.get_tag("error.type"), + "locations": locations, + } + + if error.__traceback__: + stacktrace = "".join( + traceback.format_exception( + type(error), error, error.__traceback__, limit=config._span_traceback_max_size + ) + ) + attributes["stacktrace"] = stacktrace + span.set_tag_str(ERROR_STACK, stacktrace) + + if error.path is not None: + path = ",".join([str(path_obj) for path_obj in error.path]) + attributes["path"] = path + + if os.environ.get("DD_TRACE_GRAPHQL_ERROR_EXTENSIONS") is not None: + extensions = os.environ.get("DD_TRACE_GRAPHQL_ERROR_EXTENSIONS") + + error_extensions, attributes = _validate_error_extensions(error, extensions, attributes) + if error_extensions: + attributes["extensions"] = str(error_extensions) + span._add_event( + name="dd.graphql.query.error", + attributes=attributes, + ) def _set_span_operation_tags(span, document): diff --git a/ddtrace/contrib/internal/langchain/constants.py b/ddtrace/contrib/internal/langchain/constants.py index cdc0fc47cc2..40ea9e7a993 100644 --- a/ddtrace/contrib/internal/langchain/constants.py +++ b/ddtrace/contrib/internal/langchain/constants.py @@ -80,7 +80,3 @@ } API_KEY = "langchain.request.api_key" -MODEL = "langchain.request.model" -COMPLETION_TOKENS = "langchain.tokens.completion_tokens" -PROMPT_TOKENS = "langchain.tokens.prompt_tokens" -TOTAL_COST = "langchain.tokens.total_cost" diff --git a/ddtrace/contrib/internal/langchain/patch.py b/ddtrace/contrib/internal/langchain/patch.py index 9badbf22d87..58c635dc46f 100644 --- a/ddtrace/contrib/internal/langchain/patch.py +++ b/ddtrace/contrib/internal/langchain/patch.py @@ -41,10 +41,6 @@ from ddtrace import config from ddtrace.contrib.internal.langchain.constants import API_KEY -from ddtrace.contrib.internal.langchain.constants import COMPLETION_TOKENS -from ddtrace.contrib.internal.langchain.constants import MODEL -from ddtrace.contrib.internal.langchain.constants import PROMPT_TOKENS -from ddtrace.contrib.internal.langchain.constants import TOTAL_COST from ddtrace.contrib.internal.langchain.constants import agent_output_parser_classes from ddtrace.contrib.internal.langchain.constants import text_embedding_models from ddtrace.contrib.internal.langchain.constants import vectorstore_classes @@ -56,7 +52,6 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.utils import ArgumentError from ddtrace.internal.utils import get_argument_value -from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import deep_getattr from ddtrace.internal.utils.version import parse_version from ddtrace.llmobs._integrations import LangChainIntegration @@ -76,10 +71,7 @@ def get_version(): config._add( "langchain", { - "logs_enabled": asbool(os.getenv("DD_LANGCHAIN_LOGS_ENABLED", False)), - "metrics_enabled": asbool(os.getenv("DD_LANGCHAIN_METRICS_ENABLED", True)), "span_prompt_completion_sample_rate": float(os.getenv("DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE", 1.0)), - "log_prompt_completion_sample_rate": float(os.getenv("DD_LANGCHAIN_LOG_PROMPT_COMPLETION_SAMPLE_RATE", 0.1)), "span_char_limit": int(os.getenv("DD_LANGCHAIN_SPAN_CHAR_LIMIT", 128)), }, ) @@ -118,9 +110,7 @@ def _extract_api_key(instance: Any) -> str: return "" -def 
_tag_openai_token_usage( - span: Span, llm_output: Dict[str, Any], propagated_cost: int = 0, propagate: bool = False -) -> None: +def _tag_openai_token_usage(span: Span, llm_output: Dict[str, Any]) -> None: """ Extract token usage from llm_output, tag on span. Calculate the total cost for each LLM/chat_model, then propagate those values up the trace so that @@ -130,23 +120,6 @@ def _tag_openai_token_usage( current_metric_value = span.get_metric("langchain.tokens.%s_tokens" % token_type) or 0 metric_value = llm_output["token_usage"].get("%s_tokens" % token_type, 0) span.set_metric("langchain.tokens.%s_tokens" % token_type, current_metric_value + metric_value) - total_cost = span.get_metric(TOTAL_COST) or 0 - if not propagate and get_openai_token_cost_for_model: - try: - completion_cost = get_openai_token_cost_for_model( - span.get_tag(MODEL), - span.get_metric(COMPLETION_TOKENS), - is_completion=True, - ) - prompt_cost = get_openai_token_cost_for_model(span.get_tag(MODEL), span.get_metric(PROMPT_TOKENS)) - total_cost = completion_cost + prompt_cost - except ValueError: - # If not in langchain's openai model catalog, the above helpers will raise a ValueError. - log.debug("Cannot calculate token/cost as the model is not in LangChain's OpenAI model catalog.") - if get_openai_token_cost_for_model: - span.set_metric(TOTAL_COST, propagated_cost + total_cost) - if span._parent is not None: - _tag_openai_token_usage(span._parent, llm_output, propagated_cost=propagated_cost + total_cost, propagate=True) def _is_openai_llm_instance(instance): @@ -221,7 +194,6 @@ def traced_llm_generate(langchain, pin, func, instance, args, kwargs): completions = func(*args, **kwargs) if _is_openai_llm_instance(instance): _tag_openai_token_usage(span, completions.llm_output) - integration.record_usage(span, completions.llm_output) for idx, completion in enumerate(completions.generations): if integration.is_pc_sampled_span(span): @@ -237,28 +209,10 @@ def traced_llm_generate(langchain, pin, func, instance, args, kwargs): ) except Exception: span.set_exc_info(*sys.exc_info()) - integration.metric(span, "incr", "request.error", 1) raise finally: integration.llmobs_set_tags(span, args=args, kwargs=kwargs, response=completions, operation="llm") span.finish() - integration.metric(span, "dist", "request.duration", span.duration_ns) - if integration.is_pc_sampled_log(span): - if completions is None: - log_completions = [] - else: - log_completions = [ - [{"text": completion.text} for completion in completions] for completions in completions.generations - ] - integration.log( - span, - "info" if span.error == 0 else "error", - "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), - attrs={ - "prompts": prompts, - "choices": log_completions, - }, - ) return completions @@ -292,7 +246,6 @@ async def traced_llm_agenerate(langchain, pin, func, instance, args, kwargs): completions = await func(*args, **kwargs) if _is_openai_llm_instance(instance): _tag_openai_token_usage(span, completions.llm_output) - integration.record_usage(span, completions.llm_output) for idx, completion in enumerate(completions.generations): if integration.is_pc_sampled_span(span): @@ -308,28 +261,10 @@ async def traced_llm_agenerate(langchain, pin, func, instance, args, kwargs): ) except Exception: span.set_exc_info(*sys.exc_info()) - integration.metric(span, "incr", "request.error", 1) raise finally: integration.llmobs_set_tags(span, args=args, kwargs=kwargs, response=completions, operation="llm") span.finish() - integration.metric(span, 
"dist", "request.duration", span.duration_ns) - if integration.is_pc_sampled_log(span): - if completions is None: - log_completions = [] - else: - log_completions = [ - [{"text": completion.text} for completion in completions] for completions in completions.generations - ] - integration.log( - span, - "info" if span.error == 0 else "error", - "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), - attrs={ - "prompts": prompts, - "choices": log_completions, - }, - ) return completions @@ -376,7 +311,6 @@ def traced_chat_model_generate(langchain, pin, func, instance, args, kwargs): chat_completions = func(*args, **kwargs) if _is_openai_chat_instance(instance): _tag_openai_token_usage(span, chat_completions.llm_output) - integration.record_usage(span, chat_completions.llm_output) for message_set_idx, message_set in enumerate(chat_completions.generations): for idx, chat_completion in enumerate(message_set): @@ -417,45 +351,10 @@ def traced_chat_model_generate(langchain, pin, func, instance, args, kwargs): ) except Exception: span.set_exc_info(*sys.exc_info()) - integration.metric(span, "incr", "request.error", 1) raise finally: integration.llmobs_set_tags(span, args=args, kwargs=kwargs, response=chat_completions, operation="chat") span.finish() - integration.metric(span, "dist", "request.duration", span.duration_ns) - if integration.is_pc_sampled_log(span): - if chat_completions is None: - log_chat_completions = [] - else: - log_chat_completions = [ - [ - {"content": message.text, "message_type": message.message.__class__.__name__} - for message in messages - ] - for messages in chat_completions.generations - ] - integration.log( - span, - "info" if span.error == 0 else "error", - "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), - attrs={ - "messages": [ - [ - { - "content": ( - message.get("content", "") - if isinstance(message, dict) - else str(getattr(message, "content", "")) - ), - "message_type": message.__class__.__name__, - } - for message in messages - ] - for messages in chat_messages - ], - "choices": log_chat_completions, - }, - ) return chat_completions @@ -502,7 +401,6 @@ async def traced_chat_model_agenerate(langchain, pin, func, instance, args, kwar chat_completions = await func(*args, **kwargs) if _is_openai_chat_instance(instance): _tag_openai_token_usage(span, chat_completions.llm_output) - integration.record_usage(span, chat_completions.llm_output) for message_set_idx, message_set in enumerate(chat_completions.generations): for idx, chat_completion in enumerate(message_set): @@ -542,45 +440,10 @@ async def traced_chat_model_agenerate(langchain, pin, func, instance, args, kwar ) except Exception: span.set_exc_info(*sys.exc_info()) - integration.metric(span, "incr", "request.error", 1) raise finally: integration.llmobs_set_tags(span, args=args, kwargs=kwargs, response=chat_completions, operation="chat") span.finish() - integration.metric(span, "dist", "request.duration", span.duration_ns) - if integration.is_pc_sampled_log(span): - if chat_completions is None: - log_chat_completions = [] - else: - log_chat_completions = [ - [ - {"content": message.text, "message_type": message.message.__class__.__name__} - for message in messages - ] - for messages in chat_completions.generations - ] - integration.log( - span, - "info" if span.error == 0 else "error", - "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), - attrs={ - "messages": [ - [ - { - "content": ( - message.get("content", "") - if isinstance(message, dict) - else 
str(getattr(message, "content", "")) - ), - "message_type": message.__class__.__name__, - } - for message in messages - ] - for messages in chat_messages - ], - "choices": log_chat_completions, - }, - ) return chat_completions @@ -627,19 +490,10 @@ def traced_embedding(langchain, pin, func, instance, args, kwargs): span.set_metric("langchain.response.outputs.embedding_length", len(embeddings)) except Exception: span.set_exc_info(*sys.exc_info()) - integration.metric(span, "incr", "request.error", 1) raise finally: integration.llmobs_set_tags(span, args=args, kwargs=kwargs, response=embeddings, operation="embedding") span.finish() - integration.metric(span, "dist", "request.duration", span.duration_ns) - if integration.is_pc_sampled_log(span): - integration.log( - span, - "info" if span.error == 0 else "error", - "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), - attrs={"inputs": [input_texts] if isinstance(input_texts, str) else input_texts}, - ) return embeddings @@ -689,12 +543,10 @@ def traced_lcel_runnable_sequence(langchain, pin, func, instance, args, kwargs): span.set_tag_str("langchain.response.outputs.%d" % idx, integration.trunc(str(output))) except Exception: span.set_exc_info(*sys.exc_info()) - integration.metric(span, "incr", "request.error", 1) raise finally: integration.llmobs_set_tags(span, args=[], kwargs=inputs, response=final_output, operation="chain") span.finish() - integration.metric(span, "dist", "request.duration", span.duration_ns) return final_output @@ -735,12 +587,10 @@ async def traced_lcel_runnable_sequence_async(langchain, pin, func, instance, ar span.set_tag_str("langchain.response.outputs.%d" % idx, integration.trunc(str(output))) except Exception: span.set_exc_info(*sys.exc_info()) - integration.metric(span, "incr", "request.error", 1) raise finally: integration.llmobs_set_tags(span, args=[], kwargs=inputs, response=final_output, operation="chain") span.finish() - integration.metric(span, "dist", "request.duration", span.duration_ns) return final_output @@ -793,25 +643,10 @@ def traced_similarity_search(langchain, pin, func, instance, args, kwargs): ) except Exception: span.set_exc_info(*sys.exc_info()) - integration.metric(span, "incr", "request.error", 1) raise finally: integration.llmobs_set_tags(span, args=args, kwargs=kwargs, response=documents, operation="retrieval") span.finish() - integration.metric(span, "dist", "request.duration", span.duration_ns) - if integration.is_pc_sampled_log(span): - integration.log( - span, - "info" if span.error == 0 else "error", - "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), - attrs={ - "query": query, - "k": k or "", - "documents": [ - {"page_content": document.page_content, "metadata": document.metadata} for document in documents - ], - }, - ) return documents diff --git a/ddtrace/contrib/internal/mongoengine/trace.py b/ddtrace/contrib/internal/mongoengine/trace.py index 5539cf1d2e4..49ed5ee2590 100644 --- a/ddtrace/contrib/internal/mongoengine/trace.py +++ b/ddtrace/contrib/internal/mongoengine/trace.py @@ -29,11 +29,10 @@ def __call__(self, *args, **kwargs): client = self.__wrapped__(*args, **kwargs) pin = ddtrace.trace.Pin.get_from(self) if pin: - # Calling ddtrace.trace.Pin(...) with the `tracer` argument generates a deprecation warning. 
- # Remove this if statement when the `tracer` argument is removed - if pin.tracer is ddtrace.tracer: - ddtrace.trace.Pin(service=pin.service).onto(client) - else: - ddtrace.trace.Pin(service=pin.service, tracer=pin.tracer).onto(client) + tracer = pin.tracer + pp = ddtrace.trace.Pin(service=pin.service) + if tracer is not None: + pp._tracer = tracer + pp.onto(client) return client diff --git a/ddtrace/contrib/internal/openai/_endpoint_hooks.py b/ddtrace/contrib/internal/openai/_endpoint_hooks.py index 00ee44aef4b..786bb67f919 100644 --- a/ddtrace/contrib/internal/openai/_endpoint_hooks.py +++ b/ddtrace/contrib/internal/openai/_endpoint_hooks.py @@ -112,7 +112,6 @@ def shared_gen(): _process_finished_stream(integration, span, kwargs, streamed_chunks, is_completion=is_completion) finally: span.finish() - integration.metric(span, "dist", "request.duration", span.duration_ns) if _is_async_generator(resp): @@ -199,16 +198,6 @@ def _record_response(self, pin, integration, span, args, kwargs, resp, error): resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) if kwargs.get("stream") and error is None: return self._handle_streamed_response(integration, span, kwargs, resp, is_completion=True) - if integration.is_pc_sampled_log(span): - attrs_dict = {"prompt": kwargs.get("prompt", "")} - if error is None: - log_choices = resp.choices - if hasattr(resp.choices[0], "model_dump"): - log_choices = [choice.model_dump() for choice in resp.choices] - attrs_dict.update({"choices": log_choices}) - integration.log( - span, "info" if error is None else "error", "sampled %s" % self.OPERATION_ID, attrs=attrs_dict - ) integration.llmobs_set_tags(span, args=[], kwargs=kwargs, response=resp, operation="completion") if not resp: return @@ -268,14 +257,6 @@ def _record_response(self, pin, integration, span, args, kwargs, resp, error): resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) if kwargs.get("stream") and error is None: return self._handle_streamed_response(integration, span, kwargs, resp, is_completion=False) - if integration.is_pc_sampled_log(span): - log_choices = resp.choices - if hasattr(resp.choices[0], "model_dump"): - log_choices = [choice.model_dump() for choice in resp.choices] - attrs_dict = {"messages": kwargs.get("messages", []), "completion": log_choices} - integration.log( - span, "info" if error is None else "error", "sampled %s" % self.OPERATION_ID, attrs=attrs_dict - ) integration.llmobs_set_tags(span, args=[], kwargs=kwargs, response=resp, operation="chat") if not resp: return @@ -518,26 +499,6 @@ def _record_request(self, pin, integration, instance, span, args, kwargs): def _record_response(self, pin, integration, span, args, kwargs, resp, error): resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) - if integration.is_pc_sampled_log(span): - attrs_dict = {} - if kwargs.get("response_format", "") == "b64_json": - attrs_dict.update({"choices": [{"b64_json": "returned"} for _ in resp.data]}) - else: - log_choices = resp.data - if hasattr(resp.data[0], "model_dump"): - log_choices = [choice.model_dump() for choice in resp.data] - attrs_dict.update({"choices": log_choices}) - if "prompt" in self._request_kwarg_params: - attrs_dict.update({"prompt": kwargs.get("prompt", "")}) - if "image" in self._request_kwarg_params: - image = args[0] if len(args) >= 1 else kwargs.get("image", "") - attrs_dict.update({"image": image.name.split("/")[-1]}) - if "mask" in self._request_kwarg_params: - mask = args[1] if 
len(args) >= 2 else kwargs.get("mask", "") - attrs_dict.update({"mask": mask.name.split("/")[-1]}) - integration.log( - span, "info" if error is None else "error", "sampled %s" % self.OPERATION_ID, attrs=attrs_dict - ) if not resp: return choices = resp.data @@ -629,19 +590,6 @@ def _record_response(self, pin, integration, span, args, kwargs, resp, error): span.set_metric("openai.response.segments_count", len(resp_to_tag.get("segments"))) if integration.is_pc_sampled_span(span): span.set_tag_str("openai.response.text", integration.trunc(text)) - if integration.is_pc_sampled_log(span): - file_input = args[1] if len(args) >= 2 else kwargs.get("file", "") - integration.log( - span, - "info" if error is None else "error", - "sampled %s" % self.OPERATION_ID, - attrs={ - "file": getattr(file_input, "name", "").split("/")[-1], - "prompt": kwargs.get("prompt", ""), - "language": kwargs.get("language", ""), - "text": text, - }, - ) return resp diff --git a/ddtrace/contrib/internal/openai/patch.py b/ddtrace/contrib/internal/openai/patch.py index 3696314acc4..812c786dfc4 100644 --- a/ddtrace/contrib/internal/openai/patch.py +++ b/ddtrace/contrib/internal/openai/patch.py @@ -10,7 +10,6 @@ from ddtrace.contrib.trace_utils import with_traced_module from ddtrace.contrib.trace_utils import wrap from ddtrace.internal.logger import get_logger -from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import deep_getattr from ddtrace.internal.utils.version import parse_version from ddtrace.llmobs._integrations import OpenAIIntegration @@ -23,10 +22,7 @@ config._add( "openai", { - "logs_enabled": asbool(os.getenv("DD_OPENAI_LOGS_ENABLED", False)), - "metrics_enabled": asbool(os.getenv("DD_OPENAI_METRICS_ENABLED", True)), "span_prompt_completion_sample_rate": float(os.getenv("DD_OPENAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE", 1.0)), - "log_prompt_completion_sample_rate": float(os.getenv("DD_OPENAI_LOG_PROMPT_COMPLETION_SAMPLE_RATE", 0.1)), "span_char_limit": int(os.getenv("DD_OPENAI_SPAN_CHAR_LIMIT", 128)), }, ) @@ -183,7 +179,6 @@ def _traced_endpoint(endpoint_hook, integration, instance, pin, args, kwargs): # Record any error information if err is not None: span.set_exc_info(*sys.exc_info()) - integration.metric(span, "incr", "request.error", 1) # Pass the response and the error to the hook try: @@ -196,7 +191,6 @@ def _traced_endpoint(endpoint_hook, integration, instance, pin, args, kwargs): # Streamed responses with error will need to be finished manually as well. 
if not kwargs.get("stream") or err is not None: span.finish() - integration.metric(span, "dist", "request.duration", span.duration_ns) def _patched_endpoint(openai, patch_hook): @@ -256,7 +250,6 @@ async def patched_endpoint(openai, pin, func, instance, args, kwargs): @with_traced_module def patched_convert(openai, pin, func, instance, args, kwargs): """Patch convert captures header information in the openai response""" - integration = openai._datadog_integration span = pin.tracer.current_span() if not span: return func(*args, **kwargs) @@ -281,23 +274,19 @@ def patched_convert(openai, pin, func, instance, args, kwargs): if headers.get("x-ratelimit-limit-requests"): v = headers.get("x-ratelimit-limit-requests") if v is not None: - integration.metric(span, "gauge", "ratelimit.requests", int(v)) span.set_metric("openai.organization.ratelimit.requests.limit", int(v)) if headers.get("x-ratelimit-limit-tokens"): v = headers.get("x-ratelimit-limit-tokens") if v is not None: - integration.metric(span, "gauge", "ratelimit.tokens", int(v)) span.set_metric("openai.organization.ratelimit.tokens.limit", int(v)) # Gauge and set span info for remaining requests and tokens if headers.get("x-ratelimit-remaining-requests"): v = headers.get("x-ratelimit-remaining-requests") if v is not None: - integration.metric(span, "gauge", "ratelimit.remaining.requests", int(v)) span.set_metric("openai.organization.ratelimit.requests.remaining", int(v)) if headers.get("x-ratelimit-remaining-tokens"): v = headers.get("x-ratelimit-remaining-tokens") if v is not None: - integration.metric(span, "gauge", "ratelimit.remaining.tokens", int(v)) span.set_metric("openai.organization.ratelimit.tokens.remaining", int(v)) return func(*args, **kwargs) diff --git a/ddtrace/contrib/internal/pylibmc/client.py b/ddtrace/contrib/internal/pylibmc/client.py index 5c48e8465f8..e6b367b243b 100644 --- a/ddtrace/contrib/internal/pylibmc/client.py +++ b/ddtrace/contrib/internal/pylibmc/client.py @@ -51,12 +51,8 @@ def __init__(self, client=None, service=memcached.SERVICE, tracer=None, *args, * super(TracedClient, self).__init__(client) schematized_service = schematize_service_name(service) - # Calling ddtrace.trace.Pin(...) with the `tracer` argument generates a deprecation warning. - # Remove this if statement when the `tracer` argument is removed - if tracer is ddtrace.tracer: - pin = ddtrace.trace.Pin(service=schematized_service) - else: - pin = ddtrace.trace.Pin(service=schematized_service, tracer=tracer) + pin = ddtrace.trace.Pin(service=schematized_service) + pin._tracer = tracer pin.onto(self) # attempt to collect the pool of urls this client talks to diff --git a/ddtrace/contrib/internal/sqlalchemy/engine.py b/ddtrace/contrib/internal/sqlalchemy/engine.py index a3dcb324700..a20199dbcc2 100644 --- a/ddtrace/contrib/internal/sqlalchemy/engine.py +++ b/ddtrace/contrib/internal/sqlalchemy/engine.py @@ -67,12 +67,9 @@ def __init__(self, tracer, service, engine): self.name = schematize_database_operation("%s.query" % self.vendor, database_provider=self.vendor) # attach the PIN - # Calling ddtrace.trace.Pin(...) with the `tracer` argument generates a deprecation warning. 
- # Remove this if statement when the `tracer` argument is removed - if self.tracer is ddtrace.tracer: - Pin(service=self.service).onto(engine) - else: - Pin(tracer=tracer, service=self.service).onto(engine) + pin = Pin(service=self.service) + pin._tracer = self.tracer + pin.onto(engine) listen(engine, "before_cursor_execute", self._before_cur_exec) listen(engine, "after_cursor_execute", self._after_cur_exec) diff --git a/ddtrace/contrib/internal/tornado/application.py b/ddtrace/contrib/internal/tornado/application.py index f36857b81b1..587912f603b 100644 --- a/ddtrace/contrib/internal/tornado/application.py +++ b/ddtrace/contrib/internal/tornado/application.py @@ -54,10 +54,6 @@ def tracer_config(__init__, app, args, kwargs): if tags: tracer.set_tags(tags) - # configure the PIN object for template rendering - # Required for backwards compatibility. Remove the else clause when - # the `ddtrace.trace.Pin` object no longer accepts the Pin argument. - if tracer is ddtrace.tracer: - ddtrace.trace.Pin(service=service).onto(template) - else: - ddtrace.trace.Pin(service=service, tracer=tracer).onto(template) + pin = ddtrace.trace.Pin(service=service) + pin._tracer = tracer + pin.onto(template) diff --git a/ddtrace/contrib/internal/valkey/asyncio_patch.py b/ddtrace/contrib/internal/valkey/asyncio_patch.py new file mode 100644 index 00000000000..b8d15a7c603 --- /dev/null +++ b/ddtrace/contrib/internal/valkey/asyncio_patch.py @@ -0,0 +1,36 @@ +from ddtrace import config +from ddtrace._trace.utils_valkey import _instrument_valkey_cmd +from ddtrace._trace.utils_valkey import _instrument_valkey_execute_async_cluster_pipeline +from ddtrace._trace.utils_valkey import _instrument_valkey_execute_pipeline +from ddtrace.contrib.internal.valkey_utils import _run_valkey_command_async +from ddtrace.internal.utils.formats import stringify_cache_args +from ddtrace.trace import Pin + + +async def instrumented_async_execute_command(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + with _instrument_valkey_cmd(pin, config.valkey, instance, args) as ctx: + return await _run_valkey_command_async(ctx=ctx, func=func, args=args, kwargs=kwargs) + + +async def instrumented_async_execute_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + cmds = [stringify_cache_args(c, cmd_max_len=config.valkey.cmd_max_length) for c, _ in instance.command_stack] + with _instrument_valkey_execute_pipeline(pin, config.valkey, cmds, instance): + return await func(*args, **kwargs) + + +async def instrumented_async_execute_cluster_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + cmds = [stringify_cache_args(c.args, cmd_max_len=config.valkey.cmd_max_length) for c in instance._command_stack] + with _instrument_valkey_execute_async_cluster_pipeline(pin, config.valkey, cmds, instance): + return await func(*args, **kwargs) diff --git a/ddtrace/contrib/internal/valkey/patch.py b/ddtrace/contrib/internal/valkey/patch.py new file mode 100644 index 00000000000..7de63f947c1 --- /dev/null +++ b/ddtrace/contrib/internal/valkey/patch.py @@ -0,0 +1,223 @@ +""" +The valkey integration traces valkey requests. + + +Enabling +~~~~~~~~ + +The valkey integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. 
+ +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(valkey=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.valkey["service"] + + The service name reported by default for valkey traces. + + This option can also be set with the ``DD_VALKEY_SERVICE`` environment + variable. + + Default: ``"valkey"`` + + +.. py:data:: ddtrace.config.valkey["cmd_max_length"] + + Max allowable size for the valkey command span tag. + Anything beyond the max length will be replaced with ``"..."``. + + This option can also be set with the ``DD_VALKEY_CMD_MAX_LENGTH`` environment + variable. + + Default: ``1000`` + + +.. py:data:: ddtrace.config.valkey["resource_only_command"] + + The span resource will only include the command executed. To include all + arguments in the span resource, set this value to ``False``. + + This option can also be set with the ``DD_VALKEY_RESOURCE_ONLY_COMMAND`` environment + variable. + + Default: ``True`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure particular valkey instances use the :class:`Pin <ddtrace.trace.Pin>` API:: + + import valkey + from ddtrace.trace import Pin + + client = valkey.StrictValkey(host="localhost", port=6379) + + # Override service name for this instance + Pin.override(client, service="my-custom-queue") + + # Traces reported for this client will now have "my-custom-queue" + # as the service name. + client.get("my-key") +""" +import os + +import valkey +import wrapt + +from ddtrace import config +from ddtrace._trace.utils_valkey import _instrument_valkey_cmd +from ddtrace._trace.utils_valkey import _instrument_valkey_execute_pipeline +from ddtrace.contrib.internal.valkey_utils import ROW_RETURNING_COMMANDS +from ddtrace.contrib.internal.valkey_utils import determine_row_count +from ddtrace.contrib.trace_utils import unwrap +from ddtrace.internal import core +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.utils.formats import CMD_MAX_LEN +from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.utils.formats import stringify_cache_args +from ddtrace.trace import Pin + + +config._add( + "valkey", + { + "_default_service": schematize_service_name("valkey"), + "cmd_max_length": int(os.getenv("DD_VALKEY_CMD_MAX_LENGTH", CMD_MAX_LEN)), + "resource_only_command": asbool(os.getenv("DD_VALKEY_RESOURCE_ONLY_COMMAND", True)), + }, +) + + +def get_version(): + # type: () -> str + return getattr(valkey, "__version__", "") + + +def patch(): + """Patch the instrumented methods + + This duplication doesn't look nice. The nicer alternative is to use an ObjectProxy on top + of Valkey and StrictValkey. However, it means that any "import valkey.Valkey" won't be instrumented.
+ """ + if getattr(valkey, "_datadog_patch", False): + return + valkey._datadog_patch = True + + _w = wrapt.wrap_function_wrapper + + from .asyncio_patch import instrumented_async_execute_cluster_pipeline + from .asyncio_patch import instrumented_async_execute_command + from .asyncio_patch import instrumented_async_execute_pipeline + + _w("valkey", "Valkey.execute_command", instrumented_execute_command(config.valkey)) + _w("valkey", "Valkey.pipeline", instrumented_pipeline) + _w("valkey.client", "Pipeline.execute", instrumented_execute_pipeline(config.valkey, False)) + _w("valkey.client", "Pipeline.immediate_execute_command", instrumented_execute_command(config.valkey)) + _w("valkey.cluster", "ValkeyCluster.execute_command", instrumented_execute_command(config.valkey)) + _w("valkey.cluster", "ValkeyCluster.pipeline", instrumented_pipeline) + _w("valkey.cluster", "ClusterPipeline.execute", instrumented_execute_pipeline(config.valkey, True)) + Pin(service=None).onto(valkey.cluster.ValkeyCluster) + + _w("valkey.asyncio.client", "Valkey.execute_command", instrumented_async_execute_command) + _w("valkey.asyncio.client", "Valkey.pipeline", instrumented_pipeline) + _w("valkey.asyncio.client", "Pipeline.execute", instrumented_async_execute_pipeline) + _w("valkey.asyncio.client", "Pipeline.immediate_execute_command", instrumented_async_execute_command) + Pin(service=None).onto(valkey.asyncio.Valkey) + + _w("valkey.asyncio.cluster", "ValkeyCluster.execute_command", instrumented_async_execute_command) + _w("valkey.asyncio.cluster", "ValkeyCluster.pipeline", instrumented_pipeline) + _w("valkey.asyncio.cluster", "ClusterPipeline.execute", instrumented_async_execute_cluster_pipeline) + Pin(service=None).onto(valkey.asyncio.ValkeyCluster) + + Pin(service=None).onto(valkey.StrictValkey) + + +def unpatch(): + if getattr(valkey, "_datadog_patch", False): + valkey._datadog_patch = False + + unwrap(valkey.Valkey, "execute_command") + unwrap(valkey.Valkey, "pipeline") + unwrap(valkey.client.Pipeline, "execute") + unwrap(valkey.client.Pipeline, "immediate_execute_command") + unwrap(valkey.cluster.ValkeyCluster, "execute_command") + unwrap(valkey.cluster.ValkeyCluster, "pipeline") + unwrap(valkey.cluster.ClusterPipeline, "execute") + unwrap(valkey.asyncio.client.Valkey, "execute_command") + unwrap(valkey.asyncio.client.Valkey, "pipeline") + unwrap(valkey.asyncio.client.Pipeline, "execute") + unwrap(valkey.asyncio.client.Pipeline, "immediate_execute_command") + unwrap(valkey.asyncio.cluster.ValkeyCluster, "execute_command") + unwrap(valkey.asyncio.cluster.ValkeyCluster, "pipeline") + unwrap(valkey.asyncio.cluster.ClusterPipeline, "execute") + + +def _run_valkey_command(ctx: core.ExecutionContext, func, args, kwargs): + parsed_command = stringify_cache_args(args) + valkey_command = parsed_command.split(" ")[0] + rowcount = None + result = None + try: + result = func(*args, **kwargs) + return result + except Exception: + rowcount = 0 + raise + finally: + if rowcount is None: + rowcount = determine_row_count(valkey_command=valkey_command, result=result) + if valkey_command not in ROW_RETURNING_COMMANDS: + rowcount = None + core.dispatch("valkey.command.post", [ctx, rowcount]) + + +# +# tracing functions +# +def instrumented_execute_command(integration_config): + def _instrumented_execute_command(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + with _instrument_valkey_cmd(pin, integration_config, instance, args) as ctx: + return 
_run_valkey_command(ctx=ctx, func=func, args=args, kwargs=kwargs) + + return _instrumented_execute_command + + +def instrumented_pipeline(func, instance, args, kwargs): + pipeline = func(*args, **kwargs) + pin = Pin.get_from(instance) + if pin: + pin.onto(pipeline) + return pipeline + + +def instrumented_execute_pipeline(integration_config, is_cluster=False): + def _instrumented_execute_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + if is_cluster: + cmds = [ + stringify_cache_args(c.args, cmd_max_len=integration_config.cmd_max_length) + for c in instance.command_stack + ] + else: + cmds = [ + stringify_cache_args(c, cmd_max_len=integration_config.cmd_max_length) + for c, _ in instance.command_stack + ] + with _instrument_valkey_execute_pipeline(pin, integration_config, cmds, instance, is_cluster): + return func(*args, **kwargs) + + return _instrumented_execute_pipeline diff --git a/ddtrace/contrib/internal/valkey_utils.py b/ddtrace/contrib/internal/valkey_utils.py new file mode 100644 index 00000000000..8518dbe648a --- /dev/null +++ b/ddtrace/contrib/internal/valkey_utils.py @@ -0,0 +1,84 @@ +from typing import Dict +from typing import List +from typing import Optional +from typing import Union + +from ddtrace.ext import net +from ddtrace.ext import valkey as valkeyx +from ddtrace.internal import core +from ddtrace.internal.utils.formats import stringify_cache_args + + +SINGLE_KEY_COMMANDS = [ + "GET", + "GETDEL", + "GETEX", + "GETRANGE", + "GETSET", + "LINDEX", + "LRANGE", + "RPOP", + "LPOP", + "HGET", + "HGETALL", + "HKEYS", + "HMGET", + "HRANDFIELD", + "HVALS", +] +MULTI_KEY_COMMANDS = ["MGET"] +ROW_RETURNING_COMMANDS = SINGLE_KEY_COMMANDS + MULTI_KEY_COMMANDS + + +def _extract_conn_tags(conn_kwargs): + """Transform valkey conn info into dogtrace metas""" + try: + conn_tags = { + net.TARGET_HOST: conn_kwargs["host"], + net.TARGET_PORT: conn_kwargs["port"], + net.SERVER_ADDRESS: conn_kwargs["host"], + valkeyx.DB: conn_kwargs.get("db") or 0, + } + client_name = conn_kwargs.get("client_name") + if client_name: + conn_tags[valkeyx.CLIENT_NAME] = client_name + return conn_tags + except Exception: + return {} + + +def determine_row_count(valkey_command: str, result: Optional[Union[List, Dict, str]]) -> int: + empty_results = [b"", [], {}, None] + # result can be an empty list / dict / string + if result not in empty_results: + if valkey_command == "MGET": + # only include valid key results within count + result = [x for x in result if x not in empty_results] + return len(result) + elif valkey_command == "HMGET": + # only include valid key results within count + result = [x for x in result if x not in empty_results] + return 1 if len(result) > 0 else 0 + else: + return 1 + else: + return 0 + + +async def _run_valkey_command_async(ctx: core.ExecutionContext, func, args, kwargs): + parsed_command = stringify_cache_args(args) + valkey_command = parsed_command.split(" ")[0] + rowcount = None + result = None + try: + result = await func(*args, **kwargs) + return result + except BaseException: + rowcount = 0 + raise + finally: + if rowcount is None: + rowcount = determine_row_count(valkey_command=valkey_command, result=result) + if valkey_command not in ROW_RETURNING_COMMANDS: + rowcount = None + core.dispatch("valkey.async_command.post", [ctx, rowcount]) diff --git a/ddtrace/contrib/valkey/__init__.py b/ddtrace/contrib/valkey/__init__.py new file mode 100644 index 00000000000..c898aff012d --- /dev/null +++ 
b/ddtrace/contrib/valkey/__init__.py @@ -0,0 +1,68 @@ +""" +The valkey integration traces valkey requests. + + +Enabling +~~~~~~~~ + +The valkey integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(valkey=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.valkey["service"] + + The service name reported by default for valkey traces. + + This option can also be set with the ``DD_VALKEY_SERVICE`` environment + variable. + + Default: ``"valkey"`` + + +.. py:data:: ddtrace.config.valkey["cmd_max_length"] + + Max allowable size for the valkey command span tag. + Anything beyond the max length will be replaced with ``"..."``. + + This option can also be set with the ``DD_VALKEY_CMD_MAX_LENGTH`` environment + variable. + + Default: ``1000`` + + +.. py:data:: ddtrace.config.valkey["resource_only_command"] + + The span resource will only include the command executed. To include all + arguments in the span resource, set this value to ``False``. + + This option can also be set with the ``DD_VALKEY_RESOURCE_ONLY_COMMAND`` environment + variable. + + Default: ``True`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure particular valkey instances use the :class:`Pin <ddtrace.trace.Pin>` API:: + + import valkey + from ddtrace.trace import Pin + + client = valkey.StrictValkey(host="localhost", port=6379) + + # Override service name for this instance + Pin.override(client, service="my-custom-queue") + + # Traces reported for this client will now have "my-custom-queue" + # as the service name. + client.get("my-key") +""" diff --git a/ddtrace/ext/__init__.py b/ddtrace/ext/__init__.py index 965dd04f43f..98cc5226100 100644 --- a/ddtrace/ext/__init__.py +++ b/ddtrace/ext/__init__.py @@ -16,6 +16,7 @@ class SpanTypes(object): AUTH = "auth" SYSTEM = "system" LLM = "llm" + VALKEY = "valkey" class SpanKind(object): @@ -35,5 +36,6 @@ class SpanKind(object): SpanTypes.REDIS, SpanTypes.SQL, SpanTypes.WORKER, + SpanTypes.VALKEY, } ) diff --git a/ddtrace/ext/valkey.py b/ddtrace/ext/valkey.py new file mode 100644 index 00000000000..3246af841f6 --- /dev/null +++ b/ddtrace/ext/valkey.py @@ -0,0 +1,14 @@ +# defaults +APP = "valkey" +DEFAULT_SERVICE = "valkey" + +# net extension +DB = "out.valkey_db" + +# standard tags +RAWCMD = "valkey.raw_command" +CMD = "valkey.command" +ARGS_LEN = "valkey.args_length" +PIPELINE_LEN = "valkey.pipeline_length" +PIPELINE_AGE = "valkey.pipeline_age" +CLIENT_NAME = "valkey.client_name" diff --git a/ddtrace/internal/rate_limiter.py b/ddtrace/internal/rate_limiter.py index 0a97a6a7abc..9b514e5ff32 100644 --- a/ddtrace/internal/rate_limiter.py +++ b/ddtrace/internal/rate_limiter.py @@ -9,9 +9,6 @@ from typing import Callable # noqa:F401 from typing import Optional # noqa:F401 -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning -from ddtrace.vendor.debtcollector import deprecate - class RateLimiter(object): """ @@ -57,26 +54,18 @@ def __init__(self, rate_limit: int, time_window: float = 1e9): self._lock = threading.Lock() - def is_allowed(self, timestamp_ns: Optional[int] = None) -> bool: + def is_allowed(self) -> bool: """ Check whether the current request is allowed or not This method will also reduce the number of available tokens by 1 - :param int timestamp_ns: timestamp in nanoseconds for the current request.
:returns: Whether the current request is allowed or not :rtype: :obj:`bool` """ - if timestamp_ns is not None: - deprecate( - "The `timestamp_ns` parameter is deprecated and will be removed in a future version." - "Ratelimiter will use the current time.", - category=DDTraceDeprecationWarning, - ) - # rate limits are tested and mocked in pytest so we need to compute the timestamp here # (or move the unit tests to rust) - timestamp_ns = timestamp_ns or time.monotonic_ns() + timestamp_ns = time.monotonic_ns() allowed = self._is_allowed(timestamp_ns) # Update counts used to determine effective rate self._update_rate_counts(allowed, timestamp_ns) diff --git a/ddtrace/internal/remoteconfig/worker.py b/ddtrace/internal/remoteconfig/worker.py index 5429e599e74..08650bd8507 100644 --- a/ddtrace/internal/remoteconfig/worker.py +++ b/ddtrace/internal/remoteconfig/worker.py @@ -2,7 +2,6 @@ from typing import List # noqa:F401 from ddtrace.internal import agent -from ddtrace.internal import atexit from ddtrace.internal import forksafe from ddtrace.internal import periodic from ddtrace.internal.logger import get_logger @@ -132,9 +131,6 @@ def disable(self, join=False): if self.status == ServiceStatus.STOPPED: return - forksafe.unregister(self.reset_at_fork) - atexit.unregister(self.disable) - self.stop(join=join) def _stop_service(self, *args, **kwargs): diff --git a/ddtrace/internal/tracemethods.py b/ddtrace/internal/tracemethods.py index 5328797c09f..456cca597e1 100644 --- a/ddtrace/internal/tracemethods.py +++ b/ddtrace/internal/tracemethods.py @@ -4,8 +4,6 @@ import wrapt from ddtrace.internal.logger import get_logger -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning -from ddtrace.vendor.debtcollector import deprecate log = get_logger(__name__) @@ -65,102 +63,10 @@ def _parse_trace_methods(raw_dd_trace_methods: str) -> List[Tuple[str, str]]: return dd_trace_methods -def _parse_legacy_trace_methods(raw_dd_trace_methods: str) -> List[str]: - """ - Return a list of method names to trace based on the specification of - DD_TRACE_METHODS. - - Note that support for wildcard methods with [*] is not implemented. - - This square bracket notation will be deprecated in favor of the new ':' notation - TODO: This method can be deleted once the legacy syntax is officially deprecated - """ - if not raw_dd_trace_methods: - return [] - dd_trace_methods = [] - for qualified_methods in raw_dd_trace_methods.split(";"): - # Validate that methods are specified - if "[" not in qualified_methods or "]" not in qualified_methods: - log.warning( - ( - "Invalid DD_TRACE_METHODS: %s. " - "Methods must be specified in square brackets following the fully qualified module or class name." - ), - qualified_methods, - ) - return [] - - # Store the prefix of the qualified method name (eg. for "foo.bar.baz[qux,quux]", this is "foo.bar.baz") - qualified_method_prefix = qualified_methods.split("[")[0] - - if qualified_method_prefix == "__main__": - # __main__ cannot be used since the __main__ that exists now is not the same as the __main__ that the user - # application will have. __main__ when sitecustomize module is run is the builtin __main__. - log.warning( - "Invalid DD_TRACE_METHODS: %s. Methods cannot be traced on the __main__ module.", qualified_methods - ) - return [] - - # Get the class or module name of the method (eg. 
for "foo.bar.baz[qux,quux]", this is "baz[qux,quux]") - class_or_module_with_methods = qualified_methods.split(".")[-1] - - # Strip off the leading 'moduleOrClass[' and trailing ']' - methods = class_or_module_with_methods.split("[")[1] - methods = methods[:-1] - - # Add the methods to the list of methods to trace - for method in methods.split(","): - if not str.isidentifier(method): - log.warning( - "Invalid method name: %r. %s", - method, - ( - "You might have a trailing comma." - if method == "" - else "Method names must be valid Python identifiers." - ), - ) - return [] - dd_trace_methods.append("%s.%s" % (qualified_method_prefix, method)) - return dd_trace_methods - - def _install_trace_methods(raw_dd_trace_methods: str) -> None: """Install tracing on the given methods.""" - if "[" in raw_dd_trace_methods: - deprecate( - "Using DD_TRACE_METHODS with the '[]' notation is deprecated", - message="Please use DD_TRACE_METHODS with the new ':' notation instead", - removal_version="3.0.0", - category=DDTraceDeprecationWarning, - ) - # Using legacy syntax - for qualified_method in _parse_legacy_trace_methods(raw_dd_trace_methods): - # We don't know if the method is a class method or a module method, so we need to assume it's a module - # and if the import fails then go a level up and try again. - base_module_guess = ".".join(qualified_method.split(".")[:-1]) - method_name = qualified_method.split(".")[-1] - module = None - - while base_module_guess: - try: - module = __import__(base_module_guess) - except ImportError: - # Add the class to the method name - method_name = "%s.%s" % (base_module_guess.split(".")[-1], method_name) - base_module_guess = ".".join(base_module_guess.split(".")[:-1]) - else: - break - - if module is None: - log.warning("Could not import module for %r", qualified_method) - continue - - trace_method(base_module_guess, method_name) - else: - # Using updated syntax, no need to try to import - for module_name, method_name in _parse_trace_methods(raw_dd_trace_methods): - trace_method(module_name, method_name) + for module_name, method_name in _parse_trace_methods(raw_dd_trace_methods): + trace_method(module_name, method_name) def trace_method(module, method_name): diff --git a/ddtrace/internal/wrapping/asyncs.py b/ddtrace/internal/wrapping/asyncs.py index 855578f9db2..d0ed131e962 100644 --- a/ddtrace/internal/wrapping/asyncs.py +++ b/ddtrace/internal/wrapping/asyncs.py @@ -537,96 +537,6 @@ """ ) -elif PY >= (3, 7): - COROUTINE_ASSEMBLY.parse( - r""" - get_awaitable - load_const None - yield_from - """ - ) - - ASYNC_GEN_ASSEMBLY.parse( - r""" - setup_except @stopiter - dup_top - store_fast $__ddgen - load_attr $asend - store_fast $__ddgensend - load_fast $__ddgen - load_attr $__anext__ - call_function 0 - - loop: - get_awaitable - load_const None - yield_from - - yield: - setup_except @genexit - yield_value - pop_block - load_fast $__ddgensend - rot_two - call_function 1 - jump_absolute @loop - - genexit: - dup_top - load_const GeneratorExit - compare_op asm.Compare.EXC_MATCH - pop_jump_if_false @exc - pop_top - pop_top - pop_top - pop_top - load_fast $__ddgen - load_attr $aclose - call_function 0 - get_awaitable - load_const None - yield_from - pop_except - return_value - - exc: - pop_top - pop_top - pop_top - pop_top - load_fast $__ddgen - load_attr $athrow - load_const sys.exc_info - call_function 0 - call_function_ex 0 - get_awaitable - load_const None - yield_from - store_fast $__value - pop_except - load_fast $__value - jump_absolute @yield - - stopiter: - dup_top - 
load_const StopAsyncIteration - compare_op asm.Compare.EXC_MATCH - pop_jump_if_false @propagate - pop_top - pop_top - pop_top - pop_except - load_const None - return_value - - propagate: - end_finally - load_const None - return_value - """ - ) - - else: msg = "No async wrapping support for Python %d.%d" % PY[:2] raise RuntimeError(msg) diff --git a/ddtrace/internal/wrapping/context.py b/ddtrace/internal/wrapping/context.py index cf36a93011b..393bd097da5 100644 --- a/ddtrace/internal/wrapping/context.py +++ b/ddtrace/internal/wrapping/context.py @@ -274,7 +274,7 @@ ) -elif sys.version_info >= (3, 7): +elif sys.version_info >= (3, 8): CONTEXT_HEAD.parse( r""" load_const {context} diff --git a/ddtrace/internal/wrapping/generators.py b/ddtrace/internal/wrapping/generators.py index f2a98b42a18..9ec5a654556 100644 --- a/ddtrace/internal/wrapping/generators.py +++ b/ddtrace/internal/wrapping/generators.py @@ -383,77 +383,6 @@ """ ) - -elif PY >= (3, 7): - GENERATOR_ASSEMBLY.parse( - r""" - setup_except @stopiter - dup_top - store_fast $__ddgen - load_attr $send - store_fast $__ddgensend - load_const next - load_fast $__ddgen - - loop: - call_function 1 - - yield: - setup_except @genexit - yield_value - pop_block - load_fast $__ddgensend - rot_two - jump_absolute @loop - - genexit: - dup_top - load_const GeneratorExit - compare_op asm.Compare.EXC_MATCH - pop_jump_if_false @exc - pop_top - pop_top - pop_top - pop_top - load_fast $__ddgen - load_attr $close - call_function 0 - return_value - - exc: - pop_top - pop_top - pop_top - pop_top - load_fast $__ddgen - load_attr $throw - load_const sys.exc_info - call_function 0 - call_function_ex 0 - store_fast $__value - pop_except - load_fast $__value - jump_absolute @yield - - stopiter: - dup_top - load_const StopIteration - compare_op asm.Compare.EXC_MATCH - pop_jump_if_false @propagate - pop_top - pop_top - pop_top - pop_except - load_const None - return_value - - propagate: - end_finally - load_const None - return_value - """ - ) - else: msg = "No generator wrapping support for Python %d.%d" % PY[:2] raise RuntimeError(msg) diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index a3224a083cd..bb4f96e7814 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -7,16 +7,14 @@ from ddtrace.internal.logger import get_logger from ddtrace.llmobs._constants import INPUT_MESSAGES -from ddtrace.llmobs._constants import INPUT_TOKENS_METRIC_KEY from ddtrace.llmobs._constants import METADATA from ddtrace.llmobs._constants import METRICS from ddtrace.llmobs._constants import MODEL_NAME from ddtrace.llmobs._constants import MODEL_PROVIDER from ddtrace.llmobs._constants import OUTPUT_MESSAGES -from ddtrace.llmobs._constants import OUTPUT_TOKENS_METRIC_KEY from ddtrace.llmobs._constants import SPAN_KIND -from ddtrace.llmobs._constants import TOTAL_TOKENS_METRIC_KEY from ddtrace.llmobs._integrations.base import BaseLLMIntegration +from ddtrace.llmobs._integrations.utils import get_llmobs_metrics_tags from ddtrace.llmobs._utils import _get_attr from ddtrace.trace import Span @@ -77,7 +75,7 @@ def _llmobs_set_tags( INPUT_MESSAGES: input_messages, METADATA: parameters, OUTPUT_MESSAGES: output_messages, - METRICS: self._get_llmobs_metrics_tags(span), + METRICS: get_llmobs_metrics_tags("anthropic", span), } ) @@ -188,18 +186,3 @@ def record_usage(self, span: Span, usage: Dict[str, Any]) -> None: span.set_metric("anthropic.response.usage.output_tokens", output_tokens) if 
input_tokens is not None and output_tokens is not None: span.set_metric("anthropic.response.usage.total_tokens", input_tokens + output_tokens) - - @staticmethod - def _get_llmobs_metrics_tags(span): - usage = {} - input_tokens = span.get_metric("anthropic.response.usage.input_tokens") - output_tokens = span.get_metric("anthropic.response.usage.output_tokens") - total_tokens = span.get_metric("anthropic.response.usage.total_tokens") - - if input_tokens is not None: - usage[INPUT_TOKENS_METRIC_KEY] = input_tokens - if output_tokens is not None: - usage[OUTPUT_TOKENS_METRIC_KEY] = output_tokens - if total_tokens is not None: - usage[TOTAL_TOKENS_METRIC_KEY] = total_tokens - return usage diff --git a/ddtrace/llmobs/_integrations/bedrock.py b/ddtrace/llmobs/_integrations/bedrock.py index ac6092cbe1a..cbc1456fc24 100644 --- a/ddtrace/llmobs/_integrations/bedrock.py +++ b/ddtrace/llmobs/_integrations/bedrock.py @@ -5,18 +5,16 @@ from ddtrace.internal.logger import get_logger from ddtrace.llmobs._constants import INPUT_MESSAGES -from ddtrace.llmobs._constants import INPUT_TOKENS_METRIC_KEY from ddtrace.llmobs._constants import METADATA from ddtrace.llmobs._constants import METRICS from ddtrace.llmobs._constants import MODEL_NAME from ddtrace.llmobs._constants import MODEL_PROVIDER from ddtrace.llmobs._constants import OUTPUT_MESSAGES -from ddtrace.llmobs._constants import OUTPUT_TOKENS_METRIC_KEY from ddtrace.llmobs._constants import PARENT_ID_KEY from ddtrace.llmobs._constants import PROPAGATED_PARENT_ID_KEY from ddtrace.llmobs._constants import SPAN_KIND -from ddtrace.llmobs._constants import TOTAL_TOKENS_METRIC_KEY from ddtrace.llmobs._integrations import BaseLLMIntegration +from ddtrace.llmobs._integrations.utils import get_llmobs_metrics_tags from ddtrace.llmobs._utils import _get_llmobs_parent_id from ddtrace.trace import Span @@ -57,22 +55,11 @@ def _llmobs_set_tags( MODEL_PROVIDER: span.get_tag("bedrock.request.model_provider") or "", INPUT_MESSAGES: input_messages, METADATA: parameters, - METRICS: self._llmobs_metrics(span, response), + METRICS: get_llmobs_metrics_tags("bedrock", span), OUTPUT_MESSAGES: output_messages, } ) - @staticmethod - def _llmobs_metrics(span: Span, response: Optional[Dict[str, Any]]) -> Dict[str, Any]: - metrics = {} - if response and response.get("text"): - prompt_tokens = int(span.get_tag("bedrock.usage.prompt_tokens") or 0) - completion_tokens = int(span.get_tag("bedrock.usage.completion_tokens") or 0) - metrics[INPUT_TOKENS_METRIC_KEY] = prompt_tokens - metrics[OUTPUT_TOKENS_METRIC_KEY] = completion_tokens - metrics[TOTAL_TOKENS_METRIC_KEY] = prompt_tokens + completion_tokens - return metrics - @staticmethod def _extract_input_message(prompt): """Extract input messages from the stored prompt. 
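The Anthropic- and Bedrock-specific token-metric helpers removed above are consolidated into the shared get_llmobs_metrics_tags() helper added in the ddtrace/llmobs/_integrations/utils.py hunk further down. A minimal self-contained sketch of the consolidated lookup; _StubSpan is a hypothetical stand-in for ddtrace's Span, and the metric-key values are assumed to mirror ddtrace.llmobs._constants::

    INPUT_TOKENS_METRIC_KEY = "input_tokens"  # assumed constant values
    OUTPUT_TOKENS_METRIC_KEY = "output_tokens"
    TOTAL_TOKENS_METRIC_KEY = "total_tokens"

    class _StubSpan:
        """Hypothetical stand-in exposing Span's get_metric()/get_tag()."""

        def __init__(self, metrics=None, tags=None):
            self._metrics = metrics or {}
            self._tags = tags or {}

        def get_metric(self, key):
            return self._metrics.get(key)

        def get_tag(self, key):
            return self._tags.get(key)

    def get_llmobs_metrics_tags(integration_name, span):
        usage = {}
        if integration_name == "bedrock":
            # bedrock records usage as span tags instead of metrics
            input_tokens = int(span.get_tag("bedrock.usage.prompt_tokens") or 0)
            output_tokens = int(span.get_tag("bedrock.usage.completion_tokens") or 0)
            if input_tokens:
                usage[INPUT_TOKENS_METRIC_KEY] = input_tokens
            if output_tokens:
                usage[OUTPUT_TOKENS_METRIC_KEY] = output_tokens
            if input_tokens + output_tokens:
                usage[TOTAL_TOKENS_METRIC_KEY] = input_tokens + output_tokens
            return usage
        # other integrations report either prompt/completion or input/output names
        input_tokens = span.get_metric("%s.response.usage.prompt_tokens" % integration_name) or span.get_metric(
            "%s.response.usage.input_tokens" % integration_name
        )
        output_tokens = span.get_metric("%s.response.usage.completion_tokens" % integration_name) or span.get_metric(
            "%s.response.usage.output_tokens" % integration_name
        )
        total_tokens = span.get_metric("%s.response.usage.total_tokens" % integration_name)
        if input_tokens is not None:
            usage[INPUT_TOKENS_METRIC_KEY] = input_tokens
        if output_tokens is not None:
            usage[OUTPUT_TOKENS_METRIC_KEY] = output_tokens
        if total_tokens is not None:
            usage[TOTAL_TOKENS_METRIC_KEY] = total_tokens
        return usage

    span = _StubSpan(metrics={"anthropic.response.usage.input_tokens": 12,
                              "anthropic.response.usage.output_tokens": 34,
                              "anthropic.response.usage.total_tokens": 46})
    assert get_llmobs_metrics_tags("anthropic", span) == {
        "input_tokens": 12, "output_tokens": 34, "total_tokens": 46}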
diff --git a/ddtrace/llmobs/_integrations/gemini.py b/ddtrace/llmobs/_integrations/gemini.py index ecec71e0645..0407ec7188b 100644 --- a/ddtrace/llmobs/_integrations/gemini.py +++ b/ddtrace/llmobs/_integrations/gemini.py @@ -14,7 +14,7 @@ from ddtrace.llmobs._constants import SPAN_KIND from ddtrace.llmobs._integrations.base import BaseLLMIntegration from ddtrace.llmobs._integrations.utils import extract_message_from_part_google -from ddtrace.llmobs._integrations.utils import get_llmobs_metrics_tags_google +from ddtrace.llmobs._integrations.utils import get_llmobs_metrics_tags from ddtrace.llmobs._integrations.utils import get_system_instructions_from_google_model from ddtrace.llmobs._integrations.utils import llmobs_get_metadata_google from ddtrace.llmobs._utils import _get_attr @@ -59,7 +59,7 @@ def _llmobs_set_tags( METADATA: metadata, INPUT_MESSAGES: input_messages, OUTPUT_MESSAGES: output_messages, - METRICS: get_llmobs_metrics_tags_google("google_generativeai", span), + METRICS: get_llmobs_metrics_tags("google_generativeai", span), } ) diff --git a/ddtrace/llmobs/_integrations/langchain.py b/ddtrace/llmobs/_integrations/langchain.py index c6a77fad3bc..d380c6ab7a8 100644 --- a/ddtrace/llmobs/_integrations/langchain.py +++ b/ddtrace/llmobs/_integrations/langchain.py @@ -6,8 +6,6 @@ from typing import Optional from typing import Union -from ddtrace import config -from ddtrace.constants import ERROR_TYPE from ddtrace.internal.logger import get_logger from ddtrace.internal.utils import ArgumentError from ddtrace.internal.utils import get_argument_value @@ -454,54 +452,6 @@ def _set_base_span_tags( # type: ignore[override] else: span.set_tag_str(API_KEY, api_key) - @classmethod - def _logs_tags(cls, span: Span) -> str: - api_key = span.get_tag(API_KEY) or "" - tags = "env:%s,version:%s,%s:%s,%s:%s,%s:%s,%s:%s" % ( # noqa: E501 - (config.env or ""), - (config.version or ""), - PROVIDER, - (span.get_tag(PROVIDER) or ""), - MODEL, - (span.get_tag(MODEL) or ""), - TYPE, - (span.get_tag(TYPE) or ""), - API_KEY, - api_key, - ) - return tags - - @classmethod - def _metrics_tags(cls, span: Span) -> List[str]: - provider = span.get_tag(PROVIDER) or "" - api_key = span.get_tag(API_KEY) or "" - tags = [ - "version:%s" % (config.version or ""), - "env:%s" % (config.env or ""), - "service:%s" % (span.service or ""), - "%s:%s" % (PROVIDER, provider), - "%s:%s" % (MODEL, span.get_tag(MODEL) or ""), - "%s:%s" % (TYPE, span.get_tag(TYPE) or ""), - "%s:%s" % (API_KEY, api_key), - "error:%d" % span.error, - ] - err_type = span.get_tag(ERROR_TYPE) - if err_type: - tags.append("%s:%s" % (ERROR_TYPE, err_type)) - return tags - - def record_usage(self, span: Span, usage: Dict[str, Any]) -> None: - if not usage or self.metrics_enabled is False: - return - for token_type in ("prompt", "completion", "total"): - num_tokens = usage.get("token_usage", {}).get(token_type + "_tokens") - if not num_tokens: - continue - self.metric(span, "dist", "tokens.%s" % token_type, num_tokens) - total_cost = span.get_metric(TOTAL_COST) - if total_cost: - self.metric(span, "incr", "tokens.total_cost", total_cost) - def check_token_usage_chat_or_llm_result(self, result): """Checks for token usage on the top-level ChatResult or LLMResult object""" llm_output = getattr(result, "llm_output", {}) diff --git a/ddtrace/llmobs/_integrations/openai.py b/ddtrace/llmobs/_integrations/openai.py index 7ed3aace08a..eb01a679191 100644 --- a/ddtrace/llmobs/_integrations/openai.py +++ b/ddtrace/llmobs/_integrations/openai.py @@ -5,7 +5,6 @@ from 
typing import Optional from typing import Tuple -from ddtrace import config from ddtrace.internal.constants import COMPONENT from ddtrace.internal.utils.version import parse_version from ddtrace.llmobs._constants import INPUT_DOCUMENTS @@ -21,6 +20,7 @@ from ddtrace.llmobs._constants import SPAN_KIND from ddtrace.llmobs._constants import TOTAL_TOKENS_METRIC_KEY from ddtrace.llmobs._integrations.base import BaseLLMIntegration +from ddtrace.llmobs._integrations.utils import get_llmobs_metrics_tags from ddtrace.llmobs._utils import _get_attr from ddtrace.llmobs.utils import Document from ddtrace.trace import Pin @@ -88,54 +88,14 @@ def _is_azure_openai(span): return False return "azure" in base_url.lower() - @classmethod - def _logs_tags(cls, span: Span) -> str: - tags = ( - "env:%s,version:%s,openai.request.endpoint:%s,openai.request.method:%s,openai.request.model:%s,openai.organization.name:%s," - "openai.user.api_key:%s" - % ( # noqa: E501 - (config.env or ""), - (config.version or ""), - (span.get_tag("openai.request.endpoint") or ""), - (span.get_tag("openai.request.method") or ""), - (span.get_tag("openai.request.model") or ""), - (span.get_tag("openai.organization.name") or ""), - (span.get_tag("openai.user.api_key") or ""), - ) - ) - return tags - - @classmethod - def _metrics_tags(cls, span: Span) -> List[str]: - model_name = span.get_tag("openai.request.model") or "" - tags = [ - "version:%s" % (config.version or ""), - "env:%s" % (config.env or ""), - "service:%s" % (span.service or ""), - "openai.request.model:%s" % model_name, - "model:%s" % model_name, - "openai.request.endpoint:%s" % (span.get_tag("openai.request.endpoint") or ""), - "openai.request.method:%s" % (span.get_tag("openai.request.method") or ""), - "openai.organization.id:%s" % (span.get_tag("openai.organization.id") or ""), - "openai.organization.name:%s" % (span.get_tag("openai.organization.name") or ""), - "openai.user.api_key:%s" % (span.get_tag("openai.user.api_key") or ""), - "error:%d" % span.error, - ] - err_type = span.get_tag("error.type") - if err_type: - tags.append("error_type:%s" % err_type) - return tags - def record_usage(self, span: Span, usage: Dict[str, Any]) -> None: - if not usage or not self.metrics_enabled: + if not usage: return - tags = ["openai.estimated:false"] for token_type in ("prompt", "completion", "total"): num_tokens = getattr(usage, token_type + "_tokens", None) if not num_tokens: continue span.set_metric("openai.response.usage.%s_tokens" % token_type, num_tokens) - self.metric(span, "dist", "tokens.%s" % token_type, num_tokens, tags=tags) def _llmobs_set_tags( self, @@ -275,12 +235,4 @@ def _extract_llmobs_metrics_tags(span: Span, resp: Any) -> Dict[str, Any]: OUTPUT_TOKENS_METRIC_KEY: completion_tokens, TOTAL_TOKENS_METRIC_KEY: prompt_tokens + completion_tokens, } - prompt_tokens = span.get_metric("openai.response.usage.prompt_tokens") - completion_tokens = span.get_metric("openai.response.usage.completion_tokens") - if prompt_tokens is None or completion_tokens is None: - return {} - return { - INPUT_TOKENS_METRIC_KEY: prompt_tokens, - OUTPUT_TOKENS_METRIC_KEY: completion_tokens, - TOTAL_TOKENS_METRIC_KEY: prompt_tokens + completion_tokens, - } + return get_llmobs_metrics_tags("openai", span) diff --git a/ddtrace/llmobs/_integrations/utils.py b/ddtrace/llmobs/_integrations/utils.py index f180e0c1820..331b4e3062d 100644 --- a/ddtrace/llmobs/_integrations/utils.py +++ b/ddtrace/llmobs/_integrations/utils.py @@ -118,10 +118,29 @@ def extract_message_from_part_google(part, 
role=None): return message -def get_llmobs_metrics_tags_google(integration_name, span): +def get_llmobs_metrics_tags(integration_name, span): usage = {} - input_tokens = span.get_metric("%s.response.usage.prompt_tokens" % integration_name) - output_tokens = span.get_metric("%s.response.usage.completion_tokens" % integration_name) + + # bedrock integration tags usage under meta instead of metrics + if integration_name == "bedrock": + input_tokens = int(span.get_tag("bedrock.usage.prompt_tokens") or 0) + output_tokens = int(span.get_tag("bedrock.usage.completion_tokens") or 0) + total_tokens = input_tokens + output_tokens + if input_tokens: + usage[INPUT_TOKENS_METRIC_KEY] = input_tokens + if output_tokens: + usage[OUTPUT_TOKENS_METRIC_KEY] = output_tokens + if total_tokens: + usage[TOTAL_TOKENS_METRIC_KEY] = total_tokens + return usage + + # check for both prompt / completion or input / output tokens + input_tokens = span.get_metric("%s.response.usage.prompt_tokens" % integration_name) or span.get_metric( + "%s.response.usage.input_tokens" % integration_name + ) + output_tokens = span.get_metric("%s.response.usage.completion_tokens" % integration_name) or span.get_metric( + "%s.response.usage.output_tokens" % integration_name + ) total_tokens = span.get_metric("%s.response.usage.total_tokens" % integration_name) if input_tokens is not None: diff --git a/ddtrace/llmobs/_integrations/vertexai.py b/ddtrace/llmobs/_integrations/vertexai.py index 88d38f1975e..db40ac15b19 100644 --- a/ddtrace/llmobs/_integrations/vertexai.py +++ b/ddtrace/llmobs/_integrations/vertexai.py @@ -15,7 +15,7 @@ from ddtrace.llmobs._constants import SPAN_KIND from ddtrace.llmobs._integrations.base import BaseLLMIntegration from ddtrace.llmobs._integrations.utils import extract_message_from_part_google -from ddtrace.llmobs._integrations.utils import get_llmobs_metrics_tags_google +from ddtrace.llmobs._integrations.utils import get_llmobs_metrics_tags from ddtrace.llmobs._integrations.utils import get_system_instructions_from_google_model from ddtrace.llmobs._integrations.utils import llmobs_get_metadata_google from ddtrace.llmobs._utils import _get_attr @@ -65,7 +65,7 @@ def _llmobs_set_tags( METADATA: metadata, INPUT_MESSAGES: input_messages, OUTPUT_MESSAGES: output_messages, - METRICS: get_llmobs_metrics_tags_google("vertexai", span), + METRICS: get_llmobs_metrics_tags("vertexai", span), } ) diff --git a/ddtrace/opentracer/tracer.py b/ddtrace/opentracer/tracer.py index ca10cb8125a..65d1b95b314 100644 --- a/ddtrace/opentracer/tracer.py +++ b/ddtrace/opentracer/tracer.py @@ -18,7 +18,6 @@ from ddtrace.trace import Context as DatadogContext # noqa:F401 from ddtrace.trace import Span as DatadogSpan from ddtrace.trace import Tracer as DatadogTracer -from ddtrace.vendor.debtcollector import deprecate from ..internal.logger import get_logger from .propagation import HTTPPropagator @@ -55,7 +54,7 @@ def __init__( service_name: Optional[str] = None, config: Optional[Dict[str, Any]] = None, scope_manager: Optional[ScopeManager] = None, - dd_tracer: Optional[DatadogTracer] = None, + _dd_tracer: Optional[DatadogTracer] = None, ) -> None: """Initialize a new Datadog opentracer. @@ -70,9 +69,6 @@ def __init__( here: https://github.com/opentracing/opentracing-python#scope-managers. If ``None`` is provided, defaults to :class:`opentracing.scope_managers.ThreadLocalScopeManager`. - :param dd_tracer: (optional) the Datadog tracer for this tracer to use. This - parameter is deprecated and will be removed in v3.0.0. 
The - global tracer (``ddtrace.tracer``) should always be used. """ # Merge the given config with the default into a new dict self._config = DEFAULT_CONFIG.copy() @@ -100,14 +96,7 @@ def __init__( self._scope_manager = scope_manager or ThreadLocalScopeManager() dd_context_provider = get_context_provider_for_scope_manager(self._scope_manager) - if dd_tracer is not None: - deprecate( - "The ``dd_tracer`` parameter is deprecated", - message="The global tracer (``ddtrace.tracer``) will be used instead.", - removal_version="3.0.0", - ) - - self._dd_tracer = dd_tracer or ddtrace.tracer + self._dd_tracer = _dd_tracer or ddtrace.tracer self._dd_tracer.set_tags(self._config.get(keys.GLOBAL_TAGS)) # type: ignore[arg-type] trace_processors = None if keys.SETTINGS in self._config: @@ -121,7 +110,7 @@ def __init__( trace_processors=trace_processors, priority_sampling=self._config.get(keys.PRIORITY_SAMPLING), uds_path=self._config.get(keys.UDS_PATH), - context_provider=dd_context_provider, # type: ignore[arg-type] + context_provider=dd_context_provider, ) self._propagators = { Format.HTTP_HEADERS: HTTPPropagator, diff --git a/ddtrace/settings/_config.py b/ddtrace/settings/_config.py index 35d2849884d..0072986286e 100644 --- a/ddtrace/settings/_config.py +++ b/ddtrace/settings/_config.py @@ -16,8 +16,6 @@ from ddtrace.internal.serverless import in_gcp_function from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.utils.cache import cachedmethod -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning -from ddtrace.vendor.debtcollector import deprecate from .._trace.pin import Pin from ..internal import gitmetadata @@ -264,9 +262,11 @@ def _parse_global_tags(s): def _default_config() -> Dict[str, _ConfigItem]: return { + # Remove the _trace_sample_rate property; _trace_sampling_rules should be the source of truth "_trace_sample_rate": _ConfigItem( default=1.0, - envs=[("DD_TRACE_SAMPLE_RATE", float)], + # trace_sample_rate is a placeholder; this code will be removed after v3.0 + envs=[("trace_sample_rate", float)], ), "_trace_sampling_rules": _ConfigItem( default=lambda: "", @@ -352,14 +352,6 @@ def __init__(self): self._from_endpoint = ENDPOINT_FETCHED_CONFIG self._config = _default_config() - sample_rate = os.getenv("DD_TRACE_SAMPLE_RATE") - if sample_rate is not None: - deprecate( - "DD_TRACE_SAMPLE_RATE is deprecated", - message="Please use DD_TRACE_SAMPLING_RULES instead.", - removal_version="3.0.0", - ) - # Use a dict as underlying storing mechanism for integration configs self._integration_configs = {} @@ -368,9 +360,6 @@ def __init__(self): rate_limit = os.getenv("DD_TRACE_RATE_LIMIT") if rate_limit is not None and self._trace_sampling_rules in ("", "[]"): - # This warning will be logged when DD_TRACE_SAMPLE_RATE is set. This is intentional. - # Even though DD_TRACE_SAMPLE_RATE is treated as a global trace sampling rule, this configuration - # is deprecated. We should always encourage users to set DD_TRACE_SAMPLING_RULES instead. log.warning( "DD_TRACE_RATE_LIMIT is set to %s and DD_TRACE_SAMPLING_RULES is not set. " "Tracer rate limiting is only applied to spans that match tracer sampling rules.
" @@ -388,13 +377,9 @@ def __init__(self): ) self._trace_api = _get_config("DD_TRACE_API_VERSION") if self._trace_api == "v0.3": - deprecate( - "DD_TRACE_API_VERSION=v0.3 is deprecated", - message="Traces will be submitted to the v0.4/traces agent endpoint instead.", - removal_version="3.0.0", - category=DDTraceDeprecationWarning, + log.error( + "Setting DD_TRACE_API_VERSION to ``v0.3`` is not supported. The default ``v0.5`` format will be used.", ) - self._trace_api = "v0.4" self._trace_writer_buffer_size = _get_config("DD_TRACE_WRITER_BUFFER_SIZE_BYTES", DEFAULT_BUFFER_SIZE, int) self._trace_writer_payload_size = _get_config( "DD_TRACE_WRITER_MAX_PAYLOAD_SIZE_BYTES", DEFAULT_MAX_PAYLOAD_SIZE, int @@ -418,18 +403,8 @@ def __init__(self): self._span_traceback_max_size = _get_config("DD_TRACE_SPAN_TRACEBACK_MAX_SIZE", 30, int) - # Master switch for turning on and off trace search by default - # this weird invocation of getenv is meant to read the DD_ANALYTICS_ENABLED - # legacy environment variable. It should be removed in the future - self._analytics_enabled = _get_config(["DD_TRACE_ANALYTICS_ENABLED", "DD_ANALYTICS_ENABLED"], False, asbool) - if self._analytics_enabled: - deprecate( - "Datadog App Analytics is deprecated and will be removed in a future version. " - "App Analytics can be enabled via DD_TRACE_ANALYTICS_ENABLED and DD_ANALYTICS_ENABLED " - "environment variables and ddtrace.config.analytics_enabled configuration. " - "These configurations will also be removed.", - category=DDTraceDeprecationWarning, - ) + # DD_ANALYTICS_ENABLED is not longer supported, remove this functionatiy from all integrations in the future + self._analytics_enabled = False self._client_ip_header = _get_config("DD_TRACE_CLIENT_IP_HEADER") self._retrieve_client_ip = _get_config("DD_TRACE_CLIENT_IP_ENABLED", False, asbool) @@ -477,14 +452,6 @@ def __init__(self): self._128_bit_trace_id_enabled = _get_config("DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED", True, asbool) self._128_bit_trace_id_logging_enabled = _get_config("DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED", False, asbool) - if self._128_bit_trace_id_logging_enabled: - deprecate( - "Using DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED is deprecated.", - message="Log injection format is now configured automatically.", - removal_version="3.0.0", - category=DDTraceDeprecationWarning, - ) - self._sampling_rules = _get_config("DD_SPAN_SAMPLING_RULES") self._sampling_rules_file = _get_config("DD_SPAN_SAMPLING_RULES_FILE") @@ -536,18 +503,7 @@ def __init__(self): ["DD_TRACE_COMPUTE_STATS", "DD_TRACE_STATS_COMPUTATION_ENABLED"], trace_compute_stats_default, asbool ) self._data_streams_enabled = _get_config("DD_DATA_STREAMS_ENABLED", False, asbool) - - legacy_client_tag_enabled = _get_config("DD_HTTP_CLIENT_TAG_QUERY_STRING") - if legacy_client_tag_enabled is None: - self._http_client_tag_query_string = _get_config("DD_TRACE_HTTP_CLIENT_TAG_QUERY_STRING", "true") - else: - deprecate( - "DD_HTTP_CLIENT_TAG_QUERY_STRING is deprecated", - message="Please use DD_TRACE_HTTP_CLIENT_TAG_QUERY_STRING instead.", - removal_version="3.0.0", - category=DDTraceDeprecationWarning, - ) - self._http_client_tag_query_string = legacy_client_tag_enabled.lower() + self._http_client_tag_query_string = _get_config("DD_TRACE_HTTP_CLIENT_TAG_QUERY_STRING", "true") dd_trace_obfuscation_query_string_regexp = _get_config( "DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP", DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP_DEFAULT @@ -577,15 +533,8 @@ def __init__(self): # 
https://github.com/open-telemetry/opentelemetry-python/blob/v1.16.0/opentelemetry-api/src/opentelemetry/context/__init__.py#L53 os.environ["OTEL_PYTHON_CONTEXT"] = "ddcontextvars_context" self._subscriptions = [] # type: List[Tuple[List[str], Callable[[Config, List[str]], None]]] - self._span_aggregator_rlock = _get_config("DD_TRACE_SPAN_AGGREGATOR_RLOCK", True, asbool) - if self._span_aggregator_rlock is False: - deprecate( - "DD_TRACE_SPAN_AGGREGATOR_RLOCK is deprecated", - message="Soon the ddtrace library will only support using threading.Rlock to " - "aggregate and encode span data. If you need to disable the re-entrant lock and " - "revert to using threading.Lock, please contact Datadog support.", - removal_version="3.0.0", - ) + # Disabling the span aggregator RLock is not supported; remove this configuration in the future + self._span_aggregator_rlock = True self._trace_methods = _get_config("DD_TRACE_METHODS") diff --git a/ddtrace/settings/_otel_remapper.py index ec238e8a3cb..e495f783cd3 100644 --- a/ddtrace/settings/_otel_remapper.py +++ b/ddtrace/settings/_otel_remapper.py @@ -52,12 +52,16 @@ def _remap_traces_sampler(otel_value: str) -> Optional[str]: otel_value, ) otel_value = f"parentbased_{otel_value}" + rate = None if otel_value == "parentbased_always_on": - return "1.0" + rate = "1.0" elif otel_value == "parentbased_always_off": - return "0.0" + rate = "0.0" elif otel_value == "parentbased_traceidratio": - return os.environ.get("OTEL_TRACES_SAMPLER_ARG", "1") + rate = os.environ.get("OTEL_TRACES_SAMPLER_ARG", "1") + + if rate is not None: + return f'[{{"sample_rate":{rate}}}]' return None @@ -130,7 +134,7 @@ def _remap_default(otel_value: str) -> Optional[str]: "OTEL_SERVICE_NAME": ("DD_SERVICE", _remap_default), "OTEL_LOG_LEVEL": ("DD_TRACE_DEBUG", _remap_otel_log_level), "OTEL_PROPAGATORS": ("DD_TRACE_PROPAGATION_STYLE", _remap_otel_propagators), - "OTEL_TRACES_SAMPLER": ("DD_TRACE_SAMPLE_RATE", _remap_traces_sampler), + "OTEL_TRACES_SAMPLER": ("DD_TRACE_SAMPLING_RULES", _remap_traces_sampler), "OTEL_TRACES_EXPORTER": ("DD_TRACE_ENABLED", _remap_traces_exporter), "OTEL_METRICS_EXPORTER": ("DD_RUNTIME_METRICS_ENABLED", _remap_metrics_exporter), "OTEL_LOGS_EXPORTER": ("", _validate_logs_exporter), # Does not set a DDTRACE environment variable. 
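For context on the remapper hunk above: OTEL_TRACES_SAMPLER now feeds DD_TRACE_SAMPLING_RULES (a JSON list of rules) instead of the removed DD_TRACE_SAMPLE_RATE. The following is a minimal, standalone sketch of that translation; the function name is illustrative only and is not part of the ddtrace API.

    import os
    from typing import Optional

    def remap_otel_sampler(sampler: str, default_arg: str = "1") -> Optional[str]:
        # Mirrors the hunk above: parent-based samplers collapse to a single
        # global sample-rate rule expressed as DD_TRACE_SAMPLING_RULES JSON.
        if not sampler.startswith("parentbased_"):
            sampler = f"parentbased_{sampler}"
        rate = {
            "parentbased_always_on": "1.0",
            "parentbased_always_off": "0.0",
            "parentbased_traceidratio": os.environ.get("OTEL_TRACES_SAMPLER_ARG", default_arg),
        }.get(sampler)
        if rate is None:
            return None
        # Doubled braces render as literal braces in the f-string.
        return f'[{{"sample_rate":{rate}}}]'

    # e.g. with OTEL_TRACES_SAMPLER_ARG=0.25, remap_otel_sampler("traceidratio")
    # returns '[{"sample_rate":0.25}]', which ddtrace reads as a sampling rule.
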
diff --git a/ddtrace/settings/config.py b/ddtrace/settings/config.py deleted file mode 100644 index 00c0ee9917c..00000000000 --- a/ddtrace/settings/config.py +++ /dev/null @@ -1,11 +0,0 @@ -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning -from ddtrace.settings._config import * # noqa: F403 -from ddtrace.vendor.debtcollector import deprecate - - -deprecate( - "The ddtrace.settings.config module is deprecated", - message="Access the global configuration using ``ddtrace.config``.", - category=DDTraceDeprecationWarning, - removal_version="3.0.0", -) diff --git a/ddtrace/settings/integration.py b/ddtrace/settings/integration.py index 354e99f7625..eef7f5c81c6 100644 --- a/ddtrace/settings/integration.py +++ b/ddtrace/settings/integration.py @@ -1,13 +1,8 @@ import os from typing import Optional # noqa:F401 -from typing import Tuple # noqa:F401 - -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning -from ddtrace.vendor.debtcollector import deprecate from .._hooks import Hooks from ..internal.utils.attrdict import AttrDict -from ..internal.utils.formats import asbool from .http import HttpConfig @@ -43,9 +38,10 @@ def __init__(self, global_config, name, *args, **kwargs): object.__setattr__(self, "hooks", Hooks()) object.__setattr__(self, "http", HttpConfig()) - analytics_enabled, analytics_sample_rate = self._get_analytics_settings() - self.setdefault("analytics_enabled", analytics_enabled) - self.setdefault("analytics_sample_rate", float(analytics_sample_rate)) + # Trace Analytics was removed in v3.0.0 + # TODO(munir): Remove all references to analytics_enabled and analytics_sample_rate + self.setdefault("analytics_enabled", False) + self.setdefault("analytics_sample_rate", 1.0) service = os.getenv( "DD_%s_SERVICE" % name.upper(), default=os.getenv( @@ -65,33 +61,6 @@ def __init__(self, global_config, name, *args, **kwargs): self.get_http_tag_query_string(getattr(self, "default_http_tag_query_string", None)), ) - def _get_analytics_settings(self): - # type: () -> Tuple[Optional[bool], float] - # Set default analytics configuration, default is disabled - # DEV: Default to `None` which means do not set this key - # Inject environment variables for integration - env = "DD_TRACE_%s_ANALYTICS_ENABLED" % self.integration_name.upper() - legacy_env = "DD_%s_ANALYTICS_ENABLED" % self.integration_name.upper() - analytics_enabled = asbool(os.getenv(env, os.getenv(legacy_env, default=None))) - - if analytics_enabled: - deprecate( - "Datadog App Analytics is deprecated. " - f"App Analytics can be enabled via {env} and {legacy_env} " - f"environment variables and the ddtrace.config.{self.integration_name}.analytics_enabled configuration." 
- " This feature and its associated configurations will be removed in a future release.", - category=DDTraceDeprecationWarning, - ) - - analytics_sample_rate = float( - os.getenv( - "DD_TRACE_%s_ANALYTICS_SAMPLE_RATE" % self.integration_name.upper(), - os.getenv("DD_%s_ANALYTICS_SAMPLE_RATE" % self.integration_name.upper(), default=1.0), - ) - ) - - return analytics_enabled, analytics_sample_rate - def get_http_tag_query_string(self, value): if self.global_config._http_tag_query_string: dd_http_server_tag_query_string = value if value else os.getenv("DD_HTTP_SERVER_TAG_QUERY_STRING", "true") diff --git a/docker-compose.yml b/docker-compose.yml index dfcee9a54ce..642575a19d7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -206,5 +206,23 @@ services: - DD_TRACE_AGENT_URL=http://testagent:8126 - DD_IAST_DEDUPLICATION_ENABLED=false + valkey: + image: valkey/valkey:8.0-alpine + ports: + - "127.0.0.1:6379:6379" + + valkeycluster: + platform: linux/amd64 + image: grokzen/redis-cluster:6.2.0 + environment: + - IP=0.0.0.0 + ports: + - "127.0.0.1:7000:7000" + - "127.0.0.1:7001:7001" + - "127.0.0.1:7002:7002" + - "127.0.0.1:7003:7003" + - "127.0.0.1:7004:7004" + - "127.0.0.1:7005:7005" + volumes: ddagent: diff --git a/docs/advanced_usage.rst b/docs/advanced_usage.rst index 5fe21321680..e6ead60c1f5 100644 --- a/docs/advanced_usage.rst +++ b/docs/advanced_usage.rst @@ -1,28 +1,6 @@ Advanced Usage ============== -.. _agentconfiguration: - -Agent Configuration -------------------- - -If the Datadog Agent is on a separate host from your application, you can modify -the default ``ddtrace.tracer`` object to utilize another hostname and port. Here -is a small example showcasing this:: - - from ddtrace.trace import tracer - - tracer.configure(hostname=, port=, https=) - -By default, these will be set to ``localhost``, ``8126``, and ``False`` respectively. - -You can also use a Unix Domain Socket to connect to the agent:: - - from ddtrace.trace import tracer - - tracer.configure(uds_path="/path/to/socket") - - .. _context: @@ -223,7 +201,7 @@ provider can be used. It must implement the :class:`ddtrace.trace.BaseContextProvider` interface and can be configured with:: - tracer.configure(context_provider=MyContextProvider) + tracer.configure(context_provider=MyContextProvider()) .. _disttracing: diff --git a/docs/configuration.rst b/docs/configuration.rst index f45ac992582..6f5c87a945e 100644 --- a/docs/configuration.rst +++ b/docs/configuration.rst @@ -817,6 +817,7 @@ Sampling version_added: v0.33.0: v2.15.0: Only applied when DD_TRACE_SAMPLE_RATE, DD_TRACE_SAMPLING_RULES, or DD_SPAN_SAMPLING_RULE are set. + v3.0.0: Only applied when DD_TRACE_SAMPLING_RULES or DD_SPAN_SAMPLING_RULE are set. DD_TRACE_SAMPLING_RULES: type: JSON array diff --git a/docs/index.rst b/docs/index.rst index dbe0d61bb7d..d71419dc03e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -174,6 +174,8 @@ contacting support. 
+--------------------------------------------------+---------------+----------------+ | :ref:`urllib3` | >= 1.25.8 | No | +--------------------------------------------------+---------------+----------------+ +| :ref:`valkey` | >= 6.0.0 | Yes | ++--------------------------------------------------+---------------+----------------+ | :ref:`vertexai` | >= 1.71.1 | Yes | +--------------------------------------------------+---------------+----------------+ | :ref:`vertica` | >= 0.6 | Yes | diff --git a/docs/integrations.rst b/docs/integrations.rst index 0566ab7fa9c..d14c03bcfb7 100644 --- a/docs/integrations.rst +++ b/docs/integrations.rst @@ -485,6 +485,13 @@ urllib3 .. automodule:: ddtrace.contrib._urllib3 +.. _valkey: + +valkey +^^^^^^ +.. automodule:: ddtrace.contrib.valkey + + .. _vertexai: vertexai diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index ff2cfc09c6d..913ad7f8319 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -301,6 +301,7 @@ urls username uvicorn uWSGI +valkey vendored versioned vertexai diff --git a/hatch.toml b/hatch.toml index 3e80f24a5e7..74dcba41602 100644 --- a/hatch.toml +++ b/hatch.toml @@ -399,6 +399,13 @@ flask = ["~=2.2"] python = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] flask = ["~=3.0"] +[[envs.appsec_integrations_flask.matrix]] +# werkzeug 3.1 drops support for py3.8 +python = ["3.11", "3.12", "3.13"] +flask = ["~=3.1"] +werkzeug = ["~=3.1"] + +## ASM appsec_integrations_fastapi [envs.appsec_integrations_fastapi] template = "appsec_integrations_fastapi" diff --git a/lib-injection/sources/sitecustomize.py b/lib-injection/sources/sitecustomize.py index 32ab1c31ff3..0daa9c2413a 100644 --- a/lib-injection/sources/sitecustomize.py +++ b/lib-injection/sources/sitecustomize.py @@ -35,7 +35,7 @@ def parse_version(version): SCRIPT_DIR = os.path.dirname(__file__) RUNTIMES_ALLOW_LIST = { "cpython": { - "min": Version(version=(3, 7), constraint=""), + "min": Version(version=(3, 8), constraint=""), "max": Version(version=(3, 13), constraint=""), } } diff --git a/min_compatible_versions.csv b/min_compatible_versions.csv index c7366036a89..382aec7fd6a 100644 --- a/min_compatible_versions.csv +++ b/min_compatible_versions.csv @@ -182,6 +182,7 @@ typing-extensions,0 typing_extensions,0 urllib3,~=1.0 uwsgi,0 +valkey,~=6.0.0 vcrpy,==4.2.1 vertexai,0 vertica-python,>=0.6.0 diff --git a/releasenotes/notes/add-valkey-support-6cc9f41351dc0cd9.yaml b/releasenotes/notes/add-valkey-support-6cc9f41351dc0cd9.yaml new file mode 100644 index 00000000000..945f8eb6ec9 --- /dev/null +++ b/releasenotes/notes/add-valkey-support-6cc9f41351dc0cd9.yaml @@ -0,0 +1,3 @@ +features: + - | + valkey: adds automatic instrumentation of the Valkey package. Thank you [AhmadMasry](https://github.com/AhmadMasry)! \ No newline at end of file diff --git a/releasenotes/notes/add_aiohttp_memory_leak_flag-66005f987dbfbd47.yaml b/releasenotes/notes/add_aiohttp_memory_leak_flag-66005f987dbfbd47.yaml new file mode 100644 index 00000000000..67ef6980a36 --- /dev/null +++ b/releasenotes/notes/add_aiohttp_memory_leak_flag-66005f987dbfbd47.yaml @@ -0,0 +1,5 @@ +--- + +fixes: + - | + aiohttp: Adds the environment variable ``DD_AIOHTTP_CLIENT_DISABLE_STREAM_TIMING_FOR_MEM_LEAK`` to address a potential memory leak in the aiohttp integration. When set to true, this flag may cause streamed response span timing to be inaccurate. The flag defaults to false. 
\ No newline at end of file diff --git a/releasenotes/notes/deprecate-langchain-openai-cost-042f3a04cda9d23b.yaml b/releasenotes/notes/deprecate-langchain-openai-cost-042f3a04cda9d23b.yaml new file mode 100644 index 00000000000..c2c589693ab --- /dev/null +++ b/releasenotes/notes/deprecate-langchain-openai-cost-042f3a04cda9d23b.yaml @@ -0,0 +1,5 @@ +--- +upgrade: + - | + langchain: Removes the `langchain.tokens.total_cost` span metric for OpenAI calls. + For continued cost estimation of OpenAI calls, enable `LLM Observability `_. diff --git a/releasenotes/notes/drop-multitracer-support-ec109486f71c9c62.yaml b/releasenotes/notes/drop-multitracer-support-ec109486f71c9c62.yaml new file mode 100644 index 00000000000..79a86b59d05 --- /dev/null +++ b/releasenotes/notes/drop-multitracer-support-ec109486f71c9c62.yaml @@ -0,0 +1,4 @@ +--- +upgrade: + - | + tracing: Drops support for multiple Tracer instances in the same process. Use ``ddtrace.trace.tracer`` to access the global tracer instance. \ No newline at end of file diff --git a/releasenotes/notes/graphql-error-span-events-add-extensions-5eece423cc8ff93e.yaml b/releasenotes/notes/graphql-error-span-events-add-extensions-5eece423cc8ff93e.yaml new file mode 100644 index 00000000000..67eb95aaefe --- /dev/null +++ b/releasenotes/notes/graphql-error-span-events-add-extensions-5eece423cc8ff93e.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + graphql: Add user provided extensions to graphql error span events. diff --git a/releasenotes/notes/iast-feat-xss-django-6781a8b9a4092832.yaml b/releasenotes/notes/iast-feat-xss-django-6781a8b9a4092832.yaml index db470196b36..c29e9f49936 100644 --- a/releasenotes/notes/iast-feat-xss-django-6781a8b9a4092832.yaml +++ b/releasenotes/notes/iast-feat-xss-django-6781a8b9a4092832.yaml @@ -1,6 +1,6 @@ --- features: - | - Code Security (IAST): XSS detection for Django applications, + Code Security (IAST): XSS detection for Django applications and Jinja2 (Flask and FastAPI applications), which will be displayed on your DataDog Vulnerability Explorer dashboard. See the `Application Vulnerability Management `_ documentation for more information about this feature. diff --git a/releasenotes/notes/langchain-drop-logs-metrics-a997e8059886b20a.yaml b/releasenotes/notes/langchain-drop-logs-metrics-a997e8059886b20a.yaml new file mode 100644 index 00000000000..e7099dd1a77 --- /dev/null +++ b/releasenotes/notes/langchain-drop-logs-metrics-a997e8059886b20a.yaml @@ -0,0 +1,8 @@ +--- +upgrade: + - | + langchain: Removes prompt-completion log sampling from the LangChain integration. To continue logging prompt completions, + enable LLM Observability. + - | + langchain: Removes integration metrics from the LangChain integration. To continue tracking operational metrics from the + OpenAI integration, enable LLM Observability or use trace metrics instead. diff --git a/releasenotes/notes/remove-deprecated-tracing-configs-c6711b57037576f6.yaml b/releasenotes/notes/remove-deprecated-tracing-configs-c6711b57037576f6.yaml new file mode 100644 index 00000000000..b47613d504e --- /dev/null +++ b/releasenotes/notes/remove-deprecated-tracing-configs-c6711b57037576f6.yaml @@ -0,0 +1,10 @@ +--- +upgrade: + - | + configurations: Drops support for deprecated tracing configurations. The following configurations are no longer supported: + - DD_TRACE_SAMPLE_RATE, use DD_TRACE_SAMPLING_RULES instead. + - DD_TRACE_API_VERSION=v0.3, the default ``v0.5`` version is used instead. + - DD_ANALYTICS_ENABLED, Datadog Analytics is no longer supported. 
+ - DD_TRACE_ANALYTICS_ENABLED, Datadog Analytics is no longer supported. + - DD_HTTP_CLIENT_TAG_QUERY_STRING, DD_TRACE_HTTP_CLIENT_TAG_QUERY_STRING should be used instead. + - DD_TRACE_SPAN_AGGREGATOR_RLOCK, disabling the span aggregator rlock is no longer supported. diff --git a/releasenotes/notes/remove-deprecations-from-pin-a700ae0cb6d51d03.yaml new file mode 100644 index 00000000000..0401cdc4258 --- /dev/null +++ b/releasenotes/notes/remove-deprecations-from-pin-a700ae0cb6d51d03.yaml @@ -0,0 +1,4 @@ +--- +upgrade: + - | + tracing: Removes support for overriding the global tracer in ``ddtrace.trace.Pin``. \ No newline at end of file diff --git a/releasenotes/notes/remove-openai-metrics-logs-656c6ba8e2e07ea3.yaml new file mode 100644 index 00000000000..63153702d16 --- /dev/null +++ b/releasenotes/notes/remove-openai-metrics-logs-656c6ba8e2e07ea3.yaml @@ -0,0 +1,8 @@ +--- +upgrade: + - | + openai: Removes prompt-completion log sampling from the OpenAI integration. To continue logging prompt completions, + enable LLM Observability. + - | + openai: Removes integration metrics from the OpenAI integration. To continue tracking operational metrics from the + OpenAI integration, enable LLM Observability or use trace metrics instead. diff --git a/releasenotes/notes/remove-tracing-attrs-3-0-5743fa668289d5bc.yaml new file mode 100644 index 00000000000..35ee9378801 --- /dev/null +++ b/releasenotes/notes/remove-tracing-attrs-3-0-5743fa668289d5bc.yaml @@ -0,0 +1,15 @@ +--- +upgrade: + - | + tracer: Removes deprecated parameters from the ``Tracer.configure(...)`` method and removes the ``Tracer.sampler`` attribute. + - | + tracing: Drops support for multiple tracer instances; ``ddtrace.trace.Tracer`` cannot be reinitialized. + - | + span: Removes the deprecated ``Span.sampled`` property. + - | + sampling: Drops support for configuring sampling rules using functions and regex in the ``ddtrace.tracer.sampler.rules[].choose_matcher(...)`` method + and removes the ``timestamp_ns`` parameter from ``ddtrace.internal.rate_limiter.RateLimiter.is_allowed()``. + - | + configurations: Drops support for configuring ``DD_TRACE_METHODS`` with the '[]' notation. Ensure ``DD_TRACE_METHODS`` uses the ':' notation instead. + - | + opentracing: Removes the deprecated ``ddtracer`` parameter from ``ddtrace.opentracer.tracer.Tracer()``. \ No newline at end of file diff --git a/releasenotes/notes/remove_unneeded_unregister-ad20120201768a7e.yaml new file mode 100644 index 00000000000..1a5dc451340 --- /dev/null +++ b/releasenotes/notes/remove_unneeded_unregister-ad20120201768a7e.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + logging: Resolves an unneeded info log being logged on process exit due to a forksafe hook being unregistered that was never registered to begin with. 
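Taken together, the upgrade notes above imply a small configuration migration. A hedged before/after sketch follows; the environment variable names are the real ones named in the notes, while the sample values and module path are illustrative only.

    import os

    # Removed in v3.0 (no longer read by ddtrace):
    #   os.environ["DD_TRACE_SAMPLE_RATE"] = "0.5"
    #   os.environ["DD_TRACE_METHODS"] = "mymodule.MyClass[method_one,method_two]"
    #   os.environ["DD_HTTP_CLIENT_TAG_QUERY_STRING"] = "true"

    # Supported equivalents:
    os.environ["DD_TRACE_SAMPLING_RULES"] = '[{"sample_rate": 0.5}]'
    os.environ["DD_TRACE_METHODS"] = "mymodule.MyClass:method_one,method_two"
    os.environ["DD_TRACE_HTTP_CLIENT_TAG_QUERY_STRING"] = "true"
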
diff --git a/riotfile.py b/riotfile.py index f70b53532ff..fe4aece3126 100644 --- a/riotfile.py +++ b/riotfile.py @@ -2801,6 +2801,16 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, pys=select_pys(min_version="3.8"), ), + Venv( + name="valkey", + command="pytest {cmdargs} tests/contrib/valkey", + pkgs={ + "valkey": latest, + "pytest-randomly": latest, + "pytest-asyncio": "==0.23.7", + }, + pys=select_pys(min_version="3.8"), + ), Venv( name="profile", # NB riot commands that use this Venv must include --pass-env to work properly diff --git a/supported_versions_output.json b/supported_versions_output.json index a51bb17bb9a..06eff37be25 100644 --- a/supported_versions_output.json +++ b/supported_versions_output.json @@ -294,6 +294,12 @@ "max_tracer_supported": "2.2.3", "auto-instrumented": false }, + { + "integration": "valkey", + "minimum_tracer_supported": "6.0.0", + "max_tracer_supported": "6.0.2", + "auto-instrumented": true + }, { "integration": "vertexai", "minimum_tracer_supported": "1.71.1", diff --git a/supported_versions_table.csv b/supported_versions_table.csv index 3f7384a0cdd..edbe73503cd 100644 --- a/supported_versions_table.csv +++ b/supported_versions_table.csv @@ -47,5 +47,6 @@ starlette,0.13.6,0.41.3,True structlog,20.2.0,24.4.0,False tornado *,4.5.3,6.4,False urllib3,1.24.3,2.2.3,False +valkey,6.0.0,6.0.2,True vertexai,1.71.1,1.71.1,True yaaredis,2.0.4,3.0.0,True diff --git a/tests/appsec/contrib_appsec/django_app/urls.py b/tests/appsec/contrib_appsec/django_app/urls.py index 3ca8557c550..bb93a34ff8a 100644 --- a/tests/appsec/contrib_appsec/django_app/urls.py +++ b/tests/appsec/contrib_appsec/django_app/urls.py @@ -196,7 +196,7 @@ def login_user(request): def new_service(request, service_name: str): import ddtrace - ddtrace.trace.Pin.override(django, service=service_name, tracer=ddtrace.tracer) + ddtrace.trace.Pin._override(django, service=service_name, tracer=ddtrace.tracer) return HttpResponse(service_name, status=200) diff --git a/tests/appsec/contrib_appsec/fastapi_app/app.py b/tests/appsec/contrib_appsec/fastapi_app/app.py index c636e65b7c3..ddefe6cf33c 100644 --- a/tests/appsec/contrib_appsec/fastapi_app/app.py +++ b/tests/appsec/contrib_appsec/fastapi_app/app.py @@ -104,7 +104,7 @@ async def multi_view_no_param(request: Request): # noqa: B008 async def new_service(service_name: str, request: Request): # noqa: B008 import ddtrace - ddtrace.trace.Pin.override(app, service=service_name, tracer=ddtrace.tracer) + ddtrace.trace.Pin._override(app, service=service_name, tracer=ddtrace.tracer) return HTMLResponse(service_name, 200) async def slow_numbers(minimum, maximum): diff --git a/tests/appsec/contrib_appsec/flask_app/app.py b/tests/appsec/contrib_appsec/flask_app/app.py index 32228375f37..ef2fcb0ce2c 100644 --- a/tests/appsec/contrib_appsec/flask_app/app.py +++ b/tests/appsec/contrib_appsec/flask_app/app.py @@ -55,7 +55,7 @@ def multi_view(param_int=0, param_str=""): def new_service(service_name: str): import ddtrace - ddtrace.trace.Pin.override(Flask, service=service_name, tracer=ddtrace.tracer) + ddtrace.trace.Pin._override(Flask, service=service_name, tracer=ddtrace.tracer) return service_name diff --git a/tests/appsec/contrib_appsec/test_flask.py b/tests/appsec/contrib_appsec/test_flask.py index b497de98bf9..690b893c89f 100644 --- a/tests/appsec/contrib_appsec/test_flask.py +++ b/tests/appsec/contrib_appsec/test_flask.py @@ -37,7 +37,7 @@ def setUp(self): self.app = app self.app.test_client_class = DDFlaskTestClient self.client 
= self.app.test_client() - Pin.override(self.app, tracer=self.tracer) + Pin._override(self.app, tracer=self.tracer) def tearDown(self): super(BaseFlaskTestCase, self).tearDown() diff --git a/tests/appsec/contrib_appsec/utils.py index 5cf5e1da6b1..e510ed34029 100644 --- a/tests/appsec/contrib_appsec/utils.py +++ b/tests/appsec/contrib_appsec/utils.py @@ -1568,7 +1568,7 @@ def test_tracer(): @contextmanager def post_tracer(interface): original_tracer = getattr(ddtrace.trace.Pin.get_from(interface.framework), "tracer", None) - ddtrace.trace.Pin.override(interface.framework, tracer=interface.tracer) + ddtrace.trace.Pin._override(interface.framework, tracer=interface.tracer) yield if original_tracer is not None: - ddtrace.trace.Pin.override(interface.framework, tracer=original_tracer) + ddtrace.trace.Pin._override(interface.framework, tracer=original_tracer) diff --git a/tests/appsec/iast/test_processor.py index 3bb5eaa5015..4f9b912ffc2 100644 --- a/tests/appsec/iast/test_processor.py +++ b/tests/appsec/iast/test_processor.py @@ -51,7 +51,7 @@ def test_appsec_iast_processor_ensure_span_is_manual_keep(iast_context_defaults, test_appsec_iast_processor_ensure_span_is_manual_keep. This test throws 'finished span not connected to a trace' log error """ - with override_env(dict(DD_TRACE_SAMPLE_RATE=sampling_rate)): + with override_env({"DD_TRACE_SAMPLING_RULES": '[{"sample_rate":%s}]' % (sampling_rate,)}): oce.reconfigure() tracer = DummyTracer(iast_enabled=True) @@ -59,7 +59,6 @@ def test_appsec_iast_processor_ensure_span_is_manual_keep(iast_context_defaults, tracer._on_span_finish(span) result = span.get_tag(IAST.JSON) - assert len(json.loads(result)["vulnerabilities"]) == 1 assert span.get_metric(_SAMPLING_PRIORITY_KEY) is USER_KEEP diff --git a/tests/appsec/iast_packages/test_packages.py index 83e53ae92c9..d65cf2ea709 100644 --- a/tests/appsec/iast_packages/test_packages.py +++ b/tests/appsec/iast_packages/test_packages.py @@ -626,7 +626,7 @@ def uninstall(self, python_cmd): "", import_module_to_validate="soupsieve.css_match", extras=[("beautifulsoup4", "4.12.3")], - skip_python_version=[(3, 6), (3, 7), (3, 8)], + skip_python_version=[(3, 8)], test_propagation=True, fixme_propagation_fails=True, ), @@ -638,7 +638,7 @@ def uninstall(self, python_cmd): # "Original password: your-password\nHashed password: replaced_hashed\nPassword match: True", # "", # import_module_to_validate="werkzeug.http", - # skip_python_version=[(3, 6), (3, 7), (3, 8)], + # skip_python_version=[(3, 8)], # ), PackageForTesting( "yarl", "1.9.4", "https://example.com/path?query=param", "Scheme: https\nHost:" + " example.com\nPath: /path\nQuery: \n", "", import_module_to_validate="yarl._url", - skip_python_version=[(3, 6), (3, 7), (3, 8)], + skip_python_version=[(3, 8)], test_propagation=True, fixme_propagation_fails=True, ), @@ -659,7 +659,7 @@ def uninstall(self, python_cmd): # "example.zip", # "Contents of example.zip: ['example.zip/example.txt']", # "", - # skip_python_version=[(3, 6), (3, 7), (3, 8)], + # skip_python_version=[(3, 8)], # ), ## Skip due to typing-extensions added to the denylist # PackageForTesting( # "typing-extensions", # latest, # "", # "", # "", # import_name="typing_extensions", # test_e2e=False, - # skip_python_version=[(3, 6), (3, 7), (3, 8)], + # skip_python_version=[(3, 8)], # ), PackageForTesting( "six", "1.16.0", "", "We're in Python 3", "", - 
skip_python_version=[(3, 6), (3, 7), (3, 8)], + skip_python_version=[(3, 8)], ), ## Skip due to pillow added to the denylist # PackageForTesting( @@ -688,7 +688,7 @@ def uninstall(self, python_cmd): # "Image correctly generated", # "", # import_name="PIL.Image", - # skip_python_version=[(3, 6), (3, 7), (3, 8)], + # skip_python_version=[(3, 8)], # ), PackageForTesting( "aiobotocore", "2.13.0", "", "", "", test_e2e=False, test_import=False, import_name="aiobotocore.session" @@ -853,7 +853,7 @@ def uninstall(self, python_cmd): "Processed value: 15", "", import_name="annotated_types", - skip_python_version=[(3, 6), (3, 7), (3, 8)], + skip_python_version=[(3, 8)], ), ] diff --git a/tests/appsec/integrations/django_tests/conftest.py b/tests/appsec/integrations/django_tests/conftest.py index 57bd68db6a6..d047b7acee5 100644 --- a/tests/appsec/integrations/django_tests/conftest.py +++ b/tests/appsec/integrations/django_tests/conftest.py @@ -50,7 +50,7 @@ def tracer(): # Patch Django and override tracer to be our test tracer pin = Pin.get_from(django) original_tracer = pin.tracer - Pin.override(django, tracer=tracer) + Pin._override(django, tracer=tracer) # Yield to our test yield tracer @@ -59,7 +59,7 @@ def tracer(): # Reset the tracer pinned to Django and unpatch # DEV: unable to properly unpatch and reload django app with each test # unpatch() - Pin.override(django, tracer=original_tracer) + Pin._override(django, tracer=original_tracer) @pytest.fixture diff --git a/tests/appsec/integrations/fastapi_tests/test_fastapi_appsec_iast.py b/tests/appsec/integrations/fastapi_tests/test_fastapi_appsec_iast.py index 81702f3c8d9..07d8e1c9dc6 100644 --- a/tests/appsec/integrations/fastapi_tests/test_fastapi_appsec_iast.py +++ b/tests/appsec/integrations/fastapi_tests/test_fastapi_appsec_iast.py @@ -27,6 +27,7 @@ from ddtrace.appsec._iast.constants import VULN_NO_SAMESITE_COOKIE from ddtrace.appsec._iast.constants import VULN_SQL_INJECTION from ddtrace.appsec._iast.constants import VULN_STACKTRACE_LEAK +from ddtrace.appsec._iast.constants import VULN_XSS from ddtrace.contrib.internal.fastapi.patch import patch as patch_fastapi from ddtrace.contrib.internal.sqlite3.patch import patch as patch_sqlite_sqli from tests.appsec.iast.iast_utils import get_line_and_hash @@ -987,3 +988,38 @@ async def stacktrace_leak_inline_response(request: Request): assert len(loaded["vulnerabilities"]) == 1 vulnerability = loaded["vulnerabilities"][0] assert vulnerability["type"] == VULN_STACKTRACE_LEAK + + +def test_fastapi_xss(fastapi_application, client, tracer, test_spans): + @fastapi_application.get("/index.html") + async def test_route(request: Request): + from fastapi.responses import HTMLResponse + from jinja2 import Template + + query_params = request.query_params.get("iast_queryparam") + template = Template("

<html><body>{{ user_input|safe }}</body></html>
") + html = template.render(user_input=query_params) + return HTMLResponse(html) + + with override_global_config(dict(_iast_enabled=True, _iast_request_sampling=100.0)): + patch_iast({"xss": True}) + from jinja2.filters import FILTERS + from jinja2.filters import do_mark_safe + + FILTERS["safe"] = do_mark_safe + _aux_appsec_prepare_tracer(tracer) + resp = client.get( + "/index.html?iast_queryparam=test1234", + headers={"Content-Type": "application/json"}, + ) + assert resp.status_code == 200 + + span = test_spans.pop_traces()[0][0] + assert span.get_metric(IAST.ENABLED) == 1.0 + + iast_tag = span.get_tag(IAST.JSON) + assert iast_tag is not None + loaded = json.loads(iast_tag) + assert len(loaded["vulnerabilities"]) == 1 + vulnerability = loaded["vulnerabilities"][0] + assert vulnerability["type"] == VULN_XSS diff --git a/tests/appsec/integrations/flask_tests/test_iast_flask.py index be45e6bb82f..0d8f7c5b4ad 100644 --- a/tests/appsec/integrations/flask_tests/test_iast_flask.py +++ b/tests/appsec/integrations/flask_tests/test_iast_flask.py @@ -16,7 +16,9 @@ from ddtrace.appsec._iast.constants import VULN_NO_SAMESITE_COOKIE from ddtrace.appsec._iast.constants import VULN_SQL_INJECTION from ddtrace.appsec._iast.constants import VULN_STACKTRACE_LEAK +from ddtrace.appsec._iast.constants import VULN_XSS from ddtrace.appsec._iast.taint_sinks.header_injection import patch as patch_header_injection +from ddtrace.appsec._iast.taint_sinks.xss import patch as patch_xss_injection from ddtrace.contrib.internal.sqlite3.patch import patch as patch_sqlite_sqli from ddtrace.settings.asm import config as asm_config from tests.appsec.iast.iast_utils import get_line_and_hash @@ -45,11 +47,15 @@ def setUp(self): _iast_request_sampling=100.0, ) ): - super(FlaskAppSecIASTEnabledTestCase, self).setUp() patch_sqlite_sqli() patch_header_injection() + patch_xss_injection() patch_json() + from jinja2.filters import FILTERS + from jinja2.filters import do_mark_safe + FILTERS["safe"] = do_mark_safe + super(FlaskAppSecIASTEnabledTestCase, self).setUp() self.tracer._configure(api_version="v0.4", appsec_enabled=True, iast_enabled=True) oce.reconfigure() @@ -59,7 +65,6 @@ def test_flask_full_sqli_iast_http_request_path_parameter(self): def sqli_1(param_str): import sqlite3 - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect assert is_pyobject_tainted(param_str) @@ -161,6 +166,62 @@ def sqli_2(param_str): assert vulnerability["location"]["path"] == TEST_FILE_PATH assert vulnerability["hash"] == hash_value + @pytest.mark.skipif(not asm_config._iast_supported, reason="Python version not supported by IAST") + def test_flask_iast_enabled_http_request_header_get(self): + @self.app.route("/sqli/<string:param_str>/", methods=["GET", "POST"]) + def sqli_2(param_str): + import sqlite3 + + from flask import request + + from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect + + con = sqlite3.connect(":memory:") + cur = con.cursor() + # label test_flask_iast_enabled_http_request_header_get + cur.execute(add_aspect("SELECT 1 FROM ", request.headers.get("User-Agent"))) + + return "OK", 200 + + with override_global_config( + dict( + _iast_enabled=True, + _iast_deduplication_enabled=False, + ) + ): + resp = self.client.post( + "/sqli/sqlite_master/", data={"name": "test"}, headers={"User-Agent": "sqlite_master"} + ) + assert resp.status_code == 200 + + root_span = self.pop_spans()[0]
assert root_span.get_metric(IAST.ENABLED) == 1.0 + + loaded = json.loads(root_span.get_tag(IAST.JSON)) + assert loaded["sources"] == [ + {"origin": "http.request.header", "name": "User-Agent", "value": "sqlite_master"} + ] + + line, hash_value = get_line_and_hash( + "test_flask_iast_enabled_http_request_header_get", + VULN_SQL_INJECTION, + filename=TEST_FILE_PATH, + ) + vulnerability = loaded["vulnerabilities"][0] + + assert vulnerability["type"] == VULN_SQL_INJECTION + assert vulnerability["evidence"] == { + "valueParts": [ + {"value": "SELECT "}, + {"redacted": True}, + {"value": " FROM "}, + {"value": "sqlite_master", "source": 0}, + ] + } + assert vulnerability["location"]["line"] == line + assert vulnerability["location"]["path"] == TEST_FILE_PATH + assert vulnerability["hash"] == hash_value + @pytest.mark.skipif(not asm_config._iast_supported, reason="Python version not supported by IAST") def test_flask_full_sqli_iast_enabled_http_request_header_name_keys(self): @self.app.route("/sqli//", methods=["GET", "POST"]) @@ -274,7 +335,6 @@ def sqli_5(param_str, param_int): from ddtrace.appsec._iast._taint_tracking import OriginType from ddtrace.appsec._iast._taint_tracking._taint_objects import get_tainted_ranges - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted header_ranges = get_tainted_ranges(request.headers["User-Agent"]) assert header_ranges @@ -324,8 +384,6 @@ def test_flask_simple_iast_path_header_and_querystring_tainted_request_sampling_ def sqli_6(param_str): from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - # Note: these are not tainted because of request sampling at 0% assert not is_pyobject_tainted(request.headers["User-Agent"]) assert not is_pyobject_tainted(request.query_string) @@ -535,7 +593,6 @@ def test_flask_full_sqli_iast_http_request_parameter_name_post(self): def sqli_13(): import sqlite3 - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect for i in request.form.keys(): @@ -593,7 +650,6 @@ def test_flask_full_sqli_iast_http_request_parameter_name_get(self): def sqli_14(): import sqlite3 - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect for i in request.args.keys(): @@ -654,7 +710,6 @@ def sqli_10(): from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect con = sqlite3.connect(":memory:") @@ -719,7 +774,6 @@ def sqli_11(): from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect con = sqlite3.connect(":memory:") @@ -784,7 +838,6 @@ def sqli_11(): from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect con = sqlite3.connect(":memory:") @@ -849,7 +902,6 @@ def sqli_11(): from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect con = sqlite3.connect(":memory:") @@ -916,7 +968,6 @@ def sqli_11(): from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import 
is_pyobject_tainted from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect def iterate_json(data, parent_key=""): @@ -1057,7 +1108,6 @@ def sqli_10(): from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect con = sqlite3.connect(":memory:") @@ -1160,8 +1210,6 @@ def header_injection(): from flask import Response from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - tainted_string = request.form.get("name") assert is_pyobject_tainted(tainted_string) resp = Response("OK") @@ -1194,14 +1242,12 @@ def header_injection(): # TODO: vulnerability path is flaky, it points to "tests/contrib/flask/__init__.py" @pytest.mark.skipif(not asm_config._iast_supported, reason="Python version not supported by IAST") - def test_flask_header_injection_exlusions_location(self): + def test_flask_header_injection_exclusions_location(self): @self.app.route("/header_injection/", methods=["GET", "POST"]) def header_injection(): from flask import Response from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - tainted_string = request.form.get("name") assert is_pyobject_tainted(tainted_string) resp = Response("OK") @@ -1223,14 +1269,12 @@ def header_injection(): assert root_span.get_tag(IAST.JSON) is None @pytest.mark.skipif(not asm_config._iast_supported, reason="Python version not supported by IAST") - def test_flask_header_injection_exlusions_access_control(self): + def test_flask_header_injection_exclusions_access_control(self): @self.app.route("/header_injection/", methods=["GET", "POST"]) def header_injection(): from flask import Response from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - tainted_string = request.form.get("name") assert is_pyobject_tainted(tainted_string) resp = Response("OK") @@ -1258,8 +1302,6 @@ def insecure_cookie(): from flask import Response from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - tainted_string = request.form.get("name") assert is_pyobject_tainted(tainted_string) resp = Response("OK") @@ -1296,8 +1338,6 @@ def insecure_cookie_empty(): from flask import Response from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - tainted_string = request.form.get("name") assert is_pyobject_tainted(tainted_string) resp = Response("OK") @@ -1326,8 +1366,6 @@ def no_http_only_cookie(): from flask import Response from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - tainted_string = request.form.get("name") assert is_pyobject_tainted(tainted_string) resp = Response("OK") @@ -1364,8 +1402,6 @@ def no_http_only_cookie_empty(): from flask import Response from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - tainted_string = request.form.get("name") assert is_pyobject_tainted(tainted_string) resp = Response("OK") @@ -1395,8 +1431,6 @@ def no_samesite_cookie(): from flask import Response from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - tainted_string = request.form.get("name") assert is_pyobject_tainted(tainted_string) resp = Response("OK") @@ -1433,8 +1467,6 @@ def 
no_samesite_cookie_empty(): from flask import Response from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - tainted_string = request.form.get("name") assert is_pyobject_tainted(tainted_string) resp = Response("OK") @@ -1461,8 +1493,6 @@ def cookie_secure(): from flask import Response from flask import request - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - tainted_string = request.form.get("name") assert is_pyobject_tainted(tainted_string) resp = Response("OK") @@ -1587,6 +1617,159 @@ def stacktrace_leak(): ) assert "Exception: ValueError" in vulnerability["evidence"]["valueParts"][0]["value"] + def test_flask_xss(self): + @self.app.route("/xss/", methods=["GET"]) + def xss_view(): + from flask import render_template_string + from flask import request + + user_input = request.args.get("input", "") + + # label test_flask_xss + return render_template_string("<html><body>XSS: {{ user_input|safe }}</body></html>", user_input=user_input) + with override_global_config( + dict( + _iast_enabled=True, + _iast_deduplication_enabled=False, + _iast_request_sampling=100.0, + ) + ): + resp = self.client.get("/xss/?input=<script>alert('XSS')</script>") + assert resp.status_code == 200 + assert resp.data == b"<html><body>XSS: <script>alert('XSS')</script></body></html>" + root_span = self.pop_spans()[0] + assert root_span.get_metric(IAST.ENABLED) == 1.0 + + loaded = json.loads(root_span.get_tag(IAST.JSON)) + assert loaded["sources"] == [ + {"origin": "http.request.parameter", "name": "input", "value": "<script>alert('XSS')</script>"} + ] + + line, hash_value = get_line_and_hash("test_flask_xss", VULN_XSS, filename=TEST_FILE_PATH) + vulnerability = loaded["vulnerabilities"][0] + assert vulnerability["type"] == VULN_XSS + assert vulnerability["evidence"] == { + "valueParts": [ + {"value": "<script>alert('XSS')</script>", "source": 0}, + ] + } + assert vulnerability["location"]["line"] == line + assert vulnerability["location"]["path"] == TEST_FILE_PATH + + def test_flask_xss_concat(self): + @self.app.route("/xss/concat/", methods=["GET"]) + def xss_view(): + from flask import render_template_string + from flask import request + + from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect + + user_input = request.args.get("input", "") + + # label test_flask_xss_concat + return render_template_string(add_aspect(add_aspect("<html><body>XSS: ", user_input), "</body></html>")) + with override_global_config( + dict( + _iast_enabled=True, + _iast_deduplication_enabled=False, + _iast_request_sampling=100.0, + ) + ): + resp = self.client.get("/xss/concat/?input=<script>alert('XSS')</script>") + assert resp.status_code == 200 + assert resp.data == b"<html><body>XSS: <script>alert('XSS')</script></body></html>" + root_span = self.pop_spans()[0] + assert root_span.get_metric(IAST.ENABLED) == 1.0 + + loaded = json.loads(root_span.get_tag(IAST.JSON)) + assert loaded["sources"] == [ + {"origin": "http.request.parameter", "name": "input", "value": "<script>alert('XSS')</script>"} + ] + + line, hash_value = get_line_and_hash("test_flask_xss_concat", VULN_XSS, filename=TEST_FILE_PATH) + vulnerability = loaded["vulnerabilities"][0] + assert vulnerability["type"] == VULN_XSS + assert vulnerability["evidence"] == { + "valueParts": [ + {"value": "<html><body>XSS: "}, + {"source": 0, "value": "<script>alert('XSS')</script>"}, + {"value": "</body></html>"}, + ] + } + assert vulnerability["location"]["line"] == line + assert vulnerability["location"]["path"] == TEST_FILE_PATH + + def test_flask_xss_template_secure(self): + @self.app.route("/xss/template/secure/", methods=["GET"]) + def xss_view_template(): + from flask import render_template + from flask import request + + user_input = request.args.get("input", "") + + # label test_flask_xss_template + return render_template("test.html", world=user_input) + + with override_global_config( + dict( + _iast_enabled=True, + _iast_deduplication_enabled=False, + _iast_request_sampling=100.0, + ) + ): + resp = self.client.get("/xss/template/secure/?input=<script>alert('XSS')</script>") + assert resp.status_code == 200 + assert resp.data == b"hello &lt;script&gt;alert(&#39;XSS&#39;)&lt;/script&gt;" + + root_span = self.pop_spans()[0] + assert root_span.get_metric(IAST.ENABLED) == 1.0 + + assert root_span.get_tag(IAST.JSON) is None + + def test_flask_xss_template(self): + @self.app.route("/xss/template/", methods=["GET"]) + def xss_view_template(): + from flask import render_template + from flask import request + + user_input = request.args.get("input", "") + + # label test_flask_xss_template + return render_template("test_insecure.html", world=user_input) + + with override_global_config( + dict( + _iast_enabled=True, + _iast_deduplication_enabled=False, + _iast_request_sampling=100.0, + ) + ): + resp = self.client.get("/xss/template/?input=<script>alert('XSS')</script>") + assert resp.status_code == 200 + assert resp.data == b"hello <script>alert('XSS')</script>" + + root_span = self.pop_spans()[0] + assert root_span.get_metric(IAST.ENABLED) == 1.0 + + loaded = json.loads(root_span.get_tag(IAST.JSON)) + assert loaded["sources"] == [ + {"origin": "http.request.parameter", "name": "input", "value": "<script>alert('XSS')</script>"} + ] + + line, hash_value = get_line_and_hash("test_flask_xss", VULN_XSS, filename=TEST_FILE_PATH) + vulnerability = loaded["vulnerabilities"][0] + assert vulnerability["type"] == VULN_XSS + assert vulnerability["evidence"] == { + "valueParts": [ + {"value": "<script>alert('XSS')</script>", "source": 0}, + ] + } + assert vulnerability["location"]["path"] == "tests/contrib/flask/test_templates/test_insecure.html" + class FlaskAppSecIASTDisabledTestCase(BaseFlaskTestCase): @pytest.fixture(autouse=True) diff --git a/tests/ci_visibility/test_ci_visibility.py index 1db4f068c7a..778568f544e 100644 --- a/tests/ci_visibility/test_ci_visibility.py +++ b/tests/ci_visibility/test_ci_visibility.py @@ -28,6 +28,7 @@ from ddtrace.internal.ci_visibility.git_client import METADATA_UPLOAD_STATUS from ddtrace.internal.ci_visibility.git_client import CIVisibilityGitClient from ddtrace.internal.ci_visibility.git_client import CIVisibilityGitClientSerializerV1 +from ddtrace.internal.ci_visibility.recorder import CIVisibilityTracer from ddtrace.internal.ci_visibility.recorder import _extract_repository_name_from_url import ddtrace.internal.test_visibility._internal_item_ids from ddtrace.internal.utils.http import Response @@ -685,7 +686,7 @@ def test_civisibilitywriter_evp_proxy_url(self): ), mock.patch( "ddtrace.internal.agent.get_trace_url", return_value="http://evpproxy.bar:1234" ), mock.patch("ddtrace.settings._config.Config", _get_default_civisibility_ddconfig()), mock.patch( - "ddtrace.tracer", ddtrace.trace.Tracer() + "ddtrace.tracer", CIVisibilityTracer() ), mock.patch( "ddtrace.internal.ci_visibility.recorder.CIVisibility._agent_evp_proxy_is_available", return_value=True ), _dummy_noop_git_client(), mock.patch( @@ -705,7 +706,7 @@ def test_civisibilitywriter_only_traces(self): ) ), mock.patch( 
"ddtrace.internal.agent.get_trace_url", return_value="http://onlytraces:1234" - ), mock.patch("ddtrace.tracer", ddtrace.trace.Tracer()), mock.patch( + ), mock.patch("ddtrace.tracer", CIVisibilityTracer()), mock.patch( "ddtrace.internal.ci_visibility.recorder.CIVisibility._agent_evp_proxy_is_available", return_value=False ), mock.patch( "ddtrace.internal.ci_visibility.writer.config", ddtrace.settings.Config() @@ -1119,7 +1120,7 @@ def test_civisibility_enable_respects_passed_in_tracer(): ), _dummy_noop_git_client(), mock.patch( "ddtrace.internal.ci_visibility.recorder.ddconfig", _get_default_civisibility_ddconfig() ), mock.patch("ddtrace.internal.ci_visibility.writer.config", ddtrace.settings.Config()): - tracer = ddtrace.trace.Tracer() + tracer = CIVisibilityTracer() tracer._configure(partial_flush_enabled=False, partial_flush_min_spans=100) CIVisibility.enable(tracer=tracer) assert CIVisibility._instance.tracer._partial_flush_enabled is False diff --git a/tests/ci_visibility/util.py b/tests/ci_visibility/util.py index dc0b886ca64..f1911e20e93 100644 --- a/tests/ci_visibility/util.py +++ b/tests/ci_visibility/util.py @@ -12,6 +12,7 @@ from ddtrace.internal.ci_visibility.git_client import METADATA_UPLOAD_STATUS from ddtrace.internal.ci_visibility.git_client import CIVisibilityGitClient from ddtrace.internal.ci_visibility.recorder import CIVisibility +from ddtrace.internal.ci_visibility.recorder import CIVisibilityTracer from ddtrace.internal.test_visibility._internal_item_ids import InternalTestId from tests.utils import DummyCIVisibilityWriter from tests.utils import override_env @@ -209,5 +210,5 @@ def _ci_override_env( new_vars: t.Optional[t.Dict[str, str]] = None, mock_ci_env=False, replace_os_env=True, full_clear=False ): env_vars = _get_default_ci_env_vars(new_vars, mock_ci_env, full_clear) - with override_env(env_vars, replace_os_env=replace_os_env), mock.patch("ddtrace.tracer", ddtrace.trace.Tracer()): + with override_env(env_vars, replace_os_env=replace_os_env), mock.patch("ddtrace.tracer", CIVisibilityTracer()): yield diff --git a/tests/contrib/aiobotocore/utils.py b/tests/contrib/aiobotocore/utils.py index b51b6550327..e2eb56c6026 100644 --- a/tests/contrib/aiobotocore/utils.py +++ b/tests/contrib/aiobotocore/utils.py @@ -42,11 +42,11 @@ async def aiobotocore_client(service, tracer): client, aiobotocore.session.ClientCreatorContext ): async with client as client: - Pin.override(client, tracer=tracer) + Pin._override(client, tracer=tracer) await yield_(client) else: - Pin.override(client, tracer=tracer) + Pin._override(client, tracer=tracer) try: await yield_(client) finally: diff --git a/tests/contrib/aiohttp/test_aiohttp_client.py b/tests/contrib/aiohttp/test_aiohttp_client.py index 2b2b51c2650..c020b1266c6 100644 --- a/tests/contrib/aiohttp/test_aiohttp_client.py +++ b/tests/contrib/aiohttp/test_aiohttp_client.py @@ -189,7 +189,7 @@ def test_configure_service_name_pin(ddtrace_run_python_code_in_subprocess): async def test(): async with aiohttp.ClientSession() as session: - Pin.override(session, service="pin-custom-svc") + Pin._override(session, service="pin-custom-svc") async with session.get(URL_200) as resp: pass diff --git a/tests/contrib/aiohttp/test_request.py b/tests/contrib/aiohttp/test_request.py index 36e9d8a399a..056eda09c4b 100644 --- a/tests/contrib/aiohttp/test_request.py +++ b/tests/contrib/aiohttp/test_request.py @@ -31,6 +31,33 @@ async def test_full_request(patched_app_tracer, aiohttp_client, loop): assert "GET /" == request_span.resource +async def 
test_full_request_w_mem_leak_prevention_flag(patched_app_tracer, aiohttp_client, loop): + config.aiohttp.disable_stream_timing_for_mem_leak = True + try: + app, tracer = patched_app_tracer + client = await aiohttp_client(app) + # it should create a root span when there is a handler hit + # with the proper tags + request = await client.request("GET", "/") + assert 200 == request.status + await request.text() + # the trace is created + traces = tracer.pop_traces() + assert 1 == len(traces) + assert 1 == len(traces[0]) + request_span = traces[0][0] + assert_is_measured(request_span) + + # request + assert "aiohttp-web" == request_span.service + assert "aiohttp.request" == request_span.name + assert "GET /" == request_span.resource + except Exception: + raise + finally: + config.aiohttp.disable_stream_timing_for_mem_leak = False + + async def test_stream_request(patched_app_tracer, aiohttp_client, loop): app, tracer = patched_app_tracer async with await aiohttp_client(app) as client: diff --git a/tests/contrib/aiohttp_jinja2/conftest.py b/tests/contrib/aiohttp_jinja2/conftest.py index a58b72f7f49..1624753a635 100644 --- a/tests/contrib/aiohttp_jinja2/conftest.py +++ b/tests/contrib/aiohttp_jinja2/conftest.py @@ -13,7 +13,7 @@ def patched_app_tracer_jinja(patched_app_tracer): # noqa: F811 app, tracer = patched_app_tracer patch() - Pin.override(aiohttp_jinja2, tracer=tracer) + Pin._override(aiohttp_jinja2, tracer=tracer) yield app, tracer unpatch() @@ -22,6 +22,6 @@ def patched_app_tracer_jinja(patched_app_tracer): # noqa: F811 def untraced_app_tracer_jinja(untraced_app_tracer): # noqa: F811 patch() app, tracer = untraced_app_tracer - Pin.override(aiohttp_jinja2, tracer=tracer) + Pin._override(aiohttp_jinja2, tracer=tracer) yield app, tracer unpatch() diff --git a/tests/contrib/aiohttp_jinja2/test_aiohttp_jinja2.py b/tests/contrib/aiohttp_jinja2/test_aiohttp_jinja2.py index 089c799ebe0..98a942b1637 100644 --- a/tests/contrib/aiohttp_jinja2/test_aiohttp_jinja2.py +++ b/tests/contrib/aiohttp_jinja2/test_aiohttp_jinja2.py @@ -35,7 +35,7 @@ async def test_template_rendering(untraced_app_tracer_jinja, aiohttp_client): async def test_template_rendering_snapshot(untraced_app_tracer_jinja, aiohttp_client, snapshot_context): app, _ = untraced_app_tracer_jinja - Pin.override(aiohttp_jinja2, tracer=tracer) + Pin._override(aiohttp_jinja2, tracer=tracer) with snapshot_context(): client = await aiohttp_client(app) # it should trace a template rendering @@ -51,7 +51,7 @@ async def test_template_rendering_snapshot_patched_server( use_global_tracer, ): app, _ = patched_app_tracer_jinja - Pin.override(aiohttp_jinja2, tracer=tracer) + Pin._override(aiohttp_jinja2, tracer=tracer) # Ignore meta.http.url tag as the port is not fixed on the server with snapshot_context(ignores=["meta.http.url", "meta.http.useragent"]): client = await aiohttp_client(app) diff --git a/tests/contrib/aiomysql/test_aiomysql.py b/tests/contrib/aiomysql/test_aiomysql.py index 8199b5c16a1..06d53d4ce05 100644 --- a/tests/contrib/aiomysql/test_aiomysql.py +++ b/tests/contrib/aiomysql/test_aiomysql.py @@ -9,7 +9,6 @@ from ddtrace.contrib.internal.aiomysql.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from ddtrace.trace import Pin -from ddtrace.trace import Tracer from tests.contrib import shared_tests_async as shared_tests from tests.contrib.asyncio.utils import AsyncioTestCase from tests.contrib.asyncio.utils import mark_asyncio @@ -31,19 +30,16 @@ def patch_aiomysql(): @pytest.fixture async def 
patched_conn(tracer): conn = await aiomysql.connect(**AIOMYSQL_CONFIG) - Pin.get_from(conn).clone(tracer=tracer).onto(conn) yield conn conn.close() @pytest.fixture() -async def snapshot_conn(): - tracer = Tracer() +async def snapshot_conn(tracer): conn = await aiomysql.connect(**AIOMYSQL_CONFIG) - Pin.get_from(conn).clone(tracer=tracer).onto(conn) yield conn conn.close() - tracer.shutdown() + tracer.flush() @pytest.mark.asyncio @@ -66,7 +62,7 @@ async def test_queries(snapshot_conn): @pytest.mark.asyncio @pytest.mark.snapshot async def test_pin_override(patched_conn, tracer): - Pin.override(patched_conn, service="db") + Pin._override(patched_conn, service="db") cursor = await patched_conn.cursor() await cursor.execute("SELECT 1") rows = await cursor.fetchall() @@ -82,7 +78,7 @@ async def test_patch_unpatch(tracer, test_spans): service = "fo" conn = await aiomysql.connect(**AIOMYSQL_CONFIG) - Pin.get_from(conn).clone(service=service, tracer=tracer).onto(conn) + Pin.get_from(conn)._clone(service=service, tracer=tracer).onto(conn) await (await conn.cursor()).execute("select 'dba4x4'") conn.close() @@ -104,7 +100,7 @@ async def test_patch_unpatch(tracer, test_spans): patch() conn = await aiomysql.connect(**AIOMYSQL_CONFIG) - Pin.get_from(conn).clone(service=service, tracer=tracer).onto(conn) + Pin.get_from(conn)._clone(service=service, tracer=tracer).onto(conn) await (await conn.cursor()).execute("select 'dba4x4'") conn.close() @@ -241,7 +237,7 @@ async def _get_conn_tracer(self, tags=None): assert pin # Customize the service # we have to apply it on the existing one since new one won't inherit `app` - pin.clone(tracer=self.tracer, tags={**tags, **pin.tags}).onto(self.conn) + pin._clone(tracer=self.tracer, tags={**tags, **pin.tags}).onto(self.conn) return self.conn, self.tracer diff --git a/tests/contrib/aiopg/test.py b/tests/contrib/aiopg/test.py index eb738e009d8..939aa376570 100644 --- a/tests/contrib/aiopg/test.py +++ b/tests/contrib/aiopg/test.py @@ -39,7 +39,7 @@ def tearDown(self): @pytest.mark.asyncio async def _get_conn_and_tracer(self): conn = self._conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) return conn, self.tracer @@ -159,7 +159,7 @@ async def test_connect_factory(self): services = ["db", "another"] for service in services: conn, _ = await self._get_conn_and_tracer() - Pin.get_from(conn).clone(service=service, tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(service=service, tracer=self.tracer).onto(conn) await self.assert_conn_is_traced(self.tracer, conn, service) conn.close() @@ -172,7 +172,7 @@ async def test_patch_unpatch(self): service = "fo" conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(service=service, tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(service=service, tracer=self.tracer).onto(conn) await (await conn.cursor()).execute("select 'blah'") conn.close() @@ -194,7 +194,7 @@ async def test_patch_unpatch(self): patch() conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(service=service, tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(service=service, tracer=self.tracer).onto(conn) await (await conn.cursor()).execute("select 'blah'") conn.close() @@ -218,7 +218,7 @@ async def test_user_specified_service_v0(self): assert config.service == "mysvc" conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + 
Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) await (await conn.cursor()).execute("select 'blah'") conn.close() @@ -240,7 +240,7 @@ async def test_user_specified_service_v1(self): assert config.service == "mysvc" conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) await (await conn.cursor()).execute("select 'blah'") conn.close() @@ -258,7 +258,7 @@ async def test_unspecified_service_v1(self): """ # Ensure that the service name was configured conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) await (await conn.cursor()).execute("select 'blah'") conn.close() @@ -271,7 +271,7 @@ async def test_unspecified_service_v1(self): @run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) async def test_trace_span_name_v0_schema(self): conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) await (await conn.cursor()).execute("select 'blah'") conn.close() @@ -284,7 +284,7 @@ async def test_trace_span_name_v0_schema(self): @run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) async def test_trace_span_name_v1_schema(self): conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) await (await conn.cursor()).execute("select 'blah'") conn.close() @@ -299,7 +299,7 @@ async def test_trace_span_name_v1_schema(self): ) async def test_user_specified_service_integration_v0(self): conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) cursor = await conn.cursor() await cursor.execute("SELECT 1") @@ -314,7 +314,7 @@ async def test_user_specified_service_integration_v0(self): ) async def test_user_specified_service_integration_v1(self): conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) cursor = await conn.cursor() await cursor.execute("SELECT 1") @@ -327,7 +327,7 @@ async def test_user_specified_service_integration_v1(self): @AsyncioTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) async def test_user_specified_service_env_var_v0(self): conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) cursor = await conn.cursor() await cursor.execute("SELECT 1") @@ -340,7 +340,7 @@ async def test_user_specified_service_env_var_v0(self): @AsyncioTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) async def test_user_specified_service_env_var_v1(self): conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) cursor = await conn.cursor() await cursor.execute("SELECT 1") @@ -355,7 +355,7 @@ async def test_user_specified_service_env_var_v1(self): ) async def test_pin_override_service_v1(self): conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer, service="override").onto(conn) + 
Pin.get_from(conn)._clone(tracer=self.tracer, service="override").onto(conn) cursor = await conn.cursor() await cursor.execute("SELECT 1") @@ -368,7 +368,7 @@ async def test_pin_override_service_v1(self): @AsyncioTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) async def test_span_name_v0_schema(self): conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) cursor = await conn.cursor() await cursor.execute("SELECT 1") @@ -381,7 +381,7 @@ async def test_span_name_v0_schema(self): @AsyncioTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) async def test_span_name_v1_schema(self): conn = await aiopg.connect(**POSTGRES_CONFIG) - Pin.get_from(conn).clone(tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(tracer=self.tracer).onto(conn) cursor = await conn.cursor() await cursor.execute("SELECT 1") @@ -395,7 +395,7 @@ class AiopgAnalyticsTestCase(AiopgTestCase): async def trace_spans(self): conn, _ = await self._get_conn_and_tracer() - Pin.get_from(conn).clone(service="db", tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(service="db", tracer=self.tracer).onto(conn) cursor = await conn.cursor() await cursor.execute("select 'foobar'") diff --git a/tests/contrib/algoliasearch/test.py b/tests/contrib/algoliasearch/test.py index 87f5f7b6910..53603f47287 100644 --- a/tests/contrib/algoliasearch/test.py +++ b/tests/contrib/algoliasearch/test.py @@ -52,7 +52,7 @@ def search(self, query, args=None, request_options=None): def patch_algoliasearch(self): patch() - Pin.override(self.index, tracer=self.tracer) + Pin._override(self.index, tracer=self.tracer) def tearDown(self): super(AlgoliasearchTest, self).tearDown() @@ -157,7 +157,7 @@ def test_patch_unpatch(self): def test_patch_all_auto_enable(self): patch_all() - Pin.override(self.index, tracer=self.tracer) + Pin._override(self.index, tracer=self.tracer) self.perform_search("test search") spans = self.get_spans() @@ -179,7 +179,7 @@ def test_user_specified_service_default(self): The algoliasearch integration shouldn't use it as the service name """ patch_all() - Pin.override(self.index, tracer=self.tracer) + Pin._override(self.index, tracer=self.tracer) self.perform_search("test search") spans = self.get_spans() self.reset() @@ -195,7 +195,7 @@ def test_user_specified_service_v0(self): The algoliasearch integration shouldn't use it as the service name """ patch_all() - Pin.override(self.index, tracer=self.tracer) + Pin._override(self.index, tracer=self.tracer) self.perform_search("test search") spans = self.get_spans() self.reset() @@ -211,7 +211,7 @@ def test_user_specified_service_v1(self): so make sure that is used and not the v0 schema 'algoliasearch' """ patch_all() - Pin.override(self.index, tracer=self.tracer) + Pin._override(self.index, tracer=self.tracer) self.perform_search("test search") spans = self.get_spans() self.reset() @@ -223,7 +223,7 @@ def test_user_specified_service_v1(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_span_name_v0_schema(self): patch_all() - Pin.override(self.index, tracer=self.tracer) + Pin._override(self.index, tracer=self.tracer) self.perform_search("test search") spans = self.get_spans() self.reset() @@ -235,7 +235,7 @@ def test_span_name_v0_schema(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def 
test_span_name_v1_schema(self): patch_all() - Pin.override(self.index, tracer=self.tracer) + Pin._override(self.index, tracer=self.tracer) self.perform_search("test search") spans = self.get_spans() self.reset() diff --git a/tests/contrib/anthropic/conftest.py b/tests/contrib/anthropic/conftest.py index 3e5dac0a442..1cb600b0b82 100644 --- a/tests/contrib/anthropic/conftest.py +++ b/tests/contrib/anthropic/conftest.py @@ -36,7 +36,7 @@ def mock_tracer(ddtrace_global_config, anthropic): try: pin = Pin.get_from(anthropic) mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) - pin.override(anthropic, tracer=mock_tracer) + pin._override(anthropic, tracer=mock_tracer) pin.tracer._configure() if ddtrace_global_config.get("_llmobs_enabled", False): # Have to disable and re-enable LLMObs to use to mock tracer. diff --git a/tests/contrib/aredis/test_aredis.py b/tests/contrib/aredis/test_aredis.py index e62cfa974be..298abdbf85b 100644 --- a/tests/contrib/aredis/test_aredis.py +++ b/tests/contrib/aredis/test_aredis.py @@ -122,7 +122,7 @@ async def test_meta_override(tracer, test_spans): r = aredis.StrictRedis(port=REDIS_CONFIG["port"]) pin = Pin.get_from(r) assert pin is not None - pin.clone(tags={"cheese": "camembert"}, tracer=tracer).onto(r) + pin._clone(tags={"cheese": "camembert"}, tracer=tracer).onto(r) await r.get("cheese") test_spans.assert_trace_count(1) @@ -162,7 +162,7 @@ async def test(tracer, test_spans): r = aredis.StrictRedis(port=REDIS_CONFIG["port"]) pin = Pin.get_from(r) assert pin is not None - pin.clone(tags={{"cheese": "camembert"}}, tracer=tracer).onto(r) + pin._clone(tags={{"cheese": "camembert"}}, tracer=tracer).onto(r) await r.get("cheese") test_spans.assert_trace_count(1) diff --git a/tests/contrib/asyncpg/test_asyncpg.py b/tests/contrib/asyncpg/test_asyncpg.py index 5e5b649faaf..5de995c80aa 100644 --- a/tests/contrib/asyncpg/test_asyncpg.py +++ b/tests/contrib/asyncpg/test_asyncpg.py @@ -158,7 +158,7 @@ async def test_cursor_manual(patched_conn): @pytest.mark.snapshot @pytest.mark.xfail async def test_service_override_pin(patched_conn): - Pin.override(patched_conn, service="custom-svc") + Pin._override(patched_conn, service="custom-svc") await patched_conn.execute("SELECT 1") @@ -351,7 +351,7 @@ async def _get_conn_tracer(self): assert pin # Customize the service # we have to apply it on the existing one since new one won't inherit `app` - pin.clone(tracer=self.tracer).onto(self.conn) + pin._clone(tracer=self.tracer).onto(self.conn) return self.conn, self.tracer @@ -468,7 +468,7 @@ async def test_asyncpg_dbm_propagation_comment_pin_service_name_override(self): db_name = POSTGRES_CONFIG["dbname"] conn, tracer = await self._get_conn_tracer() - Pin.override(conn, service="pin-service-name-override", tracer=tracer) + Pin._override(conn, service="pin-service-name-override", tracer=tracer) def mock_func(args, kwargs, sql_pos, sql_kw, sql_with_dbm_tags): return args, kwargs diff --git a/tests/contrib/avro/test_avro.py b/tests/contrib/avro/test_avro.py index 3db10460a23..b2db731a71e 100644 --- a/tests/contrib/avro/test_avro.py +++ b/tests/contrib/avro/test_avro.py @@ -49,7 +49,7 @@ def test_basic_schema_serialize(avro, tracer, test_spans): pin = Pin.get_from(writer) assert pin is not None - pin.clone(tags={"cheese": "camembert"}, tracer=tracer).onto(writer) + pin._clone(tags={"cheese": "camembert"}, tracer=tracer).onto(writer) with tracer.trace("basic_avro_schema.serialization") as span: span.context.sampling_priority = AUTO_KEEP @@ -82,7 +82,7 @@ def 
test_advanced_schema_serialize(avro, tracer, test_spans): pin = Pin.get_from(writer) assert pin is not None - pin.clone(tags={"cheese": "camembert"}, tracer=tracer).onto(writer) + pin._clone(tags={"cheese": "camembert"}, tracer=tracer).onto(writer) with tracer.trace("advanced_avro_schema.serialization") as span: span.context.sampling_priority = AUTO_KEEP @@ -128,7 +128,7 @@ def test_basic_schema_deserialize(avro, tracer, test_spans): pin = Pin.get_from(reader) assert pin is not None - pin.clone(tags={"cheese": "camembert"}, tracer=tracer).onto(reader) + pin._clone(tags={"cheese": "camembert"}, tracer=tracer).onto(reader) with tracer.trace("basic_avro_schema.deserialization") as span: span.context.sampling_priority = AUTO_KEEP @@ -160,7 +160,7 @@ def test_advanced_schema_deserialize(avro, tracer, test_spans): pin = Pin.get_from(reader) assert pin is not None - pin.clone(tags={"cheese": "camembert"}, tracer=tracer).onto(reader) + pin._clone(tags={"cheese": "camembert"}, tracer=tracer).onto(reader) with tracer.trace("advanced_avro_schema.deserialization") as span: span.context.sampling_priority = AUTO_KEEP diff --git a/tests/contrib/boto/test.py b/tests/contrib/boto/test.py index 2570ca9c65c..6da67eac9d3 100644 --- a/tests/contrib/boto/test.py +++ b/tests/contrib/boto/test.py @@ -39,7 +39,9 @@ def setUp(self): @mock_ec2 def test_ec2_client(self): ec2 = boto.ec2.connect_to_region("us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(ec2) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(ec2) ec2.get_all_instances() spans = self.pop_spans() @@ -77,7 +79,7 @@ def test_ec2_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) def test_schematized_env_service_default_ec2_client(self): ec2 = boto.ec2.connect_to_region("us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) # Create an instance ec2.run_instances(21) @@ -91,7 +93,7 @@ def test_schematized_env_service_default_ec2_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_env_service_v0_ec2_client(self): ec2 = boto.ec2.connect_to_region("us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) # Create an instance ec2.run_instances(21) @@ -105,7 +107,7 @@ def test_schematized_env_service_v0_ec2_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_env_service_v1_ec2_client(self): ec2 = boto.ec2.connect_to_region("us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) # Create an instance ec2.run_instances(21) @@ -119,7 +121,7 @@ def test_schematized_env_service_v1_ec2_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict()) def test_schematized_unspecified_service_default_ec2_client(self): ec2 = boto.ec2.connect_to_region("us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) # Create an instance ec2.run_instances(21) @@ -133,7 +135,7 @@ def test_schematized_unspecified_service_default_ec2_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_unspecified_service_v0_ec2_client(self): ec2 = 
boto.ec2.connect_to_region("us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) # Create an instance ec2.run_instances(21) @@ -147,7 +149,7 @@ def test_schematized_unspecified_service_v0_ec2_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_unspecified_service_v1_ec2_client(self): ec2 = boto.ec2.connect_to_region("us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) # Create an instance ec2.run_instances(21) @@ -161,7 +163,7 @@ def test_schematized_unspecified_service_v1_ec2_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_operation_name_v0_ec2_client(self): ec2 = boto.ec2.connect_to_region("us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) # Create an instance ec2.run_instances(21) @@ -175,7 +177,7 @@ def test_schematized_operation_name_v0_ec2_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_operation_name_v1_ec2_client(self): ec2 = boto.ec2.connect_to_region("us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) # Create an instance ec2.run_instances(21) @@ -188,7 +190,9 @@ def test_schematized_operation_name_v1_ec2_client(self): def _test_s3_client(self): # DEV: To test tag params check create bucket's span s3 = boto.s3.connect_to_region("us-east-1") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) s3.get_all_buckets() spans = self.pop_spans() @@ -247,7 +251,7 @@ def _test_s3_client(self): def test_schematized_env_service_name_default_s3_client(self): # DEV: To test tag params check create bucket's span s3 = boto.s3.connect_to_region("us-east-1") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) # Create the test bucket s3.create_bucket("cheese") @@ -266,7 +270,7 @@ def test_schematized_env_service_name_default_s3_client(self): def test_schematized_env_service_name_v0_s3_client(self): # DEV: To test tag params check create bucket's span s3 = boto.s3.connect_to_region("us-east-1") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) # Create the test bucket s3.create_bucket("cheese") @@ -285,7 +289,7 @@ def test_schematized_env_service_name_v0_s3_client(self): def test_schematized_env_service_name_v1_s3_client(self): # DEV: To test tag params check create bucket's span s3 = boto.s3.connect_to_region("us-east-1") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) # Create the test bucket s3.create_bucket("cheese") @@ -304,7 +308,7 @@ def test_schematized_env_service_name_v1_s3_client(self): def test_schematized_operation_name_v0_s3_client(self): # DEV: To test tag params check create bucket's span s3 = boto.s3.connect_to_region("us-east-1") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) # Create the test bucket s3.create_bucket("cheese") @@ -323,7 +327,7 @@ def test_schematized_operation_name_v0_s3_client(self): def 
test_schematized_operation_name_v1_s3_client(self): # DEV: To test tag params check create bucket's span s3 = boto.s3.connect_to_region("us-east-1") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) # Create the test bucket s3.create_bucket("cheese") @@ -342,7 +346,7 @@ def test_schematized_operation_name_v1_s3_client(self): def test_schematized_unspecified_service_name_default_s3_client(self): # DEV: To test tag params check create bucket's span s3 = boto.s3.connect_to_region("us-east-1") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) # Create the test bucket s3.create_bucket("cheese") @@ -361,7 +365,7 @@ def test_schematized_unspecified_service_name_default_s3_client(self): def test_schematized_unspecified_service_name_v0_s3_client(self): # DEV: To test tag params check create bucket's span s3 = boto.s3.connect_to_region("us-east-1") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) # Create the test bucket s3.create_bucket("cheese") @@ -380,7 +384,7 @@ def test_schematized_unspecified_service_name_v0_s3_client(self): def test_schematized_unspecified_service_name_v1_s3_client(self): # DEV: To test tag params check create bucket's span s3 = boto.s3.connect_to_region("us-east-1") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) # Create the test bucket s3.create_bucket("cheese") @@ -411,7 +415,9 @@ def test_s3_client_no_params(self): @mock_s3 def test_s3_put(self): s3 = boto.s3.connect_to_region("us-east-1") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) s3.create_bucket("mybucket") bucket = s3.get_bucket("mybucket") k = boto.s3.key.Key(bucket) @@ -445,7 +451,9 @@ def test_s3_put(self): @mock_lambda def test_unpatch(self): lamb = boto.awslambda.connect_to_region("us-east-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(lamb) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(lamb) unpatch() # multiple calls @@ -456,11 +464,12 @@ def test_unpatch(self): @mock_s3 def test_double_patch(self): s3 = boto.s3.connect_to_region("us-east-1") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) patch() patch() - # Get the created bucket s3.create_bucket("cheese") spans = self.pop_spans() @@ -470,7 +479,9 @@ def test_double_patch(self): @mock_lambda def test_lambda_client(self): lamb = boto.awslambda.connect_to_region("us-east-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(lamb) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(lamb) # multiple calls lamb.list_functions() @@ -495,7 +506,7 @@ def test_lambda_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) def test_schematized_env_service_name_default_lambda_client(self): lamb = boto.awslambda.connect_to_region("us-east-2") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.list_functions() @@ -507,7 +518,7 @@ def test_schematized_env_service_name_default_lambda_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def 
test_schematized_env_service_name_v0_lambda_client(self): lamb = boto.awslambda.connect_to_region("us-east-2") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.list_functions() @@ -519,7 +530,7 @@ def test_schematized_env_service_name_v0_lambda_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_env_service_name_v1_lambda_client(self): lamb = boto.awslambda.connect_to_region("us-east-2") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.list_functions() @@ -531,7 +542,7 @@ def test_schematized_env_service_name_v1_lambda_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) def test_schematized_unspecified_service_name_default_lambda_client(self): lamb = boto.awslambda.connect_to_region("us-east-2") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.list_functions() @@ -543,7 +554,7 @@ def test_schematized_unspecified_service_name_default_lambda_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_unspecified_service_name_v0_lambda_client(self): lamb = boto.awslambda.connect_to_region("us-east-2") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.list_functions() @@ -555,7 +566,7 @@ def test_schematized_unspecified_service_name_v0_lambda_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_unspecified_service_name_v1_lambda_client(self): lamb = boto.awslambda.connect_to_region("us-east-2") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.list_functions() @@ -567,7 +578,7 @@ def test_schematized_unspecified_service_name_v1_lambda_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_operation_name_v0_lambda_client(self): lamb = boto.awslambda.connect_to_region("us-east-2") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.list_functions() @@ -579,7 +590,7 @@ def test_schematized_operation_name_v0_lambda_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_operation_name_v1_lambda_client(self): lamb = boto.awslambda.connect_to_region("us-east-2") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.list_functions() @@ -590,7 +601,9 @@ def test_schematized_operation_name_v1_lambda_client(self): @mock_sts def test_sts_client(self): sts = boto.sts.connect_to_region("us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sts) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sts) sts.get_federation_token(12, duration=10) @@ -610,7 +623,7 @@ def test_sts_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) def test_schematized_env_default_sts_client(self): sts = boto.sts.connect_to_region("us-west-2") - Pin.get_from(sts).clone(tracer=self.tracer).onto(sts) + Pin.get_from(sts)._clone(tracer=self.tracer).onto(sts) 
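The hunks above and below repeat one mechanical rewrite: every public Pin.clone(...) call in these tests becomes the underscore-prefixed Pin._clone(...), with the same signature and chaining. A minimal sketch of the pattern, assuming an object that patch() has already pinned and a test tracer; the helper name is hypothetical:

    # Sketch only: rebind an integration's Pin to a test tracer using the
    # private helper this patch migrates to. Pin.get_from, Pin._clone, and
    # Pin.onto are the calls used throughout these hunks.
    from ddtrace.trace import Pin

    def rebind_pin(client, tracer):
        pin = Pin.get_from(client)  # the Pin attached by patch()
        assert pin is not None
        pin._clone(tracer=tracer).onto(client)  # replaces pin.clone(tracer=...)
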
sts.get_federation_token(12, duration=10) @@ -624,7 +637,7 @@ def test_schematized_env_default_sts_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_env_v0_sts_client(self): sts = boto.sts.connect_to_region("us-west-2") - Pin.get_from(sts).clone(tracer=self.tracer).onto(sts) + Pin.get_from(sts)._clone(tracer=self.tracer).onto(sts) sts.get_federation_token(12, duration=10) @@ -638,7 +651,7 @@ def test_schematized_env_v0_sts_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_env_v1_sts_client(self): sts = boto.sts.connect_to_region("us-west-2") - Pin.get_from(sts).clone(tracer=self.tracer).onto(sts) + Pin.get_from(sts)._clone(tracer=self.tracer).onto(sts) sts.get_federation_token(12, duration=10) @@ -652,7 +665,7 @@ def test_schematized_env_v1_sts_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict()) def test_schematized_unspecified_service_default_sts_client(self): sts = boto.sts.connect_to_region("us-west-2") - Pin.get_from(sts).clone(tracer=self.tracer).onto(sts) + Pin.get_from(sts)._clone(tracer=self.tracer).onto(sts) sts.get_federation_token(12, duration=10) @@ -666,7 +679,7 @@ def test_schematized_unspecified_service_default_sts_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_unspecified_service_v0_sts_client(self): sts = boto.sts.connect_to_region("us-west-2") - Pin.get_from(sts).clone(tracer=self.tracer).onto(sts) + Pin.get_from(sts)._clone(tracer=self.tracer).onto(sts) sts.get_federation_token(12, duration=10) @@ -680,7 +693,7 @@ def test_schematized_unspecified_service_v0_sts_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_unspecified_service_sts_client(self): sts = boto.sts.connect_to_region("us-west-2") - Pin.get_from(sts).clone(tracer=self.tracer).onto(sts) + Pin.get_from(sts)._clone(tracer=self.tracer).onto(sts) sts.get_federation_token(12, duration=10) @@ -694,7 +707,7 @@ def test_schematized_unspecified_service_sts_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_operation_name_v0_sts_client(self): sts = boto.sts.connect_to_region("us-west-2") - Pin.get_from(sts).clone(tracer=self.tracer).onto(sts) + Pin.get_from(sts)._clone(tracer=self.tracer).onto(sts) sts.get_federation_token(12, duration=10) @@ -708,7 +721,7 @@ def test_schematized_operation_name_v0_sts_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_operation_name_sts_client(self): sts = boto.sts.connect_to_region("us-west-2") - Pin.get_from(sts).clone(tracer=self.tracer).onto(sts) + Pin.get_from(sts)._clone(tracer=self.tracer).onto(sts) sts.get_federation_token(12, duration=10) @@ -727,7 +740,9 @@ def test_schematized_operation_name_sts_client(self): ) def test_elasticache_client(self): elasticache = boto.elasticache.connect_to_region("us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(elasticache) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(elasticache) elasticache.describe_cache_clusters() @@ -746,7 +761,9 @@ def test_ec2_client_ot(self): """OpenTracing compatibility check of the test_ec2_client test.""" ec2 = boto.ec2.connect_to_region("us-west-2") 
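The second recurring rewrite, visible across the boto and botocore hunks, is that Pin is no longer constructed with a tracer keyword: the tests now build the Pin with only a service and assign the private _tracer attribute before calling onto(). A sketch of the before/after, with placeholder object and tracer names:

    # Sketch only: obj, service, and test_tracer are placeholders.
    from ddtrace.trace import Pin

    def pin_with_service(obj, service, test_tracer):
        # Old spelling (removed by this patch):
        #     Pin(service=service, tracer=test_tracer).onto(obj)
        pin = Pin(service=service)
        pin._tracer = test_tracer  # tracer keyword dropped; private attribute set directly
        pin.onto(obj)
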
ot_tracer = init_tracer("my_svc", self.tracer) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(ec2) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(ec2) with ot_tracer.start_active_span("ot_span"): ec2.get_all_instances() diff --git a/tests/contrib/botocore/test.py b/tests/contrib/botocore/test.py index 67eaaf55fc3..cb1a06bec1c 100644 --- a/tests/contrib/botocore/test.py +++ b/tests/contrib/botocore/test.py @@ -104,7 +104,9 @@ def setUp(self): super(BotocoreTest, self).setUp() - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(botocore.parsers.ResponseParser) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(botocore.parsers.ResponseParser) # Setting the validated flag to False ensures the redaction paths configurations are re-validated # FIXME: Ensure AWSPayloadTagging._REQUEST_REDACTION_PATHS_DEFAULTS is always in sync with # config.botocore.payload_tagging_request @@ -123,7 +125,9 @@ def tearDown(self): def test_patch_submodules(self): patch_submodules(["s3"]) ec2 = self.session.create_client("ec2", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(ec2) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(ec2) ec2.describe_instances() @@ -131,7 +135,9 @@ def test_patch_submodules(self): assert spans == [] s3 = self.session.create_client("s3", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) s3.list_buckets() s3.list_buckets() @@ -142,7 +148,9 @@ def test_patch_submodules(self): @mock_ec2 def test_traced_client(self): ec2 = self.session.create_client("ec2", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(ec2) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(ec2) ec2.describe_instances() @@ -169,7 +177,7 @@ def test_traced_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) def test_schematized_ec2_call_default(self): ec2 = self.session.create_client("ec2", region_name="us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) ec2.describe_instances() @@ -182,7 +190,7 @@ def test_schematized_ec2_call_default(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_ec2_call_v0(self): ec2 = self.session.create_client("ec2", region_name="us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) ec2.describe_instances() @@ -195,7 +203,7 @@ def test_schematized_ec2_call_v0(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_ec2_call_v1(self): ec2 = self.session.create_client("ec2", region_name="us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) ec2.describe_instances() @@ -208,7 +216,7 @@ def test_schematized_ec2_call_v1(self): @TracerTestCase.run_in_subprocess(env_overrides=dict()) def test_schematized_unspecified_service_ec2_call_default(self): ec2 = self.session.create_client("ec2", region_name="us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) ec2.describe_instances() @@ -221,7 +229,7 @@ def 
test_schematized_unspecified_service_ec2_call_default(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_unspecified_service_ec2_call_v0(self): ec2 = self.session.create_client("ec2", region_name="us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) ec2.describe_instances() @@ -234,7 +242,7 @@ def test_schematized_unspecified_service_ec2_call_v0(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_unspecified_service_ec2_call_v1(self): ec2 = self.session.create_client("ec2", region_name="us-west-2") - Pin.get_from(ec2).clone(tracer=self.tracer).onto(ec2) + Pin.get_from(ec2)._clone(tracer=self.tracer).onto(ec2) ec2.describe_instances() @@ -252,7 +260,9 @@ def test_schematized_unspecified_service_ec2_call_v1(self): @mock_dynamodb def test_dynamodb_put_get(self): ddb = self.session.create_client("dynamodb", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(ddb) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(ddb) with self.override_config("botocore", dict(instrument_internals=True)): ddb.create_table( @@ -296,7 +306,9 @@ def test_dynamodb_put_get(self): @mock_dynamodb def test_dynamodb_put_get_with_table_primary_key_mapping(self): ddb = self.session.create_client("dynamodb", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(ddb) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(ddb) with self.override_config( "botocore", @@ -358,7 +370,9 @@ def test_dynamodb_put_get_with_table_primary_key_mapping(self): @mock_dynamodb def test_dynamodb_put_get_with_broken_table_primary_key_mapping(self): ddb = self.session.create_client("dynamodb", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(ddb) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(ddb) with self.override_config( "botocore", @@ -407,7 +421,9 @@ def test_dynamodb_put_get_with_broken_table_primary_key_mapping(self): @mock_s3 def test_s3_client(self): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) s3.list_buckets() s3.list_buckets() @@ -444,7 +460,9 @@ def test_s3_head_404_default(self): API calls with a 404 response """ s3 = self.session.create_client("s3", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) # We need a bucket for this test s3.create_bucket(Bucket="test", CreateBucketConfiguration=dict(LocationConstraint="us-west-2")) @@ -472,7 +490,9 @@ def test_s3_head_404_as_errors(self): we attach exception information to S3 HeadObject 404 responses """ s3 = self.session.create_client("s3", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) # We need a bucket for this test s3.create_bucket(Bucket="test", CreateBucketConfiguration=dict(LocationConstraint="us-west-2")) @@ -500,7 +520,9 @@ def test_s3_head_404_as_errors(self): def _test_s3_put(self): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, 
tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) params = { "Bucket": "mybucket", "CreateBucketConfiguration": { @@ -581,7 +603,7 @@ def test_s3_put_no_params(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_BOTOCORE_SERVICE="botocore")) def test_service_name_override(self): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) params = { "Bucket": "mybucket", @@ -612,7 +634,7 @@ def test_service_name_override(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) def test_schematized_s3_client_default(self): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) s3.list_buckets() @@ -626,7 +648,7 @@ def test_schematized_s3_client_default(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_s3_client_v0(self): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) s3.list_buckets() @@ -640,7 +662,7 @@ def test_schematized_s3_client_v0(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_s3_client_v1(self): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) s3.list_buckets() @@ -654,7 +676,7 @@ def test_schematized_s3_client_v1(self): @TracerTestCase.run_in_subprocess(env_overrides=dict()) def test_schematized_unspecified_service_s3_client_default(self): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) s3.list_buckets() @@ -668,7 +690,7 @@ def test_schematized_unspecified_service_s3_client_default(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_unspecified_service_s3_client_v0(self): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) s3.list_buckets() @@ -682,7 +704,7 @@ def test_schematized_unspecified_service_s3_client_v0(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_unspecified_service_s3_client_v1(self): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin.get_from(s3).clone(tracer=self.tracer).onto(s3) + Pin.get_from(s3)._clone(tracer=self.tracer).onto(s3) s3.list_buckets() @@ -695,7 +717,9 @@ def test_schematized_unspecified_service_s3_client_v1(self): def _test_sqs_client(self): self.sqs_client.delete_queue(QueueUrl=self.queue_name) # Delete so we can test create_queue spans - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) self.sqs_test_queue = self.sqs_client.create_queue(QueueName=self.queue_name) spans = self.get_spans() @@ -731,7 +755,9 @@ def test_sqs_client_no_params(self): @mock_sqs def test_sqs_send_message_non_url_queue(self): 
- Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) self.sqs_client.send_message(QueueUrl="Test", MessageBody="world") spans = self.get_spans() @@ -744,7 +770,9 @@ def test_sqs_send_message_non_url_queue(self): @mock_sqs def test_sqs_send_message_distributed_tracing_off(self): with self.override_config("botocore", dict(distributed_tracing=False)): - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) self.sqs_client.send_message(QueueUrl=self.sqs_test_queue["QueueUrl"], MessageBody="world") spans = self.get_spans() @@ -774,7 +802,9 @@ def test_sqs_send_message_distributed_tracing_off(self): @mock_sqs def test_sqs_send_message_distributed_tracing_on(self): with self.override_config("botocore", dict(distributed_tracing=True, propagation_enabled=True)): - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) self.sqs_client.send_message(QueueUrl=self.sqs_test_queue["QueueUrl"], MessageBody="world") spans = self.get_spans() @@ -851,8 +881,8 @@ def _test_distributed_tracing_sns_to_sqs(self, raw_message_delivery): AttributeValue="true", ) - Pin.get_from(sns).clone(tracer=self.tracer).onto(sns) - Pin.get_from(self.sqs_client).clone(tracer=self.tracer).onto(self.sqs_client) + Pin.get_from(sns)._clone(tracer=self.tracer).onto(sns) + Pin.get_from(self.sqs_client)._clone(tracer=self.tracer).onto(self.sqs_client) sns.publish(TopicArn=topic_arn, Message="test") @@ -891,7 +921,9 @@ def _test_distributed_tracing_sns_to_sqs(self, raw_message_delivery): @mock_sqs def test_sqs_send_message_trace_injection_with_max_message_attributes(self): - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, "two": {"DataType": "String", "StringValue": "two"}, @@ -934,7 +966,9 @@ def test_sqs_send_message_trace_injection_with_max_message_attributes(self): @mock_sqs def test_sqs_send_message_batch_trace_injection_with_no_message_attributes(self): - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) entries = [ { "Id": "1", @@ -969,7 +1003,9 @@ def test_sqs_send_message_batch_trace_injection_with_no_message_attributes(self) @mock_sqs def test_sqs_send_message_batch_trace_injection_with_message_attributes(self): - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) entries = [ { "Id": "1", @@ -1016,7 +1052,9 @@ def test_sqs_send_message_batch_trace_injection_with_message_attributes(self): @mock_sqs def test_sqs_send_message_batch_trace_injection_with_max_message_attributes(self): - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) entries = [ { "Id": "1", @@ -1063,7 +1101,7 @@ def test_sqs_send_message_batch_trace_injection_with_max_message_attributes(self @mock_sqs @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) def 
test_schematized_sqs_client_default(self): - Pin.get_from(self.sqs_client).clone(tracer=self.tracer).onto(self.sqs_client) + Pin.get_from(self.sqs_client)._clone(tracer=self.tracer).onto(self.sqs_client) self.sqs_client.send_message(QueueUrl=self.sqs_test_queue["QueueUrl"], MessageBody="world") self.sqs_client.send_message_batch( QueueUrl=self.sqs_test_queue["QueueUrl"], Entries=[{"Id": "1", "MessageBody": "hello"}] @@ -1085,7 +1123,7 @@ def test_schematized_sqs_client_default(self): @mock_sqs @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_sqs_client_v0(self): - Pin.get_from(self.sqs_client).clone(tracer=self.tracer).onto(self.sqs_client) + Pin.get_from(self.sqs_client)._clone(tracer=self.tracer).onto(self.sqs_client) self.sqs_client.send_message(QueueUrl=self.sqs_test_queue["QueueUrl"], MessageBody="world") self.sqs_client.send_message_batch( @@ -1108,7 +1146,7 @@ def test_schematized_sqs_client_v0(self): @mock_sqs @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_sqs_client_v1(self): - Pin.get_from(self.sqs_client).clone(tracer=self.tracer).onto(self.sqs_client) + Pin.get_from(self.sqs_client)._clone(tracer=self.tracer).onto(self.sqs_client) self.sqs_client.send_message(QueueUrl=self.sqs_test_queue["QueueUrl"], MessageBody="world") self.sqs_client.send_message_batch( @@ -1131,7 +1169,7 @@ def test_schematized_sqs_client_v1(self): @mock_sqs @TracerTestCase.run_in_subprocess(env_overrides=dict()) def test_schematized_unspecified_service_sqs_client_default(self): - Pin.get_from(self.sqs_client).clone(tracer=self.tracer).onto(self.sqs_client) + Pin.get_from(self.sqs_client)._clone(tracer=self.tracer).onto(self.sqs_client) self.sqs_client.send_message(QueueUrl=self.sqs_test_queue["QueueUrl"], MessageBody="world") self.sqs_client.send_message_batch( @@ -1154,7 +1192,7 @@ def test_schematized_unspecified_service_sqs_client_default(self): @mock_sqs @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_unspecified_service_sqs_client_v0(self): - Pin.get_from(self.sqs_client).clone(tracer=self.tracer).onto(self.sqs_client) + Pin.get_from(self.sqs_client)._clone(tracer=self.tracer).onto(self.sqs_client) self.sqs_client.send_message(QueueUrl=self.sqs_test_queue["QueueUrl"], MessageBody="world") self.sqs_client.send_message_batch( @@ -1177,7 +1215,7 @@ def test_schematized_unspecified_service_sqs_client_v0(self): @mock_sqs @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_unspecified_service_sqs_client_v1(self): - Pin.get_from(self.sqs_client).clone(tracer=self.tracer).onto(self.sqs_client) + Pin.get_from(self.sqs_client)._clone(tracer=self.tracer).onto(self.sqs_client) self.sqs_client.send_message(QueueUrl=self.sqs_test_queue["QueueUrl"], MessageBody="world") self.sqs_client.send_message_batch( @@ -1205,7 +1243,9 @@ def test_stepfunctions_send_start_execution_trace_injection(self): definition='{"StartAt": "HelloWorld","States": {"HelloWorld": {"Type": "Pass","End": true}}}', roleArn="arn:aws:iam::012345678901:role/DummyRole", ) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sf) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sf) start_execution_dict = { "stateMachineArn": "arn:aws:states:us-west-2:000000000000:stateMachine:lincoln", "input": '{"baz": 1}', @@ -1226,7 +1266,9 @@ 
def test_stepfunctions_send_start_execution_trace_injection_with_array_input(sel definition='{"StartAt": "HelloWorld","States": {"HelloWorld": {"Type": "Pass","End": true}}}', roleArn="arn:aws:iam::012345678901:role/DummyRole", ) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sf) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sf) sf.start_execution( stateMachineArn="arn:aws:states:us-west-2:000000000000:stateMachine:miller", input='["one", "two", "three"]' ) @@ -1245,7 +1287,9 @@ def test_stepfunctions_send_start_execution_trace_injection_with_true_input(self definition='{"StartAt": "HelloWorld","States": {"HelloWorld": {"Type": "Pass","End": true}}}', roleArn="arn:aws:iam::012345678901:role/DummyRole", ) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sf) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sf) sf.start_execution(stateMachineArn="arn:aws:states:us-west-2:000000000000:stateMachine:hobart", input="true") # I've tried to find a way to make Moto show me the input to the execution, but can't get that to work. spans = self.get_spans() @@ -1264,7 +1308,9 @@ def _test_kinesis_client(self): {"Data": json.dumps({"Hello": "World"}), "PartitionKey": partition_key}, {"Data": json.dumps({"foo": "bar"}), "PartitionKey": partition_key}, ] - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) client.put_records(StreamName=stream_name, Records=data) spans = self.get_spans() @@ -1335,7 +1381,9 @@ def test_kinesis_distributed_tracing_on(self): @mock_kinesis def test_unpatch(self): kinesis = self.session.create_client("kinesis", region_name="us-east-1") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(kinesis) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(kinesis) unpatch() @@ -1345,7 +1393,9 @@ def test_unpatch(self): @mock_sqs def test_double_patch(self): - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) patch() patch() @@ -1402,8 +1452,8 @@ def _test_data_streams_sns_to_sqs(self, use_raw_delivery): AttributeValue="true", ) - Pin.get_from(sns).clone(tracer=self.tracer).onto(sns) - Pin.get_from(self.sqs_client).clone(tracer=self.tracer).onto(self.sqs_client) + Pin.get_from(sns)._clone(tracer=self.tracer).onto(sns) + Pin.get_from(self.sqs_client)._clone(tracer=self.tracer).onto(self.sqs_client) sns.publish(TopicArn=topic_arn, Message="test") @@ -1478,7 +1528,9 @@ def test_data_streams_sqs(self): ): mt.return_value = 1642544540 - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, "two": {"DataType": "String", "StringValue": "two"}, @@ -1537,7 +1589,9 @@ def test_data_streams_sqs_batch(self): ): mt.return_value = 1642544540 - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, "two": {"DataType": "String", "StringValue": "two"}, @@ -1615,7 +1669,9 @@ def test_data_streams_sqs_no_header(self): ): mt.return_value = 1642544540 - Pin(service=self.TEST_SERVICE, 
tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, "two": {"DataType": "String", "StringValue": "two"}, @@ -1655,7 +1711,9 @@ def test_data_streams_sqs_no_header(self): def test_lambda_client(self): # DEV: No lambda params tagged so we only check no ClientContext lamb = self.session.create_client("lambda", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(lamb) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(lamb) lamb.list_functions() @@ -1691,7 +1749,9 @@ def test_lambda_invoke_distributed_tracing_off(self): MemorySize=128, ) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(lamb) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(lamb) lamb.invoke( FunctionName="ironmaiden", @@ -1731,7 +1791,9 @@ def test_lambda_invoke_bad_context_client(self): MemorySize=128, ) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(lamb) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(lamb) lamb.invoke( FunctionName="black-sabbath", @@ -1757,7 +1819,7 @@ def test_schematized_lambda_client_default(self): # DEV: No lambda params tagged so we only check no ClientContext lamb = self.session.create_client("lambda", region_name="us-west-2", endpoint_url="http://localhost:4566") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.create_function( FunctionName="guns-and-roses", Runtime="python3.8", @@ -1791,7 +1853,7 @@ def test_schematized_lambda_client_default(self): def test_schematized_lambda_client_v0(self): # DEV: No lambda params tagged so we only check no ClientContext lamb = self.session.create_client("lambda", region_name="us-west-2", endpoint_url="http://localhost:4566") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.create_function( FunctionName="guns-and-roses", @@ -1822,7 +1884,7 @@ def test_schematized_lambda_client_v0(self): def test_schematized_lambda_client_v1(self): # DEV: No lambda params tagged so we only check no ClientContext lamb = self.session.create_client("lambda", region_name="us-west-2", endpoint_url="http://localhost:4566") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.create_function( FunctionName="guns-and-roses", @@ -1853,7 +1915,7 @@ def test_schematized_lambda_client_v1(self): def test_schematized_unspecified_service_lambda_client_default(self): # DEV: No lambda params tagged so we only check no ClientContext lamb = self.session.create_client("lambda", region_name="us-west-2", endpoint_url="http://localhost:4566") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.create_function( FunctionName="guns-and-roses", @@ -1884,7 +1946,7 @@ def test_schematized_unspecified_service_lambda_client_default(self): def test_schematized_unspecified_service_lambda_client_v0(self): # DEV: No lambda params tagged so we only check no ClientContext lamb = self.session.create_client("lambda", region_name="us-west-2", endpoint_url="http://localhost:4566") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.create_function( FunctionName="guns-and-roses", 
@@ -1915,7 +1977,7 @@ def test_schematized_unspecified_service_lambda_client_v0(self): def test_schematized_unspecified_service_lambda_client_v1(self): # DEV: No lambda params tagged so we only check no ClientContext lamb = self.session.create_client("lambda", region_name="us-west-2", endpoint_url="http://localhost:4566") - Pin.get_from(lamb).clone(tracer=self.tracer).onto(lamb) + Pin.get_from(lamb)._clone(tracer=self.tracer).onto(lamb) lamb.create_function( FunctionName="guns-and-roses", @@ -1968,7 +2030,9 @@ def test_eventbridge_single_entry_trace_injection(self): Targets=[{"Id": "a-test-bus-rule-target", "Arn": "arn:aws:sqs:us-east-1:000000000000:Test"}], ) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(bridge) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(bridge) bridge.put_events(Entries=entries) messages = self.sqs_client.receive_message(QueueUrl=queue_url, WaitTimeSeconds=2) @@ -2030,7 +2094,9 @@ def test_eventbridge_multiple_entries_trace_injection(self): Targets=[{"Id": "a-test-bus-rule-target", "Arn": "arn:aws:sqs:us-east-1:000000000000:Test"}], ) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(bridge) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(bridge) bridge.put_events(Entries=entries) messages = self.sqs_client.receive_message(QueueUrl=queue_url, WaitTimeSeconds=2) @@ -2070,7 +2136,9 @@ def test_eventbridge_multiple_entries_trace_injection(self): @mock_kms def test_kms_client(self): kms = self.session.create_client("kms", region_name="us-east-1") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(kms) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(kms) kms.list_keys(Limit=21) @@ -2095,7 +2163,7 @@ def test_kms_client(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) def test_schematized_kms_client_default(self): kms = self.session.create_client("kms", region_name="us-east-1") - Pin.get_from(kms).clone(tracer=self.tracer).onto(kms) + Pin.get_from(kms)._clone(tracer=self.tracer).onto(kms) kms.list_keys(Limit=21) @@ -2110,7 +2178,7 @@ def test_schematized_kms_client_default(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def test_schematized_kms_client_v0(self): kms = self.session.create_client("kms", region_name="us-east-1") - Pin.get_from(kms).clone(tracer=self.tracer).onto(kms) + Pin.get_from(kms)._clone(tracer=self.tracer).onto(kms) kms.list_keys(Limit=21) @@ -2125,7 +2193,7 @@ def test_schematized_kms_client_v0(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_kms_client_v1(self): kms = self.session.create_client("kms", region_name="us-east-1") - Pin.get_from(kms).clone(tracer=self.tracer).onto(kms) + Pin.get_from(kms)._clone(tracer=self.tracer).onto(kms) kms.list_keys(Limit=21) @@ -2140,7 +2208,7 @@ def test_schematized_kms_client_v1(self): @TracerTestCase.run_in_subprocess(env_overrides=dict()) def test_schematized_unspecified_service_kms_client_default(self): kms = self.session.create_client("kms", region_name="us-east-1") - Pin.get_from(kms).clone(tracer=self.tracer).onto(kms) + Pin.get_from(kms)._clone(tracer=self.tracer).onto(kms) kms.list_keys(Limit=21) @@ -2155,7 +2223,7 @@ def test_schematized_unspecified_service_kms_client_default(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) def 
test_schematized_unspecified_service_kms_client_v0(self): kms = self.session.create_client("kms", region_name="us-east-1") - Pin.get_from(kms).clone(tracer=self.tracer).onto(kms) + Pin.get_from(kms)._clone(tracer=self.tracer).onto(kms) kms.list_keys(Limit=21) @@ -2170,7 +2238,7 @@ def test_schematized_unspecified_service_kms_client_v0(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_schematized_unspecified_service_kms_client_v1(self): kms = self.session.create_client("kms", region_name="us-east-1") - Pin.get_from(kms).clone(tracer=self.tracer).onto(kms) + Pin.get_from(kms)._clone(tracer=self.tracer).onto(kms) kms.list_keys(Limit=21) @@ -2188,7 +2256,9 @@ def test_traced_client_ot(self): with ot_tracer.start_active_span("ec2_op"): ec2 = self.session.create_client("ec2", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(ec2) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(ec2) ec2.describe_instances() spans = self.get_spans() @@ -2235,7 +2305,9 @@ def test_stubber_no_response_metadata(self): @mock_firehose def test_firehose_no_records_arg(self): firehose = self.session.create_client("firehose", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(firehose) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(firehose) stream_name = "test-stream" account_id = "test-account" @@ -2309,9 +2381,11 @@ def _test_sns(self, use_default_tracer=False): sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=sqs_arn) if use_default_tracer: - Pin.get_from(sns).clone(tracer=self.tracer).onto(sns) + Pin.get_from(sns)._clone(tracer=self.tracer).onto(sns) else: - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sns) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sns) sns.publish(TopicArn=topic_arn, Message="test") spans = self.get_spans() @@ -2410,7 +2484,9 @@ def test_sns_send_message_trace_injection_with_no_message_attributes(self): sqs_arn = "arn:aws:sqs:{}:{}:{}".format("us-east-1", url_parts[-2], url_parts[-1]) sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=sqs_arn) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sns) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sns) sns.publish(TopicArn=topic_arn, Message="test") spans = self.get_spans() @@ -2469,7 +2545,9 @@ def test_sns_send_message_trace_injection_with_message_attributes(self): sqs_arn = "arn:aws:sqs:{}:{}:{}".format(region, url_parts[-2], url_parts[-1]) sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=sqs_arn) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sns) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sns) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, @@ -2544,7 +2622,9 @@ def test_sns_send_message_trace_injection_with_max_message_attributes(self): sqs_arn = "arn:aws:sqs:{}:{}:{}".format(region, url_parts[-2], url_parts[-1]) sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=sqs_arn) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sns) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sns) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, @@ -2610,8 +2690,10 @@ def test_sns_send_message_batch_trace_injection_with_no_message_attributes(self) sqs_arn = "arn:aws:sqs:{}:{}:{}".format(region, url_parts[-2], url_parts[-1]) 
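These SNS tests all share the setup that the next statements complete: derive the queue ARN from the queue URL, subscribe the queue to the topic, then pin the test tracer on both clients so publish and receive spans are captured together. A condensed sketch of that wiring, with the region and client names as placeholders:

    # Sketch only: mirrors the SNS -> SQS setup repeated in these tests.
    from ddtrace.trace import Pin

    def wire_sns_to_sqs(sns, sqs_client, queue_url, topic_arn, tracer, region="us-east-1"):
        url_parts = queue_url.split("/")
        sqs_arn = "arn:aws:sqs:{}:{}:{}".format(region, url_parts[-2], url_parts[-1])
        sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=sqs_arn)
        # Same test tracer on both clients, via the private _clone helper.
        Pin.get_from(sns)._clone(tracer=tracer).onto(sns)
        Pin.get_from(sqs_client)._clone(tracer=tracer).onto(sqs_client)
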
sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=sqs_arn) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sns) - Pin.get_from(sns).clone(tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sns) + Pin.get_from(sns)._clone(tracer=self.tracer).onto(self.sqs_client) entries = [ { "Id": "1", @@ -2704,7 +2786,9 @@ def test_sns_send_message_batch_trace_injection_with_message_attributes(self): sqs_arn = "arn:aws:sqs:{}:{}:{}".format(region, url_parts[-2], url_parts[-1]) sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=sqs_arn) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sns) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sns) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, @@ -2775,7 +2859,9 @@ def test_sns_send_message_batch_trace_injection_with_max_message_attributes(self sqs_arn = "arn:aws:sqs:{}:{}:{}".format(region, url_parts[-2], url_parts[-1]) sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=sqs_arn) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sns) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sns) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, @@ -2902,7 +2988,9 @@ def test_kinesis_get_records_empty_poll_disabled(self): stream_name = "kinesis_get_records_empty_poll_disabled" shard_id, _ = self._kinesis_create_stream(client, stream_name) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) shard_iterator = self._kinesis_get_shard_iterator(client, stream_name, shard_id) @@ -2927,7 +3015,9 @@ def test_kinesis_get_records_empty_poll_enabled(self): stream_name = "kinesis_get_records_empty_poll_enabled" shard_id, _ = self._kinesis_create_stream(client, stream_name) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) shard_iterator = self._kinesis_get_shard_iterator(client, stream_name, shard_id) @@ -2950,7 +3040,9 @@ def test_sqs_get_records_empty_poll_disabled(self): # pop any spans created from previous operations spans = self.pop_spans() - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) response = None response = self.sqs_client.receive_message( @@ -2971,7 +3063,9 @@ def test_sqs_get_records_empty_poll_enabled(self): # pop any spans created from previous operations spans = self.pop_spans() - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) response = None response = self.sqs_client.receive_message( @@ -2994,7 +3088,9 @@ def _test_kinesis_put_record_trace_injection(self, test_name, data, client=None, partition_key = "1234" - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) if enable_stream_arn: client.put_record(StreamName=stream_name, Data=data, PartitionKey=partition_key, StreamARN=stream_arn) else: @@ -3029,7 +3125,9 @@ def _test_kinesis_put_records_trace_injection( stream_name = "kinesis_put_records_" + test_name shard_id, stream_arn = self._kinesis_create_stream(client, stream_name) - 
Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) if enable_stream_arn: client.put_records(StreamName=stream_name, Records=data, StreamARN=stream_arn) else: @@ -3328,7 +3426,9 @@ def test_kinesis_put_records_inject_data_streams_to_every_record_propagation_dis data = json.dumps({"json": "string"}) records = self._kinesis_generate_records(data, 5) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) client.put_records(StreamName=stream_name, Records=records, StreamARN=stream_arn) shard_iterator = self._kinesis_get_shard_iterator(client, stream_name, shard_id) @@ -3357,7 +3457,9 @@ def test_kinesis_put_records_inject_data_streams_to_every_record_propagation_ena data = json.dumps({"json": "string"}) records = self._kinesis_generate_records(data, 5) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) client.put_records(StreamName=stream_name, Records=records, StreamARN=stream_arn) shard_iterator = self._kinesis_get_shard_iterator(client, stream_name, shard_id) @@ -3386,7 +3488,9 @@ def test_kinesis_put_records_inject_data_streams_to_every_record_disable_all_inj data = json.dumps({"json": "string"}) records = self._kinesis_generate_records(data, 5) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) client.put_records(StreamName=stream_name, Records=records, StreamARN=stream_arn) shard_iterator = self._kinesis_get_shard_iterator(client, stream_name, shard_id) @@ -3454,7 +3558,7 @@ def test_kinesis_parenting(self): {"Data": json.dumps({"foo": "bar"}), "PartitionKey": partition_key}, ] - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) with self.tracer.trace("kinesis.manual_span"): client.create_stream(StreamName=stream_name, ShardCount=1) @@ -3474,7 +3578,7 @@ def test_kinesis_parenting(self): @mock_sqs def test_sqs_parenting(self): - Pin.get_from(self.sqs_client).clone(tracer=self.tracer).onto(self.sqs_client) + Pin.get_from(self.sqs_client)._clone(tracer=self.tracer).onto(self.sqs_client) with self.tracer.trace("sqs.manual_span"): self.sqs_client.send_message(QueueUrl=self.sqs_test_queue["QueueUrl"], MessageBody="world") @@ -3510,7 +3614,7 @@ def test_schematized_kinesis_client_default(self): {"Data": json.dumps({"foo": "bar"}), "PartitionKey": partition_key}, ] - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) client.create_stream(StreamName=stream_name, ShardCount=1) client.put_records(StreamName=stream_name, Records=data) @@ -3531,7 +3635,7 @@ def test_schematized_kinesis_client_v0(self): {"Data": json.dumps({"Hello": "World"}), "PartitionKey": partition_key}, {"Data": json.dumps({"foo": "bar"}), "PartitionKey": partition_key}, ] - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) client.create_stream(StreamName=stream_name, ShardCount=1) client.put_records(StreamName=stream_name, Records=data) @@ -3552,7 +3656,7 @@ def test_schematized_kinesis_client_v1(self): {"Data": json.dumps({"Hello": "World"}), "PartitionKey": partition_key}, {"Data": json.dumps({"foo": "bar"}), 
"PartitionKey": partition_key}, ] - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) client.create_stream(StreamName=stream_name, ShardCount=1) client.put_records(StreamName=stream_name, Records=data) @@ -3573,7 +3677,7 @@ def test_schematized_unspecified_service_kinesis_client_default(self): {"Data": json.dumps({"Hello": "World"}), "PartitionKey": partition_key}, {"Data": json.dumps({"foo": "bar"}), "PartitionKey": partition_key}, ] - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) client.create_stream(StreamName=stream_name, ShardCount=1) client.put_records(StreamName=stream_name, Records=data) @@ -3594,7 +3698,7 @@ def test_schematized_unspecified_service_kinesis_client_v0(self): {"Data": json.dumps({"Hello": "World"}), "PartitionKey": partition_key}, {"Data": json.dumps({"foo": "bar"}), "PartitionKey": partition_key}, ] - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) client.create_stream(StreamName=stream_name, ShardCount=1) client.put_records(StreamName=stream_name, Records=data) @@ -3615,7 +3719,7 @@ def test_schematized_unspecified_service_kinesis_client_v1(self): {"Data": json.dumps({"Hello": "World"}), "PartitionKey": partition_key}, {"Data": json.dumps({"foo": "bar"}), "PartitionKey": partition_key}, ] - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) client.create_stream(StreamName=stream_name, ShardCount=1) client.put_records(StreamName=stream_name, Records=data) @@ -3630,7 +3734,9 @@ def test_secretsmanager(self): with mock_secretsmanager(): client = self.session.create_client("secretsmanager", region_name="us-east-1") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) resp = client.create_secret(Name="/my/secrets", SecretString="supersecret-string") assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 @@ -3655,7 +3761,9 @@ def test_secretsmanager_binary(self): with mock_secretsmanager(): client = self.session.create_client("secretsmanager", region_name="us-east-1") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) resp = client.create_secret(Name="/my/secrets", SecretBinary=b"supersecret-binary") assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 @@ -3681,7 +3789,7 @@ def test_schematized_secretsmanager_default(self): with mock_secretsmanager(): client = self.session.create_client("secretsmanager", region_name="us-east-1") - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) resp = client.create_secret(Name="/my/secrets", SecretString="supersecret-string") assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 @@ -3699,7 +3807,7 @@ def test_schematized_secretsmanager_v0(self): with mock_secretsmanager(): client = self.session.create_client("secretsmanager", region_name="us-east-1") - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) resp = client.create_secret(Name="/my/secrets", SecretString="supersecret-string") assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 @@ -3717,7 +3825,7 @@ def 
test_schematized_secretsmanager_v1(self): with mock_secretsmanager(): client = self.session.create_client("secretsmanager", region_name="us-east-1") - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) resp = client.create_secret(Name="/my/secrets", SecretString="supersecret-string") assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 @@ -3735,7 +3843,7 @@ def test_schematized_unspecified_service_secretsmanager_default(self): with mock_secretsmanager(): client = self.session.create_client("secretsmanager", region_name="us-east-1") - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) resp = client.create_secret(Name="/my/secrets", SecretString="supersecret-string") assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 @@ -3753,7 +3861,7 @@ def test_schematized_unspecified_service_secretsmanager_v0(self): with mock_secretsmanager(): client = self.session.create_client("secretsmanager", region_name="us-east-1") - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) resp = client.create_secret(Name="/my/secrets", SecretString="supersecret-string") assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 @@ -3771,7 +3879,7 @@ def test_schematized_unspecified_service_secretsmanager_v1(self): with mock_secretsmanager(): client = self.session.create_client("secretsmanager", region_name="us-east-1") - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) resp = client.create_secret(Name="/my/secrets", SecretString="supersecret-string") assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 @@ -3787,7 +3895,9 @@ def test_schematized_unspecified_service_secretsmanager_v1(self): @mock_sqs def test_aws_payload_tagging_sqs(self): with self.override_config("botocore", dict(payload_tagging_request="all", payload_tagging_response="all")): - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(self.sqs_client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(self.sqs_client) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, "two": {"DataType": "String", "StringValue": "two"}, @@ -3844,7 +3954,9 @@ def test_aws_payload_tagging_sns(self): sqs_arn = "arn:aws:sqs:{}:{}:{}".format(region, url_parts[-2], url_parts[-1]) sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=sqs_arn) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sns) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sns) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, @@ -3897,7 +4009,9 @@ def test_aws_payload_tagging_sns_valid_config(self): sqs_arn = "arn:aws:sqs:{}:{}:{}".format(region, url_parts[-2], url_parts[-1]) sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=sqs_arn) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(sns) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(sns) message_attributes = { "one": {"DataType": "String", "StringValue": "one"}, @@ -3933,7 +4047,9 @@ def test_aws_payload_tagging_sns_valid_config(self): def test_aws_payload_tagging_s3(self): with self.override_config("botocore", dict(payload_tagging_request="all", payload_tagging_response="all")): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, 
tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) s3.list_buckets() s3.list_buckets() @@ -3965,7 +4081,9 @@ def test_aws_payload_tagging_s3_invalid_config(self): dict(payload_tagging_request="non_json_path", payload_tagging_response="$..Attributes.PlatformCredential"), ): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) s3.list_buckets() s3.list_buckets() @@ -3983,7 +4101,9 @@ def test_aws_payload_tagging_s3_valid_config(self): "botocore", dict(payload_tagging_request="$..bucket", payload_tagging_response="$..HTTPHeaders") ): s3 = self.session.create_client("s3", region_name="us-west-2") - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(s3) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(s3) s3.list_buckets() s3.list_buckets() @@ -4028,7 +4148,9 @@ def test_aws_payload_tagging_eventbridge(self): Targets=[{"Id": "a-test-bus-rule-target", "Arn": "arn:aws:sqs:us-east-1:000000000000:Test"}], ) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(bridge) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(bridge) bridge.put_events(Entries=entries) self.sqs_client.receive_message(QueueUrl=queue_url, WaitTimeSeconds=2) @@ -4048,7 +4170,7 @@ def test_aws_payload_tagging_kinesis(self): {"Data": json.dumps({"foo": "bar"}), "PartitionKey": partition_key}, ] - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) with self.tracer.trace("kinesis.manual_span"): client.create_stream(StreamName=stream_name, ShardCount=1) diff --git a/tests/contrib/botocore/test_bedrock.py b/tests/contrib/botocore/test_bedrock.py index 1cf5618bd0e..578c34ce981 100644 --- a/tests/contrib/botocore/test_bedrock.py +++ b/tests/contrib/botocore/test_bedrock.py @@ -42,7 +42,7 @@ def aws_credentials(): def mock_tracer(ddtrace_global_config, bedrock_client): pin = Pin.get_from(bedrock_client) mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) - pin.override(bedrock_client, tracer=mock_tracer) + pin._override(bedrock_client, tracer=mock_tracer) yield mock_tracer @@ -102,7 +102,7 @@ def setUp(self): self.bedrock_client = self.session.client("bedrock-runtime") self.mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) pin = Pin.get_from(self.bedrock_client) - pin.override(self.bedrock_client, tracer=self.mock_tracer) + pin._override(self.bedrock_client, tracer=self.mock_tracer) super(TestBedrockConfig, self).setUp() diff --git a/tests/contrib/botocore/test_bedrock_llmobs.py b/tests/contrib/botocore/test_bedrock_llmobs.py index 790b86f0704..711f1287f99 100644 --- a/tests/contrib/botocore/test_bedrock_llmobs.py +++ b/tests/contrib/botocore/test_bedrock_llmobs.py @@ -107,7 +107,7 @@ def expected_llmobs_span_event(span, n_output, message=False): def _test_llmobs_invoke(cls, provider, bedrock_client, mock_llmobs_span_writer, cassette_name=None, n_output=1): mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) pin = Pin.get_from(bedrock_client) - pin.override(bedrock_client, tracer=mock_tracer) + pin._override(bedrock_client, tracer=mock_tracer) # Need to disable and re-enable LLMObs service to use the mock tracer LLMObs.disable() LLMObs.enable(_tracer=mock_tracer, integrations_enabled=False) # only want botocore patched @@
-148,7 +148,7 @@ def _test_llmobs_invoke_stream( ): mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) pin = Pin.get_from(bedrock_client) - pin.override(bedrock_client, tracer=mock_tracer) + pin._override(bedrock_client, tracer=mock_tracer) # Need to disable and re-enable LLMObs service to use the mock tracer LLMObs.disable() LLMObs.enable(_tracer=mock_tracer, integrations_enabled=False) # only want botocore patched @@ -249,7 +249,7 @@ def test_llmobs_error(self, ddtrace_global_config, bedrock_client, mock_llmobs_s mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) pin = Pin.get_from(bedrock_client) - pin.override(bedrock_client, tracer=mock_tracer) + pin._override(bedrock_client, tracer=mock_tracer) # Need to disable and re-enable LLMObs service to use the mock tracer LLMObs.disable() LLMObs.enable(_tracer=mock_tracer, integrations_enabled=False) # only want botocore patched diff --git a/tests/contrib/cassandra/test.py b/tests/contrib/cassandra/test.py index 21b98d6396f..e63572076ee 100644 --- a/tests/contrib/cassandra/test.py +++ b/tests/contrib/cassandra/test.py @@ -360,7 +360,7 @@ def setUp(self): def _traced_session(self): tracer = DummyTracer() - Pin.get_from(self.cluster).clone(tracer=tracer).onto(self.cluster) + Pin.get_from(self.cluster)._clone(tracer=tracer).onto(self.cluster) return self.cluster.connect(self.TEST_KEYSPACE), tracer @@ -379,7 +379,9 @@ def setUp(self): def _traced_session(self): tracer = DummyTracer() # pin the global Cluster to test if they will conflict - Pin(service=self.TEST_SERVICE, tracer=tracer).onto(Cluster) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = tracer + pin.onto(Cluster) self.cluster = Cluster(port=CASSANDRA_CONFIG["port"]) return self.cluster.connect(self.TEST_KEYSPACE), tracer @@ -403,7 +405,9 @@ def _traced_session(self): Pin(service="not-%s" % self.TEST_SERVICE).onto(Cluster) self.cluster = Cluster(port=CASSANDRA_CONFIG["port"]) - Pin(service=self.TEST_SERVICE, tracer=tracer).onto(self.cluster) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = tracer + pin.onto(self.cluster) return self.cluster.connect(self.TEST_KEYSPACE), tracer def test_patch_unpatch(self): @@ -412,7 +416,7 @@ def test_patch_unpatch(self): patch() tracer = DummyTracer() - Pin.get_from(Cluster).clone(tracer=tracer).onto(Cluster) + Pin.get_from(Cluster)._clone(tracer=tracer).onto(Cluster) session = Cluster(port=CASSANDRA_CONFIG["port"]).connect(self.TEST_KEYSPACE) session.execute(self.TEST_QUERY) @@ -432,7 +436,7 @@ def test_patch_unpatch(self): # Test patch again patch() - Pin.get_from(Cluster).clone(tracer=tracer).onto(Cluster) + Pin.get_from(Cluster)._clone(tracer=tracer).onto(Cluster) session = Cluster(port=CASSANDRA_CONFIG["port"]).connect(self.TEST_KEYSPACE) session.execute(self.TEST_QUERY) @@ -454,7 +458,7 @@ def setUp(self): patch() self.tracer = DummyTracer() self.cluster = Cluster(port=CASSANDRA_CONFIG["port"]) - Pin.get_from(self.cluster).clone(tracer=self.tracer).onto(self.cluster) + Pin.get_from(self.cluster)._clone(tracer=self.tracer).onto(self.cluster) self.session = self.cluster.connect(self.TEST_KEYSPACE) def tearDown(self): diff --git a/tests/contrib/celery/base.py b/tests/contrib/celery/base.py index c2b7de22a54..040566ee57d 100644 --- a/tests/contrib/celery/base.py +++ b/tests/contrib/celery/base.py @@ -88,10 +88,11 @@ def celery_test_setup(self): def setUp(self): super(CeleryBaseTestCase, self).setUp() - self.pin = Pin(service="celery-unittest", tracer=self.tracer) + self.pin = 
Pin(service="celery-unittest") + self.pin._tracer = self.tracer # override pins to use our Dummy Tracer - Pin.override(self.app, tracer=self.tracer) - Pin.override(celery.beat.Scheduler, tracer=self.tracer) + Pin._override(self.app, tracer=self.tracer) + Pin._override(celery.beat.Scheduler, tracer=self.tracer) def tearDown(self): self.app = None diff --git a/tests/contrib/celery/test_tagging.py b/tests/contrib/celery/test_tagging.py index 6b88acf9434..d68ab3adbcb 100644 --- a/tests/contrib/celery/test_tagging.py +++ b/tests/contrib/celery/test_tagging.py @@ -61,14 +61,14 @@ def dummy_tracer(): @pytest.fixture(autouse=False) def traced_redis_celery_app(instrument_celery, dummy_tracer): Pin.get_from(redis_celery_app) - Pin.override(redis_celery_app, tracer=dummy_tracer) + Pin._override(redis_celery_app, tracer=dummy_tracer) yield redis_celery_app @pytest.fixture(autouse=False) def traced_amqp_celery_app(instrument_celery, dummy_tracer): Pin.get_from(amqp_celery_app) - Pin.override(amqp_celery_app, tracer=dummy_tracer) + Pin._override(amqp_celery_app, tracer=dummy_tracer) yield amqp_celery_app diff --git a/tests/contrib/config.py b/tests/contrib/config.py index 6ed086109a2..0b5f3d2bfbb 100644 --- a/tests/contrib/config.py +++ b/tests/contrib/config.py @@ -97,3 +97,13 @@ "host": os.getenv("TEST_KAFKA_HOST", "127.0.0.1"), "port": int(os.getenv("TEST_KAFKA_PORT", 29092)), } + +VALKEY_CONFIG = { + "host": os.getenv("TEST_VALKEY_HOST", "localhost"), + "port": int(os.getenv("TEST_VALKEY_PORT", 6379)), +} + +VALKEY_CLUSTER_CONFIG = { + "host": "127.0.0.1", + "ports": os.getenv("TEST_VALKEYCLUSTER_PORTS", "7000,7001,7002,7003,7004,7005"), +} diff --git a/tests/contrib/consul/test.py b/tests/contrib/consul/test.py index 285287f9e95..71e2f0c6da0 100644 --- a/tests/contrib/consul/test.py +++ b/tests/contrib/consul/test.py @@ -22,8 +22,8 @@ def setUp(self): host=CONSUL_CONFIG["host"], port=CONSUL_CONFIG["port"], ) - Pin.override(consul.Consul, service=self.TEST_SERVICE, tracer=self.tracer) - Pin.override(consul.Consul.KV, service=self.TEST_SERVICE, tracer=self.tracer) + Pin._override(consul.Consul, service=self.TEST_SERVICE, tracer=self.tracer) + Pin._override(consul.Consul.KV, service=self.TEST_SERVICE, tracer=self.tracer) self.c = c def tearDown(self): @@ -168,8 +168,8 @@ def setUp(self): host=CONSUL_CONFIG["host"], port=CONSUL_CONFIG["port"], ) - Pin.override(consul.Consul, tracer=self.tracer) - Pin.override(consul.Consul.KV, tracer=self.tracer) + Pin._override(consul.Consul, tracer=self.tracer) + Pin._override(consul.Consul.KV, tracer=self.tracer) self.c = c def tearDown(self): diff --git a/tests/contrib/dbapi/test_dbapi.py b/tests/contrib/dbapi/test_dbapi.py index c60e49c56af..00e1dc34c56 100644 --- a/tests/contrib/dbapi/test_dbapi.py +++ b/tests/contrib/dbapi/test_dbapi.py @@ -24,7 +24,8 @@ def test_execute_wrapped_is_called_and_returned(self): cursor.rowcount = 0 cursor.execute.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = TracedCursor(cursor, pin, {}) # DEV: We always pass through the result assert "__result__" == traced_cursor.execute("__query__", "arg_1", kwarg1="kwarg1") @@ -37,7 +38,9 @@ def test_dbm_propagation_not_supported(self): # By default _dbm_propagator attribute should not be set or have a value of None. # DBM context propagation should be opt in. 
assert getattr(cfg, "_dbm_propagator", None) is None - traced_cursor = TracedCursor(cursor, Pin("dbapi_service", tracer=self.tracer), cfg) + pin = Pin("dbapi_service") + pin._tracer = self.tracer + traced_cursor = TracedCursor(cursor, pin, cfg) # Ensure dbm comment is not appended to sql statement traced_cursor.execute("SELECT * FROM db;") cursor.execute.assert_called_once_with("SELECT * FROM db;") @@ -53,7 +56,9 @@ def test_dbm_propagation_not_supported(self): def test_cursor_execute_with_dbm_injection(self): cursor = self.cursor cfg = IntegrationConfig(Config(), "dbapi", service="orders-db", _dbm_propagator=_DBM_Propagator(0, "query")) - traced_cursor = TracedCursor(cursor, Pin(service="orders-db", tracer=self.tracer), cfg) + pin = Pin(service="orders-db") + pin._tracer = self.tracer + traced_cursor = TracedCursor(cursor, pin, cfg) # The following operations should generate DBM comments traced_cursor.execute("SELECT * FROM db;") @@ -73,7 +78,8 @@ def test_executemany_wrapped_is_called_and_returned(self): cursor.rowcount = 0 cursor.executemany.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = TracedCursor(cursor, pin, {}) # DEV: We always pass through the result assert "__result__" == traced_cursor.executemany("__query__", "arg_1", kwarg1="kwarg1") @@ -83,7 +89,8 @@ def test_fetchone_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchone.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = TracedCursor(cursor, pin, {}) assert "__result__" == traced_cursor.fetchone("arg_1", kwarg1="kwarg1") cursor.fetchone.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -92,7 +99,8 @@ def test_fetchall_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchall.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = TracedCursor(cursor, pin, {}) assert "__result__" == traced_cursor.fetchall("arg_1", kwarg1="kwarg1") cursor.fetchall.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -101,7 +109,8 @@ def test_fetchmany_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchmany.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = TracedCursor(cursor, pin, {}) assert "__result__" == traced_cursor.fetchmany("arg_1", kwarg1="kwarg1") cursor.fetchmany.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -110,7 +119,8 @@ def test_correct_span_names(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 0 - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_cursor = TracedCursor(cursor, pin, {}) traced_cursor.execute("arg_1", kwarg1="kwarg1") @@ -145,7 +155,8 @@ def test_when_pin_disabled_then_no_tracing(self): cursor.executemany.return_value = "__result__" tracer.enabled = False - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_cursor = TracedCursor(cursor, pin, {}) assert "__result__" == traced_cursor.execute("arg_1", kwarg1="kwarg1") @@ -174,7 +185,8 @@ def test_span_info(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin("my_service", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("my_service", tags={"pin1": 
"value_pin1"}) + pin._tracer = tracer traced_cursor = TracedCursor(cursor, pin, {}) def method(): @@ -199,7 +211,8 @@ def test_cfg_service(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin(None, tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin(None, tags={"pin1": "value_pin1"}) + pin._tracer = tracer cfg = IntegrationConfig(Config(), "db-test", service="cfg-service") traced_cursor = TracedCursor(cursor, pin, cfg) @@ -214,7 +227,8 @@ def test_default_service(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin(None, tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin(None, tags={"pin1": "value_pin1"}) + pin._tracer = tracer traced_cursor = TracedCursor(cursor, pin, {}) @@ -229,7 +243,8 @@ def test_default_service_cfg(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin(None, tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin(tags={"pin1": "value_pin1"}) + pin._tracer = tracer cfg = IntegrationConfig(Config(), "db-test", _default_service="default-svc") traced_cursor = TracedCursor(cursor, pin, cfg) @@ -244,7 +259,8 @@ def test_service_cfg_and_pin(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin("pin-svc", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("pin-svc", tags={"pin1": "value_pin1"}) + pin._tracer = tracer cfg = IntegrationConfig(Config(), "db-test", _default_service="default-svc") traced_cursor = TracedCursor(cursor, pin, cfg) @@ -262,7 +278,8 @@ def test_django_traced_cursor_backward_compatibility(self): # implementation with the generic dbapi traced cursor, we had to make sure to add the tag 'sql.rows' that was # set by the legacy replaced implementation. cursor.rowcount = 123 - pin = Pin("my_service", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("my_service", tags={"pin1": "value_pin1"}) + pin._tracer = tracer cfg = IntegrationConfig(Config(), "db-test") traced_cursor = TracedCursor(cursor, pin, cfg) @@ -286,7 +303,8 @@ def test_execute_wrapped_is_called_and_returned(self): cursor.rowcount = 0 cursor.execute.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = FetchTracedCursor(cursor, pin, {}) assert "__result__" == traced_cursor.execute("__query__", "arg_1", kwarg1="kwarg1") cursor.execute.assert_called_once_with("__query__", "arg_1", kwarg1="kwarg1") @@ -296,7 +314,8 @@ def test_executemany_wrapped_is_called_and_returned(self): cursor.rowcount = 0 cursor.executemany.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = FetchTracedCursor(cursor, pin, {}) assert "__result__" == traced_cursor.executemany("__query__", "arg_1", kwarg1="kwarg1") cursor.executemany.assert_called_once_with("__query__", "arg_1", kwarg1="kwarg1") @@ -305,7 +324,8 @@ def test_fetchone_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchone.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = FetchTracedCursor(cursor, pin, {}) assert "__result__" == traced_cursor.fetchone("arg_1", kwarg1="kwarg1") cursor.fetchone.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -314,7 +334,8 @@ def test_fetchall_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchall.return_value = "__result__" - pin = Pin("pin_name", 
tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = FetchTracedCursor(cursor, pin, {}) assert "__result__" == traced_cursor.fetchall("arg_1", kwarg1="kwarg1") cursor.fetchall.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -323,7 +344,8 @@ def test_fetchmany_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchmany.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = FetchTracedCursor(cursor, pin, {}) assert "__result__" == traced_cursor.fetchmany("arg_1", kwarg1="kwarg1") cursor.fetchmany.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -332,7 +354,8 @@ def test_correct_span_names(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 0 - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_cursor = FetchTracedCursor(cursor, pin, {}) traced_cursor.execute("arg_1", kwarg1="kwarg1") @@ -367,7 +390,8 @@ def test_when_pin_disabled_then_no_tracing(self): cursor.executemany.return_value = "__result__" tracer.enabled = False - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_cursor = FetchTracedCursor(cursor, pin, {}) assert "__result__" == traced_cursor.execute("arg_1", kwarg1="kwarg1") @@ -396,7 +420,8 @@ def test_span_info(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin("my_service", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("my_service", tags={"pin1": "value_pin1"}) + pin._tracer = tracer traced_cursor = FetchTracedCursor(cursor, pin, {}) def method(): @@ -422,7 +447,8 @@ def test_django_traced_cursor_backward_compatibility(self): # implementation with the generic dbapi traced cursor, we had to make sure to add the tag 'sql.rows' that was # set by the legacy replaced implementation. 
cursor.rowcount = 123 - pin = Pin("my_service", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("my_service", tags={"pin1": "value_pin1"}) + pin._tracer = tracer traced_cursor = FetchTracedCursor(cursor, pin, {}) def method(): @@ -440,7 +466,8 @@ class Unknown(object): cursor = self.cursor tracer = self.tracer cursor.rowcount = Unknown() - pin = Pin("my_service", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("my_service", tags={"pin1": "value_pin1"}) + pin._tracer = tracer traced_cursor = FetchTracedCursor(cursor, pin, {}) def method(): @@ -453,7 +480,8 @@ def method(): def test_callproc_can_handle_arbitrary_args(self): cursor = self.cursor tracer = self.tracer - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer cursor.callproc.return_value = "gme --> moon" traced_cursor = TracedCursor(cursor, pin, {}) @@ -484,7 +512,9 @@ def test_cursor_execute_fetch_with_dbm_injection(self): cursor = self.cursor dbm_propagator = _DBM_Propagator(0, "query") cfg = IntegrationConfig(Config(), "dbapi", service="dbapi_service", _dbm_propagator=dbm_propagator) - traced_cursor = FetchTracedCursor(cursor, Pin("dbapi_service", tracer=self.tracer), cfg) + pin = Pin("dbapi_service") + pin._tracer = self.tracer + traced_cursor = FetchTracedCursor(cursor, pin, cfg) # The following operations should not generate DBM comments traced_cursor.fetchone() @@ -517,7 +547,8 @@ def setUp(self): self.connection = mock.Mock() def test_cursor_class(self): - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer # Default traced_connection = TracedConnection(self.connection, pin=pin) @@ -537,7 +568,8 @@ def test_commit_is_traced(self): connection = self.connection tracer = self.tracer connection.commit.return_value = None - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_connection = TracedConnection(connection, pin) traced_connection.commit() assert tracer.pop()[0].name == "mock.connection.commit" @@ -547,7 +579,8 @@ def test_rollback_is_traced(self): connection = self.connection tracer = self.tracer connection.rollback.return_value = None - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_connection = TracedConnection(connection, pin) traced_connection.rollback() assert tracer.pop()[0].name == "mock.connection.rollback" @@ -588,7 +621,8 @@ def cursor(self): def commit(self): pass - pin = Pin("pin", tracer=self.tracer) + pin = Pin("pin") + pin._tracer = self.tracer conn = TracedConnection(ConnectionConnection(), pin) with conn as conn2: conn2.commit() diff --git a/tests/contrib/dbapi/test_dbapi_appsec.py b/tests/contrib/dbapi/test_dbapi_appsec.py index 062a4ca667e..f1afbdb24af 100644 --- a/tests/contrib/dbapi/test_dbapi_appsec.py +++ b/tests/contrib/dbapi/test_dbapi_appsec.py @@ -50,7 +50,9 @@ def test_tainted_query(self): cursor = self.cursor cfg = IntegrationConfig(Config(), "sqlite", service="dbapi_service") - traced_cursor = TracedCursor(cursor, Pin("dbapi_service", tracer=self.tracer), cfg) + pin = Pin("dbapi_service") + pin._tracer = self.tracer + traced_cursor = TracedCursor(cursor, pin, cfg) traced_cursor.execute(query) cursor.execute.assert_called_once_with(query) @@ -73,7 +75,9 @@ def test_tainted_query_args(self): cursor = self.cursor cfg = IntegrationConfig(Config(), "sqlite", service="dbapi_service") - traced_cursor = TracedCursor(cursor, Pin("dbapi_service", tracer=self.tracer), cfg) + pin = Pin("dbapi_service") + pin._tracer = self.tracer + 
traced_cursor = TracedCursor(cursor, pin, cfg) traced_cursor.execute(query, (query_arg,)) cursor.execute.assert_called_once_with(query, (query_arg,)) @@ -88,7 +92,9 @@ def test_untainted_query(self): cursor = self.cursor cfg = IntegrationConfig(Config(), "sqlite", service="dbapi_service") - traced_cursor = TracedCursor(cursor, Pin("dbapi_service", tracer=self.tracer), cfg) + pin = Pin("dbapi_service") + pin._tracer = self.tracer + traced_cursor = TracedCursor(cursor, pin, cfg) traced_cursor.execute(query) cursor.execute.assert_called_once_with(query) @@ -104,7 +110,9 @@ def test_untainted_query_and_args(self): cursor = self.cursor cfg = IntegrationConfig(Config(), "sqlite", service="dbapi_service") - traced_cursor = TracedCursor(cursor, Pin("dbapi_service", tracer=self.tracer), cfg) + pin = Pin("dbapi_service") + pin._tracer = self.tracer + traced_cursor = TracedCursor(cursor, pin, cfg) traced_cursor.execute(query, (query_arg,)) cursor.execute.assert_called_once_with(query, (query_arg,)) @@ -124,7 +132,9 @@ def test_tainted_query_iast_disabled(self): cursor = self.cursor cfg = IntegrationConfig(Config(), "sqlite", service="dbapi_service") - traced_cursor = TracedCursor(cursor, Pin("dbapi_service", tracer=self.tracer), cfg) + pin = Pin("dbapi_service") + pin._tracer = self.tracer + traced_cursor = TracedCursor(cursor, pin, cfg) traced_cursor.execute(query) cursor.execute.assert_called_once_with(query) diff --git a/tests/contrib/dbapi_async/test_dbapi_async.py b/tests/contrib/dbapi_async/test_dbapi_async.py index ceb2b0cf6a1..dd16b7a2e33 100644 --- a/tests/contrib/dbapi_async/test_dbapi_async.py +++ b/tests/contrib/dbapi_async/test_dbapi_async.py @@ -26,7 +26,8 @@ async def test_execute_wrapped_is_called_and_returned(self): cursor.rowcount = 0 cursor.execute.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = TracedAsyncCursor(cursor, pin, {}) # DEV: We always pass through the result assert "__result__" == await traced_cursor.execute("__query__", "arg_1", kwarg1="kwarg1") @@ -40,7 +41,9 @@ async def test_dbm_propagation_not_supported(self): # By default _dbm_propagator attribute should not be set or have a value of None. # DBM context propagation should be opt in. 
assert getattr(cfg, "_dbm_propagator", None) is None - traced_cursor = TracedAsyncCursor(cursor, Pin("dbapi_service", tracer=self.tracer), cfg) + pin = Pin(service="dbapi_service") + pin._tracer = self.tracer + traced_cursor = TracedAsyncCursor(cursor, pin, cfg) # Ensure dbm comment is not appended to sql statement await traced_cursor.execute("SELECT * FROM db;") cursor.execute.assert_called_once_with("SELECT * FROM db;") @@ -57,7 +60,9 @@ async def test_dbm_propagation_not_supported(self): async def test_cursor_execute_with_dbm_injection(self): cursor = self.cursor cfg = IntegrationConfig(Config(), "dbapi", service="orders-db", _dbm_propagator=_DBM_Propagator(0, "query")) - traced_cursor = TracedAsyncCursor(cursor, Pin(service="orders-db", tracer=self.tracer), cfg) + pin = Pin(service="orders-db") + pin._tracer = self.tracer + traced_cursor = TracedAsyncCursor(cursor, pin, cfg) # The following operations should generate DBM comments await traced_cursor.execute("SELECT * FROM db;") @@ -78,7 +83,8 @@ async def test_executemany_wrapped_is_called_and_returned(self): cursor.rowcount = 0 cursor.executemany.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = TracedAsyncCursor(cursor, pin, {}) # DEV: We always pass through the result assert "__result__" == await traced_cursor.executemany("__query__", "arg_1", kwarg1="kwarg1") @@ -89,7 +95,8 @@ async def test_fetchone_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchone.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = TracedAsyncCursor(cursor, pin, {}) assert "__result__" == await traced_cursor.fetchone("arg_1", kwarg1="kwarg1") cursor.fetchone.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -101,7 +108,9 @@ async def test_cursor_async_connection(self): def method(): pass - async with TracedAsyncCursor(self.cursor, Pin("dbapi_service", tracer=self.tracer), {}) as cursor: + pin = Pin("dbapi_service") + pin._tracer = self.tracer + async with TracedAsyncCursor(self.cursor, pin, {}) as cursor: await cursor.execute("""select 'one' as x""") await cursor.execute("""select 'blah'""") @@ -120,7 +129,8 @@ async def test_fetchall_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchall.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = TracedAsyncCursor(cursor, pin, {}) assert "__result__" == await traced_cursor.fetchall("arg_1", kwarg1="kwarg1") cursor.fetchall.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -130,7 +140,8 @@ async def test_fetchmany_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchmany.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = TracedAsyncCursor(cursor, pin, {}) assert "__result__" == await traced_cursor.fetchmany("arg_1", kwarg1="kwarg1") cursor.fetchmany.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -140,7 +151,8 @@ async def test_correct_span_names(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 0 - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_cursor = TracedAsyncCursor(cursor, pin, {}) await traced_cursor.execute("arg_1", kwarg1="kwarg1") @@ -176,7 +188,8 @@ async def 
test_when_pin_disabled_then_no_tracing(self): cursor.executemany.return_value = "__result__" tracer.enabled = False - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_cursor = TracedAsyncCursor(cursor, pin, {}) assert "__result__" == await traced_cursor.execute("arg_1", kwarg1="kwarg1") @@ -206,7 +219,8 @@ async def test_span_info(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin("my_service", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("my_service", tags={"pin1": "value_pin1"}) + pin._tracer = tracer traced_cursor = TracedAsyncCursor(cursor, pin, {}) async def method(): @@ -232,7 +246,8 @@ async def test_cfg_service(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin(None, tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin(None, tags={"pin1": "value_pin1"}) + pin._tracer = tracer cfg = IntegrationConfig(Config(), "db-test", service="cfg-service") traced_cursor = TracedAsyncCursor(cursor, pin, cfg) @@ -248,7 +263,8 @@ async def test_default_service(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin(None, tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin(None, tags={"pin1": "value_pin1"}) + pin._tracer = tracer traced_cursor = TracedAsyncCursor(cursor, pin, {}) @@ -264,7 +280,8 @@ async def test_default_service_cfg(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin(None, tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin(None, tags={"pin1": "value_pin1"}) + pin._tracer = tracer cfg = IntegrationConfig(Config(), "db-test", _default_service="default-svc") traced_cursor = TracedAsyncCursor(cursor, pin, cfg) @@ -280,7 +297,8 @@ async def test_service_cfg_and_pin(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin("pin-svc", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("pin-svc", tags={"pin1": "value_pin1"}) + pin._tracer = tracer cfg = IntegrationConfig(Config(), "db-test", _default_service="default-svc") traced_cursor = TracedAsyncCursor(cursor, pin, cfg) @@ -299,7 +317,8 @@ async def test_django_traced_cursor_backward_compatibility(self): # implementation with the generic dbapi traced cursor, we had to make sure to add the tag 'sql.rows' that was # set by the legacy replaced implementation. 
cursor.rowcount = 123 - pin = Pin("my_service", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("my_service", tags={"pin1": "value_pin1"}) + pin._tracer = tracer cfg = IntegrationConfig(Config(), "db-test") traced_cursor = TracedAsyncCursor(cursor, pin, cfg) @@ -324,7 +343,8 @@ async def test_execute_wrapped_is_called_and_returned(self): cursor.rowcount = 0 cursor.execute.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = FetchTracedAsyncCursor(cursor, pin, {}) assert "__result__" == await traced_cursor.execute("__query__", "arg_1", kwarg1="kwarg1") cursor.execute.assert_called_once_with("__query__", "arg_1", kwarg1="kwarg1") @@ -335,7 +355,8 @@ async def test_executemany_wrapped_is_called_and_returned(self): cursor.rowcount = 0 cursor.executemany.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = FetchTracedAsyncCursor(cursor, pin, {}) assert "__result__" == await traced_cursor.executemany("__query__", "arg_1", kwarg1="kwarg1") cursor.executemany.assert_called_once_with("__query__", "arg_1", kwarg1="kwarg1") @@ -345,7 +366,8 @@ async def test_fetchone_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchone.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = FetchTracedAsyncCursor(cursor, pin, {}) assert "__result__" == await traced_cursor.fetchone("arg_1", kwarg1="kwarg1") cursor.fetchone.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -355,7 +377,8 @@ async def test_fetchall_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchall.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = FetchTracedAsyncCursor(cursor, pin, {}) assert "__result__" == await traced_cursor.fetchall("arg_1", kwarg1="kwarg1") cursor.fetchall.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -365,7 +388,8 @@ async def test_fetchmany_wrapped_is_called_and_returned(self): cursor = self.cursor cursor.rowcount = 0 cursor.fetchmany.return_value = "__result__" - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer traced_cursor = FetchTracedAsyncCursor(cursor, pin, {}) assert "__result__" == await traced_cursor.fetchmany("arg_1", kwarg1="kwarg1") cursor.fetchmany.assert_called_once_with("arg_1", kwarg1="kwarg1") @@ -375,7 +399,8 @@ async def test_correct_span_names(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 0 - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_cursor = FetchTracedAsyncCursor(cursor, pin, {}) await traced_cursor.execute("arg_1", kwarg1="kwarg1") @@ -411,7 +436,8 @@ async def test_when_pin_disabled_then_no_tracing(self): cursor.executemany.return_value = "__result__" tracer.enabled = False - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_cursor = FetchTracedAsyncCursor(cursor, pin, {}) assert "__result__" == await traced_cursor.execute("arg_1", kwarg1="kwarg1") @@ -441,7 +467,8 @@ async def test_span_info(self): cursor = self.cursor tracer = self.tracer cursor.rowcount = 123 - pin = Pin("my_service", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("my_service", tags={"pin1": "value_pin1"}) + pin._tracer = tracer 
traced_cursor = FetchTracedAsyncCursor(cursor, pin, {}) async def method(): @@ -468,7 +495,8 @@ async def test_django_traced_cursor_backward_compatibility(self): # implementation with the generic dbapi traced cursor, we had to make sure to add the tag 'sql.rows' that was # set by the legacy replaced implementation. cursor.rowcount = 123 - pin = Pin("my_service", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("my_service", tags={"pin1": "value_pin1"}) + pin._tracer = tracer traced_cursor = FetchTracedAsyncCursor(cursor, pin, {}) async def method(): @@ -487,7 +515,8 @@ class Unknown(object): cursor = self.cursor tracer = self.tracer cursor.rowcount = Unknown() - pin = Pin("my_service", tracer=tracer, tags={"pin1": "value_pin1"}) + pin = Pin("my_service", tags={"pin1": "value_pin1"}) + pin._tracer = tracer traced_cursor = FetchTracedAsyncCursor(cursor, pin, {}) async def method(): @@ -501,7 +530,8 @@ async def method(): async def test_callproc_can_handle_arbitrary_args(self): cursor = self.cursor tracer = self.tracer - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer cursor.callproc.return_value = "gme --> moon" traced_cursor = TracedAsyncCursor(cursor, pin, {}) @@ -533,7 +563,9 @@ async def test_cursor_execute_fetch_with_dbm_injection(self): cursor = self.cursor dbm_propagator = _DBM_Propagator(0, "query") cfg = IntegrationConfig(Config(), "dbapi", service="dbapi_service", _dbm_propagator=dbm_propagator) - traced_cursor = FetchTracedAsyncCursor(cursor, Pin("dbapi_service", tracer=self.tracer), cfg) + pin = Pin("dbapi_service") + pin._tracer = self.tracer + traced_cursor = FetchTracedAsyncCursor(cursor, pin, cfg) # The following operations should not generate DBM comments await traced_cursor.fetchone() @@ -567,7 +599,8 @@ def setUp(self): @mark_asyncio async def test_cursor_class(self): - pin = Pin("pin_name", tracer=self.tracer) + pin = Pin("pin_name") + pin._tracer = self.tracer # Default traced_connection = TracedAsyncConnection(self.connection, pin=pin) @@ -588,7 +621,8 @@ async def test_commit_is_traced(self): connection = self.connection tracer = self.tracer connection.commit.return_value = None - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_connection = TracedAsyncConnection(connection, pin) await traced_connection.commit() assert tracer.pop()[0].name == "mock.connection.commit" @@ -599,7 +633,8 @@ async def test_rollback_is_traced(self): connection = self.connection tracer = self.tracer connection.rollback.return_value = None - pin = Pin("pin_name", tracer=tracer) + pin = Pin("pin_name") + pin._tracer = tracer traced_connection = TracedAsyncConnection(connection, pin) await traced_connection.rollback() assert tracer.pop()[0].name == "mock.connection.rollback" @@ -641,7 +676,8 @@ def cursor(self): async def commit(self): pass - pin = Pin("pin", tracer=self.tracer) + pin = Pin("pin") + pin._tracer = self.tracer conn = TracedAsyncConnection(ConnectionConnection(), pin) async with conn as conn2: await conn2.commit() diff --git a/tests/contrib/django/conftest.py b/tests/contrib/django/conftest.py index 3dd992681b4..7a398abba46 100644 --- a/tests/contrib/django/conftest.py +++ b/tests/contrib/django/conftest.py @@ -32,7 +32,7 @@ def tracer(): # Patch Django and override tracer to be our test tracer pin = Pin.get_from(django) original_tracer = pin.tracer - Pin.override(django, tracer=tracer) + Pin._override(django, tracer=tracer) # Yield to our test yield tracer @@ -41,7 +41,7 @@ def tracer(): # Reset 
the tracer pinned to Django and unpatch # DEV: unable to properly unpatch and reload django app with each test # unpatch() - Pin.override(django, tracer=original_tracer) + Pin._override(django, tracer=original_tracer) @pytest.fixture diff --git a/tests/contrib/django/test_django.py b/tests/contrib/django/test_django.py index 79baceb1652..03fdffeaf45 100644 --- a/tests/contrib/django/test_django.py +++ b/tests/contrib/django/test_django.py @@ -1588,7 +1588,7 @@ def test_connection(client, test_spans): span = spans[0] assert span.name == "sqlite.query" - assert span.service == "{}" + assert span.service == "{}", span.service assert span.span_type == "sql" assert span.get_tag("django.db.vendor") == "sqlite" assert span.get_tag("django.db.alias") == "default" diff --git a/tests/contrib/django/test_django_dbm.py b/tests/contrib/django/test_django_dbm.py index d44f90f3208..cd2eb436c42 100644 --- a/tests/contrib/django/test_django_dbm.py +++ b/tests/contrib/django/test_django_dbm.py @@ -21,7 +21,7 @@ def get_cursor(tracer, service=None, propagation_mode="service", tags={}): pin = Pin.get_from(cursor) assert pin is not None - pin.clone(tracer=tracer, tags={**pin.tags, **tags}).onto(cursor) + pin._clone(tracer=tracer, tags={**pin.tags, **tags}).onto(cursor) return cursor diff --git a/tests/contrib/dogpile_cache/test_tracing.py b/tests/contrib/dogpile_cache/test_tracing.py index fec78818eda..ac42a8512ac 100644 --- a/tests/contrib/dogpile_cache/test_tracing.py +++ b/tests/contrib/dogpile_cache/test_tracing.py @@ -31,7 +31,7 @@ def region(tracer): # The backend is trivial so we can use memory to simplify test setup. test_region = dogpile.cache.make_region(name="TestRegion", key_mangler=lambda x: x) test_region.configure("dogpile.cache.memory") - Pin.override(dogpile.cache, tracer=tracer) + Pin._override(dogpile.cache, tracer=tracer) return test_region diff --git a/tests/contrib/dramatiq/test_integration.py b/tests/contrib/dramatiq/test_integration.py index 526aadc3861..990bef92bdc 100644 --- a/tests/contrib/dramatiq/test_integration.py +++ b/tests/contrib/dramatiq/test_integration.py @@ -35,7 +35,9 @@ def test_idempotent_unpatch(self): unpatch() tracer = DummyTracer() - Pin(tracer=tracer).onto(dramatiq) + pin = Pin() + pin._tracer = tracer + pin.onto(dramatiq) @dramatiq.actor def fn_task(): @@ -51,7 +53,9 @@ def test_fn_task_synchronous(self): # the body of the function is not instrumented so calling it # directly doesn't create a trace tracer = DummyTracer() - Pin(tracer=tracer).onto(dramatiq) + pin = Pin() + pin._tracer = tracer + pin.onto(dramatiq) @dramatiq.actor def fn_task(): diff --git a/tests/contrib/elasticsearch/test_elasticsearch.py b/tests/contrib/elasticsearch/test_elasticsearch.py index 6e381bc1e31..f1c461f4e51 100644 --- a/tests/contrib/elasticsearch/test_elasticsearch.py +++ b/tests/contrib/elasticsearch/test_elasticsearch.py @@ -92,7 +92,9 @@ def setUp(self): # `custom_tag` is a custom tag that can be set via `Pin`. 
"custom_tag": "bar", } - Pin(tracer=self.tracer, tags=tags).onto(es.transport) + pin = Pin(tags=tags) + pin._tracer = self.tracer + pin.onto(es.transport) self.create_index(es) patch() @@ -225,7 +227,9 @@ def test_patch_unpatch(self): patch() es = self._get_es() - Pin(tracer=self.tracer).onto(es.transport) + pin = Pin() + pin._tracer = self.tracer + pin.onto(es.transport) # Test index creation self.create_index(es) @@ -253,7 +257,9 @@ def test_patch_unpatch(self): patch() es = self._get_es() - Pin(tracer=self.tracer).onto(es.transport) + pin = Pin() + pin._tracer = self.tracer + pin.onto(es.transport) # Test index creation self.create_index(es) @@ -286,7 +292,9 @@ def test_user_specified_service_v1(self): assert config.service == "mysvc" self.create_index(self.es) - Pin(service="es", tracer=self.tracer).onto(self.es.transport) + pin = Pin(service="es") + pin._tracer = self.tracer + pin.onto(self.es.transport) spans = self.get_spans() self.reset() assert len(spans) == 1 @@ -303,7 +311,9 @@ def test_unspecified_service_v0(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) def test_unspecified_service_v1(self): self.create_index(self.es) - Pin(service="es", tracer=self.tracer).onto(self.es.transport) + pin = Pin(service="es") + pin._tracer = self.tracer + pin.onto(self.es.transport) spans = self.get_spans() self.reset() assert len(spans) == 1 diff --git a/tests/contrib/flask/__init__.py b/tests/contrib/flask/__init__.py index a512a79f196..2a6fc49285c 100644 --- a/tests/contrib/flask/__init__.py +++ b/tests/contrib/flask/__init__.py @@ -36,7 +36,7 @@ def setUp(self): self.app = flask.Flask(__name__, template_folder="test_templates/") self.app.test_client_class = DDFlaskTestClient self.client = self.app.test_client() - Pin.override(self.app, tracer=self.tracer) + Pin._override(self.app, tracer=self.tracer) def tearDown(self): super(BaseFlaskTestCase, self).tearDown() diff --git a/tests/contrib/flask/test_blueprint.py b/tests/contrib/flask/test_blueprint.py index 96401dfa1a9..dffa959eceb 100644 --- a/tests/contrib/flask/test_blueprint.py +++ b/tests/contrib/flask/test_blueprint.py @@ -36,7 +36,9 @@ def test_blueprint_register(self): We do not use the ``flask.Flask`` app ``Pin`` """ bp = flask.Blueprint("pinned", __name__) - Pin(service="flask-bp", tracer=self.tracer).onto(bp) + pin = Pin(service="flask-bp") + pin._tracer = self.tracer + pin.onto(bp) # DEV: This is more common than calling ``flask.Blueprint.register`` directly self.app.register_blueprint(bp) @@ -58,7 +60,9 @@ def test_blueprint_add_url_rule(self): """ # When the Blueprint has a Pin attached bp = flask.Blueprint("pinned", __name__) - Pin(service="flask-bp", tracer=self.tracer).onto(bp) + pin = Pin(service="flask-bp") + pin._tracer = self.tracer + pin.onto(bp) @bp.route("/") def test_view(): @@ -113,7 +117,7 @@ def test_blueprint_request_pin_override(self): We create the expected spans """ bp = flask.Blueprint("bp", __name__) - Pin.override(bp, service="flask-bp", tracer=self.tracer) + Pin._override(bp, service="flask-bp", tracer=self.tracer) @bp.route("/") def test(): diff --git a/tests/contrib/flask/test_templates/test_insecure.html b/tests/contrib/flask/test_templates/test_insecure.html new file mode 100644 index 00000000000..a1921295a57 --- /dev/null +++ b/tests/contrib/flask/test_templates/test_insecure.html @@ -0,0 +1 @@ +hello {{world|safe}} diff --git a/tests/contrib/flask_autopatch/test_flask_autopatch.py b/tests/contrib/flask_autopatch/test_flask_autopatch.py index 
27c4b47e2d0..9d0bfb7820b 100644 --- a/tests/contrib/flask_autopatch/test_flask_autopatch.py +++ b/tests/contrib/flask_autopatch/test_flask_autopatch.py @@ -17,7 +17,7 @@ class FlaskAutopatchTestCase(TracerTestCase): def setUp(self): super(FlaskAutopatchTestCase, self).setUp() self.app = flask.Flask(__name__) - Pin.override(self.app, service="test-flask", tracer=self.tracer) + Pin._override(self.app, service="test-flask", tracer=self.tracer) self.client = self.app.test_client() def test_patched(self): diff --git a/tests/contrib/flask_cache/test_utils.py b/tests/contrib/flask_cache/test_utils.py index fc5d640b5cf..6e779d21109 100644 --- a/tests/contrib/flask_cache/test_utils.py +++ b/tests/contrib/flask_cache/test_utils.py @@ -6,7 +6,7 @@ from ddtrace.contrib.internal.flask_cache.utils import _extract_client from ddtrace.contrib.internal.flask_cache.utils import _extract_conn_tags from ddtrace.contrib.internal.flask_cache.utils import _resource_from_cache_prefix -from ddtrace.trace import Tracer +from ddtrace.trace import tracer from ..config import MEMCACHED_CONFIG from ..config import REDIS_CONFIG @@ -17,7 +17,6 @@ class FlaskCacheUtilsTest(unittest.TestCase): def test_extract_redis_connection_metadata(self): # create the TracedCache instance for a Flask app - tracer = Tracer() Cache = get_traced_cache(tracer, service=self.SERVICE) app = Flask(__name__) config = { @@ -37,7 +36,6 @@ def test_extract_redis_connection_metadata(self): def test_extract_memcached_connection_metadata(self): # create the TracedCache instance for a Flask app - tracer = Tracer() Cache = get_traced_cache(tracer, service=self.SERVICE) app = Flask(__name__) config = { @@ -56,7 +54,6 @@ def test_extract_memcached_connection_metadata(self): def test_extract_memcached_multiple_connection_metadata(self): # create the TracedCache instance for a Flask app - tracer = Tracer() Cache = get_traced_cache(tracer, service=self.SERVICE) app = Flask(__name__) config = { @@ -78,7 +75,6 @@ def test_extract_memcached_multiple_connection_metadata(self): def test_resource_from_cache_with_prefix(self): # create the TracedCache instance for a Flask app - tracer = Tracer() Cache = get_traced_cache(tracer, service=self.SERVICE) app = Flask(__name__) config = { @@ -94,7 +90,6 @@ def test_resource_from_cache_with_prefix(self): def test_resource_from_cache_with_empty_prefix(self): # create the TracedCache instance for a Flask app - tracer = Tracer() Cache = get_traced_cache(tracer, service=self.SERVICE) app = Flask(__name__) config = { @@ -110,7 +105,6 @@ def test_resource_from_cache_with_empty_prefix(self): def test_resource_from_cache_without_prefix(self): # create the TracedCache instance for a Flask app - tracer = Tracer() Cache = get_traced_cache(tracer, service=self.SERVICE) app = Flask(__name__) traced_cache = Cache(app, config={"CACHE_TYPE": "redis"}) diff --git a/tests/contrib/google_generativeai/conftest.py b/tests/contrib/google_generativeai/conftest.py index 64b2eb83d1b..69d86a7f05d 100644 --- a/tests/contrib/google_generativeai/conftest.py +++ b/tests/contrib/google_generativeai/conftest.py @@ -35,7 +35,7 @@ def mock_tracer(ddtrace_global_config, genai): try: pin = Pin.get_from(genai) mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) - pin.override(genai, tracer=mock_tracer) + pin._override(genai, tracer=mock_tracer) pin.tracer._configure() if ddtrace_global_config.get("_llmobs_enabled", False): # Have to disable and re-enable LLMObs to use to mock tracer. 
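The hunks above apply one mechanical migration: `Pin.override`/`pin.clone` become the private `Pin._override`/`pin._clone`, and the removed `Pin(tracer=...)` constructor argument is replaced by assigning `pin._tracer` directly before calling `pin.onto(obj)`. A minimal sketch of the test-side pattern these changes converge on; the helper name `attach_test_tracer` is illustrative and not part of this PR:

from ddtrace.trace import Pin


def attach_test_tracer(obj, tracer, service=None, tags=None):
    # Equivalent of the removed Pin(service=..., tags=..., tracer=...).onto(obj):
    # build the pin with its public arguments, then set the tracer privately.
    pin = Pin(service=service, tags=tags)
    pin._tracer = tracer
    pin.onto(obj)
    return pin


# When an integration has already attached its own pin, the one-step private
# override used throughout these tests does the same job:
#   Pin._override(obj, tracer=tracer, service="my-svc")

The underscore spellings signal that these entry points are now internal to the library; the test suite opts into them explicitly rather than keeping tracer injection in the public `Pin` API.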
diff --git a/tests/contrib/graphene/test_graphene.py b/tests/contrib/graphene/test_graphene.py index 5dca40212ee..dbfee8cc611 100644 --- a/tests/contrib/graphene/test_graphene.py +++ b/tests/contrib/graphene/test_graphene.py @@ -1,4 +1,7 @@ +import os + import graphene +import graphql import pytest from ddtrace.contrib.internal.graphql.patch import patch @@ -26,6 +29,23 @@ def resolve_patron(root, info): raise Exception("exception was raised in a graphene query") +class Query(graphene.ObjectType): + user = graphene.String(id=graphene.ID()) + + def resolve_user(self, info, id): # noqa: A002 + if id != "123": + raise graphql.error.GraphQLError( + "User not found", + extensions={ + "code": "USER_NOT_FOUND", + "timestamp": "2025-01-30T12:34:56Z", + "status": 404, + "retryable": False, + }, + ) + return "John Doe" + + @pytest.fixture(autouse=True) def enable_graphql_patching(): patch() @@ -94,8 +114,17 @@ async def test_schema_execute_async_with_resolvers(test_schema, test_source_str, assert result.data == {"patron": {"id": "1", "name": "Syrus", "age": 27}} +@pytest.mark.snapshot(ignores=["meta.events", "meta.error.stack"]) +def test_schema_failing_extensions(test_schema, test_source_str, enable_graphql_resolvers): + schema = graphene.Schema(query=Query) + os.environ["DD_TRACE_GRAPHQL_ERROR_EXTENSIONS"] = "code, status" + query_string = '{ user(id: "999") }' + result = schema.execute(query_string) + assert result.errors + + @pytest.mark.snapshot( - ignores=["meta.error.stack"], variants={"v2": graphene.VERSION < (3,), "": graphene.VERSION >= (3,)} + ignores=["meta.events", "meta.error.stack"], variants={"v2": graphene.VERSION < (3,), "": graphene.VERSION >= (3,)} ) def test_schema_failing_execute(failing_schema, test_source_str, enable_graphql_resolvers): result = failing_schema.execute(test_source_str) diff --git a/tests/contrib/graphql/test_graphql.py b/tests/contrib/graphql/test_graphql.py index 7072b35bc81..ab3321aad8b 100644 --- a/tests/contrib/graphql/test_graphql.py +++ b/tests/contrib/graphql/test_graphql.py @@ -1,6 +1,8 @@ import os import graphql +from graphql import build_schema +from graphql import graphql_sync import pytest from ddtrace.contrib.internal.graphql.patch import _graphql_version as graphql_version @@ -73,13 +75,48 @@ async def test_graphql_with_traced_resolver(test_schema, test_source_str, snapsh assert result.data == {"hello": "friend"} +def resolve_fail(root, info): + undefined_var = None + return undefined_var.property + + +@snapshot(ignores=["meta.error.type", "meta.error.message", "meta.error.stack", "meta.events"]) +def test_graphql_fail(enable_graphql_resolvers): + query = """ + query { + fail + } + """ + + resolvers = { + "Query": { + "fail": resolve_fail, + } + } + schema_definition = """ + type Query { + fail: String + } + """ + + test_schema = build_schema(schema_definition) + result = graphql_sync( + test_schema, query, root_value=None, field_resolver=lambda _type, _field: resolvers[_type.name][_field.name] + ) + + assert result.errors is not None + assert len(result.errors) == 1 + assert isinstance(result.errors[0], graphql.error.GraphQLError) + assert "'NoneType' object has no attribute 'name'" in result.errors[0].message + + @pytest.mark.asyncio async def test_graphql_error(test_schema, snapshot_context): - with snapshot_context(ignores=["meta.error.type", "meta.error.message"]): + with snapshot_context(ignores=["meta.error.type", "meta.error.message", "meta.events"]): if graphql_version < (3, 0): - result = graphql.graphql(test_schema, "{ invalid_schema 
}") + result = graphql.graphql(test_schema, "query my_query{ invalid_schema }") else: - result = await graphql.graphql(test_schema, "{ invalid_schema }") + result = await graphql.graphql(test_schema, "query my_query{ invalid_schema }") assert len(result.errors) == 1 assert isinstance(result.errors[0], graphql.error.GraphQLError) assert "Cannot query field" in result.errors[0].message @@ -99,7 +136,7 @@ def test_graphql_v2_promise(test_schema, test_source_str): ) @pytest.mark.skipif(graphql_version >= (3, 0), reason="graphql.graphql is NOT async in v2.0") def test_graphql_error_v2_promise(test_schema): - promise = graphql.graphql(test_schema, "{ invalid_schema }", return_promise=True) + promise = graphql.graphql(test_schema, "query my_query{ invalid_schema }", return_promise=True) result = promise.get() assert len(result.errors) == 1 assert isinstance(result.errors[0], graphql.error.GraphQLError) diff --git a/tests/contrib/grpc/common.py b/tests/contrib/grpc/common.py index e67e4f32a92..bcc444a3fe2 100644 --- a/tests/contrib/grpc/common.py +++ b/tests/contrib/grpc/common.py @@ -20,8 +20,8 @@ class GrpcBaseTestCase(TracerTestCase): def setUp(self): super(GrpcBaseTestCase, self).setUp() patch() - Pin.override(constants.GRPC_PIN_MODULE_SERVER, tracer=self.tracer) - Pin.override(constants.GRPC_PIN_MODULE_CLIENT, tracer=self.tracer) + Pin._override(constants.GRPC_PIN_MODULE_SERVER, tracer=self.tracer) + Pin._override(constants.GRPC_PIN_MODULE_CLIENT, tracer=self.tracer) self._start_server() def tearDown(self): diff --git a/tests/contrib/grpc/test_grpc.py b/tests/contrib/grpc/test_grpc.py index d0559f2dff5..6164682c449 100644 --- a/tests/contrib/grpc/test_grpc.py +++ b/tests/contrib/grpc/test_grpc.py @@ -227,9 +227,9 @@ def test_pin_not_activated(self): def test_pin_tags_are_put_in_span(self): # DEV: stop and restart server to catch overridden pin self._stop_server() - Pin.override(constants.GRPC_PIN_MODULE_SERVER, service="server1") - Pin.override(constants.GRPC_PIN_MODULE_SERVER, tags={"tag1": "server"}) - Pin.override(constants.GRPC_PIN_MODULE_CLIENT, tags={"tag2": "client"}) + Pin._override(constants.GRPC_PIN_MODULE_SERVER, service="server1") + Pin._override(constants.GRPC_PIN_MODULE_SERVER, tags={"tag1": "server"}) + Pin._override(constants.GRPC_PIN_MODULE_CLIENT, tags={"tag2": "client"}) self._start_server() with grpc.insecure_channel("127.0.0.1:%d" % (_GRPC_PORT)) as channel: stub = HelloStub(channel) @@ -241,10 +241,10 @@ def test_pin_tags_are_put_in_span(self): assert spans[0].get_tag("tag2") == "client" def test_pin_can_be_defined_per_channel(self): - Pin.override(constants.GRPC_PIN_MODULE_CLIENT, service="grpc1") + Pin._override(constants.GRPC_PIN_MODULE_CLIENT, service="grpc1") channel1 = grpc.insecure_channel("127.0.0.1:%d" % (_GRPC_PORT)) - Pin.override(constants.GRPC_PIN_MODULE_CLIENT, service="grpc2") + Pin._override(constants.GRPC_PIN_MODULE_CLIENT, service="grpc2") channel2 = grpc.insecure_channel("127.0.0.1:%d" % (_GRPC_PORT)) stub1 = HelloStub(channel1) diff --git a/tests/contrib/grpc_aio/test_grpc_aio.py b/tests/contrib/grpc_aio/test_grpc_aio.py index 0606bcc3db2..e256a37a160 100644 --- a/tests/contrib/grpc_aio/test_grpc_aio.py +++ b/tests/contrib/grpc_aio/test_grpc_aio.py @@ -179,8 +179,8 @@ def patch_grpc_aio(): @pytest.fixture def tracer(): tracer = DummyTracer() - Pin.override(GRPC_AIO_PIN_MODULE_CLIENT, tracer=tracer) - Pin.override(GRPC_AIO_PIN_MODULE_SERVER, tracer=tracer) + Pin._override(GRPC_AIO_PIN_MODULE_CLIENT, tracer=tracer) + 
Pin._override(GRPC_AIO_PIN_MODULE_SERVER, tracer=tracer) yield tracer tracer.pop() @@ -354,13 +354,13 @@ async def test_pin_not_activated(server_info, tracer): [_CoroHelloServicer(), _SyncHelloServicer()], ) async def test_pin_tags_put_in_span(servicer, tracer): - Pin.override(GRPC_AIO_PIN_MODULE_SERVER, service="server1") - Pin.override(GRPC_AIO_PIN_MODULE_SERVER, tags={"tag1": "server"}) + Pin._override(GRPC_AIO_PIN_MODULE_SERVER, service="server1") + Pin._override(GRPC_AIO_PIN_MODULE_SERVER, tags={"tag1": "server"}) target = f"localhost:{_GRPC_PORT}" _server = _create_server(servicer, target) await _server.start() - Pin.override(GRPC_AIO_PIN_MODULE_CLIENT, tags={"tag2": "client"}) + Pin._override(GRPC_AIO_PIN_MODULE_CLIENT, tags={"tag2": "client"}) async with aio.insecure_channel(target) as channel: stub = HelloStub(channel) await stub.SayHello(HelloRequest(name="test")) @@ -383,10 +383,10 @@ async def test_pin_tags_put_in_span(servicer, tracer): @pytest.mark.parametrize("server_info", [_CoroHelloServicer(), _SyncHelloServicer()], indirect=True) async def test_pin_can_be_defined_per_channel(server_info, tracer): - Pin.override(GRPC_AIO_PIN_MODULE_CLIENT, service="grpc1") + Pin._override(GRPC_AIO_PIN_MODULE_CLIENT, service="grpc1") channel1 = aio.insecure_channel(server_info.target) - Pin.override(GRPC_AIO_PIN_MODULE_CLIENT, service="grpc2") + Pin._override(GRPC_AIO_PIN_MODULE_CLIENT, service="grpc2") channel2 = aio.insecure_channel(server_info.target) stub1 = HelloStub(channel1) diff --git a/tests/contrib/httplib/test_httplib.py b/tests/contrib/httplib/test_httplib.py index 24a5fe3f051..a57aff294a1 100644 --- a/tests/contrib/httplib/test_httplib.py +++ b/tests/contrib/httplib/test_httplib.py @@ -41,7 +41,7 @@ def setUp(self): super(HTTPLibBaseMixin, self).setUp() patch() - Pin.override(httplib, tracer=self.tracer) + Pin._override(httplib, tracer=self.tracer) def tearDown(self): unpatch() @@ -59,12 +59,12 @@ def to_str(self, value): def get_http_connection(self, *args, **kwargs): conn = httplib.HTTPConnection(*args, **kwargs) - Pin.override(conn, tracer=self.tracer) + Pin._override(conn, tracer=self.tracer) return conn def get_https_connection(self, *args, **kwargs): conn = httplib.HTTPSConnection(*args, **kwargs) - Pin.override(conn, tracer=self.tracer) + Pin._override(conn, tracer=self.tracer) return conn def test_patch(self): diff --git a/tests/contrib/httplib/test_httplib_distributed.py b/tests/contrib/httplib/test_httplib_distributed.py index 706921388bc..922f219f5ee 100644 --- a/tests/contrib/httplib/test_httplib_distributed.py +++ b/tests/contrib/httplib/test_httplib_distributed.py @@ -38,7 +38,7 @@ def headers_not_here(self, tracer): def get_http_connection(self, *args, **kwargs): conn = httplib.HTTPConnection(*args, **kwargs) - Pin.override(conn, tracer=self.tracer) + Pin._override(conn, tracer=self.tracer) return conn def request(self, conn=None): diff --git a/tests/contrib/httpx/test_httpx.py b/tests/contrib/httpx/test_httpx.py index 33ecadb825f..457c077f645 100644 --- a/tests/contrib/httpx/test_httpx.py +++ b/tests/contrib/httpx/test_httpx.py @@ -59,7 +59,7 @@ def test_httpx_service_name(tracer, test_spans): We set the span service name as a text type and not binary """ client = httpx.Client() - Pin.override(client, tracer=tracer) + Pin._override(client, tracer=tracer) with override_config("httpx", {"split_by_domain": True}): resp = client.get(get_url("/status/200")) @@ -124,15 +124,15 @@ def assert_spans(test_spans, service): # override the tracer on the default sync client 
# DEV: `httpx.get` will call `with Client() as client: client.get()` - Pin.override(httpx.Client, tracer=tracer) + Pin._override(httpx.Client, tracer=tracer) # sync client client = httpx.Client() - Pin.override(client, service="sync-client", tracer=tracer) + Pin._override(client, service="sync-client", tracer=tracer) # async client async_client = httpx.AsyncClient() - Pin.override(async_client, service="async-client", tracer=tracer) + Pin._override(async_client, service="async-client", tracer=tracer) resp = httpx.get(url, headers=DEFAULT_HEADERS) assert resp.status_code == 200 diff --git a/tests/contrib/httpx/test_httpx_pre_0_11.py b/tests/contrib/httpx/test_httpx_pre_0_11.py index 315c53cb29c..0f37df47ea3 100644 --- a/tests/contrib/httpx/test_httpx_pre_0_11.py +++ b/tests/contrib/httpx/test_httpx_pre_0_11.py @@ -57,7 +57,7 @@ async def test_httpx_service_name(tracer, test_spans): We set the span service name as a text type and not binary """ client = httpx.Client() - Pin.override(client, tracer=tracer) + Pin._override(client, tracer=tracer) with override_config("httpx", {"split_by_domain": True}): resp = await client.get(get_url("/status/200")) @@ -112,11 +112,11 @@ def assert_spans(test_spans, service): # override the tracer on the default sync client # DEV: `httpx.get` will call `with Client() as client: client.get()` - Pin.override(httpx.Client, tracer=tracer) + Pin._override(httpx.Client, tracer=tracer) # sync client client = httpx.Client() - Pin.override(client, service="sync-client", tracer=tracer) + Pin._override(client, service="sync-client", tracer=tracer) resp = await httpx.get(url, headers=DEFAULT_HEADERS) assert resp.status_code == 200 diff --git a/tests/contrib/jinja2/test_jinja2.py b/tests/contrib/jinja2/test_jinja2.py index ce91c7e5ed0..eac8aeffd85 100644 --- a/tests/contrib/jinja2/test_jinja2.py +++ b/tests/contrib/jinja2/test_jinja2.py @@ -26,7 +26,7 @@ def setUp(self): jinja2.environment._spontaneous_environments.clear() except AttributeError: jinja2.utils.clear_caches() - Pin.override(jinja2.environment.Environment, tracer=self.tracer) + Pin._override(jinja2.environment.Environment, tracer=self.tracer) def tearDown(self): super(Jinja2Test, self).tearDown() diff --git a/tests/contrib/kafka/test_kafka.py b/tests/contrib/kafka/test_kafka.py index c67bdd08b01..f99489595ac 100644 --- a/tests/contrib/kafka/test_kafka.py +++ b/tests/contrib/kafka/test_kafka.py @@ -22,7 +22,7 @@ from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter from ddtrace.trace import Pin from ddtrace.trace import TraceFilter -from ddtrace.trace import Tracer +from ddtrace.trace import tracer as ddtracer from tests.contrib.config import KAFKA_CONFIG from tests.datastreams.test_public_api import MockedTracer from tests.utils import DummyTracer @@ -106,16 +106,16 @@ def should_filter_empty_polls(): @pytest.fixture def tracer(should_filter_empty_polls): patch() - t = Tracer() if should_filter_empty_polls: - t._configure(trace_processors=[KafkaConsumerPollFilter()]) + ddtracer.configure(trace_processors=[KafkaConsumerPollFilter()]) # disable backoff because it makes these tests less reliable - t._writer._send_payload_with_backoff = t._writer._send_payload + previous_backoff = ddtracer._writer._send_payload_with_backoff + ddtracer._writer._send_payload_with_backoff = ddtracer._writer._send_payload try: - yield t + yield ddtracer finally: - t.flush() - t.shutdown() + ddtracer.flush() + ddtracer._writer._send_payload_with_backoff = previous_backoff unpatch() @@ -124,12 +124,14 @@ def 
dsm_processor(tracer): processor = tracer.data_streams_processor with mock.patch("ddtrace.internal.datastreams.data_streams_processor", return_value=processor): yield processor + # flush buckets for the next test run + processor.periodic() @pytest.fixture def producer(tracer): _producer = confluent_kafka.Producer({"bootstrap.servers": BOOTSTRAP_SERVERS}) - Pin.override(_producer, tracer=tracer) + Pin._override(_producer, tracer=tracer) return _producer @@ -146,7 +148,7 @@ def consumer(tracer, kafka_topic): tp = TopicPartition(kafka_topic, 0) tp.offset = 0 # we want to read the first message _consumer.commit(offsets=[tp]) - Pin.override(_consumer, tracer=tracer) + Pin._override(_consumer, tracer=tracer) _consumer.subscribe([kafka_topic]) yield _consumer _consumer.close() @@ -165,7 +167,7 @@ def non_auto_commit_consumer(tracer, kafka_topic): tp = TopicPartition(kafka_topic, 0) tp.offset = 0 # we want to read the first message _consumer.commit(offsets=[tp]) - Pin.override(_consumer, tracer=tracer) + Pin._override(_consumer, tracer=tracer) _consumer.subscribe([kafka_topic]) yield _consumer _consumer.close() @@ -176,7 +178,7 @@ def serializing_producer(tracer): _producer = confluent_kafka.SerializingProducer( {"bootstrap.servers": BOOTSTRAP_SERVERS, "value.serializer": lambda x, y: x} ) - Pin.override(_producer, tracer=tracer) + Pin._override(_producer, tracer=tracer) return _producer @@ -190,7 +192,7 @@ def deserializing_consumer(tracer, kafka_topic): "value.deserializer": lambda x, y: x, } ) - Pin.override(_consumer, tracer=tracer) + Pin._override(_consumer, tracer=tracer) _consumer.subscribe([kafka_topic]) yield _consumer _consumer.close() @@ -248,7 +250,7 @@ def test_producer_bootstrap_servers(config, expect_servers, tracer): def test_produce_single_server(dummy_tracer, producer, kafka_topic): - Pin.override(producer, tracer=dummy_tracer) + Pin._override(producer, tracer=dummy_tracer) producer.produce(kafka_topic, PAYLOAD, key=KEY) producer.flush() @@ -259,18 +261,18 @@ def test_produce_single_server(dummy_tracer, producer, kafka_topic): def test_produce_none_key(dummy_tracer, producer, kafka_topic): - Pin.override(producer, tracer=dummy_tracer) + Pin._override(producer, tracer=dummy_tracer) producer.produce(kafka_topic, PAYLOAD, key=None) producer.flush() traces = dummy_tracer.pop_traces() assert 1 == len(traces), "key=None does not cause produce() call to raise an exception" - Pin.override(producer, tracer=None) + Pin._override(producer, tracer=None) def test_produce_multiple_servers(dummy_tracer, kafka_topic): producer = confluent_kafka.Producer({"bootstrap.servers": ",".join([BOOTSTRAP_SERVERS] * 3)}) - Pin.override(producer, tracer=dummy_tracer) + Pin._override(producer, tracer=dummy_tracer) producer.produce(kafka_topic, PAYLOAD, key=KEY) producer.flush() @@ -278,7 +280,7 @@ def test_produce_multiple_servers(dummy_tracer, kafka_topic): assert 1 == len(traces) produce_span = traces[0][0] assert produce_span.get_tag("messaging.kafka.bootstrap.servers") == ",".join([BOOTSTRAP_SERVERS] * 3) - Pin.override(producer, tracer=None) + Pin._override(producer, tracer=None) @pytest.mark.parametrize("tombstone", [False, True]) @@ -325,6 +327,7 @@ def test_commit_with_consume_with_multiple_messages(producer, consumer, kafka_to @pytest.mark.snapshot(ignores=SNAPSHOT_IGNORES) @pytest.mark.parametrize("should_filter_empty_polls", [False]) +@pytest.mark.skip(reason="FIXME: This test requires the initialization of a new tracer. 
This is not supported") def test_commit_with_consume_with_error(producer, consumer, kafka_topic): producer.produce(kafka_topic, PAYLOAD, key=KEY) producer.flush() @@ -518,8 +521,8 @@ def _generate_in_subprocess(random_topic): "auto.offset.reset": "earliest", } ) - ddtrace.trace.Pin.override(producer, tracer=ddtrace.tracer) - ddtrace.trace.Pin.override(consumer, tracer=ddtrace.tracer) + ddtrace.trace.Pin._override(producer, tracer=ddtrace.tracer) + ddtrace.trace.Pin._override(consumer, tracer=ddtrace.tracer) # We run all of these commands with retry attempts because the kafka-confluent API # sys.exits on connection failures, which causes the test to fail. We want to retry @@ -750,8 +753,8 @@ def test_data_streams_default_context_propagation(consumer, producer, kafka_topi # It is not currently expected for kafka produce and consume spans to connect in a trace def test_tracing_context_is_not_propagated_by_default(dummy_tracer, consumer, producer, kafka_topic): - Pin.override(producer, tracer=dummy_tracer) - Pin.override(consumer, tracer=dummy_tracer) + Pin._override(producer, tracer=dummy_tracer) + Pin._override(consumer, tracer=dummy_tracer) test_string = "context test no propagation" test_key = "context test key no propagation" @@ -788,8 +791,8 @@ def test_tracing_context_is_not_propagated_by_default(dummy_tracer, consumer, pr # None of these spans are part of the same trace assert produce_span.trace_id != consume_span.trace_id - Pin.override(consumer, tracer=None) - Pin.override(producer, tracer=None) + Pin._override(consumer, tracer=None) + Pin._override(producer, tracer=None) # Propagation should work when enabled @@ -813,8 +816,8 @@ def test(consumer, producer, kafka_topic): patch() dummy_tracer = DummyTracer() dummy_tracer.flush() - Pin.override(producer, tracer=dummy_tracer) - Pin.override(consumer, tracer=dummy_tracer) + Pin._override(producer, tracer=dummy_tracer) + Pin._override(consumer, tracer=dummy_tracer) # use a random int in this string to prevent reading a message produced by a previous test run test_string = "context propagation enabled test " + str(random.randint(0, 1000)) @@ -851,8 +854,8 @@ def test(consumer, producer, kafka_topic): # Two of these spans are part of the same trace assert produce_span.trace_id == consume_span.trace_id - Pin.override(consumer, tracer=None) - Pin.override(producer, tracer=None) + Pin._override(consumer, tracer=None) + Pin._override(producer, tracer=None) if __name__ == "__main__": sys.exit(pytest.main(["-x", __file__])) @@ -897,7 +900,7 @@ def test_consumer_uses_active_context_when_no_valid_distributed_context_exists( producer.produce(kafka_topic, PAYLOAD, key=test_key) producer.flush() - Pin.override(consumer, tracer=dummy_tracer) + Pin._override(consumer, tracer=dummy_tracer) with dummy_tracer.trace("kafka consumer parent span") as parent_span: with override_config("kafka", dict(distributed_tracing_enabled=True)): @@ -912,12 +915,12 @@ def test_consumer_uses_active_context_when_no_valid_distributed_context_exists( assert consume_span.name == "kafka.consume" assert consume_span.parent_id == parent_span.span_id - Pin.override(consumer, tracer=None) + Pin._override(consumer, tracer=None) def test_span_has_dsm_payload_hash(dummy_tracer, consumer, producer, kafka_topic): - Pin.override(producer, tracer=dummy_tracer) - Pin.override(consumer, tracer=dummy_tracer) + Pin._override(producer, tracer=dummy_tracer) + Pin._override(consumer, tracer=dummy_tracer) test_string = "payload hash test" PAYLOAD = bytes(test_string, encoding="utf-8") @@ -943,8 
+946,8 @@ def test_span_has_dsm_payload_hash(dummy_tracer, consumer, producer, kafka_topic assert consume_span.name == "kafka.consume" assert consume_span.get_tag("pathway.hash") is not None - Pin.override(consumer, tracer=None) - Pin.override(producer, tracer=None) + Pin._override(consumer, tracer=None) + Pin._override(producer, tracer=None) def test_tracing_with_serialization_works(dummy_tracer, kafka_topic): @@ -978,8 +981,8 @@ def json_deserializer(as_bytes, ctx): _consumer.commit(offsets=[tp]) _consumer.subscribe([kafka_topic]) - Pin.override(_producer, tracer=dummy_tracer) - Pin.override(_consumer, tracer=dummy_tracer) + Pin._override(_producer, tracer=dummy_tracer) + Pin._override(_consumer, tracer=dummy_tracer) test_string = "serializing_test" PAYLOAD = {"val": test_string} @@ -1004,12 +1007,12 @@ def json_deserializer(as_bytes, ctx): # a string assert consume_span.get_tag("kafka.message_key") is None - Pin.override(_consumer, tracer=None) - Pin.override(_producer, tracer=None) + Pin._override(_consumer, tracer=None) + Pin._override(_producer, tracer=None) def test_traces_empty_poll_by_default(dummy_tracer, consumer, kafka_topic): - Pin.override(consumer, tracer=dummy_tracer) + Pin._override(consumer, tracer=dummy_tracer) message = "hello" while message is not None: @@ -1029,7 +1032,7 @@ def test_traces_empty_poll_by_default(dummy_tracer, consumer, kafka_topic): assert empty_poll_span_created is True - Pin.override(consumer, tracer=None) + Pin._override(consumer, tracer=None) # Poll should not be traced when disabled @@ -1054,8 +1057,8 @@ def test(consumer, producer, kafka_topic): patch() dummy_tracer = DummyTracer() dummy_tracer.flush() - Pin.override(producer, tracer=dummy_tracer) - Pin.override(consumer, tracer=dummy_tracer) + Pin._override(producer, tracer=dummy_tracer) + Pin._override(consumer, tracer=dummy_tracer) assert config.kafka.trace_empty_poll_enabled is False @@ -1102,8 +1105,8 @@ def test(consumer, producer, kafka_topic): assert non_empty_poll_span_created is True - Pin.override(consumer, tracer=None) - Pin.override(producer, tracer=None) + Pin._override(consumer, tracer=None) + Pin._override(producer, tracer=None) if __name__ == "__main__": sys.exit(pytest.main(["-x", __file__])) diff --git a/tests/contrib/kombu/test.py b/tests/contrib/kombu/test.py index b56ecdf0d0f..949cffd5bd5 100644 --- a/tests/contrib/kombu/test.py +++ b/tests/contrib/kombu/test.py @@ -28,7 +28,7 @@ def setUp(self): conn = kombu.Connection("amqp://guest:guest@127.0.0.1:{p}//".format(p=self.TEST_PORT)) conn.connect() producer = conn.Producer() - Pin.override(producer, service=self.TEST_SERVICE, tracer=self.tracer) + Pin._override(producer, service=self.TEST_SERVICE, tracer=self.tracer) self.conn = conn self.producer = producer @@ -63,7 +63,7 @@ def process_message(body, message): ) with kombu.Consumer(self.conn, [task_queue], accept=["json"], callbacks=[process_message]) as consumer: - Pin.override(consumer, service="kombu-patch", tracer=self.tracer) + Pin._override(consumer, service="kombu-patch", tracer=self.tracer) self.conn.drain_events(timeout=2) self.assertEqual(results[0], to_publish) @@ -130,7 +130,7 @@ def setUp(self): conn = kombu.Connection("amqp://guest:guest@127.0.0.1:{p}//".format(p=RABBITMQ_CONFIG["port"])) conn.connect() producer = conn.Producer() - Pin.override(producer, tracer=self.tracer) + Pin._override(producer, tracer=self.tracer) self.conn = conn self.producer = producer @@ -151,7 +151,7 @@ def setUp(self): conn = 
kombu.Connection("amqp://guest:guest@127.0.0.1:{p}//".format(p=self.TEST_PORT)) conn.connect() producer = conn.Producer() - Pin.override(producer, tracer=self.tracer) + Pin._override(producer, tracer=self.tracer) self.conn = conn self.producer = producer @@ -180,7 +180,7 @@ def process_message(body, message): ) with kombu.Consumer(self.conn, [task_queue], accept=["json"], callbacks=[process_message]) as consumer: - Pin.override(consumer, tracer=self.tracer) + Pin._override(consumer, tracer=self.tracer) self.conn.drain_events(timeout=2) return self.get_spans() @@ -256,7 +256,7 @@ def process_message(body, message): ) with kombu.Consumer(self.conn, [task_queue], accept=["json"], callbacks=[process_message]) as consumer: - Pin.override(consumer, tracer=self.tracer) + Pin._override(consumer, tracer=self.tracer) self.conn.drain_events(timeout=2) spans = self.get_spans() @@ -275,7 +275,7 @@ def setUp(self): self.conn = kombu.Connection("amqp://guest:guest@127.0.0.1:{p}//".format(p=RABBITMQ_CONFIG["port"])) self.conn.connect() self.producer = self.conn.Producer() - Pin.override(self.producer, tracer=self.tracer) + Pin._override(self.producer, tracer=self.tracer) self.patcher = mock.patch( "ddtrace.internal.datastreams.data_streams_processor", return_value=self.tracer.data_streams_processor @@ -313,7 +313,7 @@ def process_message(body, message): self.producer.publish(to_publish, routing_key=task_queue.routing_key, declare=[task_queue]) with kombu.Consumer(self.conn, [task_queue], accept=["json"], callbacks=[process_message]) as consumer: - Pin.override(consumer, service="kombu-patch", tracer=self.tracer) + Pin._override(consumer, service="kombu-patch", tracer=self.tracer) self.conn.drain_events(timeout=2) queue_name = consumer.channel.queue_declare("tasks", passive=True).queue diff --git a/tests/contrib/langchain/conftest.py b/tests/contrib/langchain/conftest.py index 6c1e5816df5..be76954215c 100644 --- a/tests/contrib/langchain/conftest.py +++ b/tests/contrib/langchain/conftest.py @@ -30,7 +30,7 @@ def snapshot_tracer(langchain, mock_logs, mock_metrics): def mock_tracer(langchain): pin = Pin.get_from(langchain) mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) - pin.override(langchain, tracer=mock_tracer) + pin._override(langchain, tracer=mock_tracer) pin.tracer._configure() yield mock_tracer diff --git a/tests/contrib/langchain/test_langchain.py b/tests/contrib/langchain/test_langchain.py index 86a14f524d9..f26edb2bca7 100644 --- a/tests/contrib/langchain/test_langchain.py +++ b/tests/contrib/langchain/test_langchain.py @@ -23,7 +23,6 @@ "meta.langchain.request.openai.parameters.logprobs", "meta.langchain.request.openai.parameters.seed", # langchain-openai llm call now includes seed as param "meta.langchain.request.openai.parameters.logprobs", # langchain-openai llm call now includes seed as param - "metrics.langchain.tokens.total_cost", # total_cost depends on if tiktoken is installed # these are sometimes named differently "meta.langchain.request.openai.parameters.max_tokens", "meta.langchain.request.openai.parameters.max_completion_tokens", diff --git a/tests/contrib/langgraph/conftest.py b/tests/contrib/langgraph/conftest.py index 13e1a5f9242..7c6671167bb 100644 --- a/tests/contrib/langgraph/conftest.py +++ b/tests/contrib/langgraph/conftest.py @@ -32,7 +32,7 @@ def langgraph(monkeypatch, mock_tracer): import langgraph pin = Pin.get_from(langgraph) - pin.override(langgraph, tracer=mock_tracer) + pin._override(langgraph, tracer=mock_tracer) yield langgraph unpatch() 
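The kafka and kombu hunks repeat a second pattern on top of the rename: pin a `DummyTracer` onto each producer/consumer for the duration of a test, then reset with `Pin._override(obj, tracer=None)` so later tests see the integration's own tracer again. Below is a fixture-shaped sketch of that override/restore dance, assuming the suite's existing `DummyTracer` test double; the fixture itself is hypothetical and not part of this PR:

import pytest

from ddtrace.trace import Pin
from tests.utils import DummyTracer


@pytest.fixture
def pin_dummy_tracer():
    tracer = DummyTracer()
    pinned = []

    def pin(obj, **kwargs):
        # Route the object's spans to the dummy tracer for this test only.
        Pin._override(obj, tracer=tracer, **kwargs)
        pinned.append(obj)
        return tracer

    yield pin
    # Mirror the manual cleanup in the tests above: clearing the override
    # keeps tracer state from leaking into the next test.
    for obj in pinned:
        Pin._override(obj, tracer=None)

Usage would mirror the inline calls in test_kafka.py: a test requests the fixture and calls `tracer = pin_dummy_tracer(producer)`, with teardown handled by the fixture instead of trailing `Pin._override(..., tracer=None)` lines.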
diff --git a/tests/contrib/mako/test_mako.py b/tests/contrib/mako/test_mako.py index 7e690b04a43..7b839177e0e 100644 --- a/tests/contrib/mako/test_mako.py +++ b/tests/contrib/mako/test_mako.py @@ -23,7 +23,7 @@ class MakoTest(TracerTestCase): def setUp(self): super(MakoTest, self).setUp() patch() - Pin.override(Template, tracer=self.tracer) + Pin._override(Template, tracer=self.tracer) def tearDown(self): super(MakoTest, self).tearDown() diff --git a/tests/contrib/mariadb/test_mariadb.py b/tests/contrib/mariadb/test_mariadb.py index 7ea8cd27feb..2f51f2e9b0a 100644 --- a/tests/contrib/mariadb/test_mariadb.py +++ b/tests/contrib/mariadb/test_mariadb.py @@ -39,7 +39,7 @@ def tracer(): def get_connection(tracer): connection = mariadb.connect(**MARIADB_CONFIG) - Pin.override(connection, tracer=tracer) + Pin._override(connection, tracer=tracer) return connection diff --git a/tests/contrib/molten/test_molten.py b/tests/contrib/molten/test_molten.py index cc73ceef861..74d6f1bf628 100644 --- a/tests/contrib/molten/test_molten.py +++ b/tests/contrib/molten/test_molten.py @@ -49,7 +49,7 @@ class TestMolten(TracerTestCase): def setUp(self): super(TestMolten, self).setUp() patch() - Pin.override(molten, tracer=self.tracer) + Pin._override(molten, tracer=self.tracer) self.app = molten_app() self.client = TestClient(self.app) @@ -89,7 +89,7 @@ def test_route_success(self): self.assertEqual(len(spans), 16) # test override of service name - Pin.override(molten, service=self.TEST_SERVICE) + Pin._override(molten, service=self.TEST_SERVICE) response = self.make_request() spans = self.pop_spans() self.assertEqual(spans[0].service, "molten-patch") @@ -273,7 +273,7 @@ def test_unpatch_patch(self): patch() # Need to override Pin here as we do in setUp - Pin.override(molten, tracer=self.tracer) + Pin._override(molten, tracer=self.tracer) self.assertTrue(Pin.get_from(molten) is not None) self.make_request() spans = self.pop_spans() diff --git a/tests/contrib/molten/test_molten_di.py b/tests/contrib/molten/test_molten_di.py index d360698f4cb..848517aca34 100644 --- a/tests/contrib/molten/test_molten_di.py +++ b/tests/contrib/molten/test_molten_di.py @@ -85,7 +85,7 @@ class TestMoltenDI(TracerTestCase): def setUp(self): super(TestMoltenDI, self).setUp() patch() - Pin.override(molten, tracer=self.tracer, service=self.TEST_SERVICE) + Pin._override(molten, tracer=self.tracer, service=self.TEST_SERVICE) def tearDown(self): unpatch() diff --git a/tests/contrib/mongoengine/test.py b/tests/contrib/mongoengine/test.py index b3961e3808c..51b0ff70c47 100644 --- a/tests/contrib/mongoengine/test.py +++ b/tests/contrib/mongoengine/test.py @@ -186,7 +186,7 @@ def tearDown(self): def get_tracer_and_connect(self): tracer = DummyTracer() client = mongoengine.connect(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) return tracer @@ -315,7 +315,9 @@ class TestMongoEnginePatchConnect(TestMongoEnginePatchConnectDefault): def get_tracer_and_connect(self): tracer = TestMongoEnginePatchConnectDefault.get_tracer_and_connect(self) - Pin(service=self.TEST_SERVICE, tracer=tracer).onto(mongoengine.connect) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = tracer + pin.onto(mongoengine.connect) mongoengine.connect(port=MONGO_CONFIG["port"]) return tracer @@ -337,7 +339,7 @@ def tearDown(self): def get_tracer_and_connect(self): tracer = DummyTracer() client = mongoengine.connect(port=MONGO_CONFIG["port"]) - 
Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) return tracer @@ -352,7 +354,9 @@ def get_tracer_and_connect(self): # Set a connect-level service, to check that we properly override it Pin(service="not-%s" % self.TEST_SERVICE).onto(mongoengine.connect) client = mongoengine.connect(port=MONGO_CONFIG["port"]) - Pin(service=self.TEST_SERVICE, tracer=tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = tracer + pin.onto(client) return tracer @@ -364,7 +368,7 @@ def test_patch_unpatch(self): patch() client = mongoengine.connect(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) Artist.drop_collection() spans = tracer.pop() @@ -389,7 +393,7 @@ def test_patch_unpatch(self): # Test patch again patch() client = mongoengine.connect(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) Artist.drop_collection() spans = tracer.pop() diff --git a/tests/contrib/mysql/test_mysql.py b/tests/contrib/mysql/test_mysql.py index 08626890fac..ec0da0aff35 100644 --- a/tests/contrib/mysql/test_mysql.py +++ b/tests/contrib/mysql/test_mysql.py @@ -418,7 +418,7 @@ def _get_conn_tracer(self): # assert pin.service == 'mysql' # Customize the service # we have to apply it on the existing one since new one won't inherit `app` - pin.clone(tracer=self.tracer).onto(self.conn) + pin._clone(tracer=self.tracer).onto(self.conn) return self.conn, self.tracer @@ -434,7 +434,7 @@ def test_patch_unpatch(self): conn = mysql.connector.connect(**MYSQL_CONFIG) pin = Pin.get_from(conn) assert pin - pin.clone(service="pin-svc", tracer=self.tracer).onto(conn) + pin._clone(service="pin-svc", tracer=self.tracer).onto(conn) assert conn.is_connected() cursor = conn.cursor() diff --git a/tests/contrib/mysqldb/test_mysqldb.py b/tests/contrib/mysqldb/test_mysqldb.py index 5d2c98a752c..4117eb69a10 100644 --- a/tests/contrib/mysqldb/test_mysqldb.py +++ b/tests/contrib/mysqldb/test_mysqldb.py @@ -534,7 +534,7 @@ def _add_dummy_tracer_to_pinned(self, obj): assert pin # Customize the service # we have to apply it on the existing one since new one won't inherit `app` - pin.clone(tracer=self.tracer).onto(obj) + pin._clone(tracer=self.tracer).onto(obj) def _get_conn_tracer(self): if not self.conn: @@ -559,7 +559,7 @@ def _get_conn_tracer_with_positional_args(self): assert pin # Customize the service # we have to apply it on the existing one since new one won't inherit `app` - pin.clone(tracer=self.tracer).onto(self.conn) + pin._clone(tracer=self.tracer).onto(self.conn) return self.conn, self.tracer @@ -575,7 +575,7 @@ def test_patch_unpatch(self): conn = self._connect_with_kwargs() pin = Pin.get_from(conn) assert pin - pin.clone(tracer=self.tracer).onto(conn) + pin._clone(tracer=self.tracer).onto(conn) conn.ping() cursor = conn.cursor() @@ -617,7 +617,7 @@ def test_patch_unpatch(self): def test_user_pin_override(self): conn, tracer = self._get_conn_tracer() pin = Pin.get_from(conn) - pin.clone(service="pin-svc", tracer=self.tracer).onto(conn) + pin._clone(service="pin-svc", tracer=self.tracer).onto(conn) cursor = conn.cursor() cursor.execute("SELECT 1") rows = cursor.fetchall() diff --git a/tests/contrib/openai/conftest.py b/tests/contrib/openai/conftest.py index 615a4e773b1..9cd8c998c79 100644 --- a/tests/contrib/openai/conftest.py +++ b/tests/contrib/openai/conftest.py @@ -92,34 +92,6 @@ 
def process_trace(self, trace): return trace -@pytest.fixture(scope="session") -def mock_metrics(): - patcher = mock.patch("ddtrace.llmobs._integrations.base.get_dogstatsd_client") - try: - DogStatsdMock = patcher.start() - m = mock.MagicMock() - DogStatsdMock.return_value = m - yield m - finally: - patcher.stop() - - -@pytest.fixture(scope="session") -def mock_logs(): - """ - Note that this fixture must be ordered BEFORE mock_tracer as it needs to patch the log writer - before it is instantiated. - """ - patcher = mock.patch("ddtrace.llmobs._integrations.base.V2LogWriter") - try: - V2LogWriterMock = patcher.start() - m = mock.MagicMock() - V2LogWriterMock.return_value = m - yield m - finally: - patcher.stop() - - @pytest.fixture() def mock_llmobs_writer(): patcher = mock.patch("ddtrace.llmobs._llmobs.LLMObsSpanWriter") @@ -163,21 +135,18 @@ def patch_openai(ddtrace_global_config, ddtrace_config_openai, openai_api_key, o @pytest.fixture -def snapshot_tracer(openai, patch_openai, mock_logs, mock_metrics): +def snapshot_tracer(openai, patch_openai): pin = Pin.get_from(openai) pin.tracer._configure(trace_processors=[FilterOrg()]) yield pin.tracer - mock_logs.reset_mock() - mock_metrics.reset_mock() - @pytest.fixture -def mock_tracer(ddtrace_global_config, openai, patch_openai, mock_logs, mock_metrics): +def mock_tracer(ddtrace_global_config, openai, patch_openai): pin = Pin.get_from(openai) mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) - pin.override(openai, tracer=mock_tracer) + pin._override(openai, tracer=mock_tracer) pin.tracer._configure(trace_processors=[FilterOrg()]) if ddtrace_global_config.get("_llmobs_enabled", False): @@ -187,6 +156,4 @@ def mock_tracer(ddtrace_global_config, openai, patch_openai, mock_logs, mock_met yield mock_tracer - mock_logs.reset_mock() - mock_metrics.reset_mock() LLMObs.disable() diff --git a/tests/contrib/openai/test_openai_llmobs.py b/tests/contrib/openai/test_openai_llmobs.py index 70adff39ef5..91e454c1673 100644 --- a/tests/contrib/openai/test_openai_llmobs.py +++ b/tests/contrib/openai/test_openai_llmobs.py @@ -602,9 +602,7 @@ def test_embedding_string_base64(self, openai, ddtrace_global_config, mock_llmob [dict(_llmobs_enabled=True, _llmobs_ml_app="", _llmobs_agentless_enabled=True)], ) @pytest.mark.skipif(parse_version(openai_module.version.VERSION) < (1, 0), reason="These tests are for openai >= 1.0") -def test_agentless_enabled_does_not_submit_metrics( - openai, ddtrace_global_config, mock_llmobs_writer, mock_tracer, mock_metrics -): +def test_agentless_enabled_does_not_submit_metrics(openai, ddtrace_global_config, mock_llmobs_writer, mock_tracer): """Ensure openai metrics are not emitted when agentless mode is enabled.""" with get_openai_vcr(subdirectory_name="v1").use_cassette("completion.yaml"): model = "ada" @@ -619,7 +617,3 @@ def test_agentless_enabled_does_not_submit_metrics( user="ddtrace-test", ) assert mock_llmobs_writer.enqueue.call_count == 1 - mock_metrics.assert_not_called() - assert mock_metrics.increment.call_count == 0 - assert mock_metrics.distribution.call_count == 0 - assert mock_metrics.gauge.call_count == 0 diff --git a/tests/contrib/openai/test_openai_v1.py b/tests/contrib/openai/test_openai_v1.py index a081583b4d1..468f4b03606 100644 --- a/tests/contrib/openai/test_openai_v1.py +++ b/tests/contrib/openai/test_openai_v1.py @@ -25,18 +25,8 @@ def openai_vcr(): yield get_openai_vcr(subdirectory_name="v1") -@pytest.mark.parametrize("ddtrace_config_openai", [dict(metrics_enabled=True), 
dict(metrics_enabled=False)]) -def test_config(ddtrace_config_openai, mock_tracer, openai): - # Ensure that the module state is reloaded for each test run - assert not hasattr(openai, "_test") - openai._test = 1 - - # Ensure overriding the config works - assert ddtrace.config.openai.metrics_enabled is ddtrace_config_openai["metrics_enabled"] - - @pytest.mark.parametrize("api_key_in_env", [True, False]) -def test_model_list(api_key_in_env, request_api_key, openai, openai_vcr, mock_metrics, snapshot_tracer): +def test_model_list(api_key_in_env, request_api_key, openai, openai_vcr, snapshot_tracer): with snapshot_context( token="tests.contrib.openai.test_openai.test_model_list", ignores=["meta.http.useragent", "meta.openai.api_type", "meta.openai.api_base", "meta.openai.request.user"], @@ -47,7 +37,7 @@ def test_model_list(api_key_in_env, request_api_key, openai, openai_vcr, mock_me @pytest.mark.parametrize("api_key_in_env", [True, False]) -async def test_model_alist(api_key_in_env, request_api_key, openai, openai_vcr, mock_metrics, snapshot_tracer): +async def test_model_alist(api_key_in_env, request_api_key, openai, openai_vcr, snapshot_tracer): with snapshot_context( token="tests.contrib.openai.test_openai.test_model_list", ignores=["meta.http.useragent", "meta.openai.api_type", "meta.openai.api_base", "meta.openai.request.user"], @@ -58,7 +48,7 @@ async def test_model_alist(api_key_in_env, request_api_key, openai, openai_vcr, @pytest.mark.parametrize("api_key_in_env", [True, False]) -def test_model_retrieve(api_key_in_env, request_api_key, openai, openai_vcr, mock_metrics, snapshot_tracer): +def test_model_retrieve(api_key_in_env, request_api_key, openai, openai_vcr, snapshot_tracer): with snapshot_context( token="tests.contrib.openai.test_openai.test_model_retrieve", ignores=["meta.http.useragent", "meta.openai.api_type", "meta.openai.api_base", "meta.openai.request.user"], @@ -69,7 +59,7 @@ def test_model_retrieve(api_key_in_env, request_api_key, openai, openai_vcr, moc @pytest.mark.parametrize("api_key_in_env", [True, False]) -async def test_model_aretrieve(api_key_in_env, request_api_key, openai, openai_vcr, mock_metrics, snapshot_tracer): +async def test_model_aretrieve(api_key_in_env, request_api_key, openai, openai_vcr, snapshot_tracer): with snapshot_context( token="tests.contrib.openai.test_openai.test_model_retrieve", ignores=["meta.http.useragent", "meta.openai.api_type", "meta.openai.api_base", "meta.openai.request.user"], @@ -80,9 +70,7 @@ async def test_model_aretrieve(api_key_in_env, request_api_key, openai, openai_v @pytest.mark.parametrize("api_key_in_env", [True, False]) -def test_completion( - api_key_in_env, request_api_key, openai, openai_vcr, mock_metrics, mock_logs, mock_llmobs_writer, snapshot_tracer -): +def test_completion(api_key_in_env, request_api_key, openai, openai_vcr, mock_llmobs_writer, snapshot_tracer): with snapshot_context( token="tests.contrib.openai.test_openai.test_completion", ignores=["meta.http.useragent", "meta.openai.api_type", "meta.openai.api_base"], @@ -111,42 +99,12 @@ def test_completion( assert choice.logprobs == expected_choices[idx]["logprobs"] assert choice.text == expected_choices[idx]["text"] - expected_tags = [ - "version:", - "env:", - "service:tests.contrib.openai", - "openai.request.model:ada", - "model:ada", - "openai.request.endpoint:/v1/completions", - "openai.request.method:POST", - "openai.organization.id:", - "openai.organization.name:datadog-4", - "openai.user.api_key:sk-...key>", - "error:0", - ] - 
mock_metrics.assert_has_calls( - [ - mock.call.distribution("tokens.prompt", 2, tags=expected_tags + ["openai.estimated:false"]), - mock.call.distribution("tokens.completion", 12, tags=expected_tags + ["openai.estimated:false"]), - mock.call.distribution("tokens.total", 14, tags=expected_tags + ["openai.estimated:false"]), - mock.call.distribution("request.duration", mock.ANY, tags=expected_tags), - mock.call.gauge("ratelimit.remaining.requests", mock.ANY, tags=expected_tags), - mock.call.gauge("ratelimit.requests", mock.ANY, tags=expected_tags), - mock.call.gauge("ratelimit.remaining.tokens", mock.ANY, tags=expected_tags), - mock.call.gauge("ratelimit.tokens", mock.ANY, tags=expected_tags), - ], - any_order=True, - ) - mock_logs.start.assert_not_called() - mock_logs.enqueue.assert_not_called() mock_llmobs_writer.start.assert_not_called() mock_llmobs_writer.enqueue.assert_not_called() @pytest.mark.parametrize("api_key_in_env", [True, False]) -async def test_acompletion( - api_key_in_env, request_api_key, openai, openai_vcr, mock_metrics, mock_logs, mock_llmobs_writer, snapshot_tracer -): +async def test_acompletion(api_key_in_env, request_api_key, openai, openai_vcr, mock_llmobs_writer, snapshot_tracer): with snapshot_context( token="tests.contrib.openai.test_openai.test_acompletion", ignores=["meta.http.useragent", "meta.openai.api_type", "meta.openai.api_base"], @@ -181,88 +139,11 @@ async def test_acompletion( for key, value in expected_choices.items(): assert getattr(resp.choices[0], key, None) == value - expected_tags = [ - "version:", - "env:", - "service:tests.contrib.openai", - "openai.request.model:curie", - "model:curie", - "openai.request.endpoint:/v1/completions", - "openai.request.method:POST", - "openai.organization.id:", - "openai.organization.name:datadog-4", - "openai.user.api_key:sk-...key>", - "error:0", - ] - mock_metrics.assert_has_calls( - [ - mock.call.distribution("tokens.prompt", 10, tags=expected_tags + ["openai.estimated:false"]), - mock.call.distribution("tokens.completion", 150, tags=expected_tags + ["openai.estimated:false"]), - mock.call.distribution("tokens.total", 160, tags=expected_tags + ["openai.estimated:false"]), - mock.call.distribution("request.duration", mock.ANY, tags=expected_tags), - mock.call.gauge("ratelimit.remaining.requests", mock.ANY, tags=expected_tags), - mock.call.gauge("ratelimit.requests", mock.ANY, tags=expected_tags), - mock.call.gauge("ratelimit.remaining.tokens", mock.ANY, tags=expected_tags), - mock.call.gauge("ratelimit.tokens", mock.ANY, tags=expected_tags), - ], - any_order=True, - ) - mock_logs.start.assert_not_called() - mock_logs.enqueue.assert_not_called() mock_llmobs_writer.start.assert_not_called() mock_llmobs_writer.enqueue.assert_not_called() -@pytest.mark.xfail(reason="An API key is required when logs are enabled") -@pytest.mark.parametrize( - "ddtrace_global_config,ddtrace_config_openai", - [(dict(_dd_api_key=""), dict(logs_enabled=True))], -) -def test_logs_no_api_key(openai, ddtrace_global_config, ddtrace_config_openai, mock_tracer): - """When no DD_API_KEY is set, the patching fails""" - pass - - -@pytest.mark.parametrize("ddtrace_config_openai", [dict(logs_enabled=True, log_prompt_completion_sample_rate=1.0)]) -def test_logs_completions(openai_vcr, openai, ddtrace_config_openai, mock_logs, mock_tracer): - """Ensure logs are emitted for completion endpoints when configured. - - Also ensure the logs have the correct tagging including the trace-logs correlation tagging. 
- """ - with openai_vcr.use_cassette("completion.yaml"): - client = openai.OpenAI() - client.completions.create( - model="ada", prompt="Hello world", temperature=0.8, n=2, stop=".", max_tokens=10, user="ddtrace-test" - ) - - span = mock_tracer.pop_traces()[0][0] - trace_id, span_id = span.trace_id, span.span_id - - assert mock_logs.enqueue.call_count == 1 - mock_logs.assert_has_calls( - [ - mock.call.start(), - mock.call.enqueue( - { - "timestamp": mock.ANY, - "message": mock.ANY, - "hostname": mock.ANY, - "ddsource": "openai", - "service": "tests.contrib.openai", - "status": "info", - "ddtags": "env:,version:,openai.request.endpoint:/v1/completions,openai.request.method:POST,openai.request.model:ada,openai.organization.name:datadog-4,openai.user.api_key:sk-...key>", # noqa: E501 - "dd.trace_id": "{:x}".format(trace_id), - "dd.span_id": str(span_id), - "prompt": "Hello world", - "choices": mock.ANY, - } - ), - ] - ) - - -@pytest.mark.parametrize("ddtrace_config_openai", [dict(logs_enabled=True, log_prompt_completion_sample_rate=1.0)]) -def test_global_tags(openai_vcr, ddtrace_config_openai, openai, mock_metrics, mock_logs, mock_tracer): +def test_global_tags(openai_vcr, openai, mock_tracer): """ When the global config UST tags are set The service name should be used for all data @@ -288,32 +169,6 @@ def test_global_tags(openai_vcr, ddtrace_config_openai, openai, mock_metrics, mo assert span.get_tag("openai.organization.name") == "datadog-4" assert span.get_tag("openai.user.api_key") == "sk-...key>" - for _, _args, kwargs in mock_metrics.mock_calls: - expected_metrics = [ - "service:test-svc", - "env:staging", - "version:1234", - "openai.request.model:ada", - "model:ada", - "openai.request.endpoint:/v1/completions", - "openai.request.method:POST", - "openai.organization.name:datadog-4", - "openai.user.api_key:sk-...key>", - ] - actual_tags = kwargs.get("tags") - for m in expected_metrics: - assert m in actual_tags - - for call, args, _kwargs in mock_logs.mock_calls: - if call != "enqueue": - continue - log = args[0] - assert log["service"] == "test-svc" - assert ( - log["ddtags"] - == "env:staging,version:1234,openai.request.endpoint:/v1/completions,openai.request.method:POST,openai.request.model:ada,openai.organization.name:datadog-4,openai.user.api_key:sk-...key>" # noqa: E501 - ) - def test_completion_raw_response(openai, openai_vcr, snapshot_tracer): with snapshot_context( @@ -440,20 +295,6 @@ def test_chat_completion_raw_response(openai, openai_vcr, snapshot_tracer): ) -@pytest.mark.parametrize("ddtrace_config_openai", [dict(metrics_enabled=b) for b in [True, False]]) -def test_enable_metrics(openai, openai_vcr, ddtrace_config_openai, mock_metrics, mock_tracer): - """Ensure the metrics_enabled configuration works.""" - with openai_vcr.use_cassette("completion.yaml"): - client = openai.OpenAI() - client.completions.create( - model="ada", prompt="Hello world", temperature=0.8, n=2, stop=".", max_tokens=10, user="ddtrace-test" - ) - if ddtrace_config_openai["metrics_enabled"]: - assert mock_metrics.mock_calls - else: - assert not mock_metrics.mock_calls - - @pytest.mark.parametrize("api_key_in_env", [True, False]) async def test_achat_completion(api_key_in_env, request_api_key, openai, openai_vcr, snapshot_tracer): with snapshot_context( @@ -510,47 +351,6 @@ async def test_image_acreate(api_key_in_env, request_api_key, openai, openai_vcr ) -@pytest.mark.parametrize("ddtrace_config_openai", [dict(logs_enabled=True, log_prompt_completion_sample_rate=1.0)]) -def 
test_logs_image_create(openai_vcr, openai, ddtrace_config_openai, mock_logs, mock_tracer): - """Ensure logs are emitted for image endpoints when configured. - - Also ensure the logs have the correct tagging including the trace-logs correlation tagging. - """ - with openai_vcr.use_cassette("image_create.yaml"): - client = openai.OpenAI() - client.images.generate( - prompt="sleepy capybara with monkey on top", - n=1, - size="256x256", - response_format="url", - user="ddtrace-test", - ) - span = mock_tracer.pop_traces()[0][0] - trace_id, span_id = span.trace_id, span.span_id - - assert mock_logs.enqueue.call_count == 1 - mock_logs.assert_has_calls( - [ - mock.call.start(), - mock.call.enqueue( - { - "timestamp": mock.ANY, - "message": mock.ANY, - "hostname": mock.ANY, - "ddsource": "openai", - "service": "tests.contrib.openai", - "status": "info", - "ddtags": "env:,version:,openai.request.endpoint:/v1/images/generations,openai.request.method:POST,openai.request.model:dall-e,openai.organization.name:datadog-4,openai.user.api_key:sk-...key>", # noqa: E501 - "dd.trace_id": "{:x}".format(trace_id), - "dd.span_id": str(span_id), - "prompt": "sleepy capybara with monkey on top", - "choices": mock.ANY, - } - ), - ] - ) - - # TODO: Note that vcr tests for image edit/variation don't work as they error out when recording the vcr request, # during the payload decoding. We'll need to migrate those tests over once we can address this. @pytest.mark.snapshot( @@ -871,7 +671,7 @@ def test_span_finish_on_stream_error(openai, openai_vcr, snapshot_tracer): @pytest.mark.snapshot @pytest.mark.skipif(TIKTOKEN_AVAILABLE, reason="This test estimates token counts") -def test_completion_stream_est_tokens(openai, openai_vcr, mock_metrics, snapshot_tracer): +def test_completion_stream_est_tokens(openai, openai_vcr, snapshot_tracer): with openai_vcr.use_cassette("completion_streamed.yaml"): with mock.patch("ddtrace.contrib.internal.openai.utils.encoding_for_model", create=True) as mock_encoding: mock_encoding.return_value.encode.side_effect = lambda x: [1, 2] @@ -882,7 +682,7 @@ def test_completion_stream_est_tokens(openai, openai_vcr, mock_metrics, snapshot @pytest.mark.skipif(not TIKTOKEN_AVAILABLE, reason="This test computes token counts using tiktoken") @pytest.mark.snapshot(token="tests.contrib.openai.test_openai.test_completion_stream") -def test_completion_stream(openai, openai_vcr, mock_metrics, snapshot_tracer): +def test_completion_stream(openai, openai_vcr, snapshot_tracer): with openai_vcr.use_cassette("completion_streamed.yaml"): with mock.patch("ddtrace.contrib.internal.openai.utils.encoding_for_model", create=True) as mock_encoding: mock_encoding.return_value.encode.side_effect = lambda x: [1, 2] @@ -893,7 +693,7 @@ def test_completion_stream(openai, openai_vcr, mock_metrics, snapshot_tracer): @pytest.mark.skipif(not TIKTOKEN_AVAILABLE, reason="This test computes token counts using tiktoken") @pytest.mark.snapshot(token="tests.contrib.openai.test_openai.test_completion_stream") -async def test_completion_async_stream(openai, openai_vcr, mock_metrics, snapshot_tracer): +async def test_completion_async_stream(openai, openai_vcr, snapshot_tracer): with openai_vcr.use_cassette("completion_streamed.yaml"): with mock.patch("ddtrace.contrib.internal.openai.utils.encoding_for_model", create=True) as mock_encoding: mock_encoding.return_value.encode.side_effect = lambda x: [1, 2] @@ -907,7 +707,7 @@ async def test_completion_async_stream(openai, openai_vcr, mock_metrics, snapsho reason="Streamed response context 
managers are only available v1.6.0+", ) @pytest.mark.snapshot(token="tests.contrib.openai.test_openai.test_completion_stream") -def test_completion_stream_context_manager(openai, openai_vcr, mock_metrics, snapshot_tracer): +def test_completion_stream_context_manager(openai, openai_vcr, snapshot_tracer): with openai_vcr.use_cassette("completion_streamed.yaml"): with mock.patch("ddtrace.contrib.internal.openai.utils.encoding_for_model", create=True) as mock_encoding: mock_encoding.return_value.encode.side_effect = lambda x: [1, 2] @@ -920,7 +720,7 @@ def test_completion_stream_context_manager(openai, openai_vcr, mock_metrics, sna parse_version(openai_module.version.VERSION) < (1, 26), reason="Stream options only available openai >= 1.26" ) @pytest.mark.snapshot(token="tests.contrib.openai.test_openai.test_chat_completion_stream") -def test_chat_completion_stream(openai, openai_vcr, mock_metrics, snapshot_tracer): +def test_chat_completion_stream(openai, openai_vcr, snapshot_tracer): """Assert that streamed token chunk extraction logic works automatically.""" with openai_vcr.use_cassette("chat_completion_streamed_tokens.yaml"): with mock.patch("ddtrace.contrib.internal.openai.utils.encoding_for_model", create=True) as mock_encoding: @@ -939,7 +739,7 @@ def test_chat_completion_stream(openai, openai_vcr, mock_metrics, snapshot_trace @pytest.mark.skipif( parse_version(openai_module.version.VERSION) < (1, 26), reason="Stream options only available openai >= 1.26" ) -def test_chat_completion_stream_explicit_no_tokens(openai, openai_vcr, mock_metrics, snapshot_tracer): +def test_chat_completion_stream_explicit_no_tokens(openai, openai_vcr, mock_tracer): """Assert that streamed token chunk extraction logic is avoided if explicitly set to False by the user.""" with openai_vcr.use_cassette("chat_completion_streamed.yaml"): with mock.patch("ddtrace.contrib.internal.openai.utils.encoding_for_model", create=True) as mock_encoding: @@ -956,41 +756,22 @@ def test_chat_completion_stream_explicit_no_tokens(openai, openai_vcr, mock_metr user="ddtrace-test", n=None, ) - span = snapshot_tracer.current_span() chunks = [c for c in resp] assert len(chunks) == 15 completion = "".join([c.choices[0].delta.content for c in chunks if c.choices[0].delta.content is not None]) assert completion == expected_completion - expected_tags = [ - "version:", - "env:", - "service:tests.contrib.openai", - "openai.request.model:gpt-3.5-turbo", - "model:gpt-3.5-turbo", - "openai.request.endpoint:/v1/chat/completions", - "openai.request.method:POST", - "openai.organization.id:", - "openai.organization.name:datadog-4", - "openai.user.api_key:sk-...key>", - "error:0", - ] - assert mock.call.distribution("request.duration", span.duration_ns, tags=expected_tags) in mock_metrics.mock_calls - assert mock.call.gauge("ratelimit.requests", 3000, tags=expected_tags) in mock_metrics.mock_calls - assert mock.call.gauge("ratelimit.remaining.requests", 2999, tags=expected_tags) in mock_metrics.mock_calls - expected_tags += ["openai.estimated:true"] - if TIKTOKEN_AVAILABLE: - expected_tags = expected_tags[:-1] - assert mock.call.distribution("tokens.prompt", 8, tags=expected_tags) in mock_metrics.mock_calls - assert mock.call.distribution("tokens.completion", mock.ANY, tags=expected_tags) in mock_metrics.mock_calls - assert mock.call.distribution("tokens.total", mock.ANY, tags=expected_tags) in mock_metrics.mock_calls + span = mock_tracer.pop_traces()[0][0] + assert span.get_metric("openai.response.usage.prompt_tokens") == 8 + assert 
span.get_metric("openai.response.usage.completion_tokens") is not None + assert span.get_metric("openai.response.usage.total_tokens") is not None @pytest.mark.skipif( parse_version(openai_module.version.VERSION) < (1, 26, 0), reason="Streamed tokens available in 1.26.0+" ) @pytest.mark.snapshot(token="tests.contrib.openai.test_openai.test_chat_completion_stream") -async def test_chat_completion_async_stream(openai, openai_vcr, mock_metrics, snapshot_tracer): +async def test_chat_completion_async_stream(openai, openai_vcr, snapshot_tracer): with openai_vcr.use_cassette("chat_completion_streamed_tokens.yaml"): with mock.patch("ddtrace.contrib.internal.openai.utils.encoding_for_model", create=True) as mock_encoding: mock_encoding.return_value.encode.side_effect = lambda x: [1, 2, 3, 4, 5, 6, 7, 8] @@ -1012,7 +793,7 @@ async def test_chat_completion_async_stream(openai, openai_vcr, mock_metrics, sn reason="Streamed response context managers are only available v1.6.0+, tokens available 1.26.0+", ) @pytest.mark.snapshot(token="tests.contrib.openai.test_openai.test_chat_completion_stream") -async def test_chat_completion_async_stream_context_manager(openai, openai_vcr, mock_metrics, snapshot_tracer): +async def test_chat_completion_async_stream_context_manager(openai, openai_vcr, snapshot_tracer): with openai_vcr.use_cassette("chat_completion_streamed_tokens.yaml"): with mock.patch("ddtrace.contrib.internal.openai.utils.encoding_for_model", create=True) as mock_encoding: mock_encoding.return_value.encode.side_effect = lambda x: [1, 2, 3, 4, 5, 6, 7, 8] @@ -1045,14 +826,7 @@ def test_integration_sync(openai_api_key, ddtrace_run_python_code_in_subprocess) pypath = [os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))] if "PYTHONPATH" in env: pypath.append(env["PYTHONPATH"]) - env.update( - { - "OPENAI_API_KEY": openai_api_key, - "PYTHONPATH": ":".join(pypath), - # Disable metrics because the test agent doesn't support metrics - "DD_OPENAI_METRICS_ENABLED": "false", - } - ) + env.update({"OPENAI_API_KEY": openai_api_key, "PYTHONPATH": ":".join(pypath)}) out, err, status, pid = ddtrace_run_python_code_in_subprocess( """ import openai @@ -1092,14 +866,7 @@ def test_integration_async(openai_api_key, ddtrace_run_python_code_in_subprocess pypath = [os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))] if "PYTHONPATH" in env: pypath.append(env["PYTHONPATH"]) - env.update( - { - "OPENAI_API_KEY": openai_api_key, - "PYTHONPATH": ":".join(pypath), - # Disable metrics because the test agent doesn't support metrics - "DD_OPENAI_METRICS_ENABLED": "false", - } - ) + env.update({"OPENAI_API_KEY": openai_api_key, "PYTHONPATH": ":".join(pypath)}) out, err, status, pid = ddtrace_run_python_code_in_subprocess( """ import asyncio @@ -1247,36 +1014,13 @@ def test_completion_truncation(openai, openai_vcr, mock_tracer, ddtrace_config_o @pytest.mark.parametrize("ddtrace_config_openai", [dict(span_prompt_completion_sample_rate=0)]) -def test_embedding_unsampled_prompt_completion(openai, openai_vcr, ddtrace_config_openai, mock_logs, mock_tracer): +def test_embedding_unsampled_prompt_completion(openai, openai_vcr, ddtrace_config_openai, mock_tracer): with openai_vcr.use_cassette("embedding.yaml"): client = openai.OpenAI() client.embeddings.create(input="hello world", model="text-embedding-ada-002") - logs = mock_logs.enqueue.call_count traces = mock_tracer.pop_traces() assert len(traces) == 1 assert traces[0][0].get_tag("openai.request.input") is None - assert logs == 0 - - 
-@pytest.mark.parametrize( - "ddtrace_config_openai", - [dict(logs_enabled=True, log_prompt_completion_sample_rate=r) for r in [0, 0.25, 0.75, 1]], -) -def test_logs_sample_rate(openai, openai_vcr, ddtrace_config_openai, mock_logs, mock_tracer): - total_calls = 200 - for _ in range(total_calls): - with openai_vcr.use_cassette("completion.yaml"): - client = openai.OpenAI() - client.completions.create(model="ada", prompt="Hello world", temperature=0.8, n=2, stop=".", max_tokens=10) - - logs = mock_logs.enqueue.call_count - if ddtrace.config.openai["log_prompt_completion_sample_rate"] == 0: - assert logs == 0 - elif ddtrace.config.openai["log_prompt_completion_sample_rate"] == 1: - assert logs == total_calls - else: - rate = ddtrace.config.openai["log_prompt_completion_sample_rate"] * total_calls - assert (rate - 30) < logs < (rate + 30) def test_est_tokens(): @@ -1489,14 +1233,7 @@ def test_integration_service_name(openai_api_key, ddtrace_run_python_code_in_sub pypath = [os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))] if "PYTHONPATH" in env: pypath.append(env["PYTHONPATH"]) - env.update( - { - "OPENAI_API_KEY": openai_api_key, - "PYTHONPATH": ":".join(pypath), - # Disable metrics because the test agent doesn't support metrics - "DD_OPENAI_METRICS_ENABLED": "false", - } - ) + env.update({"OPENAI_API_KEY": openai_api_key, "PYTHONPATH": ":".join(pypath)}) if schema_version: env["DD_TRACE_SPAN_ATTRIBUTE_SCHEMA"] = schema_version if service_name: diff --git a/tests/contrib/psycopg/test_psycopg.py b/tests/contrib/psycopg/test_psycopg.py index 8e13ecc4128..60f640e65d3 100644 --- a/tests/contrib/psycopg/test_psycopg.py +++ b/tests/contrib/psycopg/test_psycopg.py @@ -42,7 +42,7 @@ def _get_conn(self, service=None): conn = psycopg.connect(**POSTGRES_CONFIG) pin = Pin.get_from(conn) if pin: - pin.clone(service=service, tracer=self.tracer).onto(conn) + pin._clone(service=service, tracer=self.tracer).onto(conn) return conn @@ -140,7 +140,7 @@ def test_psycopg3_connection_with_string(self): configs_arr.append("options='-c statement_timeout=1000 -c lock_timeout=250'") conn = psycopg.connect(" ".join(configs_arr)) - Pin.get_from(conn).clone(service="postgres", tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(service="postgres", tracer=self.tracer).onto(conn) self.assert_conn_is_traced(conn, "postgres") def test_opentracing_propagation(self): @@ -522,7 +522,7 @@ def test_connection_instance_method_patch(self): pin = Pin.get_from(connection) if pin: - pin.clone(service="postgres", tracer=self.tracer).onto(connection) + pin._clone(service="postgres", tracer=self.tracer).onto(connection) query = SQL("""select 'one' as x""") cur = connection.execute(query) diff --git a/tests/contrib/psycopg/test_psycopg_async.py b/tests/contrib/psycopg/test_psycopg_async.py index 7e4fbd59624..30b8ed6c2a2 100644 --- a/tests/contrib/psycopg/test_psycopg_async.py +++ b/tests/contrib/psycopg/test_psycopg_async.py @@ -36,7 +36,7 @@ async def _get_conn(self, service=None): conn = await psycopg.AsyncConnection.connect(**POSTGRES_CONFIG) pin = Pin.get_from(conn) if pin: - pin.clone(service=service, tracer=self.tracer).onto(conn) + pin._clone(service=service, tracer=self.tracer).onto(conn) return conn diff --git a/tests/contrib/psycopg2/test_psycopg.py b/tests/contrib/psycopg2/test_psycopg.py index 902d24d3c0e..fd4d8a02fbe 100644 --- a/tests/contrib/psycopg2/test_psycopg.py +++ b/tests/contrib/psycopg2/test_psycopg.py @@ -49,7 +49,7 @@ def _get_conn(self, service=None): conn = 
psycopg2.connect(**POSTGRES_CONFIG) pin = Pin.get_from(conn) if pin: - pin.clone(service=service, tracer=self.tracer).onto(conn) + pin._clone(service=service, tracer=self.tracer).onto(conn) return conn @@ -146,7 +146,7 @@ def test_psycopg2_connection_with_string(self): configs_arr.append("options='-c statement_timeout=1000 -c lock_timeout=250'") conn = psycopg2.connect(" ".join(configs_arr)) - Pin.get_from(conn).clone(service="postgres", tracer=self.tracer).onto(conn) + Pin.get_from(conn)._clone(service="postgres", tracer=self.tracer).onto(conn) self.assert_conn_is_traced(conn, "postgres") def test_opentracing_propagation(self): diff --git a/tests/contrib/pylibmc/test.py b/tests/contrib/pylibmc/test.py index 9de012439dc..0dd4e0102bc 100644 --- a/tests/contrib/pylibmc/test.py +++ b/tests/contrib/pylibmc/test.py @@ -316,7 +316,7 @@ def get_client(self): client = pylibmc.Client([url]) client.flush_all() - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) return client, self.tracer @@ -329,7 +329,7 @@ class TestPylibmcPatch(TestPylibmcPatchDefault): def get_client(self): client, tracer = TestPylibmcPatchDefault.get_client(self) - Pin.get_from(client).clone(service=self.TEST_SERVICE).onto(client) + Pin.get_from(client)._clone(service=self.TEST_SERVICE).onto(client) return client, tracer @@ -341,7 +341,7 @@ def test_patch_unpatch(self): patch() client = pylibmc.Client([url]) - Pin.get_from(client).clone(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) client.set("a", 1) @@ -362,7 +362,9 @@ def test_patch_unpatch(self): patch() client = pylibmc.Client([url]) - Pin(service=self.TEST_SERVICE, tracer=self.tracer).onto(client) + pin = Pin(service=self.TEST_SERVICE) + pin._tracer = self.tracer + pin.onto(client) client.set("a", 1) spans = self.pop_spans() diff --git a/tests/contrib/pymemcache/test_client.py b/tests/contrib/pymemcache/test_client.py index 19a7a93d523..09c8d33e007 100644 --- a/tests/contrib/pymemcache/test_client.py +++ b/tests/contrib/pymemcache/test_client.py @@ -263,7 +263,7 @@ def test_stats(self): def test_service_name_override(self): client = self.make_client([b"STORED\r\n", b"VALUE key 0 5\r\nvalue\r\nEND\r\n"]) - Pin.override(client, service="testsvcname") + Pin._override(client, service="testsvcname") client.set(b"key", b"value", noreply=False) result = client.get(b"key") assert _str(result) == "value" @@ -280,7 +280,7 @@ def make_client(self, mock_socket_values, **kwargs): from pymemcache.client.hash import HashClient tracer = DummyTracer() - Pin.override(pymemcache, tracer=tracer) + Pin._override(pymemcache, tracer=tracer) self.client = HashClient([(TEST_HOST, TEST_PORT)], **kwargs) class _MockClient(Client): @@ -320,7 +320,7 @@ def test_service_name_override_hashclient(self): client = self.make_client([b"STORED\r\n", b"VALUE key 0 5\r\nvalue\r\nEND\r\n"]) assert len(client.clients) == 1 for _c in client.clients.values(): - Pin.override(_c, service="testsvcname") + Pin._override(_c, service="testsvcname") client.set(b"key", b"value", noreply=False) result = client.get(b"key") assert _str(result) == "value" @@ -332,7 +332,7 @@ def test_service_name_override_hashclient(self): def test_service_name_override_hashclient_pooling(self): client = self.make_client([b""], use_pooling=True) - Pin.override(client, service="testsvcname") + Pin._override(client, service="testsvcname") client.set(b"key", b"value") 
assert len(client.clients) == 1 spans = self.get_spans() @@ -351,7 +351,7 @@ def tearDown(self): def make_client(self, mock_socket_values, **kwargs): tracer = DummyTracer() - Pin.override(pymemcache, tracer=tracer) + Pin._override(pymemcache, tracer=tracer) self.client = pymemcache.client.base.Client((TEST_HOST, TEST_PORT), **kwargs) self.client.sock = MockSocket(list(mock_socket_values)) return self.client @@ -365,7 +365,7 @@ def test_same_tracer(self): def test_override_parent_pin(self): """Test that the service set on `pymemcache` is used for Clients.""" - Pin.override(pymemcache, service="mysvc") + Pin._override(pymemcache, service="mysvc") client = self.make_client([b"STORED\r\n", b"VALUE key 0 5\r\nvalue\r\nEND\r\n"]) client.set(b"key", b"value", noreply=False) @@ -378,7 +378,7 @@ def test_override_parent_pin(self): def test_override_client_pin(self): """Test that the service set on `pymemcache` is used for Clients.""" client = self.make_client([b"STORED\r\n", b"VALUE key 0 5\r\nvalue\r\nEND\r\n"]) - Pin.override(client, service="mysvc2") + Pin._override(client, service="mysvc2") client.set(b"key", b"value", noreply=False) diff --git a/tests/contrib/pymemcache/test_client_defaults.py b/tests/contrib/pymemcache/test_client_defaults.py index 0b5e44aa3b0..302be1b0abd 100644 --- a/tests/contrib/pymemcache/test_client_defaults.py +++ b/tests/contrib/pymemcache/test_client_defaults.py @@ -18,7 +18,7 @@ def client(tracer): try: patch() - Pin.override(pymemcache, tracer=tracer) + Pin._override(pymemcache, tracer=tracer) with override_config("pymemcache", dict(command_enabled=False)): client = pymemcache.client.base.Client((TEST_HOST, TEST_PORT)) yield client diff --git a/tests/contrib/pymemcache/test_client_mixin.py b/tests/contrib/pymemcache/test_client_mixin.py index 2d471765e1f..b47c6d8b154 100644 --- a/tests/contrib/pymemcache/test_client_mixin.py +++ b/tests/contrib/pymemcache/test_client_mixin.py @@ -68,7 +68,7 @@ def tearDown(self): def make_client(self, mock_socket_values, **kwargs): tracer = DummyTracer() - Pin.override(pymemcache, tracer=tracer) + Pin._override(pymemcache, tracer=tracer) self.client = pymemcache.client.base.Client((TEST_HOST, TEST_PORT), **kwargs) self.client.sock = MockSocket(list(mock_socket_values)) return self.client diff --git a/tests/contrib/pymongo/test.py b/tests/contrib/pymongo/test.py index b6669d40ac0..31bd45b4674 100644 --- a/tests/contrib/pymongo/test.py +++ b/tests/contrib/pymongo/test.py @@ -420,7 +420,7 @@ def test_patch_pymongo_client_after_import(self): tracer = DummyTracer() client = MongoClient(port=MONGO_CONFIG["port"]) # Ensure the dummy tracer is used to create span in the pymongo integration - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) # Ensure that the client is traced client.server_info() spans = tracer.pop() @@ -440,7 +440,7 @@ def tearDown(self): def get_tracer_and_client(self): tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) return tracer, client def test_host_kwarg(self): @@ -471,7 +471,9 @@ def tearDown(self): def get_tracer_and_client(self): client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin(service="pymongo", tracer=self.tracer).onto(client) + pin = Pin(service="pymongo") + pin._tracer = self.tracer + pin.onto(client) return self.tracer, client def test_patch_unpatch(self): @@ -480,7 +482,7 @@ def 
test_patch_unpatch(self): patch() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) client["testdb"].drop_collection("whatever") spans = self.pop_spans() @@ -500,7 +502,7 @@ def test_patch_unpatch(self): patch() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=self.tracer).onto(client) + Pin.get_from(client)._clone(tracer=self.tracer).onto(client) client["testdb"].drop_collection("whatever") spans = self.pop_spans() @@ -520,7 +522,7 @@ def test_user_specified_service_default(self): tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() assert len(spans) == 2 @@ -539,7 +541,7 @@ def test_user_specified_service_v0(self): tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() assert len(spans) == 2 @@ -559,7 +561,7 @@ def test_user_specified_service_default_override(self): assert cfg.service == "new-mongo", f"service name is {cfg.service}" tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() @@ -579,7 +581,7 @@ def test_user_specified_service_v1(self): tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() assert len(spans) == 2 @@ -593,7 +595,7 @@ def test_unspecified_service_v0(self): """ tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() assert len(spans) == 2 @@ -605,8 +607,10 @@ def test_unspecified_service_v0(self): def test_user_specified_pymongo_service_v0(self): tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin(service="mypymongo", tracer=self.tracer).onto(client) - Pin.get_from(client).clone(tracer=tracer).onto(client) + pin = Pin(service="mypymongo") + pin._tracer = self.tracer + pin.onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() assert len(spans) == 2 @@ -618,8 +622,10 @@ def test_user_specified_pymongo_service_v0(self): def test_user_specified_pymongo_service_v1(self): tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin(service="mypymongo", tracer=self.tracer).onto(client) - Pin.get_from(client).clone(tracer=tracer).onto(client) + pin = Pin(service="mypymongo") + pin._tracer = self.tracer + pin.onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() assert len(spans) == 2 @@ -631,8 +637,10 @@ def test_user_specified_pymongo_service_v1(self): def 
test_service_precedence_v0(self): tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin(service="mypymongo", tracer=self.tracer).onto(client) - Pin.get_from(client).clone(tracer=tracer).onto(client) + pin = Pin(service="mypymongo") + pin._tracer = self.tracer + pin.onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() assert len(spans) == 2 @@ -644,8 +652,10 @@ def test_service_precedence_v0(self): def test_service_precedence_v1(self): tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin(service="mypymongo", tracer=self.tracer).onto(client) - Pin.get_from(client).clone(tracer=tracer).onto(client) + pin = Pin(service="mypymongo") + pin._tracer = self.tracer + pin.onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() assert len(spans) == 2 @@ -658,7 +668,7 @@ def test_operation_name_v0_schema(self): """ tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() assert len(spans) == 2 @@ -671,7 +681,7 @@ def test_operation_name_v1_schema(self): """ tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) client["testdb"].drop_collection("whatever") spans = tracer.pop() assert len(spans) == 2 @@ -681,7 +691,7 @@ def test_operation_name_v1_schema(self): def test_peer_service_tagging(self): tracer = DummyTracer() client = pymongo.MongoClient(port=MONGO_CONFIG["port"]) - Pin.get_from(client).clone(tracer=tracer).onto(client) + Pin.get_from(client)._clone(tracer=tracer).onto(client) db_name = "testdb" client[db_name].drop_collection("whatever") spans = tracer.pop() @@ -757,13 +767,13 @@ def setUp(self): super(TestPymongoSocketTracing, self).setUp() patch() # Override server pin's tracer with our dummy tracer - Pin.override(Server, tracer=self.tracer) + Pin._override(Server, tracer=self.tracer) # maxPoolSize controls the number of sockets that the client can instantiate # and choose from to perform classic operations. 
For the sake of our tests, # let's limit this number to 1 self.client = pymongo.MongoClient(port=MONGO_CONFIG["port"], maxPoolSize=1) # Override MongoClient's pin's tracer with our dummy tracer - Pin.override(self.client, tracer=self.tracer, service="testdb") + Pin._override(self.client, tracer=self.tracer, service="testdb") def tearDown(self): unpatch() diff --git a/tests/contrib/pymysql/test_pymysql.py b/tests/contrib/pymysql/test_pymysql.py index e94e03c8395..9638289e6fc 100644 --- a/tests/contrib/pymysql/test_pymysql.py +++ b/tests/contrib/pymysql/test_pymysql.py @@ -347,7 +347,7 @@ def _get_conn_tracer(self): assert pin # Customize the service # we have to apply it on the existing one since new one won't inherit `app` - pin.clone(tracer=self.tracer).onto(self.conn) + pin._clone(tracer=self.tracer).onto(self.conn) return self.conn, self.tracer @@ -363,7 +363,7 @@ def test_patch_unpatch(self): conn = pymysql.connect(**MYSQL_CONFIG) pin = Pin.get_from(conn) assert pin - pin.clone(tracer=self.tracer).onto(conn) + pin._clone(tracer=self.tracer).onto(conn) assert not conn._closed cursor = conn.cursor() @@ -396,7 +396,7 @@ def test_patch_unpatch(self): def test_user_pin_override(self): conn, tracer = self._get_conn_tracer() pin = Pin.get_from(conn) - pin.clone(service="pin-svc", tracer=self.tracer).onto(conn) + pin._clone(service="pin-svc", tracer=self.tracer).onto(conn) cursor = conn.cursor() cursor.execute("SELECT 1") rows = cursor.fetchall() diff --git a/tests/contrib/pynamodb/test_pynamodb.py b/tests/contrib/pynamodb/test_pynamodb.py index 33b4e4c2c14..d0f459011fc 100644 --- a/tests/contrib/pynamodb/test_pynamodb.py +++ b/tests/contrib/pynamodb/test_pynamodb.py @@ -22,7 +22,7 @@ def setUp(self): self.conn.session.set_credentials("aws-access-key", "aws-secret-access-key", "session-token") super(PynamodbTest, self).setUp() - Pin.override(self.conn, tracer=self.tracer) + Pin._override(self.conn, tracer=self.tracer) def tearDown(self): super(PynamodbTest, self).tearDown() @@ -268,7 +268,7 @@ def test_env_user_specified_pynamodb_service(self): # Manual override dynamodb_backend.create_table("Test", hash_key_attr="content", hash_key_type="S") - Pin.override(self.conn, service="mypynamodb", tracer=self.tracer) + Pin._override(self.conn, service="mypynamodb", tracer=self.tracer) list_result = self.conn.list_tables() span = self.get_spans()[0] assert span.service == "mypynamodb", span.service @@ -289,7 +289,7 @@ def test_service_precedence(self): # Manual override dynamodb_backend.create_table("Test", hash_key_attr="content", hash_key_type="S") - Pin.override(self.conn, service="override-pynamodb", tracer=self.tracer) + Pin._override(self.conn, service="override-pynamodb", tracer=self.tracer) list_result = self.conn.list_tables() span = self.get_spans()[0] assert span.service == "override-pynamodb", span.service diff --git a/tests/contrib/pyodbc/test_pyodbc.py b/tests/contrib/pyodbc/test_pyodbc.py index 4c965aede7b..3f78f622ef8 100644 --- a/tests/contrib/pyodbc/test_pyodbc.py +++ b/tests/contrib/pyodbc/test_pyodbc.py @@ -211,7 +211,7 @@ def _get_conn_tracer(self): assert pin # Customize the service # we have to apply it on the existing one since new one won't inherit `app` - pin.clone(tracer=self.tracer).onto(self.conn) + pin._clone(tracer=self.tracer).onto(self.conn) return self.conn, self.tracer @@ -227,7 +227,7 @@ def test_patch_unpatch(self): conn = pyodbc.connect(PYODBC_CONNECT_DSN) pin = Pin.get_from(conn) assert pin - pin.clone(tracer=self.tracer).onto(conn) + 
pin._clone(tracer=self.tracer).onto(conn) cursor = conn.cursor() cursor.execute("SELECT 1") @@ -256,7 +256,7 @@ def test_patch_unpatch(self): def test_user_pin_override(self): conn, tracer = self._get_conn_tracer() pin = Pin.get_from(conn) - pin.clone(service="pin-svc", tracer=self.tracer).onto(conn) + pin._clone(service="pin-svc", tracer=self.tracer).onto(conn) cursor = conn.cursor() cursor.execute("SELECT 1") rows = cursor.fetchall() diff --git a/tests/contrib/redis/test_redis.py b/tests/contrib/redis/test_redis.py index fb83f6f53fc..7a5ee5d92a0 100644 --- a/tests/contrib/redis/test_redis.py +++ b/tests/contrib/redis/test_redis.py @@ -25,7 +25,7 @@ def setUp(self): patch() r = redis.Redis(port=self.TEST_PORT) r.flushall() - Pin.override(r, tracer=self.tracer) + Pin._override(r, tracer=self.tracer) self.r = r def tearDown(self): @@ -194,7 +194,7 @@ def test_meta_override(self): r = self.r pin = Pin.get_from(r) if pin: - pin.clone(tags={"cheese": "camembert"}).onto(r) + pin._clone(tags={"cheese": "camembert"}).onto(r) r.get("cheese") spans = self.get_spans() @@ -211,7 +211,7 @@ def test_patch_unpatch(self): patch() r = redis.Redis(port=REDIS_CONFIG["port"]) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) r.get("key") spans = tracer.pop() @@ -231,7 +231,7 @@ def test_patch_unpatch(self): patch() r = redis.Redis(port=REDIS_CONFIG["port"]) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) r.get("key") spans = tracer.pop() @@ -429,7 +429,7 @@ def test_env_user_specified_redis_service_v0(self): self.reset() # Manual override - Pin.override(self.r, service="mysvc", tracer=self.tracer) + Pin._override(self.r, service="mysvc", tracer=self.tracer) self.r.get("cheese") span = self.get_spans()[0] assert span.service == "mysvc", span.service @@ -447,7 +447,7 @@ def test_service_precedence_v0(self): self.reset() # Do a manual override - Pin.override(self.r, service="override-redis", tracer=self.tracer) + Pin._override(self.r, service="override-redis", tracer=self.tracer) self.r.get("cheese") span = self.get_spans()[0] assert span.service == "override-redis", span.service @@ -501,7 +501,7 @@ def test_meta_override(self): r = self.r pin = Pin.get_from(r) if pin: - pin.clone(tags={"cheese": "camembert"}).onto(r) + pin._clone(tags={"cheese": "camembert"}).onto(r) r.get("cheese") @@ -513,7 +513,7 @@ def test_patch_unpatch(self): patch() r = redis.Redis(port=REDIS_CONFIG["port"]) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) r.get("key") spans = tracer.pop() @@ -533,7 +533,7 @@ def test_patch_unpatch(self): patch() r = redis.Redis(port=REDIS_CONFIG["port"]) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) r.get("key") spans = tracer.pop() @@ -576,7 +576,7 @@ def test_env_user_specified_redis_service(self): self.reset() # Manual override - Pin.override(self.r, service="mysvc", tracer=self.tracer) + Pin._override(self.r, service="mysvc", tracer=self.tracer) self.r.get("cheese") @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="app-svc", DD_REDIS_SERVICE="env-redis")) @@ -587,7 +587,7 @@ def test_service_precedence(self): self.reset() # Do a manual override - Pin.override(self.r, service="override-redis", tracer=self.tracer) + Pin._override(self.r, service="override-redis", tracer=self.tracer) self.r.get("cheese") @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_REDIS_CMD_MAX_LENGTH="10")) diff --git 
a/tests/contrib/redis/test_redis_asyncio.py b/tests/contrib/redis/test_redis_asyncio.py index b1bd5858e04..72fc0f47c63 100644 --- a/tests/contrib/redis/test_redis_asyncio.py +++ b/tests/contrib/redis/test_redis_asyncio.py @@ -130,7 +130,7 @@ async def test_override_service_name(redis_client): @pytest.mark.snapshot(wait_for_num_traces=1) async def test_pin(redis_client): - Pin.override(redis_client, service="my-redis") + Pin._override(redis_client, service="my-redis") val = await redis_client.get("cheese") assert val is None diff --git a/tests/contrib/redis/test_redis_cluster.py b/tests/contrib/redis/test_redis_cluster.py index 2731a18fcee..54fb778987a 100644 --- a/tests/contrib/redis/test_redis_cluster.py +++ b/tests/contrib/redis/test_redis_cluster.py @@ -26,7 +26,7 @@ def setUp(self): patch() r = self._get_test_client() r.flushall() - Pin.override(r, tracer=self.tracer) + Pin._override(r, tracer=self.tracer) self.r = r def tearDown(self): @@ -103,7 +103,7 @@ def test_patch_unpatch(self): patch() r = self._get_test_client() - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) r.get("key") spans = tracer.pop() @@ -123,7 +123,7 @@ def test_patch_unpatch(self): patch() r = self._get_test_client() - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) r.get("key") spans = tracer.pop() @@ -142,7 +142,7 @@ def test_user_specified_service_v0(self): assert config.service == "mysvc" r = self._get_test_client() - Pin.get_from(r).clone(tracer=self.tracer).onto(r) + Pin.get_from(r)._clone(tracer=self.tracer).onto(r) r.get("key") spans = self.get_spans() @@ -162,7 +162,7 @@ def test_user_specified_service_v1(self): assert config.service == "mysvc" r = self._get_test_client() - Pin.get_from(r).clone(tracer=self.tracer).onto(r) + Pin.get_from(r)._clone(tracer=self.tracer).onto(r) r.get("key") spans = self.get_spans() diff --git a/tests/contrib/redis/test_redis_cluster_asyncio.py b/tests/contrib/redis/test_redis_cluster_asyncio.py index b8624c533aa..65b6a2348d3 100644 --- a/tests/contrib/redis/test_redis_cluster_asyncio.py +++ b/tests/contrib/redis/test_redis_cluster_asyncio.py @@ -28,7 +28,7 @@ async def traced_redis_cluster(tracer, test_spans): startup_nodes = [redis.asyncio.cluster.ClusterNode(TEST_HOST, int(port)) for port in TEST_PORTS.split(",")] redis_cluster = redis.asyncio.cluster.RedisCluster(startup_nodes=startup_nodes) await redis_cluster.flushall() - Pin.override(redis_cluster, tracer=tracer) + Pin._override(redis_cluster, tracer=tracer) try: yield redis_cluster, test_spans finally: @@ -125,7 +125,7 @@ async def test_patch_unpatch(redis_cluster): patch() r = redis_cluster - Pin.override(r, tracer=tracer) + Pin._override(r, tracer=tracer) await r.get("key") spans = tracer.pop() @@ -145,7 +145,7 @@ async def test_patch_unpatch(redis_cluster): patch() r = redis_cluster - Pin.override(r, tracer=tracer) + Pin._override(r, tracer=tracer) await r.get("key") spans = tracer.pop() @@ -182,7 +182,7 @@ async def test(): tracer = DummyTracer() test_spans = TracerSpanContainer(tracer) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) await r.get("key") await r.close() @@ -231,7 +231,7 @@ async def test(): tracer = DummyTracer() test_spans = TracerSpanContainer(tracer) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) await r.get("key") await r.close() @@ -280,7 +280,7 @@ async def test(): tracer = DummyTracer() test_spans = 
TracerSpanContainer(tracer) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) await r.get("key") await r.close() @@ -321,7 +321,7 @@ async def test(): tracer = DummyTracer() test_spans = TracerSpanContainer(tracer) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) await r.get("key") await r.close() @@ -362,7 +362,7 @@ async def test(): tracer = DummyTracer() test_spans = TracerSpanContainer(tracer) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) await r.get("key") await r.close() @@ -411,7 +411,7 @@ async def test(): tracer = DummyTracer() test_spans = TracerSpanContainer(tracer) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) await r.get("key") await r.close() @@ -456,7 +456,7 @@ async def test(): tracer = DummyTracer() test_spans = TracerSpanContainer(tracer) - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) await r.get("key") await r.close() diff --git a/tests/contrib/rediscluster/test.py b/tests/contrib/rediscluster/test.py index a2c5ac5c6b2..79b4c806440 100644 --- a/tests/contrib/rediscluster/test.py +++ b/tests/contrib/rediscluster/test.py @@ -43,7 +43,7 @@ def setUp(self): patch() r = _get_test_client() r.flushall() - Pin.override(r, tracer=self.tracer) + Pin._override(r, tracer=self.tracer) self.r = r def tearDown(self): @@ -115,7 +115,7 @@ def test_patch_unpatch(self): patch() r = _get_test_client() - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) r.get("key") spans = tracer.pop() @@ -135,7 +135,7 @@ def test_patch_unpatch(self): patch() r = _get_test_client() - Pin.get_from(r).clone(tracer=tracer).onto(r) + Pin.get_from(r)._clone(tracer=tracer).onto(r) r.get("key") spans = tracer.pop() @@ -154,7 +154,7 @@ def test_user_specified_service_v0(self): assert config.service == "mysvc" r = _get_test_client() - Pin.get_from(r).clone(tracer=self.tracer).onto(r) + Pin.get_from(r)._clone(tracer=self.tracer).onto(r) r.get("key") spans = self.get_spans() @@ -174,7 +174,7 @@ def test_user_specified_service_v1(self): assert config.service == "mysvc" r = _get_test_client() - Pin.get_from(r).clone(tracer=self.tracer).onto(r) + Pin.get_from(r)._clone(tracer=self.tracer).onto(r) r.get("key") spans = self.get_spans() @@ -189,7 +189,7 @@ def test_unspecified_service_v1(self): the default span service name """ r = _get_test_client() - Pin.get_from(r).clone(tracer=self.tracer).onto(r) + Pin.get_from(r)._clone(tracer=self.tracer).onto(r) r.get("key") spans = self.get_spans() @@ -220,7 +220,7 @@ def test_span_name_v0_schema(self): the default span service name """ r = _get_test_client() - Pin.get_from(r).clone(tracer=self.tracer).onto(r) + Pin.get_from(r)._clone(tracer=self.tracer).onto(r) r.get("key") spans = self.get_spans() @@ -235,7 +235,7 @@ def test_span_name_v1_schema(self): the default span service name """ r = _get_test_client() - Pin.get_from(r).clone(tracer=self.tracer).onto(r) + Pin.get_from(r)._clone(tracer=self.tracer).onto(r) r.get("key") spans = self.get_spans() diff --git a/tests/contrib/rq/test_rq.py b/tests/contrib/rq/test_rq.py index d72871823da..65908ac9717 100644 --- a/tests/contrib/rq/test_rq.py +++ b/tests/contrib/rq/test_rq.py @@ -117,7 +117,7 @@ def test_sync_worker_config_service(queue): @snapshot(ignores=snapshot_ignores) def test_queue_pin_service(queue): - Pin.override(queue, service="my-pin-svc") + 
Pin._override(queue, service="my-pin-svc") job = queue.enqueue(job_add1, 10) worker = rq.SimpleWorker([queue], connection=queue.connection) worker.work(burst=True) @@ -128,7 +128,7 @@ def test_queue_pin_service(queue): def test_sync_worker_pin_service(queue): job = queue.enqueue(job_add1, 10) worker = rq.SimpleWorker([queue], connection=queue.connection) - Pin.override(worker, service="my-pin-svc") + Pin._override(worker, service="my-pin-svc") worker.work(burst=True) assert job.result == 11 diff --git a/tests/contrib/shared_tests.py b/tests/contrib/shared_tests.py index cf647a15628..dfc3b5e6a0a 100644 --- a/tests/contrib/shared_tests.py +++ b/tests/contrib/shared_tests.py @@ -72,8 +72,8 @@ def _test_dbm_propagation_comment_pin_service_name_override( """tests if dbm comment is set in mysql""" db_name = config["db"] - Pin.override(conn, service="pin-service-name-override", tracer=tracer) - Pin.override(cursor, service="pin-service-name-override", tracer=tracer) + Pin._override(conn, service="pin-service-name-override", tracer=tracer) + Pin._override(cursor, service="pin-service-name-override", tracer=tracer) dbm_comment = ( f"/*dddb='{db_name}',dddbs='pin-service-name-override',dde='staging',ddh='127.0.0.1',ddps='orders-app'," diff --git a/tests/contrib/shared_tests_async.py b/tests/contrib/shared_tests_async.py index 0d49f09d608..f06b6d278f5 100644 --- a/tests/contrib/shared_tests_async.py +++ b/tests/contrib/shared_tests_async.py @@ -72,8 +72,8 @@ async def _test_dbm_propagation_comment_pin_service_name_override( """tests if dbm comment is set in mysql""" db_name = config["db"] - Pin.override(conn, service="pin-service-name-override", tracer=tracer) - Pin.override(cursor, service="pin-service-name-override", tracer=tracer) + Pin._override(conn, service="pin-service-name-override", tracer=tracer) + Pin._override(cursor, service="pin-service-name-override", tracer=tracer) dbm_comment = ( f"/*dddb='{db_name}',dddbs='pin-service-name-override',dde='staging',ddh='127.0.0.1',ddps='orders-app'," diff --git a/tests/contrib/sqlalchemy/test_patch.py b/tests/contrib/sqlalchemy/test_patch.py index a6f08bb5f46..35245d5a906 100644 --- a/tests/contrib/sqlalchemy/test_patch.py +++ b/tests/contrib/sqlalchemy/test_patch.py @@ -25,7 +25,7 @@ def setUp(self): patch() dsn = "postgresql://%(user)s:%(password)s@%(host)s:%(port)s/%(dbname)s" % POSTGRES_CONFIG self.engine = sqlalchemy.create_engine(dsn) - Pin.override(self.engine, tracer=self.tracer) + Pin._override(self.engine, tracer=self.tracer) # prepare a connection self.conn = self.engine.connect() @@ -57,7 +57,7 @@ def test_engine_traced(self): def test_engine_pin_service(self): # ensures that the engine service is updated with the PIN object - Pin.override(self.engine, service="replica-db") + Pin._override(self.engine, service="replica-db") rows = self.conn.execute(text("SELECT 1")).fetchall() assert len(rows) == 1 diff --git a/tests/contrib/sqlite3/test_sqlite3.py b/tests/contrib/sqlite3/test_sqlite3.py index 6101dcfa081..df1bbdbabc5 100644 --- a/tests/contrib/sqlite3/test_sqlite3.py +++ b/tests/contrib/sqlite3/test_sqlite3.py @@ -64,7 +64,7 @@ def test_sqlite(self): db = sqlite3.connect(":memory:") pin = Pin.get_from(db) assert pin - pin.clone(service=service, tracer=self.tracer).onto(db) + pin._clone(service=service, tracer=self.tracer).onto(db) # Ensure we can run a query and it's correctly traced q = "select * from sqlite_master" @@ -216,7 +216,7 @@ def test_sqlite_ot(self): db = sqlite3.connect(":memory:") pin = Pin.get_from(db) assert pin - 
pin.clone(tracer=self.tracer).onto(db) + pin._clone(tracer=self.tracer).onto(db) cursor = db.execute(q) rows = cursor.fetchall() assert not rows @@ -233,7 +233,7 @@ def test_sqlite_ot(self): db = sqlite3.connect(":memory:") pin = Pin.get_from(db) assert pin - pin.clone(tracer=self.tracer).onto(db) + pin._clone(tracer=self.tracer).onto(db) cursor = db.execute(q) rows = cursor.fetchall() assert not rows @@ -270,7 +270,7 @@ def test_patch_unpatch(self): db = sqlite3.connect(":memory:") pin = Pin.get_from(db) assert pin - pin.clone(tracer=self.tracer).onto(db) + pin._clone(tracer=self.tracer).onto(db) db.cursor().execute("select 'blah'").fetchall() self.assert_structure( @@ -292,7 +292,7 @@ def test_patch_unpatch(self): db = sqlite3.connect(":memory:") pin = Pin.get_from(db) assert pin - pin.clone(tracer=self.tracer).onto(db) + pin._clone(tracer=self.tracer).onto(db) db.cursor().execute("select 'blah'").fetchall() self.assert_structure( @@ -301,7 +301,7 @@ def test_patch_unpatch(self): def _given_a_traced_connection(self, tracer): db = sqlite3.connect(":memory:") - Pin.get_from(db).clone(tracer=tracer).onto(db) + Pin.get_from(db)._clone(tracer=tracer).onto(db) return db @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) diff --git a/tests/contrib/starlette/test_starlette.py b/tests/contrib/starlette/test_starlette.py index f290ade8ea7..78b7ac135fe 100644 --- a/tests/contrib/starlette/test_starlette.py +++ b/tests/contrib/starlette/test_starlette.py @@ -38,7 +38,7 @@ def engine(): def tracer(engine): original_tracer = ddtrace.tracer tracer = DummyTracer() - Pin.override(engine, tracer=tracer) + Pin._override(engine, tracer=tracer) ddtrace.tracer = tracer starlette_patch() yield tracer diff --git a/tests/contrib/subprocess/test_subprocess.py b/tests/contrib/subprocess/test_subprocess.py index 40e7ab67431..f9084d23db1 100644 --- a/tests/contrib/subprocess/test_subprocess.py +++ b/tests/contrib/subprocess/test_subprocess.py @@ -194,7 +194,7 @@ def test_truncation(cmdline_obj, expected_str, expected_list, truncated): def test_ossystem(tracer): with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(os).clone(tracer=tracer).onto(os) + Pin.get_from(os)._clone(tracer=tracer).onto(os) with tracer.trace("ossystem_test"): ret = os.system("dir -l /") assert ret == 0 @@ -215,7 +215,7 @@ def test_ossystem(tracer): def test_fork(tracer): with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(os).clone(tracer=tracer).onto(os) + Pin.get_from(os)._clone(tracer=tracer).onto(os) with tracer.trace("ossystem_test"): pid = os.fork() if pid == 0: @@ -241,7 +241,7 @@ def test_fork(tracer): def test_unpatch(tracer): with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(os).clone(tracer=tracer).onto(os) + Pin.get_from(os)._clone(tracer=tracer).onto(os) with tracer.trace("os.system"): ret = os.system("dir -l /") assert ret == 0 @@ -254,7 +254,7 @@ def test_unpatch(tracer): unpatch() with override_global_config(dict(_asm_enabled=True)): - Pin.get_from(os).clone(tracer=tracer).onto(os) + Pin.get_from(os)._clone(tracer=tracer).onto(os) with tracer.trace("os.system_unpatch"): ret = os.system("dir -l /") assert ret == 0 @@ -283,7 +283,7 @@ def test_ossystem_noappsec(tracer): def test_ospopen(tracer): with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(subprocess).clone(tracer=tracer).onto(subprocess) + Pin.get_from(subprocess)._clone(tracer=tracer).onto(subprocess) with 
tracer.trace("os.popen"): pipe = os.popen("dir -li /") content = pipe.read() @@ -330,7 +330,7 @@ def test_ospopen(tracer): def test_osspawn_variants(tracer, function, mode, arguments): with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(os).clone(tracer=tracer).onto(os) + Pin.get_from(os)._clone(tracer=tracer).onto(os) if "_" in function.__name__: # wrapt changes function names when debugging @@ -369,7 +369,7 @@ def test_osspawn_variants(tracer, function, mode, arguments): def test_subprocess_init_shell_true(tracer): with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(subprocess).clone(tracer=tracer).onto(subprocess) + Pin.get_from(subprocess)._clone(tracer=tracer).onto(subprocess) with tracer.trace("subprocess.Popen.init", span_type=SpanTypes.SYSTEM): subp = subprocess.Popen(["dir", "-li", "/"], shell=True) subp.wait() @@ -389,7 +389,7 @@ def test_subprocess_init_shell_true(tracer): def test_subprocess_init_shell_false(tracer): with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(subprocess).clone(tracer=tracer).onto(subprocess) + Pin.get_from(subprocess)._clone(tracer=tracer).onto(subprocess) with tracer.trace("subprocess.Popen.init", span_type=SpanTypes.SYSTEM): subp = subprocess.Popen(["dir", "-li", "/"], shell=False) subp.wait() @@ -406,7 +406,7 @@ def test_subprocess_wait_shell_false(tracer): args = ["dir", "-li", "/"] with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(subprocess).clone(tracer=tracer).onto(subprocess) + Pin.get_from(subprocess)._clone(tracer=tracer).onto(subprocess) with tracer.trace("subprocess.Popen.init", span_type=SpanTypes.SYSTEM): subp = subprocess.Popen(args=args, shell=False) subp.wait() @@ -419,7 +419,7 @@ def test_subprocess_wait_shell_false(tracer): def test_subprocess_wait_shell_true(tracer): with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(subprocess).clone(tracer=tracer).onto(subprocess) + Pin.get_from(subprocess)._clone(tracer=tracer).onto(subprocess) with tracer.trace("subprocess.Popen.init", span_type=SpanTypes.SYSTEM): subp = subprocess.Popen(args=["dir", "-li", "/"], shell=True) subp.wait() @@ -430,7 +430,7 @@ def test_subprocess_wait_shell_true(tracer): def test_subprocess_run(tracer): with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(subprocess).clone(tracer=tracer).onto(subprocess) + Pin.get_from(subprocess)._clone(tracer=tracer).onto(subprocess) with tracer.trace("subprocess.Popen.wait"): result = subprocess.run(["dir", "-l", "/"], shell=True) assert result.returncode == 0 @@ -451,7 +451,7 @@ def test_subprocess_run(tracer): def test_subprocess_communicate(tracer): with override_global_config(dict(_asm_enabled=True)): patch() - Pin.get_from(subprocess).clone(tracer=tracer).onto(subprocess) + Pin.get_from(subprocess)._clone(tracer=tracer).onto(subprocess) with tracer.trace("subprocess.Popen.wait"): subp = subprocess.Popen(args=["dir", "-li", "/"], shell=True) subp.communicate() diff --git a/tests/contrib/suitespec.yml b/tests/contrib/suitespec.yml index 366e28aaaf9..c49f3e79dbf 100644 --- a/tests/contrib/suitespec.yml +++ b/tests/contrib/suitespec.yml @@ -239,6 +239,11 @@ components: urllib3: - ddtrace/contrib/_urllib3.py - ddtrace/contrib/internal/urllib3/* + valkey: + - ddtrace/contrib/internal/valkey/* + - ddtrace/contrib/internal/valkey_utils.py + - ddtrace/_trace/utils_valkey.py + - ddtrace/ext/valkey.py vertica: - ddtrace/contrib/_vertica.py - ddtrace/contrib/internal/vertica/* @@ 
-1176,3 +1181,19 @@ suites: services: - redis snapshot: true + valkey: + parallelism: 5 + paths: + - '@bootstrap' + - '@core' + - '@contrib' + - '@tracing' + - '@valkey' + - tests/contrib/valkey/* + - tests/snapshots/tests.contrib.valkey.* + pattern: ^valkey* + runner: riot + services: + - valkeycluster + - valkey + snapshot: true diff --git a/tests/contrib/tornado/test_config.py b/tests/contrib/tornado/test_config.py index aaa87fcb2ec..0130db034eb 100644 --- a/tests/contrib/tornado/test_config.py +++ b/tests/contrib/tornado/test_config.py @@ -1,6 +1,5 @@ from ddtrace.trace import TraceFilter -from ddtrace.trace import Tracer -from tests.utils import DummyWriter +from tests.utils import DummyTracer from .utils import TornadoTestCase @@ -19,8 +18,7 @@ class TestTornadoSettings(TornadoTestCase): """ def get_app(self): - # Override with a real tracer - self.tracer = Tracer() + self.tracer = DummyTracer() super(TestTornadoSettings, self).get_app() def get_settings(self): @@ -40,25 +38,6 @@ def get_settings(self): }, } - def test_tracer_is_properly_configured(self): - # the tracer must be properly configured - assert self.tracer._tags.get("env") == "production" - assert self.tracer._tags.get("debug") == "false" - assert self.tracer.enabled is False - assert self.tracer.agent_trace_url == "http://dd-agent.service.consul:8126" - - writer = DummyWriter() - self.tracer._configure(enabled=True, writer=writer) - with self.tracer.trace("keep"): - pass - spans = writer.pop() - assert len(spans) == 1 - - with self.tracer.trace("drop"): - pass - spans = writer.pop() - assert len(spans) == 0 - class TestTornadoSettingsEnabled(TornadoTestCase): def get_settings(self): diff --git a/tests/contrib/urllib3/test_urllib3.py b/tests/contrib/urllib3/test_urllib3.py index 24ba7815e56..370c08c7904 100644 --- a/tests/contrib/urllib3/test_urllib3.py +++ b/tests/contrib/urllib3/test_urllib3.py @@ -35,7 +35,7 @@ def setUp(self): patch() self.http = urllib3.PoolManager() - Pin.override(urllib3.connectionpool.HTTPConnectionPool, tracer=self.tracer) + Pin._override(urllib3.connectionpool.HTTPConnectionPool, tracer=self.tracer) def tearDown(self): super(BaseUrllib3TestCase, self).tearDown() diff --git a/tests/contrib/valkey/__init__.py b/tests/contrib/valkey/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/contrib/valkey/test_valkey.py b/tests/contrib/valkey/test_valkey.py new file mode 100644 index 00000000000..772418dcc16 --- /dev/null +++ b/tests/contrib/valkey/test_valkey.py @@ -0,0 +1,615 @@ +# -*- coding: utf-8 -*- +from unittest import mock + +import pytest +import valkey + +import ddtrace +from ddtrace.contrib.internal.valkey.patch import patch +from ddtrace.contrib.internal.valkey.patch import unpatch +from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin +from tests.opentracer.utils import init_tracer +from tests.utils import DummyTracer +from tests.utils import TracerTestCase +from tests.utils import snapshot + +from ..config import VALKEY_CONFIG + + +class TestValkeyPatch(TracerTestCase): + TEST_PORT = VALKEY_CONFIG["port"] + + def setUp(self): + super(TestValkeyPatch, self).setUp() + patch() + r = valkey.Valkey(port=self.TEST_PORT) + r.flushall() + Pin._override(r, tracer=self.tracer) + self.r = r + + def tearDown(self): + unpatch() + super(TestValkeyPatch, self).tearDown() + + def command_test_rowcount(self, raw_command, row_count, expect_result=True, **kwargs): + command_args_as_list = raw_command.split(" ") + + command_name = 
command_args_as_list[0].lower() + + if hasattr(self.r, command_name): + func = getattr(self.r, command_name) + + try: + # try to run the function with kwargs; may fail depending on the valkey version + result = func(*command_args_as_list[1:], **kwargs) + for k in kwargs.keys(): + raw_command += " " + str(kwargs[k]) + except Exception: + # try again without keyword arguments + result = func(*command_args_as_list[1:]) + + if expect_result: + assert result is not None + else: + empty_result = [None, [], {}, b""] + if isinstance(result, list): + result = [x for x in result if x] + assert result in empty_result + + command_span = self.get_spans()[-1] + + assert command_span.name == "valkey.command" + assert command_span.get_tag("valkey.raw_command") == raw_command + assert command_span.get_metric("db.row_count") == row_count + + def test_long_command(self): + self.r.mget(*range(1000)) + + spans = self.get_spans() + assert len(spans) == 1 + span = spans[0] + + self.assert_is_measured(span) + assert span.service == "valkey" + assert span.name == "valkey.command" + assert span.span_type == "valkey" + assert span.error == 0 + meta = { + "out.host": "localhost", + } + metrics = { + "network.destination.port": self.TEST_PORT, + "out.valkey_db": 0, + } + for k, v in meta.items(): + assert span.get_tag(k) == v + for k, v in metrics.items(): + assert span.get_metric(k) == v + + assert span.get_tag("valkey.raw_command").startswith("MGET 0 1 2 3") + assert span.get_tag("valkey.raw_command").endswith("...") + assert span.get_tag("component") == "valkey" + assert span.get_tag("span.kind") == "client" + assert span.get_tag("db.system") == "valkey" + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) + def test_service_name_v1(self): + us = self.r.get("cheese") + assert us is None + spans = self.get_spans() + span = spans[0] + assert span.service == DEFAULT_SPAN_SERVICE_NAME + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) + def test_operation_name_v0_schema(self): + us = self.r.get("cheese") + assert us is None + spans = self.get_spans() + span = spans[0] + assert span.name == "valkey.command" + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) + def test_operation_name_v1_schema(self): + us = self.r.get("cheese") + assert us is None + spans = self.get_spans() + span = spans[0] + assert span.name == "valkey.command" + + def test_basics(self): + us = self.r.get("cheese") + assert us is None + spans = self.get_spans() + assert len(spans) == 1 + span = spans[0] + self.assert_is_measured(span) + assert span.service == "valkey" + assert span.name == "valkey.command" + assert span.span_type == "valkey" + assert span.error == 0 + assert span.get_metric("out.valkey_db") == 0 + assert span.get_tag("out.host") == "localhost" + assert span.get_tag("valkey.raw_command") == "GET cheese" + assert span.get_tag("component") == "valkey" + assert span.get_tag("span.kind") == "client" + assert span.get_tag("db.system") == "valkey" + assert span.get_metric("valkey.args_length") == 2 + assert span.resource == "GET" + + def test_connection_error(self): + with mock.patch.object( + valkey.connection.ConnectionPool, + "get_connection", + side_effect=valkey.exceptions.ConnectionError("whatever"), + ): + with pytest.raises(valkey.exceptions.ConnectionError): + self.r.get("foo") + + def test_pipeline_traced(self): + with self.r.pipeline(transaction=False) as p: + p.set("blah", 32) + p.rpush("foo", "éé") +
p.hgetall("xxx") + p.execute() + + spans = self.get_spans() + assert len(spans) == 1 + span = spans[0] + self.assert_is_measured(span) + assert span.service == "valkey" + assert span.name == "valkey.command" + assert span.resource == "SET\nRPUSH\nHGETALL" + assert span.span_type == "valkey" + assert span.error == 0 + assert span.get_metric("out.valkey_db") == 0 + assert span.get_tag("out.host") == "localhost" + assert span.get_tag("valkey.raw_command") == "SET blah 32\nRPUSH foo éé\nHGETALL xxx" + assert span.get_tag("component") == "valkey" + assert span.get_tag("span.kind") == "client" + assert span.get_metric("valkey.pipeline_length") == 3 + + def test_pipeline_immediate(self): + with self.r.pipeline() as p: + p.set("a", 1) + p.immediate_execute_command("SET", "a", 1) + p.execute() + + spans = self.get_spans() + assert len(spans) == 2 + span = spans[0] + self.assert_is_measured(span) + assert span.service == "valkey" + assert span.name == "valkey.command" + assert span.resource == "SET" + assert span.span_type == "valkey" + assert span.error == 0 + assert span.get_metric("out.valkey_db") == 0 + assert span.get_tag("out.host") == "localhost" + assert span.get_tag("component") == "valkey" + assert span.get_tag("span.kind") == "client" + + def test_meta_override(self): + r = self.r + pin = Pin.get_from(r) + if pin: + pin._clone(tags={"cheese": "camembert"}).onto(r) + + r.get("cheese") + spans = self.get_spans() + assert len(spans) == 1 + span = spans[0] + assert span.service == "valkey" + assert "cheese" in span.get_tags() and span.get_tag("cheese") == "camembert" + + def test_patch_unpatch(self): + tracer = DummyTracer() + + # Test patch idempotence + patch() + patch() + + r = valkey.Valkey(port=VALKEY_CONFIG["port"]) + Pin.get_from(r)._clone(tracer=tracer).onto(r) + r.get("key") + + spans = tracer.pop() + assert spans, spans + assert len(spans) == 1 + + # Test unpatch + unpatch() + + r = valkey.Valkey(port=VALKEY_CONFIG["port"]) + r.get("key") + + spans = tracer.pop() + assert not spans, spans + + # Test patch again + patch() + + r = valkey.Valkey(port=VALKEY_CONFIG["port"]) + Pin.get_from(r)._clone(tracer=tracer).onto(r) + r.get("key") + + spans = tracer.pop() + assert spans, spans + assert len(spans) == 1 + + def test_opentracing(self): + """Ensure OpenTracing works with valkey.""" + ot_tracer = init_tracer("valkey_svc", self.tracer) + + with ot_tracer.start_active_span("valkey_get"): + us = self.r.get("cheese") + assert us is None + + spans = self.get_spans() + assert len(spans) == 2 + ot_span, dd_span = spans + + # confirm the parenting + assert ot_span.parent_id is None + assert dd_span.parent_id == ot_span.span_id + + assert ot_span.name == "valkey_get" + assert ot_span.service == "valkey_svc" + + self.assert_is_measured(dd_span) + assert dd_span.service == "valkey" + assert dd_span.name == "valkey.command" + assert dd_span.span_type == "valkey" + assert dd_span.error == 0 + assert dd_span.get_metric("out.valkey_db") == 0 + assert dd_span.get_tag("out.host") == "localhost" + assert dd_span.get_tag("valkey.raw_command") == "GET cheese" + assert dd_span.get_tag("component") == "valkey" + assert dd_span.get_tag("span.kind") == "client" + assert dd_span.get_tag("db.system") == "valkey" + assert dd_span.get_metric("valkey.args_length") == 2 + assert dd_span.resource == "GET" + + def test_valkey_rowcount_all_keys_valid(self): + self.r.set("key1", "value1") + + get1 = self.r.get("key1") + + assert get1 == b"value1" + + spans =
self.get_spans() + get_valid_key_span = spans[1] + + assert get_valid_key_span.name == "valkey.command" + assert get_valid_key_span.get_tag("valkey.raw_command") == "GET key1" + assert get_valid_key_span.get_metric("db.row_count") == 1 + + get_commands = ["GET key", "GETEX key", "GETRANGE key 0 2"] + list_get_commands = ["LINDEX lkey 0", "LRANGE lkey 0 3", "RPOP lkey", "LPOP lkey"] + hashing_get_commands = [ + "HGET hkey field1", + "HGETALL hkey", + "HKEYS hkey", + "HMGET hkey field1 field2", + "HRANDFIELD hkey", + "HVALS hkey", + ] + multi_key_get_commands = ["MGET key key2", "MGET key key2 key3", "MGET key key2 key3 key4"] + + for command in get_commands: + self.r.set("key", "value") + self.command_test_rowcount(command, 1) + for command in list_get_commands: + self.r.lpush("lkey", "1", "2", "3", "4", "5") + self.command_test_rowcount(command, 1) + if command == "RPOP lkey": # let's get multiple values from the list and ensure rowcount is still 1 + self.command_test_rowcount(command, 1, count=2) + for command in hashing_get_commands: + self.r.hset("hkey", "field1", "value1") + self.r.hset("hkey", "field2", "value2") + self.command_test_rowcount(command, 1) + for command in multi_key_get_commands: + self.r.mset({"key": "value", "key2": "value2", "key3": "value3", "key4": "value4"}) + self.command_test_rowcount(command, len(command.split(" ")) - 1) + + def test_valkey_rowcount_some_keys_valid(self): + self.r.mset({"key": "value", "key2": "value2"}) + + get_both_valid = self.r.mget("key", "key2") + get_one_missing = self.r.mget("key", "missing_key") + + assert get_both_valid == [b"value", b"value2"] + assert get_one_missing == [b"value", None] + + spans = self.get_spans() + get_both_valid_span = spans[1] + get_one_missing_span = spans[2] + + assert get_both_valid_span.name == "valkey.command" + assert get_both_valid_span.get_tag("valkey.raw_command") == "MGET key key2" + assert get_both_valid_span.get_metric("db.row_count") == 2 + + assert get_one_missing_span.name == "valkey.command" + assert get_one_missing_span.get_tag("valkey.raw_command") == "MGET key missing_key" + assert get_one_missing_span.get_metric("db.row_count") == 1 + + multi_key_get_commands = [ + "MGET key key2", + "MGET key missing_key", + "MGET key key2 missing_key", + "MGET key missing_key missing_key2 key2", + ] + + for command in multi_key_get_commands: + command_keys = command.split(" ")[1:] + self.command_test_rowcount(command, len([key for key in command_keys if "missing_key" not in key])) + + def test_valkey_rowcount_no_keys_valid(self): + get_missing = self.r.get("missing_key") + + assert get_missing is None + + spans = self.get_spans() + get_missing_key_span = spans[0] + + assert get_missing_key_span.name == "valkey.command" + assert get_missing_key_span.get_tag("valkey.raw_command") == "GET missing_key" + assert get_missing_key_span.get_metric("db.row_count") == 0 + + get_commands = ["GET key", "GETDEL key", "GETEX key", "GETRANGE key 0 2"] + list_get_commands = ["LINDEX lkey 0", "LRANGE lkey 0 3", "RPOP lkey", "LPOP lkey"] + hashing_get_commands = [ + "HGET hkey field1", + "HGETALL hkey", + "HKEYS hkey", + "HMGET hkey field1 field2", + "HRANDFIELD hkey", + "HVALS hkey", + ] + multi_key_get_commands = ["MGET key key2", "MGET key key2 key3", "MGET key key2 key3 key4"] + + for command in get_commands: + self.command_test_rowcount(command, 0, expect_result=False) + for command in list_get_commands: + self.command_test_rowcount(command, 0, expect_result=False) + if command == "RPOP lkey": # let's get multiple values
from the list and ensure rowcount is still 0 + self.command_test_rowcount(command, 0, expect_result=False, count=2) + for command in hashing_get_commands: + self.command_test_rowcount(command, 0, expect_result=False) + for command in multi_key_get_commands: + self.command_test_rowcount(command, 0, expect_result=False) + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) + def test_user_specified_service_default(self): + from ddtrace import config + + assert config.service == "mysvc" + + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "valkey" + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) + def test_user_specified_service_v0(self): + from ddtrace import config + + assert config.service == "mysvc" + + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "valkey" + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) + def test_user_specified_service_v1(self): + from ddtrace import config + + assert config.service == "mysvc" + + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "mysvc" + + @TracerTestCase.run_in_subprocess( + env_overrides=dict(DD_VALKEY_SERVICE="myvalkey", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0") + ) + def test_env_user_specified_valkey_service_v0(self): + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "myvalkey", span.service + + self.reset() + + # Global config + with self.override_config("valkey", dict(service="cfg-valkey")): + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "cfg-valkey", span.service + + self.reset() + + # Manual override + Pin._override(self.r, service="mysvc", tracer=self.tracer) + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "mysvc", span.service + + @TracerTestCase.run_in_subprocess( + env_overrides=dict( + DD_SERVICE="app-svc", DD_VALKEY_SERVICE="env-specified-valkey-svc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0" + ) + ) + def test_service_precedence_v0(self): + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "env-specified-valkey-svc", span.service + + self.reset() + + # Do a manual override + Pin._override(self.r, service="override-valkey", tracer=self.tracer) + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "override-valkey", span.service + + +class TestValkeyPatchSnapshot(TracerTestCase): + TEST_PORT = VALKEY_CONFIG["port"] + + def setUp(self): + super(TestValkeyPatchSnapshot, self).setUp() + patch() + r = valkey.Valkey(port=self.TEST_PORT) + self.r = r + + def tearDown(self): + unpatch() + super(TestValkeyPatchSnapshot, self).tearDown() + self.r.flushall() + + @snapshot() + def test_long_command(self): + self.r.mget(*range(1000)) + + @snapshot() + def test_basics(self): + us = self.r.get("cheese") + assert us is None + + @snapshot() + def test_unicode(self): + us = self.r.get("😐") + assert us is None + + @snapshot() + def test_pipeline_traced(self): + with self.r.pipeline(transaction=False) as p: + p.set("blah", 32) + p.rpush("foo", "éé") + p.hgetall("xxx") + p.execute() + + @snapshot() + def test_pipeline_immediate(self): + with self.r.pipeline() as p: + p.set("a", 1) + p.immediate_execute_command("SET", "a", 1) + p.execute() + + @snapshot() + def test_meta_override(self): + r = self.r + pin = Pin.get_from(r) + if pin: + pin._clone(tags={"cheese": "camembert"}).onto(r) + + r.get("cheese") + +
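+ # Unlike the other tests in this snapshot class, test_patch_unpatch below + # does not use @snapshot(): it drives a DummyTracer directly so it can + # assert on span counts while exercising patch()/unpatch() idempotence.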
def test_patch_unpatch(self): + tracer = DummyTracer() + + # Test patch idempotence + patch() + patch() + + r = valkey.Valkey(port=VALKEY_CONFIG["port"]) + Pin.get_from(r)._clone(tracer=tracer).onto(r) + r.get("key") + + spans = tracer.pop() + assert spans, spans + assert len(spans) == 1 + + # Test unpatch + unpatch() + + r = valkey.Valkey(port=VALKEY_CONFIG["port"]) + r.get("key") + + spans = tracer.pop() + assert not spans, spans + + # Test patch again + patch() + + r = valkey.Valkey(port=VALKEY_CONFIG["port"]) + Pin.get_from(r)._clone(tracer=tracer).onto(r) + r.get("key") + + spans = tracer.pop() + assert spans, spans + assert len(spans) == 1 + + @snapshot() + def test_opentracing(self): + """Ensure OpenTracing works with valkey.""" + ot_tracer = init_tracer("valkey_svc", ddtrace.tracer) + + with ot_tracer.start_active_span("valkey_get"): + us = self.r.get("cheese") + assert us is None + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) + @snapshot() + def test_user_specified_service(self): + from ddtrace import config + + assert config.service == "mysvc" + + self.r.get("cheese") + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_VALKEY_SERVICE="myvalkey")) + @snapshot() + def test_env_user_specified_valkey_service(self): + self.r.get("cheese") + + self.reset() + + # Global config + with self.override_config("valkey", dict(service="cfg-valkey")): + self.r.get("cheese") + + self.reset() + + # Manual override + Pin._override(self.r, service="mysvc", tracer=self.tracer) + self.r.get("cheese") + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="app-svc", DD_VALKEY_SERVICE="env-valkey")) + @snapshot() + def test_service_precedence(self): + self.r.get("cheese") + + self.reset() + + # Do a manual override + Pin._override(self.r, service="override-valkey", tracer=self.tracer) + self.r.get("cheese") + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_VALKEY_CMD_MAX_LENGTH="10")) + @snapshot() + def test_custom_cmd_length_env(self): + self.r.get("here-is-a-long-key-name") + + @snapshot() + def test_custom_cmd_length(self): + with self.override_config("valkey", dict(cmd_max_length=7)): + self.r.get("here-is-a-long-key-name") + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_VALKEY_RESOURCE_ONLY_COMMAND="false")) + @snapshot() + def test_full_command_in_resource_env(self): + self.r.get("put_key_in_resource") + p = self.r.pipeline(transaction=False) + p.set("pipeline-cmd1", 1) + p.set("pipeline-cmd2", 2) + p.execute() + + @snapshot() + def test_full_command_in_resource_config(self): + with self.override_config("valkey", dict(resource_only_command=False)): + self.r.get("put_key_in_resource") + p = self.r.pipeline(transaction=False) + p.set("pipeline-cmd1", 1) + p.set("pipeline-cmd2", 2) + p.execute() diff --git a/tests/contrib/valkey/test_valkey_asyncio.py b/tests/contrib/valkey/test_valkey_asyncio.py new file mode 100644 index 00000000000..cce88912d65 --- /dev/null +++ b/tests/contrib/valkey/test_valkey_asyncio.py @@ -0,0 +1,221 @@ +import asyncio +import typing +from unittest import mock + +import pytest +import valkey +import valkey.asyncio +from wrapt import ObjectProxy + +from ddtrace import tracer +from ddtrace.contrib.internal.valkey.patch import patch +from ddtrace.contrib.internal.valkey.patch import unpatch +from ddtrace.trace import Pin +from tests.utils import override_config + +from ..config import VALKEY_CONFIG + + +def get_valkey_instance(max_connections: int, client_name: typing.Optional[str] = None): + return 
valkey.asyncio.from_url( + "valkey://127.0.0.1:%s" % VALKEY_CONFIG["port"], max_connections=max_connections, client_name=client_name + ) + + +@pytest.fixture +def valkey_client(): + r = get_valkey_instance(max_connections=10) # default values + yield r + + +@pytest.fixture +def single_pool_valkey_client(): + r = get_valkey_instance(max_connections=1) + yield r + + +@pytest.fixture(autouse=True) +async def traced_valkey(valkey_client): + await valkey_client.flushall() + + patch() + try: + yield + finally: + unpatch() + await valkey_client.flushall() + + +def test_patching(): + """ + When patching valkey library + We wrap the correct methods + When unpatching valkey library + We unwrap the correct methods + """ + assert isinstance(valkey.asyncio.client.Valkey.execute_command, ObjectProxy) + assert isinstance(valkey.asyncio.client.Valkey.pipeline, ObjectProxy) + assert isinstance(valkey.asyncio.client.Pipeline.pipeline, ObjectProxy) + unpatch() + assert not isinstance(valkey.asyncio.client.Valkey.execute_command, ObjectProxy) + assert not isinstance(valkey.asyncio.client.Valkey.pipeline, ObjectProxy) + assert not isinstance(valkey.asyncio.client.Pipeline.pipeline, ObjectProxy) + + +@pytest.mark.snapshot(wait_for_num_traces=1) +async def test_basic_request(valkey_client): + val = await valkey_client.get("cheese") + assert val is None + + +@pytest.mark.snapshot(wait_for_num_traces=1) +async def test_unicode_request(valkey_client): + val = await valkey_client.get("😐") + assert val is None + + +@pytest.mark.snapshot(wait_for_num_traces=1, ignores=["meta.error.stack"]) +async def test_connection_error(valkey_client): + with mock.patch.object( + valkey.asyncio.connection.ConnectionPool, + "get_connection", + side_effect=valkey.exceptions.ConnectionError("whatever"), + ): + with pytest.raises(valkey.exceptions.ConnectionError): + await valkey_client.get("foo") + + +@pytest.mark.snapshot(wait_for_num_traces=2) +async def test_decoding_non_utf8_args(valkey_client): + await valkey_client.set(b"\x80foo", b"\x80abc") + val = await valkey_client.get(b"\x80foo") + assert val == b"\x80abc" + + +@pytest.mark.snapshot(wait_for_num_traces=1) +async def test_decoding_non_utf8_pipeline_args(valkey_client): + p = valkey_client.pipeline() + p.set(b"\x80blah", "boo") + p.set("foo", b"\x80abc") + p.get(b"\x80blah") + p.get("foo") + + response_list = await p.execute() + assert response_list[0] is True # response from valkey.set is OK if successfully pushed + assert response_list[1] is True + assert response_list[2].decode() == "boo" + assert response_list[3] == b"\x80abc" + + +@pytest.mark.snapshot(wait_for_num_traces=1) +async def test_long_command(valkey_client): + length = 1000 + val_list = await valkey_client.mget(*range(length)) + assert len(val_list) == length + for val in val_list: + assert val is None + + +@pytest.mark.snapshot(wait_for_num_traces=3) +async def test_override_service_name(valkey_client): + with override_config("valkey", dict(service_name="myvalkey")): + val = await valkey_client.get("cheese") + assert val is None + await valkey_client.set("cheese", "my-cheese") + val = await valkey_client.get("cheese") + if isinstance(val, bytes): + val = val.decode() + assert val == "my-cheese" + + +@pytest.mark.snapshot(wait_for_num_traces=1) +async def test_pin(valkey_client): + Pin._override(valkey_client, service="my-valkey") + val = await valkey_client.get("cheese") + assert val is None + + +@pytest.mark.snapshot(wait_for_num_traces=1) +async def test_pipeline_traced(valkey_client): + p = 
valkey_client.pipeline(transaction=False) + p.set("blah", "boo") + p.set("foo", "bar") + p.get("blah") + p.get("foo") + + response_list = await p.execute() + assert response_list[0] is True # response from valkey.set is OK if successfully pushed + assert response_list[1] is True + assert ( + response_list[2].decode() == "boo" + ) # response from get is the value previously stored under the key + assert response_list[3].decode() == "bar" + + +@pytest.mark.snapshot(wait_for_num_traces=1) +async def test_pipeline_traced_context_manager_transaction(valkey_client): + """ + Regression test for: https://github.com/DataDog/dd-trace-py/issues/3106 + + Example:: + + async def main(): + valkey = await valkey.from_url("valkey://localhost") + async with valkey.pipeline(transaction=True) as pipe: + ok1, ok2 = await (pipe.set("key1", "value1").set("key2", "value2").execute()) + assert ok1 + assert ok2 + """ + + async with valkey_client.pipeline(transaction=True) as p: + set_1, set_2, get_1, get_2 = await p.set("blah", "boo").set("foo", "bar").get("blah").get("foo").execute() + + # response from valkey.set is OK if successfully pushed + assert set_1 is True + assert set_2 is True + assert get_1.decode() == "boo" + assert get_2.decode() == "bar" + + +@pytest.mark.snapshot(wait_for_num_traces=1) +async def test_two_traced_pipelines(valkey_client): + with tracer.trace("web-request", service="test"): + p1 = await valkey_client.pipeline(transaction=False) + p2 = await valkey_client.pipeline(transaction=False) + await p1.set("blah", "boo") + await p2.set("foo", "bar") + await p1.get("blah") + await p2.get("foo") + + response_list1 = await p1.execute() + response_list2 = await p2.execute() + + assert response_list1[0] is True # response from valkey.set is OK if successfully pushed + assert response_list2[0] is True + assert ( + response_list1[1].decode() == "boo" + ) # response from get is the value previously stored under the key
+ assert response_list2[1].decode() == "bar" + + +async def test_parenting(valkey_client, snapshot_context): + with snapshot_context(wait_for_num_traces=1): + with tracer.trace("web-request", service="test"): + await valkey_client.set("blah", "boo") + await valkey_client.get("blah") + + +async def test_client_name(snapshot_context): + with snapshot_context(wait_for_num_traces=1): + with tracer.trace("web-request", service="test"): + valkey_client = get_valkey_instance(10, client_name="testing-client-name") + await valkey_client.get("blah") + + +@pytest.mark.asyncio +async def test_asyncio_task_cancelled(valkey_client): + with mock.patch.object( + valkey.asyncio.connection.ConnectionPool, "get_connection", side_effect=asyncio.CancelledError + ): + with pytest.raises(asyncio.CancelledError): + await valkey_client.get("foo") diff --git a/tests/contrib/valkey/test_valkey_cluster.py b/tests/contrib/valkey/test_valkey_cluster.py new file mode 100644 index 00000000000..bd765dbff96 --- /dev/null +++ b/tests/contrib/valkey/test_valkey_cluster.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 -*- +import valkey + +from ddtrace.contrib.internal.valkey.patch import patch +from ddtrace.contrib.internal.valkey.patch import unpatch +from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin +from tests.contrib.config import VALKEY_CLUSTER_CONFIG +from tests.utils import DummyTracer +from tests.utils import TracerTestCase +from tests.utils import assert_is_measured + + +class TestValkeyClusterPatch(TracerTestCase): + TEST_HOST = VALKEY_CLUSTER_CONFIG["host"] + TEST_PORTS = VALKEY_CLUSTER_CONFIG["ports"] + + def _get_test_client(self): + startup_nodes = [valkey.cluster.ClusterNode(self.TEST_HOST, int(port)) for port in self.TEST_PORTS.split(",")] + return valkey.cluster.ValkeyCluster(startup_nodes=startup_nodes) + + def setUp(self): + super(TestValkeyClusterPatch, self).setUp() + patch() + r = self._get_test_client() + r.flushall() + Pin._override(r, tracer=self.tracer) + self.r = r + + def tearDown(self): + unpatch() + super(TestValkeyClusterPatch, self).tearDown() + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) + def test_span_service_name_v1(self): + us = self.r.get("cheese") + assert us is None + spans = self.get_spans() + span = spans[0] + assert span.service == DEFAULT_SPAN_SERVICE_NAME + + def test_basics(self): + us = self.r.get("cheese") + assert us is None + spans = self.get_spans() + assert len(spans) == 1 + span = spans[0] + assert_is_measured(span) + assert span.service == "valkey" + assert span.name == "valkey.command" + assert span.span_type == "valkey" + assert span.error == 0 + assert span.get_tag("valkey.raw_command") == "GET cheese" + assert span.get_tag("component") == "valkey" + assert span.get_tag("db.system") == "valkey" + assert span.get_metric("valkey.args_length") == 2 + assert span.resource == "GET" + + def test_unicode(self): + us = self.r.get("😐") + assert us is None + spans = self.get_spans() + assert len(spans) == 1 + span = spans[0] + assert_is_measured(span) + assert span.service == "valkey" + assert span.name == "valkey.command" + assert span.span_type == "valkey" + assert span.error == 0 + assert span.get_tag("valkey.raw_command") == "GET 😐" + assert span.get_tag("component") == "valkey" + assert span.get_tag("db.system") == "valkey" + assert span.get_metric("valkey.args_length") == 2 + assert span.resource == "GET" + + def test_pipeline(self): + with self.r.pipeline(transaction=False) as p: + 
p.set("blah", 32) + p.rpush("foo", "éé") + p.hgetall("xxx") + p.execute() + + spans = self.get_spans() + assert len(spans) == 1 + span = spans[0] + assert_is_measured(span) + assert span.service == "valkey" + assert span.name == "valkey.command" + assert span.resource == "SET\nRPUSH\nHGETALL" + assert span.span_type == "valkey" + assert span.error == 0 + assert span.get_tag("valkey.raw_command") == "SET blah 32\nRPUSH foo éé\nHGETALL xxx" + assert span.get_tag("component") == "valkey" + assert span.get_metric("valkey.pipeline_length") == 3 + + def test_patch_unpatch(self): + tracer = DummyTracer() + + # Test patch idempotence + patch() + patch() + + r = self._get_test_client() + Pin.get_from(r)._clone(tracer=tracer).onto(r) + r.get("key") + + spans = tracer.pop() + assert spans, spans + assert len(spans) == 1 + + # Test unpatch + unpatch() + + r = self._get_test_client() + r.get("key") + + spans = tracer.pop() + assert not spans, spans + + # Test patch again + patch() + + r = self._get_test_client() + Pin.get_from(r)._clone(tracer=tracer).onto(r) + r.get("key") + + spans = tracer.pop() + assert spans, spans + assert len(spans) == 1 + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) + def test_user_specified_service_v0(self): + """ + When a user specifies a service for the app + The valkeycluster integration should not use it. + """ + # Ensure that the service name was configured + from ddtrace import config + + assert config.service == "mysvc" + + r = self._get_test_client() + Pin.get_from(r)._clone(tracer=self.tracer).onto(r) + r.get("key") + + spans = self.get_spans() + assert len(spans) == 1 + span = spans[0] + assert span.service != "mysvc" + + @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) + def test_user_specified_service_v1(self): + """ + When a user specifies a service for the app + The valkeycluster integration should use it. 
+ """ + # Ensure that the service name was configured + from ddtrace import config + + assert config.service == "mysvc" + + r = self._get_test_client() + Pin.get_from(r)._clone(tracer=self.tracer).onto(r) + r.get("key") + + spans = self.get_spans() + assert len(spans) == 1 + span = spans[0] + assert span.service == "mysvc" + + @TracerTestCase.run_in_subprocess( + env_overrides=dict(DD_VALKEY_SERVICE="myvalkeycluster", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0") + ) + def test_env_user_specified_valkeycluster_service_v0(self): + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "myvalkeycluster", span.service + + @TracerTestCase.run_in_subprocess( + env_overrides=dict(DD_VALKEY_SERVICE="myvalkeycluster", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1") + ) + def test_env_user_specified_valkeycluster_service_v1(self): + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "myvalkeycluster", span.service + + @TracerTestCase.run_in_subprocess( + env_overrides=dict( + DD_SERVICE="app-svc", DD_VALKEY_SERVICE="myvalkeycluster", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0" + ) + ) + def test_service_precedence_v0(self): + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "myvalkeycluster" + + self.reset() + + @TracerTestCase.run_in_subprocess( + env_overrides=dict( + DD_SERVICE="app-svc", DD_VALKEY_SERVICE="myvalkeycluster", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1" + ) + ) + def test_service_precedence_v1(self): + self.r.get("cheese") + span = self.get_spans()[0] + assert span.service == "myvalkeycluster" + + self.reset() diff --git a/tests/contrib/valkey/test_valkey_cluster_asyncio.py b/tests/contrib/valkey/test_valkey_cluster_asyncio.py new file mode 100644 index 00000000000..a5e4db415b0 --- /dev/null +++ b/tests/contrib/valkey/test_valkey_cluster_asyncio.py @@ -0,0 +1,457 @@ +# -*- coding: utf-8 -*- +import pytest +import valkey + +from ddtrace.contrib.internal.valkey.patch import patch +from ddtrace.contrib.internal.valkey.patch import unpatch +from ddtrace.trace import Pin +from tests.contrib.config import VALKEY_CLUSTER_CONFIG +from tests.utils import DummyTracer +from tests.utils import assert_is_measured + + +TEST_HOST = VALKEY_CLUSTER_CONFIG["host"] +TEST_PORTS = VALKEY_CLUSTER_CONFIG["ports"] + + +@pytest.mark.asyncio +@pytest.fixture +async def valkey_cluster(): + startup_nodes = [valkey.asyncio.cluster.ClusterNode(TEST_HOST, int(port)) for port in TEST_PORTS.split(",")] + yield valkey.asyncio.cluster.ValkeyCluster(startup_nodes=startup_nodes) + + +@pytest.mark.asyncio +@pytest.fixture +async def traced_valkey_cluster(tracer, test_spans): + patch() + startup_nodes = [valkey.asyncio.cluster.ClusterNode(TEST_HOST, int(port)) for port in TEST_PORTS.split(",")] + valkey_cluster = valkey.asyncio.cluster.ValkeyCluster(startup_nodes=startup_nodes) + await valkey_cluster.flushall() + Pin._override(valkey_cluster, tracer=tracer) + try: + yield valkey_cluster, test_spans + finally: + unpatch() + await valkey_cluster.flushall() + + +@pytest.mark.asyncio +async def test_basics(traced_valkey_cluster): + cluster, test_spans = traced_valkey_cluster + us = await cluster.get("cheese") + assert us is None + + traces = test_spans.pop_traces() + assert len(traces) == 1 + spans = traces[0] + assert len(spans) == 1 + + span = spans[0] + assert_is_measured(span) + assert span.service == "valkey" + assert span.name == "valkey.command" + assert span.span_type == "valkey" + assert span.error == 0 + assert span.get_tag("valkey.raw_command") == "GET cheese" + assert 
span.get_tag("component") == "valkey" + assert span.get_tag("db.system") == "valkey" + assert span.get_metric("valkey.args_length") == 2 + assert span.resource == "GET" + + +@pytest.mark.asyncio +async def test_unicode(traced_valkey_cluster): + cluster, test_spans = traced_valkey_cluster + us = await cluster.get("😐") + assert us is None + + traces = test_spans.pop_traces() + assert len(traces) == 1 + spans = traces[0] + assert len(spans) == 1 + + span = spans[0] + assert_is_measured(span) + assert span.service == "valkey" + assert span.name == "valkey.command" + assert span.span_type == "valkey" + assert span.error == 0 + assert span.get_tag("valkey.raw_command") == "GET 😐" + assert span.get_tag("component") == "valkey" + assert span.get_tag("db.system") == "valkey" + assert span.get_metric("valkey.args_length") == 2 + assert span.resource == "GET" + + +@pytest.mark.asyncio +async def test_pipeline(traced_valkey_cluster): + cluster, test_spans = traced_valkey_cluster + async with cluster.pipeline(transaction=False) as p: + p.set("blah", 32) + p.rpush("foo", "éé") + p.hgetall("xxx") + await p.execute() + + traces = test_spans.pop_traces() + assert len(traces) == 1 + spans = traces[0] + assert len(spans) == 1 + + span = spans[0] + assert_is_measured(span) + assert span.service == "valkey" + assert span.name == "valkey.command" + assert span.resource == "SET\nRPUSH\nHGETALL" + assert span.span_type == "valkey" + assert span.error == 0 + assert span.get_tag("valkey.raw_command") == "SET blah 32\nRPUSH foo éé\nHGETALL xxx" + assert span.get_tag("component") == "valkey" + assert span.get_metric("valkey.pipeline_length") == 3 + + +@pytest.mark.asyncio +async def test_patch_unpatch(valkey_cluster): + tracer = DummyTracer() + + # Test patch idempotence + patch() + patch() + + r = valkey_cluster + Pin._override(r, tracer=tracer) + await r.get("key") + + spans = tracer.pop() + assert spans, spans + assert len(spans) == 1 + + # Test unpatch + unpatch() + + r = valkey_cluster + await r.get("key") + + spans = tracer.pop() + assert not spans, spans + + # Test patch again + patch() + + r = valkey_cluster + Pin._override(r, tracer=tracer) + await r.get("key") + + spans = tracer.pop() + assert spans, spans + assert len(spans) == 1 + unpatch() + + +@pytest.mark.subprocess( + env=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1"), + err=None, # avoid checking stderr because of an expected deprecation warning +) +def test_default_service_name_v1(): + import asyncio + + import valkey + + from ddtrace.contrib.internal.valkey.patch import patch + from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME + from ddtrace.trace import Pin + from tests.contrib.config import VALKEY_CLUSTER_CONFIG + from tests.utils import DummyTracer + from tests.utils import TracerSpanContainer + + patch() + + async def test(): + startup_nodes = [ + valkey.asyncio.cluster.ClusterNode(VALKEY_CLUSTER_CONFIG["host"], int(port)) + for port in VALKEY_CLUSTER_CONFIG["ports"].split(",") + ] + r = valkey.asyncio.cluster.ValkeyCluster(startup_nodes=startup_nodes) + tracer = DummyTracer() + test_spans = TracerSpanContainer(tracer) + + Pin.get_from(r)._clone(tracer=tracer).onto(r) + await r.get("key") + await r.close() + + traces = test_spans.pop_traces() + assert len(traces) == 1 + spans = traces[0] + assert len(spans) == 1 + span = spans[0] + assert span.service == DEFAULT_SPAN_SERVICE_NAME + + asyncio.run(test()) + + +@pytest.mark.subprocess( + env=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0"), + err=None, # avoid checking stderr 
because of an expected deprecation warning +) +def test_user_specified_service_v0(): + """ + When a user specifies a service for the app + The valkeycluster integration should not use it. + """ + import asyncio + + import valkey + + from ddtrace import config + from ddtrace.contrib.internal.valkey.patch import patch + from ddtrace.trace import Pin + from tests.contrib.config import VALKEY_CLUSTER_CONFIG + from tests.utils import DummyTracer + from tests.utils import TracerSpanContainer + + patch() + + async def test(): + # Ensure that the service name was configured + assert config.service == "mysvc" + + startup_nodes = [ + valkey.asyncio.cluster.ClusterNode(VALKEY_CLUSTER_CONFIG["host"], int(port)) + for port in VALKEY_CLUSTER_CONFIG["ports"].split(",") + ] + r = valkey.asyncio.cluster.ValkeyCluster(startup_nodes=startup_nodes) + tracer = DummyTracer() + test_spans = TracerSpanContainer(tracer) + + Pin.get_from(r)._clone(tracer=tracer).onto(r) + await r.get("key") + await r.close() + + traces = test_spans.pop_traces() + assert len(traces) == 1 + spans = traces[0] + assert len(spans) == 1 + span = spans[0] + assert span.service != "mysvc" + + asyncio.run(test()) + + +@pytest.mark.subprocess( + env=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1"), + err=None, # avoid checking stderr because of an expected deprecation warning +) +def test_user_specified_service_v1(): + """ + When a user specifies a service for the app + The valkeycluster integration should use it. + """ + import asyncio + + import valkey + + from ddtrace import config + from ddtrace.contrib.internal.valkey.patch import patch + from ddtrace.trace import Pin + from tests.contrib.config import VALKEY_CLUSTER_CONFIG + from tests.utils import DummyTracer + from tests.utils import TracerSpanContainer + + patch() + + async def test(): + # Ensure that the service name was configured + assert config.service == "mysvc" + + startup_nodes = [ + valkey.asyncio.cluster.ClusterNode(VALKEY_CLUSTER_CONFIG["host"], int(port)) + for port in VALKEY_CLUSTER_CONFIG["ports"].split(",") + ] + r = valkey.asyncio.cluster.ValkeyCluster(startup_nodes=startup_nodes) + tracer = DummyTracer() + test_spans = TracerSpanContainer(tracer) + + Pin.get_from(r)._clone(tracer=tracer).onto(r) + await r.get("key") + await r.close() + + traces = test_spans.pop_traces() + assert len(traces) == 1 + spans = traces[0] + assert len(spans) == 1 + span = spans[0] + assert span.service == "mysvc" + + asyncio.run(test()) + + +@pytest.mark.subprocess( + env=dict(DD_VALKEY_SERVICE="myvalkeycluster", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0"), + err=None, # avoid checking stderr because of an expected deprecation warning +) +def test_env_user_specified_valkeycluster_service_v0(): + import asyncio + + import valkey + + from ddtrace.contrib.internal.valkey.patch import patch + from ddtrace.trace import Pin + from tests.contrib.config import VALKEY_CLUSTER_CONFIG + from tests.utils import DummyTracer + from tests.utils import TracerSpanContainer + + patch() + + async def test(): + startup_nodes = [ + valkey.asyncio.cluster.ClusterNode(VALKEY_CLUSTER_CONFIG["host"], int(port)) + for port in VALKEY_CLUSTER_CONFIG["ports"].split(",") + ] + r = valkey.asyncio.cluster.ValkeyCluster(startup_nodes=startup_nodes) + tracer = DummyTracer() + test_spans = TracerSpanContainer(tracer) + + Pin.get_from(r)._clone(tracer=tracer).onto(r) + await r.get("key") + await r.close() + + traces = test_spans.pop_traces() + assert len(traces) == 1 + spans = traces[0] + assert len(spans) == 1 +
span = spans[0] + assert span.service == "myvalkeycluster" + + asyncio.run(test()) + + +@pytest.mark.subprocess( + env=dict(DD_VALKEY_SERVICE="myvalkeycluster", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1"), + err=None, # avoid checking stderr because of an expected deprecation warning +) +def test_env_user_specified_valkeycluster_service_v1(): + import asyncio + + import valkey + + from ddtrace.contrib.internal.valkey.patch import patch + from ddtrace.trace import Pin + from tests.contrib.config import VALKEY_CLUSTER_CONFIG + from tests.utils import DummyTracer + from tests.utils import TracerSpanContainer + + patch() + + async def test(): + startup_nodes = [ + valkey.asyncio.cluster.ClusterNode(VALKEY_CLUSTER_CONFIG["host"], int(port)) + for port in VALKEY_CLUSTER_CONFIG["ports"].split(",") + ] + r = valkey.asyncio.cluster.ValkeyCluster(startup_nodes=startup_nodes) + tracer = DummyTracer() + test_spans = TracerSpanContainer(tracer) + + Pin.get_from(r)._clone(tracer=tracer).onto(r) + await r.get("key") + await r.close() + + traces = test_spans.pop_traces() + assert len(traces) == 1 + spans = traces[0] + assert len(spans) == 1 + span = spans[0] + assert span.service == "myvalkeycluster" + + asyncio.run(test()) + + +@pytest.mark.subprocess( + env=dict( + DD_SERVICE="mysvc", + DD_VALKEY_SERVICE="myvalkeycluster", + DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0", + ), + err=None, # avoid checking stderr because of an expected deprecation warning +) +def test_service_precedence_v0(): + import asyncio + + import valkey + + from ddtrace import config + from ddtrace.contrib.internal.valkey.patch import patch + from ddtrace.trace import Pin + from tests.contrib.config import VALKEY_CLUSTER_CONFIG + from tests.utils import DummyTracer + from tests.utils import TracerSpanContainer + + patch() + + async def test(): + # Ensure that the service name was configured + assert config.service == "mysvc" + + startup_nodes = [ + valkey.asyncio.cluster.ClusterNode(VALKEY_CLUSTER_CONFIG["host"], int(port)) + for port in VALKEY_CLUSTER_CONFIG["ports"].split(",") + ] + r = valkey.asyncio.cluster.ValkeyCluster(startup_nodes=startup_nodes) + tracer = DummyTracer() + test_spans = TracerSpanContainer(tracer) + + Pin.get_from(r)._clone(tracer=tracer).onto(r) + await r.get("key") + await r.close() + + traces = test_spans.pop_traces() + assert len(traces) == 1 + spans = traces[0] + assert len(spans) == 1 + span = spans[0] + assert span.service == "myvalkeycluster" + + asyncio.run(test()) + + +@pytest.mark.subprocess( + env=dict(DD_SERVICE="mysvc", DD_VALKEY_SERVICE="myvalkeycluster", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1"), + err=None, # avoid checking stderr because of an expected deprecation warning +) +def test_service_precedence_v1(): + import asyncio + + import valkey + + from ddtrace import config + from ddtrace.contrib.internal.valkey.patch import patch + from ddtrace.trace import Pin + from tests.contrib.config import VALKEY_CLUSTER_CONFIG + from tests.utils import DummyTracer + from tests.utils import TracerSpanContainer + + patch() + + async def test(): + # Ensure that the service name was configured + assert config.service == "mysvc" + + startup_nodes = [ + valkey.asyncio.cluster.ClusterNode(VALKEY_CLUSTER_CONFIG["host"], int(port)) + for port in VALKEY_CLUSTER_CONFIG["ports"].split(",") + ] + r = valkey.asyncio.cluster.ValkeyCluster(startup_nodes=startup_nodes) + tracer = DummyTracer() + test_spans = TracerSpanContainer(tracer) + + Pin.get_from(r)._clone(tracer=tracer).onto(r) + await r.get("key") + await r.close() + + traces
= test_spans.pop_traces() + assert len(traces) == 1 + spans = traces[0] + assert len(spans) == 1 + span = spans[0] + assert span.service == "myvalkeycluster" + + asyncio.run(test()) diff --git a/tests/contrib/valkey/test_valkey_patch.py b/tests/contrib/valkey/test_valkey_patch.py new file mode 100644 index 00000000000..320d2b82b6a --- /dev/null +++ b/tests/contrib/valkey/test_valkey_patch.py @@ -0,0 +1,31 @@ +# This test script was automatically generated by the contrib-patch-tests.py +# script. If you want to make changes to it, you should make sure that you have +# removed the ``_generated`` suffix from the file name, to prevent the content +# from being overwritten by future re-generations. + +from ddtrace.contrib.internal.valkey.patch import get_version +from ddtrace.contrib.internal.valkey.patch import patch + + +try: + from ddtrace.contrib.internal.valkey.patch import unpatch +except ImportError: + unpatch = None +from tests.contrib.patch import PatchTestCase + + +class TestValkeyPatch(PatchTestCase.Base): + __integration_name__ = "valkey" + __module_name__ = "valkey" + __patch_func__ = patch + __unpatch_func__ = unpatch + __get_version__ = get_version + + def assert_module_patched(self, valkey): + pass + + def assert_not_module_patched(self, valkey): + pass + + def assert_not_module_double_patched(self, valkey): + pass diff --git a/tests/contrib/vertexai/conftest.py b/tests/contrib/vertexai/conftest.py index 0b2b976b610..173551a219a 100644 --- a/tests/contrib/vertexai/conftest.py +++ b/tests/contrib/vertexai/conftest.py @@ -43,7 +43,7 @@ def mock_tracer(ddtrace_global_config, vertexai): try: pin = Pin.get_from(vertexai) mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) - pin.override(vertexai, tracer=mock_tracer) + pin._override(vertexai, tracer=mock_tracer) pin.tracer._configure() if ddtrace_global_config.get("_llmobs_enabled", False): # Have to disable and re-enable LLMObs to use the mock tracer. 
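Aside: a recurring mechanical change in the test diffs above and below is the migration from the public Pin.override/Pin.clone helpers to their underscored internal counterparts, Pin._override and Pin._clone. A minimal sketch of the resulting pattern, assuming any integration-patched client object (the make_traced_client name here is hypothetical) and the DummyTracer helper these tests already import:

from ddtrace.trace import Pin
from tests.utils import DummyTracer

tracer = DummyTracer()
client = make_traced_client()  # hypothetical: any client patched by a ddtrace integration

# Attach a test tracer (and optionally a service name) to the client in one call.
Pin._override(client, service="my-test-svc", tracer=tracer)

# Equivalently, clone the existing pin with extra tags and re-attach it.
pin = Pin.get_from(client)
if pin:
    pin._clone(tags={"cheese": "camembert"}).onto(client)

Both forms appear throughout the valkey, vertica, and yaaredis test changes in this patch.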
diff --git a/tests/contrib/vertica/test_vertica.py b/tests/contrib/vertica/test_vertica.py index d3fb9709bea..2c0d554c01a 100644 --- a/tests/contrib/vertica/test_vertica.py +++ b/tests/contrib/vertica/test_vertica.py @@ -130,7 +130,7 @@ def test_configuration_service_name(self): conn = vertica_python.connect(**VERTICA_CONFIG) cur = conn.cursor() - Pin.override(cur, tracer=test_tracer) + Pin._override(cur, tracer=test_tracer) with conn: cur.execute("DROP TABLE IF EXISTS {}".format(TEST_TABLE)) spans = test_tracer.pop() @@ -163,7 +163,7 @@ def test_configuration_routine(self): test_tracer = DummyTracer() conn = vertica_python.connect(**VERTICA_CONFIG) - Pin.override(conn, service="mycustomservice", tracer=test_tracer) + Pin._override(conn, service="mycustomservice", tracer=test_tracer) conn.cursor() # should be traced now conn.close() spans = test_tracer.pop() @@ -175,7 +175,7 @@ def test_execute_metadata(self): """Metadata related to an `execute` call should be captured.""" conn, cur = self.test_conn - Pin.override(cur, tracer=self.test_tracer) + Pin._override(cur, tracer=self.test_tracer) with conn: cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) @@ -206,7 +206,7 @@ def test_cursor_override(self): """Test overriding the tracer with our own.""" conn, cur = self.test_conn - Pin.override(cur, tracer=self.test_tracer) + Pin._override(cur, tracer=self.test_tracer) with conn: cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) @@ -403,7 +403,7 @@ def test_user_specified_service_default(self): assert config.service == "mysvc" conn, cur = self.test_conn - Pin.override(cur, tracer=self.test_tracer) + Pin._override(cur, tracer=self.test_tracer) with conn: cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) cur.execute("SELECT * FROM {};".format(TEST_TABLE)) @@ -427,7 +427,7 @@ def test_user_specified_service_v0(self): assert config.service == "mysvc" conn, cur = self.test_conn - Pin.override(cur, tracer=self.test_tracer) + Pin._override(cur, tracer=self.test_tracer) with conn: cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) cur.execute("SELECT * FROM {};".format(TEST_TABLE)) @@ -451,7 +451,7 @@ def test_user_specified_service_v1(self): assert config.service == "mysvc" conn, cur = self.test_conn - Pin.override(cur, tracer=self.test_tracer) + Pin._override(cur, tracer=self.test_tracer) with conn: cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) cur.execute("SELECT * FROM {};".format(TEST_TABLE)) @@ -469,7 +469,7 @@ def test_unspecified_service_v0(self): should result in the default DD_SERVICE the span service """ conn, cur = self.test_conn - Pin.override(cur, tracer=self.test_tracer) + Pin._override(cur, tracer=self.test_tracer) with conn: cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) cur.execute("SELECT * FROM {};".format(TEST_TABLE)) @@ -487,7 +487,7 @@ def test_unspecified_service_v1(self): should result in the default DD_SERVICE the span service """ conn, cur = self.test_conn - Pin.override(cur, tracer=self.test_tracer) + Pin._override(cur, tracer=self.test_tracer) with conn: cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) cur.execute("SELECT * FROM {};".format(TEST_TABLE)) diff --git a/tests/contrib/yaaredis/test_yaaredis.py b/tests/contrib/yaaredis/test_yaaredis.py index 350b323de9c..df064817aef 100644 --- a/tests/contrib/yaaredis/test_yaaredis.py +++ b/tests/contrib/yaaredis/test_yaaredis.py @@ -115,7 +115,7 @@ async def 
test_pipeline_immediate(snapshot_context, traced_yaaredis): async def test_meta_override(tracer, test_spans, traced_yaaredis): pin = Pin.get_from(traced_yaaredis) assert pin is not None - pin.clone(tags={"cheese": "camembert"}, tracer=tracer).onto(traced_yaaredis) + pin._clone(tags={"cheese": "camembert"}, tracer=tracer).onto(traced_yaaredis) await traced_yaaredis.get("cheese") test_spans.assert_trace_count(1) @@ -130,7 +130,7 @@ async def test_meta_override(tracer, test_spans, traced_yaaredis): @pytest.mark.asyncio async def test_service_name(tracer, test_spans, traced_yaaredis): service = str(uuid.uuid4()) - Pin.override(traced_yaaredis, service=service, tracer=tracer) + Pin._override(traced_yaaredis, service=service, tracer=tracer) await traced_yaaredis.set("cheese", "1") test_spans.assert_trace_count(1) @@ -142,7 +142,7 @@ async def test_service_name(tracer, test_spans, traced_yaaredis): async def test_service_name_config(tracer, test_spans, traced_yaaredis): service = str(uuid.uuid4()) with override_config("yaaredis", dict(service=service)): - Pin.override(traced_yaaredis, tracer=tracer) + Pin._override(traced_yaaredis, tracer=tracer) await traced_yaaredis.set("cheese", "1") test_spans.assert_trace_count(1) test_spans.assert_span_count(1) diff --git a/tests/integration/test_debug.py b/tests/integration/test_debug.py index f5453f353fe..b2f973b1a48 100644 --- a/tests/integration/test_debug.py +++ b/tests/integration/test_debug.py @@ -3,8 +3,6 @@ import os import re import subprocess -from typing import List -from typing import Optional import mock import pytest @@ -13,11 +11,10 @@ import ddtrace._trace.sampler from ddtrace.internal import debug from ddtrace.internal.writer import AgentWriter -from ddtrace.internal.writer import TraceWriter -from ddtrace.trace import Span from tests.integration.utils import AGENT_VERSION from tests.subprocesstest import SubprocessTestCase from tests.subprocesstest import run_in_subprocess +from tests.utils import DummyTracer pytestmark = pytest.mark.skipif(AGENT_VERSION == "testagent", reason="The test agent doesn't support startup logs.") @@ -36,7 +33,6 @@ def __eq__(self, other): @pytest.mark.subprocess() def test_standard_tags(): from datetime import datetime - import sys import ddtrace from ddtrace.internal import debug @@ -75,14 +71,6 @@ def test_standard_tags(): in_venv = f.get("in_virtual_env") assert in_venv is True - lang_version = f.get("lang_version") - if sys.version_info == (3, 7, 0): - assert "3.7" in lang_version - elif sys.version_info == (3, 6, 0): - assert "3.6" in lang_version - elif sys.version_info == (2, 7, 0): - assert "2.7" in lang_version - agent_url = f.get("agent_url") assert agent_url == "http://localhost:8126" @@ -198,13 +186,12 @@ def test_trace_agent_url(self): ) ) def test_tracer_loglevel_info_connection(self): - tracer = ddtrace.trace.Tracer() logging.basicConfig(level=logging.INFO) with mock.patch.object(logging.Logger, "log") as mock_logger: # shove an unserializable object into the config log output # regression: this used to cause an exception to be raised ddtrace.config.version = AgentWriter(agent_url="foobar") - tracer._configure() + ddtrace.trace.tracer.configure() assert mock.call(logging.INFO, re_matcher("- DATADOG TRACER CONFIGURATION - ")) in mock_logger.mock_calls @run_in_subprocess( @@ -214,10 +201,9 @@ def test_tracer_loglevel_info_connection(self): ) ) def test_tracer_loglevel_info_no_connection(self): - tracer = ddtrace.trace.Tracer() logging.basicConfig(level=logging.INFO) with 
mock.patch.object(logging.Logger, "log") as mock_logger: - tracer._configure() + ddtrace.trace.tracer.configure() assert mock.call(logging.INFO, re_matcher("- DATADOG TRACER CONFIGURATION - ")) in mock_logger.mock_calls assert mock.call(logging.WARNING, re_matcher("- DATADOG TRACER DIAGNOSTIC - ")) in mock_logger.mock_calls @@ -228,9 +214,8 @@ def test_tracer_loglevel_info_no_connection(self): ) ) def test_tracer_log_disabled_error(self): - tracer = ddtrace.trace.Tracer() with mock.patch.object(logging.Logger, "log") as mock_logger: - tracer._configure() + ddtrace.trace.tracer._configure() assert mock_logger.mock_calls == [] @run_in_subprocess( @@ -240,9 +225,8 @@ def test_tracer_log_disabled_error(self): ) ) def test_tracer_log_disabled(self): - tracer = ddtrace.trace.Tracer() with mock.patch.object(logging.Logger, "log") as mock_logger: - tracer._configure() + ddtrace.trace.tracer._configure() assert mock_logger.mock_calls == [] @run_in_subprocess( @@ -252,9 +236,8 @@ def test_tracer_log_disabled(self): ) def test_tracer_info_level_log(self): logging.basicConfig(level=logging.INFO) - tracer = ddtrace.trace.Tracer() with mock.patch.object(logging.Logger, "log") as mock_logger: - tracer._configure() + ddtrace.trace.tracer._configure() assert mock_logger.mock_calls == [] @@ -296,16 +279,24 @@ def test_to_json(): json.dumps(info) +@pytest.mark.subprocess(env={"AWS_LAMBDA_FUNCTION_NAME": "something"}) def test_agentless(monkeypatch): - monkeypatch.setenv("AWS_LAMBDA_FUNCTION_NAME", "something") - tracer = ddtrace.trace.Tracer() - info = debug.collect(tracer) + from ddtrace.internal import debug + from ddtrace.trace import tracer + info = debug.collect(tracer) assert info.get("agent_url") == "AGENTLESS" +@pytest.mark.subprocess() def test_custom_writer(): - tracer = ddtrace.trace.Tracer() + from typing import List + from typing import Optional + + from ddtrace.internal import debug + from ddtrace.internal.writer import TraceWriter + from ddtrace.trace import Span + from ddtrace.trace import tracer class CustomWriter(TraceWriter): def recreate(self) -> TraceWriter: @@ -326,16 +317,24 @@ def flush_queue(self) -> None: assert info.get("agent_url") == "CUSTOM" +@pytest.mark.subprocess() def test_different_samplers(): - tracer = ddtrace.trace.Tracer() + import ddtrace + from ddtrace.internal import debug + from ddtrace.trace import tracer + tracer._configure(sampler=ddtrace._trace.sampler.RateSampler()) info = debug.collect(tracer) assert info.get("sampler_type") == "RateSampler" +@pytest.mark.subprocess() def test_startup_logs_sampling_rules(): - tracer = ddtrace.trace.Tracer() + import ddtrace + from ddtrace.internal import debug + from ddtrace.trace import tracer + sampler = ddtrace._trace.sampler.DatadogSampler(rules=[ddtrace._trace.sampler.SamplingRule(sample_rate=1.0)]) tracer._configure(sampler=sampler) f = debug.collect(tracer) @@ -424,7 +423,7 @@ def test_debug_span_log(): def test_partial_flush_log(): - tracer = ddtrace.trace.Tracer() + tracer = DummyTracer() tracer._configure( partial_flush_enabled=True, diff --git a/tests/integration/test_encoding.py b/tests/integration/test_encoding.py index e3f5037e7b3..ff47679af47 100644 --- a/tests/integration/test_encoding.py +++ b/tests/integration/test_encoding.py @@ -4,7 +4,7 @@ import mock import pytest -from ddtrace.trace import Tracer +from ddtrace.trace import tracer AGENT_VERSION = os.environ.get("AGENT_VERSION") @@ -12,7 +12,6 @@ class TestTraceAcceptedByAgent: def test_simple_trace_accepted_by_agent(self): - tracer = Tracer() with 
mock.patch("ddtrace.internal.writer.writer.log") as log: with tracer.trace("root"): for _ in range(999): @@ -32,7 +31,6 @@ def test_simple_trace_accepted_by_agent(self): ) def test_trace_with_meta_accepted_by_agent(self, tags): """Meta tags should be text types.""" - tracer = Tracer() with mock.patch("ddtrace.internal.writer.writer.log") as log: with tracer.trace("root", service="test_encoding", resource="test_resource") as root: root.set_tags(tags) @@ -53,7 +51,6 @@ def test_trace_with_meta_accepted_by_agent(self, tags): ) def test_trace_with_metrics_accepted_by_agent(self, metrics): """Metric tags should be numeric types - i.e. int, float, long (py3), and str numbers.""" - tracer = Tracer() with mock.patch("ddtrace.internal.writer.writer.log") as log: with tracer.trace("root") as root: root.set_metrics(metrics) @@ -72,7 +69,6 @@ def test_trace_with_metrics_accepted_by_agent(self, metrics): ) def test_trace_with_links_accepted_by_agent(self, span_links_kwargs): """Links should not break things.""" - tracer = Tracer() with mock.patch("ddtrace.internal.writer.writer.log") as log: with tracer.trace("root", service="test_encoding", resource="test_resource") as root: root.set_link(**span_links_kwargs) diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 70cc84cdbfa..fd29cc18231 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -9,10 +9,10 @@ from ddtrace.internal.atexit import register_on_exit_signal from ddtrace.internal.runtime import container -from ddtrace.trace import Tracer from tests.integration.utils import import_ddtrace_in_subprocess from tests.integration.utils import parametrize_with_all_encodings from tests.integration.utils import skip_if_testagent +from tests.utils import DummyTracer from tests.utils import call_program @@ -37,7 +37,7 @@ def test_configure_keeps_api_hostname_and_port(): @mock.patch("signal.getsignal") def test_shutdown_on_exit_signal(mock_get_signal, mock_signal): mock_get_signal.return_value = None - tracer = Tracer() + tracer = DummyTracer() register_on_exit_signal(tracer._atexit) assert mock_signal.call_count == 2 assert mock_signal.call_args_list[0][0][0] == signal.SIGTERM diff --git a/tests/integration/test_integration_civisibility.py b/tests/integration/test_integration_civisibility.py index cc633d12018..9e01d47b756 100644 --- a/tests/integration/test_integration_civisibility.py +++ b/tests/integration/test_integration_civisibility.py @@ -10,7 +10,7 @@ from ddtrace.internal.ci_visibility.constants import EVP_PROXY_AGENT_ENDPOINT from ddtrace.internal.ci_visibility.constants import EVP_SUBDOMAIN_HEADER_EVENT_VALUE from ddtrace.internal.ci_visibility.constants import EVP_SUBDOMAIN_HEADER_NAME -from ddtrace.internal.ci_visibility.recorder import CIVisibilityTracer as Tracer +from ddtrace.internal.ci_visibility.recorder import CIVisibilityTracer from tests.ci_visibility.util import _get_default_civisibility_ddconfig from tests.utils import override_env @@ -36,7 +36,7 @@ def test_civisibility_intake_with_evp_available(): with override_env( dict(DD_API_KEY="foobar.baz", DD_SITE="foo.bar", DD_CIVISIBILITY_AGENTLESS_ENABLED="0") ), mock.patch("ddtrace.internal.ci_visibility.recorder.ddconfig", _get_default_civisibility_ddconfig()): - t = Tracer() + t = CIVisibilityTracer() CIVisibility.enable(tracer=t) assert CIVisibility._instance.tracer._writer._endpoint == EVP_PROXY_AGENT_ENDPOINT assert CIVisibility._instance.tracer._writer.intake_url == agent.get_trace_url() @@ -64,7 
+64,7 @@ def test_civisibility_intake_with_apikey(): with override_env( dict(DD_API_KEY="foobar.baz", DD_SITE="foo.bar", DD_CIVISIBILITY_AGENTLESS_ENABLED="1") ), mock.patch("ddtrace.internal.ci_visibility.recorder.ddconfig", _get_default_civisibility_ddconfig()): - t = Tracer() + t = CIVisibilityTracer() CIVisibility.enable(tracer=t) assert CIVisibility._instance.tracer._writer._endpoint == AGENTLESS_ENDPOINT assert CIVisibility._instance.tracer._writer.intake_url == "https://citestcycle-intake.foo.bar" diff --git a/tests/integration/test_integration_snapshots.py b/tests/integration/test_integration_snapshots.py index 0ba978fa260..eab323bf319 100644 --- a/tests/integration/test_integration_snapshots.py +++ b/tests/integration/test_integration_snapshots.py @@ -5,7 +5,6 @@ import mock import pytest -from ddtrace.trace import Tracer from ddtrace.trace import tracer from tests.integration.utils import AGENT_VERSION from tests.integration.utils import mark_snapshot @@ -214,7 +213,6 @@ def test_trace_with_wrong_meta_types_not_sent(encoding, meta, monkeypatch): def test_trace_with_wrong_metrics_types_not_sent(encoding, metrics, monkeypatch): """Wrong metric types should raise TypeErrors during encoding and fail to send to the agent.""" with override_global_config(dict(_trace_api=encoding)): - tracer = Tracer() with mock.patch("ddtrace._trace.span.log") as log: with tracer.trace("root") as root: root._metrics = metrics diff --git a/tests/integration/test_priority_sampling.py b/tests/integration/test_priority_sampling.py index 57b64a2fe5c..32fc4e0dcee 100644 --- a/tests/integration/test_priority_sampling.py +++ b/tests/integration/test_priority_sampling.py @@ -8,7 +8,7 @@ from ddtrace.internal.encoding import JSONEncoder from ddtrace.internal.encoding import MsgpackEncoderV04 as Encoder from ddtrace.internal.writer import AgentWriter -from ddtrace.trace import Tracer +from ddtrace.trace import tracer as ddtracer from tests.integration.utils import AGENT_VERSION from tests.integration.utils import parametrize_with_all_encodings from tests.integration.utils import skip_if_testagent @@ -115,18 +115,16 @@ def test_priority_sampling_response(): @pytest.mark.snapshot(agent_sample_rate_by_service={"service:test,env:": 0.9999}) def test_agent_sample_rate_keep(): """Ensure that the agent sample rate is respected when a trace is auto sampled.""" - tracer = Tracer() - # First trace won't actually have the sample rate applied since the response has not yet been received. - with tracer.trace(""): + with ddtracer.trace(""): pass # Force a flush to get the response back. - tracer.flush() + ddtracer.flush() # Subsequent traces should have the rate applied. - with tracer.trace("test", service="test") as span: + with ddtracer.trace("test", service="test") as span: pass - tracer.flush() + ddtracer.flush() assert span.get_metric("_dd.agent_psr") == pytest.approx(0.9999) assert span.get_metric("_sampling_priority_v1") == AUTO_KEEP assert span.get_tag("_dd.p.dm") == "-1" @@ -136,21 +134,17 @@ def test_agent_sample_rate_keep(): @pytest.mark.snapshot(agent_sample_rate_by_service={"service:test,env:": 0.0001}) def test_agent_sample_rate_reject(): """Ensure that the agent sample rate is respected when a trace is auto rejected.""" - from ddtrace.trace import Tracer - - tracer = Tracer() - # First trace won't actually have the sample rate applied since the response has not yet been received. - with tracer.trace(""): + with ddtracer.trace(""): pass # Force a flush to get the response back. 
- tracer.flush() + ddtracer.flush() # Subsequent traces should have the rate applied. - with tracer.trace("test", service="test") as span: + with ddtracer.trace("test", service="test") as span: pass - tracer.flush() + ddtracer.flush() assert span.get_metric("_dd.agent_psr") == pytest.approx(0.0001) assert span.get_metric("_sampling_priority_v1") == AUTO_REJECT assert span.get_tag("_dd.p.dm") == "-1" diff --git a/tests/integration/test_settings.py b/tests/integration/test_settings.py index 249b0211bb4..ba9bcd66f37 100644 --- a/tests/integration/test_settings.py +++ b/tests/integration/test_settings.py @@ -20,7 +20,7 @@ def test_setting_origin_environment(test_agent_session, run_python_code_in_subpr env = os.environ.copy() env.update( { - "DD_TRACE_SAMPLE_RATE": "0.1", + "DD_TRACE_SAMPLING_RULES": '[{"sample_rate":0.1}]', "DD_LOGS_INJECTION": "true", "DD_TRACE_HEADER_TAGS": "X-Header-Tag-1:header_tag_1,X-Header-Tag-2:header_tag_2", "DD_TAGS": "team:apm,component:web", @@ -39,11 +39,11 @@ def test_setting_origin_environment(test_agent_session, run_python_code_in_subpr assert status == 0, err events = test_agent_session.get_events(subprocess=True) - events_trace_sample_rate = _get_telemetry_config_items(events, "DD_TRACE_SAMPLE_RATE") + events_trace_sample_rate = _get_telemetry_config_items(events, "DD_TRACE_SAMPLING_RULES") assert { - "name": "DD_TRACE_SAMPLE_RATE", - "value": 0.1, + "name": "DD_TRACE_SAMPLING_RULES", + "value": '[{"sample_rate":0.1}]', "origin": "env_var", } in events_trace_sample_rate @@ -69,7 +69,6 @@ def test_setting_origin_code(test_agent_session, run_python_code_in_subprocess): env = os.environ.copy() env.update( { - "DD_TRACE_SAMPLE_RATE": "0.1", "DD_LOGS_INJECTION": "true", "DD_TRACE_HEADER_TAGS": "X-Header-Tag-1:header_tag_1,X-Header-Tag-2:header_tag_2", "DD_TAGS": "team:apm,component:web", @@ -81,7 +80,6 @@ def test_setting_origin_code(test_agent_session, run_python_code_in_subprocess): """ from ddtrace import config, tracer -config._trace_sample_rate = 0.2 config._logs_injection = False config._trace_http_header_tags = {"header": "value"} config.tags = {"header": "value"} @@ -96,12 +94,6 @@ def test_setting_origin_code(test_agent_session, run_python_code_in_subprocess): assert status == 0, err events = test_agent_session.get_events(subprocess=True) - events_trace_sample_rate = _get_telemetry_config_items(events, "DD_TRACE_SAMPLE_RATE") - assert { - "name": "DD_TRACE_SAMPLE_RATE", - "value": 0.2, - "origin": "code", - } in events_trace_sample_rate events_logs_injection_enabled = _get_telemetry_config_items(events, "DD_LOGS_INJECTION") assert { @@ -174,8 +166,8 @@ def test_remoteconfig_sampling_rate_default(test_agent_session, run_python_code_ assert status == 0, err events = test_agent_session.get_events(subprocess=True) - events_trace_sample_rate = _get_telemetry_config_items(events, "DD_TRACE_SAMPLE_RATE") - assert {"name": "DD_TRACE_SAMPLE_RATE", "value": 1.0, "origin": "default"} in events_trace_sample_rate + events_trace_sample_rate = _get_telemetry_config_items(events, "trace_sample_rate") + assert {"name": "trace_sample_rate", "value": 1.0, "origin": "default"} in events_trace_sample_rate @pytest.mark.skipif(AGENT_VERSION != "testagent", reason="Tests only compatible with a testagent") @@ -191,7 +183,22 @@ def test_remoteconfig_sampling_rate_telemetry(test_agent_session, run_python_cod from ddtrace import config, tracer from tests.internal.test_settings import _base_rc_config -config._handle_remoteconfig(_base_rc_config({"tracing_sampling_rate": 0.5})) 
+config._handle_remoteconfig( + _base_rc_config( + { + "tracing_sampling_rules": [ + { + "sample_rate": "0.5", + "service": "*", + "name": "*", + "resource": "*", + "tags": {}, + "provenance": "customer", + } + ] + } + ) +) with tracer.trace("test") as span: pass assert span.get_metric("_dd.rule_psr") == 0.5 @@ -201,8 +208,13 @@ def test_remoteconfig_sampling_rate_telemetry(test_agent_session, run_python_cod assert status == 0, err events = test_agent_session.get_events(subprocess=True) - events_trace_sample_rate = _get_telemetry_config_items(events, "DD_TRACE_SAMPLE_RATE") - assert {"name": "DD_TRACE_SAMPLE_RATE", "value": 0.5, "origin": "remote_config"} in events_trace_sample_rate + events_trace_sample_rate = _get_telemetry_config_items(events, "DD_TRACE_SAMPLING_RULES") + assert { + "name": "DD_TRACE_SAMPLING_RULES", + "origin": "remote_config", + "value": '[{"sample_rate": "0.5", "service": "*", "name": "*", "resource": "*", ' + '"tags": {}, "provenance": "customer"}]', + } in events_trace_sample_rate @pytest.mark.skipif(AGENT_VERSION != "testagent", reason="Tests only compatible with a testagent") @@ -226,9 +238,11 @@ def test_remoteconfig_header_tags_telemetry(test_agent_session, run_python_code_ {"header": "used-with-default", "tag_name":""}] })) with tracer.trace("test") as span: - trace_utils.set_http_meta(span, - config.falcon, # randomly chosen http integration config - request_headers={"used": "foobarbanana", "used-with-default": "defaultname"}) + trace_utils.set_http_meta( + span, + config.falcon, # randomly chosen http integration config + request_headers={"used": "foobarbanana", "used-with-default": "defaultname"}, + ) assert span.get_tag("header_tag_69") == "foobarbanana" assert span.get_tag("header_tag_70") is None assert span.get_tag("http.request.headers.used-with-default") == "defaultname" diff --git a/tests/integration/test_tracemethods.py b/tests/integration/test_tracemethods.py index 15129c56161..7353c12182a 100644 --- a/tests/integration/test_tracemethods.py +++ b/tests/integration/test_tracemethods.py @@ -27,14 +27,10 @@ "mod.mod2.mod3:Class.test_method,Class.test_method2", [("mod.mod2.mod3", "Class.test_method"), ("mod.mod2.mod3", "Class.test_method2")], ), - ("module[method1, method2]", []), ("module", []), ("module.", []), ("module.method", []), - ("module.method[m1,m2,]", []), ("module.method;module.method", []), - ("module.method[m1];module.method[m1,m2,]", []), - ("module.method[[m1]", []), ], ) def test_trace_methods_parse(dd_trace_methods: str, expected_output: List[Tuple[str, str]]): @@ -43,37 +39,6 @@ def test_trace_methods_parse(dd_trace_methods: str, expected_output: List[Tuple[ assert _parse_trace_methods(dd_trace_methods) == expected_output -def test_legacy_trace_methods_parse(): - from ddtrace.internal.tracemethods import _parse_legacy_trace_methods - - assert _parse_legacy_trace_methods("") == [] - assert _parse_legacy_trace_methods("module[method1]") == ["module.method1"] - assert _parse_legacy_trace_methods("module[method1,method2]") == ["module.method1", "module.method2"] - assert _parse_legacy_trace_methods("module[method1,method2];mod2[m1,m2]") == [ - "module.method1", - "module.method2", - "mod2.m1", - "mod2.m2", - ] - assert _parse_legacy_trace_methods("mod.submod[m1,m2,m3]") == ["mod.submod.m1", "mod.submod.m2", "mod.submod.m3"] - assert _parse_legacy_trace_methods("mod.submod.subsubmod[m1,m2]") == [ - "mod.submod.subsubmod.m1", - "mod.submod.subsubmod.m2", - ] - assert 
_parse_legacy_trace_methods("mod.mod2.mod3.Class[test_method,test_method2]") == [ - "mod.mod2.mod3.Class.test_method", - "mod.mod2.mod3.Class.test_method2", - ] - assert _parse_legacy_trace_methods("module[method1, method2]") == [] - assert _parse_legacy_trace_methods("module") == [] - assert _parse_legacy_trace_methods("module.") == [] - assert _parse_legacy_trace_methods("module.method") == [] - assert _parse_legacy_trace_methods("module.method[m1,m2,]") == [] - assert _parse_legacy_trace_methods("module.method;module.method") == [] - assert _parse_legacy_trace_methods("module.method[m1];module.method[m1,m2,]") == [] - assert _parse_legacy_trace_methods("module.method[[m1]") == [] - - def _test_method(): pass @@ -105,9 +70,9 @@ def test_method(self): ddtrace_run=True, env=dict( DD_TRACE_METHODS=( - "tests.integration.test_tracemethods[_test_method,_test_method2];" - "tests.integration.test_tracemethods._Class[test_method,test_method2];" - "tests.integration.test_tracemethods._Class.NestedClass[test_method]" + "tests.integration.test_tracemethods:_test_method,_test_method2;" + "tests.integration.test_tracemethods:_Class.test_method,_Class.test_method2;" + "tests.integration.test_tracemethods:_Class.NestedClass.test_method" ) ), ) @@ -139,8 +104,8 @@ async def _async_test_method2(): def test_ddtrace_run_trace_methods_async(ddtrace_run_python_code_in_subprocess): env = os.environ.copy() env["DD_TRACE_METHODS"] = ( - "tests.integration.test_tracemethods[_async_test_method,_async_test_method2];" - "tests.integration.test_tracemethods._Class[async_test_method]" + "tests.integration.test_tracemethods:_async_test_method,_async_test_method2;" + "tests.integration.test_tracemethods:_Class.async_test_method" ) tests_dir = os.path.dirname(os.path.dirname(__file__)) env["PYTHONPATH"] = os.pathsep.join([tests_dir, env.get("PYTHONPATH", "")]) diff --git a/tests/integration/utils.py b/tests/integration/utils.py index 5b87161e2d0..53640d82b57 100644 --- a/tests/integration/utils.py +++ b/tests/integration/utils.py @@ -5,8 +5,6 @@ import mock import pytest -from ddtrace.trace import Tracer - AGENT_VERSION = os.environ.get("AGENT_VERSION") @@ -28,7 +26,8 @@ def encode_traces(self, traces): def send_invalid_payload_and_get_logs(encoder_cls=BadEncoder): - t = Tracer() + from ddtrace.trace import tracer as t + for client in t._writer._clients: client.encoder = encoder_cls() with mock.patch("ddtrace.internal.writer.writer.log") as log: diff --git a/tests/internal/test_settings.py b/tests/internal/test_settings.py index 2ff1843690e..a26d692eea4 100644 --- a/tests/internal/test_settings.py +++ b/tests/internal/test_settings.py @@ -62,22 +62,36 @@ def _deleted_rc_config(): }, }, { - "env": {"DD_TRACE_SAMPLE_RATE": "0.9"}, - "expected": {"_trace_sample_rate": 0.9}, - "expected_source": {"_trace_sample_rate": "env_var"}, + "env": {"DD_TRACE_SAMPLING_RULES": '[{"sample_rate":0.91}]'}, + "expected": {"_trace_sampling_rules": '[{"sample_rate":0.91}]'}, + "expected_source": {"_trace_sampling_rules": "env_var"}, }, { - "env": {"DD_TRACE_SAMPLE_RATE": "0.9"}, - "code": {"_trace_sample_rate": 0.8}, - "expected": {"_trace_sample_rate": 0.8}, - "expected_source": {"_trace_sample_rate": "code"}, + "env": {"DD_TRACE_SAMPLING_RULES": '[{"sample_rate":0.92}]'}, + "code": {"_trace_sampling_rules": '[{"sample_rate":0.82}]'}, + "expected": {"_trace_sampling_rules": '[{"sample_rate":0.82}]'}, + "expected_source": {"_trace_sampling_rules": "code"}, }, { - "env": {"DD_TRACE_SAMPLE_RATE": "0.9"}, - "code": {"_trace_sample_rate": 0.8}, 
- "rc": {"tracing_sampling_rate": 0.7}, - "expected": {"_trace_sample_rate": 0.7}, - "expected_source": {"_trace_sample_rate": "remote_config"}, + "env": {"DD_TRACE_SAMPLING_RULES": '[{"sample_rate":0.93}]'}, + "code": {"_trace_sampling_rules": '[{"sample_rate":0.83}]'}, + "rc": { + "tracing_sampling_rules": [ + { + "sample_rate": "0.73", + "service": "*", + "name": "*", + "resource": "*", + "tags": [], + "provenance": "customer", + } + ] + }, + "expected": { + "_trace_sampling_rules": '[{"sample_rate": "0.73", "service": "*", "name": "*", ' + '"resource": "*", "tags": [], "provenance": "customer"}]', + }, + "expected_source": {"_trace_sampling_rules": "remote_config"}, }, { "env": {"DD_LOGS_INJECTION": "true"}, @@ -227,60 +241,6 @@ def test_config_subscription(config): _handler.assert_called_once_with(config, [s]) -def test_remoteconfig_sampling_rate_user(run_python_code_in_subprocess): - env = os.environ.copy() - env.update({"DD_TRACE_SAMPLE_RATE": "0.1"}) - out, err, status, _ = run_python_code_in_subprocess( - """ -from ddtrace import config, tracer -from ddtrace._trace.sampler import DatadogSampler -from tests.internal.test_settings import _base_rc_config, _deleted_rc_config - -with tracer.trace("test") as span: - pass -assert span.get_metric("_dd.rule_psr") == 0.1 - -config._handle_remoteconfig(_base_rc_config({"tracing_sampling_rate": 0.2})) -with tracer.trace("test") as span: - pass -assert span.get_metric("_dd.rule_psr") == 0.2 - -config._handle_remoteconfig(_base_rc_config({})) -with tracer.trace("test") as span: - pass -assert span.get_metric("_dd.rule_psr") == 0.1 - -custom_sampler = DatadogSampler(default_sample_rate=0.3) -tracer._configure(sampler=custom_sampler) -with tracer.trace("test") as span: - pass -assert span.get_metric("_dd.rule_psr") == 0.3 - -config._handle_remoteconfig(_base_rc_config({"tracing_sampling_rate": 0.4})) -with tracer.trace("test") as span: - pass -assert span.get_metric("_dd.rule_psr") == 0.4 - -config._handle_remoteconfig(_base_rc_config({})) -with tracer.trace("test") as span: - pass -assert span.get_metric("_dd.rule_psr") == 0.3 - -config._handle_remoteconfig(_base_rc_config({"tracing_sampling_rate": 0.4})) -with tracer.trace("test") as span: - pass -assert span.get_metric("_dd.rule_psr") == 0.4 - -config._handle_remoteconfig(_deleted_rc_config()) -with tracer.trace("test") as span: - pass -assert span.get_metric("_dd.rule_psr") == 0.3 - """, - env=env, - ) - assert status == 0, err.decode("utf-8") - - def test_remoteconfig_sampling_rules(run_python_code_in_subprocess): env = os.environ.copy() env.update({"DD_TRACE_SAMPLING_RULES": '[{"sample_rate":0.1, "name":"test"}]'}) @@ -368,13 +328,12 @@ def test_remoteconfig_sampling_rules(run_python_code_in_subprocess): assert status == 0, err.decode("utf-8") -def test_remoteconfig_sample_rate_and_rules(run_python_code_in_subprocess): +def test_remoteconfig_global_sample_rate_and_rules(run_python_code_in_subprocess): """There is complex logic regarding the interaction between setting new sample rates and rules with remote config. 
""" env = os.environ.copy() - env.update({"DD_TRACE_SAMPLING_RULES": '[{"sample_rate":0.9, "name":"rules"}]'}) - env.update({"DD_TRACE_SAMPLE_RATE": "0.8"}) + env.update({"DD_TRACE_SAMPLING_RULES": '[{"sample_rate":0.9, "name":"rules"}, {"sample_rate":0.8}]'}) out, err, status, _ = run_python_code_in_subprocess( """ @@ -410,8 +369,9 @@ def test_remoteconfig_sample_rate_and_rules(run_python_code_in_subprocess): with tracer.trace("sample_rate") as span: pass -assert span.get_metric("_dd.rule_psr") == 0.8 -assert span.get_tag("_dd.p.dm") == "-3" +# Global sampling rule was overwritten +assert span.get_metric("_dd.rule_psr") is None +assert span.get_tag("_dd.p.dm") == "-0" config._handle_remoteconfig(_base_rc_config({"tracing_sampling_rate": 0.2})) @@ -482,8 +442,8 @@ def test_remoteconfig_sample_rate_and_rules(run_python_code_in_subprocess): with tracer.trace("sample_rate") as span: pass -assert span.get_metric("_dd.rule_psr") == 0.8 -assert span.get_tag("_dd.p.dm") == "-3" +assert span.get_metric("_dd.rule_psr") is None +assert span.get_tag("_dd.p.dm") == "-0" """, env=env, diff --git a/tests/opentelemetry/test_config.py b/tests/opentelemetry/test_config.py index 39a43128e9e..d5e9bf570fd 100644 --- a/tests/opentelemetry/test_config.py +++ b/tests/opentelemetry/test_config.py @@ -1,6 +1,24 @@ import pytest +def _global_sampling_rule(): + from ddtrace._trace.sampling_rule import SamplingRule + from ddtrace.trace import tracer + + assert hasattr(tracer._sampler, "rules") + + for rule in tracer._sampler.rules: + if ( + rule.service == SamplingRule.NO_RULE + and rule.name == SamplingRule.NO_RULE + and rule.resource == SamplingRule.NO_RULE + and rule.tags == SamplingRule.NO_RULE + and rule.provenance == "default" + ): + return rule + assert False, "Rule not found" + + @pytest.mark.subprocess( env={ "OTEL_SERVICE_NAME": "Test", @@ -10,7 +28,7 @@ "OTEL_PROPAGATORS": "jaegar, tracecontext, b3", "DD_TRACE_PROPAGATION_STYLE": "b3", "OTEL_TRACES_SAMPLER": "always_off", - "DD_TRACE_SAMPLE_RATE": "1.0", + "DD_TRACE_SAMPLING_RULES": '[{"sample_rate":0.1}]', "OTEL_TRACES_EXPORTER": "True", "DD_TRACE_ENABLED": "True", "OTEL_METRICS_EXPORTER": "none", @@ -26,11 +44,12 @@ ) def test_dd_otel_mixed_env_configuration(): from ddtrace import config + from tests.opentelemetry.test_config import _global_sampling_rule assert config.service == "DD_service_test", config.service assert config._debug_mode is False, config._debug_mode assert config._propagation_style_extract == ["b3"], config._propagation_style_extract - assert config._trace_sample_rate == 1.0, config._trace_sample_rate + assert _global_sampling_rule().sample_rate == 0.1 assert config._tracing_enabled is True, config._tracing_enabled assert config._runtime_metrics_enabled is True, config._runtime_metrics_enabled assert config._otel_enabled is True, config._otel_enabled @@ -45,7 +64,7 @@ def test_dd_otel_mixed_env_configuration(): "OTEL_LOG_LEVEL": "debug", "OTEL_PROPAGATORS": "jaegar, tracecontext, b3", "OTEL_TRACES_SAMPLER": "always_off", - "DD_TRACE_SAMPLE_RATE": "1.0", + "DD_TRACE_SAMPLING_RULES": '[{"sample_rate":0.9}]', "OTEL_TRACES_EXPORTER": "OTLP", "OTEL_METRICS_EXPORTER": "none", "OTEL_LOGS_EXPORTER": "warning", @@ -59,13 +78,14 @@ def test_dd_otel_mixed_env_configuration(): ) def test_dd_otel_missing_dd_env_configuration(): from ddtrace import config + from tests.opentelemetry.test_config import _global_sampling_rule assert config.service == "Test", config.service assert config.version == "1.0" assert config._otel_enabled is True, 
config._otel_enabled assert config._debug_mode is True, config._debug_mode assert config._propagation_style_extract == ["tracecontext", "b3"], config._propagation_style_extract - assert config._trace_sample_rate == 1.0, config._trace_sample_rate + assert _global_sampling_rule().sample_rate == 0.9 assert config._tracing_enabled is True, config._tracing_enabled assert config._runtime_metrics_enabled is False, config._runtime_metrics_enabled assert config.tags == { @@ -133,8 +153,9 @@ def test_otel_propagation_style_configuration_unsupportedwarning(): ) def test_otel_traces_sampler_configuration_alwayson(): from ddtrace import config + from tests.opentelemetry.test_config import _global_sampling_rule - assert config._trace_sample_rate == 1.0, config._trace_sample_rate + assert _global_sampling_rule().sample_rate == 1.0, config._trace_sample_rate @pytest.mark.subprocess( @@ -143,8 +164,9 @@ def test_otel_traces_sampler_configuration_alwayson(): ) def test_otel_traces_sampler_configuration_ignore_parent(): from ddtrace import config + from tests.opentelemetry.test_config import _global_sampling_rule - assert config._trace_sample_rate == 1.0, config._trace_sample_rate + assert _global_sampling_rule().sample_rate == 1.0, config._trace_sample_rate @pytest.mark.subprocess( @@ -153,8 +175,9 @@ def test_otel_traces_sampler_configuration_ignore_parent(): ) def test_otel_traces_sampler_configuration_alwaysoff(): from ddtrace import config + from tests.opentelemetry.test_config import _global_sampling_rule - assert config._trace_sample_rate == 0.0, config._trace_sample_rate + assert _global_sampling_rule().sample_rate == 0.0, config._trace_sample_rate @pytest.mark.subprocess( @@ -167,8 +190,9 @@ def test_otel_traces_sampler_configuration_alwaysoff(): ) def test_otel_traces_sampler_configuration_traceidratio(): from ddtrace import config + from tests.opentelemetry.test_config import _global_sampling_rule - assert config._trace_sample_rate == 0.5, config._trace_sample_rate + assert _global_sampling_rule().sample_rate == 0.5, config._trace_sample_rate @pytest.mark.subprocess(env={"OTEL_TRACES_EXPORTER": "none"}) diff --git a/tests/opentracer/core/test_dd_compatibility.py b/tests/opentracer/core/test_dd_compatibility.py index 4ba14b0618f..c68b5ca6d6c 100644 --- a/tests/opentracer/core/test_dd_compatibility.py +++ b/tests/opentracer/core/test_dd_compatibility.py @@ -15,14 +15,6 @@ def test_ottracer_uses_global_ddtracer(self): tracer = ddtrace.opentracer.Tracer() assert tracer._dd_tracer is ddtrace.tracer - def test_custom_ddtracer(self): - """A user should be able to specify their own Datadog tracer instance if - they wish. 
- """ - custom_dd_tracer = ddtrace.trace.Tracer() - tracer = ddtrace.opentracer.Tracer(dd_tracer=custom_dd_tracer) - assert tracer._dd_tracer is custom_dd_tracer - def test_ot_dd_global_tracers(self, global_tracer): """Ensure our test function opentracer_init() prep""" ot_tracer = global_tracer diff --git a/tests/opentracer/core/test_tracer.py b/tests/opentracer/core/test_tracer.py index a0a18ff0dd8..f5534c8f1b0 100644 --- a/tests/opentracer/core/test_tracer.py +++ b/tests/opentracer/core/test_tracer.py @@ -15,8 +15,6 @@ from ddtrace.opentracer.span_context import SpanContext from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID from ddtrace.settings import ConfigException -from ddtrace.trace import Tracer as DDTracer -from tests.utils import override_global_config class TestTracerConfig(object): @@ -69,12 +67,6 @@ def test_invalid_config_key(self): assert ["enabeld", "setttings"] in str(ce_info) # codespell:ignore assert tracer is not None - def test_ddtrace_fallback_config(self): - """Ensure datadog configuration is used by default.""" - with override_global_config(dict(_tracing_enabled=False)): - tracer = Tracer(dd_tracer=DDTracer()) - assert tracer._dd_tracer.enabled is False - def test_global_tags(self): """Global tags should be passed from the opentracer to the tracer.""" config = { diff --git a/tests/opentracer/utils.py b/tests/opentracer/utils.py index 6a34052a385..85b84865ad8 100644 --- a/tests/opentracer/utils.py +++ b/tests/opentracer/utils.py @@ -7,5 +7,5 @@ def init_tracer(service_name, dd_tracer, scope_manager=None): It accepts a Datadog tracer that should be the same one used for testing. """ - ot_tracer = Tracer(service_name, dd_tracer=dd_tracer, scope_manager=scope_manager) + ot_tracer = Tracer(service_name, scope_manager=scope_manager, _dd_tracer=dd_tracer) return ot_tracer diff --git a/tests/profiling/collector/conftest.py b/tests/profiling/collector/conftest.py index a774b20f7da..a53ac79bcad 100644 --- a/tests/profiling/collector/conftest.py +++ b/tests/profiling/collector/conftest.py @@ -2,12 +2,13 @@ import ddtrace from ddtrace.profiling import Profiler +from tests.utils import override_global_config @pytest.fixture -def tracer(monkeypatch): - monkeypatch.setenv("DD_TRACE_STARTUP_LOGS", "0") - return ddtrace.trace.Tracer() +def tracer(): + with override_global_config(dict(_startup_logs_enabled=False)): + yield ddtrace.trace.tracer @pytest.fixture diff --git a/tests/profiling_v2/collector/conftest.py b/tests/profiling_v2/collector/conftest.py index 311c286c11e..7dc1d816091 100644 --- a/tests/profiling_v2/collector/conftest.py +++ b/tests/profiling_v2/collector/conftest.py @@ -5,4 +5,4 @@ @pytest.fixture def tracer(): - return ddtrace.trace.Tracer() + return ddtrace.trace.tracer diff --git a/tests/snapshots/tests.contrib.graphene.test_graphene.test_schema_failing_extensions.json b/tests/snapshots/tests.contrib.graphene.test_graphene.test_schema_failing_extensions.json new file mode 100644 index 00000000000..11391a3d342 --- /dev/null +++ b/tests/snapshots/tests.contrib.graphene.test_graphene.test_schema_failing_extensions.json @@ -0,0 +1,110 @@ +[[ + { + "name": "graphql.request", + "service": "graphql", + "resource": "{ user(id: \"999\") }", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "graphql", + "error": 1, + "meta": { + "_dd.base_service": "tests.contrib.graphene", + "_dd.p.dm": "-0", + "_dd.p.tid": "67a1663a00000000", + "component": "graphql", + "error.message": "User not found\n\nGraphQL request:1:3\n1 | { user(id: \"999\") }\n | ^", + 
"error.stack": "Traceback (most recent call last):\n File \"/Users/quinna.halim/dd-trace-py/.riot/venv_py3130_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_graphene~300_pytest-asyncio0211_graphql-relay_pytest-randomly/lib/python3.13/site-packages/graphql/execution/execute.py\", line 617, in resolve_field\n result = resolve_fn(source, info, **args)\n File \"/Users/quinna.halim/dd-trace-py/ddtrace/contrib/internal/graphql/patch.py\", line 242, in _resolver_middleware\n return next_middleware(root, info, **args)\n File \"/Users/quinna.halim/dd-trace-py/tests/contrib/graphene/test_graphene.py\", line 37, in resolve_user\n raise graphql.error.GraphQLError(\n ...<7 lines>...\n )\ngraphql.error.graphql_error.GraphQLError: User not found\n\nGraphQL request:1:3\n1 | { user(id: \"999\") }\n | ^\n", + "error.type": "graphql.error.graphql_error.GraphQLError", + "events": "[{\"name\": \"dd.graphql.query.error\", \"time_unix_nano\": 1738630714389656000, \"attributes\": {\"message\": \"User not found\", \"type\": \"graphql.error.graphql_error.GraphQLError\", \"locations\": \"1:3\", \"stacktrace\": \"Traceback (most recent call last):\\n File \\\"/Users/quinna.halim/dd-trace-py/.riot/venv_py3130_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_graphene~300_pytest-asyncio0211_graphql-relay_pytest-randomly/lib/python3.13/site-packages/graphql/execution/execute.py\\\", line 617, in resolve_field\\n result = resolve_fn(source, info, **args)\\n File \\\"/Users/quinna.halim/dd-trace-py/ddtrace/contrib/internal/graphql/patch.py\\\", line 242, in _resolver_middleware\\n return next_middleware(root, info, **args)\\n File \\\"/Users/quinna.halim/dd-trace-py/tests/contrib/graphene/test_graphene.py\\\", line 37, in resolve_user\\n raise graphql.error.GraphQLError(\\n ...<7 lines>...\\n )\\ngraphql.error.graphql_error.GraphQLError: User not found\\n\\nGraphQL request:1:3\\n1 | { user(id: \\\"999\\\") }\\n | ^\\n\", \"path\": \"user\", \"code\": \"USER_NOT_FOUND\", \"extensions\": \"{'code': 'USER_NOT_FOUND', 'status': 404}\"}}]", + "language": "python", + "runtime-id": "eee5cdadb35249afb9f5d6e1c304be24" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 43930 + }, + "duration": 3813000, + "start": 1738630714385845000 + }, + { + "name": "graphql.parse", + "service": "graphql", + "resource": "graphql.parse", + "trace_id": 0, + "span_id": 2, + "parent_id": 1, + "type": "graphql", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.graphene", + "component": "graphql", + "graphql.source": "{ user(id: \"999\") }" + }, + "duration": 180000, + "start": 1738630714386234000 + }, + { + "name": "graphql.validate", + "service": "graphql", + "resource": "graphql.validate", + "trace_id": 0, + "span_id": 3, + "parent_id": 1, + "type": "graphql", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.graphene", + "component": "graphql", + "graphql.source": "{ user(id: \"999\") }" + }, + "duration": 888000, + "start": 1738630714386488000 + }, + { + "name": "graphql.execute", + "service": "graphql", + "resource": "{ user(id: \"999\") }", + "trace_id": 0, + "span_id": 4, + "parent_id": 1, + "type": "graphql", + "error": 1, + "meta": { + "_dd.base_service": "tests.contrib.graphene", + "component": "graphql", + "error.message": "User not found\n\nGraphQL request:1:3\n1 | { user(id: \"999\") }\n | ^", + "error.stack": "Traceback (most recent call last):\n File 
\"/Users/quinna.halim/dd-trace-py/.riot/venv_py3130_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_graphene~300_pytest-asyncio0211_graphql-relay_pytest-randomly/lib/python3.13/site-packages/graphql/execution/execute.py\", line 617, in resolve_field\n result = resolve_fn(source, info, **args)\n File \"/Users/quinna.halim/dd-trace-py/ddtrace/contrib/internal/graphql/patch.py\", line 242, in _resolver_middleware\n return next_middleware(root, info, **args)\n File \"/Users/quinna.halim/dd-trace-py/tests/contrib/graphene/test_graphene.py\", line 37, in resolve_user\n raise graphql.error.GraphQLError(\n ...<7 lines>...\n )\ngraphql.error.graphql_error.GraphQLError: User not found\n\nGraphQL request:1:3\n1 | { user(id: \"999\") }\n | ^\n", + "error.type": "graphql.error.graphql_error.GraphQLError", + "events": "[{\"name\": \"dd.graphql.query.error\", \"time_unix_nano\": 1738630714389350000, \"attributes\": {\"message\": \"User not found\", \"type\": \"graphql.error.graphql_error.GraphQLError\", \"locations\": \"1:3\", \"stacktrace\": \"Traceback (most recent call last):\\n File \\\"/Users/quinna.halim/dd-trace-py/.riot/venv_py3130_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_graphene~300_pytest-asyncio0211_graphql-relay_pytest-randomly/lib/python3.13/site-packages/graphql/execution/execute.py\\\", line 617, in resolve_field\\n result = resolve_fn(source, info, **args)\\n File \\\"/Users/quinna.halim/dd-trace-py/ddtrace/contrib/internal/graphql/patch.py\\\", line 242, in _resolver_middleware\\n return next_middleware(root, info, **args)\\n File \\\"/Users/quinna.halim/dd-trace-py/tests/contrib/graphene/test_graphene.py\\\", line 37, in resolve_user\\n raise graphql.error.GraphQLError(\\n ...<7 lines>...\\n )\\ngraphql.error.graphql_error.GraphQLError: User not found\\n\\nGraphQL request:1:3\\n1 | { user(id: \\\"999\\\") }\\n | ^\\n\", \"path\": \"user\", \"code\": \"USER_NOT_FOUND\", \"extensions\": \"{'code': 'USER_NOT_FOUND', 'status': 404}\"}}]", + "graphql.operation.type": "query", + "graphql.source": "{ user(id: \"999\") }" + }, + "metrics": { + "_dd.measured": 1 + }, + "duration": 1921000, + "start": 1738630714387431000 + }, + { + "name": "graphql.resolve", + "service": "graphql", + "resource": "user", + "trace_id": 0, + "span_id": 5, + "parent_id": 4, + "type": "graphql", + "error": 1, + "meta": { + "_dd.base_service": "tests.contrib.graphene", + "component": "graphql", + "error.message": "User not found", + "error.stack": "Traceback (most recent call last):\n File \"/Users/quinna.halim/dd-trace-py/ddtrace/contrib/internal/graphql/patch.py\", line 242, in _resolver_middleware\n return next_middleware(root, info, **args)\n File \"/Users/quinna.halim/dd-trace-py/tests/contrib/graphene/test_graphene.py\", line 37, in resolve_user\n raise graphql.error.GraphQLError(\n ...<7 lines>...\n )\ngraphql.error.graphql_error.GraphQLError: User not found\n", + "error.type": "graphql.error.graphql_error.GraphQLError" + }, + "duration": 1152000, + "start": 1738630714387593000 + }]] diff --git a/tests/snapshots/tests.contrib.graphql.test_graphql.test_graphql_error.json b/tests/snapshots/tests.contrib.graphql.test_graphql.test_graphql_error.json index 4b95ed96c9e..c19d700c1a3 100644 --- a/tests/snapshots/tests.contrib.graphql.test_graphql.test_graphql_error.json +++ b/tests/snapshots/tests.contrib.graphql.test_graphql.test_graphql_error.json @@ -2,7 +2,7 @@ { "name": "graphql.request", "service": "graphql", - "resource": "{ invalid_schema }", + "resource": 
"query my_query{ invalid_schema }", "trace_id": 0, "span_id": 1, "parent_id": 0, @@ -11,22 +11,23 @@ "meta": { "_dd.base_service": "tests.contrib.graphql", "_dd.p.dm": "-0", - "_dd.p.tid": "654a694400000000", + "_dd.p.tid": "679b96df00000000", "component": "graphql", - "error.message": "Cannot query field 'invalid_schema' on type 'RootQueryType'.\n\nGraphQL request:1:3\n1 | { invalid_schema }\n | ^", + "error.message": "Cannot query field 'invalid_schema' on type 'RootQueryType'.\n\nGraphQL request:1:17\n1 | query my_query{ invalid_schema }\n | ^", "error.type": "graphql.error.graphql_error.GraphQLError", + "events": "[{\"name\": \"dd.graphql.query.error\", \"time_unix_nano\": 1738249951802662000, \"attributes\": {\"message\": \"Cannot query field 'invalid_schema' on type 'RootQueryType'.\", \"type\": \"graphql.error.graphql_error.GraphQLError\", \"locations\": \"1:17\"}}]", "language": "python", - "runtime-id": "13a9a43400594de89a6aa537a3cb7b8e" + "runtime-id": "9a4c224e6fdd49cea07a7a3954fd5db7" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 50292 + "process_id": 26822 }, - "duration": 950458, - "start": 1692710417176600596 + "duration": 1221000, + "start": 1738249951801442000 }, { "name": "graphql.parse", @@ -39,12 +40,11 @@ "error": 0, "meta": { "_dd.base_service": "tests.contrib.graphql", - "_dd.p.tid": "654a694400000000", "component": "graphql", - "graphql.source": "{ invalid_schema }" + "graphql.source": "query my_query{ invalid_schema }" }, - "duration": 85750, - "start": 1692710417176824346 + "duration": 143000, + "start": 1738249951801693000 }, { "name": "graphql.validate", @@ -57,12 +57,12 @@ "error": 1, "meta": { "_dd.base_service": "tests.contrib.graphql", - "_dd.p.tid": "654a694400000000", "component": "graphql", - "error.message": "Cannot query field 'invalid_schema' on type 'RootQueryType'.\n\nGraphQL request:1:3\n1 | { invalid_schema }\n | ^", + "error.message": "Cannot query field 'invalid_schema' on type 'RootQueryType'.\n\nGraphQL request:1:17\n1 | query my_query{ invalid_schema }\n | ^", "error.type": "graphql.error.graphql_error.GraphQLError", - "graphql.source": "{ invalid_schema }" + "events": "[{\"name\": \"dd.graphql.query.error\", \"time_unix_nano\": 1738249951802627000, \"attributes\": {\"message\": \"Cannot query field 'invalid_schema' on type 'RootQueryType'.\", \"type\": \"graphql.error.graphql_error.GraphQLError\", \"locations\": \"1:17\"}}]", + "graphql.source": "query my_query{ invalid_schema }" }, - "duration": 564292, - "start": 1692710417176948721 + "duration": 745000, + "start": 1738249951801884000 }]] diff --git a/tests/snapshots/tests.contrib.graphql.test_graphql.test_graphql_fail.json b/tests/snapshots/tests.contrib.graphql.test_graphql.test_graphql_fail.json new file mode 100644 index 00000000000..b644d1844d9 --- /dev/null +++ b/tests/snapshots/tests.contrib.graphql.test_graphql.test_graphql_fail.json @@ -0,0 +1,138 @@ +[[ + { + "name": "graphql.parse", + "service": "graphql", + "resource": "graphql.parse", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "graphql", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.graphql", + "_dd.p.dm": "-0", + "_dd.p.tid": "679b976d00000000", + "component": "graphql", + "graphql.source": "type Query { fail: String }", + "language": "python", + "runtime-id": "596dfe80a9184851a69f62836436abe5" + }, + "metrics": { + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 27630 + }, + 
"duration": 257000, + "start": 1738250093732371000 + }], +[ + { + "name": "graphql.request", + "service": "graphql", + "resource": "query { fail }", + "trace_id": 1, + "span_id": 1, + "parent_id": 0, + "type": "graphql", + "error": 1, + "meta": { + "_dd.base_service": "tests.contrib.graphql", + "_dd.p.dm": "-0", + "_dd.p.tid": "679b976d00000000", + "component": "graphql", + "error.message": "'NoneType' object has no attribute 'name'\n\nGraphQL request:3:7\n2 | query {\n3 | fail\n | ^\n4 | }", + "error.stack": "Traceback (most recent call last):\n File \"/Users/quinna.halim/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio0211_graphql-core~320_pytest-randomly/lib/python3.10/site-packages/graphql/execution/execute.py\", line 521, in execute_field\n result = resolve_fn(source, info, **args)\n File \"/Users/quinna.halim/dd-trace-py/ddtrace/contrib/internal/graphql/patch.py\", line 243, in _resolver_middleware\n return next_middleware(root, info, **args)\n File \"/Users/quinna.halim/dd-trace-py/tests/contrib/graphql/test_graphql.py\", line 104, in \n result = graphql_sync(test_schema, query, root_value=None, field_resolver=lambda _type, _field: resolvers[_type.name][_field.name])\ngraphql.error.graphql_error.GraphQLError: 'NoneType' object has no attribute 'name'\n\nGraphQL request:3:7\n2 | query {\n3 | fail\n | ^\n4 | }\n", + "error.type": "graphql.error.graphql_error.GraphQLError", + "events": "[{\"name\": \"dd.graphql.query.error\", \"time_unix_nano\": 1738250093735707000, \"attributes\": {\"message\": \"'NoneType' object has no attribute 'name'\", \"type\": \"graphql.error.graphql_error.GraphQLError\", \"locations\": \"3:7\", \"stacktrace\": \"Traceback (most recent call last):\\n File \\\"/Users/quinna.halim/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio0211_graphql-core~320_pytest-randomly/lib/python3.10/site-packages/graphql/execution/execute.py\\\", line 521, in execute_field\\n result = resolve_fn(source, info, **args)\\n File \\\"/Users/quinna.halim/dd-trace-py/ddtrace/contrib/internal/graphql/patch.py\\\", line 243, in _resolver_middleware\\n return next_middleware(root, info, **args)\\n File \\\"/Users/quinna.halim/dd-trace-py/tests/contrib/graphql/test_graphql.py\\\", line 104, in \\n result = graphql_sync(test_schema, query, root_value=None, field_resolver=lambda _type, _field: resolvers[_type.name][_field.name])\\ngraphql.error.graphql_error.GraphQLError: 'NoneType' object has no attribute 'name'\\n\\nGraphQL request:3:7\\n2 | query {\\n3 | fail\\n | ^\\n4 | }\\n\", \"path\": \"fail\"}}]", + "language": "python", + "runtime-id": "596dfe80a9184851a69f62836436abe5" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 27630 + }, + "duration": 1681000, + "start": 1738250093734027000 + }, + { + "name": "graphql.parse", + "service": "graphql", + "resource": "graphql.parse", + "trace_id": 1, + "span_id": 2, + "parent_id": 1, + "type": "graphql", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.graphql", + "component": "graphql", + "graphql.source": "query { fail }" + }, + "duration": 128000, + "start": 1738250093734164000 + }, + { + "name": "graphql.validate", + "service": "graphql", + "resource": "graphql.validate", + "trace_id": 1, + "span_id": 3, + "parent_id": 1, + "type": "graphql", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.graphql", 
+ "component": "graphql", + "graphql.source": "query { fail }" + }, + "duration": 493000, + "start": 1738250093734336000 + }, + { + "name": "graphql.execute", + "service": "graphql", + "resource": "query { fail }", + "trace_id": 1, + "span_id": 4, + "parent_id": 1, + "type": "graphql", + "error": 1, + "meta": { + "_dd.base_service": "tests.contrib.graphql", + "component": "graphql", + "error.message": "'NoneType' object has no attribute 'name'\n\nGraphQL request:3:7\n2 | query {\n3 | fail\n | ^\n4 | }", + "error.stack": "Traceback (most recent call last):\n File \"/Users/quinna.halim/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio0211_graphql-core~320_pytest-randomly/lib/python3.10/site-packages/graphql/execution/execute.py\", line 521, in execute_field\n result = resolve_fn(source, info, **args)\n File \"/Users/quinna.halim/dd-trace-py/ddtrace/contrib/internal/graphql/patch.py\", line 243, in _resolver_middleware\n return next_middleware(root, info, **args)\n File \"/Users/quinna.halim/dd-trace-py/tests/contrib/graphql/test_graphql.py\", line 104, in \n result = graphql_sync(test_schema, query, root_value=None, field_resolver=lambda _type, _field: resolvers[_type.name][_field.name])\ngraphql.error.graphql_error.GraphQLError: 'NoneType' object has no attribute 'name'\n\nGraphQL request:3:7\n2 | query {\n3 | fail\n | ^\n4 | }\n", + "error.type": "graphql.error.graphql_error.GraphQLError", + "events": "[{\"name\": \"dd.graphql.query.error\", \"time_unix_nano\": 1738250093735623000, \"attributes\": {\"message\": \"'NoneType' object has no attribute 'name'\", \"type\": \"graphql.error.graphql_error.GraphQLError\", \"locations\": \"3:7\", \"stacktrace\": \"Traceback (most recent call last):\\n File \\\"/Users/quinna.halim/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio0211_graphql-core~320_pytest-randomly/lib/python3.10/site-packages/graphql/execution/execute.py\\\", line 521, in execute_field\\n result = resolve_fn(source, info, **args)\\n File \\\"/Users/quinna.halim/dd-trace-py/ddtrace/contrib/internal/graphql/patch.py\\\", line 243, in _resolver_middleware\\n return next_middleware(root, info, **args)\\n File \\\"/Users/quinna.halim/dd-trace-py/tests/contrib/graphql/test_graphql.py\\\", line 104, in \\n result = graphql_sync(test_schema, query, root_value=None, field_resolver=lambda _type, _field: resolvers[_type.name][_field.name])\\ngraphql.error.graphql_error.GraphQLError: 'NoneType' object has no attribute 'name'\\n\\nGraphQL request:3:7\\n2 | query {\\n3 | fail\\n | ^\\n4 | }\\n\", \"path\": \"fail\"}}]", + "graphql.operation.type": "query", + "graphql.source": "query { fail }" + }, + "metrics": { + "_dd.measured": 1 + }, + "duration": 761000, + "start": 1738250093734863000 + }, + { + "name": "graphql.resolve", + "service": "graphql", + "resource": "fail", + "trace_id": 1, + "span_id": 5, + "parent_id": 4, + "type": "graphql", + "error": 1, + "meta": { + "_dd.base_service": "tests.contrib.graphql", + "component": "graphql", + "error.message": "'NoneType' object has no attribute 'name'", + "error.stack": "Traceback (most recent call last):\n File \"/Users/quinna.halim/dd-trace-py/ddtrace/contrib/internal/graphql/patch.py\", line 243, in _resolver_middleware\n return next_middleware(root, info, **args)\n File \"/Users/quinna.halim/dd-trace-py/tests/contrib/graphql/test_graphql.py\", line 104, in \n result = graphql_sync(test_schema, query, 
root_value=None, field_resolver=lambda _type, _field: resolvers[_type.name][_field.name])\nAttributeError: 'NoneType' object has no attribute 'name'\n", + "error.type": "builtins.AttributeError" + }, + "duration": 424000, + "start": 1738250093734961000 + }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_batch.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_batch.json index 4788830f558..1d3c08d174c 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_batch.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_batch.json @@ -61,7 +61,6 @@ "_dd.measured": 1, "langchain.tokens.completion_tokens": 16, "langchain.tokens.prompt_tokens": 14, - "langchain.tokens.total_cost": 5.3e-05, "langchain.tokens.total_tokens": 30 }, "duration": 6742000, @@ -95,7 +94,6 @@ "_dd.measured": 1, "langchain.tokens.completion_tokens": 15, "langchain.tokens.prompt_tokens": 14, - "langchain.tokens.total_cost": 5.1000000000000006e-05, "langchain.tokens.total_tokens": 29 }, "duration": 3314000, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_complicated.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_complicated.json index 92121180dfd..ed0c9e0f55d 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_complicated.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_complicated.json @@ -25,7 +25,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 19, "langchain.tokens.prompt_tokens": 53, - "langchain.tokens.total_cost": 0.0001175, "langchain.tokens.total_tokens": 72, "process_id": 82010 }, @@ -60,7 +59,6 @@ "_dd.measured": 1, "langchain.tokens.completion_tokens": 19, "langchain.tokens.prompt_tokens": 53, - "langchain.tokens.total_cost": 0.0001175, "langchain.tokens.total_tokens": 72 }, "duration": 3680000, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_nested.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_nested.json index 3910e614529..f63c58f92e3 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_nested.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_nested.json @@ -25,7 +25,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 53, "langchain.tokens.prompt_tokens": 50, - "langchain.tokens.total_cost": 0.000181, "langchain.tokens.total_tokens": 103, "process_id": 82010 }, @@ -51,7 +50,6 @@ "_dd.measured": 1, "langchain.tokens.completion_tokens": 16, "langchain.tokens.prompt_tokens": 18, - "langchain.tokens.total_cost": 5.9e-05, "langchain.tokens.total_tokens": 34 }, "duration": 63808000, @@ -85,7 +83,6 @@ "_dd.measured": 1, "langchain.tokens.completion_tokens": 16, "langchain.tokens.prompt_tokens": 18, - "langchain.tokens.total_cost": 5.9e-05, "langchain.tokens.total_tokens": 34 }, "duration": 61552000, @@ -119,7 +116,6 @@ "_dd.measured": 1, "langchain.tokens.completion_tokens": 37, "langchain.tokens.prompt_tokens": 32, - "langchain.tokens.total_cost": 0.000122, "langchain.tokens.total_tokens": 69 }, "duration": 3288000, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_simple.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_simple.json index 9548083a9ee..66254abefd3 100644 --- 
a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_simple.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_simple.json @@ -24,7 +24,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 101, "langchain.tokens.prompt_tokens": 20, - "langchain.tokens.total_cost": 0.000232, "langchain.tokens.total_tokens": 121, "process_id": 82010 }, @@ -61,7 +60,6 @@ "_dd.measured": 1, "langchain.tokens.completion_tokens": 101, "langchain.tokens.prompt_tokens": 20, - "langchain.tokens.total_cost": 0.000232, "langchain.tokens.total_tokens": 121 }, "duration": 6142000, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_simple_async.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_simple_async.json index a0c1a1e6c53..35d458d43f5 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_simple_async.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_chain_simple_async.json @@ -24,7 +24,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 78, "langchain.tokens.prompt_tokens": 20, - "langchain.tokens.total_cost": 0.000186, "langchain.tokens.total_tokens": 98, "process_id": 82010 }, @@ -61,7 +60,6 @@ "_dd.measured": 1, "langchain.tokens.completion_tokens": 78, "langchain.tokens.prompt_tokens": 20, - "langchain.tokens.total_cost": 0.000186, "langchain.tokens.total_tokens": 98 }, "duration": 3399000, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_with_tools_openai.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_with_tools_openai.json index 2b865bb7371..99c9f2d5c3e 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_with_tools_openai.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_lcel_with_tools_openai.json @@ -37,7 +37,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 32, "langchain.tokens.prompt_tokens": 85, - "langchain.tokens.total_cost": 9.05e-05, "langchain.tokens.total_tokens": 117, "process_id": 82010 }, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_async_generate.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_async_generate.json index 57fe006a65a..55b112ca6b6 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_async_generate.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_async_generate.json @@ -43,7 +43,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 150, "langchain.tokens.prompt_tokens": 60, - "langchain.tokens.total_cost": 0.00038999999999999994, "langchain.tokens.total_tokens": 210, "process_id": 82010 }, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_sync_call_langchain_openai.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_sync_call_langchain_openai.json index dd9e0c392da..0d4c995aba4 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_sync_call_langchain_openai.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_sync_call_langchain_openai.json @@ -35,7 +35,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 83, "langchain.tokens.prompt_tokens": 20, - "langchain.tokens.total_cost": 0.00019600000000000002, 
"langchain.tokens.total_tokens": 103, "process_id": 82010 }, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_sync_generate.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_sync_generate.json index f5de86a6836..6e188b6629c 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_sync_generate.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_sync_generate.json @@ -43,7 +43,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 110, "langchain.tokens.prompt_tokens": 60, - "langchain.tokens.total_cost": 0.00031, "langchain.tokens.total_tokens": 170, "process_id": 82010 }, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_vision_generate.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_vision_generate.json index 38bf8bc9a95..d02c31a219a 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_vision_generate.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_vision_generate.json @@ -35,7 +35,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 56, "langchain.tokens.prompt_tokens": 1151, - "langchain.tokens.total_cost": 0, "langchain.tokens.total_tokens": 1207, "process_id": 34354 }, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_async.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_async.json index f1a7884c2bf..5ef01923f7b 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_async.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_async.json @@ -36,7 +36,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 12, "langchain.tokens.prompt_tokens": 10, - "langchain.tokens.total_cost": 3.9e-05, "langchain.tokens.total_tokens": 22, "process_id": 82010 }, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_sync.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_sync.json index a527a18d4a2..6c25e449a96 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_sync.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_sync.json @@ -36,7 +36,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 256, "langchain.tokens.prompt_tokens": 17, - "langchain.tokens.total_cost": 0.0005375, "langchain.tokens.total_tokens": 273, "process_id": 82010 }, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_sync_multiple_prompts.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_sync_multiple_prompts.json index fb587526043..9d5f107c31b 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_sync_multiple_prompts.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_sync_multiple_prompts.json @@ -40,7 +40,6 @@ "_sampling_priority_v1": 1, "langchain.tokens.completion_tokens": 271, "langchain.tokens.prompt_tokens": 23, - "langchain.tokens.total_cost": 0.0005765000000000001, "langchain.tokens.total_tokens": 294, "process_id": 82010 }, diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[None-None].json 
b/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[None-None].json deleted file mode 100644 index 65eec00d960..00000000000 --- a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[None-None].json +++ /dev/null @@ -1,77 +0,0 @@ -[[ - { - "name": "openai.request", - "service": "ddtrace_subprocess_dir", - "resource": "createCompletion", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "65674d7100000000", - "component": "openai", - "language": "python", - "openai.api_base": "https://api.openai.com/v1", - "openai.api_type": "open_ai", - "openai.organization.name": "datadog-4", - "openai.request.client": "OpenAI", - "openai.request.endpoint": "/v1/completions", - "openai.request.max_tokens": "10", - "openai.request.method": "POST", - "openai.request.model": "ada", - "openai.request.n": "2", - "openai.request.prompt.0": "Hello world", - "openai.request.stop": ".", - "openai.request.temperature": "0.8", - "openai.response.choices.0.finish_reason": "length", - "openai.response.choices.0.text": ", relax!\u201d I said to my laptop", - "openai.response.choices.1.finish_reason": "stop", - "openai.response.choices.1.text": " (1", - "openai.response.created": "1681852797", - "openai.response.id": "cmpl-76n1xLvRKv3mfjx7hJ41UHrHy9ar6", - "openai.response.model": "ada", - "openai.user.api_key": "sk-...key>", - "runtime-id": "8c92d3e850d9413593bf481d805039d1" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "openai.organization.ratelimit.requests.limit": 3000, - "openai.organization.ratelimit.requests.remaining": 2999, - "openai.organization.ratelimit.tokens.limit": 250000, - "openai.organization.ratelimit.tokens.remaining": 249979, - "process_id": 20673 - }, - "duration": 21745000, - "start": 1701268849462298000 - }, - { - "name": "requests.request", - "service": "openai", - "resource": "POST /v1/completions", - "trace_id": 0, - "span_id": 2, - "parent_id": 1, - "type": "http", - "error": 0, - "meta": { - "_dd.base_service": "ddtrace_subprocess_dir", - "component": "requests", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/v1 PythonBindings/0.27.2", - "out.host": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1 - }, - "duration": 2999000, - "start": 1701268849479960000 - }]] diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[None-v0].json b/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[None-v0].json deleted file mode 100644 index d6417fb5667..00000000000 --- a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[None-v0].json +++ /dev/null @@ -1,77 +0,0 @@ -[[ - { - "name": "openai.request", - "service": "ddtrace_subprocess_dir", - "resource": "createCompletion", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "65674d7200000000", - "component": "openai", - "language": "python", - "openai.api_base": "https://api.openai.com/v1", - "openai.api_type": "open_ai", - "openai.organization.name": "datadog-4", - "openai.request.client": "OpenAI", - "openai.request.endpoint": "/v1/completions", - "openai.request.max_tokens": "10", - "openai.request.method": "POST", - 
"openai.request.model": "ada", - "openai.request.n": "2", - "openai.request.prompt.0": "Hello world", - "openai.request.stop": ".", - "openai.request.temperature": "0.8", - "openai.response.choices.0.finish_reason": "length", - "openai.response.choices.0.text": ", relax!\u201d I said to my laptop", - "openai.response.choices.1.finish_reason": "stop", - "openai.response.choices.1.text": " (1", - "openai.response.created": "1681852797", - "openai.response.id": "cmpl-76n1xLvRKv3mfjx7hJ41UHrHy9ar6", - "openai.response.model": "ada", - "openai.user.api_key": "sk-...key>", - "runtime-id": "675032183b244929ba8c3a0a1c0021e5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "openai.organization.ratelimit.requests.limit": 3000, - "openai.organization.ratelimit.requests.remaining": 2999, - "openai.organization.ratelimit.tokens.limit": 250000, - "openai.organization.ratelimit.tokens.remaining": 249979, - "process_id": 20696 - }, - "duration": 20412000, - "start": 1701268850764763000 - }, - { - "name": "requests.request", - "service": "openai", - "resource": "POST /v1/completions", - "trace_id": 0, - "span_id": 2, - "parent_id": 1, - "type": "http", - "error": 0, - "meta": { - "_dd.base_service": "ddtrace_subprocess_dir", - "component": "requests", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/v1 PythonBindings/0.27.2", - "out.host": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1 - }, - "duration": 3134000, - "start": 1701268850780901000 - }]] diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[None-v1].json b/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[None-v1].json deleted file mode 100644 index 979ea768ef5..00000000000 --- a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[None-v1].json +++ /dev/null @@ -1,77 +0,0 @@ -[[ - { - "name": "openai.request", - "service": "ddtrace_subprocess_dir", - "resource": "createCompletion", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "65674d7400000000", - "component": "openai", - "language": "python", - "openai.api_base": "https://api.openai.com/v1", - "openai.api_type": "open_ai", - "openai.organization.name": "datadog-4", - "openai.request.client": "OpenAI", - "openai.request.endpoint": "/v1/completions", - "openai.request.max_tokens": "10", - "openai.request.method": "POST", - "openai.request.model": "ada", - "openai.request.n": "2", - "openai.request.prompt.0": "Hello world", - "openai.request.stop": ".", - "openai.request.temperature": "0.8", - "openai.response.choices.0.finish_reason": "length", - "openai.response.choices.0.text": ", relax!\u201d I said to my laptop", - "openai.response.choices.1.finish_reason": "stop", - "openai.response.choices.1.text": " (1", - "openai.response.created": "1681852797", - "openai.response.id": "cmpl-76n1xLvRKv3mfjx7hJ41UHrHy9ar6", - "openai.response.model": "ada", - "openai.user.api_key": "sk-...key>", - "runtime-id": "1f3499a720954236be60cf0fece4246c" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "openai.organization.ratelimit.requests.limit": 3000, - "openai.organization.ratelimit.requests.remaining": 2999, - 
"openai.organization.ratelimit.tokens.limit": 250000, - "openai.organization.ratelimit.tokens.remaining": 249979, - "process_id": 20714 - }, - "duration": 19970000, - "start": 1701268852029562000 - }, - { - "name": "http.client.request", - "service": "ddtrace_subprocess_dir", - "resource": "POST /v1/completions", - "trace_id": 0, - "span_id": 2, - "parent_id": 1, - "type": "http", - "error": 0, - "meta": { - "_dd.peer.service.source": "out.host", - "component": "requests", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/v1 PythonBindings/0.27.2", - "out.host": "api.openai.com", - "peer.service": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1 - }, - "duration": 2897000, - "start": 1701268852045569000 - }]] diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[mysvc-None].json b/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[mysvc-None].json deleted file mode 100644 index a80c1218caf..00000000000 --- a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[mysvc-None].json +++ /dev/null @@ -1,77 +0,0 @@ -[[ - { - "name": "openai.request", - "service": "mysvc", - "resource": "createCompletion", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "65674d7500000000", - "component": "openai", - "language": "python", - "openai.api_base": "https://api.openai.com/v1", - "openai.api_type": "open_ai", - "openai.organization.name": "datadog-4", - "openai.request.client": "OpenAI", - "openai.request.endpoint": "/v1/completions", - "openai.request.max_tokens": "10", - "openai.request.method": "POST", - "openai.request.model": "ada", - "openai.request.n": "2", - "openai.request.prompt.0": "Hello world", - "openai.request.stop": ".", - "openai.request.temperature": "0.8", - "openai.response.choices.0.finish_reason": "length", - "openai.response.choices.0.text": ", relax!\u201d I said to my laptop", - "openai.response.choices.1.finish_reason": "stop", - "openai.response.choices.1.text": " (1", - "openai.response.created": "1681852797", - "openai.response.id": "cmpl-76n1xLvRKv3mfjx7hJ41UHrHy9ar6", - "openai.response.model": "ada", - "openai.user.api_key": "sk-...key>", - "runtime-id": "1244eea37568412fb5bdedf9c37ed48a" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "openai.organization.ratelimit.requests.limit": 3000, - "openai.organization.ratelimit.requests.remaining": 2999, - "openai.organization.ratelimit.tokens.limit": 250000, - "openai.organization.ratelimit.tokens.remaining": 249979, - "process_id": 20736 - }, - "duration": 19953000, - "start": 1701268853284736000 - }, - { - "name": "requests.request", - "service": "openai", - "resource": "POST /v1/completions", - "trace_id": 0, - "span_id": 2, - "parent_id": 1, - "type": "http", - "error": 0, - "meta": { - "_dd.base_service": "mysvc", - "component": "requests", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/v1 PythonBindings/0.27.2", - "out.host": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1 - }, - "duration": 2837000, - "start": 1701268853300833000 - }]] diff --git 
a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[mysvc-v0].json b/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[mysvc-v0].json deleted file mode 100644 index f3f9c57f768..00000000000 --- a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[mysvc-v0].json +++ /dev/null @@ -1,77 +0,0 @@ -[[ - { - "name": "openai.request", - "service": "mysvc", - "resource": "createCompletion", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "65674d7600000000", - "component": "openai", - "language": "python", - "openai.api_base": "https://api.openai.com/v1", - "openai.api_type": "open_ai", - "openai.organization.name": "datadog-4", - "openai.request.client": "OpenAI", - "openai.request.endpoint": "/v1/completions", - "openai.request.max_tokens": "10", - "openai.request.method": "POST", - "openai.request.model": "ada", - "openai.request.n": "2", - "openai.request.prompt.0": "Hello world", - "openai.request.stop": ".", - "openai.request.temperature": "0.8", - "openai.response.choices.0.finish_reason": "length", - "openai.response.choices.0.text": ", relax!\u201d I said to my laptop", - "openai.response.choices.1.finish_reason": "stop", - "openai.response.choices.1.text": " (1", - "openai.response.created": "1681852797", - "openai.response.id": "cmpl-76n1xLvRKv3mfjx7hJ41UHrHy9ar6", - "openai.response.model": "ada", - "openai.user.api_key": "sk-...key>", - "runtime-id": "12b4a711854c44f681695957b545dcf5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "openai.organization.ratelimit.requests.limit": 3000, - "openai.organization.ratelimit.requests.remaining": 2999, - "openai.organization.ratelimit.tokens.limit": 250000, - "openai.organization.ratelimit.tokens.remaining": 249979, - "process_id": 20750 - }, - "duration": 25352000, - "start": 1701268854568669000 - }, - { - "name": "requests.request", - "service": "openai", - "resource": "POST /v1/completions", - "trace_id": 0, - "span_id": 2, - "parent_id": 1, - "type": "http", - "error": 0, - "meta": { - "_dd.base_service": "mysvc", - "component": "requests", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/v1 PythonBindings/0.27.2", - "out.host": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1 - }, - "duration": 3922000, - "start": 1701268854588758000 - }]] diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[mysvc-v1].json b/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[mysvc-v1].json deleted file mode 100644 index 0696ae54454..00000000000 --- a/tests/snapshots/tests.contrib.openai.test_openai_v0.test_integration_service_name[mysvc-v1].json +++ /dev/null @@ -1,77 +0,0 @@ -[[ - { - "name": "openai.request", - "service": "mysvc", - "resource": "createCompletion", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "65674d7700000000", - "component": "openai", - "language": "python", - "openai.api_base": "https://api.openai.com/v1", - "openai.api_type": "open_ai", - "openai.organization.name": "datadog-4", - "openai.request.client": "OpenAI", - "openai.request.endpoint": "/v1/completions", - "openai.request.max_tokens": "10", - 
"openai.request.method": "POST", - "openai.request.model": "ada", - "openai.request.n": "2", - "openai.request.prompt.0": "Hello world", - "openai.request.stop": ".", - "openai.request.temperature": "0.8", - "openai.response.choices.0.finish_reason": "length", - "openai.response.choices.0.text": ", relax!\u201d I said to my laptop", - "openai.response.choices.1.finish_reason": "stop", - "openai.response.choices.1.text": " (1", - "openai.response.created": "1681852797", - "openai.response.id": "cmpl-76n1xLvRKv3mfjx7hJ41UHrHy9ar6", - "openai.response.model": "ada", - "openai.user.api_key": "sk-...key>", - "runtime-id": "03e7664126ea4fe99e0aefec4efd003c" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "openai.organization.ratelimit.requests.limit": 3000, - "openai.organization.ratelimit.requests.remaining": 2999, - "openai.organization.ratelimit.tokens.limit": 250000, - "openai.organization.ratelimit.tokens.remaining": 249979, - "process_id": 20772 - }, - "duration": 19966000, - "start": 1701268855885252000 - }, - { - "name": "http.client.request", - "service": "mysvc", - "resource": "POST /v1/completions", - "trace_id": 0, - "span_id": 2, - "parent_id": 1, - "type": "http", - "error": 0, - "meta": { - "_dd.peer.service.source": "out.host", - "component": "requests", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/v1 PythonBindings/0.27.2", - "out.host": "api.openai.com", - "peer.service": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1 - }, - "duration": 2849000, - "start": 1701268855901267000 - }]] diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_async.json b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_async.json index 9cfd3a107cd..ab2f74aa60b 100644 --- a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_async.json +++ b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_async.json @@ -44,6 +44,9 @@ "openai.organization.ratelimit.requests.remaining": 2999, "openai.organization.ratelimit.tokens.limit": 250000, "openai.organization.ratelimit.tokens.remaining": 249979, + "openai.response.usage.completion_tokens": 12, + "openai.response.usage.prompt_tokens": 2, + "openai.response.usage.total_tokens": 14, "process_id": 24448 }, "duration": 17466000, diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-None].json b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-None].json index 3361ea38b5c..d7faa3f22e2 100644 --- a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-None].json +++ b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-None].json @@ -39,6 +39,9 @@ "openai.organization.ratelimit.requests.remaining": 2999, "openai.organization.ratelimit.tokens.limit": 250000, "openai.organization.ratelimit.tokens.remaining": 249979, + "openai.response.usage.completion_tokens": 12, + "openai.response.usage.prompt_tokens": 2, + "openai.response.usage.total_tokens": 14, "process_id": 20806 }, "duration": 16421000, diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-v0].json b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-v0].json index 9815b378221..3af343273f6 100644 --- 
a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-v0].json +++ b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-v0].json @@ -39,6 +39,9 @@ "openai.organization.ratelimit.requests.remaining": 2999, "openai.organization.ratelimit.tokens.limit": 250000, "openai.organization.ratelimit.tokens.remaining": 249979, + "openai.response.usage.completion_tokens": 12, + "openai.response.usage.prompt_tokens": 2, + "openai.response.usage.total_tokens": 14, "process_id": 20827 }, "duration": 17257000, diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-v1].json b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-v1].json index 3c9e6612d78..7f51ec196a6 100644 --- a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-v1].json +++ b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[None-v1].json @@ -39,6 +39,9 @@ "openai.organization.ratelimit.requests.remaining": 2999, "openai.organization.ratelimit.tokens.limit": 250000, "openai.organization.ratelimit.tokens.remaining": 249979, + "openai.response.usage.completion_tokens": 12, + "openai.response.usage.prompt_tokens": 2, + "openai.response.usage.total_tokens": 14, "process_id": 20839 }, "duration": 17259000, diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-None].json b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-None].json index fb11e4200a0..35268ec5092 100644 --- a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-None].json +++ b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-None].json @@ -39,6 +39,9 @@ "openai.organization.ratelimit.requests.remaining": 2999, "openai.organization.ratelimit.tokens.limit": 250000, "openai.organization.ratelimit.tokens.remaining": 249979, + "openai.response.usage.completion_tokens": 12, + "openai.response.usage.prompt_tokens": 2, + "openai.response.usage.total_tokens": 14, "process_id": 20848 }, "duration": 17004000, diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-v0].json b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-v0].json index 63341870faa..999dbb7529c 100644 --- a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-v0].json +++ b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-v0].json @@ -39,6 +39,9 @@ "openai.organization.ratelimit.requests.remaining": 2999, "openai.organization.ratelimit.tokens.limit": 250000, "openai.organization.ratelimit.tokens.remaining": 249979, + "openai.response.usage.completion_tokens": 12, + "openai.response.usage.prompt_tokens": 2, + "openai.response.usage.total_tokens": 14, "process_id": 20864 }, "duration": 17872000, diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-v1].json b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-v1].json index 4ff254b053c..76d352a3f59 100644 --- a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-v1].json +++ b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_service_name[mysvc-v1].json @@ -39,6 +39,9 @@ 
"openai.organization.ratelimit.requests.remaining": 2999, "openai.organization.ratelimit.tokens.limit": 250000, "openai.organization.ratelimit.tokens.remaining": 249979, + "openai.response.usage.completion_tokens": 12, + "openai.response.usage.prompt_tokens": 2, + "openai.response.usage.total_tokens": 14, "process_id": 20888 }, "duration": 16629000, diff --git a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_sync.json b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_sync.json index 9cfd3a107cd..ab2f74aa60b 100644 --- a/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_sync.json +++ b/tests/snapshots/tests.contrib.openai.test_openai_v1.test_integration_sync.json @@ -44,6 +44,9 @@ "openai.organization.ratelimit.requests.remaining": 2999, "openai.organization.ratelimit.tokens.limit": 250000, "openai.organization.ratelimit.tokens.remaining": 249979, + "openai.response.usage.completion_tokens": 12, + "openai.response.usage.prompt_tokens": 2, + "openai.response.usage.total_tokens": 14, "process_id": 24448 }, "duration": 17466000, diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_analytics_with_rate.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_analytics_with_rate.json new file mode 100644 index 00000000000..cdc76343f08 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_analytics_with_rate.json @@ -0,0 +1,38 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_dd1.sr.eausr": 0.5, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 516916, + "start": 1692651820581556875 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_analytics_without_rate.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_analytics_without_rate.json new file mode 100644 index 00000000000..9a2bb9f2e4f --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_analytics_without_rate.json @@ -0,0 +1,38 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_dd1.sr.eausr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 340708, + "start": 1692651820591814875 + }]] diff --git 
a/tests/snapshots/tests.contrib.valkey.test_valkey.test_basics.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_basics.json new file mode 100644 index 00000000000..e6da74211bb --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_basics.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 335292, + "start": 1692651820600962708 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_custom_cmd_length.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_custom_cmd_length.json new file mode 100644 index 00000000000..5614e912961 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_custom_cmd_length.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET here..." + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 326167, + "start": 1692651820609597416 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_custom_cmd_length_env.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_custom_cmd_length_env.json new file mode 100644 index 00000000000..75f058f3700 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_custom_cmd_length_env.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "ea409d0295db44adbf88dda3e4806547", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET here-is..." 
+ }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 20043, + "valkey.args_length": 2 + }, + "duration": 404084, + "start": 1692651821117540958 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_env_user_specified_valkey_service.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_env_user_specified_valkey_service.json new file mode 100644 index 00000000000..f4b7d26f3a6 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_env_user_specified_valkey_service.json @@ -0,0 +1,74 @@ +[[ + { + "name": "valkey.command", + "service": "myvalkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "e263ff9ad1cd43099216a11ca5e19377", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 20046, + "valkey.args_length": 2 + }, + "duration": 501125, + "start": 1692651821692035875 + }], +[ + { + "name": "valkey.command", + "service": "cfg-valkey", + "resource": "GET", + "trace_id": 1, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "e263ff9ad1cd43099216a11ca5e19377", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 20046, + "valkey.args_length": 2 + }, + "duration": 329333, + "start": 1692651821722196292 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_full_command_in_resource_config.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_full_command_in_resource_config.json new file mode 100644 index 00000000000..c447412ee09 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_full_command_in_resource_config.json @@ -0,0 +1,71 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET put_key_in_resource", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3a1f7ba9b1ab42f4858e5effd03877ef", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET put_key_in_resource" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 65639, + "valkey.args_length": 2 + }, + "duration": 2978000, + "start": 1698858795260743000 + }], +[ + { + "name": 
"valkey.command", + "service": "valkey", + "resource": "SET pipeline-cmd1 1\nSET pipeline-cmd2 2", + "trace_id": 1, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3a1f7ba9b1ab42f4858e5effd03877ef", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "SET pipeline-cmd1 1\nSET pipeline-cmd2 2" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 65639, + "valkey.pipeline_length": 2 + }, + "duration": 1408000, + "start": 1698858795278553000 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_full_command_in_resource_env.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_full_command_in_resource_env.json new file mode 100644 index 00000000000..f7f89e8565a --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_full_command_in_resource_env.json @@ -0,0 +1,71 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET put_key_in_resource", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "", + "_dd.p.dm": "-0", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "451464ac55804a488cf355b1d96c7002", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET put_key_in_resource" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 65646, + "valkey.args_length": 2 + }, + "duration": 3112000, + "start": 1698858796156355000 + }], +[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET pipeline-cmd1 1\nSET pipeline-cmd2 2", + "trace_id": 1, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "", + "_dd.p.dm": "-0", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "451464ac55804a488cf355b1d96c7002", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "SET pipeline-cmd1 1\nSET pipeline-cmd2 2" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 65646, + "valkey.pipeline_length": 2 + }, + "duration": 1246000, + "start": 1698858796167913000 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_long_command.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_long_command.json new file mode 100644 index 00000000000..15378c706ba --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_long_command.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "MGET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + 
"runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "MGET 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 36..." + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 1001 + }, + "duration": 3428042, + "start": 1692651821775339875 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_meta_override.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_meta_override.json new file mode 100644 index 00000000000..5edc6b45665 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_meta_override.json @@ -0,0 +1,38 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "cheese": "camembert", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 474500, + "start": 1692651821790889125 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_opentracing.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_opentracing.json new file mode 100644 index 00000000000..749bd3d3307 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_opentracing.json @@ -0,0 +1,56 @@ +[[ + { + "name": "valkey_get", + "service": "valkey_svc", + "resource": "valkey_get", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + 
"_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "language": "python", + "runtime-id": "3cf1df7fb079462ab81608355e026651" + }, + "metrics": { + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 19999 + }, + "duration": 534179, + "start": 1692651821803009280 + }, + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 2, + "parent_id": 1, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "out.host": "localhost", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "valkey.args_length": 2 + }, + "duration": 358500, + "start": 1692651821803151542 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_pipeline_immediate.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_pipeline_immediate.json new file mode 100644 index 00000000000..5559b6a5959 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_pipeline_immediate.json @@ -0,0 +1,72 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "SET a 1" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 3 + }, + "duration": 343500, + "start": 1692651821823333917 + }], +[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET", + "trace_id": 1, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "SET a 1" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.pipeline_length": 1 + }, + "duration": 158750, + "start": 1692651821823756750 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_pipeline_traced.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_pipeline_traced.json new file mode 100644 index 00000000000..c5e90a181b3 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_pipeline_traced.json @@ -0,0 +1,36 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET\nRPUSH\nHGETALL", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + 
"component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "SET blah 32\nRPUSH foo \u00e9\u00e9\nHGETALL xxx" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.pipeline_length": 3 + }, + "duration": 589917, + "start": 1692651821833429417 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_service_precedence.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_service_precedence.json new file mode 100644 index 00000000000..27979635427 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_service_precedence.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "env-valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "app-svc", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "0f782133fa34462daf85cad95bb55fd2", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 20052, + "valkey.args_length": 2 + }, + "duration": 423750, + "start": 1692651822324419751 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_unicode.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_unicode.json new file mode 100644 index 00000000000..09f6f46fd3d --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_unicode.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "localhost", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET \ud83d\ude10" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 512083, + "start": 1692651822408832834 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey.test_user_specified_service.json b/tests/snapshots/tests.contrib.valkey.test_valkey.test_user_specified_service.json new file mode 100644 index 00000000000..7a91612554e --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey.test_user_specified_service.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "mysvc", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + 
"language": "python", + "out.host": "localhost", + "runtime-id": "9d4dd102c4394715976611e15b961233", + "server.address": "localhost", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 20056, + "valkey.args_length": 2 + }, + "duration": 439500, + "start": 1692651822941153668 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_basic_request.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_basic_request.json new file mode 100644 index 00000000000..70b0e166d97 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_basic_request.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 595750, + "start": 1692651823036625793 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_client_name.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_client_name.json new file mode 100644 index 00000000000..4f7b2688d76 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_client_name.json @@ -0,0 +1,57 @@ +[[ + { + "name": "web-request", + "service": "test", + "resource": "web-request", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "language": "python", + "runtime-id": "3cf1df7fb079462ab81608355e026651" + }, + "metrics": { + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 19999 + }, + "duration": 828125, + "start": 1692651823188535376 + }, + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 2, + "parent_id": 1, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "out.host": "127.0.0.1", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.client_name": "testing-client-name", + "valkey.raw_command": "GET blah" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "valkey.args_length": 2 + }, + "duration": 541041, + "start": 1692651823188798168 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_connection_error.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_connection_error.json new file mode 100644 index 00000000000..64609dd8614 --- /dev/null +++ 
b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_connection_error.json @@ -0,0 +1,40 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 1, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "error.message": "whatever", + "error.stack": "Traceback (most recent call last):\n File \"/root/project/ddtrace/contrib/trace_utils_valkey.py\", line 117, in _trace_valkey_cmd\n yield span\n File \"/root/project/ddtrace/contrib/valkey/asyncio_patch.py\", line 22, in traced_async_execute_command\n return await _run_valkey_command_async(span=span, func=func, args=args, kwargs=kwargs)\n File \"/root/project/ddtrace/contrib/valkey/asyncio_patch.py\", line 41, in _run_valkey_command_async\n result = await func(*args, **kwargs)\n File \"/root/project/.riot/venv_py31011_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_valkey~41/lib/python3.10/site-packages/valkey/asyncio/client.py\", line 509, in execute_command\n conn = self.connection or await pool.get_connection(command_name, **options)\n File \"/root/.pyenv/versions/3.10.11/lib/python3.10/unittest/mock.py\", line 2234, in _execute_mock_call\n raise effect\nvalkey.exceptions.ConnectionError: whatever\n", + "error.type": "valkey.exceptions.ConnectionError", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "dc59875580884b52bebd2f9c402238f8", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "GET foo" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 2340, + "valkey.args_length": 2 + }, + "duration": 935417, + "start": 1695409673533997174 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_decoding_non_utf8_args.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_decoding_non_utf8_args.json new file mode 100644 index 00000000000..649d89db933 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_decoding_non_utf8_args.json @@ -0,0 +1,73 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "SET \\x80foo \\x80abc" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 3 + }, + "duration": 512917, + "start": 1692651823066497751 + }], +[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 1, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + 
"out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "GET \\x80foo" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 330333, + "start": 1692651823067101001 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_decoding_non_utf8_pipeline_args.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_decoding_non_utf8_pipeline_args.json new file mode 100644 index 00000000000..c22d2347b5e --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_decoding_non_utf8_pipeline_args.json @@ -0,0 +1,36 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET\nSET\nGET\nGET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "SET \\x80blah boo\nSET foo \\x80abc\nGET \\x80blah\nGET foo" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.pipeline_length": 4 + }, + "duration": 404709, + "start": 1692651823079707584 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_long_command.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_long_command.json new file mode 100644 index 00000000000..9f4e40ffd1b --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_long_command.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "MGET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "MGET 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 
258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 36..." + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 1001 + }, + "duration": 5689625, + "start": 1692651823091333793 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_override_service_name.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_override_service_name.json new file mode 100644 index 00000000000..f3d0bce583a --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_override_service_name.json @@ -0,0 +1,110 @@ +[[ + { + "name": "valkey.command", + "service": "myvalkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 297666, + "start": 1692651823109161293 + }], +[ + { + "name": "valkey.command", + "service": "myvalkey", + "resource": "SET", + "trace_id": 1, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "SET cheese my-cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 3 + }, + "duration": 230084, + "start": 1692651823109550709 + }], +[ + { + "name": "valkey.command", + "service": "myvalkey", + "resource": "GET", + "trace_id": 2, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + 
"valkey.args_length": 2 + }, + "duration": 160875, + "start": 1692651823109840043 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_parenting.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_parenting.json new file mode 100644 index 00000000000..c9a38d7fa31 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_parenting.json @@ -0,0 +1,85 @@ +[[ + { + "name": "web-request", + "service": "test", + "resource": "web-request", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "language": "python", + "runtime-id": "3cf1df7fb079462ab81608355e026651" + }, + "metrics": { + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 19999 + }, + "duration": 953000, + "start": 1692651823176740209 + }, + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET", + "trace_id": 0, + "span_id": 2, + "parent_id": 1, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "out.host": "127.0.0.1", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "SET blah boo" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "valkey.args_length": 3 + }, + "duration": 270791, + "start": 1692651823176857918 + }, + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 3, + "parent_id": 1, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "out.host": "127.0.0.1", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "GET blah" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "db.row_count": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "valkey.args_length": 2 + }, + "duration": 499000, + "start": 1692651823177170168 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_pin.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_pin.json new file mode 100644 index 00000000000..91c995cc259 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_pin.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "my-valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "GET cheese" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 327417, + "start": 1692651823121474251 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_pipeline_traced.json 
b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_pipeline_traced.json new file mode 100644 index 00000000000..e267216e24f --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_pipeline_traced.json @@ -0,0 +1,36 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET\nSET\nGET\nGET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "SET blah boo\nSET foo bar\nGET blah\nGET foo" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.pipeline_length": 4 + }, + "duration": 384125, + "start": 1692651823134602834 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_pipeline_traced_context_manager_transaction.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_pipeline_traced_context_manager_transaction.json new file mode 100644 index 00000000000..72633ef5e16 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_pipeline_traced_context_manager_transaction.json @@ -0,0 +1,36 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET\nSET\nGET\nGET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "SET blah boo\nSET foo bar\nGET blah\nGET foo" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.pipeline_length": 4 + }, + "duration": 507125, + "start": 1692651823152247501 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_two_traced_pipelines.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_two_traced_pipelines.json new file mode 100644 index 00000000000..60ff68c9b1b --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_two_traced_pipelines.json @@ -0,0 +1,84 @@ +[[ + { + "name": "web-request", + "service": "test", + "resource": "web-request", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "language": "python", + "runtime-id": "3cf1df7fb079462ab81608355e026651" + }, + "metrics": { + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 19999 + }, + "duration": 940000, + "start": 1692651823164019209 + }, + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET\nGET", + "trace_id": 0, + "span_id": 2, + "parent_id": 1, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + 
"_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "out.host": "127.0.0.1", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "SET blah boo\nGET blah" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "valkey.pipeline_length": 2 + }, + "duration": 352833, + "start": 1692651823164207293 + }, + { + "name": "valkey.command", + "service": "valkey", + "resource": "SET\nGET", + "trace_id": 0, + "span_id": 3, + "parent_id": 1, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "out.host": "127.0.0.1", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "SET foo bar\nGET foo" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "network.destination.port": 6379, + "out.valkey_db": 0, + "valkey.pipeline_length": 2 + }, + "duration": 310042, + "start": 1692651823164624126 + }]] diff --git a/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_unicode_request.json b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_unicode_request.json new file mode 100644 index 00000000000..c6db207fd51 --- /dev/null +++ b/tests/snapshots/tests.contrib.valkey.test_valkey_asyncio.test_unicode_request.json @@ -0,0 +1,37 @@ +[[ + { + "name": "valkey.command", + "service": "valkey", + "resource": "GET", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "valkey", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.valkey", + "_dd.p.dm": "-0", + "_dd.p.tid": "654a694400000000", + "component": "valkey", + "db.system": "valkey", + "language": "python", + "out.host": "127.0.0.1", + "runtime-id": "3cf1df7fb079462ab81608355e026651", + "server.address": "127.0.0.1", + "span.kind": "client", + "valkey.raw_command": "GET \ud83d\ude10" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "db.row_count": 0, + "network.destination.port": 6379, + "out.valkey_db": 0, + "process_id": 19999, + "valkey.args_length": 2 + }, + "duration": 300041, + "start": 1692651823049427543 + }]] diff --git a/tests/suitespec.yml b/tests/suitespec.yml index d9da18df66d..69c6b19e8d8 100644 --- a/tests/suitespec.yml +++ b/tests/suitespec.yml @@ -76,7 +76,7 @@ components: - ddtrace/__init__.py - ddtrace/py.typed - ddtrace/version.py - - ddtrace/settings/config.py + - ddtrace/settings/_config.py - src/native/* datastreams: - ddtrace/internal/datastreams/* @@ -117,7 +117,7 @@ components: - ddtrace/trace/* - ddtrace/constants.py - ddtrace/settings/__init__.py - - ddtrace/settings/config.py + - ddtrace/settings/_config.py - ddtrace/settings/http.py - ddtrace/settings/exceptions.py - ddtrace/settings/integration.py diff --git a/tests/telemetry/test_writer.py b/tests/telemetry/test_writer.py index 39d672a1c01..2de23c9fba3 100644 --- a/tests/telemetry/test_writer.py +++ b/tests/telemetry/test_writer.py @@ -118,7 +118,6 @@ def test_app_started_event(telemetry_writer, test_agent_session, mock_time): {"name": "DD_SPAN_SAMPLING_RULES_FILE", "origin": "unknown", "value": None}, {"name": "DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED", "origin": "unknown", "value": True}, {"name": "DD_TRACE_AGENT_TIMEOUT_SECONDS", "origin": "unknown", "value": 2.0}, - {"name": "DD_TRACE_ANALYTICS_ENABLED", "origin": "unknown", "value": False}, {"name": "DD_TRACE_API_VERSION", 
"origin": "unknown", "value": None}, {"name": "DD_TRACE_CLIENT_IP_ENABLED", "origin": "unknown", "value": None}, {"name": "DD_TRACE_COMPUTE_STATS", "origin": "unknown", "value": False}, @@ -225,7 +224,6 @@ def test_app_started_event_configuration_override(test_agent_session, run_python env["DD_RUNTIME_METRICS_ENABLED"] = "True" env["DD_SERVICE_MAPPING"] = "default_dd_service:remapped_dd_service" env["DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED"] = "True" - env["DD_TRACE_ANALYTICS_ENABLED"] = "True" env["DD_TRACE_CLIENT_IP_ENABLED"] = "True" env["DD_TRACE_COMPUTE_STATS"] = "True" env["DD_TRACE_DEBUG"] = "True" @@ -237,7 +235,6 @@ def test_app_started_event_configuration_override(test_agent_session, run_python env["DD_TRACE_PROPAGATION_STYLE_INJECT"] = "tracecontext" env["DD_REMOTE_CONFIGURATION_ENABLED"] = "True" env["DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS"] = "1" - env["DD_TRACE_SAMPLE_RATE"] = "0.5" env["DD_TRACE_RATE_LIMIT"] = "50" env["DD_TRACE_SAMPLING_RULES"] = '[{"sample_rate":1.0,"service":"xyz","name":"abc"}]' env["DD_PROFILING_ENABLED"] = "True" @@ -356,7 +353,6 @@ def test_app_started_event_configuration_override(test_agent_session, run_python {"name": "DD_EXCEPTION_REPLAY_CAPTURE_MAX_FRAMES", "origin": "default", "value": 8}, {"name": "DD_EXCEPTION_REPLAY_ENABLED", "origin": "env_var", "value": True}, {"name": "DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED", "origin": "default", "value": False}, - {"name": "DD_HTTP_CLIENT_TAG_QUERY_STRING", "origin": "default", "value": None}, {"name": "DD_IAST_DEDUPLICATION_ENABLED", "origin": "default", "value": True}, {"name": "DD_IAST_ENABLED", "origin": "default", "value": False}, {"name": "DD_IAST_MAX_CONCURRENT_REQUESTS", "origin": "default", "value": 2}, @@ -433,7 +429,6 @@ def test_app_started_event_configuration_override(test_agent_session, run_python {"name": "DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED", "origin": "env_var", "value": True}, {"name": "DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED", "origin": "default", "value": False}, {"name": "DD_TRACE_AGENT_TIMEOUT_SECONDS", "origin": "default", "value": 2.0}, - {"name": "DD_TRACE_ANALYTICS_ENABLED", "origin": "env_var", "value": True}, {"name": "DD_TRACE_API_VERSION", "origin": "env_var", "value": "v0.5"}, {"name": "DD_TRACE_CLIENT_IP_ENABLED", "origin": "env_var", "value": True}, {"name": "DD_TRACE_CLIENT_IP_HEADER", "origin": "default", "value": None}, @@ -456,13 +451,11 @@ def test_app_started_event_configuration_override(test_agent_session, run_python {"name": "DD_TRACE_PROPAGATION_STYLE_INJECT", "origin": "env_var", "value": "tracecontext"}, {"name": "DD_TRACE_RATE_LIMIT", "origin": "env_var", "value": 50}, {"name": "DD_TRACE_REPORT_HOSTNAME", "origin": "default", "value": False}, - {"name": "DD_TRACE_SAMPLE_RATE", "origin": "env_var", "value": 0.5}, { "name": "DD_TRACE_SAMPLING_RULES", "origin": "env_var", "value": '[{"sample_rate":1.0,"service":"xyz","name":"abc"}]', }, - {"name": "DD_TRACE_SPAN_AGGREGATOR_RLOCK", "origin": "default", "value": True}, {"name": "DD_TRACE_SPAN_TRACEBACK_MAX_SIZE", "origin": "default", "value": 30}, {"name": "DD_TRACE_STARTUP_LOGS", "origin": "env_var", "value": True}, {"name": "DD_TRACE_WRITER_BUFFER_SIZE_BYTES", "origin": "env_var", "value": 1000}, @@ -483,6 +476,7 @@ def test_app_started_event_configuration_override(test_agent_session, run_python {"name": "python_build_gnu_type", "origin": "unknown", "value": sysconfig.get_config_var("BUILD_GNU_TYPE")}, {"name": "python_host_gnu_type", "origin": "unknown", "value": 
sysconfig.get_config_var("HOST_GNU_TYPE")}, {"name": "python_soabi", "origin": "unknown", "value": sysconfig.get_config_var("SOABI")}, + {"name": "trace_sample_rate", "origin": "default", "value": 1.0}, ] assert configurations == expected, configurations diff --git a/tests/tracer/test_correlation_log_context.py b/tests/tracer/test_correlation_log_context.py index b7200b8b38f..e2e5dda6b37 100644 --- a/tests/tracer/test_correlation_log_context.py +++ b/tests/tracer/test_correlation_log_context.py @@ -1,8 +1,8 @@ import pytest from ddtrace import config -from ddtrace.trace import Tracer from ddtrace.trace import tracer +from tests.utils import DummyTracer def global_config(config): @@ -10,7 +10,7 @@ def global_config(config): config.env = "test-env" config.version = "test-version" global tracer - tracer = Tracer() + tracer = DummyTracer() yield config.service = config.env = config.version = None @@ -33,9 +33,9 @@ def format_trace_id(span): @pytest.mark.subprocess() def test_get_log_correlation_service(): """Ensure expected DDLogRecord service is generated via get_correlation_log_record.""" - from ddtrace.trace import Tracer from ddtrace.trace import tracer from tests.tracer.test_correlation_log_context import format_trace_id + from tests.utils import DummyTracer from tests.utils import override_global_config with override_global_config(dict(service="test-service", env="test-env", version="test-version")): @@ -49,7 +49,7 @@ def test_get_log_correlation_service(): "version": "test-version", } - test_tracer = Tracer() + test_tracer = DummyTracer() with test_tracer.trace("test-span-2", service="span-service") as span2: dd_log_record = test_tracer.get_log_correlation_context() assert dd_log_record == { @@ -65,12 +65,12 @@ def test_get_log_correlation_service(): def test_get_log_correlation_context_basic(): """Ensure expected DDLogRecord is generated via get_correlation_log_record.""" from ddtrace.trace import Context - from ddtrace.trace import Tracer from tests.tracer.test_correlation_log_context import format_trace_id + from tests.utils import DummyTracer from tests.utils import override_global_config with override_global_config(dict(service="test-service", env="test-env", version="test-version")): - tracer = Tracer() + tracer = DummyTracer() with tracer.trace("test-span-1") as span1: dd_log_record = tracer.get_log_correlation_context() assert dd_log_record == { @@ -80,7 +80,7 @@ def test_get_log_correlation_context_basic(): "env": "test-env", "version": "test-version", }, dd_log_record - test_tracer = Tracer() + test_tracer = DummyTracer() with test_tracer.trace("test-span-2") as span2: dd_log_record = test_tracer.get_log_correlation_context() assert dd_log_record == { @@ -130,9 +130,9 @@ def test_get_log_correlation_context_opentracer(): @pytest.mark.subprocess() def test_get_log_correlation_context_no_active_span(): """Ensure empty DDLogRecord generated if no active span.""" - from ddtrace.trace import Tracer + from tests.utils import DummyTracer - tracer = Tracer() + tracer = DummyTracer() dd_log_record = tracer.get_log_correlation_context() assert dd_log_record == { "span_id": "0", @@ -146,9 +146,8 @@ def test_get_log_correlation_context_no_active_span(): @pytest.mark.subprocess() def test_get_log_correlation_context_disabled_tracer(): """Ensure get_correlation_log_record returns None if tracer is disabled.""" - from ddtrace.trace import Tracer + from ddtrace.trace import tracer - tracer = Tracer() tracer.enabled = False with tracer.trace("test-span"): dd_log_record = 
tracer.get_log_correlation_context() diff --git a/tests/tracer/test_encoders.py b/tests/tracer/test_encoders.py index 7006bc6b95d..4fe48a2a838 100644 --- a/tests/tracer/test_encoders.py +++ b/tests/tracer/test_encoders.py @@ -869,19 +869,3 @@ def test_json_encoder_traces_bytes(): assert "\\x80span.a" == span_a["name"] assert "\x80span.b" == span_b["name"] assert "\x80span.b" == span_c["name"] - - -@pytest.mark.subprocess(env={"DD_TRACE_API_VERSION": "v0.3"}) -def test_v03_trace_api_deprecation(): - import warnings - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - from ddtrace.trace import tracer - - assert tracer._writer._api_version == "v0.4" - assert len(warns) == 1, warns - assert ( - warns[0].message.args[0] == "DD_TRACE_API_VERSION=v0.3 is deprecated and will be " - "removed in version '3.0.0': Traces will be submitted to the v0.4/traces agent endpoint instead." - ), warns[0].message diff --git a/tests/tracer/test_gitmetadata.py b/tests/tracer/test_gitmetadata.py index cb03d59f7e2..d6c35a2de0c 100644 --- a/tests/tracer/test_gitmetadata.py +++ b/tests/tracer/test_gitmetadata.py @@ -8,10 +8,9 @@ import pytest -import ddtrace from ddtrace.internal import gitmetadata from tests.subprocesstest import run_in_subprocess -from tests.utils import DummyWriter +from tests.utils import DummyTracer from tests.utils import TracerTestCase @@ -44,8 +43,8 @@ class GitMetadataTestCase(TracerTestCase): ) ) def test_gitmetadata_from_package(self): - tracer = ddtrace.trace.Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() + with tracer.trace("span") as s: pass @@ -59,8 +58,8 @@ def test_gitmetadata_from_package(self): ) ) def test_gitmetadata_from_DD_TAGS(self): - tracer = ddtrace.trace.Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() + with tracer.trace("span") as s: pass @@ -80,8 +79,8 @@ def test_gitmetadata_from_DD_TAGS(self): ) ) def test_gitmetadata_from_ENV(self): - tracer = ddtrace.trace.Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() + with tracer.trace("span") as s: pass @@ -104,8 +103,8 @@ def test_gitmetadata_from_ENV(self): ) ) def test_gitmetadata_disabled(self): - tracer = ddtrace.trace.Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() + with tracer.trace("span") as s: pass @@ -123,8 +122,8 @@ def test_gitmetadata_disabled(self): ) ) def test_gitmetadata_package_without_metadata(self): - tracer = ddtrace.trace.Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() + with tracer.trace("span") as s: pass @@ -143,8 +142,8 @@ def test_gitmetadata_package_without_metadata(self): ) ) def test_gitmetadata_from_env_filtering_https(self): - tracer = ddtrace.trace.Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() + with tracer.trace("span") as s: pass @@ -163,8 +162,8 @@ def test_gitmetadata_from_env_filtering_https(self): ) ) def test_gitmetadata_from_ddtags_filtering_https(self): - tracer = ddtrace.trace.Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() + with tracer.trace("span") as s: pass @@ -184,8 +183,8 @@ def test_gitmetadata_from_ddtags_filtering_https(self): ) ) def test_gitmetadata_from_env_filtering_ssh(self): - tracer = ddtrace.trace.Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() + with tracer.trace("span") as s: pass @@ -204,8 +203,8 @@ def test_gitmetadata_from_env_filtering_ssh(self): ) ) def 
test_gitmetadata_from_ddtags_filtering_ssh(self): - tracer = ddtrace.trace.Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() + with tracer.trace("span") as s: pass diff --git a/tests/tracer/test_memory_leak.py b/tests/tracer/test_memory_leak.py index 7fdcd7589f6..b13cc0ec4ee 100644 --- a/tests/tracer/test_memory_leak.py +++ b/tests/tracer/test_memory_leak.py @@ -1,28 +1,21 @@ """ Variety of test cases ensuring that ddtrace does not leak memory. """ - -import gc -from threading import Thread -from typing import TYPE_CHECKING from weakref import WeakValueDictionary import pytest +from ddtrace.trace import Span from ddtrace.trace import Tracer - - -if TYPE_CHECKING: # pragma: no cover - from ddtrace.trace import Span # noqa:F401 +from tests.utils import DummyTracer @pytest.fixture -def tracer() -> Tracer: - return Tracer() +def tracer() -> DummyTracer: + return DummyTracer() -def trace(weakdict: WeakValueDictionary, tracer: Tracer, *args, **kwargs): - # type: (...) -> Span +def trace(weakdict: WeakValueDictionary, tracer: Tracer, *args, **kwargs) -> Span: """Return a span created from ``tracer`` and add it to the given weak dictionary. @@ -34,7 +27,14 @@ def trace(weakdict: WeakValueDictionary, tracer: Tracer, *args, **kwargs): return s -def test_leak(tracer): +@pytest.mark.subprocess +def test_leak(): + import gc + from weakref import WeakValueDictionary + + from ddtrace.trace import tracer + from tests.tracer.test_memory_leak import trace + wd = WeakValueDictionary() with trace(wd, tracer, "span1") as span: with trace(wd, tracer, "span2") as span2: @@ -44,15 +44,23 @@ def test_leak(tracer): # The spans are still open and referenced so they should not be gc'd gc.collect() assert len(wd) == 2 + tracer.flush() del span, span2 gc.collect() assert len(wd) == 0 -def test_single_thread_single_trace(tracer): +@pytest.mark.subprocess +def test_single_thread_single_trace(): """ Ensure a simple trace doesn't leak span objects. """ + import gc + from weakref import WeakValueDictionary + + from ddtrace.trace import tracer + from tests.tracer.test_memory_leak import trace + wd = WeakValueDictionary() with trace(wd, tracer, "span1"): with trace(wd, tracer, "span2"): @@ -64,10 +72,17 @@ def test_single_thread_single_trace(tracer): assert len(wd) == 0 -def test_single_thread_multi_trace(tracer): +@pytest.mark.subprocess +def test_single_thread_multi_trace(): """ Ensure a trace in a thread is properly garbage collected. """ + import gc + from weakref import WeakValueDictionary + + from ddtrace.trace import tracer + from tests.tracer.test_memory_leak import trace + wd = WeakValueDictionary() for _ in range(1000): with trace(wd, tracer, "span1"): @@ -75,17 +90,25 @@ def test_single_thread_multi_trace(tracer): pass with trace(wd, tracer, "span3"): pass - + tracer.flush() # Once these references are deleted then the spans should no longer be # referenced by anything and should be gc'd. gc.collect() assert len(wd) == 0 -def test_multithread_trace(tracer): +@pytest.mark.subprocess +def test_multithread_trace(): """ Ensure a trace that crosses thread boundaries is properly garbage collected. 
""" + import gc + from threading import Thread + from weakref import WeakValueDictionary + + from ddtrace.trace import tracer + from tests.tracer.test_memory_leak import trace + wd = WeakValueDictionary() state = [] @@ -102,6 +125,7 @@ def _target(ctx): # Ensure thread finished successfully assert state == [1] + tracer.flush() del span gc.collect() assert len(wd) == 0 diff --git a/tests/tracer/test_processors.py b/tests/tracer/test_processors.py index ff19453555b..8c9955f9ffd 100644 --- a/tests/tracer/test_processors.py +++ b/tests/tracer/test_processors.py @@ -26,7 +26,6 @@ from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.trace import Context from ddtrace.trace import Span -from ddtrace.trace import Tracer from tests.utils import DummyTracer from tests.utils import DummyWriter from tests.utils import override_global_config @@ -244,7 +243,7 @@ def test_aggregator_partial_flush_2_spans(): def test_trace_top_level_span_processor_partial_flushing(): """Parent span and child span have the same service name""" - tracer = Tracer() + tracer = DummyTracer() tracer._configure( partial_flush_enabled=True, partial_flush_min_spans=2, @@ -271,8 +270,7 @@ def test_trace_top_level_span_processor_partial_flushing(): def test_trace_top_level_span_processor_same_service_name(): """Parent span and child span have the same service name""" - tracer = Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() with tracer.trace("parent", service="top_level_test") as parent: with tracer.trace("child") as child: @@ -285,8 +283,7 @@ def test_trace_top_level_span_processor_same_service_name(): def test_trace_top_level_span_processor_different_service_name(): """Parent span and child span have the different service names""" - tracer = Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() with tracer.trace("parent", service="top_level_test_service") as parent: with tracer.trace("child", service="top_level_test_service2") as child: @@ -299,8 +296,7 @@ def test_trace_top_level_span_processor_different_service_name(): def test_trace_top_level_span_processor_orphan_span(): """Trace chuck does not contain parent span""" - tracer = Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() with tracer.trace("parent") as parent: pass @@ -388,7 +384,7 @@ def test_span_creation_metrics(): def test_changing_tracer_sampler_changes_tracesamplingprocessor_sampler(): """Changing the tracer sampler should change the sampling processor's sampler""" - tracer = Tracer() + tracer = DummyTracer() # get processor for aggregator in tracer._deferred_processors: if type(aggregator) is SpanAggregator: @@ -632,9 +628,8 @@ def test_endpoint_call_counter_processor_disabled(): def test_endpoint_call_counter_processor_real_tracer(): - tracer = Tracer() + tracer = DummyTracer() tracer._endpoint_call_counter_span_processor.enable() - tracer._configure(writer=DummyWriter()) with tracer.trace("parent", service="top_level_test_service", resource="a", span_type=SpanTypes.WEB): with tracer.trace("child", service="top_level_test_service2"): @@ -656,8 +651,7 @@ def test_endpoint_call_counter_processor_real_tracer(): def test_trace_tag_processor_adds_chunk_root_tags(): - tracer = Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() with tracer.trace("parent") as parent: with tracer.trace("child") as child: @@ -679,7 +673,7 @@ def on_span_finish(self, span): tp = TestProcessor() tp.register() - tracer = Tracer() + tracer = DummyTracer() with 
tracer.trace("test") as span: assert span.get_tag("on_start") == "ok" diff --git a/tests/tracer/test_propagation.py b/tests/tracer/test_propagation.py index e34167ce04f..f79116826d3 100644 --- a/tests/tracer/test_propagation.py +++ b/tests/tracer/test_propagation.py @@ -47,6 +47,7 @@ from tests.contrib.fastapi.conftest import test_spans as fastapi_test_spans # noqa:F401 from tests.contrib.fastapi.conftest import tracer # noqa:F401 +from ..utils import flaky from ..utils import override_env from ..utils import override_global_config @@ -836,6 +837,7 @@ def test_extract_128bit_trace_ids_tracecontext(): assert child_span.trace_id == trace_id +@flaky(1735812000, reason="FIXME: Failing due to the global tracer being used in all tests") def test_last_dd_span_id(): non_dd_remote_context = HTTPPropagator.extract( { diff --git a/tests/tracer/test_sampler.py b/tests/tracer/test_sampler.py index 813dc1be439..f54c7de55da 100644 --- a/tests/tracer/test_sampler.py +++ b/tests/tracer/test_sampler.py @@ -1,6 +1,5 @@ from __future__ import division -import re import unittest import mock @@ -250,7 +249,7 @@ def test_sampling_rule_init_defaults(): def test_sampling_rule_init(): - a_regex = re.compile(r"\.request$") + a_regex = "*request" a_string = "my-service" rule = SamplingRule( @@ -261,7 +260,7 @@ def test_sampling_rule_init(): assert rule.sample_rate == 0.0, "SamplingRule should store the rate it's initialized with" assert rule.service.pattern == a_string, "SamplingRule should store the service it's initialized with" - assert rule.name == a_regex, "SamplingRule should store the name regex it's initialized with" + assert rule.name.pattern == a_regex, "SamplingRule should store the name regex it's initialized with" @pytest.mark.parametrize( @@ -272,38 +271,13 @@ def test_sampling_rule_init(): (SamplingRule(sample_rate=0.0), SamplingRule(sample_rate=0.0), True), (SamplingRule(sample_rate=0.5), SamplingRule(sample_rate=1.0), False), (SamplingRule(sample_rate=1.0, service="my-svc"), SamplingRule(sample_rate=1.0, service="my-svc"), True), - ( - SamplingRule(sample_rate=1.0, service=re.compile("my-svc")), - SamplingRule(sample_rate=1.0, service=re.compile("my-svc")), - True, - ), (SamplingRule(sample_rate=1.0, service="my-svc"), SamplingRule(sample_rate=1.0, service="other-svc"), False), (SamplingRule(sample_rate=1.0, service="my-svc"), SamplingRule(sample_rate=0.5, service="my-svc"), False), - ( - SamplingRule(sample_rate=1.0, service=re.compile("my-svc")), - SamplingRule(sample_rate=0.5, service=re.compile("my-svc")), - False, - ), - ( - SamplingRule(sample_rate=1.0, service=re.compile("my-svc")), - SamplingRule(sample_rate=1.0, service=re.compile("other")), - False, - ), ( SamplingRule(sample_rate=1.0, name="span.name"), SamplingRule(sample_rate=1.0, name="span.name"), True, ), - ( - SamplingRule(sample_rate=1.0, name=re.compile("span.name")), - SamplingRule(sample_rate=1.0, name=re.compile("span.name")), - True, - ), - ( - SamplingRule(sample_rate=1.0, name=re.compile("span.name")), - SamplingRule(sample_rate=1.0, name=re.compile("span.other")), - False, - ), ( SamplingRule(sample_rate=1.0, name="span.name"), SamplingRule(sample_rate=0.5, name="span.name"), @@ -316,16 +290,6 @@ def test_sampling_rule_init(): SamplingRule(sample_rate=1.0, service="my-svc", name="span.name"), True, ), - ( - SamplingRule(sample_rate=1.0, service="my-svc", name=re.compile("span.name")), - SamplingRule(sample_rate=1.0, service="my-svc", name=re.compile("span.name")), - True, - ), - ( - SamplingRule(sample_rate=1.0, 
service=re.compile("my-svc"), name=re.compile("span.name")), - SamplingRule(sample_rate=1.0, service=re.compile("my-svc"), name=re.compile("span.name")), - True, - ), ( SamplingRule(sample_rate=1.0, service="my-svc", name="span.name"), SamplingRule(sample_rate=0.5, service="my-svc", name="span.name"), @@ -491,15 +455,6 @@ def test_sampling_rule_init_via_env(): ("test.span", None, False), ("test.span", "test.span", True), ("test.span", "test_span", False), - ("test.span", re.compile(r"^test\.span$"), True), - ("test_span", re.compile(r"^test.span$"), True), - ("test.span", re.compile(r"^test_span$"), False), - ("test.span", re.compile(r"test"), True), - ("test.span", re.compile(r"test\.span|another\.span"), True), - ("another.span", re.compile(r"test\.span|another\.span"), True), - ("test.span", lambda name: "span" in name, True), - ("test.span", lambda name: "span" not in name, False), - ("test.span", lambda name: 1 / 0, False), ] ], ) @@ -518,20 +473,8 @@ def test_sampling_rule_matches_name(span, rule, span_expected_to_match_rule): ("my-service", None, False), (None, "tests.tracer", True), ("tests.tracer", "my-service", False), - ("tests.tracer", re.compile(r"my-service"), False), - ("tests.tracer", lambda service: "service" in service, False), ("my-service", "my-service", True), ("my-service", "my_service", False), - ("my-service", re.compile(r"^my-"), True), - ("my_service", re.compile(r"^my[_-]"), True), - ("my-service", re.compile(r"^my_"), False), - ("my-service", re.compile(r"my-service"), True), - ("my-service", re.compile(r"my"), True), - ("my-service", re.compile(r"my-service|another-service"), True), - ("another-service", re.compile(r"my-service|another-service"), True), - ("my-service", lambda service: "service" in service, True), - ("my-service", lambda service: "service" not in service, False), - ("my-service", lambda service: 1 / 0, False), ] ], ) @@ -553,7 +496,7 @@ def test_sampling_rule_matches_service(span, rule, span_expected_to_match_rule): SamplingRule( sample_rate=1, name="test.span", - service=re.compile(r"^my-"), + service="my-*", ), True, ), @@ -567,7 +510,7 @@ def test_sampling_rule_matches_service(span, rule, span_expected_to_match_rule): SamplingRule( sample_rate=0, name="test.span", - service=re.compile(r"^my-"), + service="my-*", ), True, ), @@ -580,7 +523,7 @@ def test_sampling_rule_matches_service(span, rule, span_expected_to_match_rule): SamplingRule( sample_rate=1, name="test_span", - service=re.compile(r"^my-"), + service="my-*", ), False, ), @@ -593,7 +536,7 @@ def test_sampling_rule_matches_service(span, rule, span_expected_to_match_rule): SamplingRule( sample_rate=1, name="test.span", - service=re.compile(r"^service-"), + service="service-", ), False, ), @@ -605,26 +548,6 @@ def test_sampling_rule_matches(span, rule, span_expected_to_match_rule): ) -def test_sampling_rule_matches_exception(): - def pattern(prop): - raise Exception("an error occurred") - - rule = SamplingRule(sample_rate=1.0, name=pattern) - span = create_span(name="test.span") - - with mock.patch("ddtrace._trace.sampling_rule.log") as mock_log: - assert ( - rule.matches(span) is False - ), "SamplingRule should not match when its name pattern function throws an exception" - mock_log.warning.assert_called_once_with( - "%r pattern %r failed with %r", - rule, - pattern, - "test.span", - exc_info=True, - ) - - @pytest.mark.subprocess( parametrize={"DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED": ["true", "false"]}, ) @@ -645,21 +568,6 @@ def test_sampling_rule_sample(): ) 
-@pytest.mark.subprocess(env={"DD_TRACE_SAMPLE_RATE": "0.2"}) -def test_sampling_rate_config_deprecated(): - import warnings - - with warnings.catch_warnings(record=True) as ws: - warnings.simplefilter("always") - - from ddtrace import config - - assert config._trace_sample_rate == 0.2 - - assert len(ws) >= 1 - assert any(w for w in ws if "DD_TRACE_SAMPLE_RATE is deprecated" in str(w.message)), [w.message for w in ws] - - def test_sampling_rule_sample_rate_1(): rule = SamplingRule(sample_rate=1) @@ -727,15 +635,6 @@ def test_datadog_sampler_init(): SamplingRule(sample_rate=0.5) ], "DatadogSampler initialized with no arguments and envvars set should hold a sample_rate from the envvar" - with override_global_config(dict(_trace_sample_rate=0)): - sampler = DatadogSampler() - assert ( - sampler.limiter.rate_limit == DatadogSampler.DEFAULT_RATE_LIMIT - ), "DatadogSampler initialized with DD_TRACE_SAMPLE_RATE=0 envvar should hold the default rate limit" - assert sampler.rules == [ - SamplingRule(sample_rate=0) - ], "DatadogSampler initialized with DD_TRACE_SAMPLE_RATE=0 envvar should hold sample_rate=0" - with override_global_config(dict(_trace_sample_rate="asdf")): with pytest.raises(ValueError): DatadogSampler() diff --git a/tests/tracer/test_single_span_sampling_rules.py b/tests/tracer/test_single_span_sampling_rules.py index ef33ecfd619..7fcd0ef7a54 100644 --- a/tests/tracer/test_single_span_sampling_rules.py +++ b/tests/tracer/test_single_span_sampling_rules.py @@ -10,9 +10,7 @@ from ddtrace.internal.sampling import SpanSamplingRule from ddtrace.internal.sampling import _get_file_json from ddtrace.internal.sampling import get_span_sampling_rules -from ddtrace.trace import Tracer from tests.utils import DummyTracer -from tests.utils import DummyWriter from ..utils import override_global_config @@ -129,8 +127,7 @@ def test_env_rules_cause_matching_span_to_be_sampled(): sampling_rules = get_span_sampling_rules() assert sampling_rules[0]._service_matcher.pattern == "test_service" assert sampling_rules[0]._name_matcher.pattern == "test_name" - tracer = Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() span = traced_function(sampling_rules[0], tracer=tracer) assert_sampling_decision_tags(span) @@ -141,8 +138,7 @@ def test_env_rules_dont_cause_non_matching_span_to_be_sampled(): sampling_rules = get_span_sampling_rules() assert sampling_rules[0]._service_matcher.pattern == "test_ser" assert sampling_rules[0]._name_matcher.pattern == "test_na" - tracer = Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() span = traced_function(sampling_rules[0], tracer=tracer) assert_sampling_decision_tags(span, sample_rate=None, mechanism=None, limit=None) @@ -153,8 +149,7 @@ def test_single_span_rules_not_applied_when_span_sampled_by_trace_sampling(): sampling_rules = get_span_sampling_rules() assert sampling_rules[0]._service_matcher.pattern == "test_service" assert sampling_rules[0]._name_matcher.pattern == "test_name" - tracer = Tracer() - tracer._configure(writer=DummyWriter()) + tracer = DummyTracer() span = traced_function(sampling_rules[0], tracer=tracer, trace_sampling=True) assert sampling_rules[0].match(span) is True assert_sampling_decision_tags(span, sample_rate=None, mechanism=None, limit=None, trace_sampling=True) diff --git a/tests/tracer/test_trace_utils.py b/tests/tracer/test_trace_utils.py index ca564cac394..6820b0c4d76 100644 --- a/tests/tracer/test_trace_utils.py +++ b/tests/tracer/test_trace_utils.py @@ -28,7 +28,6 @@ from ddtrace.trace import 
Context from ddtrace.trace import Pin from ddtrace.trace import Span -from ddtrace.trace import Tracer from tests.appsec.utils import asm_context from tests.utils import override_global_config @@ -277,9 +276,8 @@ def test_int_service(int_config, pin, config_val, default, global_service, expec assert trace_utils.int_service(pin, int_config.myint, default) == expected -def test_int_service_integration(int_config): +def test_int_service_integration(int_config, tracer): pin = Pin() - tracer = Tracer() assert trace_utils.int_service(pin, int_config.myint) == "tests.tracer" with override_global_config(dict(service="global-svc")): @@ -905,8 +903,7 @@ def test_distributed_tracing_enabled(int_config, props, default, expected): assert trace_utils.distributed_tracing_enabled(int_config.myint, **kwargs) == expected, (props, default, expected) -def test_activate_distributed_headers_enabled(int_config): - tracer = Tracer() +def test_activate_distributed_headers_enabled(int_config, tracer): int_config.myint["distributed_tracing_enabled"] = True headers = { HTTP_HEADER_PARENT_ID: "12345", @@ -925,8 +922,7 @@ def test_activate_distributed_headers_enabled(int_config): assert context.span_id == 12345 -def test_activate_distributed_headers_disabled(int_config): - tracer = Tracer() +def test_activate_distributed_headers_disabled(int_config, tracer): int_config.myint["distributed_tracing_enabled"] = False headers = { HTTP_HEADER_PARENT_ID: "12345", @@ -941,16 +937,14 @@ def test_activate_distributed_headers_disabled(int_config): assert tracer.context_provider.active() is None -def test_activate_distributed_headers_no_headers(int_config): - tracer = Tracer() +def test_activate_distributed_headers_no_headers(int_config, tracer): int_config.myint["distributed_tracing_enabled"] = True trace_utils.activate_distributed_headers(tracer, int_config=int_config.myint, request_headers=None) assert tracer.context_provider.active() is None -def test_activate_distributed_headers_override_true(int_config): - tracer = Tracer() +def test_activate_distributed_headers_override_true(int_config, tracer): int_config.myint["distributed_tracing_enabled"] = False headers = { HTTP_HEADER_PARENT_ID: "12345", @@ -964,8 +958,7 @@ def test_activate_distributed_headers_override_true(int_config): assert context.span_id == 12345 -def test_activate_distributed_headers_override_false(int_config): - tracer = Tracer() +def test_activate_distributed_headers_override_false(int_config, tracer): int_config.myint["distributed_tracing_enabled"] = True headers = { HTTP_HEADER_PARENT_ID: "12345", @@ -977,8 +970,7 @@ def test_activate_distributed_headers_override_false(int_config): assert tracer.context_provider.active() is None -def test_activate_distributed_headers_existing_context(int_config): - tracer = Tracer() +def test_activate_distributed_headers_existing_context(int_config, tracer): int_config.myint["distributed_tracing_enabled"] = True headers = { @@ -993,8 +985,7 @@ def test_activate_distributed_headers_existing_context(int_config): assert tracer.context_provider.active() == ctx -def test_activate_distributed_headers_existing_context_different_trace_id(int_config): - tracer = Tracer() +def test_activate_distributed_headers_existing_context_different_trace_id(int_config, tracer): int_config.myint["distributed_tracing_enabled"] = True headers = { diff --git a/tests/tracer/test_tracer.py b/tests/tracer/test_tracer.py index 1aa1c42bf1d..85d8be52a36 100644 --- a/tests/tracer/test_tracer.py +++ b/tests/tracer/test_tracer.py @@ -9,7 +9,6 @@ from os 
import getpid import threading from unittest.case import SkipTest -import weakref import mock import pytest @@ -30,8 +29,7 @@ from ddtrace.constants import VERSION_KEY from ddtrace.contrib.internal.trace_utils import set_user from ddtrace.ext import user -from ddtrace.internal._encoding import MsgpackEncoderV04 -from ddtrace.internal._encoding import MsgpackEncoderV05 +import ddtrace.internal from ddtrace.internal.compat import PYTHON_VERSION_INFO from ddtrace.internal.rate_limiter import RateLimiter from ddtrace.internal.serverless import has_aws_lambda_agent_extension @@ -40,8 +38,9 @@ from ddtrace.internal.writer import LogWriter from ddtrace.settings import Config from ddtrace.trace import Context -from ddtrace.trace import Tracer +from ddtrace.trace import tracer as global_tracer from tests.subprocesstest import run_in_subprocess +from tests.utils import DummyTracer from tests.utils import TracerTestCase from tests.utils import override_global_config @@ -485,32 +484,6 @@ def test_adding_mapped_services(self): pass assert self.tracer._services == set(["one", "three"]) - def test_configure_dogstatsd_url_host_port(self): - tracer = Tracer() - tracer._configure(dogstatsd_url="foo:1234") - assert tracer._writer.dogstatsd.host == "foo" - assert tracer._writer.dogstatsd.port == 1234 - - tracer = Tracer() - writer = AgentWriter("http://localhost:8126") - tracer._configure(writer=writer, dogstatsd_url="foo:1234") - assert tracer._writer.dogstatsd.host == "foo" - assert tracer._writer.dogstatsd.port == 1234 - - def test_configure_dogstatsd_url_socket(self): - tracer = Tracer() - tracer._configure(dogstatsd_url="unix:///foo.sock") - assert tracer._writer.dogstatsd.host is None - assert tracer._writer.dogstatsd.port is None - assert tracer._writer.dogstatsd.socket_path == "/foo.sock" - - tracer = Tracer() - writer = AgentWriter("http://localhost:8126") - tracer._configure(writer=writer, dogstatsd_url="unix:///foo.sock") - assert tracer._writer.dogstatsd.host is None - assert tracer._writer.dogstatsd.port is None - assert tracer._writer.dogstatsd.socket_path == "/foo.sock" - def test_tracer_set_user(self): with self.trace("fake_span") as span: set_user( @@ -637,34 +610,17 @@ def test_tracer_set_user_propagation_string_error(self): @pytest.mark.subprocess(env=dict(DD_AGENT_PORT="", DD_AGENT_HOST="", DD_TRACE_AGENT_URL="")) def test_tracer_url(): - import pytest - import ddtrace - t = ddtrace.trace.Tracer() - assert t._writer.agent_url == "http://localhost:8126" - - t = ddtrace.trace.Tracer(url="http://foobar:12") - assert t._writer.agent_url == "http://foobar:12" - - t = ddtrace.trace.Tracer(url="unix:///foobar") - assert t._writer.agent_url == "unix:///foobar" - - t = ddtrace.trace.Tracer(url="http://localhost") - assert t._writer.agent_url == "http://localhost" - - t = ddtrace.trace.Tracer(url="https://localhost") - assert t._writer.agent_url == "https://localhost" - - with pytest.raises(ValueError) as e: - ddtrace.trace.Tracer(url="foo://foobar:12") - assert ( - str(e.value) == "Unsupported protocol 'foo' in intake URL 'foo://foobar:12'. 
Must be one of: http, https, unix" - ) + assert ddtrace.trace.tracer._writer.agent_url == "http://localhost:8126" +@pytest.mark.subprocess() def test_tracer_shutdown_no_timeout(): - t = ddtrace.trace.Tracer() + import mock + + from ddtrace.internal.writer import AgentWriter + from ddtrace.trace import tracer as t with mock.patch.object(AgentWriter, "stop") as mock_stop: with mock.patch.object(AgentWriter, "join") as mock_join: @@ -674,8 +630,12 @@ def test_tracer_shutdown_no_timeout(): mock_join.assert_not_called() +@pytest.mark.subprocess() def test_tracer_configure_writer_stop_unstarted(): - t = ddtrace.trace.Tracer() + import mock + + from ddtrace.trace import tracer as t + t._writer = mock.Mock(wraps=t._writer) orig_writer = t._writer @@ -684,8 +644,12 @@ def test_tracer_configure_writer_stop_unstarted(): assert orig_writer.stop.called +@pytest.mark.subprocess() def test_tracer_configure_writer_stop_started(): - t = ddtrace.trace.Tracer() + import mock + + from ddtrace.trace import tracer as t + t._writer = mock.Mock(wraps=t._writer) orig_writer = t._writer @@ -697,8 +661,12 @@ def test_tracer_configure_writer_stop_started(): orig_writer.stop.assert_called_once_with() +@pytest.mark.subprocess() def test_tracer_shutdown_timeout(): - t = ddtrace.trace.Tracer() + import mock + + from ddtrace.internal.writer import AgentWriter + from ddtrace.trace import tracer as t with mock.patch.object(AgentWriter, "stop") as mock_stop: with t.trace("something"): @@ -709,7 +677,11 @@ def test_tracer_shutdown_timeout(): def test_tracer_shutdown(): - t = ddtrace.trace.Tracer() + import mock + + from ddtrace.internal.writer import AgentWriter + from ddtrace.trace import tracer as t + t.shutdown() with mock.patch.object(AgentWriter, "write") as mock_write: @@ -720,7 +692,12 @@ def test_tracer_shutdown(): def test_tracer_shutdown_warning(): - t = ddtrace.trace.Tracer() + import logging + + import mock + + from ddtrace.trace import tracer as t + t.shutdown() with mock.patch.object(logging.Logger, "warning") as mock_logger: @@ -734,30 +711,6 @@ def test_tracer_shutdown_warning(): ) -def test_tracer_dogstatsd_url(): - t = ddtrace.trace.Tracer() - assert t._writer.dogstatsd.host == "localhost" - assert t._writer.dogstatsd.port == 8125 - - t = ddtrace.trace.Tracer(dogstatsd_url="foobar:12") - assert t._writer.dogstatsd.host == "foobar" - assert t._writer.dogstatsd.port == 12 - - t = ddtrace.trace.Tracer(dogstatsd_url="udp://foobar:12") - assert t._writer.dogstatsd.host == "foobar" - assert t._writer.dogstatsd.port == 12 - - t = ddtrace.trace.Tracer(dogstatsd_url="/var/run/statsd.sock") - assert t._writer.dogstatsd.socket_path == "/var/run/statsd.sock" - - t = ddtrace.trace.Tracer(dogstatsd_url="unix:///var/run/statsd.sock") - assert t._writer.dogstatsd.socket_path == "/var/run/statsd.sock" - - with pytest.raises(ValueError) as e: - t = ddtrace.trace.Tracer(dogstatsd_url="foo://foobar:12") - assert str(e) == "Unknown url format for `foo://foobar:12`" - - @pytest.mark.skip(reason="Fails to Pickle RateLimiter in the Tracer") @pytest.mark.subprocess def test_tracer_fork(): @@ -811,7 +764,7 @@ def task(t, errors): def test_tracer_with_version(): - t = ddtrace.trace.Tracer() + t = DummyTracer() # With global `config.version` defined with override_global_config(dict(version="1.2.3")): @@ -838,7 +791,7 @@ def test_tracer_with_version(): def test_tracer_with_env(): - t = ddtrace.trace.Tracer() + t = DummyTracer() # With global `config.env` defined with override_global_config(dict(env="prod")): @@ -960,33 +913,13 @@ def 
test_version_service_mapping(self): def test_detect_agentless_env_with_lambda(self): assert in_aws_lambda() assert not has_aws_lambda_agent_extension() - tracer = Tracer() - assert isinstance(tracer._writer, LogWriter) - tracer._configure(enabled=True) - assert isinstance(tracer._writer, LogWriter) - - @run_in_subprocess(env_overrides=dict(AWS_LAMBDA_FUNCTION_NAME="my-func")) - def test_detect_agent_config_with_lambda_extension(self): - def mock_os_path_exists(path): - return path == "/opt/extensions/datadog-agent" - - assert in_aws_lambda() - - with mock.patch("os.path.exists", side_effect=mock_os_path_exists): - assert has_aws_lambda_agent_extension() - - tracer = Tracer() - assert isinstance(tracer._writer, AgentWriter) - assert tracer._writer._sync_mode - - tracer._configure(enabled=False) - assert isinstance(tracer._writer, AgentWriter) - assert tracer._writer._sync_mode + assert isinstance(ddtrace.tracer._writer, LogWriter) + ddtrace.tracer._configure(enabled=True) + assert isinstance(ddtrace.tracer._writer, LogWriter) @run_in_subprocess(env_overrides=dict(AWS_LAMBDA_FUNCTION_NAME="my-func", DD_AGENT_HOST="localhost")) def test_detect_agent_config(self): - tracer = Tracer() - assert isinstance(tracer._writer, AgentWriter) + assert isinstance(global_tracer._writer, AgentWriter) @run_in_subprocess(env_overrides=dict(DD_TAGS="key1:value1,key2:value2")) def test_dd_tags(self): @@ -1001,7 +934,7 @@ def test_dd_tags_invalid(self): @run_in_subprocess(env_overrides=dict(DD_TAGS="service:mysvc,env:myenv,version:myvers")) def test_tags_from_DD_TAGS(self): - t = ddtrace.trace.Tracer() + t = DummyTracer() with t.trace("test") as s: assert s.service == "mysvc" assert s.get_tag("env") == "myenv" @@ -1016,33 +949,29 @@ def test_tags_from_DD_TAGS(self): ) ) def test_tags_from_DD_TAGS_precedence(self): - t = ddtrace.trace.Tracer() - with t.trace("test") as s: + with global_tracer.trace("test") as s: assert s.service == "svc" assert s.get_tag("env") == "env" assert s.get_tag("version") == "0.123" @run_in_subprocess(env_overrides=dict(DD_TAGS="service:mysvc,env:myenv,version:myvers")) def test_tags_from_DD_TAGS_override(self): - t = ddtrace.trace.Tracer() ddtrace.config.env = "env" ddtrace.config.service = "service" ddtrace.config.version = "0.123" - with t.trace("test") as s: + with global_tracer.trace("test") as s: assert s.service == "service" assert s.get_tag("env") == "env" assert s.get_tag("version") == "0.123" def test_tracer_set_runtime_tags(): - t = ddtrace.trace.Tracer() - with t.start_span("foobar") as span: + with global_tracer.start_span("foobar") as span: pass assert len(span.get_tag("runtime-id")) - t2 = ddtrace.trace.Tracer() - with t2.start_span("foobaz") as span2: + with global_tracer.start_span("foobaz") as span2: pass assert span.get_tag("runtime-id") == span2.get_tag("runtime-id") @@ -1084,7 +1013,7 @@ def test_tracer_runtime_tags_cross_execution(tracer): def test_start_span_hooks(): - t = ddtrace.trace.Tracer() + t = DummyTracer() result = {} @@ -1099,7 +1028,7 @@ def store_span(span): def test_deregister_start_span_hooks(): - t = ddtrace.trace.Tracer() + t = DummyTracer() result = {} @@ -1119,9 +1048,8 @@ def store_span(span): def test_enable(): import os - import ddtrace + from ddtrace.trace import tracer as t2 - t2 = ddtrace.trace.Tracer() if os.environ["DD_TRACE_ENABLED"] == "true": assert t2.enabled else: @@ -1170,7 +1098,7 @@ def thread_target(): def test_runtime_id_parent_only(): - tracer = ddtrace.trace.Tracer() + tracer = DummyTracer() # Parent spans should have 
runtime-id with tracer.trace("test") as s: @@ -1221,18 +1149,6 @@ def test_runtime_id_fork(): assert exit_code == 12 -def test_multiple_tracer_ctx(): - t1 = ddtrace.trace.Tracer() - t2 = ddtrace.trace.Tracer() - - with t1.trace("") as s1: - with t2.trace("") as s2: - pass - - assert s2.parent_id == s1.span_id - assert s2.trace_id == s1.trace_id - - def test_filters(tracer, test_spans): class FilterAll(object): def process_trace(self, trace): @@ -1413,12 +1329,10 @@ def _test_partial_flush(self): def test_unicode_config_vals(): - t = ddtrace.trace.Tracer() - with override_global_config(dict(version="😇", env="😇")): - with t.trace("1"): + with global_tracer.trace("1"): pass - t.shutdown() + global_tracer.flush() def test_ctx(tracer, test_spans): @@ -1664,45 +1578,25 @@ def override_service_mapping(service_mapping): ddtrace.config.service_mapping = {} # Test single mapping - with override_service_mapping("foo:bar"), ddtrace.trace.Tracer().trace("renaming", service="foo") as span: + with override_service_mapping("foo:bar"), global_tracer.trace("renaming", service="foo") as span: assert span.service == "bar" # Test multiple mappings - with override_service_mapping("foo:bar,sna:fu"), ddtrace.trace.Tracer().trace("renaming", service="sna") as span: + with override_service_mapping("foo:bar,sna:fu"), global_tracer.trace("renaming", service="sna") as span: assert span.service == "fu" # Test colliding mappings - with override_service_mapping("foo:bar,foo:foobar"), ddtrace.trace.Tracer().trace( - "renaming", service="foo" - ) as span: + with override_service_mapping("foo:bar,foo:foobar"), global_tracer.trace("renaming", service="foo") as span: assert span.service == "foobar" # Test invalid service mapping with override_service_mapping("foo;bar,sna:fu"): - with ddtrace.trace.Tracer().trace("passthru", service="foo") as _: + with global_tracer.trace("passthru", service="foo") as _: assert _.service == "foo" - with ddtrace.trace.Tracer().trace("renaming", "sna") as _: + with global_tracer.trace("renaming", "sna") as _: assert _.service == "fu" -@pytest.mark.subprocess(env=dict(DD_AGENT_PORT="", DD_AGENT_HOST="", DD_TRACE_AGENT_URL="")) -def test_configure_url_partial(): - import ddtrace - - tracer = ddtrace.trace.Tracer() - tracer._configure(hostname="abc") - assert tracer._writer.agent_url == "http://abc:8126" - tracer._configure(port=123) - assert tracer._writer.agent_url == "http://abc:123" - - tracer = ddtrace.trace.Tracer(url="http://abc") - assert tracer._writer.agent_url == "http://abc" - tracer._configure(port=123) - assert tracer._writer.agent_url == "http://abc:123" - tracer._configure(port=431) - assert tracer._writer.agent_url == "http://abc:431" - - @pytest.mark.subprocess(env={"DD_TRACE_AGENT_URL": "bad://localhost:1234"}) def test_bad_agent_url(): import pytest @@ -1910,16 +1804,16 @@ def test_fork_pid(): assert exit_code == 12 +@pytest.mark.subprocess def test_tracer_api_version(): - t = Tracer() - assert isinstance(t._writer._encoder, MsgpackEncoderV05) + from ddtrace.internal.encoding import MsgpackEncoderV05 + from ddtrace.trace import tracer as t - t._configure(api_version="v0.4") - assert isinstance(t._writer._encoder, MsgpackEncoderV04) + assert isinstance(t._writer._encoder, MsgpackEncoderV05) -@pytest.mark.parametrize("enabled", [True, False]) -def test_tracer_memory_leak_span_processors(enabled): +@pytest.mark.subprocess(parametrize={"DD_TRACE_ENABLED": ["true", "false"]}) +def test_tracer_memory_leak_span_processors(): """ Test whether the tracer or span processors will hold onto 
span references after the trace is complete. @@ -1927,16 +1821,20 @@ def test_tracer_memory_leak_span_processors(enabled): This is a regression test for the tracer not calling on_span_finish of SpanAggregator when the tracer was disabled and traces leaking. """ + import gc + import weakref + + from ddtrace.trace import TraceFilter + from ddtrace.trace import tracer as t + spans = weakref.WeakSet() # Filter to ensure we don't send the traces to the writer - class DropAllFilter: + class DropAllFilter(TraceFilter): def process_trace(self, trace): return None - t = Tracer() - t.enabled = enabled - t._configure(trace_processors=[DropAllFilter()]) + t.configure(trace_processors=[DropAllFilter()]) for _ in range(5): with t.trace("test") as span: @@ -1944,6 +1842,7 @@ def process_trace(self, trace): # Be sure to dereference the last Span held by the local variable `span` span = None + t.flush() # Force gc gc.collect() @@ -1984,11 +1883,9 @@ def test_finish_span_with_ancestors(tracer): assert span3.finished -def test_ctx_api(): +def test_ctx_api(tracer): from ddtrace.internal import core - tracer = Tracer() - assert core.get_item("key") is None with tracer.trace("root") as span: @@ -2019,7 +1916,7 @@ def test_asm_standalone_configuration(sca_enabled, appsec_enabled, iast_enabled) with override_env({"DD_APPSEC_SCA_ENABLED": sca_enabled}): ddtrace.config._reset() - tracer = ddtrace.trace.Tracer() + tracer = DummyTracer() tracer._configure(appsec_enabled=appsec_enabled, iast_enabled=iast_enabled, appsec_standalone_enabled=True) if sca_enabled == "true": assert bool(ddtrace.config._sca_enabled) is True @@ -2038,10 +1935,9 @@ def test_asm_standalone_configuration(sca_enabled, appsec_enabled, iast_enabled) def test_gc_not_used_on_root_spans(): - tracer = ddtrace.trace.Tracer() gc.freeze() - with tracer.trace("test-event"): + with ddtrace.tracer.trace("test-event"): pass # There should be no more span objects lingering around. 
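(Aside before the next hunk: the dominant refactor in these test files replaces ad-hoc `ddtrace.trace.Tracer()` construction with either the global `ddtrace.trace.tracer` singleton or `tests.utils.DummyTracer`. `DummyTracer` pairs the tracer with an in-memory writer so tests assert on captured spans instead of agent traffic, which is why the `tracer._configure(writer=DummyWriter())` calls above disappear. A minimal sketch of that capture idea follows; `CapturingWriter` and its API are illustrative, not ddtrace's `DummyWriterMixin`.)

```python
# Minimal sketch of the in-memory capture pattern behind DummyTracer/DummyWriter.
# CapturingWriter is hypothetical; ddtrace's DummyWriterMixin differs in detail.
from typing import Any, List


class CapturingWriter:
    """Collects finished traces in memory instead of flushing them to an agent."""

    def __init__(self) -> None:
        self.traces: List[List[Any]] = []

    def write(self, spans: List[Any]) -> None:
        self.traces.append(spans)

    def pop(self) -> List[List[Any]]:
        captured, self.traces = self.traces, []
        return captured
```

Tests built on this pattern stay hermetic: nothing binds to a port, and a leaked span surfaces as a non-empty `pop()` rather than a flaky network call.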
@@ -2057,25 +1953,39 @@ def test_gc_not_used_on_root_spans(): # print("--------------------") +@pytest.mark.subprocess(env=dict(AWS_LAMBDA_FUNCTION_NAME="my-func")) +def test_detect_agent_config_with_lambda_extension(): + import mock + + def mock_os_path_exists(path): + return path == "/opt/extensions/datadog-agent" + + with mock.patch("os.path.exists", side_effect=mock_os_path_exists): + import ddtrace + from ddtrace.internal.writer import AgentWriter + from ddtrace.trace import tracer + + assert ddtrace.internal.serverless.in_aws_lambda() + + assert ddtrace.internal.serverless.has_aws_lambda_agent_extension() + + assert isinstance(tracer._writer, AgentWriter) + assert tracer._writer._sync_mode + + tracer._configure(enabled=False) + assert isinstance(tracer._writer, AgentWriter) + assert tracer._writer._sync_mode + + @pytest.mark.subprocess() def test_multiple_tracer_instances(): - import warnings + import mock - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - import ddtrace + import ddtrace - assert ddtrace.tracer is not None - for w in warns: - # Ensure the warning is not about multiple tracer instances is not logged when importing ddtrace - assert "Support for multiple Tracer instances is deprecated" not in str(w.message) - - warns.clear() - t = ddtrace.trace.Tracer() - # TODO: Update this assertion when the deprecation is removed and the tracer becomes a singleton - assert t is not ddtrace.tracer - assert len(warns) == 1 - assert ( - str(warns[0].message) == "Support for multiple Tracer instances is deprecated and will be " - "removed in version '3.0.0'. Use ddtrace.tracer instead." - ) + assert ddtrace.trace.tracer is not None + with mock.patch("ddtrace._trace.tracer.log") as log: + ddtrace.trace.Tracer() + log.error.assert_called_once_with( + "Multiple Tracer instances can not be initialized. " "Use ``ddtrace.trace.tracer`` instead." + ) diff --git a/tests/utils.py b/tests/utils.py index bc7acd68b84..5d94598ec4b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -127,6 +127,7 @@ def override_global_config(values): "_x_datadog_tags_max_length", "_128_bit_trace_id_enabled", "_x_datadog_tags_enabled", + "_startup_logs_enabled", "_propagate_service", "env", "version", @@ -649,8 +650,8 @@ def configure(self, *args, **kwargs): self._configure(*args, **kwargs) def _configure(self, *args, **kwargs): - assert "writer" not in kwargs or isinstance( - kwargs["writer"], DummyWriterMixin + assert isinstance( + kwargs.get("writer"), (DummyWriterMixin, type(None)) ), "cannot configure writer of DummyTracer" if not kwargs.get("writer"):
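(Closing note: many tests in this changeset also gain `@pytest.mark.subprocess`, the repo's own marker that runs the test body in a fresh interpreter. Imports move inside the body so the child process, not the collecting process, initializes ddtrace under the test's environment. A hedged sketch of the shape such a test takes — the test name and env pairing here are illustrative, though the marker, in-body imports, `tracer.enabled`, and `tracer.flush()` all mirror this changeset.)

```python
# Illustrative shape of a subprocess-marked test; the name and env pairing
# are hypothetical, but the structure mirrors the tests in this changeset.
import pytest


@pytest.mark.subprocess(env={"DD_TRACE_ENABLED": "false"})
def test_disabled_tracer_is_quiet():
    # Runs in a child interpreter, so the env var above is in place
    # before ddtrace is imported and the global tracer is built.
    from ddtrace.trace import tracer

    assert not tracer.enabled
    with tracer.trace("noop"):
        pass
    tracer.flush()
```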