
Commit 7fbf54a

chore(langchain): drop support for tracing langchain.chains (#12172)
This PR drops support for tracing `langchain.chains` in favor of only tracing `RunnableSequence` (LangChain Expression Language, LCEL) chains.

High LOC: this PR also re-introduces APM tracing tests for langchain, but removes all cassettes and snapshots related to the previous `langchain` patching. We still need to refactor `conftest` a bit to be more modern and re-introduce LLMObs tests. I'm fairly certain those are flaky, while the normal APM tests _should_ be OK.

**Known issue**: This PR effectively removes all patching of the `langchain` package itself and only patches `langchain_core` and the community and partner libraries. However, you must still run

```python
import langchain
```

to trigger patching for LangChain. This will be fixed in a patch in `ddtrace` 3.x.

MLOB-2140

## Checklist

- [x] PR author has checked that all the criteria below are met
  - The PR description includes an overview of the change
  - The PR description articulates the motivation for the change
  - The change includes tests OR the PR description describes a testing strategy
  - The PR description notes risks associated with the change, if any
  - Newly-added code is easy to change
  - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html)
  - The change includes or references documentation updates if necessary
  - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting))

## Reviewer Checklist

- [x] Reviewer has checked that all the criteria below are met
  - Title is accurate
  - All changes are related to the pull request's stated goal
  - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes
  - Testing strategy adequately addresses listed risks
  - Newly-added code is easy to change
  - Release note makes sense to a user of the library
  - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment
  - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)
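To make the known issue concrete, here is a minimal, hedged sketch of how a traced LCEL chain might be set up once the integration is enabled (for example via `ddtrace-run`). The `ChatPromptTemplate`/`ChatOpenAI` classes, model name, and prompt text are illustrative choices, not part of this commit.

```python
# Hedged sketch of the workaround above, run under `ddtrace-run` (or with the
# langchain integration otherwise enabled). Any LCEL runnables would do; the
# specific prompt and model classes here are assumptions for the example.
import langchain  # noqa: F401  - still required to trigger patching (see known issue)

from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

prompt = ChatPromptTemplate.from_template("Summarize: {text}")
chain = prompt | ChatOpenAI(model="gpt-4o-mini")  # composing runnables builds a RunnableSequence

# RunnableSequence.invoke is one of the entry points that remains traced after this commit.
result = chain.invoke({"text": "LCEL chains replace langchain.chains"})
```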
1 parent 9eeae42 commit 7fbf54a

File tree

159 files changed: +1047 −11538 lines changed


ddtrace/contrib/internal/langchain/patch.py

Lines changed: 0 additions & 112 deletions
@@ -643,114 +643,6 @@ def traced_embedding(langchain, pin, func, instance, args, kwargs):
     return embeddings


-@with_traced_module
-def traced_chain_call(langchain, pin, func, instance, args, kwargs):
-    integration = langchain._datadog_integration
-    span = integration.trace(
-        pin,
-        "{}.{}".format(instance.__module__, instance.__class__.__name__),
-        submit_to_llmobs=True,
-        interface_type="chain",
-    )
-    inputs = None
-    final_outputs = {}
-    try:
-        inputs = get_argument_value(args, kwargs, 0, "input")
-        if not isinstance(inputs, dict):
-            inputs = {instance.input_keys[0]: inputs}
-        if integration.is_pc_sampled_span(span):
-            for k, v in inputs.items():
-                span.set_tag_str("langchain.request.inputs.%s" % k, integration.trunc(str(v)))
-            template = deep_getattr(instance, "prompt.template", default="")
-            if template:
-                span.set_tag_str("langchain.request.prompt", integration.trunc(str(template)))
-        final_outputs = func(*args, **kwargs)
-        if integration.is_pc_sampled_span(span):
-            for k, v in final_outputs.items():
-                span.set_tag_str("langchain.response.outputs.%s" % k, integration.trunc(str(v)))
-        if _is_iast_enabled():
-            taint_outputs(instance, inputs, final_outputs)
-    except Exception:
-        span.set_exc_info(*sys.exc_info())
-        integration.metric(span, "incr", "request.error", 1)
-        raise
-    finally:
-        integration.llmobs_set_tags(span, args=[], kwargs=inputs, response=final_outputs, operation="chain")
-        span.finish()
-        integration.metric(span, "dist", "request.duration", span.duration_ns)
-        if integration.is_pc_sampled_log(span):
-            log_inputs = {}
-            log_outputs = {}
-            for k, v in inputs.items():
-                log_inputs[k] = str(v)
-            for k, v in final_outputs.items():
-                log_outputs[k] = str(v)
-            integration.log(
-                span,
-                "info" if span.error == 0 else "error",
-                "sampled %s.%s" % (instance.__module__, instance.__class__.__name__),
-                attrs={
-                    "inputs": log_inputs,
-                    "prompt": str(deep_getattr(instance, "prompt.template", default="")),
-                    "outputs": log_outputs,
-                },
-            )
-    return final_outputs
-
-
-@with_traced_module
-async def traced_chain_acall(langchain, pin, func, instance, args, kwargs):
-    integration = langchain._datadog_integration
-    span = integration.trace(
-        pin,
-        "{}.{}".format(instance.__module__, instance.__class__.__name__),
-        submit_to_llmobs=True,
-        interface_type="chain",
-    )
-    inputs = None
-    final_outputs = {}
-    try:
-        inputs = get_argument_value(args, kwargs, 0, "input")
-        if not isinstance(inputs, dict):
-            inputs = {instance.input_keys[0]: inputs}
-        if integration.is_pc_sampled_span(span):
-            for k, v in inputs.items():
-                span.set_tag_str("langchain.request.inputs.%s" % k, integration.trunc(str(v)))
-            template = deep_getattr(instance, "prompt.template", default="")
-            if template:
-                span.set_tag_str("langchain.request.prompt", integration.trunc(str(template)))
-        final_outputs = await func(*args, **kwargs)
-        if integration.is_pc_sampled_span(span):
-            for k, v in final_outputs.items():
-                span.set_tag_str("langchain.response.outputs.%s" % k, integration.trunc(str(v)))
-    except Exception:
-        span.set_exc_info(*sys.exc_info())
-        integration.metric(span, "incr", "request.error", 1)
-        raise
-    finally:
-        integration.llmobs_set_tags(span, args=[], kwargs=inputs, response=final_outputs, operation="chain")
-        span.finish()
-        integration.metric(span, "dist", "request.duration", span.duration_ns)
-        if integration.is_pc_sampled_log(span):
-            log_inputs = {}
-            log_outputs = {}
-            for k, v in inputs.items():
-                log_inputs[k] = str(v)
-            for k, v in final_outputs.items():
-                log_outputs[k] = str(v)
-            integration.log(
-                span,
-                "info" if span.error == 0 else "error",
-                "sampled %s.%s" % (instance.__module__, instance.__class__.__name__),
-                attrs={
-                    "inputs": log_inputs,
-                    "prompt": str(deep_getattr(instance, "prompt.template", default="")),
-                    "outputs": log_outputs,
-                },
-            )
-    return final_outputs
-
-
 @with_traced_module
 def traced_lcel_runnable_sequence(langchain, pin, func, instance, args, kwargs):
     """
@@ -1287,8 +1179,6 @@ def patch():
         "language_models.chat_models.BaseChatModel.agenerate",
         traced_chat_model_agenerate(langchain),
     )
-    wrap("langchain", "chains.base.Chain.invoke", traced_chain_call(langchain))
-    wrap("langchain", "chains.base.Chain.ainvoke", traced_chain_acall(langchain))
     wrap("langchain_core", "runnables.base.RunnableSequence.invoke", traced_lcel_runnable_sequence(langchain))
     wrap("langchain_core", "runnables.base.RunnableSequence.ainvoke", traced_lcel_runnable_sequence_async(langchain))
     wrap("langchain_core", "runnables.base.RunnableSequence.batch", traced_lcel_runnable_sequence(langchain))
@@ -1342,8 +1232,6 @@ def unpatch():
     unwrap(langchain_core.language_models.llms.BaseLLM, "agenerate")
     unwrap(langchain_core.language_models.chat_models.BaseChatModel, "generate")
     unwrap(langchain_core.language_models.chat_models.BaseChatModel, "agenerate")
-    unwrap(langchain.chains.base.Chain, "invoke")
-    unwrap(langchain.chains.base.Chain, "ainvoke")
     unwrap(langchain_core.runnables.base.RunnableSequence, "invoke")
     unwrap(langchain_core.runnables.base.RunnableSequence, "ainvoke")
     unwrap(langchain_core.runnables.base.RunnableSequence, "batch")
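For readers unfamiliar with this patching style, the `wrap(...)`/`unwrap(...)` calls above attach and detach wrapper functions on the target methods when the integration is enabled. The following is a rough, hedged analogue using the public `wrapt` library rather than ddtrace's internal helpers; it is not the actual integration code.

```python
# Rough analogue of the RunnableSequence patching that remains after this
# commit, using the public wrapt library instead of ddtrace's internal wrap().
import wrapt


def _traced_invoke(wrapped, instance, args, kwargs):
    # A real integration opens a span around the call; this sketch only delegates.
    print("tracing %s.invoke" % type(instance).__name__)
    return wrapped(*args, **kwargs)


wrapt.wrap_function_wrapper(
    "langchain_core.runnables.base", "RunnableSequence.invoke", _traced_invoke
)
```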
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+---
+upgrade:
+  - |
+    langchain: removed tracing support for ``langchain.chains.base.Chain``. To maintain support, migrate to using LangChain's LCEL instead.
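As a hedged illustration of the migration the release note asks for: the legacy `LLMChain` usage and the `ChatOpenAI`/`StrOutputParser` choices below are assumptions for the example, not taken from this commit.

```python
# Before (no longer traced after this commit): a legacy langchain.chains chain.
#     from langchain.chains import LLMChain
#     legacy_chain = LLMChain(llm=llm, prompt=prompt)
#     legacy_chain.invoke({"topic": "tracing"})

# After (traced as a RunnableSequence): the equivalent LCEL composition.
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

prompt = ChatPromptTemplate.from_template("Write one sentence about {topic}.")
llm = ChatOpenAI(model="gpt-4o-mini")

chain = prompt | llm | StrOutputParser()  # RunnableSequence, covered by the LCEL tracing above
print(chain.invoke({"topic": "tracing"}))
```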

tests/contrib/langchain/cassettes/langchain/ai21_completion_sync.yaml

Lines changed: 0 additions & 71 deletions
This file was deleted.

tests/contrib/langchain/cassettes/langchain/ai21_completion_sync_39.yaml

Lines changed: 0 additions & 151 deletions
This file was deleted.

tests/contrib/langchain/cassettes/langchain/cohere_completion_sync.yaml

Lines changed: 0 additions & 61 deletions
This file was deleted.

0 commit comments
