Skip to content

Commit 87ed060

Browse files
feat(langchain): Record run_name in on_chat_model_start
1 parent 36ca817 commit 87ed060

2 files changed

Lines changed: 57 additions & 0 deletions

File tree

sentry_sdk/integrations/langchain.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -432,6 +432,13 @@ def on_chat_model_start(
432432
SPANDATA.GEN_AI_AGENT_NAME, agent_metadata["lc_agent_name"]
433433
)
434434

435+
run_name = kwargs.get("name")
436+
if run_name is not None:
437+
span.set_data(
438+
SPANDATA.GEN_AI_PIPELINE_NAME,
439+
run_name,
440+
)
441+
435442
for key, attribute in DATA_FIELDS.items():
436443
if key in all_params and all_params[key] is not None:
437444
set_data_normalized(span, attribute, all_params[key], unpack=False)

tests/integrations/langchain/test_langchain.py

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -170,6 +170,56 @@ def test_langchain_text_completion(
170170
assert llm_span["data"]["gen_ai.usage.output_tokens"] == 15
171171

172172

173+
def test_langchain_chat(
    sentry_init,
    capture_events,
    get_model_response,
    nonstreaming_responses_model_response,
):
    """A run_name passed via invoke config ends up as gen_ai.pipeline.name
    on the single gen_ai.chat span of the transaction."""
    sentry_init(
        integrations=[
            LangchainIntegration(
                include_prompts=True,
            )
        ],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    captured = capture_events()

    # Canned non-streaming Responses-API reply served by the patched transport.
    mocked_response = get_model_response(
        nonstreaming_responses_model_response,
        serialize_pydantic=True,
        request_headers={
            "X-Stainless-Raw-Response": "True",
        },
    )

    chat_model = ChatOpenAI(
        model_name="gpt-3.5-turbo",
        temperature=0,
        openai_api_key="badkey",
        use_responses_api=True,
    )

    # Short-circuit the underlying HTTP client so no network call is made.
    with patch.object(
        chat_model.client._client._client,
        "send",
        return_value=mocked_response,
    ), start_transaction():
        chat_model.invoke(
            "How many letters in the word eudca",
            config={"run_name": "my-snazzy-pipeline"},
        )

    transaction = captured[0]

    chat_spans = [s for s in transaction["spans"] if s["op"] == "gen_ai.chat"]
    assert len(chat_spans) == 1
    assert chat_spans[0]["data"]["gen_ai.pipeline.name"] == "my-snazzy-pipeline"
221+
222+
173223
@pytest.mark.skipif(
174224
LANGCHAIN_VERSION < (1,),
175225
reason="LangChain 1.0+ required (ONE AGENT refactor)",

0 commit comments

Comments (0)