diff --git a/dev-packages/node-integration-tests/suites/tracing/langgraph/scenario-resume.mjs b/dev-packages/node-integration-tests/suites/tracing/langgraph/scenario-resume.mjs
new file mode 100644
index 000000000000..a908f642e5ae
--- /dev/null
+++ b/dev-packages/node-integration-tests/suites/tracing/langgraph/scenario-resume.mjs
@@ -0,0 +1,43 @@
+import { END, MemorySaver, MessagesAnnotation, START, StateGraph } from '@langchain/langgraph';
+import * as Sentry from '@sentry/node';
+
+async function run() {
+  await Sentry.startSpan({ op: 'function', name: 'langgraph-resume-test' }, async () => {
+    const mockLlm = () => {
+      return {
+        messages: [
+          {
+            role: 'assistant',
+            content: 'Mock LLM response',
+            response_metadata: {
+              model_name: 'mock-model',
+              finish_reason: 'stop',
+              tokenUsage: {
+                promptTokens: 20,
+                completionTokens: 10,
+                totalTokens: 30,
+              },
+            },
+          },
+        ],
+      };
+    };
+
+    // Test: invoke with null input (resume after human-in-the-loop interrupt)
+    // See: https://docs.langchain.com/oss/javascript/langgraph/use-functional-api#resuming-after-an-error
+    const checkpointer = new MemorySaver();
+    const graph = new StateGraph(MessagesAnnotation)
+      .addNode('agent', mockLlm)
+      .addEdge(START, 'agent')
+      .addEdge('agent', END)
+      .compile({ name: 'resume_agent', checkpointer });
+
+    const config = { configurable: { thread_id: 'resume-thread-1' } };
+    await graph.invoke({ messages: [{ role: 'user', content: 'Hello' }] }, config);
+    await graph.invoke(null, config);
+  });
+
+  await Sentry.flush(2000);
+}
+
+run();
diff --git a/dev-packages/node-integration-tests/suites/tracing/langgraph/test.ts b/dev-packages/node-integration-tests/suites/tracing/langgraph/test.ts
index 5905d592ee7a..89cd583da89b 100644
--- a/dev-packages/node-integration-tests/suites/tracing/langgraph/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/langgraph/test.ts
@@ -318,4 +318,50 @@ describe('LangGraph integration', () => {
       });
     },
   );
+
+  // Test for null input resume scenario (https://github.com/getsentry/sentry-javascript/issues/19353)
+  const EXPECTED_TRANSACTION_RESUME = {
+    transaction: 'langgraph-resume-test',
+    contexts: {
+      trace: expect.objectContaining({
+        status: 'ok',
+      }),
+    },
+    spans: expect.arrayContaining([
+      // create_agent span
+      expect.objectContaining({
+        data: {
+          [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'create_agent',
+          [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.create_agent',
+          [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.langgraph',
+          [GEN_AI_AGENT_NAME_ATTRIBUTE]: 'resume_agent',
+        },
+        description: 'create_agent resume_agent',
+        op: 'gen_ai.create_agent',
+        origin: 'auto.ai.langgraph',
+        status: 'ok',
+      }),
+      // invoke_agent span with null input (resume)
+      expect.objectContaining({
+        data: expect.objectContaining({
+          [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'invoke_agent',
+          [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
+          [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.langgraph',
+          [GEN_AI_AGENT_NAME_ATTRIBUTE]: 'resume_agent',
+          [GEN_AI_PIPELINE_NAME_ATTRIBUTE]: 'resume_agent',
+          [GEN_AI_CONVERSATION_ID_ATTRIBUTE]: 'resume-thread-1',
+        }),
+        description: 'invoke_agent resume_agent',
+        op: 'gen_ai.invoke_agent',
+        origin: 'auto.ai.langgraph',
+        status: 'ok',
+      }),
+    ]),
+  };
+
+  createEsmAndCjsTests(__dirname, 'scenario-resume.mjs', 'instrument.mjs', (createRunner, test) => {
+    test('should not throw when invoke is called with null input (resume scenario)', async () => {
+      await createRunner().ignore('event').expect({ transaction: EXPECTED_TRANSACTION_RESUME }).start().completed();
+    });
+  });
 });
diff --git a/packages/core/src/tracing/langgraph/index.ts b/packages/core/src/tracing/langgraph/index.ts
index 6a9c39a7ddda..c1e838bd1914 100644
--- a/packages/core/src/tracing/langgraph/index.ts
+++ b/packages/core/src/tracing/langgraph/index.ts
@@ -136,7 +136,7 @@ function instrumentCompiledGraphInvoke(
       const recordInputs = options.recordInputs;
       const recordOutputs = options.recordOutputs;
       const inputMessages =
-        args.length > 0 ? ((args[0] as { messages?: LangChainMessage[] }).messages ?? []) : [];
+        args.length > 0 ? ((args[0] as { messages?: LangChainMessage[] } | null)?.messages ?? []) : [];
 
       if (inputMessages && recordInputs) {
         const normalizedMessages = normalizeLangChainMessages(inputMessages);