From c8b9a7fdebc902b6d68ed1356e79ee976ab1cee7 Mon Sep 17 00:00:00 2001
From: Benjamin Schroth <68321970+schrothbn@users.noreply.github.com>
Date: Wed, 14 May 2025 12:18:15 +0200
Subject: [PATCH] fix(Basic LLM Chain Node): Use JSON parsing for Claude 3.7 with thinking enabled (#15381)

---
 .../chains/ChainLLM/methods/chainExecutor.ts  | 19 +++++++++++++++++++
 .../chains/ChainLLM/test/chainExecutor.test.ts | 14 ++++++++++++++
 2 files changed, 33 insertions(+)

diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/methods/chainExecutor.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/methods/chainExecutor.ts
index 35f2fad7b5..b16c1a60d4 100644
--- a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/methods/chainExecutor.ts
+++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/methods/chainExecutor.ts
@@ -38,6 +38,21 @@ export function isModelWithResponseFormat(
 	);
 }
 
+/**
+ * Type guard to check if the LLM is an Anthropic model running with extended
+ * thinking enabled (e.g. Claude 3.7 "thinking" mode), whose output must be
+ * parsed leniently as JSON rather than as a plain string.
+ */
+export function isModelInThinkingMode(
+	llm: BaseLanguageModel,
+): llm is BaseLanguageModel & { lc_kwargs: { invocationKwargs: { thinking: { type: string } } } } {
+	// Optional chaining keeps this safe when lc_kwargs / invocationKwargs /
+	// thinking are missing, null, or not objects ('in' on null would throw).
+	const thinking = (llm as { lc_kwargs?: { invocationKwargs?: { thinking?: { type?: unknown } } } })
+		.lc_kwargs?.invocationKwargs?.thinking;
+	return typeof thinking === 'object' && thinking !== null && thinking.type === 'enabled';
+}
+
 /**
  * Type guard to check if the LLM has a format property(Ollama)
  */
@@ -61,6 +76,10 @@ export function getOutputParserForLLM(
 		return new NaiveJsonOutputParser();
 	}
 
+	if (isModelInThinkingMode(llm)) {
+		return new NaiveJsonOutputParser();
+	}
+
 	return new StringOutputParser();
 }
 
diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/test/chainExecutor.test.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/test/chainExecutor.test.ts
index 17d1a2a8d3..6ed0858251 100644
--- a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/test/chainExecutor.test.ts
+++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/test/chainExecutor.test.ts
@@ -62,6 +62,20 @@ describe('chainExecutor', () => {
 		const parser = chainExecutor.getOutputParserForLLM(regularModel);
 		expect(parser).toBeInstanceOf(StringOutputParser);
 	});
+
+	it('should return NaiveJsonOutputParser for Anthropic models in thinking mode', () => {
+		const model = {
+			lc_kwargs: {
+				invocationKwargs: {
+					thinking: {
+						type: 'enabled',
+					},
+				},
+			},
+		};
+		const parser = chainExecutor.getOutputParserForLLM(model as unknown as BaseChatModel);
+		expect(parser).toBeInstanceOf(NaiveJsonOutputParser);
+	});
 });
 
 describe('NaiveJsonOutputParser', () => {