fix(Basic LLM Chain Node): Use JSON parsing for Claude 3.7 with thinking enabled (#15381)

Author: Benjamin Schroth
Date: 2025-05-14 12:18:15 +02:00
Committed by: GitHub
Parent: 4657e348f4
Commit: c8b9a7fdeb
2 changed files with 30 additions and 0 deletions
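Background, not part of the diff itself: with Anthropic's extended thinking enabled, the request carries a `thinking: { type: 'enabled', budget_tokens: ... }` parameter, and the Basic LLM Chain previously fell through to a plain string parser for such models. The sketch below shows, under stated assumptions, how a model ends up carrying that flag where the new type guard looks for it; the API key, model id, and token budget are placeholders, and the `invocationKwargs` pass-through mirrors how the n8n Anthropic chat-model node is assumed to forward provider options.

```typescript
import { ChatAnthropic } from '@langchain/anthropic';

// Assumed setup, for illustration only. LangChain.js Serializable classes keep their
// constructor arguments on `lc_kwargs`, so a ChatAnthropic built with Anthropic's
// extended-thinking parameter forwarded through `invocationKwargs` exposes exactly
// the shape the new isModelInThinkingMode() guard checks.
const model = new ChatAnthropic({
  apiKey: 'placeholder-key', // placeholder; the model is never invoked in this sketch
  model: 'claude-3-7-sonnet-20250219',
  // Extra request parameters passed through to the Anthropic Messages API.
  invocationKwargs: {
    thinking: { type: 'enabled', budget_tokens: 1024 }, // budget value is an example
  },
});

// The guard added in this commit would now see:
//   model.lc_kwargs.invocationKwargs.thinking.type === 'enabled'
```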


@@ -38,6 +38,18 @@ export function isModelWithResponseFormat(
  );
}

export function isModelInThinkingMode(
  llm: BaseLanguageModel,
): llm is BaseLanguageModel & { lc_kwargs: { invocationKwargs: { thinking: { type: string } } } } {
  return (
    'lc_kwargs' in llm &&
    'invocationKwargs' in llm.lc_kwargs &&
    typeof llm.lc_kwargs.invocationKwargs === 'object' &&
    'thinking' in llm.lc_kwargs.invocationKwargs &&
    llm.lc_kwargs.invocationKwargs.thinking.type === 'enabled'
  );
}

/**
 * Type guard to check if the LLM has a format property(Ollama)
 */
@@ -61,6 +73,10 @@ export function getOutputParserForLLM(
    return new NaiveJsonOutputParser();
  }

  if (isModelInThinkingMode(llm)) {
    return new NaiveJsonOutputParser();
  }

  return new StringOutputParser();
}
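For a sense of why the fallback in the second hunk matters, here is a self-contained sketch. `LenientJsonParser` is a stand-in written for this example, not n8n's actual NaiveJsonOutputParser (whose implementation is not part of this diff); the assumption, suggested by the commit title, is that a thinking-enabled completion can wrap the expected JSON in surrounding prose, which a plain string parser would pass through unparsed.

```typescript
// Stand-in for the lenient JSON parsing this commit switches to; illustration only.
class LenientJsonParser {
  parse(text: string): unknown {
    // Pull the first {...} block out of the raw completion and parse it.
    const match = text.match(/\{[\s\S]*\}/);
    if (!match) throw new Error('No JSON object found in model output');
    return JSON.parse(match[0]);
  }
}

// A thinking-mode completion that wraps the JSON payload in extra prose.
const rawOutput =
  'After working through the request, here is the result:\n{"sentiment": "positive", "score": 0.92}';

const parser = new LenientJsonParser();
console.log(parser.parse(rawOutput)); // { sentiment: 'positive', score: 0.92 }

// With the previous StringOutputParser fallback, downstream nodes would have
// received the whole prose-plus-JSON string instead of a parsed object.
```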


@@ -62,6 +62,20 @@ describe('chainExecutor', () => {
    const parser = chainExecutor.getOutputParserForLLM(regularModel);
    expect(parser).toBeInstanceOf(StringOutputParser);
  });

  it('should return NaiveJsonOutputParser for Anthropic models in thinking mode', () => {
    const model = {
      lc_kwargs: {
        invocationKwargs: {
          thinking: {
            type: 'enabled',
          },
        },
      },
    };

    const parser = chainExecutor.getOutputParserForLLM(model as unknown as BaseChatModel);
    expect(parser).toBeInstanceOf(NaiveJsonOutputParser);
  });
});

describe('NaiveJsonOutputParser', () => {