Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git, synced 2025-12-19 11:01:15 +00:00
feat(core): Improve Langsmith traces for AI executions (#9081)
Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
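The commit applies one pattern throughout the AI chain nodes: each LangChain chain is wrapped with withConfig(getTracingConfig(...)) so LangSmith receives per-execution run metadata, and the legacy chain.call() entry point is replaced by the Runnable invoke() method. A minimal TypeScript sketch of that pattern follows; the prompt, model, and config values are illustrative and are not the actual n8n helpers touched by this commit.

// Sketch only: illustrative prompt, model, and config values, not n8n code.
import { ChatPromptTemplate } from '@langchain/core/prompts';
import { ChatOpenAI } from '@langchain/openai';

async function run() {
	const prompt = ChatPromptTemplate.fromTemplate('Answer briefly: {query}');
	const llm = new ChatOpenAI();

	// withConfig() attaches run metadata (run name, tags, metadata) to the
	// runnable; LangSmith picks this up on every subsequent invoke().
	const chain = prompt.pipe(llm).withConfig({
		runName: 'Example chain', // illustrative run name
		metadata: { execution_id: '123' }, // illustrative metadata
	});

	// invoke() is the Runnable entry point that replaces the legacy call().
	return await chain.invoke({ query: 'What does withConfig do?' });
}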
@@ -27,6 +27,7 @@ import {
 	getPromptInputByType,
 	isChatInstance,
 } from '../../../utils/helpers';
+import { getTracingConfig } from '../../../utils/tracing';
 
 interface MessagesTemplate {
 	type: string;
@@ -154,9 +155,9 @@ async function createSimpleLLMChain(
 	const chain = new LLMChain({
 		llm,
 		prompt,
-	});
+	}).withConfig(getTracingConfig(context));
 
-	const response = (await chain.call({
+	const response = (await chain.invoke({
 		query,
 		signal: context.getExecutionCancelSignal(),
 	})) as string[];
@@ -203,8 +204,9 @@ async function getChain(
 	);
 
 	const chain = prompt.pipe(llm).pipe(combinedOutputParser);
 
-	const response = (await chain.invoke({ query })) as string | string[];
+	const response = (await chain.withConfig(getTracingConfig(context)).invoke({ query })) as
+		| string
+		| string[];
 
 	return Array.isArray(response) ? response : [response];
 }
@@ -12,6 +12,7 @@ import type { BaseLanguageModel } from '@langchain/core/language_models/base';
 import type { BaseRetriever } from '@langchain/core/retrievers';
 import { getTemplateNoticeField } from '../../../utils/sharedFields';
 import { getPromptInputByType } from '../../../utils/helpers';
+import { getTracingConfig } from '../../../utils/tracing';
 
 export class ChainRetrievalQa implements INodeType {
 	description: INodeTypeDescription = {
@@ -176,7 +177,7 @@ export class ChainRetrievalQa implements INodeType {
 			throw new NodeOperationError(this.getNode(), 'The ‘query‘ parameter is empty.');
 		}
 
-		const response = await chain.call({ query });
+		const response = await chain.withConfig(getTracingConfig(this)).invoke({ query });
 		returnData.push({ json: { response } });
 	}
 	return await this.prepareOutputData(returnData);
@@ -18,6 +18,7 @@ import { N8nBinaryLoader } from '../../../../utils/N8nBinaryLoader';
 import { getTemplateNoticeField } from '../../../../utils/sharedFields';
 import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt';
 import { getChainPromptsArgs } from '../helpers';
+import { getTracingConfig } from '../../../../utils/tracing';
 
 function getInputs(parameters: IDataObject) {
 	const chunkingMode = parameters?.chunkingMode;
@@ -364,7 +365,7 @@ export class ChainSummarizationV2 implements INodeType {
 					? await documentInput.processItem(item, itemIndex)
 					: documentInput;
 
-				const response = await chain.call({
+				const response = await chain.withConfig(getTracingConfig(this)).invoke({
 					input_documents: processedDocuments,
 				});
 
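The diff only imports getTracingConfig from utils/tracing and never shows its implementation. For orientation, a hypothetical helper of the following shape would be enough to give each LangSmith trace a recognizable run name and some execution metadata; the naming scheme, field names, and context methods used below are assumptions, not the actual n8n code.

// Hypothetical sketch of a tracing-config helper; the real getTracingConfig
// in utils/tracing is not shown in this diff and may differ.
import type { RunnableConfig } from '@langchain/core/runnables';
import type { IExecuteFunctions } from 'n8n-workflow';

export function getTracingConfigSketch(context: IExecuteFunctions): RunnableConfig {
	return {
		// Assumed naming scheme: "[workflow name] node name".
		runName: `[${context.getWorkflow().name ?? 'workflow'}] ${context.getNode().name}`,
		metadata: {
			execution_id: context.getExecutionId(), // assumed field
			node_type: context.getNode().type, // assumed field
		},
	};
}

Because the config is attached per chain via withConfig() rather than at model construction, the node's execution context is bound at invocation time, which is what lets each n8n execution appear as its own trace in LangSmith.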