feat(core): Improve Langsmith traces for AI executions (#9081)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
This commit is contained in:
oleg
2024-04-08 22:51:49 +02:00
committed by GitHub
parent 3bcfef95f6
commit 936682eeaa
18 changed files with 99 additions and 26 deletions

View File

@@ -1,9 +1,5 @@
import {
type IExecuteFunctions,
type INodeExecutionData,
NodeConnectionType,
NodeOperationError,
} from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import { initializeAgentExecutorWithOptions } from 'langchain/agents';
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
@@ -16,13 +12,13 @@ import {
getOptionalOutputParsers,
getConnectedTools,
} from '../../../../../utils/helpers';
import { getTracingConfig } from '../../../../../utils/tracing';
export async function conversationalAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Conversational Agent');
const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0);
if (!isChatInstance(model)) {
@@ -104,7 +100,9 @@ export async function conversationalAgentExecute(
input = (await prompt.invoke({ input })).value;
}
let response = await agentExecutor.call({ input, outputParsers });
let response = await agentExecutor
.withConfig(getTracingConfig(this))
.invoke({ input, outputParsers });
if (outputParser) {
response = { output: await outputParser.parse(response.output as string) };

View File

@@ -17,6 +17,7 @@ import {
getOptionalOutputParsers,
getPromptInputByType,
} from '../../../../../utils/helpers';
import { getTracingConfig } from '../../../../../utils/tracing';
export async function openAiFunctionsAgentExecute(
this: IExecuteFunctions,
@@ -104,7 +105,9 @@ export async function openAiFunctionsAgentExecute(
input = (await prompt.invoke({ input })).value;
}
let response = await agentExecutor.call({ input, outputParsers });
let response = await agentExecutor
.withConfig(getTracingConfig(this))
.invoke({ input, outputParsers });
if (outputParser) {
response = { output: await outputParser.parse(response.output as string) };

View File

@@ -15,6 +15,7 @@ import {
getOptionalOutputParsers,
getPromptInputByType,
} from '../../../../../utils/helpers';
import { getTracingConfig } from '../../../../../utils/tracing';
export async function planAndExecuteAgentExecute(
this: IExecuteFunctions,
@@ -79,7 +80,9 @@ export async function planAndExecuteAgentExecute(
input = (await prompt.invoke({ input })).value;
}
let response = await agentExecutor.call({ input, outputParsers });
let response = await agentExecutor
.withConfig(getTracingConfig(this))
.invoke({ input, outputParsers });
if (outputParser) {
response = { output: await outputParser.parse(response.output as string) };

View File

@@ -17,6 +17,7 @@ import {
getPromptInputByType,
isChatInstance,
} from '../../../../../utils/helpers';
import { getTracingConfig } from '../../../../../utils/tracing';
export async function reActAgentAgentExecute(
this: IExecuteFunctions,
@@ -100,7 +101,10 @@ export async function reActAgentAgentExecute(
input = (await prompt.invoke({ input })).value;
}
let response = await agentExecutor.call({ input, outputParsers });
let response = await agentExecutor
.withConfig(getTracingConfig(this))
.invoke({ input, outputParsers });
if (outputParser) {
response = { output: await outputParser.parse(response.output as string) };
}

View File

@@ -14,6 +14,7 @@ import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import type { DataSource } from '@n8n/typeorm';
import { getPromptInputByType, serializeChatHistory } from '../../../../../utils/helpers';
import { getTracingConfig } from '../../../../../utils/tracing';
import { getSqliteDataSource } from './other/handlers/sqlite';
import { getPostgresDataSource } from './other/handlers/postgres';
import { SQL_PREFIX, SQL_SUFFIX } from './other/prompts';
@@ -126,7 +127,7 @@ export async function sqlAgentAgentExecute(
let response: IDataObject;
try {
response = await agentExecutor.call({
response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
input,
signal: this.getExecutionCancelSignal(),
chatHistory,

View File

@@ -10,6 +10,7 @@ import type {
} from 'n8n-workflow';
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { getConnectedTools } from '../../../utils/helpers';
import { getTracingConfig } from '../../../utils/tracing';
import { formatToOpenAIAssistantTool } from './utils';
export class OpenAiAssistant implements INodeType {
@@ -373,7 +374,7 @@ export class OpenAiAssistant implements INodeType {
tools,
});
const response = await agentExecutor.call({
const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
content: input,
signal: this.getExecutionCancelSignal(),
timeout: options.timeout ?? 10000,

View File

@@ -27,6 +27,7 @@ import {
getPromptInputByType,
isChatInstance,
} from '../../../utils/helpers';
import { getTracingConfig } from '../../../utils/tracing';
interface MessagesTemplate {
type: string;
@@ -154,9 +155,9 @@ async function createSimpleLLMChain(
const chain = new LLMChain({
llm,
prompt,
});
}).withConfig(getTracingConfig(context));
const response = (await chain.call({
const response = (await chain.invoke({
query,
signal: context.getExecutionCancelSignal(),
})) as string[];
@@ -203,8 +204,9 @@ async function getChain(
);
const chain = prompt.pipe(llm).pipe(combinedOutputParser);
const response = (await chain.invoke({ query })) as string | string[];
const response = (await chain.withConfig(getTracingConfig(context)).invoke({ query })) as
| string
| string[];
return Array.isArray(response) ? response : [response];
}

View File

@@ -12,6 +12,7 @@ import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { BaseRetriever } from '@langchain/core/retrievers';
import { getTemplateNoticeField } from '../../../utils/sharedFields';
import { getPromptInputByType } from '../../../utils/helpers';
import { getTracingConfig } from '../../../utils/tracing';
export class ChainRetrievalQa implements INodeType {
description: INodeTypeDescription = {
@@ -176,7 +177,7 @@ export class ChainRetrievalQa implements INodeType {
throw new NodeOperationError(this.getNode(), 'The query parameter is empty.');
}
const response = await chain.call({ query });
const response = await chain.withConfig(getTracingConfig(this)).invoke({ query });
returnData.push({ json: { response } });
}
return await this.prepareOutputData(returnData);

View File

@@ -18,6 +18,7 @@ import { N8nBinaryLoader } from '../../../../utils/N8nBinaryLoader';
import { getTemplateNoticeField } from '../../../../utils/sharedFields';
import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt';
import { getChainPromptsArgs } from '../helpers';
import { getTracingConfig } from '../../../../utils/tracing';
function getInputs(parameters: IDataObject) {
const chunkingMode = parameters?.chunkingMode;
@@ -364,7 +365,7 @@ export class ChainSummarizationV2 implements INodeType {
? await documentInput.processItem(item, itemIndex)
: documentInput;
const response = await chain.call({
const response = await chain.withConfig(getTracingConfig(this)).invoke({
input_documents: processedDocuments,
});

View File

@@ -16,6 +16,7 @@ import { Document } from '@langchain/core/documents';
import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';
import type { CallbackManagerForRetrieverRun } from '@langchain/core/callbacks/manager';
import { logWrapper } from '../../../utils/logWrapper';
function objectToString(obj: Record<string, string> | IDataObject, level = 0) {
@@ -287,7 +288,10 @@ export class RetrieverWorkflow implements INodeType {
this.executeFunctions = executeFunctions;
}
async getRelevantDocuments(query: string): Promise<Document[]> {
async _getRelevantDocuments(
query: string,
config?: CallbackManagerForRetrieverRun,
): Promise<Document[]> {
const source = this.executeFunctions.getNodeParameter('source', itemIndex) as string;
const baseMetadata: IDataObject = {
@@ -360,6 +364,7 @@ export class RetrieverWorkflow implements INodeType {
receivedItems = (await this.executeFunctions.executeWorkflow(
workflowInfo,
items,
config?.getChild(),
)) as INodeExecutionData[][];
} catch (error) {
// Make sure a valid error gets returned that can be JSON-serialized else it will

View File

@@ -16,6 +16,7 @@ import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';
import { DynamicTool } from '@langchain/core/tools';
import get from 'lodash/get';
import isObject from 'lodash/isObject';
import type { CallbackManagerForToolRun } from '@langchain/core/callbacks/manager';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
export class ToolWorkflow implements INodeType {
@@ -320,7 +321,10 @@ export class ToolWorkflow implements INodeType {
const name = this.getNodeParameter('name', itemIndex) as string;
const description = this.getNodeParameter('description', itemIndex) as string;
const runFunction = async (query: string): Promise<string> => {
const runFunction = async (
query: string,
runManager?: CallbackManagerForToolRun,
): Promise<string> => {
const source = this.getNodeParameter('source', itemIndex) as string;
const responsePropertyName = this.getNodeParameter(
'responsePropertyName',
@@ -385,7 +389,11 @@ export class ToolWorkflow implements INodeType {
let receivedData: INodeExecutionData;
try {
receivedData = (await this.executeWorkflow(workflowInfo, items)) as INodeExecutionData;
receivedData = (await this.executeWorkflow(
workflowInfo,
items,
runManager?.getChild(),
)) as INodeExecutionData;
} catch (error) {
// Make sure a valid error gets returned that can be JSON-serialized else it will
// not show up in the frontend
@@ -413,13 +421,13 @@ export class ToolWorkflow implements INodeType {
name,
description,
func: async (query: string): Promise<string> => {
func: async (query: string, runManager?: CallbackManagerForToolRun): Promise<string> => {
const { index } = this.addInputData(NodeConnectionType.AiTool, [[{ json: { query } }]]);
let response: string = '';
let executionError: ExecutionError | undefined;
try {
response = await runFunction(query);
response = await runFunction(query, runManager);
} catch (error) {
// TODO: Do some more testing. Issues here should actually fail the workflow
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment

View File

@@ -11,6 +11,7 @@ import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { assistantRLC } from '../descriptions';
import { getConnectedTools } from '../../../../../utils/helpers';
import { getTracingConfig } from '../../../../../utils/tracing';
const properties: INodeProperties[] = [
assistantRLC,
@@ -181,7 +182,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
tools: tools ?? [],
});
const response = await agentExecutor.invoke({
const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
content: input,
signal: this.getExecutionCancelSignal(),
timeout: options.timeout ?? 10000,