feat(core): Improve Langsmith traces for AI executions (#9081)
Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
@@ -1,9 +1,5 @@
-import {
-  type IExecuteFunctions,
-  type INodeExecutionData,
-  NodeConnectionType,
-  NodeOperationError,
-} from 'n8n-workflow';
+import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
+import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
 
 import { initializeAgentExecutorWithOptions } from 'langchain/agents';
 import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
@@ -16,13 +12,13 @@ import {
   getOptionalOutputParsers,
   getConnectedTools,
 } from '../../../../../utils/helpers';
+import { getTracingConfig } from '../../../../../utils/tracing';
 
 export async function conversationalAgentExecute(
   this: IExecuteFunctions,
   nodeVersion: number,
 ): Promise<INodeExecutionData[][]> {
   this.logger.verbose('Executing Conversational Agent');
 
   const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0);
 
   if (!isChatInstance(model)) {
@@ -104,7 +100,9 @@ export async function conversationalAgentExecute(
     input = (await prompt.invoke({ input })).value;
   }
 
-  let response = await agentExecutor.call({ input, outputParsers });
+  let response = await agentExecutor
+    .withConfig(getTracingConfig(this))
+    .invoke({ input, outputParsers });
 
   if (outputParser) {
     response = { output: await outputParser.parse(response.output as string) };
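
Note: the implementation of getTracingConfig is not part of this diff. As a rough sketch only, assuming the helper builds a LangChain RunnableConfig whose run name and metadata come from the n8n execution context (the field names below are assumptions, not the actual n8n code):

// Hypothetical sketch of a getTracingConfig-style helper; the real one in utils/tracing may differ.
import type { IExecuteFunctions } from 'n8n-workflow';
import type { RunnableConfig } from '@langchain/core/runnables';

export function getTracingConfig(context: IExecuteFunctions): RunnableConfig {
  const workflow = context.getWorkflow();
  const node = context.getNode();

  return {
    // Replaces the generic chain name with something recognizable in LangSmith.
    runName: `[${workflow.name ?? 'workflow'}] ${node.name}`,
    metadata: {
      // Assumed keys, handy for filtering traces by execution.
      executionId: context.getExecutionId(),
      workflowId: workflow.id,
      nodeType: node.type,
    },
  };
}

Binding such a config via withConfig(...) means every subsequent invoke(...) is reported to LangSmith under that name and metadata rather than as an anonymous AgentExecutor run.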
@@ -17,6 +17,7 @@ import {
   getOptionalOutputParsers,
   getPromptInputByType,
 } from '../../../../../utils/helpers';
+import { getTracingConfig } from '../../../../../utils/tracing';
 
 export async function openAiFunctionsAgentExecute(
   this: IExecuteFunctions,
@@ -104,7 +105,9 @@ export async function openAiFunctionsAgentExecute(
     input = (await prompt.invoke({ input })).value;
   }
 
-  let response = await agentExecutor.call({ input, outputParsers });
+  let response = await agentExecutor
+    .withConfig(getTracingConfig(this))
+    .invoke({ input, outputParsers });
 
   if (outputParser) {
     response = { output: await outputParser.parse(response.output as string) };
@@ -15,6 +15,7 @@ import {
   getOptionalOutputParsers,
   getPromptInputByType,
 } from '../../../../../utils/helpers';
+import { getTracingConfig } from '../../../../../utils/tracing';
 
 export async function planAndExecuteAgentExecute(
   this: IExecuteFunctions,
@@ -79,7 +80,9 @@ export async function planAndExecuteAgentExecute(
     input = (await prompt.invoke({ input })).value;
   }
 
-  let response = await agentExecutor.call({ input, outputParsers });
+  let response = await agentExecutor
+    .withConfig(getTracingConfig(this))
+    .invoke({ input, outputParsers });
 
 if (outputParser) {
     response = { output: await outputParser.parse(response.output as string) };
@@ -17,6 +17,7 @@ import {
   getPromptInputByType,
   isChatInstance,
 } from '../../../../../utils/helpers';
+import { getTracingConfig } from '../../../../../utils/tracing';
 
 export async function reActAgentAgentExecute(
   this: IExecuteFunctions,
@@ -100,7 +101,10 @@ export async function reActAgentAgentExecute(
     input = (await prompt.invoke({ input })).value;
   }
 
-  let response = await agentExecutor.call({ input, outputParsers });
+  let response = await agentExecutor
+    .withConfig(getTracingConfig(this))
+    .invoke({ input, outputParsers });
 
   if (outputParser) {
     response = { output: await outputParser.parse(response.output as string) };
   }
@@ -14,6 +14,7 @@ import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
 import type { DataSource } from '@n8n/typeorm';
 
 import { getPromptInputByType, serializeChatHistory } from '../../../../../utils/helpers';
+import { getTracingConfig } from '../../../../../utils/tracing';
 import { getSqliteDataSource } from './other/handlers/sqlite';
 import { getPostgresDataSource } from './other/handlers/postgres';
 import { SQL_PREFIX, SQL_SUFFIX } from './other/prompts';
@@ -126,7 +127,7 @@ export async function sqlAgentAgentExecute(
 
   let response: IDataObject;
   try {
-    response = await agentExecutor.call({
+    response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
       input,
       signal: this.getExecutionCancelSignal(),
       chatHistory,
@@ -10,6 +10,7 @@ import type {
 } from 'n8n-workflow';
 import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
 import { getConnectedTools } from '../../../utils/helpers';
+import { getTracingConfig } from '../../../utils/tracing';
 import { formatToOpenAIAssistantTool } from './utils';
 
 export class OpenAiAssistant implements INodeType {
@@ -373,7 +374,7 @@ export class OpenAiAssistant implements INodeType {
     tools,
   });
 
-  const response = await agentExecutor.call({
+  const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
     content: input,
     signal: this.getExecutionCancelSignal(),
     timeout: options.timeout ?? 10000,
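
The change is the same in every file: the older Chain.call() entry point is replaced by the Runnable interface, so a per-execution config can be bound before the agent runs. A minimal standalone sketch of the pattern (model, agent type, and config values below are illustrative, not taken from the n8n codebase):

import { initializeAgentExecutorWithOptions } from 'langchain/agents';
import { ChatOpenAI } from '@langchain/openai';

async function runWithTracing(input: string) {
  // Requires OPENAI_API_KEY; tools are omitted to keep the sketch short.
  const executor = await initializeAgentExecutorWithOptions([], new ChatOpenAI(), {
    agentType: 'chat-conversational-react-description',
  });

  // Before: await executor.call({ input }); -- works, but the trace gets a generic name.
  // After: bind a config, then invoke. With LANGCHAIN_TRACING_V2=true and a LangSmith API key set,
  // the runName, metadata, and tags below are attached to the recorded trace for this run.
  const response = await executor
    .withConfig({
      runName: 'Example workflow run', // illustrative value
      metadata: { source: 'example' },
      tags: ['n8n'],
    })
    .invoke({ input });

  return response.output;
}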