fix: Revert AI nodes batching (#15129)

Author: oleg
Date: 2025-05-06 11:18:11 +02:00
Committed by: GitHub
Parent: ca0e7ffe3b
Commit: 939ff97ec4
12 changed files with 205 additions and 632 deletions


@@ -47,30 +47,6 @@ export const toolsAgentProperties: INodeProperties[] = [
 				description:
 					'Whether or not binary images should be automatically passed through to the agent as image type messages',
 			},
-			{
-				displayName: 'Batch Processing',
-				name: 'batching',
-				type: 'collection',
-				description: 'Batch processing options for rate limiting',
-				default: {},
-				options: [
-					{
-						displayName: 'Batch Size',
-						name: 'batchSize',
-						default: 1,
-						type: 'number',
-						description:
-							'How many items to process in parallel. This is useful for rate limiting, but will impact the ordering in the agents log output.',
-					},
-					{
-						displayName: 'Delay Between Batches',
-						name: 'delayBetweenBatches',
-						default: 0,
-						type: 'number',
-						description: 'Delay in milliseconds between batches. This is useful for rate limiting.',
-					},
-				],
-			},
 		],
 	},
 ];
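
The hunk above drops the 'Batch Processing' collection (Batch Size, Delay Between Batches) from the Tools Agent's options; the next file's diff removes the execute-side code that consumed it. For orientation, here is a minimal, self-contained sketch of the batch-and-delay pattern that option enabled (illustrative names only, not code from the repo):

```typescript
// Illustrative sketch only — not code from the n8n repo. It mirrors the
// batch-and-delay pattern the reverted "Batch Processing" option configured:
// run `batchSize` items concurrently, wait `delayBetweenBatches` ms, repeat.
async function processInBatches<T, R>(
	items: T[],
	handler: (item: T, index: number) => Promise<R>,
	batchSize = 1,
	delayBetweenBatches = 0,
): Promise<Array<PromiseSettledResult<Awaited<R>>>> {
	const results: Array<PromiseSettledResult<Awaited<R>>> = [];
	for (let i = 0; i < items.length; i += batchSize) {
		const batch = items.slice(i, i + batchSize);
		// allSettled keeps one failed item from rejecting the whole chunk.
		const settled = await Promise.allSettled(batch.map((item, j) => handler(item, i + j)));
		results.push(...settled);
		if (i + batchSize < items.length && delayBetweenBatches > 0) {
			await new Promise((resolve) => setTimeout(resolve, delayBetweenBatches));
		}
	}
	return results;
}
```

As the removed description itself notes, chunked concurrency is useful for rate limiting but affects ordering in the agent's log output.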


@@ -11,13 +11,7 @@ import type { AgentAction, AgentFinish } from 'langchain/agents';
 import { AgentExecutor, createToolCallingAgent } from 'langchain/agents';
 import type { ToolsAgentAction } from 'langchain/dist/agents/tool_calling/output_parser';
 import { omit } from 'lodash';
-import {
-	BINARY_ENCODING,
-	jsonParse,
-	NodeConnectionTypes,
-	NodeOperationError,
-	sleep,
-} from 'n8n-workflow';
+import { BINARY_ENCODING, jsonParse, NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';
 import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
 import type { ZodObject } from 'zod';
 import { z } from 'zod';
@@ -412,21 +406,12 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
 	const returnData: INodeExecutionData[] = [];
 	const items = this.getInputData();
 	const outputParser = await getOptionalOutputParser(this);
-	const memory = await getOptionalMemory(this);
 	const tools = await getTools(this, outputParser);
-	const { batchSize, delayBetweenBatches } = this.getNodeParameter('options.batching', 0, {
-		batchSize: 1,
-		delayBetweenBatches: 0,
-	}) as {
-		batchSize: number;
-		delayBetweenBatches: number;
-	};
-	for (let i = 0; i < items.length; i += batchSize) {
-		const batch = items.slice(i, i + batchSize);
-		const batchPromises = batch.map(async (_item, batchItemIndex) => {
-			const itemIndex = i + batchItemIndex;
+	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+		try {
 			const model = await getChatModel(this);
+			const memory = await getOptionalMemory(this);
 			const input = getPromptInputByType({
 				ctx: this,
@@ -476,7 +461,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
 			});
 			// Invoke the executor with the given input and system message.
-			return await executor.invoke(
+			const response = await executor.invoke(
 				{
 					input,
 					system_message: options.systemMessage ?? SYSTEM_MESSAGE,
@@ -485,22 +470,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
 				},
 				{ signal: this.getExecutionCancelSignal() },
 			);
-		});
-		const batchResults = await Promise.allSettled(batchPromises);
-		batchResults.forEach((result, index) => {
-			if (result.status === 'rejected') {
-				if (this.continueOnFail()) {
-					returnData.push({
-						json: { error: result.reason as string },
-						pairedItem: { item: index },
-					});
-					return;
-				} else {
-					throw new NodeOperationError(this.getNode(), result.reason);
-				}
-			}
-			const response = result.value;
 			// If memory and outputParser are connected, parse the output.
 			if (memory && outputParser) {
 				const parsedOutput = jsonParse<{ output: Record<string, unknown> }>(
@@ -522,10 +492,15 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
 			};
 			returnData.push(itemResult);
-		});
-		if (i + batchSize < items.length && delayBetweenBatches > 0) {
-			await sleep(delayBetweenBatches);
+		} catch (error) {
+			if (this.continueOnFail()) {
+				returnData.push({
+					json: { error: error.message },
+					pairedItem: { item: itemIndex },
+				});
+				continue;
+			}
+			throw error;
+		}
 	}
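
With the hunks above applied, toolsAgentExecute is back to processing items strictly one at a time, with a per-item try/catch and continueOnFail handling. A minimal sketch of that restored control flow, loosely modeled on the shape of the reverted loop (the function name and ItemResult type are illustrative, not n8n's actual types):

```typescript
// Illustrative sketch only: the sequential, per-item pattern the revert restores.
// Items run one at a time, so output order follows input order; with
// continueOnFail the error is recorded against the failing item and the loop
// moves on, otherwise it rethrows and aborts the run.
type ItemResult = { json: Record<string, unknown>; pairedItem: { item: number } };

async function processSequentially<T>(
	items: T[],
	handler: (item: T, index: number) => Promise<Record<string, unknown>>,
	continueOnFail: boolean,
): Promise<ItemResult[]> {
	const returnData: ItemResult[] = [];
	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
		try {
			returnData.push({ json: await handler(items[itemIndex], itemIndex), pairedItem: { item: itemIndex } });
		} catch (error) {
			if (continueOnFail) {
				returnData.push({
					json: { error: (error as Error).message },
					pairedItem: { item: itemIndex },
				});
				continue;
			}
			throw error;
		}
	}
	return returnData;
}
```

One visible difference in the diff: the removed batched error path paired failures with the batch-local index from batchResults.forEach, whereas the restored loop pairs them with the absolute itemIndex.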