feat: Add message history trimming to AI workflow builder (no-changelog) (#17829)

This commit is contained in:
Eugene
2025-08-05 15:16:12 +02:00
committed by GitHub
parent dd049249be
commit bac61a7e0d
10 changed files with 421 additions and 3 deletions

View File

@@ -0,0 +1,3 @@
/** Maximum length (in characters) of a single user prompt sent to the AI workflow builder. */
export const MAX_AI_BUILDER_PROMPT_LENGTH = 1000; // characters
/** Maximum number of user (Human) messages retained in agent state; older history is trimmed. */
export const MAX_USER_MESSAGES = 10; // Maximum number of user messages to keep in the state

View File

@@ -0,0 +1,154 @@
import { HumanMessage, AIMessage as AssistantMessage, ToolMessage } from '@langchain/core/messages';
import type { BaseMessage } from '@langchain/core/messages';
import { createTrimMessagesReducer } from '../workflow-state';
describe('createTrimMessagesReducer', () => {
	// Shorthand factories keep the conversation fixtures compact and readable.
	const human = (text: string) => new HumanMessage(text);
	const assistant = (text: string) => new AssistantMessage(text);
	const tool = (content: string, id: string) => new ToolMessage({ content, tool_call_id: id });

	it('should return messages unchanged when human messages are within limit', () => {
		const trim = createTrimMessagesReducer(3);
		const history: BaseMessage[] = [
			human('User 1'),
			assistant('Assistant 1'),
			tool('Tool 1', '1'),
			tool('Tool 2', '2'),
			assistant('Assistant 2'),
		];

		const trimmed = trim(history);

		expect(trimmed).toEqual(history);
		expect(trimmed.length).toBe(5);
	});

	it('should trim messages when human messages exceed limit', () => {
		const trim = createTrimMessagesReducer(3);
		const history: BaseMessage[] = [
			human('User 1'),
			assistant('Assistant 1'),
			tool('Tool 1', '1'),
			human('User 2'),
			assistant('Assistant 2'),
			human('User 3'),
			assistant('Assistant 3'),
			human('User 4'),
			assistant('Assistant 4'),
		];

		const trimmed = trim(history);

		// Only the last 3 HumanMessages survive.
		const humans = trimmed.filter((msg) => msg instanceof HumanMessage);
		expect(humans.length).toBe(3);
		// The trimmed history starts at the oldest surviving HumanMessage.
		expect(trimmed[0]).toBeInstanceOf(HumanMessage);
		expect((trimmed[0] as HumanMessage).content).toBe('User 2');
		// Everything between the surviving HumanMessages is preserved.
		expect(trimmed.length).toBe(6); // User 2, Assistant 2, User 3, Assistant 3, User 4, Assistant 4
	});

	it('should handle typical conversation pattern', () => {
		const trim = createTrimMessagesReducer(2);
		const history: BaseMessage[] = [
			human('User 1'),
			assistant('Assistant 1'),
			tool('Tool 1', '1'),
			tool('Tool 2', '2'),
			assistant('Assistant 2'),
			human('User 2'),
			assistant('Assistant 3'),
			tool('Tool 3', '3'),
			tool('Tool 4', '4'),
			assistant('Assistant 4'),
			human('User 3'),
			assistant('Assistant 5'),
			tool('Tool 5', '5'),
			tool('Tool 6', '6'),
			assistant('Assistant 6'),
		];

		const trimmed = trim(history);

		// Only the last 2 HumanMessages survive.
		const humans = trimmed.filter((msg) => msg instanceof HumanMessage);
		expect(humans.length).toBe(2);
		// The trimmed history starts at the oldest surviving HumanMessage.
		expect(trimmed[0]).toBeInstanceOf(HumanMessage);
		expect((trimmed[0] as HumanMessage).content).toBe('User 2');
		// Everything from User 2 onwards is kept verbatim.
		expect(trimmed.length).toBe(10);
		expect(trimmed.map((m) => m.content)).toEqual([
			'User 2',
			'Assistant 3',
			'Tool 3',
			'Tool 4',
			'Assistant 4',
			'User 3',
			'Assistant 5',
			'Tool 5',
			'Tool 6',
			'Assistant 6',
		]);
	});

	it('should handle edge case with exactly maxUserMessages', () => {
		const trim = createTrimMessagesReducer(2);
		const history: BaseMessage[] = [
			human('User 1'),
			assistant('Assistant 1'),
			human('User 2'),
			assistant('Assistant 2'),
		];

		const trimmed = trim(history);

		expect(trimmed).toEqual(history);
		expect(trimmed.length).toBe(4);
	});

	it('should handle empty array', () => {
		const trim = createTrimMessagesReducer(5);

		expect(trim([])).toEqual([]);
	});

	it('should handle array with no HumanMessages', () => {
		const trim = createTrimMessagesReducer(5);
		const history: BaseMessage[] = [
			assistant('Assistant 1'),
			tool('Tool 1', '1'),
			assistant('Assistant 2'),
		];

		expect(trim(history)).toEqual(history);
	});

	it('should handle maxUserMessages = 1', () => {
		const trim = createTrimMessagesReducer(1);
		const history: BaseMessage[] = [
			human('User 1'),
			assistant('Assistant 1'),
			human('User 2'),
			assistant('Assistant 2'),
			human('User 3'),
			assistant('Assistant 3'),
		];

		const trimmed = trim(history);

		// Only the final HumanMessage survives.
		const humans = trimmed.filter((msg) => msg instanceof HumanMessage);
		expect(humans.length).toBe(1);
		// The trimmed history starts at User 3.
		expect(trimmed[0]).toBeInstanceOf(HumanMessage);
		expect((trimmed[0] as HumanMessage).content).toBe('User 3');
		// Only User 3 and Assistant 3 remain.
		expect(trimmed.length).toBe(2);
	});
});

View File

@@ -12,8 +12,10 @@ import type {
NodeExecutionSchema,
} from 'n8n-workflow';
import { MAX_AI_BUILDER_PROMPT_LENGTH } from '@/constants';
import { conversationCompactChain } from './chains/conversation-compact';
import { LLMServiceError } from './errors';
import { LLMServiceError, ValidationError } from './errors';
import { createAddNodeTool } from './tools/add-node.tool';
import { createConnectNodesTool } from './tools/connect-nodes.tool';
import { createNodeDetailsTool } from './tools/node-details.tool';
@@ -191,6 +193,17 @@ export class WorkflowBuilderAgent {
}
async *chat(payload: ChatPayload, userId?: string, abortSignal?: AbortSignal) {
// Check for the message maximum length
if (payload.message.length > MAX_AI_BUILDER_PROMPT_LENGTH) {
this.logger?.warn('Message exceeds maximum length', {
messageLength: payload.message.length,
maxLength: MAX_AI_BUILDER_PROMPT_LENGTH,
});
throw new ValidationError(
`Message exceeds maximum length of ${MAX_AI_BUILDER_PROMPT_LENGTH} characters`,
);
}
const agent = this.createWorkflow().compile({ checkpointer: this.checkpointer });
const workflowId = payload.workflowContext?.currentWorkflow?.id;
// Generate thread ID from workflowId and userId

View File

@@ -1,5 +1,9 @@
import type { BaseMessage } from '@langchain/core/messages';
import { HumanMessage } from '@langchain/core/messages';
import { Annotation, messagesStateReducer } from '@langchain/langgraph';
import type { BinaryOperator } from '@langchain/langgraph/dist/channels/binop';
import { MAX_USER_MESSAGES } from '@/constants';
import type { SimpleWorkflow, WorkflowOperation } from './types/workflow';
import type { ChatPayload } from './workflow-builder-agent';
@@ -32,9 +36,44 @@ function operationsReducer(
return [...(current ?? []), ...update];
}
/**
 * Builds a reducer that caps conversation history at the last `maxUserMessages`
 * HumanMessage entries, dropping everything before the oldest kept one.
 *
 * Non-human messages (assistant replies, tool results) that follow a kept
 * HumanMessage are preserved; when already at or under the limit the input
 * array is returned untouched.
 */
export function createTrimMessagesReducer(maxUserMessages: number) {
	return (current: BaseMessage[]): BaseMessage[] => {
		// Positions of every HumanMessage in the history, in order
		const humanIndices = current
			.map((msg, index) => (msg instanceof HumanMessage ? index : -1))
			.filter((index) => index >= 0);

		// Nothing to trim while we are at or under the limit
		if (humanIndices.length <= maxUserMessages) {
			return current;
		}

		// Oldest HumanMessage that still fits in the window of the last `maxUserMessages`
		const keepFrom = humanIndices[humanIndices.length - maxUserMessages];

		// Keep that HumanMessage and everything after it
		return current.slice(keepFrom);
	};
}
/**
 * Combines several message reducers into one: each reducer is applied
 * left-to-right, receiving the previous reducer's output as its `current`
 * value while the original `update` is passed to every reducer unchanged.
 */
function combineMessageReducers(...reducers: Array<BinaryOperator<BaseMessage[], BaseMessage[]>>) {
	return (current: BaseMessage[], update: BaseMessage[]): BaseMessage[] => {
		let accumulated = current;
		for (const applyReducer of reducers) {
			accumulated = applyReducer(accumulated, update);
		}
		return accumulated;
	};
}
export const WorkflowState = Annotation.Root({
messages: Annotation<BaseMessage[]>({
reducer: messagesStateReducer,
reducer: combineMessageReducers(
messagesStateReducer,
createTrimMessagesReducer(MAX_USER_MESSAGES),
),
default: () => [],
}),
// // The original prompt from the user.