diff --git a/packages/@n8n/ai-workflow-builder.ee/src/constants.ts b/packages/@n8n/ai-workflow-builder.ee/src/constants.ts new file mode 100644 index 0000000000..8a27d0b662 --- /dev/null +++ b/packages/@n8n/ai-workflow-builder.ee/src/constants.ts @@ -0,0 +1,3 @@ +export const MAX_AI_BUILDER_PROMPT_LENGTH = 1000; // characters + +export const MAX_USER_MESSAGES = 10; // Maximum number of user messages to keep in the state diff --git a/packages/@n8n/ai-workflow-builder.ee/src/test/workflow-state.test.ts b/packages/@n8n/ai-workflow-builder.ee/src/test/workflow-state.test.ts new file mode 100644 index 0000000000..bed2841803 --- /dev/null +++ b/packages/@n8n/ai-workflow-builder.ee/src/test/workflow-state.test.ts @@ -0,0 +1,154 @@ +import { HumanMessage, AIMessage as AssistantMessage, ToolMessage } from '@langchain/core/messages'; +import type { BaseMessage } from '@langchain/core/messages'; + +import { createTrimMessagesReducer } from '../workflow-state'; + +describe('createTrimMessagesReducer', () => { + it('should return messages unchanged when human messages are within limit', () => { + const reducer = createTrimMessagesReducer(3); + const messages: BaseMessage[] = [ + new HumanMessage('User 1'), + new AssistantMessage('Assistant 1'), + new ToolMessage({ content: 'Tool 1', tool_call_id: '1' }), + new ToolMessage({ content: 'Tool 2', tool_call_id: '2' }), + new AssistantMessage('Assistant 2'), + ]; + + const result = reducer(messages); + expect(result).toEqual(messages); + expect(result.length).toBe(5); + }); + + it('should trim messages when human messages exceed limit', () => { + const reducer = createTrimMessagesReducer(3); + const messages: BaseMessage[] = [ + new HumanMessage('User 1'), + new AssistantMessage('Assistant 1'), + new ToolMessage({ content: 'Tool 1', tool_call_id: '1' }), + new HumanMessage('User 2'), + new AssistantMessage('Assistant 2'), + new HumanMessage('User 3'), + new AssistantMessage('Assistant 3'), + new HumanMessage('User 4'), + new 
AssistantMessage('Assistant 4'), + ]; + + const result = reducer(messages); + + // Should keep only the last 3 HumanMessages + const humanMessages = result.filter((msg) => msg instanceof HumanMessage); + expect(humanMessages.length).toBe(3); + + // Should start with HumanMessage + expect(result[0]).toBeInstanceOf(HumanMessage); + expect((result[0] as HumanMessage).content).toBe('User 2'); + + // Should preserve messages between HumanMessages + expect(result.length).toBe(6); // User 2, Assistant 2, User 3, Assistant 3, User 4, Assistant 4 + }); + + it('should handle typical conversation pattern', () => { + const reducer = createTrimMessagesReducer(2); + const messages: BaseMessage[] = [ + new HumanMessage('User 1'), + new AssistantMessage('Assistant 1'), + new ToolMessage({ content: 'Tool 1', tool_call_id: '1' }), + new ToolMessage({ content: 'Tool 2', tool_call_id: '2' }), + new AssistantMessage('Assistant 2'), + new HumanMessage('User 2'), + new AssistantMessage('Assistant 3'), + new ToolMessage({ content: 'Tool 3', tool_call_id: '3' }), + new ToolMessage({ content: 'Tool 4', tool_call_id: '4' }), + new AssistantMessage('Assistant 4'), + new HumanMessage('User 3'), + new AssistantMessage('Assistant 5'), + new ToolMessage({ content: 'Tool 5', tool_call_id: '5' }), + new ToolMessage({ content: 'Tool 6', tool_call_id: '6' }), + new AssistantMessage('Assistant 6'), + ]; + + const result = reducer(messages); + + // Should keep only the last 2 HumanMessages + const humanMessages = result.filter((msg) => msg instanceof HumanMessage); + expect(humanMessages.length).toBe(2); + + // Should start with HumanMessage + expect(result[0]).toBeInstanceOf(HumanMessage); + expect((result[0] as HumanMessage).content).toBe('User 2'); + + // Should include all messages from User 2 onwards + expect(result.length).toBe(10); + expect(result.map((m) => m.content)).toEqual([ + 'User 2', + 'Assistant 3', + 'Tool 3', + 'Tool 4', + 'Assistant 4', + 'User 3', + 'Assistant 5', + 'Tool 5', + 
'Tool 6', + 'Assistant 6', + ]); + }); + + it('should handle edge case with exactly maxUserMessages', () => { + const reducer = createTrimMessagesReducer(2); + const messages: BaseMessage[] = [ + new HumanMessage('User 1'), + new AssistantMessage('Assistant 1'), + new HumanMessage('User 2'), + new AssistantMessage('Assistant 2'), + ]; + + const result = reducer(messages); + expect(result).toEqual(messages); + expect(result.length).toBe(4); + }); + + it('should handle empty array', () => { + const reducer = createTrimMessagesReducer(5); + const messages: BaseMessage[] = []; + + const result = reducer(messages); + expect(result).toEqual([]); + }); + + it('should handle array with no HumanMessages', () => { + const reducer = createTrimMessagesReducer(5); + const messages: BaseMessage[] = [ + new AssistantMessage('Assistant 1'), + new ToolMessage({ content: 'Tool 1', tool_call_id: '1' }), + new AssistantMessage('Assistant 2'), + ]; + + const result = reducer(messages); + expect(result).toEqual(messages); + }); + + it('should handle maxUserMessages = 1', () => { + const reducer = createTrimMessagesReducer(1); + const messages: BaseMessage[] = [ + new HumanMessage('User 1'), + new AssistantMessage('Assistant 1'), + new HumanMessage('User 2'), + new AssistantMessage('Assistant 2'), + new HumanMessage('User 3'), + new AssistantMessage('Assistant 3'), + ]; + + const result = reducer(messages); + + // Should keep only the last HumanMessage + const humanMessages = result.filter((msg) => msg instanceof HumanMessage); + expect(humanMessages.length).toBe(1); + + // Should start with User 3 + expect(result[0]).toBeInstanceOf(HumanMessage); + expect((result[0] as HumanMessage).content).toBe('User 3'); + + // Should only include User 3 and Assistant 3 + expect(result.length).toBe(2); + }); +}); diff --git a/packages/@n8n/ai-workflow-builder.ee/src/workflow-builder-agent.ts b/packages/@n8n/ai-workflow-builder.ee/src/workflow-builder-agent.ts index 20cd539fde..2712400c6c 100644 --- 
a/packages/@n8n/ai-workflow-builder.ee/src/workflow-builder-agent.ts +++ b/packages/@n8n/ai-workflow-builder.ee/src/workflow-builder-agent.ts @@ -12,8 +12,10 @@ import type { NodeExecutionSchema, } from 'n8n-workflow'; +import { MAX_AI_BUILDER_PROMPT_LENGTH } from '@/constants'; + import { conversationCompactChain } from './chains/conversation-compact'; -import { LLMServiceError } from './errors'; +import { LLMServiceError, ValidationError } from './errors'; import { createAddNodeTool } from './tools/add-node.tool'; import { createConnectNodesTool } from './tools/connect-nodes.tool'; import { createNodeDetailsTool } from './tools/node-details.tool'; @@ -191,6 +193,17 @@ export class WorkflowBuilderAgent { } async *chat(payload: ChatPayload, userId?: string, abortSignal?: AbortSignal) { + // Check for the message maximum length + if (payload.message.length > MAX_AI_BUILDER_PROMPT_LENGTH) { + this.logger?.warn('Message exceeds maximum length', { + messageLength: payload.message.length, + maxLength: MAX_AI_BUILDER_PROMPT_LENGTH, + }); + + throw new ValidationError( + `Message exceeds maximum length of ${MAX_AI_BUILDER_PROMPT_LENGTH} characters`, + ); + } const agent = this.createWorkflow().compile({ checkpointer: this.checkpointer }); const workflowId = payload.workflowContext?.currentWorkflow?.id; // Generate thread ID from workflowId and userId diff --git a/packages/@n8n/ai-workflow-builder.ee/src/workflow-state.ts b/packages/@n8n/ai-workflow-builder.ee/src/workflow-state.ts index d6a12a5980..a03b5caf70 100644 --- a/packages/@n8n/ai-workflow-builder.ee/src/workflow-state.ts +++ b/packages/@n8n/ai-workflow-builder.ee/src/workflow-state.ts @@ -1,5 +1,9 @@ import type { BaseMessage } from '@langchain/core/messages'; +import { HumanMessage } from '@langchain/core/messages'; import { Annotation, messagesStateReducer } from '@langchain/langgraph'; +import type { BinaryOperator } from '@langchain/langgraph/dist/channels/binop'; + +import { MAX_USER_MESSAGES } from 
'@/constants'; import type { SimpleWorkflow, WorkflowOperation } from './types/workflow'; import type { ChatPayload } from './workflow-builder-agent'; @@ -32,9 +36,44 @@ function operationsReducer( return [...(current ?? []), ...update]; } +// Creates a reducer that trims the message history to keep only the last `maxUserMessages` HumanMessage instances +export function createTrimMessagesReducer(maxUserMessages: number) { + return (current: BaseMessage[]): BaseMessage[] => { + // Count HumanMessage instances and remember their indices + const humanMessageIndices: number[] = []; + current.forEach((msg, index) => { + if (msg instanceof HumanMessage) { + humanMessageIndices.push(index); + } + }); + + // If we have fewer than or equal to maxUserMessages, return as is + if (humanMessageIndices.length <= maxUserMessages) { + return current; + } + + // Find the index of the first HumanMessage that we want to keep + const startHumanMessageIndex = + humanMessageIndices[humanMessageIndices.length - maxUserMessages]; + + // Slice from that HumanMessage onwards + return current.slice(startHumanMessageIndex); + }; +} + +// Utility function to combine multiple message reducers into one. +function combineMessageReducers(...reducers: Array>) { + return (current: BaseMessage[], update: BaseMessage[]): BaseMessage[] => { + return reducers.reduce((acc, reducer) => reducer(acc, update), current); + }; +} + export const WorkflowState = Annotation.Root({ messages: Annotation({ - reducer: messagesStateReducer, + reducer: combineMessageReducers( + messagesStateReducer, + createTrimMessagesReducer(MAX_USER_MESSAGES), + ), default: () => [], }), // // The original prompt from the user. 
diff --git a/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/AskAssistantChat.test.ts b/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/AskAssistantChat.test.ts index 77e76646b7..b6d034b3df 100644 --- a/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/AskAssistantChat.test.ts +++ b/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/AskAssistantChat.test.ts @@ -236,4 +236,23 @@ describe('AskAssistantChat', () => { expect(wrapper.container).toMatchSnapshot(); expect(wrapper.queryByTestId('error-retry-button')).not.toBeInTheDocument(); }); + + it('limits maximum input length when maxLength prop is specified', async () => { + const wrapper = render(AskAssistantChat, { + global: { + directives: { + n8nHtml, + }, + stubs, + }, + props: { + user: { firstName: 'Kobi', lastName: 'Dog' }, + maxLength: 100, + }, + }); + + expect(wrapper.container).toMatchSnapshot(); + const textarea = wrapper.queryByTestId('chat-input'); + expect(textarea).toHaveAttribute('maxLength', '100'); + }); }); diff --git a/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/AskAssistantChat.vue b/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/AskAssistantChat.vue index d7c34063b5..f2666f33a4 100644 --- a/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/AskAssistantChat.vue +++ b/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/AskAssistantChat.vue @@ -28,6 +28,7 @@ interface Props { placeholder?: string; scrollOnNewMessage?: boolean; showStop?: boolean; + maxLength?: number; } const emit = defineEmits<{ @@ -249,6 +250,7 @@ watch( :placeholder="placeholder ?? 
t('assistantChat.inputPlaceholder')" rows="1" wrap="hard" + :maxlength="maxLength" data-test-id="chat-input" @keydown.enter.exact.prevent="onSendMessage" @input.prevent="growInput" diff --git a/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/__snapshots__/AskAssistantChat.test.ts.snap b/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/__snapshots__/AskAssistantChat.test.ts.snap index db9896419c..f0d1ffb00b 100644 --- a/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/__snapshots__/AskAssistantChat.test.ts.snap +++ b/packages/frontend/@n8n/design-system/src/components/AskAssistantChat/__snapshots__/AskAssistantChat.test.ts.snap @@ -193,6 +193,192 @@ exports[`AskAssistantChat > does not render retry button if no error is present `; +exports[`AskAssistantChat > limits maximum input length when maxLength prop is specified 1`] = ` +
+
+
+
+
+ + + + + + + + + + + + AI Assistant + +
+ + +
+
+ +
+
+
+
+ +
+ Hi Kobi 👋
+
+

+ I can answer most questions about building workflows in n8n. +

+

+ For specific tasks, you’ll see the + + button in the UI.

+

+ How can I help? +

+
+ +
+
+
+ + +