fix(editor): Fix trimPayloadToSize mutating original objects in AI assistant (#17498)

This commit is contained in:
oleg
2025-07-21 15:48:30 +02:00
committed by GitHub
parent ddd8052e1a
commit 101004390b
2 changed files with 136 additions and 12 deletions

View File

@@ -1,5 +1,10 @@
import { describe, it, expect } from 'vitest';
import type { INode, IRunExecutionData, NodeConnectionType } from 'n8n-workflow';
import {
deepCopy,
type INode,
type IRunExecutionData,
type NodeConnectionType,
} from 'n8n-workflow';
import { useAIAssistantHelpers } from './useAIAssistantHelpers';
import { createTestingPinia } from '@pinia/testing';
import { setActivePinia } from 'pinia';
@@ -569,7 +574,7 @@ describe('Trim Payload Size', () => {
});
it('Should trim active node parameters in error helper payload', () => {
const payload = ERROR_HELPER_TEST_PAYLOAD;
const payload = deepCopy(ERROR_HELPER_TEST_PAYLOAD);
aiAssistantHelpers.trimPayloadSize(payload);
expect((payload.payload as ChatRequest.InitErrorHelper).node.parameters).toEqual({});
});
@@ -577,15 +582,18 @@ describe('Trim Payload Size', () => {
it('Should trim all node parameters in support chat', () => {
// Testing the scenario where only one trimming pass is needed
// (payload is under the limit after removing all node parameters and execution data)
const payload: ChatRequest.RequestPayload = SUPPORT_CHAT_TEST_PAYLOAD;
const supportPayload: ChatRequest.InitSupportChat =
payload.payload as ChatRequest.InitSupportChat;
const payload: ChatRequest.RequestPayload = deepCopy(SUPPORT_CHAT_TEST_PAYLOAD);
// Trimming to 4kb should be successful
expect(() =>
aiAssistantHelpers.trimPayloadSize(payload, PAYLOAD_SIZE_FOR_1_PASS),
).not.toThrow();
// All active node parameters should be removed
// Get the modified payload
const supportPayload: ChatRequest.InitSupportChat =
payload.payload as ChatRequest.InitSupportChat;
// All active node parameters should be removed in the payload
expect(supportPayload?.context?.activeNodeInfo?.node?.parameters).toEqual({});
// Also, all node parameters in the workflow should be removed
supportPayload.context?.currentWorkflow?.nodes?.forEach((node) => {
@@ -606,14 +614,17 @@ describe('Trim Payload Size', () => {
it('Should trim the whole context in support chat', () => {
// Testing the scenario where both trimming passes are needed
// (payload is over the limit after removing all node parameters and execution data)
const payload: ChatRequest.RequestPayload = SUPPORT_CHAT_TEST_PAYLOAD;
const supportPayload: ChatRequest.InitSupportChat =
payload.payload as ChatRequest.InitSupportChat;
const payload: ChatRequest.RequestPayload = deepCopy(SUPPORT_CHAT_TEST_PAYLOAD);
// Trimming should be successful
expect(() =>
aiAssistantHelpers.trimPayloadSize(payload, PAYLOAD_SIZE_FOR_2_PASSES),
).not.toThrow();
// Get the modified payload
const supportPayload: ChatRequest.InitSupportChat =
payload.payload as ChatRequest.InitSupportChat;
// The whole context object should be removed
expect(supportPayload.context).not.toBeDefined();
});
@@ -622,4 +633,111 @@ describe('Trim Payload Size', () => {
const payload = ERROR_HELPER_TEST_PAYLOAD;
expect(() => aiAssistantHelpers.trimPayloadSize(payload, 0.2)).toThrow();
});
// Regression test for the mutation bug this commit fixes: trimPayloadSize
// previously trimmed node parameters in place, clobbering objects the caller
// still held references to. It must only modify the payload argument itself.
it('Should NOT modify the original objects when trimming payload', () => {
// Create a test payload to verify that the original objects are not mutated
const testNode = {
id: 'test-node',
name: 'Test Node',
type: 'test.node',
typeVersion: 1,
position: [0, 0] as [number, number],
// Nested object included so a shallow copy inside the helper would still
// be caught by the mutation assertions below.
parameters: { key: 'value', nested: { data: 'test' } },
};
const workflowNode = {
id: 'workflow-node',
name: 'Workflow Node',
type: 'test.node',
typeVersion: 1,
position: [100, 100] as [number, number],
parameters: { param1: 'test1', param2: 'test2' },
};
const errorNode = {
id: 'error-node',
name: 'Error Node',
type: 'test.node',
typeVersion: 1,
position: [200, 200] as [number, number],
parameters: { errorParam: 'errorValue' },
};
// The payload embeds testNode and workflowNode directly (no copy), so any
// in-place trimming inside the helper would mutate them.
const payload: ChatRequest.RequestPayload = {
sessionId: 'test-session',
payload: {
type: 'init-support-chat',
role: 'user',
user: {
firstName: 'Test User',
},
question: 'test question',
context: {
activeNodeInfo: {
node: testNode,
},
currentWorkflow: {
name: 'Test Workflow',
nodes: [workflowNode],
connections: {},
active: false,
},
executionData: {
runData: {
'Test Node': [
{
startTime: 1000,
executionTime: 100,
executionIndex: 0,
source: [],
executionStatus: 'success',
data: { main: [[{ json: {} }]] },
},
],
},
},
},
},
};
// Create a shared reference to verify immutability
const sharedReference = {
activeNode: testNode,
workflowNode,
errorNode,
};
// Store original parameter values (shallow snapshots taken BEFORE trimming)
const originalTestNodeParams = { ...testNode.parameters };
const originalWorkflowNodeParams = { ...workflowNode.parameters };
const originalErrorNodeParams = { ...errorNode.parameters };
// Verify parameters exist before trimming
expect(Object.keys(testNode.parameters).length).toBeGreaterThan(0);
expect(Object.keys(workflowNode.parameters).length).toBeGreaterThan(0);
expect(Object.keys(errorNode.parameters).length).toBeGreaterThan(0);
// Call trimPayloadSize with a size limit small enough to force a trimming
// pass (PAYLOAD_SIZE_FOR_1_PASS — presumably ~4kb, see the earlier test;
// defined elsewhere in this file)
aiAssistantHelpers.trimPayloadSize(payload, PAYLOAD_SIZE_FOR_1_PASS);
// Check that the original objects have NOT been modified
expect(testNode.parameters).toEqual(originalTestNodeParams);
expect(workflowNode.parameters).toEqual(originalWorkflowNodeParams);
expect(errorNode.parameters).toEqual(originalErrorNodeParams);
// The shared references should also remain unchanged
expect(sharedReference.activeNode.parameters).toEqual(originalTestNodeParams);
expect(sharedReference.workflowNode.parameters).toEqual(originalWorkflowNodeParams);
expect(sharedReference.errorNode.parameters).toEqual(originalErrorNodeParams);
// But the payload itself should have been modified with empty parameters
const supportPayload = payload.payload as ChatRequest.InitSupportChat;
expect(supportPayload.context?.activeNodeInfo?.node?.parameters).toEqual({});
expect(
supportPayload.context?.currentWorkflow?.nodes?.every(
(node) => Object.keys(node.parameters).length === 0,
),
).toBe(true);
// Execution data is also emptied by the trimming pass
expect(supportPayload.context?.executionData?.runData).toEqual({});
});
});

View File

@@ -265,7 +265,10 @@ export const useAIAssistantHelpers = () => {
payload: ChatRequest.RequestPayload,
size = AI_ASSISTANT_MAX_CONTENT_LENGTH,
): void => {
const requestPayload = payload.payload;
// Create a deep copy to avoid mutating the original payload
const payloadCopy = deepCopy(payload);
const requestPayload = payloadCopy.payload;
// For support chat, remove parameters from the active node object and all nodes in the workflow
if (requestPayload.type === 'init-support-chat') {
if (requestPayload.context?.activeNodeInfo?.node) {
@@ -288,7 +291,7 @@ export const useAIAssistantHelpers = () => {
}
}
// If the payload is still too big, remove the whole context object
if (getRequestPayloadSize(payload) > size) {
if (getRequestPayloadSize(payloadCopy) > size) {
requestPayload.context = undefined;
}
// For error helper, remove parameters from the active node object
@@ -297,9 +300,12 @@ export const useAIAssistantHelpers = () => {
requestPayload.node.parameters = {};
}
// If the payload is still too big, throw an error that will be shown to the user
if (getRequestPayloadSize(payload) > size) {
if (getRequestPayloadSize(payloadCopy) > size) {
throw new Error(locale.baseText('aiAssistant.payloadTooBig.message'));
}
// Apply the trimmed payload back to the original object
payload.payload = payloadCopy.payload;
};
/**