From a98ed2ca495d5c86ebb61baad049592ba1bce3a6 Mon Sep 17 00:00:00 2001
From: Michael Kret <88898367+michael-radency@users.noreply.github.com>
Date: Thu, 24 Jul 2025 11:48:40 +0300
Subject: [PATCH] feat: Respond to chat and wait for response (#12546)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Co-authored-by: कारतोफ्फेलस्क्रिप्ट™
Co-authored-by: Shireen Missi <94372015+ShireenMissi@users.noreply.github.com>
---
 .../nodes/trigger/ChatTrigger/Chat.node.ts    | 273 ++++++
 .../trigger/ChatTrigger/ChatTrigger.node.ts   | 122 ++++--
 .../ChatTrigger/__test__/Chat.node.test.ts    | 143 +++++++
 .../__test__/ChatTrigger.node.test.ts         |   9 +-
 .../nodes/trigger/ChatTrigger/templates.ts    |   2 +-
 .../nodes/trigger/ChatTrigger/util.ts         |  67 +++
 packages/@n8n/nodes-langchain/package.json    |   1 +
 .../@n8n/nodes-langchain/utils/helpers.ts     |  14 +
 .../utils/tests/helpers.test.ts               |  47 +++
 .../__tests__/chat-execution-manager.test.ts  | 292 +++++++++++++
 .../src/chat/__tests__/chat-server.test.ts    |  86 ++++
 .../src/chat/__tests__/chat-service.test.ts   | 399 ++++++++++++++++++
 packages/cli/src/chat/__tests__/utils.test.ts | 303 +++++++++++++
 .../cli/src/chat/chat-execution-manager.ts    | 156 +++++++
 packages/cli/src/chat/chat-server.ts          |  54 +++
 packages/cli/src/chat/chat-service.ts         | 339 +++++++++++++++
 packages/cli/src/chat/chat-service.types.ts   |  41 ++
 packages/cli/src/chat/utils.ts                |  48 +++
 packages/cli/src/server.ts                    |   4 +
 .../__tests__/webhook-helpers.test.ts         |  72 +++-
 packages/cli/src/webhooks/webhook-helpers.ts  |  44 +-
 .../__tests__/node-execution-context.test.ts  |  27 +-
 .../node-execution-context.ts                 |  28 +-
 .../@n8n/chat/src/__stories__/App.stories.ts  |   2 +-
 .../@n8n/chat/src/__tests__/Input.spec.ts     | 157 +++++++
 .../chat/src/__tests__/plugins/chat.test.ts   |  66 +++
 .../@n8n/chat/src/__tests__/utils/fetch.ts    |   8 +-
 .../frontend/@n8n/chat/src/api/generic.ts     |  10 +-
 .../@n8n/chat/src/components/Input.vue        | 126 +++++-
 .../frontend/@n8n/chat/src/plugins/chat.ts    |  15 +-
 packages/frontend/@n8n/chat/src/types/chat.ts |   5 +-
 .../frontend/@n8n/chat/src/types/webhook.ts   |   3 +
 .../frontend/@n8n/chat/src/utils/index.ts     |   1 +
 .../frontend/@n8n/chat/src/utils/utils.ts     |  11 +
 .../editor-ui/src/components/RunData.test.ts  |   1 -
 .../src/composables/useRunWorkflow.test.ts    |   1 +
 packages/frontend/editor-ui/src/constants.ts  |   1 +
 .../logs/__test__/useChatMessaging.test.ts    | 125 ++++++
 .../logs/components/LogsPanel.test.ts         |   3 +
 .../logs/composables/useChatMessaging.ts      |  30 +-
 .../features/logs/composables/useChatState.ts |  66 ++-
 .../src/features/logs/logs.utils.test.ts      | 119 +++++-
 .../editor-ui/src/features/logs/logs.utils.ts |  28 +-
 .../RespondToWebhook/RespondToWebhook.node.ts |  48 ++-
 .../test/RespondToWebhook.test.ts             |  73 ++++
 packages/workflow/src/constants.ts            |   3 +
 packages/workflow/src/interfaces.ts           |  39 +-
 47 files changed, 3441 insertions(+), 71 deletions(-)
 create mode 100644 packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/Chat.node.ts
 create mode 100644 packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/__test__/Chat.node.test.ts
 create mode 100644 packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/util.ts
 create mode 100644 packages/cli/src/chat/__tests__/chat-execution-manager.test.ts
 create mode 100644 packages/cli/src/chat/__tests__/chat-server.test.ts
 create mode 100644 packages/cli/src/chat/__tests__/chat-service.test.ts
 create mode 100644 packages/cli/src/chat/__tests__/utils.test.ts
 create mode 100644 packages/cli/src/chat/chat-execution-manager.ts
 create
mode 100644 packages/cli/src/chat/chat-server.ts create mode 100644 packages/cli/src/chat/chat-service.ts create mode 100644 packages/cli/src/chat/chat-service.types.ts create mode 100644 packages/cli/src/chat/utils.ts create mode 100644 packages/frontend/@n8n/chat/src/__tests__/Input.spec.ts create mode 100644 packages/frontend/@n8n/chat/src/__tests__/plugins/chat.test.ts create mode 100644 packages/frontend/@n8n/chat/src/utils/utils.ts create mode 100644 packages/frontend/editor-ui/src/features/logs/__test__/useChatMessaging.test.ts diff --git a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/Chat.node.ts b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/Chat.node.ts new file mode 100644 index 0000000000..042211ab89 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/Chat.node.ts @@ -0,0 +1,273 @@ +/* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { BaseChatMemory } from 'langchain/memory'; +import { + CHAT_TRIGGER_NODE_TYPE, + CHAT_WAIT_USER_REPLY, + NodeConnectionTypes, + NodeOperationError, +} from 'n8n-workflow'; +import type { + IExecuteFunctions, + INodeExecutionData, + INodeTypeDescription, + INodeType, + INodeProperties, +} from 'n8n-workflow'; + +import { configureInputs, configureWaitTillDate } from './util'; + +const limitWaitTimeProperties: INodeProperties[] = [ + { + displayName: 'Limit Type', + name: 'limitType', + type: 'options', + default: 'afterTimeInterval', + description: + 'Sets the condition for the execution to resume. Can be a specified date or after some time.', + options: [ + { + name: 'After Time Interval', + description: 'Waits for a certain amount of time', + value: 'afterTimeInterval', + }, + { + name: 'At Specified Time', + description: 'Waits until the set date and time to continue', + value: 'atSpecifiedTime', + }, + ], + }, + { + displayName: 'Amount', + name: 'resumeAmount', + type: 'number', + displayOptions: { + show: { + limitType: ['afterTimeInterval'], + }, + }, + typeOptions: { + minValue: 0, + numberPrecision: 2, + }, + default: 1, + description: 'The time to wait', + }, + { + displayName: 'Unit', + name: 'resumeUnit', + type: 'options', + displayOptions: { + show: { + limitType: ['afterTimeInterval'], + }, + }, + options: [ + { + name: 'Minutes', + value: 'minutes', + }, + { + name: 'Hours', + value: 'hours', + }, + { + name: 'Days', + value: 'days', + }, + ], + default: 'hours', + description: 'Unit of the interval value', + }, + { + displayName: 'Max Date and Time', + name: 'maxDateAndTime', + type: 'dateTime', + displayOptions: { + show: { + limitType: ['atSpecifiedTime'], + }, + }, + default: '', + description: 'Continue execution after the specified date and time', + }, +]; + +const limitWaitTimeOption: INodeProperties = { + displayName: 'Limit Wait Time', + name: 'limitWaitTime', + type: 'fixedCollection', + description: + 'Whether to limit the time this node should wait for a user response before execution resumes', + default: { values: { limitType: 'afterTimeInterval', resumeAmount: 45, resumeUnit: 'minutes' } }, + options: [ + { + displayName: 'Values', + name: 'values', + values: limitWaitTimeProperties, + }, + ], + displayOptions: { + show: { + [`/${CHAT_WAIT_USER_REPLY}`]: [true], + }, + }, +}; + +export class Chat implements INodeType { + description: INodeTypeDescription = { + displayName: 'Respond to Chat', + name: 'chat', + icon: 'fa:comments', + iconColor: 'black', + group: ['input'], + version: 1, + description: 'Send a message to a chat', + defaults: { + 
name: 'Respond to Chat', + }, + codex: { + categories: ['Core Nodes', 'HITL'], + subcategories: { + HITL: ['Human in the Loop'], + }, + alias: ['human', 'wait', 'hitl'], + resources: { + primaryDocumentation: [ + { + url: 'https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-langchain.chat/', + }, + ], + }, + }, + inputs: `={{ (${configureInputs})($parameter) }}`, + outputs: [NodeConnectionTypes.Main], + properties: [ + { + displayName: + "Verify you're using a chat trigger with the 'Response Mode' option set to 'Using Response Nodes'", + name: 'generalNotice', + type: 'notice', + default: '', + }, + { + displayName: 'Message', + name: 'message', + type: 'string', + default: '', + required: true, + typeOptions: { + rows: 6, + }, + }, + { + displayName: 'Wait for User Reply', + name: CHAT_WAIT_USER_REPLY, + type: 'boolean', + default: true, + }, + { + displayName: 'Options', + name: 'options', + type: 'collection', + placeholder: 'Add Option', + default: {}, + options: [ + { + displayName: 'Add Memory Input Connection', + name: 'memoryConnection', + type: 'boolean', + default: false, + }, + limitWaitTimeOption, + ], + }, + ], + }; + + async onMessage( + context: IExecuteFunctions, + data: INodeExecutionData, + ): Promise { + const options = context.getNodeParameter('options', 0, {}) as { + memoryConnection?: boolean; + }; + + const waitForReply = context.getNodeParameter(CHAT_WAIT_USER_REPLY, 0, true) as boolean; + + if (!waitForReply) { + const inputData = context.getInputData(); + return [inputData]; + } + + if (options.memoryConnection) { + const memory = (await context.getInputConnectionData(NodeConnectionTypes.AiMemory, 0)) as + | BaseChatMemory + | undefined; + + const message = data.json?.chatInput; + + if (memory && message) { + await memory.chatHistory.addUserMessage(message as string); + } + } + + return [[data]]; + } + + async execute(this: IExecuteFunctions): Promise { + const connectedNodes = this.getParentNodes(this.getNode().name, { + includeNodeParameters: true, + }); + + const chatTrigger = connectedNodes.find( + (node) => node.type === CHAT_TRIGGER_NODE_TYPE && !node.disabled, + ); + + if (!chatTrigger) { + throw new NodeOperationError( + this.getNode(), + 'Workflow must be started from a chat trigger node', + ); + } + + const parameters = chatTrigger.parameters as { + mode?: 'hostedChat' | 'webhook'; + options: { responseMode: 'lastNode' | 'responseNodes' | 'streaming' | 'responseNode' }; + }; + + if (parameters.mode === 'webhook') { + throw new NodeOperationError( + this.getNode(), + '"Embeded chat" is not supported, change the "Mode" in the chat trigger node to the "Hosted Chat"', + ); + } + + if (parameters.options.responseMode !== 'responseNodes') { + throw new NodeOperationError( + this.getNode(), + '"Response Mode" in the chat trigger node must be set to "Respond Nodes"', + ); + } + + const message = (this.getNodeParameter('message', 0) as string) ?? 
''; + const options = this.getNodeParameter('options', 0, {}) as { + memoryConnection?: boolean; + }; + + if (options.memoryConnection) { + const memory = (await this.getInputConnectionData(NodeConnectionTypes.AiMemory, 0)) as + | BaseChatMemory + | undefined; + + if (memory) { + await memory.chatHistory.addAIChatMessage(message); + } + } + + const waitTill = configureWaitTillDate(this); + + await this.putExecutionToWait(waitTill); + return [[{ json: {}, sendMessage: message }]]; + } +} diff --git a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts index a1c36f8db7..938f03a232 100644 --- a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts @@ -35,27 +35,30 @@ const allowedFileMimeTypeOption: INodeProperties = { 'Allowed file types for upload. Comma-separated list of MIME types.', }; -const responseModeOptions = [ - { - name: 'When Last Node Finishes', - value: 'lastNode', - description: 'Returns data of the last-executed node', - }, - { - name: "Using 'Respond to Webhook' Node", - value: 'responseNode', - description: 'Response defined in that node', - }, -]; +const respondToWebhookResponseMode = { + name: "Using 'Respond to Webhook' Node", + value: 'responseNode', + description: 'Response defined in that node', +}; -const responseModeWithStreamingOptions = [ - ...responseModeOptions, - { - name: 'Streaming Response', - value: 'streaming', - description: 'Streaming response from specified nodes (e.g. Agents)', - }, -]; +const lastNodeResponseMode = { + name: 'When Last Node Finishes', + value: 'lastNode', + description: 'Returns data of the last-executed node', +}; + +const streamingResponseMode = { + name: 'Streaming Response', + value: 'streaming', + description: 'Streaming response from specified nodes (e.g. 
Agents)', +}; + +const respondNodesResponseMode = { + name: 'Using Response Nodes', + value: 'responseNodes', + description: + "Send responses to the chat by using 'Respond to Chat' or 'Respond to Webhook' nodes", +}; const commonOptionsFields: INodeProperties[] = [ // CORS parameters are only valid for when chat is used in hosted or webhook mode @@ -209,9 +212,8 @@ export class ChatTrigger extends Node { icon: 'fa:comments', iconColor: 'black', group: ['trigger'], - version: [1, 1.1, 1.2], - // Keep the default version as 1.1 to avoid releasing streaming in broken state - defaultVersion: 1.1, + version: [1, 1.1, 1.2, 1.3], + defaultVersion: 1.3, description: 'Runs the workflow when an n8n generated webchat is submitted', defaults: { name: 'When chat message received', @@ -390,7 +392,7 @@ export class ChatTrigger extends Node { displayOptions: { show: { public: [false], - '@version': [{ _cnd: { gte: 1.1 } }], + '@version': [1, 1.1], }, }, placeholder: 'Add Field', @@ -417,13 +419,13 @@ export class ChatTrigger extends Node { displayName: 'Response Mode', name: 'responseMode', type: 'options', - options: responseModeOptions, + options: [lastNodeResponseMode, respondToWebhookResponseMode], default: 'lastNode', description: 'When and how to respond to the webhook', }, ], }, - // Options for version 1.2+ (with streaming) + // Options for version 1.2 (with streaming) { displayName: 'Options', name: 'options', @@ -432,7 +434,7 @@ export class ChatTrigger extends Node { show: { mode: ['hostedChat', 'webhook'], public: [true], - '@version': [{ _cnd: { gte: 1.2 } }], + '@version': [1.2], }, }, placeholder: 'Add Field', @@ -443,12 +445,72 @@ export class ChatTrigger extends Node { displayName: 'Response Mode', name: 'responseMode', type: 'options', - options: responseModeWithStreamingOptions, + options: [lastNodeResponseMode, respondToWebhookResponseMode, streamingResponseMode], default: 'lastNode', description: 'When and how to respond to the webhook', }, ], }, + { + displayName: 'Options', + name: 'options', + type: 'collection', + displayOptions: { + show: { + public: [false], + '@version': [{ _cnd: { gte: 1.3 } }], + }, + }, + placeholder: 'Add Field', + default: {}, + options: [ + allowFileUploadsOption, + allowedFileMimeTypeOption, + { + displayName: 'Response Mode', + name: 'responseMode', + type: 'options', + options: [lastNodeResponseMode, respondNodesResponseMode], + default: 'lastNode', + description: 'When and how to respond to the chat', + }, + ], + }, + { + displayName: 'Options', + name: 'options', + type: 'collection', + displayOptions: { + show: { + mode: ['hostedChat', 'webhook'], + public: [true], + '@version': [{ _cnd: { gte: 1.3 } }], + }, + }, + placeholder: 'Add Field', + default: {}, + options: [ + ...commonOptionsFields, + { + displayName: 'Response Mode', + name: 'responseMode', + type: 'options', + options: [lastNodeResponseMode, respondToWebhookResponseMode], + default: 'lastNode', + description: 'When and how to respond to the chat', + displayOptions: { show: { '/mode': ['webhook'] } }, + }, + { + displayName: 'Response Mode', + name: 'responseMode', + type: 'options', + options: [lastNodeResponseMode, respondNodesResponseMode], + default: 'lastNode', + description: 'When and how to respond to the webhook', + displayOptions: { show: { '/mode': ['hostedChat'] } }, + }, + ], + }, ], }; @@ -536,10 +598,10 @@ export class ChatTrigger extends Node { allowFileUploads?: boolean; allowedFilesMimeTypes?: string; customCss?: string; + responseMode?: string; }; - const responseMode 
= ctx.getNodeParameter('options.responseMode', 'lastNode') as string; - const enableStreaming = responseMode === 'streaming'; + const enableStreaming = options.responseMode === 'streaming'; const req = ctx.getRequestObject(); const webhookName = ctx.getWebhookName(); diff --git a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/__test__/Chat.node.test.ts b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/__test__/Chat.node.test.ts new file mode 100644 index 0000000000..dc47838eb1 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/__test__/Chat.node.test.ts @@ -0,0 +1,143 @@ +import type { MockProxy } from 'jest-mock-extended'; +import { mock } from 'jest-mock-extended'; +import type { INode, IExecuteFunctions } from 'n8n-workflow'; +import { CHAT_TRIGGER_NODE_TYPE } from 'n8n-workflow'; + +import { Chat } from '../Chat.node'; + +describe('Test Chat Node', () => { + let chat: Chat; + let mockExecuteFunctions: MockProxy; + + const chatNode = mock({ + name: 'Chat', + type: CHAT_TRIGGER_NODE_TYPE, + parameters: {}, + }); + + beforeEach(() => { + chat = new Chat(); + mockExecuteFunctions = mock(); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should execute and send message', async () => { + const items = [{ json: { data: 'test' } }]; + mockExecuteFunctions.getInputData.mockReturnValue(items); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce('message'); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce(false); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({ + limitType: 'afterTimeInterval', + resumeAmount: 1, + resumeUnit: 'minutes', + }); + mockExecuteFunctions.getNode.mockReturnValue(chatNode); + mockExecuteFunctions.getParentNodes.mockReturnValue([ + { + type: CHAT_TRIGGER_NODE_TYPE, + disabled: false, + parameters: { mode: 'hostedChat', options: { responseMode: 'responseNodes' } }, + } as any, + ]); + + const result = await chat.execute.call(mockExecuteFunctions); + + expect(result).toEqual([[{ json: {}, sendMessage: 'message' }]]); + }); + + it('should execute and handle memory connection', async () => { + const items = [{ json: { data: 'test' } }]; + mockExecuteFunctions.getInputData.mockReturnValue(items); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce('message'); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({ memoryConnection: true }); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({ + limitType: 'afterTimeInterval', + resumeAmount: 1, + resumeUnit: 'minutes', + }); + mockExecuteFunctions.getNode.mockReturnValue(chatNode); + mockExecuteFunctions.getParentNodes.mockReturnValue([ + { + type: CHAT_TRIGGER_NODE_TYPE, + disabled: false, + parameters: { mode: 'hostedChat', options: { responseMode: 'responseNodes' } }, + } as any, + ]); + + const memory = { chatHistory: { addAIChatMessage: jest.fn() } }; + mockExecuteFunctions.getInputConnectionData.mockResolvedValueOnce(memory); + + await chat.execute.call(mockExecuteFunctions); + + expect(memory.chatHistory.addAIChatMessage).toHaveBeenCalledWith('message'); + }); + + it('should execute without memory connection', async () => { + const items = [{ json: { data: 'test' } }]; + mockExecuteFunctions.getInputData.mockReturnValue(items); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce('message'); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce(false); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({ + limitType: 'afterTimeInterval', + resumeAmount: 1, + resumeUnit: 'minutes', + 
}); + mockExecuteFunctions.getNode.mockReturnValue(chatNode); + mockExecuteFunctions.getParentNodes.mockReturnValue([ + { + type: CHAT_TRIGGER_NODE_TYPE, + disabled: false, + parameters: { mode: 'hostedChat', options: { responseMode: 'responseNodes' } }, + } as any, + ]); + + const result = await chat.execute.call(mockExecuteFunctions); + + expect(result).toEqual([[{ json: {}, sendMessage: 'message' }]]); + }); + + it('should execute with specified time limit', async () => { + const items = [{ json: { data: 'test' } }]; + mockExecuteFunctions.getInputData.mockReturnValue(items); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce('message'); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce(false); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({ + limitType: 'atSpecifiedTime', + maxDateAndTime: new Date().toISOString(), + }); + mockExecuteFunctions.getNode.mockReturnValue(chatNode); + mockExecuteFunctions.getParentNodes.mockReturnValue([ + { + type: CHAT_TRIGGER_NODE_TYPE, + disabled: false, + parameters: { mode: 'hostedChat', options: { responseMode: 'responseNodes' } }, + } as any, + ]); + + const result = await chat.execute.call(mockExecuteFunctions); + + expect(result).toEqual([[{ json: {}, sendMessage: 'message' }]]); + }); + + it('should process onMessage without waiting for reply', async () => { + const data = { json: { chatInput: 'user message' } }; + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({ memoryConnection: true }); + mockExecuteFunctions.getNodeParameter.mockReturnValueOnce(false); + mockExecuteFunctions.getInputData.mockReturnValue([data]); + mockExecuteFunctions.getNode.mockReturnValue(chatNode); + mockExecuteFunctions.getParentNodes.mockReturnValue([ + { + type: CHAT_TRIGGER_NODE_TYPE, + disabled: false, + parameters: { mode: 'hostedChat', options: { responseMode: 'responseNodes' } }, + } as any, + ]); + + const result = await chat.onMessage(mockExecuteFunctions, data); + + expect(result).toEqual([[data]]); + }); +}); diff --git a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/__test__/ChatTrigger.node.test.ts b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/__test__/ChatTrigger.node.test.ts index 1dd55ff0b5..a9480c7fa9 100644 --- a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/__test__/ChatTrigger.node.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/__test__/ChatTrigger.node.test.ts @@ -150,8 +150,7 @@ describe('ChatTrigger Node', () => { ): boolean | string | object | undefined => { if (paramName === 'public') return true; if (paramName === 'mode') return 'hostedChat'; - if (paramName === 'options') return {}; - if (paramName === 'options.responseMode') return 'streaming'; + if (paramName === 'options') return { responseMode: 'streaming' }; return defaultValue; }, ); @@ -184,8 +183,7 @@ describe('ChatTrigger Node', () => { ): boolean | string | object | undefined => { if (paramName === 'public') return true; if (paramName === 'mode') return 'hostedChat'; - if (paramName === 'options') return {}; - if (paramName === 'options.responseMode') return 'lastNode'; + if (paramName === 'options') return { responseMode: 'lastNode' }; return defaultValue; }, ); @@ -220,8 +218,7 @@ describe('ChatTrigger Node', () => { ): boolean | string | object | undefined => { if (paramName === 'public') return true; if (paramName === 'mode') return 'hostedChat'; - if (paramName === 'options') return {}; - if (paramName === 'options.responseMode') return 'streaming'; + if (paramName === 'options') 
return { responseMode: 'streaming' };
         return defaultValue;
       },
     );
diff --git a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/templates.ts b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/templates.ts
index 7b5d4964f4..a10f377aea 100644
--- a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/templates.ts
+++ b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/templates.ts
@@ -77,7 +77,7 @@ export function createPage({
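For reviewers, a minimal sketch of how the pieces introduced in this patch are meant to be wired together: a Chat Trigger (version 1.3) using the new 'responseNodes' response mode, followed by the new Respond to Chat node. This is an illustration only, assuming node type strings and parameter shapes inferred from the diff above (the CHAT_WAIT_USER_REPLY key is the constant this patch adds to n8n-workflow); it is not an official workflow template.

```ts
// Hypothetical workflow fragment; identifiers inferred from the diff, not verified.
import { CHAT_WAIT_USER_REPLY } from 'n8n-workflow';

const chatTriggerNode = {
	name: 'When chat message received',
	type: '@n8n/n8n-nodes-langchain.chatTrigger', // assumed value of CHAT_TRIGGER_NODE_TYPE
	typeVersion: 1.3,
	parameters: {
		mode: 'hostedChat', // 'webhook' mode is rejected by the Respond to Chat node
		public: true,
		options: { responseMode: 'responseNodes' }, // required; 'lastNode' or 'streaming' will throw
	},
};

const respondToChatNode = {
	name: 'Respond to Chat',
	type: '@n8n/n8n-nodes-langchain.chat', // assumed full type name for the new Chat node
	typeVersion: 1,
	parameters: {
		message: 'Please confirm you want to proceed',
		[CHAT_WAIT_USER_REPLY]: true, // park the execution until the user replies
		options: {
			memoryConnection: false,
			limitWaitTime: {
				values: { limitType: 'afterTimeInterval', resumeAmount: 45, resumeUnit: 'minutes' },
			},
		},
	},
};
```

At runtime the Respond to Chat node validates that its parent chat trigger uses hosted chat with 'responseNodes', optionally writes the outgoing message to the connected memory, calls putExecutionToWait with the date computed by configureWaitTillDate, and resumes via onMessage when the user answers; with 'Wait for User Reply' disabled it simply passes its input through.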