feat: Respond to chat and wait for response (#12546)

Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
Co-authored-by: Shireen Missi <94372015+ShireenMissi@users.noreply.github.com>
Michael Kret
2025-07-24 11:48:40 +03:00
committed by GitHub
parent e61b25c53f
commit a98ed2ca49
47 changed files with 3441 additions and 71 deletions

View File

@@ -0,0 +1,273 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import type { BaseChatMemory } from 'langchain/memory';
import {
CHAT_TRIGGER_NODE_TYPE,
CHAT_WAIT_USER_REPLY,
NodeConnectionTypes,
NodeOperationError,
} from 'n8n-workflow';
import type {
IExecuteFunctions,
INodeExecutionData,
INodeTypeDescription,
INodeType,
INodeProperties,
} from 'n8n-workflow';
import { configureInputs, configureWaitTillDate } from './util';
const limitWaitTimeProperties: INodeProperties[] = [
{
displayName: 'Limit Type',
name: 'limitType',
type: 'options',
default: 'afterTimeInterval',
description:
'Sets the condition for the execution to resume. Can be a time interval or a specified date and time.',
options: [
{
name: 'After Time Interval',
description: 'Waits for a certain amount of time',
value: 'afterTimeInterval',
},
{
name: 'At Specified Time',
description: 'Waits until the set date and time to continue',
value: 'atSpecifiedTime',
},
],
},
{
displayName: 'Amount',
name: 'resumeAmount',
type: 'number',
displayOptions: {
show: {
limitType: ['afterTimeInterval'],
},
},
typeOptions: {
minValue: 0,
numberPrecision: 2,
},
default: 1,
description: 'The time to wait',
},
{
displayName: 'Unit',
name: 'resumeUnit',
type: 'options',
displayOptions: {
show: {
limitType: ['afterTimeInterval'],
},
},
options: [
{
name: 'Minutes',
value: 'minutes',
},
{
name: 'Hours',
value: 'hours',
},
{
name: 'Days',
value: 'days',
},
],
default: 'hours',
description: 'Unit of the interval value',
},
{
displayName: 'Max Date and Time',
name: 'maxDateAndTime',
type: 'dateTime',
displayOptions: {
show: {
limitType: ['atSpecifiedTime'],
},
},
default: '',
description: 'Continue execution after the specified date and time',
},
];
const limitWaitTimeOption: INodeProperties = {
displayName: 'Limit Wait Time',
name: 'limitWaitTime',
type: 'fixedCollection',
description:
'Whether to limit the time this node should wait for a user response before execution resumes',
default: { values: { limitType: 'afterTimeInterval', resumeAmount: 45, resumeUnit: 'minutes' } },
options: [
{
displayName: 'Values',
name: 'values',
values: limitWaitTimeProperties,
},
],
displayOptions: {
show: {
[`/${CHAT_WAIT_USER_REPLY}`]: [true],
},
},
};
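// Note: the leading "/" in the displayOptions path above resolves against the root-level
// parameters, i.e. the "Wait for User Reply" toggle defined further down, so the
// Limit Wait Time option is only offered when the node will actually pause for a reply.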
export class Chat implements INodeType {
description: INodeTypeDescription = {
displayName: 'Respond to Chat',
name: 'chat',
icon: 'fa:comments',
iconColor: 'black',
group: ['input'],
version: 1,
description: 'Send a message to a chat',
defaults: {
name: 'Respond to Chat',
},
codex: {
categories: ['Core Nodes', 'HITL'],
subcategories: {
HITL: ['Human in the Loop'],
},
alias: ['human', 'wait', 'hitl'],
resources: {
primaryDocumentation: [
{
url: 'https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-langchain.chat/',
},
],
},
},
inputs: `={{ (${configureInputs})($parameter) }}`,
outputs: [NodeConnectionTypes.Main],
properties: [
{
displayName:
"Verify you're using a chat trigger with the 'Response Mode' option set to 'Using Response Nodes'",
name: 'generalNotice',
type: 'notice',
default: '',
},
{
displayName: 'Message',
name: 'message',
type: 'string',
default: '',
required: true,
typeOptions: {
rows: 6,
},
},
{
displayName: 'Wait for User Reply',
name: CHAT_WAIT_USER_REPLY,
type: 'boolean',
default: true,
},
{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Option',
default: {},
options: [
{
displayName: 'Add Memory Input Connection',
name: 'memoryConnection',
type: 'boolean',
default: false,
},
limitWaitTimeOption,
],
},
],
};
async onMessage(
context: IExecuteFunctions,
data: INodeExecutionData,
): Promise<INodeExecutionData[][]> {
const options = context.getNodeParameter('options', 0, {}) as {
memoryConnection?: boolean;
};
const waitForReply = context.getNodeParameter(CHAT_WAIT_USER_REPLY, 0, true) as boolean;
if (!waitForReply) {
const inputData = context.getInputData();
return [inputData];
}
if (options.memoryConnection) {
const memory = (await context.getInputConnectionData(NodeConnectionTypes.AiMemory, 0)) as
| BaseChatMemory
| undefined;
const message = data.json?.chatInput;
if (memory && message) {
await memory.chatHistory.addUserMessage(message as string);
}
}
return [[data]];
}
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const connectedNodes = this.getParentNodes(this.getNode().name, {
includeNodeParameters: true,
});
const chatTrigger = connectedNodes.find(
(node) => node.type === CHAT_TRIGGER_NODE_TYPE && !node.disabled,
);
if (!chatTrigger) {
throw new NodeOperationError(
this.getNode(),
'Workflow must be started from a chat trigger node',
);
}
const parameters = chatTrigger.parameters as {
mode?: 'hostedChat' | 'webhook';
options: { responseMode: 'lastNode' | 'responseNodes' | 'streaming' | 'responseNode' };
};
if (parameters.mode === 'webhook') {
throw new NodeOperationError(
this.getNode(),
'"Embeded chat" is not supported, change the "Mode" in the chat trigger node to the "Hosted Chat"',
);
}
if (parameters.options.responseMode !== 'responseNodes') {
throw new NodeOperationError(
this.getNode(),
'"Response Mode" in the chat trigger node must be set to "Respond Nodes"',
);
}
const message = (this.getNodeParameter('message', 0) as string) ?? '';
const options = this.getNodeParameter('options', 0, {}) as {
memoryConnection?: boolean;
};
if (options.memoryConnection) {
const memory = (await this.getInputConnectionData(NodeConnectionTypes.AiMemory, 0)) as
| BaseChatMemory
| undefined;
if (memory) {
await memory.chatHistory.addAIChatMessage(message);
}
}
const waitTill = configureWaitTillDate(this);
await this.putExecutionToWait(waitTill);
return [[{ json: {}, sendMessage: message }]];
}
}
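
For orientation, a minimal sketch of the parameter object the properties above produce on a workflow node. The message text is made up, and CHAT_WAIT_USER_REPLY is used as a computed key because its string value is not shown in this diff:

import { CHAT_WAIT_USER_REPLY } from 'n8n-workflow';

// Hypothetical "Respond to Chat" parameters (illustration only).
const exampleParameters = {
  message: 'Please confirm the order details before we continue.',
  [CHAT_WAIT_USER_REPLY]: true, // the "Wait for User Reply" toggle
  options: {
    memoryConnection: true, // adds the ai_memory input via configureInputs
    limitWaitTime: {
      values: { limitType: 'afterTimeInterval', resumeAmount: 45, resumeUnit: 'minutes' },
    },
  },
};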

View File

@@ -35,27 +35,30 @@ const allowedFileMimeTypeOption: INodeProperties = {
'Allowed file types for upload. Comma-separated list of <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types" target="_blank">MIME types</a>.',
};
const responseModeOptions = [
{
name: 'When Last Node Finishes',
value: 'lastNode',
description: 'Returns data of the last-executed node',
},
{
name: "Using 'Respond to Webhook' Node",
value: 'responseNode',
description: 'Response defined in that node',
},
];
const respondToWebhookResponseMode = {
name: "Using 'Respond to Webhook' Node",
value: 'responseNode',
description: 'Response defined in that node',
};
const responseModeWithStreamingOptions = [
...responseModeOptions,
{
name: 'Streaming Response',
value: 'streaming',
description: 'Streaming response from specified nodes (e.g. Agents)',
},
];
const lastNodeResponseMode = {
name: 'When Last Node Finishes',
value: 'lastNode',
description: 'Returns data of the last-executed node',
};
const streamingResponseMode = {
name: 'Streaming Response',
value: 'streaming',
description: 'Streaming response from specified nodes (e.g. Agents)',
};
const respondNodesResponseMode = {
name: 'Using Response Nodes',
value: 'responseNodes',
description:
"Send responses to the chat by using 'Respond to Chat' or 'Respond to Webhook' nodes",
};
const commonOptionsFields: INodeProperties[] = [
// CORS parameters are only valid for when chat is used in hosted or webhook mode
@@ -209,9 +212,8 @@ export class ChatTrigger extends Node {
icon: 'fa:comments',
iconColor: 'black',
group: ['trigger'],
version: [1, 1.1, 1.2],
// Keep the default version as 1.1 to avoid releasing streaming in broken state
defaultVersion: 1.1,
version: [1, 1.1, 1.2, 1.3],
defaultVersion: 1.3,
description: 'Runs the workflow when an n8n generated webchat is submitted',
defaults: {
name: 'When chat message received',
@@ -390,7 +392,7 @@ export class ChatTrigger extends Node {
displayOptions: {
show: {
public: [false],
'@version': [{ _cnd: { gte: 1.1 } }],
'@version': [1, 1.1],
},
},
placeholder: 'Add Field',
@@ -417,13 +419,13 @@ export class ChatTrigger extends Node {
displayName: 'Response Mode',
name: 'responseMode',
type: 'options',
options: responseModeOptions,
options: [lastNodeResponseMode, respondToWebhookResponseMode],
default: 'lastNode',
description: 'When and how to respond to the webhook',
},
],
},
// Options for version 1.2+ (with streaming)
// Options for version 1.2 (with streaming)
{
displayName: 'Options',
name: 'options',
@@ -432,7 +434,7 @@ export class ChatTrigger extends Node {
show: {
mode: ['hostedChat', 'webhook'],
public: [true],
'@version': [{ _cnd: { gte: 1.2 } }],
'@version': [1.2],
},
},
placeholder: 'Add Field',
@@ -443,12 +445,72 @@ export class ChatTrigger extends Node {
displayName: 'Response Mode',
name: 'responseMode',
type: 'options',
options: responseModeWithStreamingOptions,
options: [lastNodeResponseMode, respondToWebhookResponseMode, streamingResponseMode],
default: 'lastNode',
description: 'When and how to respond to the webhook',
},
],
},
{
displayName: 'Options',
name: 'options',
type: 'collection',
displayOptions: {
show: {
public: [false],
'@version': [{ _cnd: { gte: 1.3 } }],
},
},
placeholder: 'Add Field',
default: {},
options: [
allowFileUploadsOption,
allowedFileMimeTypeOption,
{
displayName: 'Response Mode',
name: 'responseMode',
type: 'options',
options: [lastNodeResponseMode, respondNodesResponseMode],
default: 'lastNode',
description: 'When and how to respond to the chat',
},
],
},
{
displayName: 'Options',
name: 'options',
type: 'collection',
displayOptions: {
show: {
mode: ['hostedChat', 'webhook'],
public: [true],
'@version': [{ _cnd: { gte: 1.3 } }],
},
},
placeholder: 'Add Field',
default: {},
options: [
...commonOptionsFields,
{
displayName: 'Response Mode',
name: 'responseMode',
type: 'options',
options: [lastNodeResponseMode, respondToWebhookResponseMode],
default: 'lastNode',
description: 'When and how to respond to the webhook',
displayOptions: { show: { '/mode': ['webhook'] } },
},
{
displayName: 'Response Mode',
name: 'responseMode',
type: 'options',
options: [lastNodeResponseMode, respondNodesResponseMode],
default: 'lastNode',
description: 'When and how to respond to the chat',
displayOptions: { show: { '/mode': ['hostedChat'] } },
},
],
},
],
};
@@ -536,10 +598,10 @@ export class ChatTrigger extends Node {
allowFileUploads?: boolean;
allowedFilesMimeTypes?: string;
customCss?: string;
responseMode?: string;
};
const responseMode = ctx.getNodeParameter('options.responseMode', 'lastNode') as string;
const enableStreaming = responseMode === 'streaming';
const enableStreaming = options.responseMode === 'streaming';
const req = ctx.getRequestObject();
const webhookName = ctx.getWebhookName();
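
To make the version handling above easier to follow, this is the mapping of node version and mode to the Response Mode values the trigger now exposes; it is derived from the displayOptions in this diff and is only a reading aid:

// Response Mode values per Chat Trigger version (summary, not part of the node).
const responseModesByTriggerVersion: Record<string, string[]> = {
  'v1 / v1.1': ['lastNode', 'responseNode'],
  'v1.2': ['lastNode', 'responseNode', 'streaming'],
  'v1.3 (hosted chat or non-public)': ['lastNode', 'responseNodes'],
  'v1.3 (webhook mode)': ['lastNode', 'responseNode'],
};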

View File

@@ -0,0 +1,143 @@
import type { MockProxy } from 'jest-mock-extended';
import { mock } from 'jest-mock-extended';
import type { INode, IExecuteFunctions } from 'n8n-workflow';
import { CHAT_TRIGGER_NODE_TYPE } from 'n8n-workflow';
import { Chat } from '../Chat.node';
describe('Test Chat Node', () => {
let chat: Chat;
let mockExecuteFunctions: MockProxy<IExecuteFunctions>;
const chatNode = mock<INode>({
name: 'Chat',
type: CHAT_TRIGGER_NODE_TYPE,
parameters: {},
});
beforeEach(() => {
chat = new Chat();
mockExecuteFunctions = mock<IExecuteFunctions>();
});
afterEach(() => {
jest.clearAllMocks();
});
it('should execute and send message', async () => {
const items = [{ json: { data: 'test' } }];
mockExecuteFunctions.getInputData.mockReturnValue(items);
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce('message');
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce(false);
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({
limitType: 'afterTimeInterval',
resumeAmount: 1,
resumeUnit: 'minutes',
});
mockExecuteFunctions.getNode.mockReturnValue(chatNode);
mockExecuteFunctions.getParentNodes.mockReturnValue([
{
type: CHAT_TRIGGER_NODE_TYPE,
disabled: false,
parameters: { mode: 'hostedChat', options: { responseMode: 'responseNodes' } },
} as any,
]);
const result = await chat.execute.call(mockExecuteFunctions);
expect(result).toEqual([[{ json: {}, sendMessage: 'message' }]]);
});
it('should execute and handle memory connection', async () => {
const items = [{ json: { data: 'test' } }];
mockExecuteFunctions.getInputData.mockReturnValue(items);
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce('message');
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({ memoryConnection: true });
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({
limitType: 'afterTimeInterval',
resumeAmount: 1,
resumeUnit: 'minutes',
});
mockExecuteFunctions.getNode.mockReturnValue(chatNode);
mockExecuteFunctions.getParentNodes.mockReturnValue([
{
type: CHAT_TRIGGER_NODE_TYPE,
disabled: false,
parameters: { mode: 'hostedChat', options: { responseMode: 'responseNodes' } },
} as any,
]);
const memory = { chatHistory: { addAIChatMessage: jest.fn() } };
mockExecuteFunctions.getInputConnectionData.mockResolvedValueOnce(memory);
await chat.execute.call(mockExecuteFunctions);
expect(memory.chatHistory.addAIChatMessage).toHaveBeenCalledWith('message');
});
it('should execute without memory connection', async () => {
const items = [{ json: { data: 'test' } }];
mockExecuteFunctions.getInputData.mockReturnValue(items);
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce('message');
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce(false);
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({
limitType: 'afterTimeInterval',
resumeAmount: 1,
resumeUnit: 'minutes',
});
mockExecuteFunctions.getNode.mockReturnValue(chatNode);
mockExecuteFunctions.getParentNodes.mockReturnValue([
{
type: CHAT_TRIGGER_NODE_TYPE,
disabled: false,
parameters: { mode: 'hostedChat', options: { responseMode: 'responseNodes' } },
} as any,
]);
const result = await chat.execute.call(mockExecuteFunctions);
expect(result).toEqual([[{ json: {}, sendMessage: 'message' }]]);
});
it('should execute with specified time limit', async () => {
const items = [{ json: { data: 'test' } }];
mockExecuteFunctions.getInputData.mockReturnValue(items);
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce('message');
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce(false);
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({
limitType: 'atSpecifiedTime',
maxDateAndTime: new Date().toISOString(),
});
mockExecuteFunctions.getNode.mockReturnValue(chatNode);
mockExecuteFunctions.getParentNodes.mockReturnValue([
{
type: CHAT_TRIGGER_NODE_TYPE,
disabled: false,
parameters: { mode: 'hostedChat', options: { responseMode: 'responseNodes' } },
} as any,
]);
const result = await chat.execute.call(mockExecuteFunctions);
expect(result).toEqual([[{ json: {}, sendMessage: 'message' }]]);
});
it('should process onMessage without waiting for reply', async () => {
const data = { json: { chatInput: 'user message' } };
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce({ memoryConnection: true });
mockExecuteFunctions.getNodeParameter.mockReturnValueOnce(false);
mockExecuteFunctions.getInputData.mockReturnValue([data]);
mockExecuteFunctions.getNode.mockReturnValue(chatNode);
mockExecuteFunctions.getParentNodes.mockReturnValue([
{
type: CHAT_TRIGGER_NODE_TYPE,
disabled: false,
parameters: { mode: 'hostedChat', options: { responseMode: 'responseNodes' } },
} as any,
]);
const result = await chat.onMessage(mockExecuteFunctions, data);
expect(result).toEqual([[data]]);
});
});

View File

@@ -150,8 +150,7 @@ describe('ChatTrigger Node', () => {
): boolean | string | object | undefined => {
if (paramName === 'public') return true;
if (paramName === 'mode') return 'hostedChat';
if (paramName === 'options') return {};
if (paramName === 'options.responseMode') return 'streaming';
if (paramName === 'options') return { responseMode: 'streaming' };
return defaultValue;
},
);
@@ -184,8 +183,7 @@ describe('ChatTrigger Node', () => {
): boolean | string | object | undefined => {
if (paramName === 'public') return true;
if (paramName === 'mode') return 'hostedChat';
if (paramName === 'options') return {};
if (paramName === 'options.responseMode') return 'lastNode';
if (paramName === 'options') return { responseMode: 'lastNode' };
return defaultValue;
},
);
@@ -220,8 +218,7 @@ describe('ChatTrigger Node', () => {
): boolean | string | object | undefined => {
if (paramName === 'public') return true;
if (paramName === 'mode') return 'hostedChat';
if (paramName === 'options') return {};
if (paramName === 'options.responseMode') return 'streaming';
if (paramName === 'options') return { responseMode: 'streaming' };
return defaultValue;
},
);

View File

@@ -77,7 +77,7 @@ export function createPage({
</head>
<body>
<script type="module">
import { createChat } from 'https://cdn.jsdelivr.net/npm/@n8n/chat/dist/chat.bundle.es.js';
import { createChat } from 'https://cdn.jsdelivr.net/npm/n8n-chat-atekron@0.49.0/dist/chat.bundle.es.js';
(async function () {
const authentication = '${sanitizedAuthentication}';

View File

@@ -0,0 +1,67 @@
import { NodeOperationError, UserError, WAIT_INDEFINITELY } from 'n8n-workflow';
import type { IExecuteFunctions } from 'n8n-workflow';
export function configureWaitTillDate(context: IExecuteFunctions) {
let waitTill = WAIT_INDEFINITELY;
const limitOptions = context.getNodeParameter('options.limitWaitTime.values', 0, {}) as {
limitType?: string;
resumeAmount?: number;
resumeUnit?: string;
maxDateAndTime?: string;
};
if (Object.keys(limitOptions).length) {
try {
if (limitOptions.limitType === 'afterTimeInterval') {
let waitAmount = limitOptions.resumeAmount as number;
if (limitOptions.resumeUnit === 'minutes') {
waitAmount *= 60;
}
if (limitOptions.resumeUnit === 'hours') {
waitAmount *= 60 * 60;
}
if (limitOptions.resumeUnit === 'days') {
waitAmount *= 60 * 60 * 24;
}
waitAmount *= 1000;
waitTill = new Date(new Date().getTime() + waitAmount);
} else {
waitTill = new Date(limitOptions.maxDateAndTime as string);
}
if (isNaN(waitTill.getTime())) {
throw new UserError('Invalid date format');
}
} catch (error) {
throw new NodeOperationError(context.getNode(), 'Could not configure Limit Wait Time', {
description: error.message,
});
}
}
return waitTill;
}
export const configureInputs = (parameters: { options?: { memoryConnection?: boolean } }) => {
const inputs = [
{
type: 'main',
displayName: 'User Response',
},
];
if (parameters.options?.memoryConnection) {
return [
...inputs,
{
type: 'ai_memory',
displayName: 'Memory',
maxConnections: 1,
},
];
}
return inputs;
};
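
A short usage sketch of the two helpers above, written test-style with jest-mock-extended as the spec files in this commit already do; the expected values follow from the code and are noted as comments:

import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions } from 'n8n-workflow';
import { configureInputs, configureWaitTillDate } from './util';

// configureInputs is pure and can be called directly.
configureInputs({}); // [{ type: 'main', displayName: 'User Response' }]
configureInputs({ options: { memoryConnection: true } });
// ...plus { type: 'ai_memory', displayName: 'Memory', maxConnections: 1 }

// configureWaitTillDate reads 'options.limitWaitTime.values' from the node parameters.
const ctx = mock<IExecuteFunctions>();
ctx.getNodeParameter.mockReturnValue({
  limitType: 'afterTimeInterval',
  resumeAmount: 45,
  resumeUnit: 'minutes',
});
configureWaitTillDate(ctx); // ≈ new Date(Date.now() + 45 * 60 * 1000)
ctx.getNodeParameter.mockReturnValue({});
configureWaitTillDate(ctx); // WAIT_INDEFINITELY (no limit configured)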

View File

@@ -125,6 +125,7 @@
"dist/nodes/tools/ToolWorkflow/ToolWorkflow.node.js",
"dist/nodes/trigger/ManualChatTrigger/ManualChatTrigger.node.js",
"dist/nodes/trigger/ChatTrigger/ChatTrigger.node.js",
"dist/nodes/trigger/ChatTrigger/Chat.node.js",
"dist/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.js",
"dist/nodes/vector_store/VectorStoreInMemoryInsert/VectorStoreInMemoryInsert.node.js",
"dist/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.js",

View File

@@ -117,6 +117,20 @@ export function getSessionId(
sessionId = bodyData.sessionId as string;
} else {
sessionId = ctx.evaluateExpression('{{ $json.sessionId }}', itemIndex) as string;
// try to get sessionId from chat trigger
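// For illustration: with the default trigger name 'When chat message received',
// the fallback expression evaluated below resolves to
//   {{ $('When chat message received').first().json.sessionId }}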
if (!sessionId || sessionId === undefined) {
try {
const chatTrigger = ctx.getChatTrigger();
if (chatTrigger) {
sessionId = ctx.evaluateExpression(
`{{ $('${chatTrigger.name}').first().json.sessionId }}`,
itemIndex,
) as string;
}
} catch (error) {}
}
}
if (sessionId === '' || sessionId === undefined) {

View File

@@ -10,6 +10,7 @@ import {
getConnectedTools,
hasLongSequentialRepeat,
unwrapNestedOutput,
getSessionId,
} from '../helpers';
import { N8nTool } from '../N8nTool';
@@ -376,6 +377,52 @@ describe('unwrapNestedOutput', () => {
});
});
describe('getSessionId', () => {
let mockCtx: any;
beforeEach(() => {
mockCtx = {
getNodeParameter: jest.fn(),
evaluateExpression: jest.fn(),
getChatTrigger: jest.fn(),
getNode: jest.fn(),
};
});
it('should retrieve sessionId from bodyData', () => {
mockCtx.getBodyData = jest.fn();
mockCtx.getNodeParameter.mockReturnValue('fromInput');
mockCtx.getBodyData.mockReturnValue({ sessionId: '12345' });
const sessionId = getSessionId(mockCtx, 0);
expect(sessionId).toBe('12345');
});
it('should retrieve sessionId from chat trigger', () => {
mockCtx.getNodeParameter.mockReturnValue('fromInput');
mockCtx.evaluateExpression.mockReturnValueOnce(undefined);
mockCtx.getChatTrigger.mockReturnValue({ name: 'chatTrigger' });
mockCtx.evaluateExpression.mockReturnValueOnce('67890');
const sessionId = getSessionId(mockCtx, 0);
expect(sessionId).toBe('67890');
});
it('should throw error if sessionId is not found', () => {
mockCtx.getNodeParameter.mockReturnValue('fromInput');
mockCtx.evaluateExpression.mockReturnValue(undefined);
mockCtx.getChatTrigger.mockReturnValue(undefined);
expect(() => getSessionId(mockCtx, 0)).toThrow(NodeOperationError);
});
it('should use custom sessionId if provided', () => {
mockCtx.getNodeParameter.mockReturnValueOnce('custom').mockReturnValueOnce('customSessionId');
const sessionId = getSessionId(mockCtx, 0);
expect(sessionId).toBe('customSessionId');
});
});
describe('hasLongSequentialRepeat', () => {
it('should return false for text shorter than threshold', () => {
const text = 'a'.repeat(99);