feat: Respond to chat and wait for response (#12546)
Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
Co-authored-by: Shireen Missi <94372015+ShireenMissi@users.noreply.github.com>
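In short, this change teaches the editor's chat panel to keep listening after a message is sent: when the Chat Trigger is configured with responseMode: 'responseNodes', the frontend opens a WebSocket to the running execution, answers server heartbeats, turns the loading indicator back on when an 'n8n|continue' signal arrives, and appends every pushed reply as a bot message. Below is a minimal client-side sketch of that flow, reconstructed from the useChatState changes further down; openChatSocket and pushBotMessage are illustrative placeholders, while constructChatWebsocketUrl, setLoadingState, and the 'n8n|…' control strings come from the diff itself.

import { constructChatWebsocketUrl } from '@n8n/chat/utils';

// Sketch only: mirrors the onmessage/onclose handling added in useChatState below.
function openChatSocket(
	baseUrl: string,
	executionId: string,
	sessionId: string,
	pushBotMessage: (text: string) => void,
	setLoadingState: (loading: boolean) => void,
): WebSocket {
	const ws = new WebSocket(constructChatWebsocketUrl(baseUrl, executionId, sessionId, false));

	ws.onmessage = (event) => {
		if (event.data === 'n8n|heartbeat') {
			// Keep the waiting execution alive.
			ws.send('n8n|heartbeat-ack');
			return;
		}
		if (event.data === 'n8n|continue') {
			// The workflow resumed; show the loading indicator again.
			setLoadingState(true);
			return;
		}
		// Anything else is a reply pushed by a response node.
		setLoadingState(false);
		pushBotMessage(event.data);
	};

	// Socket closed by the server: stop showing the loading indicator.
	ws.onclose = () => setLoadingState(false);

	return ws;
}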
@@ -577,7 +577,6 @@ describe('RunData', () => {
executionTime: 3,
// @ts-expect-error allow missing properties in test
source: [{ previousNode: 'Execute Workflow Trigger' }],
// @ts-expect-error allow missing properties in test
executionStatus: 'error',
// @ts-expect-error allow missing properties in test
error: {

@@ -1043,6 +1043,7 @@ describe('useRunWorkflow({ router })', () => {
workflowsStore.activeWorkflows = ['test-wf-id'];
workflowsStore.setActiveExecutionId('test-exec-id');
workflowsStore.executionWaitingForWebhook = false;

getExecutionSpy.mockResolvedValue(executionData);

@@ -155,6 +155,7 @@ export const MANUAL_TRIGGER_NODE_TYPE = 'n8n-nodes-base.manualTrigger';
export const MANUAL_CHAT_TRIGGER_NODE_TYPE = '@n8n/n8n-nodes-langchain.manualChatTrigger';
export const MCP_TRIGGER_NODE_TYPE = '@n8n/n8n-nodes-langchain.mcpTrigger';
export const CHAT_TRIGGER_NODE_TYPE = '@n8n/n8n-nodes-langchain.chatTrigger';
export const CHAT_NODE_TYPE = '@n8n/n8n-nodes-langchain.chat';
export const AGENT_NODE_TYPE = '@n8n/n8n-nodes-langchain.agent';
export const OPEN_AI_NODE_TYPE = '@n8n/n8n-nodes-langchain.openAi';
export const OPEN_AI_NODE_MESSAGE_ASSISTANT_TYPE =

@@ -0,0 +1,125 @@
import { createTestingPinia } from '@pinia/testing';
import { useChatMessaging } from '../composables/useChatMessaging';
import { ref, computed } from 'vue';
import type { Ref, ComputedRef } from 'vue';
import type { IRunExecutionData } from 'n8n-workflow';
import type { IExecutionPushResponse, INodeUi } from '@/Interface';
import type { RunWorkflowChatPayload } from '../composables/useChatMessaging';
import { vi } from 'vitest';
import type { ChatMessage } from '@n8n/chat/types';

vi.mock('../logs.utils', () => {
return {
extractBotResponse: vi.fn(() => 'Last node response'),
getInputKey: vi.fn(),
processFiles: vi.fn(),
};
});

describe('useChatMessaging', () => {
let chatMessaging: ReturnType<typeof useChatMessaging>;
let chatTrigger: Ref<INodeUi | null>;
let messages: Ref<ChatMessage[]>;
let sessionId: Ref<string>;
let executionResultData: ComputedRef<IRunExecutionData['resultData'] | undefined>;
let onRunChatWorkflow: (
payload: RunWorkflowChatPayload,
) => Promise<IExecutionPushResponse | undefined>;
let ws: Ref<WebSocket | null>;
let executionData: IRunExecutionData['resultData'] | undefined = undefined;

beforeEach(() => {
executionData = undefined;
createTestingPinia();
chatTrigger = ref(null);
messages = ref([]);
sessionId = ref('session-id');
executionResultData = computed(() => executionData);
onRunChatWorkflow = vi.fn().mockResolvedValue({
executionId: 'execution-id',
} as IExecutionPushResponse);
ws = ref(null);

chatMessaging = useChatMessaging({
chatTrigger,
messages,
sessionId,
executionResultData,
onRunChatWorkflow,
ws,
});
});

it('should initialize correctly', () => {
expect(chatMessaging).toBeDefined();
expect(chatMessaging.previousMessageIndex.value).toBe(0);
expect(chatMessaging.isLoading.value).toBe(false);
});

it('should send a message and add it to messages', async () => {
const messageText = 'Hello, world!';
await chatMessaging.sendMessage(messageText);

expect(messages.value).toHaveLength(1);
});

it('should send message via WebSocket if open', async () => {
const messageText = 'Hello, WebSocket!';
ws.value = {
readyState: WebSocket.OPEN,
send: vi.fn(),
} as unknown as WebSocket;

await chatMessaging.sendMessage(messageText);

expect(ws.value.send).toHaveBeenCalledWith(
JSON.stringify({
sessionId: sessionId.value,
action: 'sendMessage',
chatInput: messageText,
}),
);
});

it('should startWorkflowWithMessage and add message to messages with final message', async () => {
const messageText = 'Hola!';
chatTrigger.value = {
id: 'trigger-id',
name: 'Trigger',
typeVersion: 1.1,
parameters: { options: {} },
} as unknown as INodeUi;

(onRunChatWorkflow as jest.Mock).mockResolvedValue({
executionId: 'execution-id',
} as IExecutionPushResponse);

executionData = {
runData: {},
} as unknown as IRunExecutionData['resultData'];

await chatMessaging.sendMessage(messageText);
expect(messages.value).toHaveLength(2);
});

it('should startWorkflowWithMessage and not add final message if responseMode is responseNode and version is 1.3', async () => {
const messageText = 'Hola!';
chatTrigger.value = {
id: 'trigger-id',
name: 'Trigger',
typeVersion: 1.3,
parameters: { options: { responseMode: 'responseNodes' } },
} as unknown as INodeUi;

(onRunChatWorkflow as jest.Mock).mockResolvedValue({
executionId: 'execution-id',
} as IExecutionPushResponse);

executionData = {
runData: {},
} as unknown as IRunExecutionData['resultData'];

await chatMessaging.sendMessage(messageText);
expect(messages.value).toHaveLength(1);
});
});

@@ -658,6 +658,7 @@ describe('LogsPanel', () => {
sendMessage: vi.fn(),
previousMessageIndex: ref(0),
isLoading: computed(() => false),
setLoadingState: vi.fn(),
};
});
});

@@ -693,6 +694,7 @@ describe('LogsPanel', () => {
sendMessage: vi.fn(),
previousMessageIndex: ref(0),
isLoading: computed(() => false),
setLoadingState: vi.fn(),
});

logsStore.state = LOGS_PANEL_STATE.ATTACHED;

@@ -800,6 +802,7 @@ describe('LogsPanel', () => {
sendMessage: sendMessageSpy,
previousMessageIndex: ref(0),
isLoading: computed(() => false),
setLoadingState: vi.fn(),
};
});
});

@@ -17,7 +17,8 @@ import { usePinnedData } from '@/composables/usePinnedData';
import { MODAL_CONFIRM } from '@/constants';
import { useI18n } from '@n8n/i18n';
import type { IExecutionPushResponse, INodeUi } from '@/Interface';
import { extractBotResponse, getInputKey } from '@/features/logs/logs.utils';
import { extractBotResponse, getInputKey, processFiles } from '@/features/logs/logs.utils';

export type RunWorkflowChatPayload = {
triggerNode: string;

@@ -33,6 +34,7 @@ export interface ChatMessagingDependencies {
onRunChatWorkflow: (
payload: RunWorkflowChatPayload,
) => Promise<IExecutionPushResponse | undefined>;
ws: Ref<WebSocket | null>;
}

export function useChatMessaging({

@@ -41,12 +43,17 @@ export function useChatMessaging({
sessionId,
executionResultData,
onRunChatWorkflow,
ws,
}: ChatMessagingDependencies) {
const locale = useI18n();
const { showError } = useToast();
const previousMessageIndex = ref(0);
const isLoading = ref(false);

const setLoadingState = (loading: boolean) => {
isLoading.value = loading;
};

/** Converts a file to binary data */
async function convertFileToBinaryData(file: File): Promise<IBinaryData> {
const reader = new FileReader();

@@ -140,10 +147,16 @@ export function useChatMessaging({
message,
});
isLoading.value = false;
ws.value = null;
if (!response?.executionId) {
return;
}

// Response Node mode should not return last node result if responseMode is "responseNodes"
const responseMode = (triggerNode.parameters.options as { responseMode?: string })
?.responseMode;
if (responseMode === 'responseNodes') return;

const chatMessage = executionResultData.value
? extractBotResponse(
executionResultData.value,

@@ -193,12 +206,25 @@ export function useChatMessaging({
};
messages.value.push(newMessage);

await startWorkflowWithMessage(newMessage.text, files);
if (ws.value?.readyState === WebSocket.OPEN && !isLoading.value) {
ws.value.send(
JSON.stringify({
sessionId: sessionId.value,
action: 'sendMessage',
chatInput: message,
files: await processFiles(files),
}),
);
isLoading.value = true;
} else {
await startWorkflowWithMessage(newMessage.text, files);
}
}

return {
previousMessageIndex,
isLoading: computed(() => isLoading.value),
setLoadingState,
sendMessage,
};
}

@@ -5,17 +5,25 @@ import { useNodeHelpers } from '@/composables/useNodeHelpers';
import { useRunWorkflow } from '@/composables/useRunWorkflow';
import { VIEWS } from '@/constants';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { ChatOptionsSymbol, ChatSymbol } from '@n8n/chat/constants';
import { useRootStore } from '@n8n/stores/useRootStore';
import { ChatOptionsSymbol } from '@n8n/chat/constants';
import { chatEventBus } from '@n8n/chat/event-buses';
import type { Chat, ChatMessage, ChatOptions } from '@n8n/chat/types';
import { v4 as uuid } from 'uuid';
import type { Ref } from 'vue';
import type { InjectionKey, Ref } from 'vue';
import { computed, provide, ref, watch } from 'vue';
import { useRouter } from 'vue-router';
import { useLogsStore } from '@/stores/logs.store';
import { restoreChatHistory } from '@/features/logs/logs.utils';
import type { INodeParameters } from 'n8n-workflow';
import { isChatNode } from '@/utils/aiUtils';
import { constructChatWebsocketUrl } from '@n8n/chat/utils';

type IntegratedChat = Omit<Chat, 'sendMessage'> & {
sendMessage: (text: string, files: File[]) => Promise<void>;
};

const ChatSymbol = 'Chat' as unknown as InjectionKey<IntegratedChat>;

interface ChatState {
currentSessionId: Ref<string>;

@@ -29,11 +37,13 @@ interface ChatState {
export function useChatState(isReadOnly: boolean): ChatState {
const locale = useI18n();
const workflowsStore = useWorkflowsStore();
const rootStore = useRootStore();
const logsStore = useLogsStore();
const router = useRouter();
const nodeHelpers = useNodeHelpers();
const { runWorkflow } = useRunWorkflow({ router });

const ws = ref<WebSocket | null>(null);
const messages = ref<ChatMessage[]>([]);
const currentSessionId = ref<string>(uuid().replace(/-/g, ''));

@@ -52,25 +62,32 @@ export function useChatState(isReadOnly: boolean): ChatState {
)?.allowedFilesMimeTypes?.toString() ?? '',
);

const { sendMessage, isLoading } = useChatMessaging({
const respondNodesResponseMode = computed(
() =>
(chatTriggerNode.value?.parameters?.options as { responseMode?: string })?.responseMode ===
'responseNodes',
);

const { sendMessage, isLoading, setLoadingState } = useChatMessaging({
chatTrigger: chatTriggerNode,
messages,
sessionId: currentSessionId,
executionResultData: computed(() => workflowsStore.getWorkflowExecution?.data?.resultData),
onRunChatWorkflow,
ws,
});

// Extracted pure functions for better testability
function createChatConfig(params: {
messages: Chat['messages'];
sendMessage: Chat['sendMessage'];
sendMessage: IntegratedChat['sendMessage'];
currentSessionId: Chat['currentSessionId'];
isLoading: Ref<boolean>;
isDisabled: Ref<boolean>;
allowFileUploads: Ref<boolean>;
locale: ReturnType<typeof useI18n>;
}): { chatConfig: Chat; chatOptions: ChatOptions } {
const chatConfig: Chat = {
}): { chatConfig: IntegratedChat; chatOptions: ChatOptions } {
const chatConfig: IntegratedChat = {
messages: params.messages,
sendMessage: params.sendMessage,
initialMessages: ref([]),

@@ -154,6 +171,43 @@ export function useChatState(isReadOnly: boolean): ChatState {
const response = await runWorkflow(runWorkflowOptions);

if (response) {
if (respondNodesResponseMode.value) {
const wsUrl = constructChatWebsocketUrl(
rootStore.urlBaseEditor,
response.executionId as string,
currentSessionId.value,
false,
);

ws.value = new WebSocket(wsUrl);
ws.value.onmessage = (event) => {
if (event.data === 'n8n|heartbeat') {
ws.value?.send('n8n|heartbeat-ack');
return;
}
if (event.data === 'n8n|continue') {
setLoadingState(true);
return;
}
setLoadingState(false);
const newMessage: ChatMessage & { sessionId: string } = {
text: event.data,
sender: 'bot',
sessionId: currentSessionId.value,
id: uuid(),
};
messages.value.push(newMessage);

if (logsStore.isOpen) {
chatEventBus.emit('focusInput');
}
};
ws.value.onclose = () => {
setLoadingState(false);
ws.value = null;
};
}

await createExecutionPromise();
workflowsStore.appendChatMessage(payload.message);
return response;

@@ -14,13 +14,11 @@ import {
getTreeNodeData,
mergeStartData,
restoreChatHistory,
processFiles,
extractBotResponse,
} from './logs.utils';
import {
AGENT_LANGCHAIN_NODE_TYPE,
NodeConnectionTypes,
type ExecutionError,
type ITaskStartedData,
} from 'n8n-workflow';
import { AGENT_LANGCHAIN_NODE_TYPE, NodeConnectionTypes } from 'n8n-workflow';
import type { ExecutionError, ITaskStartedData, IRunExecutionData } from 'n8n-workflow';
import {
aiAgentNode,
aiChatWorkflow,

@@ -1170,6 +1168,115 @@ describe(createLogTree, () => {
expect(logs[0].children).toHaveLength(1);
expect(logs[0].children[0].node.name).toBe(aiModelNode.name);
});

it('should process files correctly', async () => {
const mockFile = new File(['test content'], 'test.txt', { type: 'text/plain' });
const result = await processFiles([mockFile]);

expect(result).toEqual([
{
name: 'test.txt',
type: 'text/plain',
data: 'data:text/plain;base64,dGVzdCBjb250ZW50',
},
]);
});

it('should return an empty array if no files are provided', async () => {
expect(await processFiles(undefined)).toEqual([]);
expect(await processFiles([])).toEqual([]);
});
});

describe('extractBotResponse', () => {
it('should extract a successful bot response', () => {
const resultData: IRunExecutionData['resultData'] = {
lastNodeExecuted: 'nodeA',
runData: {
nodeA: [
{
executionTime: 1,
startTime: 1,
executionIndex: 1,
source: [],
data: {
main: [[{ json: { message: 'Test output' } }]],
},
},
],
},
};
const executionId = 'test-exec-id';
const result = extractBotResponse(resultData, executionId);
expect(result).toEqual({
text: 'Test output',
sender: 'bot',
id: executionId,
});
});

it('should extract an error bot response', () => {
const resultData: IRunExecutionData['resultData'] = {
lastNodeExecuted: 'nodeA',
runData: {
nodeA: [
{
executionTime: 1,
startTime: 1,
executionIndex: 1,
source: [],
error: {
message: 'Test error',
} as unknown as ExecutionError,
},
],
},
};
const executionId = 'test-exec-id';
const result = extractBotResponse(resultData, executionId);
expect(result).toEqual({
text: '[ERROR: Test error]',
sender: 'bot',
id: 'test-exec-id',
});
});

it('should return undefined if no response data is available', () => {
const resultData = {
lastNodeExecuted: 'nodeA',
runData: {
nodeA: [
{
executionTime: 1,
startTime: 1,
executionIndex: 1,
source: [],
},
],
},
};
const executionId = 'test-exec-id';
const result = extractBotResponse(resultData, executionId);
expect(result).toBeUndefined();
});

it('should return undefined if lastNodeExecuted is not available', () => {
const resultData = {
runData: {
nodeA: [
{
executionTime: 1,
startTime: 1,
executionIndex: 1,
source: [],
},
],
},
};
const executionId = 'test-exec-id';
const result = extractBotResponse(resultData, executionId);
expect(result).toBeUndefined();
});
});

describe(deepToRaw, () => {

@@ -565,7 +565,7 @@ function extractResponseText(responseData?: IDataObject): string | undefined {
}

// Paths where the response message might be located
const paths = ['output', 'text', 'response.text'];
const paths = ['output', 'text', 'response.text', 'message'];
const matchedPath = paths.find((path) => get(responseData, path));

if (!matchedPath) return JSON.stringify(responseData, null, 2);

@@ -599,6 +599,32 @@ export function restoreChatHistory(
return [...(userMessage ? [userMessage] : []), ...(botMessage ? [botMessage] : [])];
}

export async function processFiles(data: File[] | undefined) {
if (!data || data.length === 0) return [];

const filePromises = data.map(async (file) => {
// We do not need to await here as it will be awaited on the return by Promise.all
// eslint-disable-next-line @typescript-eslint/return-await
return new Promise<{ name: string; type: string; data: string }>((resolve, reject) => {
const reader = new FileReader();

reader.onload = () =>
resolve({
name: file.name,
type: file.type,
data: reader.result as string,
});

reader.onerror = () =>
reject(new Error(`Error reading file: ${reader.error?.message ?? 'Unknown error'}`));

reader.readAsDataURL(file);
});
});

return await Promise.all(filePromises);
}

export function isSubNodeLog(logEntry: LogEntry): boolean {
return logEntry.parent !== undefined && logEntry.parent.executionId === logEntry.executionId;
}