feat: Abort AI builder requests on chat stop (#17854)

This commit is contained in:
oleg
2025-08-04 09:55:07 +02:00
committed by GitHub
parent 1554e76500
commit ce98f7c175
19 changed files with 585 additions and 91 deletions

View File

@@ -167,10 +167,10 @@ export class AiWorkflowBuilderService {
return this.agent;
}
async *chat(payload: ChatPayload, user?: IUser) {
async *chat(payload: ChatPayload, user?: IUser, abortSignal?: AbortSignal) {
const agent = await this.getAgent(user);
for await (const output of agent.chat(payload, user?.id?.toString())) {
for await (const output of agent.chat(payload, user?.id?.toString(), abortSignal)) {
yield output;
}
}

View File

@@ -1,6 +1,7 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { AIMessage, ToolMessage } from '@langchain/core/messages';
import { HumanMessage, RemoveMessage } from '@langchain/core/messages';
import type { ToolMessage } from '@langchain/core/messages';
import { AIMessage, HumanMessage, RemoveMessage } from '@langchain/core/messages';
import type { RunnableConfig } from '@langchain/core/runnables';
import type { LangChainTracer } from '@langchain/core/tracers/tracer_langchain';
import { StateGraph, MemorySaver, END } from '@langchain/langgraph';
import type { Logger } from '@n8n/backend-common';
@@ -180,71 +181,73 @@ export class WorkflowBuilderAgent {
: crypto.randomUUID();
}
async *chat(payload: ChatPayload, userId?: string) {
/**
 * Resolves the workflow JSON to seed the agent state with.
 *
 * Returns the editor's current workflow from the chat payload when present;
 * otherwise falls back to an empty workflow ({ nodes: [], connections: {} }).
 *
 * @param payload - Incoming chat payload, optionally carrying workflowContext.currentWorkflow.
 * @returns The current workflow, or an empty SimpleWorkflow when none was provided.
 */
private getDefaultWorkflowJSON(payload: ChatPayload): SimpleWorkflow {
return (
// NOTE(review): the cast assumes currentWorkflow already conforms to
// SimpleWorkflow — no runtime validation happens here; confirm upstream shape.
(payload.workflowContext?.currentWorkflow as SimpleWorkflow) ?? {
nodes: [],
connections: {},
}
);
}
async *chat(payload: ChatPayload, userId?: string, abortSignal?: AbortSignal) {
const agent = this.createWorkflow().compile({ checkpointer: this.checkpointer });
const workflowId = payload.workflowContext?.currentWorkflow?.id;
// Generate thread ID from workflowId and userId
// This ensures one session per workflow per user
const threadId = WorkflowBuilderAgent.generateThreadId(workflowId, userId);
// Configure thread for checkpointing
const threadConfig = {
const threadConfig: RunnableConfig = {
configurable: {
thread_id: threadId,
},
};
const streamConfig = {
...threadConfig,
streamMode: ['updates', 'custom'],
recursionLimit: 30,
signal: abortSignal,
callbacks: this.tracer ? [this.tracer] : undefined,
} as RunnableConfig;
// Check if this is a subsequent message
// If so, update the workflowJSON with the current editor state
const existingCheckpoint = await this.checkpointer.getTuple(threadConfig);
let stream;
if (!existingCheckpoint?.checkpoint) {
// First message - use initial state
const initialState: typeof WorkflowState.State = {
const stream = await agent.stream(
{
messages: [new HumanMessage({ content: payload.message })],
workflowJSON: (payload.workflowContext?.currentWorkflow as SimpleWorkflow) ?? {
nodes: [],
connections: {},
},
workflowJSON: this.getDefaultWorkflowJSON(payload),
workflowOperations: [],
workflowContext: payload.workflowContext,
};
},
streamConfig,
);
stream = await agent.stream(initialState, {
...threadConfig,
streamMode: ['updates', 'custom'],
recursionLimit: 30,
callbacks: this.tracer ? [this.tracer] : undefined,
});
} else {
// Subsequent message - update the state with current workflow
const stateUpdate: Partial<typeof WorkflowState.State> = {
messages: [new HumanMessage({ content: payload.message })],
workflowOperations: [], // Clear any pending operations from previous message
workflowContext: payload.workflowContext,
workflowJSON: { nodes: [], connections: {} }, // Default to empty workflow
};
if (payload.workflowContext?.currentWorkflow) {
stateUpdate.workflowJSON = payload.workflowContext?.currentWorkflow as SimpleWorkflow;
try {
const streamProcessor = createStreamProcessor(stream);
for await (const output of streamProcessor) {
yield output;
}
} catch (error) {
if (
error &&
typeof error === 'object' &&
'message' in error &&
typeof error.message === 'string' &&
// This is naive, but it's all we get from LangGraph AbortError
['Abort', 'Aborted'].includes(error.message)
) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
const messages = (await agent.getState(threadConfig)).values.messages as Array<
AIMessage | HumanMessage | ToolMessage
>;
// Stream with just the new message
stream = await agent.stream(stateUpdate, {
...threadConfig,
streamMode: ['updates', 'custom'],
recursionLimit: 80,
callbacks: this.tracer ? [this.tracer] : undefined,
});
}
// Use the stream processor utility to handle chunk processing
const streamProcessor = createStreamProcessor(stream);
for await (const output of streamProcessor) {
yield output;
// Handle abort errors gracefully
const abortedAiMessage = new AIMessage({
content: '[Task aborted]',
id: crypto.randomUUID(),
});
// TODO: Should we clear tool calls that are in progress?
await agent.updateState(threadConfig, { messages: [...messages, abortedAiMessage] });
return;
}
throw error;
}
}
@@ -256,7 +259,7 @@ export class WorkflowBuilderAgent {
if (workflowId) {
const threadId = WorkflowBuilderAgent.generateThreadId(workflowId, userId);
const threadConfig = {
const threadConfig: RunnableConfig = {
configurable: {
thread_id: threadId,
},

View File

@@ -152,6 +152,7 @@ describe('AiController', () => {
},
},
request.user,
expect.any(AbortSignal),
);
expect(response.header).toHaveBeenCalledWith('Content-type', 'application/json-lines');
expect(response.flush).toHaveBeenCalled();
@@ -241,5 +242,157 @@ describe('AiController', () => {
expect(response.json).not.toHaveBeenCalled();
expect(response.end).toHaveBeenCalled();
});
describe('Abort handling', () => {
it('should create AbortController and handle connection close', async () => {
let abortHandler: (() => void) | undefined;
let abortSignalPassed: AbortSignal | undefined;
// Mock response.on to capture the close handler
response.on.mockImplementation((event: string, handler: () => void) => {
if (event === 'close') {
abortHandler = handler;
}
return response;
});
// Create a generator that yields once then checks for abort
async function* testGenerator() {
yield {
messages: [{ role: 'assistant', type: 'message', text: 'Processing...' } as const],
};
// Check if aborted and throw if so
if (abortSignalPassed?.aborted) {
throw new Error('Aborted');
}
}
workflowBuilderService.chat.mockImplementation((_payload, _user, signal) => {
abortSignalPassed = signal;
return testGenerator();
});
// Start the request (but don't await it)
const buildPromise = controller.build(request, response, payload);
// Wait a bit to ensure the generator is created and starts processing
await new Promise((resolve) => setTimeout(resolve, 50));
// Verify abort signal was passed to the service
expect(abortSignalPassed).toBeDefined();
expect(abortSignalPassed).toBeInstanceOf(AbortSignal);
expect(abortSignalPassed?.aborted).toBe(false);
// Verify close handler was registered
expect(response.on).toHaveBeenCalledWith('close', expect.any(Function));
expect(abortHandler).toBeDefined();
// Simulate connection close
abortHandler!();
// Verify the signal was aborted
expect(abortSignalPassed?.aborted).toBe(true);
// Wait for the promise to settle
await buildPromise.catch(() => {
// Expected to throw due to abort
});
// Verify response was ended
expect(response.end).toHaveBeenCalled();
});
it('should pass abort signal to workflow builder service', async () => {
let capturedSignal: AbortSignal | undefined;
async function* mockGenerator() {
yield { messages: [{ role: 'assistant', type: 'message', text: 'Test' } as const] };
}
workflowBuilderService.chat.mockImplementation((_payload, _user, signal) => {
capturedSignal = signal;
return mockGenerator();
});
await controller.build(request, response, payload);
expect(capturedSignal).toBeDefined();
expect(capturedSignal).toBeInstanceOf(AbortSignal);
expect(workflowBuilderService.chat).toHaveBeenCalledWith(
expect.any(Object),
request.user,
capturedSignal,
);
});
it('should handle stream interruption when connection closes', async () => {
let abortHandler: (() => void) | undefined;
let abortSignalPassed: AbortSignal | undefined;
response.on.mockImplementation((event: string, handler: () => void) => {
if (event === 'close') {
abortHandler = handler;
}
return response;
});
// Create a generator that yields multiple chunks
async function* mockChatGenerator() {
yield { messages: [{ role: 'assistant', type: 'message', text: 'Chunk 1' } as const] };
// Check if aborted before yielding next chunk
if (abortSignalPassed?.aborted) {
throw new Error('Aborted');
}
// This second chunk should not be reached if aborted
yield { messages: [{ role: 'assistant', type: 'message', text: 'Chunk 2' } as const] };
}
workflowBuilderService.chat.mockImplementation((_payload, _user, signal) => {
abortSignalPassed = signal;
return mockChatGenerator();
});
// Start the build process
const buildPromise = controller.build(request, response, payload);
// Wait for first chunk to be written
await new Promise((resolve) => setTimeout(resolve, 20));
// Should have written at least one chunk
expect(response.write).toHaveBeenCalled();
const writeCallsBeforeAbort = response.write.mock.calls.length;
// Simulate connection close
abortHandler!();
// Wait for the build to complete
await buildPromise.catch(() => {
// Expected to catch abort error
});
// Should not have written additional chunks after abort
expect(response.write).toHaveBeenCalledTimes(writeCallsBeforeAbort);
expect(response.end).toHaveBeenCalled();
});
it('should cleanup abort listener on successful completion', async () => {
const onSpy = jest.spyOn(response, 'on');
const offSpy = jest.spyOn(response, 'off');
async function* mockGenerator() {
yield { messages: [{ role: 'assistant', type: 'message', text: 'Complete' } as const] };
}
workflowBuilderService.chat.mockReturnValue(mockGenerator());
await controller.build(request, response, payload);
// Verify close handler was registered and then removed
expect(onSpy).toHaveBeenCalledWith('close', expect.any(Function));
expect(offSpy).toHaveBeenCalledWith('close', expect.any(Function));
});
});
});
});

View File

@@ -46,6 +46,13 @@ export class AiController {
@Body payload: AiBuilderChatRequestDto,
) {
try {
const abortController = new AbortController();
const { signal } = abortController;
const handleClose = () => abortController.abort();
res.on('close', handleClose);
const { text, workflowContext } = payload.payload;
const aiResponse = this.workflowBuilderService.chat(
{
@@ -57,6 +64,7 @@ export class AiController {
},
},
req.user,
signal,
);
res.header('Content-type', 'application/json-lines').flush();
@@ -83,6 +91,9 @@ export class AiController {
],
};
res.write(JSON.stringify(errorChunk) + '⧉⇋⇋➽⌑⧉§§\n');
} finally {
// Clean up event listener
res.off('close', handleClose);
}
res.end();

View File

@@ -48,9 +48,9 @@ export class WorkflowBuilderService {
return this.service;
}
async *chat(payload: ChatPayload, user: IUser) {
async *chat(payload: ChatPayload, user: IUser, abortSignal?: AbortSignal) {
const service = await this.getService();
yield* service.chat(payload, user);
yield* service.chat(payload, user, abortSignal);
}
async getSessions(workflowId: string | undefined, user: IUser) {

View File

@@ -10,7 +10,6 @@ import AssistantText from '../AskAssistantText/AssistantText.vue';
import InlineAskAssistantButton from '../InlineAskAssistantButton/InlineAskAssistantButton.vue';
import N8nButton from '../N8nButton';
import N8nIcon from '../N8nIcon';
import N8nIconButton from '../N8nIconButton';
const { t } = useI18n();
@@ -28,10 +27,12 @@ interface Props {
title?: string;
placeholder?: string;
scrollOnNewMessage?: boolean;
showStop?: boolean;
}
const emit = defineEmits<{
close: [];
stop: [];
message: [string, string?, boolean?];
codeReplace: [number];
codeUndo: [number];
@@ -253,11 +254,24 @@ watch(
@input.prevent="growInput"
@keydown.stop
/>
<N8nIconButton
:class="{ [$style.sendButton]: true }"
<N8nButton
v-if="showStop && streaming"
:class="$style.stopButton"
icon="square"
size="large"
type="danger"
outline
square
data-test-id="send-message-button"
@click="emit('stop')"
/>
<N8nButton
v-else
:class="$style.sendButton"
icon="send"
:text="true"
size="large"
square
data-test-id="send-message-button"
:disabled="sendDisabled"
@click="onSendMessage"
@@ -274,7 +288,9 @@ watch(
display: grid;
grid-template-rows: auto 1fr auto;
}
:root .stopButton {
--button-border-color: transparent;
}
.header {
height: 65px; // same as header height in editor
padding: 0 var(--spacing-l);

View File

@@ -171,15 +171,19 @@ exports[`AskAssistantChat > does not render retry button if no error is present
rows="1"
wrap="hard"
/>
<n8n-icon-button-stub
<n8n-button-stub
active="false"
block="false"
class="sendButton"
data-test-id="send-message-button"
disabled="true"
element="button"
icon="send"
label=""
loading="false"
outline="false"
size="large"
square="true"
text="true"
type="primary"
/>
@@ -988,15 +992,19 @@ Testing more code
rows="1"
wrap="hard"
/>
<n8n-icon-button-stub
<n8n-button-stub
active="false"
block="false"
class="sendButton"
data-test-id="send-message-button"
disabled="true"
element="button"
icon="send"
label=""
loading="false"
outline="false"
size="large"
square="true"
text="true"
type="primary"
/>
@@ -1169,15 +1177,19 @@ exports[`AskAssistantChat > renders default placeholder chat correctly 1`] = `
rows="1"
wrap="hard"
/>
<n8n-icon-button-stub
<n8n-button-stub
active="false"
block="false"
class="sendButton"
data-test-id="send-message-button"
disabled="true"
element="button"
icon="send"
label=""
loading="false"
outline="false"
size="large"
square="true"
text="true"
type="primary"
/>
@@ -1438,15 +1450,19 @@ exports[`AskAssistantChat > renders end of session chat correctly 1`] = `
rows="1"
wrap="hard"
/>
<n8n-icon-button-stub
<n8n-button-stub
active="false"
block="false"
class="sendButton"
data-test-id="send-message-button"
disabled="true"
element="button"
icon="send"
label=""
loading="false"
outline="false"
size="large"
square="true"
text="true"
type="primary"
/>
@@ -1641,15 +1657,19 @@ exports[`AskAssistantChat > renders error message correctly with retry button 1`
rows="1"
wrap="hard"
/>
<n8n-icon-button-stub
<n8n-button-stub
active="false"
block="false"
class="sendButton"
data-test-id="send-message-button"
disabled="true"
element="button"
icon="send"
label=""
loading="false"
outline="false"
size="large"
square="true"
text="true"
type="primary"
/>
@@ -1900,15 +1920,19 @@ catch(e) {
rows="1"
wrap="hard"
/>
<n8n-icon-button-stub
<n8n-button-stub
active="false"
block="false"
class="sendButton"
data-test-id="send-message-button"
disabled="true"
element="button"
icon="send"
label=""
loading="false"
outline="false"
size="large"
square="true"
text="true"
type="primary"
/>
@@ -2092,15 +2116,19 @@ exports[`AskAssistantChat > renders streaming chat correctly 1`] = `
rows="1"
wrap="hard"
/>
<n8n-icon-button-stub
<n8n-button-stub
active="false"
block="false"
class="sendButton"
data-test-id="send-message-button"
disabled="true"
element="button"
icon="send"
label=""
loading="false"
outline="false"
size="large"
square="true"
text="true"
type="primary"
/>

View File

@@ -3,11 +3,20 @@ import { useCssModule } from 'vue';
import { useI18n } from '../../composables/useI18n';
import AssistantIcon from '../AskAssistantIcon/AssistantIcon.vue';
import N8nButton from '../N8nButton';
defineOptions({
name: 'CanvasThinkingPill',
});
defineProps<{
showStop?: boolean;
}>();
const emit = defineEmits<{
stop: [];
}>();
const { t } = useI18n();
const $style = useCssModule();
</script>
@@ -17,7 +26,17 @@ const $style = useCssModule();
<div :class="$style.iconWrapper">
<AssistantIcon theme="blank" />
</div>
<span :class="$style.text">{{ t('aiAssistant.builder.canvas.thinking') }}</span>
<span :class="$style.text"
>{{ t('aiAssistant.builder.canvas.thinking') }}
<N8nButton
v-if="showStop"
:class="$style.stopButton"
:label="'Stop'"
type="secondary"
size="mini"
@click="emit('stop')"
/>
</span>
</div>
</template>
@@ -28,7 +47,7 @@ const $style = useCssModule();
padding: 0 var(--spacing-s) 0 var(--spacing-xs);
justify-content: center;
align-items: center;
gap: var(--spacing-3xs);
gap: var(--spacing-2xs);
border-radius: 22px;
border: 1px solid var(--prim-gray-740);
background: rgba(65, 66, 68, 0.92);
@@ -51,6 +70,9 @@ const $style = useCssModule();
justify-content: center;
}
.stopButton {
margin-left: var(--spacing-xs);
}
.text {
color: white;
font-size: var(--font-size-s);

View File

@@ -46,7 +46,8 @@ exports[`CanvasThinkingPill > renders canvas thinking pill correctly 1`] = `
<span
class="text"
>
Working...
Working...
<!--v-if-->
</span>
</div>
</div>

View File

@@ -190,6 +190,7 @@
"aiAssistant.builder.canvasPrompt.cancelButton": "Cancel",
"aiAssistant.builder.canvasPrompt.startManually.title": "Start manually",
"aiAssistant.builder.canvasPrompt.startManually.subTitle": "Add the first node",
"aiAssistant.builder.streamAbortedMessage": "[Task aborted]",
"aiAssistant.assistant": "AI Assistant",
"aiAssistant.newSessionModal.title.part1": "Start new",
"aiAssistant.newSessionModal.title.part2": "session",

View File

@@ -219,6 +219,7 @@ export async function streamRequest<T extends object>(
onDone?: () => void,
onError?: (e: Error) => void,
separator = STREAM_SEPERATOR,
abortSignal?: AbortSignal,
): Promise<void> {
const headers: Record<string, string> = {
'browser-id': getBrowserId(),
@@ -229,6 +230,7 @@ export async function streamRequest<T extends object>(
method: 'POST',
credentials: 'include',
body: JSON.stringify(payload),
signal: abortSignal,
};
try {
const response = await fetch(`${context.baseUrl}${apiEndpoint}`, assistantRequest);

View File

@@ -13,6 +13,7 @@ export function chatWithBuilder(
onMessageUpdated: (data: ChatRequest.ResponsePayload) => void,
onDone: () => void,
onError: (e: Error) => void,
abortSignal?: AbortSignal,
): void {
void streamRequest<ChatRequest.ResponsePayload>(
ctx,
@@ -21,6 +22,8 @@ export function chatWithBuilder(
onMessageUpdated,
onDone,
onError,
undefined,
abortSignal,
);
}

View File

@@ -132,11 +132,13 @@ watch(currentRoute, () => {
:loading-message="loadingMessage"
:mode="i18n.baseText('aiAssistant.builder.mode')"
:title="'n8n AI'"
:show-stop="true"
:scroll-on-new-message="true"
:placeholder="i18n.baseText('aiAssistant.builder.placeholder')"
@close="emit('close')"
@message="onUserMessage"
@feedback="onFeedback"
@stop="builderStore.stopStreaming"
>
<template #header>
<slot name="header" />

View File

@@ -30,6 +30,7 @@ const workflowSaver = useWorkflowSaving({ router });
const prompt = ref('');
const userEditedPrompt = ref(false);
const isFocused = ref(false);
const isLoading = ref(false);
// Computed properties
const hasContent = computed(() => prompt.value.trim().length > 0);
@@ -42,6 +43,7 @@ const suggestions = ref(WORKFLOW_SUGGESTIONS);
*/
async function onSubmit() {
if (!hasContent.value || builderStore.streaming) return;
isLoading.value = true;
const isNewWorkflow = workflowsStore.isNewWorkflow;
@@ -52,6 +54,7 @@ async function onSubmit() {
// Here we need to await for chat to open and session to be loaded
await builderStore.openChat();
isLoading.value = false;
builderStore.sendChatMessage({ text: prompt.value, source: 'canvas' });
}
@@ -120,7 +123,7 @@ function onAddNodeClick() {
name="aiBuilderPrompt"
:class="$style.formTextarea"
type="textarea"
:disabled="builderStore.streaming"
:disabled="isLoading || builderStore.streaming"
:placeholder="i18n.baseText('aiAssistant.builder.placeholder')"
:read-only="false"
:rows="15"
@@ -133,6 +136,7 @@ function onAddNodeClick() {
<n8n-button
native-type="submit"
:disabled="!hasContent || builderStore.streaming"
:loading="isLoading"
@keydown.enter="onSubmit"
>
{{ i18n.baseText('aiAssistant.builder.canvasPrompt.buildWorkflow') }}

View File

@@ -262,6 +262,16 @@ export function useBuilderMessages() {
} as ChatUI.AssistantMessage;
}
/**
 * Builds a plain assistant text message for the chat UI.
 *
 * @param content - The message text to display (e.g. the "[Task aborted]" notice).
 * @param id - Unique message id used by the chat list for keying/deduplication.
 * @returns A ChatUI.AssistantMessage of type 'text'.
 */
function createAssistantMessage(content: string, id: string): ChatUI.AssistantMessage {
return {
id,
role: 'assistant',
type: 'text',
content,
// Marked read up-front — presumably so synthetic messages (like abort
// notices) never surface as unread; confirm against unread-count logic.
read: true,
} as ChatUI.AssistantMessage;
}
function createErrorMessage(
content: string,
id: string,
@@ -346,6 +356,7 @@ export function useBuilderMessages() {
return {
processAssistantMessages,
createUserMessage,
createAssistantMessage,
createErrorMessage,
clearMessages,
addMessages,

View File

@@ -1,3 +1,6 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { setActivePinia, createPinia } from 'pinia';
import { ENABLED_VIEWS, useBuilderStore } from '@/stores/builder.store';
@@ -505,4 +508,205 @@ describe('AI Builder store', () => {
'I can help you build a workflow',
);
});
describe('Abort functionality', () => {
it('should create and manage abort controller', () => {
const builderStore = useBuilderStore();
// Initially no abort controller (might be undefined or null)
expect(builderStore.streamingAbortController).toBeFalsy();
// Start streaming creates abort controller
apiSpy.mockImplementationOnce((_ctx, _payload, onMessage, _onDone, _onError, _signal) => {
// Simulate successful start of streaming
setTimeout(() => {
onMessage({
messages: [
{
type: 'message',
role: 'assistant',
text: 'Processing...',
},
],
sessionId: 'test-session',
});
}, 0);
});
builderStore.sendChatMessage({ text: 'test' });
expect(builderStore.streamingAbortController).not.toBeNull();
expect(builderStore.streamingAbortController).toBeInstanceOf(AbortController);
});
it('should call abort on existing controller when stopStreaming is called', () => {
const builderStore = useBuilderStore();
// First start a request to create an abort controller
apiSpy.mockImplementationOnce(() => {});
builderStore.sendChatMessage({ text: 'test' });
// Verify controller was created
const controller = builderStore.streamingAbortController;
expect(controller).toBeInstanceOf(AbortController);
// Spy on the abort method
const abortSpy = vi.spyOn(controller!, 'abort');
// Call stopStreaming
builderStore.stopStreaming();
// Verify abort was called
expect(abortSpy).toHaveBeenCalled();
expect(builderStore.streamingAbortController).toBeNull();
expect(builderStore.streaming).toBe(false);
});
it('should handle AbortError gracefully', async () => {
const builderStore = useBuilderStore();
// Simulate an abort error
const abortError = new Error('AbortError');
abortError.name = 'AbortError';
apiSpy.mockImplementationOnce((_ctx, _payload, _onMessage, _onDone, onError) => {
onError(abortError);
});
builderStore.sendChatMessage({ text: 'test message' });
await vi.waitFor(() => expect(builderStore.chatMessages.length).toBe(2));
// Should have user message and aborted message
expect(builderStore.chatMessages[0].role).toBe('user');
expect(builderStore.chatMessages[1].role).toBe('assistant');
expect(builderStore.chatMessages[1].type).toBe('text');
expect((builderStore.chatMessages[1] as ChatUI.TextMessage).content).toBe('[Task aborted]');
// Verify streaming state was reset
expect(builderStore.streaming).toBe(false);
expect(builderStore.assistantThinkingMessage).toBeUndefined();
});
it('should abort previous request when sending new message', () => {
const builderStore = useBuilderStore();
// The current implementation prevents sending a new message while streaming
// by checking if streaming.value is true and returning early.
// Mock for first request - keep it pending
apiSpy.mockImplementationOnce((_ctx, _payload, onMessage, _onDone) => {
// Don't call onDone to keep streaming active
setTimeout(() => {
onMessage({
messages: [
{
type: 'message',
role: 'assistant',
text: 'Processing first message...',
},
],
sessionId: 'test-session',
});
}, 10);
});
// Start first request
builderStore.sendChatMessage({ text: 'first message' });
// Verify streaming is active and controller was created
expect(builderStore.streaming).toBe(true);
const firstController = builderStore.streamingAbortController;
expect(firstController).not.toBeNull();
expect(firstController).toBeInstanceOf(AbortController);
// Track if abort was called
const abortSpy = vi.spyOn(firstController!, 'abort');
// Try to send second message while streaming - it should be ignored
builderStore.sendChatMessage({ text: 'second message ignored' });
// Verify the abort was NOT called and controller is the same
expect(abortSpy).not.toHaveBeenCalled();
expect(builderStore.streamingAbortController).toBe(firstController);
// Now properly stop streaming first
builderStore.stopStreaming();
// Verify abort was called and controller was cleared
expect(abortSpy).toHaveBeenCalled();
expect(builderStore.streamingAbortController).toBeNull();
expect(builderStore.streaming).toBe(false);
// Mock for second request
apiSpy.mockImplementationOnce(() => {});
// Now we can send a new message
builderStore.sendChatMessage({ text: 'second message' });
// New controller should be created
const secondController = builderStore.streamingAbortController;
expect(secondController).not.toBe(firstController);
expect(secondController).not.toBeNull();
expect(secondController).toBeInstanceOf(AbortController);
});
it('should pass abort signal to API call', () => {
const builderStore = useBuilderStore();
// Mock the API to prevent actual network calls
apiSpy.mockImplementationOnce(() => {});
builderStore.sendChatMessage({ text: 'test' });
// Verify the API was called with correct parameters
expect(apiSpy).toHaveBeenCalled();
const callArgs = apiSpy.mock.calls[0];
expect(callArgs).toHaveLength(6); // Should have 6 arguments
const signal = callArgs[5]; // The 6th argument is the abort signal
expect(signal).toBeDefined();
expect(signal).toBeInstanceOf(AbortSignal);
// Check that it's the same signal from the controller
const controller = builderStore.streamingAbortController;
expect(controller).not.toBeNull();
expect(controller).toBeInstanceOf(AbortController);
expect(signal).toBe(controller!.signal);
});
it('should not create error message for aborted requests', async () => {
const builderStore = useBuilderStore();
// Track telemetry calls
const telemetryTrackSpy = vi.fn();
track.mockImplementation(telemetryTrackSpy);
// Simulate abort error
const abortError = new Error('AbortError');
abortError.name = 'AbortError';
apiSpy.mockImplementationOnce((_ctx, _payload, _onMessage, _onDone, onError) => {
// Call error handler immediately
onError(abortError);
});
// Clear messages before test
builderStore.chatMessages.length = 0;
builderStore.sendChatMessage({ text: 'test' });
// Wait for the error to be processed
await vi.waitFor(() => expect(builderStore.chatMessages.length).toBeGreaterThan(1));
// Should not track error for abort
expect(telemetryTrackSpy).not.toHaveBeenCalledWith(
'Workflow generation errored',
expect.anything(),
);
// Find the assistant messages (skip user message)
const assistantMessages = builderStore.chatMessages.filter((msg) => msg.role === 'assistant');
expect(assistantMessages).toHaveLength(1);
expect(assistantMessages[0].type).toBe('text');
expect((assistantMessages[0] as ChatUI.TextMessage).content).toBe('[Task aborted]');
});
});
});

View File

@@ -36,6 +36,7 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
const chatWindowOpen = ref<boolean>(false);
const streaming = ref<boolean>(false);
const assistantThinkingMessage = ref<string | undefined>();
const streamingAbortController = ref<AbortController | null>(null);
// Store dependencies
const settings = useSettingsStore();
@@ -51,6 +52,7 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
const {
processAssistantMessages,
createUserMessage,
createAssistantMessage,
createErrorMessage,
clearMessages,
mapAssistantMessageToUI,
@@ -151,6 +153,10 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
function stopStreaming() {
streaming.value = false;
if (streamingAbortController.value) {
streamingAbortController.value.abort();
streamingAbortController.value = null;
}
}
// Error handling
@@ -166,11 +172,19 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
stopStreaming();
assistantThinkingMessage.value = undefined;
if (e.name === 'AbortError') {
// Handle abort errors as they are expected when stopping streaming
const userMsg = createAssistantMessage('[Task aborted]', 'aborted-streaming');
chatMessages.value = [...chatMessages.value, userMsg];
return;
}
const errorMessage = createErrorMessage(
locale.baseText('aiAssistant.serviceError.message', { interpolate: { message: e.message } }),
id,
retry,
);
chatMessages.value = [...chatMessages.value, errorMessage];
telemetry.track('Workflow generation errored', {
@@ -247,6 +261,12 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
});
const retry = createRetryHandler(messageId, async () => sendChatMessage(options));
// Abort previous streaming request if any
if (streamingAbortController.value) {
streamingAbortController.value.abort();
}
streamingAbortController.value = new AbortController();
try {
chatWithBuilder(
rootStore.restApiContext,
@@ -269,6 +289,7 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
},
() => stopStreaming(),
(e) => handleServiceError(e, messageId, retry),
streamingAbortController.value?.signal,
);
} catch (e: unknown) {
handleServiceError(e, messageId, retry);
@@ -393,9 +414,11 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
toolMessages,
workflowMessages,
trackingSessionId,
streamingAbortController,
// Methods
updateWindowWidth,
stopStreaming,
closeChat,
openChat,
resetBuilderChat,

View File

@@ -2132,7 +2132,12 @@ onBeforeUnmount(() => {
{{ i18n.baseText('readOnlyEnv.cantEditOrRun') }}
</N8nCallout>
<CanvasThinkingPill v-if="builderStore.streaming" :class="$style.thinkingPill" />
<CanvasThinkingPill
v-if="builderStore.streaming"
:class="$style.thinkingPill"
show-stop
@stop="builderStore.stopStreaming"
/>
<Suspense>
<LazyNodeCreation

43
pnpm-lock.yaml generated
View File

@@ -947,7 +947,7 @@ importers:
version: 4.3.0
'@getzep/zep-cloud':
specifier: 1.0.12
version: 1.0.12(@langchain/core@0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))(encoding@0.1.13)(langchain@0.3.29(685a0af47c92c1c99f5d7c1501acb3dd))
version: 1.0.12(@langchain/core@0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))(encoding@0.1.13)(langchain@0.3.29(e83255224da9386305dfb90eaf104268))
'@getzep/zep-js':
specifier: 0.9.0
version: 0.9.0
@@ -974,7 +974,7 @@ importers:
version: 0.3.4(@langchain/core@0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))(encoding@0.1.13)
'@langchain/community':
specifier: 'catalog:'
version: 0.3.47(d020f0fdc951e6780c547ab03e43240e)
version: 0.3.47(dc9073baf40e018f46d70486a38ca362)
'@langchain/core':
specifier: 'catalog:'
version: 0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67))
@@ -1091,7 +1091,7 @@ importers:
version: 23.0.1
langchain:
specifier: 0.3.29
version: 0.3.29(685a0af47c92c1c99f5d7c1501acb3dd)
version: 0.3.29(e83255224da9386305dfb90eaf104268)
lodash:
specifier: 'catalog:'
version: 4.17.21
@@ -18429,7 +18429,7 @@ snapshots:
'@currents/commit-info': 1.0.1-beta.0
async-retry: 1.3.3
axios: 1.10.0(debug@4.4.1)
axios-retry: 4.5.0(axios@1.10.0)
axios-retry: 4.5.0(axios@1.10.0(debug@4.4.1))
c12: 1.11.2(magicast@0.3.5)
chalk: 4.1.2
commander: 12.1.0
@@ -18732,7 +18732,7 @@ snapshots:
'@gar/promisify@1.1.3':
optional: true
'@getzep/zep-cloud@1.0.12(@langchain/core@0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))(encoding@0.1.13)(langchain@0.3.29(685a0af47c92c1c99f5d7c1501acb3dd))':
'@getzep/zep-cloud@1.0.12(@langchain/core@0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))(encoding@0.1.13)(langchain@0.3.29(e83255224da9386305dfb90eaf104268))':
dependencies:
form-data: 4.0.4
node-fetch: 2.7.0(encoding@0.1.13)
@@ -18741,7 +18741,7 @@ snapshots:
zod: 3.25.67
optionalDependencies:
'@langchain/core': 0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67))
langchain: 0.3.29(685a0af47c92c1c99f5d7c1501acb3dd)
langchain: 0.3.29(e83255224da9386305dfb90eaf104268)
transitivePeerDependencies:
- encoding
@@ -19295,7 +19295,7 @@ snapshots:
- aws-crt
- encoding
'@langchain/community@0.3.47(d020f0fdc951e6780c547ab03e43240e)':
'@langchain/community@0.3.47(dc9073baf40e018f46d70486a38ca362)':
dependencies:
'@browserbasehq/stagehand': 1.9.0(@playwright/test@1.53.0)(deepmerge@4.3.1)(dotenv@16.5.0)(encoding@0.1.13)(openai@5.8.1(ws@8.18.2)(zod@3.25.67))(zod@3.25.67)
'@ibm-cloud/watsonx-ai': 1.1.2
@@ -19307,7 +19307,7 @@ snapshots:
flat: 5.0.2
ibm-cloud-sdk-core: 5.3.2
js-yaml: 4.1.0
langchain: 0.3.29(685a0af47c92c1c99f5d7c1501acb3dd)
langchain: 0.3.29(e83255224da9386305dfb90eaf104268)
langsmith: 0.3.45(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67))
openai: 5.8.1(ws@8.18.2)(zod@3.25.67)
uuid: 10.0.0
@@ -19321,7 +19321,7 @@ snapshots:
'@aws-sdk/credential-provider-node': 3.808.0
'@azure/storage-blob': 12.26.0
'@browserbasehq/sdk': 2.6.0(encoding@0.1.13)
'@getzep/zep-cloud': 1.0.12(@langchain/core@0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))(encoding@0.1.13)(langchain@0.3.29(685a0af47c92c1c99f5d7c1501acb3dd))
'@getzep/zep-cloud': 1.0.12(@langchain/core@0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))(encoding@0.1.13)(langchain@0.3.29(e83255224da9386305dfb90eaf104268))
'@getzep/zep-js': 0.9.0
'@google-ai/generativelanguage': 2.6.0(encoding@0.1.13)
'@google-cloud/storage': 7.12.1(encoding@0.1.13)
@@ -23066,6 +23066,11 @@ snapshots:
axe-core@4.7.2: {}
axios-retry@4.5.0(axios@1.10.0(debug@4.4.1)):
dependencies:
axios: 1.10.0(debug@4.4.1)
is-retry-allowed: 2.2.0
axios-retry@4.5.0(axios@1.10.0):
dependencies:
axios: 1.10.0
@@ -25109,7 +25114,7 @@ snapshots:
eslint-import-resolver-node@0.3.9:
dependencies:
debug: 3.2.7(supports-color@8.1.1)
debug: 3.2.7(supports-color@5.5.0)
is-core-module: 2.16.1
resolve: 1.22.10
transitivePeerDependencies:
@@ -25133,7 +25138,7 @@ snapshots:
eslint-module-utils@2.12.1(@typescript-eslint/parser@8.35.0(eslint@9.29.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@4.4.3)(eslint@9.29.0(jiti@1.21.7)):
dependencies:
debug: 3.2.7(supports-color@8.1.1)
debug: 3.2.7(supports-color@5.5.0)
optionalDependencies:
'@typescript-eslint/parser': 8.35.0(eslint@9.29.0(jiti@1.21.7))(typescript@5.8.3)
eslint: 9.29.0(jiti@1.21.7)
@@ -25172,7 +25177,7 @@ snapshots:
array.prototype.findlastindex: 1.2.6
array.prototype.flat: 1.3.3
array.prototype.flatmap: 1.3.3
debug: 3.2.7(supports-color@8.1.1)
debug: 3.2.7(supports-color@5.5.0)
doctrine: 2.1.0
eslint: 9.29.0(jiti@1.21.7)
eslint-import-resolver-node: 0.3.9
@@ -26109,7 +26114,7 @@ snapshots:
array-parallel: 0.1.3
array-series: 0.1.5
cross-spawn: 7.0.6
debug: 3.2.7(supports-color@8.1.1)
debug: 3.2.7(supports-color@5.5.0)
transitivePeerDependencies:
- supports-color
@@ -26451,7 +26456,7 @@ snapshots:
'@types/debug': 4.1.12
'@types/node': 20.19.1
'@types/tough-cookie': 4.0.5
axios: 1.10.0
axios: 1.10.0(debug@4.4.1)
camelcase: 6.3.0
debug: 4.4.1(supports-color@8.1.1)
dotenv: 16.5.0
@@ -26461,7 +26466,7 @@ snapshots:
isstream: 0.1.2
jsonwebtoken: 9.0.2
mime-types: 2.1.35
retry-axios: 2.6.0(axios@1.10.0(debug@4.4.1))
retry-axios: 2.6.0(axios@1.10.0)
tough-cookie: 4.1.4
transitivePeerDependencies:
- supports-color
@@ -27675,7 +27680,7 @@ snapshots:
kuler@2.0.0: {}
langchain@0.3.29(685a0af47c92c1c99f5d7c1501acb3dd):
langchain@0.3.29(e83255224da9386305dfb90eaf104268):
dependencies:
'@langchain/core': 0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67))
'@langchain/openai': 0.5.16(@langchain/core@0.3.61(@opentelemetry/api@1.9.0)(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))(ws@8.18.2)
@@ -29446,7 +29451,7 @@ snapshots:
pdf-parse@1.1.1:
dependencies:
debug: 3.2.7(supports-color@8.1.1)
debug: 3.2.7(supports-color@5.5.0)
node-ensure: 0.0.0
transitivePeerDependencies:
- supports-color
@@ -30389,7 +30394,7 @@ snapshots:
onetime: 5.1.2
signal-exit: 3.0.7
retry-axios@2.6.0(axios@1.10.0(debug@4.4.1)):
retry-axios@2.6.0(axios@1.10.0):
dependencies:
axios: 1.10.0
@@ -30416,7 +30421,7 @@ snapshots:
rhea@1.0.24:
dependencies:
debug: 3.2.7(supports-color@8.1.1)
debug: 3.2.7(supports-color@5.5.0)
transitivePeerDependencies:
- supports-color