feat: Update Chat SDK to support streaming responses (#17006)

Author: Benjamin Schroth
Co-authored-by: Eugene Molodkin <eugene@n8n.io>
Date: 2025-07-10 12:25:29 +02:00
Committed by: GitHub
Parent: b9e7b719c0
Commit: 3edadb5a75

26 changed files with 1670 additions and 37 deletions

@@ -82,6 +82,7 @@ function createAgentExecutor(
 async function processEventStream(
 	ctx: IExecuteFunctions,
 	eventStream: IterableReadableStream<StreamEvent>,
+	itemIndex: number,
 	returnIntermediateSteps: boolean = false,
 ): Promise<{ output: string; intermediateSteps?: any[] }> {
 	const agentResult: { output: string; intermediateSteps?: any[] } = {
@@ -92,7 +93,7 @@ async function processEventStream(
 		agentResult.intermediateSteps = [];
 	}
-	ctx.sendChunk('begin');
+	ctx.sendChunk('begin', itemIndex);
 	for await (const event of eventStream) {
 		// Stream chat model tokens as they come in
 		switch (event.event) {
@@ -108,7 +109,7 @@ async function processEventStream(
 					} else if (typeof chunkContent === 'string') {
 						chunkText = chunkContent;
 					}
-					ctx.sendChunk('item', chunkText);
+					ctx.sendChunk('item', itemIndex, chunkText);
 					agentResult.output += chunkText;
 				}
@@ -155,7 +156,7 @@ async function processEventStream(
 				break;
 		}
 	}
-	ctx.sendChunk('end');
+	ctx.sendChunk('end', itemIndex);
 	return agentResult;
 }
@@ -274,7 +275,12 @@ export async function toolsAgentExecute(
 			},
 		);
-		return await processEventStream(this, eventStream, options.returnIntermediateSteps);
+		return await processEventStream(
+			this,
+			eventStream,
+			itemIndex,
+			options.returnIntermediateSteps,
+		);
 	} else {
 		// Handle regular execution
 		return await executor.invoke(invokeParams, executeOptions);
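
Net effect of this file's change: every streamed chunk is now tagged with the index of the input item it belongs to, so multi-item runs can be demultiplexed downstream. A minimal sketch of the resulting per-item flow, assuming a simplified sendChunk signature of (type, itemIndex, content?) and a simplified event shape (the real IExecuteFunctions and LangChain StreamEvent types carry more fields):

	type ChunkType = 'begin' | 'item' | 'end';

	interface StreamingContext {
		sendChunk(type: ChunkType, itemIndex: number, content?: string): void;
	}

	// Streams one item's response: 'begin' marks the start, each 'item' carries a
	// text fragment, and 'end' marks completion, all tagged with the same itemIndex.
	async function streamOneItem(
		ctx: StreamingContext,
		events: AsyncIterable<{ event: string; data?: { chunk?: { content?: string } } }>,
		itemIndex: number,
	): Promise<string> {
		let output = '';
		ctx.sendChunk('begin', itemIndex);
		for await (const event of events) {
			if (event.event === 'on_chat_model_stream') {
				const text = event.data?.chunk?.content ?? '';
				if (text.length > 0) {
					ctx.sendChunk('item', itemIndex, text);
					output += text;
				}
			}
		}
		ctx.sendChunk('end', itemIndex);
		return output;
	}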

@@ -487,10 +487,10 @@ describe('toolsAgentExecute', () => {
 			const result = await toolsAgentExecute.call(mockContext);
-			expect(mockContext.sendChunk).toHaveBeenCalledWith('begin');
-			expect(mockContext.sendChunk).toHaveBeenCalledWith('item', 'Hello ');
-			expect(mockContext.sendChunk).toHaveBeenCalledWith('item', 'world!');
-			expect(mockContext.sendChunk).toHaveBeenCalledWith('end');
+			expect(mockContext.sendChunk).toHaveBeenCalledWith('begin', 0);
+			expect(mockContext.sendChunk).toHaveBeenCalledWith('item', 0, 'Hello ');
+			expect(mockContext.sendChunk).toHaveBeenCalledWith('item', 0, 'world!');
+			expect(mockContext.sendChunk).toHaveBeenCalledWith('end', 0);
 			expect(mockExecutor.streamEvents).toHaveBeenCalledTimes(1);
 			expect(result[0]).toHaveLength(1);
 			expect(result[0][0].json.output).toBe('Hello world!');
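
For a single input item (index 0) producing the two fragments above, the asserted sequence is begin → item('Hello ') → item('world!') → end, with every call carrying the item index. Expressed as data, under the assumption of a simplified chunk record (the actual wire format may include additional metadata):

	type Chunk = { type: 'begin' | 'item' | 'end'; itemIndex: number; content?: string };

	// Order and tagging the test above expects for item 0.
	const expectedSequence: Chunk[] = [
		{ type: 'begin', itemIndex: 0 },
		{ type: 'item', itemIndex: 0, content: 'Hello ' },
		{ type: 'item', itemIndex: 0, content: 'world!' },
		{ type: 'end', itemIndex: 0 },
	];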

@@ -591,6 +591,7 @@ export class ChatTrigger extends Node {
 					allowFileUploads: options.allowFileUploads,
 					allowedFilesMimeTypes: options.allowedFilesMimeTypes,
 					customCss: options.customCss,
+					enableStreaming,
 				});
 				res.status(200).send(page).end();

@@ -12,6 +12,7 @@ export function createPage({
 	allowFileUploads,
 	allowedFilesMimeTypes,
 	customCss,
+	enableStreaming,
 }: {
 	instanceId: string;
 	webhookUrl?: string;
@@ -26,6 +27,7 @@ export function createPage({
 	allowFileUploads?: boolean;
 	allowedFilesMimeTypes?: string;
 	customCss?: string;
+	enableStreaming?: boolean;
 }) {
 	const validAuthenticationOptions: AuthenticationChatOption[] = [
 		'none',
@@ -124,6 +126,7 @@ export function createPage({
 					${en ? `en: ${JSON.stringify(en)},` : ''}
 				},
 				${initialMessages.length ? `initialMessages: ${JSON.stringify(initialMessages)},` : ''}
+				enableStreaming: ${!!enableStreaming},
 			});
 		})();
 	</script>
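
The generated page above injects the flag into the embedded widget's init options. A minimal usage sketch of the corresponding SDK call with @n8n/chat's createChat, where the webhook URL is a placeholder and the option name mirrors the template:

	import { createChat } from '@n8n/chat';

	// Enables streamed responses in an embedded chat; per the trigger change above,
	// the Chat Trigger also controls whether streaming is enabled server-side.
	createChat({
		webhookUrl: 'https://<your-n8n-instance>/webhook/<webhook-id>/chat',
		enableStreaming: true,
	});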