fix: Track responseMode and public chat in node graph (no-changelog) (#17231)

Co-authored-by: Mutasem Aldmour <mutasem@n8n.io>
Benjamin Schroth authored on 2025-07-15 15:15:04 +02:00, committed by GitHub
parent df80673c96
commit 8232d7f1d4
3 changed files with 164 additions and 0 deletions
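
For orientation, here is a minimal standalone sketch of the detection logic this commit adds to the node-graph telemetry. It is plain TypeScript rather than the actual n8n helper: the names NodeParams and deriveTelemetryFields are illustrative only, while the node type strings, parameter names, and defaults are taken from the diff below.

// Simplified illustration only — not the real generateNodesGraph() implementation.
// `NodeParams` and `deriveTelemetryFields` are made-up names for this sketch.
interface NodeParams {
	agent?: string;
	public?: boolean;
	responseMode?: string;
	options?: { enableStreaming?: boolean; responseMode?: string };
}

interface TrackedFields {
	is_streaming?: boolean;
	response_mode?: string;
	public_chat?: boolean;
}

function deriveTelemetryFields(type: string, typeVersion: number, params: NodeParams): TrackedFields {
	const fields: TrackedFields = {};
	if (type === '@n8n/n8n-nodes-langchain.agent' && typeVersion >= 2.1) {
		// Streaming is on by default; only an explicit `enableStreaming: false` turns it off.
		fields.is_streaming = params.options?.enableStreaming !== false;
	} else if (type === 'n8n-nodes-base.webhook') {
		// Webhook nodes fall back to the 'onReceived' response mode when none is set.
		fields.response_mode = typeof params.responseMode === 'string' ? params.responseMode : 'onReceived';
	} else if (type === '@n8n/n8n-nodes-langchain.chatTrigger') {
		if (typeof params.options?.responseMode === 'string') {
			fields.response_mode = params.options.responseMode;
		}
		if (typeof params.public === 'boolean') {
			fields.public_chat = params.public;
		}
	}
	return fields;
}

// Example: a public Chat Trigger configured for streaming responses.
console.log(
	deriveTelemetryFields('@n8n/n8n-nodes-langchain.chatTrigger', 1, {
		public: true,
		options: { responseMode: 'streaming' },
	}),
); // -> { response_mode: 'streaming', public_chat: true }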

View File

@@ -2619,10 +2619,13 @@ export interface INodeGraphItem {
 	src_node_id?: string;
 	src_instance_id?: string;
 	agent?: string; //@n8n/n8n-nodes-langchain.agent
+	is_streaming?: boolean; //@n8n/n8n-nodes-langchain.agent
 	prompts?: IDataObject[] | IDataObject; //ai node's prompts, cloud only
 	toolSettings?: IDataObject; //various langchain tool's settings
 	sql?: string; //merge node combineBySql, cloud only
 	workflow_id?: string; //@n8n/n8n-nodes-langchain.toolWorkflow and n8n-nodes-base.executeWorkflow
+	response_mode?: string; // @n8n/n8n-nodes-langchain.chatTrigger, n8n-nodes-base.webhook selected response mode
+	public_chat?: boolean; // @n8n/n8n-nodes-langchain.chatTrigger
 	runs?: number;
 	items_total?: number;
 	metric_names?: string[];

View File

@@ -3,6 +3,7 @@ import {
 	AI_TRANSFORM_NODE_TYPE,
 	CHAIN_LLM_LANGCHAIN_NODE_TYPE,
 	CHAIN_SUMMARIZATION_LANGCHAIN_NODE_TYPE,
+	CHAT_TRIGGER_NODE_TYPE,
 	EVALUATION_NODE_TYPE,
 	EVALUATION_TRIGGER_NODE_TYPE,
 	EXECUTE_WORKFLOW_NODE_TYPE,
@@ -254,6 +255,20 @@ export function generateNodesGraph(
 			nodeItem.prompts = { instructions: node.parameters.instructions as string };
 		} else if (node.type === AGENT_LANGCHAIN_NODE_TYPE) {
 			nodeItem.agent = (node.parameters.agent as string) ?? 'toolsAgent';
+
+			if (node.typeVersion >= 2.1) {
+				const options = node.parameters?.options;
+				if (
+					typeof options === 'object' &&
+					options &&
+					'enableStreaming' in options &&
+					options.enableStreaming === false
+				) {
+					nodeItem.is_streaming = false;
+				} else {
+					nodeItem.is_streaming = true;
+				}
+			}
 		} else if (node.type === MERGE_NODE_TYPE) {
 			nodeItem.operation = node.parameters.mode as string;
@@ -365,6 +380,24 @@ export function generateNodesGraph(
 			}
 		} else if (node.type === WEBHOOK_NODE_TYPE) {
 			webhookNodeNames.push(node.name);
+			const responseMode = node.parameters?.responseMode;
+			nodeItem.response_mode = typeof responseMode === 'string' ? responseMode : 'onReceived';
+		} else if (node.type === CHAT_TRIGGER_NODE_TYPE) {
+			// Capture streaming response mode parameter
+			const options = node.parameters?.options;
+			if (
+				typeof options === 'object' &&
+				options &&
+				'responseMode' in options &&
+				typeof options.responseMode === 'string'
+			) {
+				nodeItem.response_mode = options.responseMode;
+			}
+			// Capture public chat setting
+			const isPublic = node.parameters?.public;
+			if (typeof isPublic === 'boolean') {
+				nodeItem.public_chat = isPublic;
+			}
 		} else if (
 			node.type === EXECUTE_WORKFLOW_NODE_TYPE ||
 			node.type === WORKFLOW_TOOL_LANGCHAIN_NODE_TYPE

View File

@@ -500,6 +500,7 @@ describe('generateNodesGraph', () => {
 				type: 'n8n-nodes-base.webhook',
 				version: 1.1,
 				position: [520, 380],
+				response_mode: 'onReceived',
 			},
 		},
 		notes: {},
@@ -2387,4 +2388,131 @@ describe('extractLastExecutedNodeStructuredOutputErrorInfo', () => {
 			model_name: 'gemini-1.5-pro',
 		});
 	});
+
+	it('should capture Agent node streaming parameters', () => {
+		const workflow: Partial<IWorkflowBase> = {
+			nodes: [
+				{
+					parameters: {
+						agent: 'toolsAgent',
+						options: {
+							enableStreaming: false,
+						},
+					},
+					id: 'agent-id-streaming-disabled',
+					name: 'Agent with streaming disabled',
+					type: '@n8n/n8n-nodes-langchain.agent',
+					typeVersion: 2.1,
+					position: [100, 100],
+				},
+				{
+					parameters: {
+						agent: 'conversationalAgent',
+						options: {
+							enableStreaming: true,
+						},
+					},
+					id: 'agent-id-streaming-enabled',
+					name: 'Agent with streaming enabled',
+					type: '@n8n/n8n-nodes-langchain.agent',
+					typeVersion: 2.1,
+					position: [300, 100],
+				},
+				{
+					parameters: {
+						agent: 'openAiFunctionsAgent',
+					},
+					id: 'agent-id-default-streaming',
+					name: 'Agent with default streaming',
+					type: '@n8n/n8n-nodes-langchain.agent',
+					typeVersion: 2.1,
+					position: [500, 100],
+				},
+			],
+			connections: {},
+		};
+
+		const result = generateNodesGraph(workflow, nodeTypes);
+
+		expect(result.nodeGraph.nodes['0']).toEqual({
+			id: 'agent-id-streaming-disabled',
+			type: '@n8n/n8n-nodes-langchain.agent',
+			version: 2.1,
+			position: [100, 100],
+			agent: 'toolsAgent',
+			is_streaming: false,
+		});
+
+		expect(result.nodeGraph.nodes['1']).toEqual({
+			id: 'agent-id-streaming-enabled',
+			type: '@n8n/n8n-nodes-langchain.agent',
+			version: 2.1,
+			position: [300, 100],
+			agent: 'conversationalAgent',
+			is_streaming: true,
+		});
+
+		expect(result.nodeGraph.nodes['2']).toEqual({
+			id: 'agent-id-default-streaming',
+			type: '@n8n/n8n-nodes-langchain.agent',
+			version: 2.1,
+			position: [500, 100],
+			agent: 'openAiFunctionsAgent',
+			is_streaming: true,
+		});
+	});
+
+	it('should capture Chat Trigger node streaming parameters', () => {
+		const workflow: Partial<IWorkflowBase> = {
+			nodes: [
+				{
+					parameters: {
+						public: true,
+						options: {
+							responseMode: 'streaming',
+						},
+					},
+					id: 'chat-trigger-id',
+					name: 'Chat Trigger',
+					type: '@n8n/n8n-nodes-langchain.chatTrigger',
+					typeVersion: 1,
+					position: [100, 100],
+				},
+				{
+					parameters: {
+						public: false,
+						options: {
+							responseMode: 'lastNode',
+						},
+					},
+					id: 'chat-trigger-id-2',
+					name: 'Chat Trigger 2',
+					type: '@n8n/n8n-nodes-langchain.chatTrigger',
+					typeVersion: 1,
+					position: [300, 100],
+				},
+			],
+			connections: {},
+		};
+
+		const result = generateNodesGraph(workflow, nodeTypes);
+
+		expect(result.nodeGraph.nodes['0']).toEqual({
+			id: 'chat-trigger-id',
+			type: '@n8n/n8n-nodes-langchain.chatTrigger',
+			version: 1,
+			position: [100, 100],
+			response_mode: 'streaming',
+			public_chat: true,
+		});
+
+		expect(result.nodeGraph.nodes['1']).toEqual({
+			id: 'chat-trigger-id-2',
+			type: '@n8n/n8n-nodes-langchain.chatTrigger',
+			version: 1,
+			position: [300, 100],
+			response_mode: 'lastNode',
+			public_chat: false,
+		});
+	});
 });