feat: Provide instance URL to the AI builder (no-changelog) (#18237)

Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
Co-authored-by: Eugene <burivuhster@users.noreply.github.com>
This commit is contained in:
Eugene
2025-08-21 10:13:25 +02:00
committed by GitHub
parent 815d191c01
commit 60670e1e40
7 changed files with 124 additions and 55 deletions

View File

@@ -30,6 +30,7 @@ export class AiWorkflowBuilderService {
private readonly nodeTypes: INodeTypes,
private readonly client?: AiAssistantClient,
private readonly logger?: Logger,
private readonly instanceUrl?: string,
) {
this.parsedNodeTypes = this.getNodeTypes();
}
@@ -162,6 +163,7 @@ export class AiWorkflowBuilderService {
tracer: this.tracingClient
? new LangChainTracer({ client: this.tracingClient, projectName: 'n8n-workflow-builder' })
: undefined,
instanceUrl: this.instanceUrl,
});
return this.agent;

View File

@@ -6,6 +6,7 @@ import { z } from 'zod';
import { LLMServiceError } from '../errors';
import type { ParameterUpdaterOptions } from '../types/config';
import { instanceUrlPrompt } from './prompts/instance-url';
import { ParameterUpdatePromptBuilder } from './prompts/prompt-builder';
export const parametersSchema = z
@@ -40,7 +41,6 @@ const workflowContextPrompt = `
{execution_schema}
</current_execution_nodes_schemas>
<selected_node>
Name: {node_name}
Type: {node_type}
@@ -101,6 +101,10 @@ export const createParameterUpdaterChain = (
text: nodeDefinitionPrompt,
cache_control: { type: 'ephemeral' },
},
{
type: 'text',
text: instanceUrlPrompt,
},
],
],
]);

View File

@@ -0,0 +1,12 @@
/**
 * System-prompt fragment that exposes the n8n instance base URL to the
 * AI workflow builder's LLM chains.
 *
 * `{instanceUrl}` is a LangChain template placeholder — it is substituted
 * at invoke time by the caller (NOTE(review): callers in this change pass
 * `instanceUrl ?? ''`, so the rendered prompt may contain an empty URL
 * when none is configured — confirm downstream prompts tolerate that).
 */
export const instanceUrlPrompt = `
<instance_url>
The n8n instance base URL is: {instanceUrl}
This URL is essential for webhook nodes and chat triggers as it provides the base URL for:
- Webhook URLs that external services need to call
- Chat trigger URLs for conversational interfaces
- Any node that requires the full instance URL to generate proper callback URLs
When working with webhook or chat trigger nodes, use this URL as the base for constructing proper endpoint URLs.
</instance_url>
`;

View File

@@ -1,5 +1,7 @@
import { ChatPromptTemplate } from '@langchain/core/prompts';
import { instanceUrlPrompt } from '@/chains/prompts/instance-url';
const systemPrompt = `You are an AI assistant specialized in creating and editing n8n workflows. Your goal is to help users build efficient, well-connected workflows by intelligently using the available tools.
<prime_directive>
@@ -329,7 +331,9 @@ update_node_parameters({{
}})
Then tell the user: "I've set up the Gmail Tool node with dynamic AI parameters - it will automatically determine recipients and subjects based on context."
</handling_uncertainty>`;
</handling_uncertainty>
`;
const responsePatterns = `
<response_patterns>
@@ -392,6 +396,10 @@ export const mainAgentPrompt = ChatPromptTemplate.fromMessages([
text: systemPrompt,
cache_control: { type: 'ephemeral' },
},
{
type: 'text',
text: instanceUrlPrompt,
},
{
type: 'text',
text: currentWorkflowJson,

View File

@@ -43,6 +43,74 @@ function buildSuccessMessage(node: INode, changes: string[]): string {
return `Successfully updated parameters for node "${node.name}" (${node.type}):\n${changesList}`;
}
/**
 * Run the parameter-updater LLM chain for a single node and return the
 * corrected parameters.
 *
 * Builds the chain via `createParameterUpdaterChain`, invokes it with the
 * current workflow/node context (including the optional instance URL, which
 * falls back to an empty string), validates the LLM output shape, and
 * normalizes expression prefixes before returning.
 *
 * @param node - The workflow node whose parameters are being updated.
 * @param nodeType - Type description supplying the node's property schema.
 * @param nodeId - Identifier used in error reporting and prompt context.
 * @param changes - Natural-language change requests to apply.
 * @param state - Current workflow state (source of workflow JSON and execution context).
 * @param llm - Chat model that powers the updater chain.
 * @param logger - Optional logger forwarded to the chain builder.
 * @param instanceUrl - Optional n8n base URL injected into the prompt.
 * @returns The fixed-up node parameters produced by the LLM.
 * @throws ParameterUpdateError when the LLM output is not a well-formed
 *         object or lacks a valid `parameters` property.
 */
async function processParameterUpdates(
	node: INode,
	nodeType: INodeTypeDescription,
	nodeId: string,
	changes: string[],
	state: ReturnType<typeof getWorkflowState>,
	llm: BaseChatModel,
	logger?: Logger,
	instanceUrl?: string,
): Promise<INodeParameters> {
	// Build the chain with a dynamically assembled prompt for this node type.
	const parametersChain = createParameterUpdaterChain(
		llm,
		{
			nodeType: node.type,
			nodeDefinition: nodeType,
			requestedChanges: changes,
		},
		logger,
	);

	// Gather everything the prompt template needs.
	const workflowForPrompt = getCurrentWorkflow(state);
	const existingParameters = extractNodeParameters(node);
	const propertiesJson = JSON.stringify(nodeType.properties ?? [], null, 2);

	const chainResult = (await parametersChain.invoke({
		workflow_json: workflowForPrompt,
		execution_schema: state.workflowContext?.executionSchema ?? 'NO SCHEMA',
		execution_data: state.workflowContext?.executionData ?? 'NO EXECUTION DATA YET',
		node_id: nodeId,
		node_name: node.name,
		node_type: node.type,
		current_parameters: JSON.stringify(existingParameters, null, 2),
		node_definition: propertiesJson,
		changes: formatChangesForPrompt(changes),
		instanceUrl: instanceUrl ?? '',
	})) as INodeParameters;

	// The LLM output is untrusted: verify it is an object at all…
	if (!chainResult || typeof chainResult !== 'object') {
		throw new ParameterUpdateError('Invalid parameters returned from LLM', {
			nodeId,
			nodeType: node.type,
		});
	}

	// …and that it carries a usable `parameters` object.
	if (!chainResult.parameters || typeof chainResult.parameters !== 'object') {
		throw new ParameterUpdateError('Invalid parameters structure returned from LLM', {
			nodeId,
			nodeType: node.type,
		});
	}

	// Normalize `=`-style expression prefixes before handing back to the tool.
	return fixExpressionPrefixes(chainResult.parameters) as INodeParameters;
}
/**
* Factory function to create the update node parameters tool
*/
@@ -50,6 +118,7 @@ export function createUpdateNodeParametersTool(
nodeTypes: INodeTypeDescription[],
llm: BaseChatModel,
logger?: Logger,
instanceUrl?: string,
) {
return tool(
async (input, config) => {
@@ -90,61 +159,19 @@ export function createUpdateNodeParametersTool(
});
try {
// Get current parameters
const currentParameters = extractNodeParameters(node);
// Format inputs for the chain
const formattedChanges = formatChangesForPrompt(changes);
// Get the node's properties definition as JSON
const nodePropertiesJson = JSON.stringify(nodeType.properties || [], null, 2);
// Call the parameter updater chain with dynamic prompt building
const parametersChain = createParameterUpdaterChain(
const updatedParameters = await processParameterUpdates(
node,
nodeType,
nodeId,
changes,
state,
llm,
{
nodeType: node.type,
nodeDefinition: nodeType,
requestedChanges: changes,
},
logger,
instanceUrl,
);
const newParameters = (await parametersChain.invoke({
workflow_json: workflow,
execution_schema: state.workflowContext?.executionSchema ?? 'NO SCHEMA',
execution_data: state.workflowContext?.executionData ?? 'NO EXECUTION DATA YET',
node_id: nodeId,
node_name: node.name,
node_type: node.type,
current_parameters: JSON.stringify(currentParameters, null, 2),
node_definition: nodePropertiesJson,
changes: formattedChanges,
})) as INodeParameters;
// Ensure newParameters is a valid object
if (!newParameters || typeof newParameters !== 'object') {
throw new ParameterUpdateError('Invalid parameters returned from LLM', {
nodeId,
nodeType: node.type,
});
}
// Ensure parameters property exists and is valid
if (!newParameters.parameters || typeof newParameters.parameters !== 'object') {
throw new ParameterUpdateError('Invalid parameters structure returned from LLM', {
nodeId,
nodeType: node.type,
});
}
// Fix expression prefixes in the new parameters
const fixedParameters = fixExpressionPrefixes(
newParameters.parameters,
) as INodeParameters;
// Create updated node
const updatedNode = updateNodeWithParameters(node, fixedParameters);
const updatedNode = updateNodeWithParameters(node, updatedParameters);
// Build success message
const message = buildSuccessMessage(node, changes);
@@ -154,7 +181,7 @@ export function createUpdateNodeParametersTool(
nodeId,
nodeName: node.name,
nodeType: node.type,
updatedParameters: fixedParameters,
updatedParameters,
appliedChanges: changes,
message,
};

View File

@@ -40,6 +40,7 @@ export interface WorkflowBuilderAgentConfig {
checkpointer?: MemorySaver;
tracer?: LangChainTracer;
autoCompactThresholdTokens?: number;
instanceUrl?: string;
}
export interface ChatPayload {
@@ -59,6 +60,7 @@ export class WorkflowBuilderAgent {
private logger?: Logger;
private tracer?: LangChainTracer;
private autoCompactThresholdTokens: number;
private instanceUrl?: string;
constructor(config: WorkflowBuilderAgentConfig) {
this.parsedNodeTypes = config.parsedNodeTypes;
@@ -69,6 +71,7 @@ export class WorkflowBuilderAgent {
this.tracer = config.tracer;
this.autoCompactThresholdTokens =
config.autoCompactThresholdTokens ?? DEFAULT_AUTO_COMPACT_THRESHOLD_TOKENS;
this.instanceUrl = config.instanceUrl;
}
private createWorkflow() {
@@ -78,7 +81,12 @@ export class WorkflowBuilderAgent {
createAddNodeTool(this.parsedNodeTypes),
createConnectNodesTool(this.parsedNodeTypes, this.logger),
createRemoveNodeTool(this.logger),
createUpdateNodeParametersTool(this.parsedNodeTypes, this.llmComplexTask, this.logger),
createUpdateNodeParametersTool(
this.parsedNodeTypes,
this.llmComplexTask,
this.logger,
this.instanceUrl,
),
];
// Create a map for quick tool lookup
@@ -98,6 +106,7 @@ export class WorkflowBuilderAgent {
...state,
executionData: state.workflowContext?.executionData ?? {},
executionSchema: state.workflowContext?.executionSchema ?? [],
instanceUrl: this.instanceUrl,
});
const response = await this.llmSimpleTask.bindTools(tools).invoke(prompt);

View File

@@ -9,6 +9,7 @@ import type { IUser } from 'n8n-workflow';
import { N8N_VERSION } from '@/constants';
import { License } from '@/license';
import { NodeTypes } from '@/node-types';
import { UrlService } from '@/services/url.service';
/**
* This service wraps the actual AiWorkflowBuilderService to avoid circular dependencies.
@@ -23,6 +24,7 @@ export class WorkflowBuilderService {
private readonly license: License,
private readonly config: GlobalConfig,
private readonly logger: Logger,
private readonly urlService: UrlService,
) {}
private async getService(): Promise<AiWorkflowBuilderService> {
@@ -43,7 +45,12 @@ export class WorkflowBuilderService {
});
}
this.service = new AiWorkflowBuilderService(this.nodeTypes, client, this.logger);
this.service = new AiWorkflowBuilderService(
this.nodeTypes,
client,
this.logger,
this.urlService.getInstanceBaseUrl(),
);
}
return this.service;
}