feat: AI workflow builder front-end (no-changelog) (#14820)

Co-authored-by: Giulio Andreini <g.andreini@gmail.com>
This commit is contained in:
oleg
2025-04-28 15:38:32 +02:00
committed by GitHub
parent dbffcdc2ff
commit 97055d5714
56 changed files with 3857 additions and 1067 deletions

View File

@@ -33,7 +33,7 @@
"@langchain/openai": "catalog:",
"@n8n/config": "workspace:*",
"@n8n/di": "workspace:*",
"@n8n_io/ai-assistant-sdk": "1.13.0",
"@n8n_io/ai-assistant-sdk": "catalog:",
"n8n-workflow": "workspace:*",
"zod": "catalog:"
},

View File

@@ -12,6 +12,7 @@ import { connectionComposerChain } from './chains/connection-composer';
import { nodesSelectionChain } from './chains/node-selector';
import { nodesComposerChain } from './chains/nodes-composer';
import { plannerChain } from './chains/planner';
import { validatorChain } from './chains/validator';
import { ILicenseService } from './interfaces';
import { anthropicClaude37Sonnet, gpt41mini } from './llm-config';
import type { MessageResponse } from './types';
@@ -58,19 +59,21 @@ export class AiWorkflowBuilderService {
assert(this.client, 'Client not setup');
// @ts-expect-error getProxyHeaders will only be available after `@n8n_io/ai-assistant-sdk` v1.14.0 is released
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
const authHeaders = (await this.client?.getProxyHeaders(user)) as Record<string, string>;
const authHeaders = await this.client.generateApiProxyCredentials(user);
this.llmSimpleTask = gpt41mini({
baseUrl: baseUrl + '/v1/api-proxy/openai',
// When using api-proxy the key will be populated automatically, we just need to pass a placeholder
apiKey: '_',
headers: authHeaders,
apiKey: '-',
headers: {
Authorization: authHeaders.apiKey,
},
});
this.llmComplexTask = anthropicClaude37Sonnet({
baseUrl: baseUrl + '/v1/api-proxy/anthropic',
apiKey: '_',
headers: authHeaders,
apiKey: '-',
headers: {
Authorization: authHeaders.apiKey,
},
});
return;
}
@@ -97,6 +100,7 @@ export class AiWorkflowBuilderService {
private isWorkflowEvent(eventName: string): boolean {
return [
'prompt_validation',
'generated_steps',
'generated_nodes',
'composed_nodes',
@@ -106,6 +110,33 @@ export class AiWorkflowBuilderService {
}
private getAgent() {
// Graph node: runs the prompt validator before any planning happens.
// Writes `isWorkflowPrompt` into the graph state; the conditional edge
// off the 'validator' node routes to 'planner' only when it is true.
const validatorChainNode = async (
state: typeof WorkflowState.State,
config: RunnableConfig,
): Promise<Partial<typeof WorkflowState.State>> => {
// Requires LLM setup to have completed (see client/auth setup above).
assert(this.llmSimpleTask, 'LLM not setup');
const isWorkflowPrompt = await validatorChain(this.llmSimpleTask).invoke(
{
prompt: state.prompt,
},
config,
);
// Only notify the client on rejection — a valid prompt proceeds
// silently to the planner node.
if (!isWorkflowPrompt) {
await dispatchCustomEvent('prompt_validation', {
role: 'assistant',
type: 'prompt-validation',
isWorkflowPrompt,
// NOTE(review): timestamp-based id — presumably unique enough for
// chat-message ids here; confirm no collision requirement.
id: Date.now().toString(),
});
}
return {
isWorkflowPrompt,
};
};
const plannerChainNode = async (
state: typeof WorkflowState.State,
config: RunnableConfig,
@@ -290,7 +321,7 @@ export class AiWorkflowBuilderService {
///////////////////// Workflow Graph Definition /////////////////////
const workflowGraph = new StateGraph(WorkflowState)
// .addNode('supervisor', supervisorChainNode)
.addNode('validator', validatorChainNode)
.addNode('planner', plannerChainNode)
.addNode('node_selector', nodeSelectionChainNode)
.addNode('nodes_composer', nodesComposerChainNode)
@@ -298,8 +329,12 @@ export class AiWorkflowBuilderService {
.addNode('finalize', generateWorkflowJSON);
// Define the graph edges to set the processing order:
// Start with the planner.
workflowGraph.addEdge(START, 'planner');
// Start with the validator
workflowGraph.addEdge(START, 'validator');
// If validated, continue to planner
workflowGraph.addConditionalEdges('validator', (state) => {
return state.isWorkflowPrompt ? 'planner' : END;
});
// Planner node flows into node selector:
workflowGraph.addEdge('planner', 'node_selector');
// Node selector is followed by nodes composer:
@@ -327,6 +362,7 @@ export class AiWorkflowBuilderService {
steps: [],
nodes: [],
workflowJSON: { nodes: [], connections: {} },
isWorkflowPrompt: false,
next: 'PLAN',
};

View File

@@ -10,18 +10,25 @@ export const plannerPrompt = new SystemMessage(
`You are a Workflow Planner for n8n, a platform that helps users automate processes across different services and APIs.
## Your Task
Convert user requests into clear, sequential workflow steps that can be implemented with n8n nodes.
Convert user requests into clear, sequential workflow steps that can be implemented with n8n nodes. ONLY include steps that are explicitly stated or directly implied in the user request.
## Guidelines
1. Analyze the user request to understand their end goal and required process
2. Break down the automation into logical steps based on complexity - simpler workflows need fewer steps, complex ones may need more
3. Focus on actions (fetch data, transform, filter, send notification, etc.)
3. Focus ONLY on actions mentioned directly in the user prompt
4. Create steps that can be mapped to n8n nodes later
5. Order steps sequentially from trigger to final action
6. Be specific about data transformations needed
7. Include error handling steps when appropriate
6. Be specific about data transformations needed ONLY if mentioned in the request
7. NEVER add extra steps like storing data or sending notifications unless explicitly requested
8. Only recommend raw HTTP requests if you think there isn't a suitable n8n node
## CRITICAL REQUIREMENTS
- DO NOT add any steps not directly mentioned or implied in the user request
- DO NOT assume the user wants to store data in a database unless explicitly stated
- DO NOT assume the user wants to send notifications or emails unless explicitly stated
- DO NOT add any "nice to have" steps that aren't clearly part of the user's request
- Keep the workflow EXACTLY focused on what was requested, nothing more
## Output Format
Return ONLY a JSON object with this structure:
\`\`\`json

View File

@@ -0,0 +1,75 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { AIMessageChunk } from '@langchain/core/messages';
import { SystemMessage } from '@langchain/core/messages';
import { ChatPromptTemplate, HumanMessagePromptTemplate } from '@langchain/core/prompts';
import { DynamicStructuredTool } from '@langchain/core/tools';
import { OperationalError } from 'n8n-workflow';
import { z } from 'zod';
// System prompt for the validator chain: instructs the model to classify
// whether the user's message actually describes an n8n workflow/automation
// to build, as opposed to an unrelated question. The verdict is returned
// via the forced `validate_prompt` tool call (see validatorChain below).
const validatorPrompt = new SystemMessage(
`You are a workflow prompt validator for n8n. You need to analyze the user's prompt and determine
if they're actually trying to build a workflow that connects different online services or automates a task.
A workflow prompt should:
- Describe an automation or integration task
- Potentially mention connecting services (like Google Sheets, Slack, etc.)
- Describe a process that could be broken down into steps
- Mention something that could be automated
Examples of VALID workflow prompts:
- "Create a workflow that sends a Slack message when a new row is added to Google Sheets"
- "I want to automatically save Gmail attachments to Dropbox"
- "Build a workflow that posts new Twitter mentions to a Discord channel"
- "When I get a new lead in my CRM, add them to my email marketing list"
Examples of INVALID workflow prompts:
- "What's the weather like today?"
- "Tell me a joke"
- "What is n8n?"
- "Help me fix my computer"
- "What time is it?"
Analyze the prompt and determine if it's a valid workflow prompt. Respond with just true or false.`,
);
// Structured output contract for the validator: a single boolean verdict.
const validatorSchema = z.object({
isWorkflowPrompt: z.boolean(),
});
// Tool the LLM is forced to call (via `tool_choice` in validatorChain).
// The func simply echoes the argument back, so the verdict arrives as a
// reliably parseable tool call instead of free-form text.
const validatorTool = new DynamicStructuredTool({
name: 'validate_prompt',
description: 'Validate if the user prompt is a workflow prompt',
schema: validatorSchema,
func: async ({ isWorkflowPrompt }) => {
return { isWorkflowPrompt };
},
});
// Human-message template: wraps the raw user prompt in XML-style tags so
// the model can clearly separate it from the system instructions.
const humanTemplate = `
<user_prompt>
{prompt}
</user_prompt>
`;
// Full two-message prompt: system instructions + templated user message.
const chatPrompt = ChatPromptTemplate.fromMessages([
validatorPrompt,
HumanMessagePromptTemplate.fromTemplate(humanTemplate),
]);
export const validatorChain = (llm: BaseChatModel) => {
if (!llm.bindTools) {
throw new OperationalError("LLM doesn't support binding tools");
}
return chatPrompt
.pipe(
llm.bindTools([validatorTool], {
tool_choice: validatorTool.name,
}),
)
.pipe((x: AIMessageChunk) => {
const toolCall = x.tool_calls?.[0];
return (toolCall?.args as z.infer<typeof validatorTool.schema>).isWorkflowPrompt;
});
};

View File

@@ -88,6 +88,13 @@ export interface WorkflowConnectionsMessage {
read: boolean;
}
export interface PromptValidationMessage {
role: 'assistant';
type: 'prompt-validation';
isWorkflowPrompt: boolean;
id: string;
}
export type MessageResponse =
| ((
| AssistantChatMessage
@@ -99,6 +106,7 @@ export type MessageResponse =
| WorkflowNodeMessage
| WorkflowComposedMessage
| WorkflowConnectionsMessage
| PromptValidationMessage
) & {
quickReplies?: QuickReplyOption[];
})

View File

@@ -17,6 +17,8 @@ export const WorkflowState = Annotation.Root({
workflowJSON: Annotation<SimpleWorkflow>({
reducer: (x, y) => y ?? x ?? { nodes: [], connections: {} },
}),
// Whether the user prompt is a workflow prompt.
isWorkflowPrompt: Annotation<boolean>({ reducer: (x, y) => y ?? x ?? false }),
// The next phase to be executed in the workflow graph.
next: Annotation<string>({ reducer: (x, y) => y ?? x ?? END, default: () => END }),
});