From f0cf6dcbcdfaae86233c6fdac8795bd7f2111d3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Ovejero?= Date: Wed, 11 Jun 2025 13:11:13 +0200 Subject: [PATCH] perf(core): Lazyload AI workflow builder's langchain dependencies (#16178) --- .../src/ai-workflow-builder.service.ts | 8 +++---- .../ai-workflow-builder/src/llm-config.ts | 21 +++++++++++-------- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/packages/@n8n/ai-workflow-builder/src/ai-workflow-builder.service.ts b/packages/@n8n/ai-workflow-builder/src/ai-workflow-builder.service.ts index d72c0d6b8a..c1686ab5e1 100644 --- a/packages/@n8n/ai-workflow-builder/src/ai-workflow-builder.service.ts +++ b/packages/@n8n/ai-workflow-builder/src/ai-workflow-builder.service.ts @@ -60,7 +60,7 @@ export class AiWorkflowBuilderService { assert(this.client, 'Client not setup'); const authHeaders = await this.client.generateApiProxyCredentials(user); - this.llmSimpleTask = gpt41mini({ + this.llmSimpleTask = await gpt41mini({ baseUrl: baseUrl + '/v1/api-proxy/openai', // When using api-proxy the key will be populated automatically, we just need to pass a placeholder apiKey: '-', @@ -68,7 +68,7 @@ export class AiWorkflowBuilderService { Authorization: authHeaders.apiKey, }, }); - this.llmComplexTask = anthropicClaude37Sonnet({ + this.llmComplexTask = await anthropicClaude37Sonnet({ baseUrl: baseUrl + '/v1/api-proxy/anthropic', apiKey: '-', headers: { @@ -78,10 +78,10 @@ export class AiWorkflowBuilderService { return; } // If base URL is not set, use environment variables - this.llmSimpleTask = gpt41mini({ + this.llmSimpleTask = await gpt41mini({ apiKey: process.env.N8N_AI_OPENAI_API_KEY ?? '', }); - this.llmComplexTask = anthropicClaude37Sonnet({ + this.llmComplexTask = await anthropicClaude37Sonnet({ apiKey: process.env.N8N_AI_ANTHROPIC_KEY ?? 
'', }); } diff --git a/packages/@n8n/ai-workflow-builder/src/llm-config.ts b/packages/@n8n/ai-workflow-builder/src/llm-config.ts index f50073ad70..c89defba37 100644 --- a/packages/@n8n/ai-workflow-builder/src/llm-config.ts +++ b/packages/@n8n/ai-workflow-builder/src/llm-config.ts @@ -1,14 +1,12 @@ -import { ChatAnthropic } from '@langchain/anthropic'; -import { ChatOpenAI } from '@langchain/openai'; - type LLMConfig = { apiKey: string; baseUrl?: string; headers?: Record<string, string>; }; -export const o4mini = (config: LLMConfig) => - new ChatOpenAI({ +export const o4mini = async (config: LLMConfig) => { + const { ChatOpenAI } = await import('@langchain/openai'); + return new ChatOpenAI({ modelName: 'o4-mini-2025-04-16', apiKey: config.apiKey, configuration: { @@ -16,9 +14,11 @@ export const o4mini = (config: LLMConfig) => defaultHeaders: config.headers, }, }); +}; -export const gpt41mini = (config: LLMConfig) => - new ChatOpenAI({ +export const gpt41mini = async (config: LLMConfig) => { + const { ChatOpenAI } = await import('@langchain/openai'); + return new ChatOpenAI({ modelName: 'gpt-4.1-mini-2025-04-14', apiKey: config.apiKey, temperature: 0, @@ -27,9 +27,11 @@ export const gpt41mini = (config: LLMConfig) => defaultHeaders: config.headers, }, }); +}; -export const anthropicClaude37Sonnet = (config: LLMConfig) => - new ChatAnthropic({ +export const anthropicClaude37Sonnet = async (config: LLMConfig) => { + const { ChatAnthropic } = await import('@langchain/anthropic'); + return new ChatAnthropic({ modelName: 'claude-3-7-sonnet-20250219', apiKey: config.apiKey, temperature: 0, @@ -39,3 +41,4 @@ export const anthropicClaude37Sonnet = (config: LLMConfig) => defaultHeaders: config.headers, }, }); +};