perf(core): Lazy-load AI workflow builder's langchain dependencies (#16178)

This commit is contained in:
Iván Ovejero
2025-06-11 13:11:13 +02:00
committed by GitHub
parent 42b9a8a0e7
commit f0cf6dcbcd
2 changed files with 16 additions and 13 deletions

View File

@@ -60,7 +60,7 @@ export class AiWorkflowBuilderService {
assert(this.client, 'Client not setup'); assert(this.client, 'Client not setup');
const authHeaders = await this.client.generateApiProxyCredentials(user); const authHeaders = await this.client.generateApiProxyCredentials(user);
this.llmSimpleTask = gpt41mini({ this.llmSimpleTask = await gpt41mini({
baseUrl: baseUrl + '/v1/api-proxy/openai', baseUrl: baseUrl + '/v1/api-proxy/openai',
// When using api-proxy the key will be populated automatically, we just need to pass a placeholder // When using api-proxy the key will be populated automatically, we just need to pass a placeholder
apiKey: '-', apiKey: '-',
@@ -68,7 +68,7 @@ export class AiWorkflowBuilderService {
Authorization: authHeaders.apiKey, Authorization: authHeaders.apiKey,
}, },
}); });
this.llmComplexTask = anthropicClaude37Sonnet({ this.llmComplexTask = await anthropicClaude37Sonnet({
baseUrl: baseUrl + '/v1/api-proxy/anthropic', baseUrl: baseUrl + '/v1/api-proxy/anthropic',
apiKey: '-', apiKey: '-',
headers: { headers: {
@@ -78,10 +78,10 @@ export class AiWorkflowBuilderService {
return; return;
} }
// If base URL is not set, use environment variables // If base URL is not set, use environment variables
this.llmSimpleTask = gpt41mini({ this.llmSimpleTask = await gpt41mini({
apiKey: process.env.N8N_AI_OPENAI_API_KEY ?? '', apiKey: process.env.N8N_AI_OPENAI_API_KEY ?? '',
}); });
this.llmComplexTask = anthropicClaude37Sonnet({ this.llmComplexTask = await anthropicClaude37Sonnet({
apiKey: process.env.N8N_AI_ANTHROPIC_KEY ?? '', apiKey: process.env.N8N_AI_ANTHROPIC_KEY ?? '',
}); });
} }

View File

@@ -1,14 +1,12 @@
import { ChatAnthropic } from '@langchain/anthropic';
import { ChatOpenAI } from '@langchain/openai';
type LLMConfig = { type LLMConfig = {
apiKey: string; apiKey: string;
baseUrl?: string; baseUrl?: string;
headers?: Record<string, string>; headers?: Record<string, string>;
}; };
export const o4mini = (config: LLMConfig) => export const o4mini = async (config: LLMConfig) => {
new ChatOpenAI({ const { ChatOpenAI } = await import('@langchain/openai');
return new ChatOpenAI({
modelName: 'o4-mini-2025-04-16', modelName: 'o4-mini-2025-04-16',
apiKey: config.apiKey, apiKey: config.apiKey,
configuration: { configuration: {
@@ -16,9 +14,11 @@ export const o4mini = (config: LLMConfig) =>
defaultHeaders: config.headers, defaultHeaders: config.headers,
}, },
}); });
};
export const gpt41mini = (config: LLMConfig) => export const gpt41mini = async (config: LLMConfig) => {
new ChatOpenAI({ const { ChatOpenAI } = await import('@langchain/openai');
return new ChatOpenAI({
modelName: 'gpt-4.1-mini-2025-04-14', modelName: 'gpt-4.1-mini-2025-04-14',
apiKey: config.apiKey, apiKey: config.apiKey,
temperature: 0, temperature: 0,
@@ -27,9 +27,11 @@ export const gpt41mini = (config: LLMConfig) =>
defaultHeaders: config.headers, defaultHeaders: config.headers,
}, },
}); });
};
export const anthropicClaude37Sonnet = (config: LLMConfig) => export const anthropicClaude37Sonnet = async (config: LLMConfig) => {
new ChatAnthropic({ const { ChatAnthropic } = await import('@langchain/anthropic');
return new ChatAnthropic({
modelName: 'claude-3-7-sonnet-20250219', modelName: 'claude-3-7-sonnet-20250219',
apiKey: config.apiKey, apiKey: config.apiKey,
temperature: 0, temperature: 0,
@@ -39,3 +41,4 @@ export const anthropicClaude37Sonnet = (config: LLMConfig) =>
defaultHeaders: config.headers, defaultHeaders: config.headers,
}, },
}); });
};