feat: Add HTTP proxy for supported LLM nodes (#15449)

This commit is contained in:
oleg
2025-05-16 12:23:26 +02:00
committed by GitHub
parent 570d1e7aad
commit 907485d016
14 changed files with 173 additions and 30 deletions

View File

@@ -9,6 +9,7 @@ import {
type SupplyData,
} from 'n8n-workflow';
+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
import { getConnectionHintNoticeField } from '@utils/sharedFields';
import { searchModels } from './methods/loadModels';
@@ -346,7 +347,9 @@ export class LmChatOpenAi implements INodeType {
reasoningEffort?: 'low' | 'medium' | 'high';
};
-		const configuration: ClientOptions = {};
+		const configuration: ClientOptions = {
+			httpAgent: getHttpProxyAgent(),
+		};
if (options.baseURL) {
configuration.baseURL = options.baseURL;
} else if (credentials.url) {