From 907485d01692f004ea289dcd2d6af2d0900e1b49 Mon Sep 17 00:00:00 2001
From: oleg
Date: Fri, 16 May 2025 12:23:26 +0200
Subject: [PATCH] feat: Add HTTP proxy for supported LLM nodes (#15449)

---
 .../LMChatAnthropic/LmChatAnthropic.node.ts   |  4 +
 .../llms/LMChatOpenAi/LmChatOpenAi.node.ts    |  5 +-
 .../llms/LMChatOpenAi/methods/loadModels.ts   |  8 +-
 .../nodes/llms/LMOpenAi/LmOpenAi.node.ts      |  6 +-
 .../LmChatAwsBedrock/LmChatAwsBedrock.node.ts |  4 +
 .../LmChatAzureOpenAi.node.ts                 |  5 ++
 .../LmChatDeepSeek/LmChatDeepSeek.node.ts     |  2 +
 .../nodes/llms/LmChatGroq/LmChatGroq.node.ts  |  2 +
 .../LmChatOpenRouter/LmChatOpenRouter.node.ts |  2 +
 .../llms/LmChatXAiGrok/LmChatXAiGrok.node.ts  |  2 +
 packages/@n8n/nodes-langchain/package.json    |  1 +
 .../nodes-langchain/utils/httpProxyAgent.ts   | 11 +++
 .../utils/tests/httpProxyAgent.test.ts        | 88 +++++++++++++++++++
 pnpm-lock.yaml                                | 63 +++++++------
 14 files changed, 173 insertions(+), 30 deletions(-)
 create mode 100644 packages/@n8n/nodes-langchain/utils/httpProxyAgent.ts
 create mode 100644 packages/@n8n/nodes-langchain/utils/tests/httpProxyAgent.test.ts

diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts
index 129d221a22..bdde9fb7d7 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts
@@ -12,6 +12,7 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';

+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';

 import { searchModels } from './methods/searchModels';
@@ -329,6 +330,9 @@ export class LmChatAnthropic implements INodeType {
 			callbacks: [new N8nLlmTracing(this, { tokensUsageParser })],
 			onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
 			invocationKwargs,
+			clientOptions: {
+				httpAgent: getHttpProxyAgent(),
+			},
 		});

 		return {
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts
index 770a1c80b8..fdc58c9855 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts
@@ -9,6 +9,7 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';

+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';

 import { searchModels } from './methods/loadModels';
@@ -346,7 +347,9 @@ export class LmChatOpenAi implements INodeType {
 			reasoningEffort?: 'low' | 'medium' | 'high';
 		};

-		const configuration: ClientOptions = {};
+		const configuration: ClientOptions = {
+			httpAgent: getHttpProxyAgent(),
+		};
 		if (options.baseURL) {
 			configuration.baseURL = options.baseURL;
 		} else if (credentials.url) {
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/methods/loadModels.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/methods/loadModels.ts
index cc22c147e7..31e3f45ba7 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/methods/loadModels.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/methods/loadModels.ts
@@ -1,6 +1,8 @@
 import type { ILoadOptionsFunctions, INodeListSearchResult } from 'n8n-workflow';
 import OpenAI from 'openai';

+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
+
 export async function searchModels(
 	this: ILoadOptionsFunctions,
 	filter?: string,
@@ -11,7 +13,11 @@ export async function searchModels(
 		(credentials.url as string) ||
 		'https://api.openai.com/v1';

-	const openai = new OpenAI({ baseURL, apiKey: credentials.apiKey as string });
+	const openai = new OpenAI({
+		baseURL,
+		apiKey: credentials.apiKey as string,
+		httpAgent: getHttpProxyAgent(),
+	});
 	const { data: models = [] } = await openai.models.list();

 	const filteredModels = models.filter((model: { id: string }) => {
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts
index 8f442f6555..a839596653 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts
@@ -9,6 +9,8 @@ import type {
 	ILoadOptionsFunctions,
 } from 'n8n-workflow';

+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
+
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
 import { N8nLlmTracing } from '../N8nLlmTracing';

@@ -248,7 +250,9 @@ export class LmOpenAi implements INodeType {
 			topP?: number;
 		};

-		const configuration: ClientOptions = {};
+		const configuration: ClientOptions = {
+			httpAgent: getHttpProxyAgent(),
+		};
 		if (options.baseURL) {
 			configuration.baseURL = options.baseURL;
 		}
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts
index 8db75531c8..0663cc4d78 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts
@@ -8,6 +8,7 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';

+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';

 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
@@ -147,6 +148,9 @@ export class LmChatAwsBedrock implements INodeType {
 			model: modelName,
 			temperature: options.temperature,
 			maxTokens: options.maxTokensToSample,
+			clientConfig: {
+				httpAgent: getHttpProxyAgent(),
+			},
 			credentials: {
 				secretAccessKey: credentials.secretAccessKey as string,
 				accessKeyId: credentials.accessKeyId as string,
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts
index b513f31a46..4a993462ea 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts
@@ -10,6 +10,8 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';

+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
+
 import { setupApiKeyAuthentication } from './credentials/api-key';
 import { setupOAuth2Authentication } from './credentials/oauth2';
 import { properties } from './properties';
@@ -111,6 +113,9 @@ export class LmChatAzureOpenAi implements INodeType {
 			timeout: options.timeout ?? 60000,
 			maxRetries: options.maxRetries ?? 2,
 			callbacks: [new N8nLlmTracing(this)],
+			configuration: {
+				httpAgent: getHttpProxyAgent(),
+			},
 			modelKwargs: options.responseFormat
 				? {
 						response_format: { type: options.responseFormat },
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.ts
index 33d6eb8554..c9e9a2dbd6 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.ts
@@ -9,6 +9,7 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';

+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';

 import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
@@ -228,6 +229,7 @@ export class LmChatDeepSeek implements INodeType {

 		const configuration: ClientOptions = {
 			baseURL: credentials.url,
+			httpAgent: getHttpProxyAgent(),
 		};

 		const model = new ChatOpenAI({
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts
index 14c94d7bee..d2e03a5961 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts
@@ -8,6 +8,7 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';

+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';

 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
@@ -146,6 +147,7 @@ export class LmChatGroq implements INodeType {
 			maxTokens: options.maxTokensToSample,
 			temperature: options.temperature,
 			callbacks: [new N8nLlmTracing(this)],
+			httpAgent: getHttpProxyAgent(),
 			onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
 		});

diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.ts
index 005ee84ab5..c9310ac9bd 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.ts
@@ -9,6 +9,7 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';

+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';

 import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
@@ -227,6 +228,7 @@ export class LmChatOpenRouter implements INodeType {

 		const configuration: ClientOptions = {
 			baseURL: credentials.url,
+			httpAgent: getHttpProxyAgent(),
 		};

 		const model = new ChatOpenAI({
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.ts
index 4183b9c22c..e12dac28f4 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.ts
@@ -9,6 +9,7 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';

+import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';

 import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
@@ -228,6 +229,7 @@ export class LmChatXAiGrok implements INodeType {

 		const configuration: ClientOptions = {
 			baseURL: credentials.url,
+			httpAgent: getHttpProxyAgent(),
 		};

 		const model = new ChatOpenAI({
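Note: every node above wires the proxy the same way: the agent returned by getHttpProxyAgent() is handed to the underlying SDK client through its client options (clientOptions for Anthropic, clientConfig for Bedrock, ClientOptions.httpAgent for the OpenAI-compatible clients, a top-level httpAgent for Groq). As a standalone sketch, not part of the patch, the OpenAI-compatible wiring reduces to roughly the following; the model name and API key handling are placeholders, and the '@utils/httpProxyAgent' alias only resolves inside the nodes-langchain package.

import { ChatOpenAI, type ClientOptions } from '@langchain/openai';

import { getHttpProxyAgent } from '@utils/httpProxyAgent';

// Resolve the proxy once from the environment; undefined means a direct connection.
const configuration: ClientOptions = {
	baseURL: 'https://api.openai.com/v1',
	httpAgent: getHttpProxyAgent(),
};

// Illustrative model setup only; values are not taken from the patch.
const model = new ChatOpenAI({
	apiKey: process.env.OPENAI_API_KEY,
	model: 'gpt-4o-mini',
	configuration,
});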
diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json
index 913f65b793..36bb0a9e87 100644
--- a/packages/@n8n/nodes-langchain/package.json
+++ b/packages/@n8n/nodes-langchain/package.json
@@ -194,6 +194,7 @@
 		"form-data": "catalog:",
 		"generate-schema": "2.6.0",
 		"html-to-text": "9.0.5",
+		"https-proxy-agent": "^7.0.6",
 		"jsdom": "23.0.1",
 		"langchain": "0.3.11",
 		"lodash": "catalog:",
diff --git a/packages/@n8n/nodes-langchain/utils/httpProxyAgent.ts b/packages/@n8n/nodes-langchain/utils/httpProxyAgent.ts
new file mode 100644
index 0000000000..4dfa867c8f
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/utils/httpProxyAgent.ts
@@ -0,0 +1,11 @@
+import { HttpsProxyAgent } from 'https-proxy-agent';
+
+export function getHttpProxyAgent() {
+	const httpProxy =
+		process.env.HTTPS_PROXY ??
+		process.env.https_proxy ??
+		process.env.HTTP_PROXY ??
+		process.env.http_proxy;
+
+	return httpProxy ? new HttpsProxyAgent(httpProxy) : undefined;
+}
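For reference (not part of the patch): the helper reads the proxy URL from the environment on every call, prefers HTTPS_PROXY/https_proxy over HTTP_PROXY/http_proxy, and returns undefined when nothing is set. A minimal usage sketch, with made-up proxy addresses and a relative import assumed to sit next to the helper:

import { getHttpProxyAgent } from './httpProxyAgent';

// Both variables set: the HTTPS_* value wins (addresses are examples only).
process.env.HTTP_PROXY = 'http://fallback-proxy.internal:3128';
process.env.HTTPS_PROXY = 'http://corp-proxy.internal:8080';
console.log(getHttpProxyAgent()); // HttpsProxyAgent targeting http://corp-proxy.internal:8080

// Nothing set: the LLM node talks to the provider directly.
delete process.env.HTTP_PROXY;
delete process.env.HTTPS_PROXY;
console.log(getHttpProxyAgent()); // undefined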
diff --git a/packages/@n8n/nodes-langchain/utils/tests/httpProxyAgent.test.ts b/packages/@n8n/nodes-langchain/utils/tests/httpProxyAgent.test.ts
new file mode 100644
index 0000000000..b98724916a
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/utils/tests/httpProxyAgent.test.ts
@@ -0,0 +1,88 @@
+import { HttpsProxyAgent } from 'https-proxy-agent';
+
+import { getHttpProxyAgent } from '../httpProxyAgent';
+
+// Mock the https-proxy-agent package
+jest.mock('https-proxy-agent', () => ({
+	HttpsProxyAgent: jest.fn().mockImplementation((url) => ({ proxyUrl: url })),
+}));
+
+describe('getHttpProxyAgent', () => {
+	// Store original environment variables
+	const originalEnv = { ...process.env };
+
+	// Reset environment variables before each test
+	beforeEach(() => {
+		jest.clearAllMocks();
+		process.env = { ...originalEnv };
+		delete process.env.HTTP_PROXY;
+		delete process.env.http_proxy;
+		delete process.env.HTTPS_PROXY;
+		delete process.env.https_proxy;
+	});
+
+	// Restore original environment after all tests
+	afterAll(() => {
+		process.env = originalEnv;
+	});
+
+	it('should return undefined when no proxy environment variables are set', () => {
+		const agent = getHttpProxyAgent();
+		expect(agent).toBeUndefined();
+		expect(HttpsProxyAgent).not.toHaveBeenCalled();
+	});
+
+	it('should create HttpsProxyAgent when HTTP_PROXY is set', () => {
+		const proxyUrl = 'http://proxy.example.com:8080';
+		process.env.HTTP_PROXY = proxyUrl;
+
+		const agent = getHttpProxyAgent();
+
+		expect(HttpsProxyAgent).toHaveBeenCalledWith(proxyUrl);
+		expect(agent).toEqual({ proxyUrl });
+	});
+
+	it('should create HttpsProxyAgent when http_proxy is set', () => {
+		const proxyUrl = 'http://proxy.example.com:8080';
+		process.env.http_proxy = proxyUrl;
+
+		const agent = getHttpProxyAgent();
+
+		expect(HttpsProxyAgent).toHaveBeenCalledWith(proxyUrl);
+		expect(agent).toEqual({ proxyUrl });
+	});
+
+	it('should create HttpsProxyAgent when HTTPS_PROXY is set', () => {
+		const proxyUrl = 'http://proxy.example.com:8080';
+		process.env.HTTPS_PROXY = proxyUrl;
+
+		const agent = getHttpProxyAgent();
+
+		expect(HttpsProxyAgent).toHaveBeenCalledWith(proxyUrl);
+		expect(agent).toEqual({ proxyUrl });
+	});
+
+	it('should create HttpsProxyAgent when https_proxy is set', () => {
+		const proxyUrl = 'http://proxy.example.com:8080';
+		process.env.https_proxy = proxyUrl;
+
+		const agent = getHttpProxyAgent();
+
+		expect(HttpsProxyAgent).toHaveBeenCalledWith(proxyUrl);
+		expect(agent).toEqual({ proxyUrl });
+	});
+
+	it('should respect priority order of proxy environment variables', () => {
+		// Set multiple proxy environment variables
+		process.env.HTTP_PROXY = 'http://http-proxy.example.com:8080';
+		process.env.http_proxy = 'http://http-proxy-lowercase.example.com:8080';
+		process.env.HTTPS_PROXY = 'http://https-proxy.example.com:8080';
+		process.env.https_proxy = 'http://https-proxy-lowercase.example.com:8080';
+
+		const agent = getHttpProxyAgent();
+
+		// Should use HTTPS_PROXY as it has highest priority now
+		expect(HttpsProxyAgent).toHaveBeenCalledWith('http://https-proxy.example.com:8080');
+		expect(agent).toEqual({ proxyUrl: 'http://https-proxy.example.com:8080' });
+	});
+});
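One way to check the behaviour end to end (not covered by the patch) is to point HTTPS_PROXY at a small local CONNECT proxy and watch the tunnelled requests. A sketch in Node/TypeScript; the port and addresses are arbitrary choices, not values from the patch:

import * as http from 'node:http';
import * as net from 'node:net';

// Minimal CONNECT proxy: logs each tunnel request so you can confirm that
// LLM traffic actually flows through the configured proxy.
const proxy = http.createServer();

proxy.on('connect', (req, clientSocket, head) => {
	const [host, port] = (req.url ?? '').split(':');
	console.log(`CONNECT ${req.url}`);

	const upstream = net.connect(Number(port) || 443, host, () => {
		clientSocket.write('HTTP/1.1 200 Connection Established\r\n\r\n');
		upstream.write(head);
		upstream.pipe(clientSocket);
		clientSocket.pipe(upstream);
	});

	upstream.on('error', () => clientSocket.end());
	clientSocket.on('error', () => upstream.end());
});

proxy.listen(8080, () => {
	console.log('Proxy listening on http://127.0.0.1:8080');
	// Start n8n with HTTPS_PROXY=http://127.0.0.1:8080 and run a supported LLM
	// node; each outbound request should show up above as a CONNECT line.
});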
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 014ce83dfa..242170fe37 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -729,7 +729,7 @@ importers:
         version: 4.3.0
       '@getzep/zep-cloud':
         specifier: 1.0.12
-        version: 1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(7f2a4b9c5436679ca8b0df05212b4905))
+        version: 1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(b4eb53fe8b825d6e8edd96cc3d942586))
       '@getzep/zep-js':
         specifier: 0.9.0
         version: 0.9.0
@@ -756,7 +756,7 @@ importers:
         version: 0.3.2(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)
       '@langchain/community':
         specifier: 'catalog:'
-        version: 0.3.24(0b620065402de60ffbc4ade3af2d8197)
+        version: 0.3.24(67fb36bad0bcdd2b0df3579415b33a93)
       '@langchain/core':
         specifier: 'catalog:'
         version: 0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))
@@ -853,12 +853,15 @@ importers:
       html-to-text:
         specifier: 9.0.5
         version: 9.0.5
+      https-proxy-agent:
+        specifier: ^7.0.6
+        version: 7.0.6
       jsdom:
         specifier: 23.0.1
         version: 23.0.1
       langchain:
         specifier: 0.3.11
-        version: 0.3.11(7f2a4b9c5436679ca8b0df05212b4905)
+        version: 0.3.11(b4eb53fe8b825d6e8edd96cc3d942586)
       lodash:
         specifier: 'catalog:'
         version: 4.17.21
@@ -6963,6 +6966,10 @@
     resolution: {integrity: sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==}
     engines: {node: '>= 14'}

+  agent-base@7.1.3:
+    resolution: {integrity: sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==}
+    engines: {node: '>= 14'}
+
   agentkeepalive@4.2.1:
     resolution: {integrity: sha512-Zn4cw2NEqd+9fiSVWMscnjyQ1a8Yfoc5oBajLeo5w+YBHgDUcEBY2hS4YpTz6iN5f/2zQiktcuM6tS8x1p9dpA==}
     engines: {node: '>= 8.0.0'}
@@ -9472,8 +9479,8 @@
     resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==}
     engines: {node: '>= 6'}

-  https-proxy-agent@7.0.5:
-    resolution: {integrity: sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==}
+  https-proxy-agent@7.0.6:
+    resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==}
     engines: {node: '>= 14'}

   human-signals@1.1.1:
@@ -16471,7 +16478,7 @@
   '@gar/promisify@1.1.3':
     optional: true

-  '@getzep/zep-cloud@1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(7f2a4b9c5436679ca8b0df05212b4905))':
+  '@getzep/zep-cloud@1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(b4eb53fe8b825d6e8edd96cc3d942586))':
     dependencies:
       form-data: 4.0.0
       node-fetch: 2.7.0(encoding@0.1.13)
@@ -16480,7 +16487,7 @@
       zod: 3.24.1
     optionalDependencies:
       '@langchain/core': 0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))
-      langchain: 0.3.11(7f2a4b9c5436679ca8b0df05212b4905)
+      langchain: 0.3.11(b4eb53fe8b825d6e8edd96cc3d942586)
     transitivePeerDependencies:
       - encoding

@@ -16992,7 +16999,7 @@
       - aws-crt
       - encoding

-  '@langchain/community@0.3.24(0b620065402de60ffbc4ade3af2d8197)':
+  '@langchain/community@0.3.24(67fb36bad0bcdd2b0df3579415b33a93)':
     dependencies:
       '@browserbasehq/stagehand': 1.9.0(@playwright/test@1.49.1)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))(zod@3.24.1)
       '@ibm-cloud/watsonx-ai': 1.1.2
@@ -17003,7 +17010,7 @@
       flat: 5.0.2
       ibm-cloud-sdk-core: 5.3.2
       js-yaml: 4.1.0
-      langchain: 0.3.11(7f2a4b9c5436679ca8b0df05212b4905)
+      langchain: 0.3.11(b4eb53fe8b825d6e8edd96cc3d942586)
       langsmith: 0.2.15(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))
       openai: 4.78.1(encoding@0.1.13)(zod@3.24.1)
       uuid: 10.0.0
@@ -17018,7 +17025,7 @@
       '@aws-sdk/credential-provider-node': 3.808.0
       '@azure/storage-blob': 12.26.0
       '@browserbasehq/sdk': 2.0.0(encoding@0.1.13)
-      '@getzep/zep-cloud': 1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(7f2a4b9c5436679ca8b0df05212b4905))
+      '@getzep/zep-cloud': 1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(b4eb53fe8b825d6e8edd96cc3d942586))
       '@getzep/zep-js': 0.9.0
       '@google-ai/generativelanguage': 2.6.0(encoding@0.1.13)
       '@google-cloud/storage': 7.12.1(encoding@0.1.13)
@@ -17990,7 +17997,7 @@
       '@redocly/ajv': 8.11.2
       '@redocly/config': 0.20.3
       colorette: 1.4.0
-      https-proxy-agent: 7.0.5
+      https-proxy-agent: 7.0.6
       js-levenshtein: 1.1.6
       js-yaml: 4.1.0
       minimatch: 5.1.5
@@ -19691,7 +19698,7 @@
   '@typespec/ts-http-runtime@0.2.2':
     dependencies:
       http-proxy-agent: 7.0.0
-      https-proxy-agent: 7.0.5
+      https-proxy-agent: 7.0.6
       tslib: 2.6.2
     transitivePeerDependencies:
       - supports-color
@@ -20095,6 +20102,8 @@
     transitivePeerDependencies:
       - supports-color

+  agent-base@7.1.3: {}
+
   agentkeepalive@4.2.1:
     dependencies:
       debug: 4.4.0(supports-color@8.1.1)
@@ -22039,7 +22048,7 @@
   eslint-import-resolver-node@0.3.9:
     dependencies:
-      debug: 3.2.7(supports-color@8.1.1)
+      debug: 3.2.7(supports-color@5.5.0)
       is-core-module: 2.13.1
       resolve: 1.22.8
     transitivePeerDependencies:
       - supports-color
@@ -22064,7 +22073,7 @@
   eslint-module-utils@2.8.0(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.8.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0):
     dependencies:
-      debug: 3.2.7(supports-color@8.1.1)
+      debug: 3.2.7(supports-color@5.5.0)
     optionalDependencies:
       '@typescript-eslint/parser': 7.2.0(eslint@8.57.0)(typescript@5.8.2)
       eslint: 8.57.0
@@ -22084,7 +22093,7 @@
       array.prototype.findlastindex: 1.2.3
       array.prototype.flat: 1.3.2
       array.prototype.flatmap: 1.3.2
-      debug: 3.2.7(supports-color@8.1.1)
+      debug: 3.2.7(supports-color@5.5.0)
       doctrine: 2.1.0
       eslint: 8.57.0
       eslint-import-resolver-node: 0.3.9
@@ -22724,7 +22733,7 @@
   gaxios@6.6.0(encoding@0.1.13):
     dependencies:
       extend: 3.0.2
-      https-proxy-agent: 7.0.5
+      https-proxy-agent: 7.0.6
       is-stream: 2.0.1
       node-fetch: 2.7.0(encoding@0.1.13)
       uuid: 9.0.1
@@ -22911,7 +22920,7 @@
       array-parallel: 0.1.3
       array-series: 0.1.5
       cross-spawn: 7.0.6
-      debug: 3.2.7(supports-color@8.1.1)
+      debug: 3.2.7(supports-color@5.5.0)
     transitivePeerDependencies:
       - supports-color

@@ -23183,9 +23192,9 @@
     transitivePeerDependencies:
       - supports-color

-  https-proxy-agent@7.0.5:
+  https-proxy-agent@7.0.6:
     dependencies:
-      agent-base: 7.1.0
+      agent-base: 7.1.3
       debug: 4.4.0(supports-color@8.1.1)
     transitivePeerDependencies:
       - supports-color
@@ -23205,7 +23214,7 @@
       '@types/debug': 4.1.12
       '@types/node': 18.16.16
       '@types/tough-cookie': 4.0.2
-      axios: 1.8.3
+      axios: 1.8.3(debug@4.4.0)
       camelcase: 6.3.0
       debug: 4.4.0(supports-color@8.1.1)
       dotenv: 16.4.5
@@ -23215,7 +23224,7 @@
       isstream: 0.1.2
       jsonwebtoken: 9.0.2
       mime-types: 2.1.35
-      retry-axios: 2.6.0(axios@1.8.3(debug@4.4.0))
+      retry-axios: 2.6.0(axios@1.8.3)
       tough-cookie: 4.1.3
     transitivePeerDependencies:
       - supports-color
@@ -24063,7 +24072,7 @@
       form-data: 4.0.0
       html-encoding-sniffer: 4.0.0
       http-proxy-agent: 7.0.0
-      https-proxy-agent: 7.0.5
+      https-proxy-agent: 7.0.6
       is-potential-custom-element-name: 1.0.1
       nwsapi: 2.2.7
       parse5: 7.1.2
@@ -24209,7 +24218,7 @@
   kuler@2.0.0: {}

-  langchain@0.3.11(7f2a4b9c5436679ca8b0df05212b4905):
+  langchain@0.3.11(b4eb53fe8b825d6e8edd96cc3d942586):
     dependencies:
       '@langchain/core': 0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))
       '@langchain/openai': 0.3.17(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)
@@ -25774,7 +25783,7 @@
   pdf-parse@1.1.1:
     dependencies:
-      debug: 3.2.7(supports-color@8.1.1)
+      debug: 3.2.7(supports-color@5.5.0)
       node-ensure: 0.0.0
     transitivePeerDependencies:
       - supports-color
@@ -26586,7 +26595,7 @@
       onetime: 5.1.2
       signal-exit: 3.0.7

-  retry-axios@2.6.0(axios@1.8.3(debug@4.4.0)):
+  retry-axios@2.6.0(axios@1.8.3):
     dependencies:
       axios: 1.8.3

@@ -26613,7 +26622,7 @@
   rhea@1.0.24:
     dependencies:
-      debug: 3.2.7(supports-color@8.1.1)
+      debug: 3.2.7(supports-color@5.5.0)
     transitivePeerDependencies:
       - supports-color

@@ -27057,7 +27066,7 @@
       fastest-levenshtein: 1.0.16
       generic-pool: 3.9.0
       glob: 10.4.5
-      https-proxy-agent: 7.0.5
+      https-proxy-agent: 7.0.6
       jsonwebtoken: 9.0.2
       mime-types: 2.1.35
       mkdirp: 1.0.4