chore: Update LangChain dependencies (no-changelog) (#16523)

Author: Eugene
Committed by: GitHub
Date: 2025-06-20 11:50:14 +02:00
Parent: 57911225e7
Commit: 67852b826f

21 changed files with 644 additions and 551 deletions
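
Most of the source changes below follow one pattern: newer @langchain/* releases prefer the model constructor field over the deprecated modelName, so every node and helper that instantiates a chat model or embeddings class is updated accordingly. A minimal sketch of the new call shape; the model id and key source here are illustrative, not taken from the diff:

import { ChatOpenAI } from '@langchain/openai';

// 'model' replaces the deprecated 'modelName' field in recent @langchain/openai releases.
const llm = new ChatOpenAI({
	model: 'gpt-4.1-mini-2025-04-14', // was: modelName: 'gpt-4.1-mini-2025-04-14'
	apiKey: process.env.OPENAI_API_KEY, // illustrative only
	temperature: 0,
});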

View File

@@ -94,7 +94,8 @@
 "typescript": "^5.8.2",
 "vue-tsc": "^2.2.8",
 "google-gax": "^4.3.7",
-"ws": ">=8.17.1"
+"ws": ">=8.17.1",
+"zod": "3.25.67"
 },
 "patchedDependencies": {
 "bull@4.16.4": "patches/bull@4.16.4.patch",

View File

@@ -7,7 +7,7 @@ type LLMConfig = {
 export const o4mini = async (config: LLMConfig) => {
 const { ChatOpenAI } = await import('@langchain/openai');
 return new ChatOpenAI({
-modelName: 'o4-mini-2025-04-16',
+model: 'o4-mini-2025-04-16',
 apiKey: config.apiKey,
 configuration: {
 baseURL: config.baseUrl,
@@ -19,7 +19,7 @@ export const o4mini = async (config: LLMConfig) => {
 export const gpt41mini = async (config: LLMConfig) => {
 const { ChatOpenAI } = await import('@langchain/openai');
 return new ChatOpenAI({
-modelName: 'gpt-4.1-mini-2025-04-14',
+model: 'gpt-4.1-mini-2025-04-14',
 apiKey: config.apiKey,
 temperature: 0,
 configuration: {
@@ -32,7 +32,7 @@ export const gpt41mini = async (config: LLMConfig) => {
 export const anthropicClaude37Sonnet = async (config: LLMConfig) => {
 const { ChatAnthropic } = await import('@langchain/anthropic');
 return new ChatAnthropic({
-modelName: 'claude-3-7-sonnet-20250219',
+model: 'claude-3-7-sonnet-20250219',
 apiKey: config.apiKey,
 temperature: 0,
 maxTokens: 16000,

View File

@@ -1,9 +1,8 @@
 import type { BaseOutputParser } from '@langchain/core/output_parsers';
 import type { DynamicStructuredTool, Tool } from 'langchain/tools';
 import { NodeOperationError, type IExecuteFunctions, type INode } from 'n8n-workflow';
-import type { z } from 'zod';
-type ZodObjectAny = z.ZodObject<any, any, any, any>;
+import type { ZodObjectAny } from '../../../../types/types';
 export async function extractParsedOutput(
 ctx: IExecuteFunctions,

View File

@@ -4,6 +4,7 @@ import { NodeOperationError } from 'n8n-workflow';
 import type { INode } from 'n8n-workflow';
 import { z } from 'zod';
+import type { ZodObjectAny } from '../../../../types/types';
 import { checkForStructuredTools } from '../agents/utils';
 describe('checkForStructuredTools', () => {
@@ -41,7 +42,7 @@ describe('checkForStructuredTools', () => {
 func: async () => 'result',
 });
-const tools: Array<Tool | DynamicStructuredTool> = [dynamicTool];
+const tools: Array<Tool | DynamicStructuredTool<ZodObjectAny>> = [dynamicTool];
 await expect(checkForStructuredTools(tools, mockNode, 'Conversation Agent')).rejects.toThrow(
 NodeOperationError,

View File

@@ -189,7 +189,11 @@ describe('imageUtils', () => {
 it('should handle image data differently for GoogleGenerativeAI models', async () => {
 // Mock a Google model - using our mocked class
-mockContext.getInputConnectionData.mockResolvedValue(new ChatGoogleGenerativeAI());
+mockContext.getInputConnectionData.mockResolvedValue(
+new ChatGoogleGenerativeAI({
+model: 'gemini-1.0-pro',
+}),
+);
 const message: MessageTemplate = {
 type: 'HumanMessagePromptTemplate',
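
The mock above now passes a model id because the updated @langchain/google-genai constructor expects an explicit model, so even a mocked instance is constructed with one. A minimal sketch of the constructor call; the key source is illustrative (the test itself uses a mocked class and no credentials):

import { ChatGoogleGenerativeAI } from '@langchain/google-genai';

// Newer @langchain/google-genai releases expect an explicit model id at construction time.
const chat = new ChatGoogleGenerativeAI({
	model: 'gemini-1.0-pro',
	apiKey: process.env.GOOGLE_API_KEY, // illustrative only
});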

View File

@@ -126,7 +126,7 @@ export class EmbeddingsGoogleGemini implements INodeType {
 const credentials = await this.getCredentials('googlePalmApi');
 const embeddings = new GoogleGenerativeAIEmbeddings({
 apiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
 });
 return {

View File

@@ -146,7 +146,7 @@ export class EmbeddingsMistralCloud implements INodeType {
 const embeddings = new MistralAIEmbeddings({
 apiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
 ...options,
 });

View File

@@ -321,7 +321,7 @@ export class LmChatAnthropic implements INodeType {
 const model = new ChatAnthropic({
 anthropicApiKey: credentials.apiKey,
-modelName,
+model: modelName,
 anthropicApiUrl: baseURL,
 maxTokens: options.maxTokensToSample,
 temperature: options.temperature,

View File

@@ -367,7 +367,7 @@ export class LmChatOpenAi implements INodeType {
 const model = new ChatOpenAI({
 openAIApiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
 ...options,
 timeout: options.timeout ?? 60000,
 maxRetries: options.maxRetries ?? 2,

View File

@@ -259,7 +259,7 @@ export class LmOpenAi implements INodeType {
 const model = new OpenAI({
 openAIApiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
 ...options,
 configuration,
 timeout: options.timeout ?? 60000,

View File

@@ -12,6 +12,7 @@ import {
 import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';
+import type { OpenAICompatibleCredential } from '../../../types/types';
 import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
 import { N8nLlmTracing } from '../N8nLlmTracing';
@@ -234,7 +235,7 @@ export class LmChatDeepSeek implements INodeType {
 const model = new ChatOpenAI({
 openAIApiKey: credentials.apiKey,
-modelName,
+model: modelName,
 ...options,
 timeout: options.timeout ?? 60000,
 maxRetries: options.maxRetries ?? 2,

View File

@@ -149,7 +149,7 @@ export class LmChatGoogleGemini implements INodeType {
 const model = new ChatGoogleGenerativeAI({
 apiKey: credentials.apiKey as string,
 baseUrl: credentials.host as string,
-modelName,
+model: modelName,
 topK: options.topK,
 topP: options.topP,
 temperature: options.temperature,

View File

@@ -143,7 +143,7 @@ export class LmChatGroq implements INodeType {
 const model = new ChatGroq({
 apiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
 maxTokens: options.maxTokensToSample,
 temperature: options.temperature,
 callbacks: [new N8nLlmTracing(this)],

View File

@@ -190,7 +190,7 @@ export class LmChatMistralCloud implements INodeType {
 const model = new ChatMistralAI({
 apiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
 ...options,
 callbacks: [new N8nLlmTracing(this)],
 onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),

View File

@@ -12,6 +12,7 @@ import {
 import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';
+import type { OpenAICompatibleCredential } from '../../../types/types';
 import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
 import { N8nLlmTracing } from '../N8nLlmTracing';
@@ -233,7 +234,7 @@ export class LmChatOpenRouter implements INodeType {
 const model = new ChatOpenAI({
 openAIApiKey: credentials.apiKey,
-modelName,
+model: modelName,
 ...options,
 timeout: options.timeout ?? 60000,
 maxRetries: options.maxRetries ?? 2,

View File

@@ -12,6 +12,7 @@ import {
 import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';
+import type { OpenAICompatibleCredential } from '../../../types/types';
 import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
 import { N8nLlmTracing } from '../N8nLlmTracing';
@@ -234,7 +235,7 @@ export class LmChatXAiGrok implements INodeType {
 const model = new ChatOpenAI({
 openAIApiKey: credentials.apiKey,
-modelName,
+model: modelName,
 ...options,
 timeout: options.timeout ?? 60000,
 maxRetries: options.maxRetries ?? 2,

View File

@@ -160,20 +160,20 @@
 "@google/generative-ai": "0.21.0",
 "@huggingface/inference": "2.8.0",
 "@langchain/anthropic": "catalog:",
-"@langchain/aws": "0.1.10",
-"@langchain/cohere": "0.3.2",
+"@langchain/aws": "0.1.11",
+"@langchain/cohere": "0.3.4",
 "@langchain/community": "catalog:",
 "@langchain/core": "catalog:",
-"@langchain/google-genai": "0.1.6",
-"@langchain/google-vertexai": "0.1.8",
-"@langchain/groq": "0.1.3",
-"@langchain/mistralai": "0.2.0",
+"@langchain/google-genai": "0.2.13",
+"@langchain/google-vertexai": "0.2.13",
+"@langchain/groq": "0.2.3",
+"@langchain/mistralai": "0.2.1",
 "@langchain/mongodb": "^0.1.0",
-"@langchain/ollama": "0.1.4",
+"@langchain/ollama": "0.2.2",
 "@langchain/openai": "catalog:",
-"@langchain/pinecone": "0.1.3",
+"@langchain/pinecone": "0.2.0",
 "@langchain/qdrant": "0.1.2",
-"@langchain/redis": "0.1.0",
+"@langchain/redis": "0.1.1",
 "@langchain/textsplitters": "0.1.0",
 "@modelcontextprotocol/sdk": "1.12.0",
 "@mozilla/readability": "0.6.0",
@@ -182,7 +182,7 @@
 "@n8n/typeorm": "0.3.20-12",
 "@n8n/typescript-config": "workspace:*",
 "@n8n/vm2": "3.9.25",
-"@pinecone-database/pinecone": "4.0.0",
+"@pinecone-database/pinecone": "^5.0.2",
 "@qdrant/js-client-rest": "1.14.1",
 "@supabase/supabase-js": "2.49.9",
 "@xata.io/client": "0.28.4",
@@ -197,14 +197,14 @@
 "html-to-text": "9.0.5",
 "https-proxy-agent": "catalog:",
 "jsdom": "23.0.1",
-"langchain": "0.3.11",
+"langchain": "0.3.28",
 "lodash": "catalog:",
 "mammoth": "1.7.2",
 "mime-types": "2.1.35",
 "mongodb": "6.11.0",
 "n8n-nodes-base": "workspace:*",
 "n8n-workflow": "workspace:*",
-"openai": "4.78.1",
+"openai": "4.103.0",
 "pdf-parse": "1.1.1",
 "pg": "8.12.0",
 "redis": "4.6.12",

View File

@@ -1 +1,5 @@
-type OpenAICompatibleCredential = { apiKey: string; url: string };
+import type { z } from 'zod';
+
+export type OpenAICompatibleCredential = { apiKey: string; url: string };
+
+export type ZodObjectAny = z.ZodObject<any, any, any, any>;
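
The shared ZodObjectAny alias exists because newer langchain releases tighten how DynamicStructuredTool is parameterized over its zod schema, so several files in this commit pass the same "any zod object" type argument explicitly. A minimal sketch of a tool typed against it; the tool name, schema, and handler are illustrative, and the alias is repeated inline so the snippet stays self-contained:

import { DynamicStructuredTool } from 'langchain/tools';
import { z } from 'zod';

// Same shape as the shared alias exported from types/types.ts above.
type ZodObjectAny = z.ZodObject<any, any, any, any>;

// Illustrative tool; the generic pins the schema type expected by newer langchain versions.
const echoTool: DynamicStructuredTool<ZodObjectAny> = new DynamicStructuredTool({
	name: 'echo',
	description: 'Returns the text it receives',
	schema: z.object({ text: z.string() }),
	func: async ({ text }) => text,
});

This is the same reason the agent test above types its tools array as Array<Tool | DynamicStructuredTool<ZodObjectAny>> and N8nTool below extends DynamicStructuredTool<ZodObjectAny>.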

View File

@@ -6,6 +6,8 @@ import { NodeConnectionTypes, jsonParse, NodeOperationError } from 'n8n-workflow'
 import type { ZodTypeAny } from 'zod';
 import { ZodBoolean, ZodNullable, ZodNumber, ZodObject, ZodOptional } from 'zod';
+
+import type { ZodObjectAny } from '../types/types';
 
 const getSimplifiedType = (schema: ZodTypeAny) => {
 if (schema instanceof ZodObject) {
 return 'object';
@@ -44,10 +46,10 @@ ALL parameters marked as required must be provided`;
 return description;
 };
 
-export class N8nTool extends DynamicStructuredTool {
+export class N8nTool extends DynamicStructuredTool<ZodObjectAny> {
 constructor(
 private context: ISupplyDataFunctions,
-fields: DynamicStructuredToolInput,
+fields: DynamicStructuredToolInput<ZodObjectAny>,
 ) {
 super(fields);
 }

pnpm-lock.yaml (generated, 1103 changed lines)

Diff suppressed because it is too large.

View File

@@ -37,12 +37,12 @@ catalog:
 uuid: 10.0.0
 xml2js: 0.6.2
 xss: 1.0.15
-zod: 3.24.1
+zod: 3.25.67
 zod-to-json-schema: 3.23.3
-'@langchain/core': 0.3.48
-'@langchain/openai': 0.5.0
-'@langchain/anthropic': 0.3.21
-'@langchain/community': 0.3.24
+'@langchain/core': 0.3.59
+'@langchain/openai': 0.5.13
+'@langchain/anthropic': 0.3.22
+'@langchain/community': 0.3.46
 '@n8n_io/ai-assistant-sdk': 1.14.0
 catalogs: