chore: Update LangChain dependencies (no-changelog) (#16523)

This commit is contained in:
Eugene
2025-06-20 11:50:14 +02:00
committed by GitHub
parent 57911225e7
commit 67852b826f
21 changed files with 644 additions and 551 deletions

View File

@@ -321,7 +321,7 @@ export class LmChatAnthropic implements INodeType {
const model = new ChatAnthropic({
anthropicApiKey: credentials.apiKey,
-modelName,
+model: modelName,
anthropicApiUrl: baseURL,
maxTokens: options.maxTokensToSample,
temperature: options.temperature,

View File

@@ -367,7 +367,7 @@ export class LmChatOpenAi implements INodeType {
const model = new ChatOpenAI({
openAIApiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
...options,
timeout: options.timeout ?? 60000,
maxRetries: options.maxRetries ?? 2,

View File

@@ -259,7 +259,7 @@ export class LmOpenAi implements INodeType {
const model = new OpenAI({
openAIApiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
...options,
configuration,
timeout: options.timeout ?? 60000,

View File

@@ -12,6 +12,7 @@ import {
import { getHttpProxyAgent } from '@utils/httpProxyAgent';
import { getConnectionHintNoticeField } from '@utils/sharedFields';
import type { OpenAICompatibleCredential } from '../../../types/types';
+import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
import { N8nLlmTracing } from '../N8nLlmTracing';
@@ -234,7 +235,7 @@ export class LmChatDeepSeek implements INodeType {
const model = new ChatOpenAI({
openAIApiKey: credentials.apiKey,
-modelName,
+model: modelName,
...options,
timeout: options.timeout ?? 60000,
maxRetries: options.maxRetries ?? 2,

View File

@@ -149,7 +149,7 @@ export class LmChatGoogleGemini implements INodeType {
const model = new ChatGoogleGenerativeAI({
apiKey: credentials.apiKey as string,
baseUrl: credentials.host as string,
-modelName,
+model: modelName,
topK: options.topK,
topP: options.topP,
temperature: options.temperature,

View File

@@ -143,7 +143,7 @@ export class LmChatGroq implements INodeType {
const model = new ChatGroq({
apiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
maxTokens: options.maxTokensToSample,
temperature: options.temperature,
callbacks: [new N8nLlmTracing(this)],

View File

@@ -190,7 +190,7 @@ export class LmChatMistralCloud implements INodeType {
const model = new ChatMistralAI({
apiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
...options,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),

View File

@@ -12,6 +12,7 @@ import {
import { getHttpProxyAgent } from '@utils/httpProxyAgent';
import { getConnectionHintNoticeField } from '@utils/sharedFields';
import type { OpenAICompatibleCredential } from '../../../types/types';
+import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
import { N8nLlmTracing } from '../N8nLlmTracing';
@@ -233,7 +234,7 @@ export class LmChatOpenRouter implements INodeType {
const model = new ChatOpenAI({
openAIApiKey: credentials.apiKey,
-modelName,
+model: modelName,
...options,
timeout: options.timeout ?? 60000,
maxRetries: options.maxRetries ?? 2,

View File

@@ -12,6 +12,7 @@ import {
import { getHttpProxyAgent } from '@utils/httpProxyAgent';
import { getConnectionHintNoticeField } from '@utils/sharedFields';
import type { OpenAICompatibleCredential } from '../../../types/types';
+import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
import { N8nLlmTracing } from '../N8nLlmTracing';
@@ -234,7 +235,7 @@ export class LmChatXAiGrok implements INodeType {
const model = new ChatOpenAI({
openAIApiKey: credentials.apiKey,
-modelName,
+model: modelName,
...options,
timeout: options.timeout ?? 60000,
maxRetries: options.maxRetries ?? 2,