chore: Update LangChain dependencies (no-changelog) (#16523)
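Every hunk below makes the same change: with the updated LangChain packages, the constructor option for selecting a model is `model` rather than the older `modelName` spelling, so each LLM node now passes `model: modelName` instead of the bare `modelName` shorthand. A minimal before/after sketch (the model id is illustrative only):

import { ChatOpenAI } from '@langchain/openai';

// before: new ChatOpenAI({ modelName: 'gpt-4o-mini' })
// after the dependency bump:
const model = new ChatOpenAI({ model: 'gpt-4o-mini' });
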
@@ -321,7 +321,7 @@ export class LmChatAnthropic implements INodeType {
 		const model = new ChatAnthropic({
 			anthropicApiKey: credentials.apiKey,
-			modelName,
+			model: modelName,
 			anthropicApiUrl: baseURL,
 			maxTokens: options.maxTokensToSample,
 			temperature: options.temperature,

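For context, a standalone sketch of the updated ChatAnthropic construction outside n8n. The API key source, model id, and option values are illustrative assumptions; the node reads them from credentials and node options at runtime.

import { ChatAnthropic } from '@langchain/anthropic';

// Illustrative values; the n8n node supplies these from credentials/options.
const model = new ChatAnthropic({
  anthropicApiKey: process.env.ANTHROPIC_API_KEY,
  model: 'claude-3-5-sonnet-latest', // formerly `modelName`
  maxTokens: 1024,
  temperature: 0.7,
});

// const reply = await model.invoke('Hello!');
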
@@ -367,7 +367,7 @@ export class LmChatOpenAi implements INodeType {
 		const model = new ChatOpenAI({
 			openAIApiKey: credentials.apiKey as string,
-			modelName,
+			model: modelName,
 			...options,
 			timeout: options.timeout ?? 60000,
 			maxRetries: options.maxRetries ?? 2,

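The same rename for the chat-completions class, sketched outside n8n; key, model id, and option values are illustrative assumptions.

import { ChatOpenAI } from '@langchain/openai';

const model = new ChatOpenAI({
  openAIApiKey: process.env.OPENAI_API_KEY,
  model: 'gpt-4o-mini', // formerly `modelName`
  temperature: 0,
  timeout: 60000, // ms, same default the node applies
  maxRetries: 2,
});
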
@@ -259,7 +259,7 @@ export class LmOpenAi implements INodeType {
 		const model = new OpenAI({
 			openAIApiKey: credentials.apiKey as string,
-			modelName,
+			model: modelName,
 			...options,
 			configuration,
 			timeout: options.timeout ?? 60000,

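The LmOpenAi node uses LangChain's completion-style OpenAI class rather than ChatOpenAI; a hedged sketch with assumed values.

import { OpenAI } from '@langchain/openai';

const llm = new OpenAI({
  openAIApiKey: process.env.OPENAI_API_KEY,
  model: 'gpt-3.5-turbo-instruct', // formerly `modelName`
  timeout: 60000,
});
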
@@ -12,6 +12,7 @@ import {
 import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';

 import type { OpenAICompatibleCredential } from '../../../types/types';
 import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
 import { N8nLlmTracing } from '../N8nLlmTracing';

@@ -234,7 +235,7 @@ export class LmChatDeepSeek implements INodeType {
 		const model = new ChatOpenAI({
 			openAIApiKey: credentials.apiKey,
-			modelName,
+			model: modelName,
 			...options,
 			timeout: options.timeout ?? 60000,
 			maxRetries: options.maxRetries ?? 2,

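The DeepSeek node reuses ChatOpenAI against an OpenAI-compatible endpoint (hence the OpenAICompatibleCredential import). A hedged sketch; the base URL and model id below are assumptions for illustration, since the node derives them from credentials.

import { ChatOpenAI } from '@langchain/openai';

const model = new ChatOpenAI({
  openAIApiKey: process.env.DEEPSEEK_API_KEY,
  model: 'deepseek-chat', // formerly `modelName`
  configuration: {
    baseURL: 'https://api.deepseek.com', // assumed endpoint
  },
  timeout: 60000,
  maxRetries: 2,
});
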
@@ -149,7 +149,7 @@ export class LmChatGoogleGemini implements INodeType {
 		const model = new ChatGoogleGenerativeAI({
 			apiKey: credentials.apiKey as string,
 			baseUrl: credentials.host as string,
-			modelName,
+			model: modelName,
 			topK: options.topK,
 			topP: options.topP,
 			temperature: options.temperature,

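A standalone sketch of the updated Gemini construction; key, model id, and sampling values are illustrative assumptions.

import { ChatGoogleGenerativeAI } from '@langchain/google-genai';

const model = new ChatGoogleGenerativeAI({
  apiKey: process.env.GOOGLE_API_KEY,
  model: 'gemini-1.5-flash', // formerly `modelName`
  topK: 40,
  topP: 0.9,
  temperature: 0.7,
});
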
@@ -143,7 +143,7 @@ export class LmChatGroq implements INodeType {
 		const model = new ChatGroq({
 			apiKey: credentials.apiKey as string,
-			modelName,
+			model: modelName,
 			maxTokens: options.maxTokensToSample,
 			temperature: options.temperature,
 			callbacks: [new N8nLlmTracing(this)],

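The equivalent Groq sketch with assumed values.

import { ChatGroq } from '@langchain/groq';

const model = new ChatGroq({
  apiKey: process.env.GROQ_API_KEY,
  model: 'llama-3.1-8b-instant', // formerly `modelName`
  maxTokens: 1024,
  temperature: 0.7,
});
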
@@ -190,7 +190,7 @@ export class LmChatMistralCloud implements INodeType {
 		const model = new ChatMistralAI({
 			apiKey: credentials.apiKey as string,
-			modelName,
+			model: modelName,
 			...options,
 			callbacks: [new N8nLlmTracing(this)],
 			onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),

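The equivalent Mistral sketch with assumed values.

import { ChatMistralAI } from '@langchain/mistralai';

const model = new ChatMistralAI({
  apiKey: process.env.MISTRAL_API_KEY,
  model: 'mistral-small-latest', // formerly `modelName`
  temperature: 0.7,
});
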
@@ -12,6 +12,7 @@ import {
 import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';

 import type { OpenAICompatibleCredential } from '../../../types/types';
 import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
 import { N8nLlmTracing } from '../N8nLlmTracing';

@@ -233,7 +234,7 @@ export class LmChatOpenRouter implements INodeType {
 		const model = new ChatOpenAI({
 			openAIApiKey: credentials.apiKey,
-			modelName,
+			model: modelName,
 			...options,
 			timeout: options.timeout ?? 60000,
 			maxRetries: options.maxRetries ?? 2,

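OpenRouter follows the same OpenAI-compatible pattern as the DeepSeek sketch above; base URL and model id here are assumptions.

import { ChatOpenAI } from '@langchain/openai';

const model = new ChatOpenAI({
  openAIApiKey: process.env.OPENROUTER_API_KEY,
  model: 'openai/gpt-4o-mini', // formerly `modelName`
  configuration: {
    baseURL: 'https://openrouter.ai/api/v1', // assumed endpoint
  },
  maxRetries: 2,
});
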
@@ -12,6 +12,7 @@ import {
 import { getHttpProxyAgent } from '@utils/httpProxyAgent';
 import { getConnectionHintNoticeField } from '@utils/sharedFields';

 import type { OpenAICompatibleCredential } from '../../../types/types';
 import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
 import { N8nLlmTracing } from '../N8nLlmTracing';

@@ -234,7 +235,7 @@ export class LmChatXAiGrok implements INodeType {
 		const model = new ChatOpenAI({
 			openAIApiKey: credentials.apiKey,
-			modelName,
+			model: modelName,
 			...options,
 			timeout: options.timeout ?? 60000,
 			maxRetries: options.maxRetries ?? 2,

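The xAI Grok node is another OpenAI-compatible case; base URL and model id below are assumptions for illustration.

import { ChatOpenAI } from '@langchain/openai';

const model = new ChatOpenAI({
  openAIApiKey: process.env.XAI_API_KEY,
  model: 'grok-2-latest', // formerly `modelName`
  configuration: {
    baseURL: 'https://api.x.ai/v1', // assumed endpoint
  },
  timeout: 60000,
  maxRetries: 2,
});
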