refactor: Update Langchain to 0.1.41 & add support for Claude 3 (#8825)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Michael Kret <michael.k@radency.com>
Author: oleg
Date: 2024-03-07 11:36:36 +01:00
Committed by: GitHub
Parent: c6f6254c0e
Commit: 0f7ae3f50a
76 changed files with 908 additions and 451 deletions
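The core of the change is moving every provider class from the monolithic 'langchain' entry points to the scoped per-provider packages that LangChain JS 0.1.x splits them into, then wiring the new Claude 3 models into the Anthropic node. A minimal sketch of the recurring import migration, with an illustrative model name and an assumed ANTHROPIC_API_KEY environment variable (neither is prescribed by this commit):

// Before: provider classes lived in the monolithic package
// import { ChatAnthropic } from 'langchain/chat_models/anthropic';

// After: each provider ships as its own scoped package
import { ChatAnthropic } from '@langchain/anthropic';

const model = new ChatAnthropic({
  anthropicApiKey: process.env.ANTHROPIC_API_KEY, // assumed env var
  modelName: 'claude-3-sonnet-20240229', // one of the newly added options
  maxTokens: 4096,
});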


@@ -1,16 +1,53 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
   NodeConnectionType,
+  type INodeProperties,
   type IExecuteFunctions,
   type INodeType,
   type INodeTypeDescription,
   type SupplyData,
 } from 'n8n-workflow';
-import { ChatAnthropic } from 'langchain/chat_models/anthropic';
+import { ChatAnthropic } from '@langchain/anthropic';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
+
+const modelField: INodeProperties = {
+  displayName: 'Model',
+  name: 'model',
+  type: 'options',
+  // eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items
+  options: [
+    {
+      name: 'Claude 3 Opus(20240229)',
+      value: 'claude-3-opus-20240229',
+    },
+    {
+      name: 'Claude 3 Sonnet(20240229)',
+      value: 'claude-3-sonnet-20240229',
+    },
+    {
+      name: 'LEGACY: Claude 2',
+      value: 'claude-2',
+    },
+    {
+      name: 'LEGACY: Claude 2.1',
+      value: 'claude-2.1',
+    },
+    {
+      name: 'LEGACY: Claude Instant 1.2',
+      value: 'claude-instant-1.2',
+    },
+    {
+      name: 'LEGACY: Claude Instant 1',
+      value: 'claude-instant-1',
+    },
+  ],
+  description:
+    'The model which will generate the completion. <a href="https://docs.anthropic.com/claude/docs/models-overview">Learn more</a>.',
+  default: 'claude-2',
+};
+
 export class LmChatAnthropic implements INodeType {
   description: INodeTypeDescription = {
     displayName: 'Anthropic Chat Model',
@@ -18,7 +55,7 @@ export class LmChatAnthropic implements INodeType {
     name: 'lmChatAnthropic',
     icon: 'file:anthropic.svg',
     group: ['transform'],
-    version: 1,
+    version: [1, 1.1],
     description: 'Language Model Anthropic',
     defaults: {
       name: 'Anthropic Chat Model',
@@ -35,6 +72,7 @@ export class LmChatAnthropic implements INodeType {
         },
       ],
     },
+    alias: ['claude', 'sonnet', 'opus'],
   },
   // eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
   inputs: [],
@@ -50,30 +88,21 @@
   properties: [
     getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
     {
-      displayName: 'Model',
-      name: 'model',
-      type: 'options',
-      options: [
-        {
-          name: 'Claude 2',
-          value: 'claude-2',
-        },
-        {
-          name: 'Claude 2.1',
-          value: 'claude-2.1',
-        },
-        {
-          name: 'Claude Instant 1.2',
-          value: 'claude-instant-1.2',
-        },
-        {
-          name: 'Claude Instant 1',
-          value: 'claude-instant-1',
-        },
-      ],
-      description:
-        'The model which will generate the completion. <a href="https://docs.anthropic.com/claude/reference/selecting-a-model">Learn more</a>.',
-      default: 'claude-2',
+      ...modelField,
+      displayOptions: {
+        show: {
+          '@version': [1],
+        },
+      },
+    },
+    {
+      ...modelField,
+      default: 'claude-3-sonnet-20240229',
+      displayOptions: {
+        hide: {
+          '@version': [1],
+        },
+      },
     },
     {
       displayName: 'Options',
@@ -86,7 +115,7 @@ export class LmChatAnthropic implements INodeType {
     {
       displayName: 'Maximum Number of Tokens',
       name: 'maxTokensToSample',
-      default: 32768,
+      default: 4096,
       description: 'The maximum number of tokens to generate in the completion',
       type: 'number',
     },
@@ -126,12 +155,20 @@
     const credentials = await this.getCredentials('anthropicApi');
     const modelName = this.getNodeParameter('model', itemIndex) as string;
-    const options = this.getNodeParameter('options', itemIndex, {}) as object;
+    const options = this.getNodeParameter('options', itemIndex, {}) as {
+      maxTokensToSample?: number;
+      temperature: number;
+      topK: number;
+      topP: number;
+    };
     const model = new ChatAnthropic({
       anthropicApiKey: credentials.apiKey as string,
       modelName,
-      ...options,
+      maxTokens: options.maxTokensToSample,
+      temperature: options.temperature,
+      topK: options.topK,
+      topP: options.topP,
     });
     return {
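Note why the bare '...options' spread is gone: the node stores the token limit under 'maxTokensToSample', while '@langchain/anthropic' expects 'maxTokens', so the fields are now mapped explicitly. A standalone sketch of the resulting call, with placeholder values not taken from this commit:

import { ChatAnthropic } from '@langchain/anthropic';

// Mirrors the explicit field mapping above; invoke() is the standard
// Runnable entry point in LangChain 0.1.x.
const model = new ChatAnthropic({
  anthropicApiKey: process.env.ANTHROPIC_API_KEY, // assumed env var
  modelName: 'claude-3-opus-20240229',
  maxTokens: 4096,
  temperature: 0.7, // placeholder
});

const reply = await model.invoke('Hello, Claude');
console.log(reply.content);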


@@ -7,8 +7,7 @@ import {
   type SupplyData,
 } from 'n8n-workflow';
-import { ChatOllama } from 'langchain/chat_models/ollama';
-// import { ChatAnthropic } from 'langchain/chat_models/anthropic';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 import { ollamaModel, ollamaOptions, ollamaDescription } from '../LMOllama/description';
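'ChatOllama' keeps its constructor surface and only changes its home to the community package. A minimal sketch assuming a locally running Ollama server (both values are assumptions):

import { ChatOllama } from '@langchain/community/chat_models/ollama';

const ollama = new ChatOllama({
  baseUrl: 'http://localhost:11434', // Ollama's default local endpoint
  model: 'llama2', // placeholder model name
});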


@@ -7,8 +7,7 @@ import {
   type SupplyData,
 } from 'n8n-workflow';
-import type { ClientOptions } from 'openai';
-import { ChatOpenAI } from 'langchain/chat_models/openai';
+import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
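'@langchain/openai' re-exports the OpenAI SDK's 'ClientOptions', so the separate 'openai' import collapses into one line. A sketch of how the type is typically threaded into the client; the baseURL override is an illustrative assumption:

import { ChatOpenAI, type ClientOptions } from '@langchain/openai';

const configuration: ClientOptions = {
  baseURL: 'https://api.openai.com/v1', // e.g. to route through a proxy
};

const chat = new ChatOpenAI({
  modelName: 'gpt-3.5-turbo', // placeholder
  configuration,
});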


@@ -7,7 +7,7 @@ import {
   type SupplyData,
 } from 'n8n-workflow';
-import { Cohere } from 'langchain/llms/cohere';
+import { Cohere } from '@langchain/cohere';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
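'Cohere' likewise moves to its own scoped package with the same class surface. A minimal sketch; the model name is an assumption and the key follows the conventional environment variable:

import { Cohere } from '@langchain/cohere';

const cohere = new Cohere({
  model: 'command', // placeholder model name
  apiKey: process.env.COHERE_API_KEY, // assumed env var
});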


@@ -7,7 +7,7 @@ import {
   type SupplyData,
 } from 'n8n-workflow';
-import { Ollama } from 'langchain/llms/ollama';
+import { Ollama } from '@langchain/community/llms/ollama';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 import { ollamaDescription, ollamaModel, ollamaOptions } from './description';


@@ -8,8 +8,7 @@ import type {
   ILoadOptionsFunctions,
 } from 'n8n-workflow';
-import type { ClientOptions } from 'openai';
-import { OpenAI } from 'langchain/llms/openai';
+import { OpenAI, type ClientOptions } from '@langchain/openai';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';


@@ -7,7 +7,7 @@ import {
   type SupplyData,
 } from 'n8n-workflow';
-import { HuggingFaceInference } from 'langchain/llms/hf';
+import { HuggingFaceInference } from '@langchain/community/llms/hf';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
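The Hugging Face LLM wrapper follows the same pattern under the community package. A sketch with placeholder values:

import { HuggingFaceInference } from '@langchain/community/llms/hf';

const hf = new HuggingFaceInference({
  model: 'gpt2', // placeholder model id
  apiKey: process.env.HUGGINGFACEHUB_API_KEY, // assumed env var
});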


@@ -6,7 +6,7 @@ import {
   type INodeTypeDescription,
   type SupplyData,
 } from 'n8n-workflow';
-import { ChatBedrock } from 'langchain/chat_models/bedrock';
+import { BedrockChat } from '@langchain/community/chat_models/bedrock';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 // Dependencies needed underneath the hood. We add them
@@ -149,7 +149,7 @@ export class LmChatAwsBedrock implements INodeType {
       maxTokensToSample: number;
     };
-    const model = new ChatBedrock({
+    const model = new BedrockChat({
       region: credentials.region as string,
       model: modelName,
       temperature: options.temperature,
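Bedrock is the one provider where the exported class itself was renamed (ChatBedrock becomes BedrockChat); the constructor options carry over unchanged. A hedged sketch with placeholder region, model id, and credentials:

import { BedrockChat } from '@langchain/community/chat_models/bedrock';

const bedrock = new BedrockChat({
  region: 'us-east-1', // placeholder region
  model: 'anthropic.claude-v2', // placeholder Bedrock model id
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID!, // assumed env vars
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  },
});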


@@ -7,8 +7,8 @@ import {
   type SupplyData,
 } from 'n8n-workflow';
-import type { ClientOptions } from 'openai';
-import { ChatOpenAI } from 'langchain/chat_models/openai';
+import type { ClientOptions } from '@langchain/openai';
+import { ChatOpenAI } from '@langchain/openai';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';


@@ -6,7 +6,7 @@ import {
   type INodeTypeDescription,
   type SupplyData,
 } from 'n8n-workflow';
-import { ChatGooglePaLM } from 'langchain/chat_models/googlepalm';
+import { ChatGooglePaLM } from '@langchain/community/chat_models/googlepalm';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';


@@ -6,7 +6,7 @@ import {
   type INodeTypeDescription,
   type SupplyData,
 } from 'n8n-workflow';
-import { GooglePaLM } from 'langchain/llms/googlepalm';
+import { GooglePaLM } from '@langchain/community/llms/googlepalm';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';