refactor: Update Langchain to 0.1.41 & add support for Claude 3 (#8825)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Michael Kret <michael.k@radency.com>
This commit is contained in:
oleg
2024-03-07 11:36:36 +01:00
committed by GitHub
parent c6f6254c0e
commit 0f7ae3f50a
76 changed files with 908 additions and 451 deletions

View File

@@ -6,9 +6,9 @@ import {
} from 'n8n-workflow';
import { initializeAgentExecutorWithOptions } from 'langchain/agents';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts';
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import { PromptTemplate } from '@langchain/core/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers';
import {
isChatInstance,

View File

@@ -7,11 +7,11 @@ import {
import type { AgentExecutorInput } from 'langchain/agents';
import { AgentExecutor, OpenAIAgent } from 'langchain/agents';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import { PromptTemplate } from '@langchain/core/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers';
import { BufferMemory, type BaseChatMemory } from 'langchain/memory';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatOpenAI } from '@langchain/openai';
import {
getConnectedTools,
getOptionalOutputParsers,

View File

@@ -5,10 +5,10 @@ import {
NodeOperationError,
} from 'n8n-workflow';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import { PromptTemplate } from '@langchain/core/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers';
import type { BaseChatModel } from 'langchain/chat_models/base';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute';
import {
getConnectedTools,

View File

@@ -7,10 +7,10 @@ import {
import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents';
import type { BaseLanguageModel } from 'langchain/base_language';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import { PromptTemplate } from '@langchain/core/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers';
import type { BaseChatModel } from 'langchain/chat_models/base';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import {
getConnectedTools,
getOptionalOutputParsers,

View File

@@ -3,13 +3,14 @@ import {
type INodeExecutionData,
NodeConnectionType,
NodeOperationError,
type IDataObject,
} from 'n8n-workflow';
import { SqlDatabase } from 'langchain/sql_db';
import type { SqlCreatePromptArgs } from 'langchain/agents/toolkits/sql';
import { SqlToolkit, createSqlAgent } from 'langchain/agents/toolkits/sql';
import type { BaseLanguageModel } from 'langchain/dist/base_language';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import type { DataSource } from '@n8n/typeorm';
import { getPromptInputByType, serializeChatHistory } from '../../../../../utils/helpers';
@@ -123,7 +124,7 @@ export async function sqlAgentAgentExecute(
chatHistory = serializeChatHistory(messages);
}
let response;
let response: IDataObject;
try {
response = await agentExecutor.call({
input,
@@ -131,10 +132,10 @@ export async function sqlAgentAgentExecute(
chatHistory,
});
} catch (error) {
if (error.message?.output) {
response = error.message;
if ((error.message as IDataObject)?.output) {
response = error.message as IDataObject;
} else {
throw new NodeOperationError(this.getNode(), error.message, { itemIndex: i });
throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex: i });
}
}

View File

@@ -1,6 +1,6 @@
import { zodToJsonSchema } from 'zod-to-json-schema';
import type { OpenAI as OpenAIClient } from 'openai';
import type { StructuredTool } from 'langchain/tools';
import type { StructuredTool } from '@langchain/core/tools';
// Copied from langchain (`langchain/src/tools/convert_to_openai.ts`)
// since these functions are not exported

View File

@@ -8,19 +8,19 @@ import type {
INodeTypeDescription,
} from 'n8n-workflow';
import type { BaseLanguageModel } from 'langchain/base_language';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import {
AIMessagePromptTemplate,
PromptTemplate,
SystemMessagePromptTemplate,
HumanMessagePromptTemplate,
ChatPromptTemplate,
} from 'langchain/prompts';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
} from '@langchain/core/prompts';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import { CombiningOutputParser } from 'langchain/output_parsers';
import { LLMChain } from 'langchain/chains';
import type { BaseChatModel } from 'langchain/chat_models/base';
import { HumanMessage } from 'langchain/schema';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { HumanMessage } from '@langchain/core/messages';
import { getTemplateNoticeField } from '../../../utils/sharedFields';
import {
getOptionalOutputParsers,
@@ -92,6 +92,7 @@ async function getChainPromptTemplate(
llm: BaseLanguageModel | BaseChatModel,
messages?: MessagesTemplate[],
formatInstructions?: string,
query?: string,
) {
const queryTemplate = new PromptTemplate({
template: `{query}${formatInstructions ? '\n{formatInstructions}' : ''}`,
@@ -129,7 +130,15 @@ async function getChainPromptTemplate(
}),
);
parsedMessages.push(new HumanMessagePromptTemplate(queryTemplate));
const lastMessage = parsedMessages[parsedMessages.length - 1];
// If the last message is a human message and it has an array of content, we need to add the query to the last message
if (lastMessage instanceof HumanMessage && Array.isArray(lastMessage.content)) {
const humanMessage = new HumanMessagePromptTemplate(queryTemplate);
const test = await humanMessage.format({ query });
lastMessage.content.push({ text: test.content.toString(), type: 'text' });
} else {
parsedMessages.push(new HumanMessagePromptTemplate(queryTemplate));
}
return ChatPromptTemplate.fromMessages(parsedMessages);
}
@@ -146,6 +155,7 @@ async function createSimpleLLMChain(
llm,
prompt,
});
const response = (await chain.call({
query,
signal: context.getExecutionCancelSignal(),
@@ -167,6 +177,8 @@ async function getChain(
itemIndex,
llm,
messages,
undefined,
query,
);
// If there are no output parsers, create a simple LLM chain and execute the query
@@ -187,6 +199,7 @@ async function getChain(
llm,
messages,
formatInstructions,
query,
);
const chain = prompt.pipe(llm).pipe(combinedOutputParser);

View File

@@ -8,8 +8,8 @@ import {
} from 'n8n-workflow';
import { RetrievalQAChain } from 'langchain/chains';
import type { BaseLanguageModel } from 'langchain/dist/base_language';
import type { BaseRetriever } from 'langchain/schema/retriever';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { BaseRetriever } from '@langchain/core/retrievers';
import { getTemplateNoticeField } from '../../../utils/sharedFields';
import { getPromptInputByType } from '../../../utils/helpers';

View File

@@ -9,9 +9,9 @@ import {
import type { SummarizationChainParams } from 'langchain/chains';
import { loadSummarizationChain } from 'langchain/chains';
import type { BaseLanguageModel } from 'langchain/dist/base_language';
import type { Document } from 'langchain/document';
import { PromptTemplate } from 'langchain/prompts';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { Document } from '@langchain/core/documents';
import { PromptTemplate } from '@langchain/core/prompts';
import { N8nJsonLoader } from '../../../../utils/N8nJsonLoader';
import { N8nBinaryLoader } from '../../../../utils/N8nBinaryLoader';
import { getTemplateNoticeField } from '../../../../utils/sharedFields';

View File

@@ -9,8 +9,8 @@ import type {
} from 'n8n-workflow';
import { loadSummarizationChain } from 'langchain/chains';
import type { BaseLanguageModel } from 'langchain/dist/base_language';
import type { Document } from 'langchain/document';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { Document } from '@langchain/core/documents';
import type { TextSplitter } from 'langchain/text_splitter';
import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { N8nJsonLoader } from '../../../../utils/N8nJsonLoader';

View File

@@ -1,6 +1,5 @@
import type { SummarizationChainParams } from 'langchain/chains';
import { PromptTemplate } from 'langchain/prompts';
import { PromptTemplate } from '@langchain/core/prompts';
interface ChainTypeOptions {
combineMapPrompt?: string;
prompt?: string;

View File

@@ -15,7 +15,7 @@ import {
import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox';
import { standardizeOutput } from 'n8n-nodes-base/dist/nodes/Code/utils';
import type { Tool } from 'langchain/tools';
import type { Tool } from '@langchain/core/tools';
import { makeResolverFromLegacyOptions } from '@n8n/vm2';
import { logWrapper } from '../../utils/logWrapper';
@@ -36,7 +36,7 @@ const connectorTypes = {
[NodeConnectionType.Main]: 'Main',
};
const defaultCodeExecute = `const { PromptTemplate } = require('langchain/prompts');
const defaultCodeExecute = `const { PromptTemplate } = require('@langchain/core/prompts');
const query = 'Tell me a joke';
const prompt = PromptTemplate.fromTemplate(query);

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { BedrockEmbeddings } from 'langchain/embeddings/bedrock';
import { BedrockEmbeddings } from '@langchain/community/embeddings/bedrock';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -7,7 +7,7 @@ import {
type SupplyData,
} from 'n8n-workflow';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { OpenAIEmbeddings } from '@langchain/openai';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { CohereEmbeddings } from 'langchain/embeddings/cohere';
import { CohereEmbeddings } from '@langchain/cohere';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
@@ -89,7 +89,7 @@ export class EmbeddingsCohere implements INodeType {
const credentials = await this.getCredentials('cohereApi');
const embeddings = new CohereEmbeddings({
apiKey: credentials.apiKey as string,
modelName,
model: modelName,
});
return {

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { GooglePaLMEmbeddings } from 'langchain/embeddings/googlepalm';
import { GooglePaLMEmbeddings } from '@langchain/community/embeddings/googlepalm';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { HuggingFaceInferenceEmbeddings } from 'langchain/embeddings/hf';
import { HuggingFaceInferenceEmbeddings } from '@langchain/community/embeddings/hf';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { OllamaEmbeddings } from 'langchain/embeddings/ollama';
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { ollamaDescription, ollamaModel } from '../../llms/LMOllama/description';

View File

@@ -9,7 +9,7 @@ import {
} from 'n8n-workflow';
import type { ClientOptions } from 'openai';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { OpenAIEmbeddings } from '@langchain/openai';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -1,16 +1,53 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type INodeProperties,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { ChatAnthropic } from 'langchain/chat_models/anthropic';
import { ChatAnthropic } from '@langchain/anthropic';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
const modelField: INodeProperties = {
displayName: 'Model',
name: 'model',
type: 'options',
// eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items
options: [
{
name: 'Claude 3 Opus(20240229)',
value: 'claude-3-opus-20240229',
},
{
name: 'Claude 3 Sonnet(20240229)',
value: 'claude-3-sonnet-20240229',
},
{
name: 'LEGACY: Claude 2',
value: 'claude-2',
},
{
name: 'LEGACY: Claude 2.1',
value: 'claude-2.1',
},
{
name: 'LEGACY: Claude Instant 1.2',
value: 'claude-instant-1.2',
},
{
name: 'LEGACY: Claude Instant 1',
value: 'claude-instant-1',
},
],
description:
'The model which will generate the completion. <a href="https://docs.anthropic.com/claude/docs/models-overview">Learn more</a>.',
default: 'claude-2',
};
export class LmChatAnthropic implements INodeType {
description: INodeTypeDescription = {
displayName: 'Anthropic Chat Model',
@@ -18,7 +55,7 @@ export class LmChatAnthropic implements INodeType {
name: 'lmChatAnthropic',
icon: 'file:anthropic.svg',
group: ['transform'],
version: 1,
version: [1, 1.1],
description: 'Language Model Anthropic',
defaults: {
name: 'Anthropic Chat Model',
@@ -35,6 +72,7 @@ export class LmChatAnthropic implements INodeType {
},
],
},
alias: ['claude', 'sonnet', 'opus'],
},
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
@@ -50,30 +88,21 @@ export class LmChatAnthropic implements INodeType {
properties: [
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiChain]),
{
displayName: 'Model',
name: 'model',
type: 'options',
options: [
{
name: 'Claude 2',
value: 'claude-2',
...modelField,
displayOptions: {
show: {
'@version': [1],
},
{
name: 'Claude 2.1',
value: 'claude-2.1',
},
},
{
...modelField,
default: 'claude-3-sonnet-20240229',
displayOptions: {
hide: {
'@version': [1],
},
{
name: 'Claude Instant 1.2',
value: 'claude-instant-1.2',
},
{
name: 'Claude Instant 1',
value: 'claude-instant-1',
},
],
description:
'The model which will generate the completion. <a href="https://docs.anthropic.com/claude/reference/selecting-a-model">Learn more</a>.',
default: 'claude-2',
},
},
{
displayName: 'Options',
@@ -86,7 +115,7 @@ export class LmChatAnthropic implements INodeType {
{
displayName: 'Maximum Number of Tokens',
name: 'maxTokensToSample',
default: 32768,
default: 4096,
description: 'The maximum number of tokens to generate in the completion',
type: 'number',
},
@@ -126,12 +155,20 @@ export class LmChatAnthropic implements INodeType {
const credentials = await this.getCredentials('anthropicApi');
const modelName = this.getNodeParameter('model', itemIndex) as string;
const options = this.getNodeParameter('options', itemIndex, {}) as object;
const options = this.getNodeParameter('options', itemIndex, {}) as {
maxTokensToSample?: number;
temperature: number;
topK: number;
topP: number;
};
const model = new ChatAnthropic({
anthropicApiKey: credentials.apiKey as string,
modelName,
...options,
maxTokens: options.maxTokensToSample,
temperature: options.temperature,
topK: options.topK,
topP: options.topP,
});
return {

View File

@@ -7,8 +7,7 @@ import {
type SupplyData,
} from 'n8n-workflow';
import { ChatOllama } from 'langchain/chat_models/ollama';
// import { ChatAnthropic } from 'langchain/chat_models/anthropic';
import { ChatOllama } from '@langchain/community/chat_models/ollama';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { ollamaModel, ollamaOptions, ollamaDescription } from '../LMOllama/description';

View File

@@ -7,8 +7,7 @@ import {
type SupplyData,
} from 'n8n-workflow';
import type { ClientOptions } from 'openai';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -7,7 +7,7 @@ import {
type SupplyData,
} from 'n8n-workflow';
import { Cohere } from 'langchain/llms/cohere';
import { Cohere } from '@langchain/cohere';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -7,7 +7,7 @@ import {
type SupplyData,
} from 'n8n-workflow';
import { Ollama } from 'langchain/llms/ollama';
import { Ollama } from '@langchain/community/llms/ollama';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { ollamaDescription, ollamaModel, ollamaOptions } from './description';

View File

@@ -8,8 +8,7 @@ import type {
ILoadOptionsFunctions,
} from 'n8n-workflow';
import type { ClientOptions } from 'openai';
import { OpenAI } from 'langchain/llms/openai';
import { OpenAI, type ClientOptions } from '@langchain/openai';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -7,7 +7,7 @@ import {
type SupplyData,
} from 'n8n-workflow';
import { HuggingFaceInference } from 'langchain/llms/hf';
import { HuggingFaceInference } from '@langchain/community/llms/hf';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { ChatBedrock } from 'langchain/chat_models/bedrock';
import { BedrockChat } from '@langchain/community/chat_models/bedrock';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
// Dependencies needed under the hood. We add them
@@ -149,7 +149,7 @@ export class LmChatAwsBedrock implements INodeType {
maxTokensToSample: number;
};
const model = new ChatBedrock({
const model = new BedrockChat({
region: credentials.region as string,
model: modelName,
temperature: options.temperature,

View File

@@ -7,8 +7,8 @@ import {
type SupplyData,
} from 'n8n-workflow';
import type { ClientOptions } from 'openai';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import type { ClientOptions } from '@langchain/openai';
import { ChatOpenAI } from '@langchain/openai';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { ChatGooglePaLM } from 'langchain/chat_models/googlepalm';
import { ChatGooglePaLM } from '@langchain/community/chat_models/googlepalm';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { GooglePaLM } from 'langchain/llms/googlepalm';
import { GooglePaLM } from '@langchain/community/llms/googlepalm';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -7,8 +7,8 @@ import {
type INodeType,
type INodeTypeDescription,
} from 'n8n-workflow';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseMessage } from 'langchain/schema';
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import type { BaseMessage } from '@langchain/core/messages';
function simplifyMessages(messages: BaseMessage[]) {
const chunkedMessages = [];

View File

@@ -7,8 +7,8 @@ import type {
INodeType,
INodeTypeDescription,
} from 'n8n-workflow';
import type { BaseChatMemory } from 'langchain/memory';
import { AIMessage, SystemMessage, HumanMessage, type BaseMessage } from 'langchain/schema';
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import { AIMessage, SystemMessage, HumanMessage, type BaseMessage } from '@langchain/core/messages';
type MessageRole = 'ai' | 'system' | 'user';
interface MessageRecord {

View File

@@ -7,7 +7,7 @@ import {
type SupplyData,
} from 'n8n-workflow';
import { MotorheadMemory } from 'langchain/memory';
import { MotorheadMemory } from '@langchain/community/memory/motorhead_memory';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { sessionIdOption, sessionKeyProperty } from '../descriptions';

View File

@@ -8,8 +8,8 @@ import {
NodeConnectionType,
} from 'n8n-workflow';
import { BufferMemory } from 'langchain/memory';
import type { RedisChatMessageHistoryInput } from 'langchain/stores/message/redis';
import { RedisChatMessageHistory } from 'langchain/stores/message/redis';
import type { RedisChatMessageHistoryInput } from '@langchain/redis';
import { RedisChatMessageHistory } from '@langchain/redis';
import type { RedisClientOptions } from 'redis';
import { createClient } from 'redis';
import { logWrapper } from '../../../utils/logWrapper';

View File

@@ -1,7 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
import { XataChatMessageHistory } from 'langchain/stores/message/xata';
import { XataChatMessageHistory } from '@langchain/community/stores/message/xata';
import { BufferMemory } from 'langchain/memory';
import { BaseClient } from '@xata.io/client';
import { logWrapper } from '../../../utils/logWrapper';

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { ZepMemory } from 'langchain/memory/zep';
import { ZepMemory } from '@langchain/community/memory/zep';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { sessionIdOption, sessionKeyProperty } from '../descriptions';

View File

@@ -7,8 +7,8 @@ import {
type SupplyData,
} from 'n8n-workflow';
import { OutputFixingParser } from 'langchain/output_parsers';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import type { BaseLanguageModel } from 'langchain/base_language';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -1,4 +1,4 @@
import { BaseOutputParser, OutputParserException } from 'langchain/schema/output_parser';
import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers';
export class ItemListOutputParser extends BaseOutputParser<string[]> {
lc_namespace = ['n8n-nodes-langchain', 'output_parsers', 'list_items'];

View File

@@ -13,7 +13,7 @@ import { parseSchema } from 'json-schema-to-zod';
import { z } from 'zod';
import type { JSONSchema7 } from 'json-schema';
import { StructuredOutputParser } from 'langchain/output_parsers';
import { OutputParserException } from 'langchain/schema/output_parser';
import { OutputParserException } from '@langchain/core/output_parsers';
import get from 'lodash/get';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -9,8 +9,8 @@ import {
import { ContextualCompressionRetriever } from 'langchain/retrievers/contextual_compression';
import { LLMChainExtractor } from 'langchain/retrievers/document_compressors/chain_extract';
import type { BaseLanguageModel } from 'langchain/base_language';
import type { BaseRetriever } from 'langchain/schema/retriever';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { BaseRetriever } from '@langchain/core/retrievers';
import { logWrapper } from '../../../utils/logWrapper';

View File

@@ -8,8 +8,8 @@ import {
} from 'n8n-workflow';
import { MultiQueryRetriever } from 'langchain/retrievers/multi_query';
import type { BaseLanguageModel } from 'langchain/base_language';
import type { BaseRetriever } from 'langchain/schema/retriever';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { BaseRetriever } from '@langchain/core/retrievers';
import { logWrapper } from '../../../utils/logWrapper';

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import type { VectorStore } from 'langchain/vectorstores/base';
import type { VectorStore } from '@langchain/core/vectorstores';
import { logWrapper } from '../../../utils/logWrapper';
export class RetrieverVectorStore implements INodeType {

View File

@@ -11,8 +11,8 @@ import type {
SupplyData,
} from 'n8n-workflow';
import { BaseRetriever, type BaseRetrieverInput } from 'langchain/schema/retriever';
import { Document } from 'langchain/document';
import { BaseRetriever, type BaseRetrieverInput } from '@langchain/core/retrievers';
import { Document } from '@langchain/core/documents';
import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';

View File

@@ -6,11 +6,31 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import type { RecursiveCharacterTextSplitterParams } from 'langchain/text_splitter';
import type {
RecursiveCharacterTextSplitterParams,
SupportedTextSplitterLanguage,
} from 'langchain/text_splitter';
import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
const supportedLanguages: SupportedTextSplitterLanguage[] = [
'cpp',
'go',
'java',
'js',
'php',
'proto',
'python',
'rst',
'ruby',
'rust',
'scala',
'swift',
'markdown',
'latex',
'html',
];
export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
description: INodeTypeDescription = {
displayName: 'Recursive Character Text Splitter',
@@ -54,6 +74,23 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
type: 'number',
default: 0,
},
{
displayName: 'Options',
name: 'options',
placeholder: 'Add Option',
description: 'Additional options to add',
type: 'collection',
default: {},
options: [
{
displayName: 'Split Code',
name: 'splitCode',
default: 'markdown',
type: 'options',
options: supportedLanguages.map((lang) => ({ name: lang, value: lang })),
},
],
},
],
};
@@ -62,7 +99,11 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number;
const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex) as number;
const splitCode = this.getNodeParameter(
'options.splitCode',
itemIndex,
null,
) as SupportedTextSplitterLanguage | null;
const params: RecursiveCharacterTextSplitterParams = {
// TODO: These are the default values, should we allow the user to change them?
separators: ['\n\n', '\n', ' ', ''],
@@ -70,8 +111,13 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
chunkOverlap,
keepSeparator: false,
};
let splitter: RecursiveCharacterTextSplitter;
const splitter = new RecursiveCharacterTextSplitter(params);
if (splitCode && supportedLanguages.includes(splitCode)) {
splitter = RecursiveCharacterTextSplitter.fromLanguage(splitCode, params);
} else {
splitter = new RecursiveCharacterTextSplitter(params);
}
return {
response: logWrapper(splitter, this),

View File

@@ -12,7 +12,7 @@ import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox';
import { PythonSandbox } from 'n8n-nodes-base/dist/nodes/Code/PythonSandbox';
import { DynamicTool } from 'langchain/tools';
import { DynamicTool } from '@langchain/core/tools';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
export class ToolCode implements INodeType {

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { SerpAPI } from 'langchain/tools';
import { SerpAPI } from '@langchain/community/tools/serpapi';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { WikipediaQueryRun } from 'langchain/tools';
import { WikipediaQueryRun } from '@langchain/community/tools/wikipedia_query_run';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -6,7 +6,7 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { WolframAlphaTool } from 'langchain/tools';
import { WolframAlphaTool } from '@langchain/community/tools/wolframalpha';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -13,7 +13,7 @@ import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';
import { DynamicTool } from 'langchain/tools';
import { DynamicTool } from '@langchain/core/tools';
import get from 'lodash/get';
import isObject from 'lodash/isObject';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

View File

@@ -7,7 +7,7 @@ import {
NodeConnectionType,
} from 'n8n-workflow';
import { pick } from 'lodash';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import { createPage } from './templates';
import { validateAuth } from './GenericFunctions';
import type { LoadPreviousSessionChatOption } from './types';

View File

@@ -7,7 +7,7 @@ import {
type INodeTypeDescription,
} from 'n8n-workflow';
import type { Document } from 'langchain/document';
import type { Embeddings } from 'langchain/embeddings/base';
import type { Embeddings } from '@langchain/core/embeddings';
import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
import { processDocuments } from '../shared/processDocuments';
import { MemoryVectorStoreManager } from '../shared/MemoryVectorStoreManager';

View File

@@ -6,7 +6,7 @@ import {
type INodeType,
type INodeTypeDescription,
} from 'n8n-workflow';
import type { Embeddings } from 'langchain/embeddings/base';
import type { Embeddings } from '@langchain/core/embeddings';
import { MemoryVectorStoreManager } from '../shared/MemoryVectorStoreManager';
import { logWrapper } from '../../../utils/logWrapper';

View File

@@ -1,6 +1,6 @@
import { NodeOperationError, type INodeProperties } from 'n8n-workflow';
import type { PineconeLibArgs } from 'langchain/vectorstores/pinecone';
import { PineconeStore } from 'langchain/vectorstores/pinecone';
import type { PineconeStoreParams } from '@langchain/pinecone';
import { PineconeStore } from '@langchain/pinecone';
import { Pinecone } from '@pinecone-database/pinecone';
import { createVectorStoreNode } from '../shared/createVectorStoreNode';
import { metadataFilterField } from '../../../utils/sharedFields';
@@ -87,11 +87,10 @@ export const VectorStorePinecone = createVectorStoreNode({
const client = new Pinecone({
apiKey: credentials.apiKey as string,
environment: credentials.environment as string,
});
const pineconeIndex = client.Index(index);
const config: PineconeLibArgs = {
const config: PineconeStoreParams = {
namespace: options.pineconeNamespace ?? undefined,
pineconeIndex,
filter,
@@ -111,10 +110,9 @@ export const VectorStorePinecone = createVectorStoreNode({
const client = new Pinecone({
apiKey: credentials.apiKey as string,
environment: credentials.environment as string,
});
const indexes = (await client.listIndexes()).map((i) => i.name);
const indexes = ((await client.listIndexes()).indexes ?? []).map((i) => i.name);
if (!indexes.includes(index)) {
throw new NodeOperationError(context.getNode(), `Index ${index} not found`, {

View File

@@ -5,10 +5,11 @@ import {
type INodeExecutionData,
NodeConnectionType,
} from 'n8n-workflow';
import { PineconeStore } from 'langchain/vectorstores/pinecone';
import type { Embeddings } from '@langchain/core/embeddings';
import type { Document } from '@langchain/core/documents';
import { PineconeStore } from '@langchain/pinecone';
import { Pinecone } from '@pinecone-database/pinecone';
import type { Embeddings } from 'langchain/embeddings/base';
import type { Document } from 'langchain/document';
import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
import { processDocuments } from '../shared/processDocuments';
import { pineconeIndexRLC } from '../shared/descriptions';
@@ -115,7 +116,6 @@ export class VectorStorePineconeInsert implements INodeType {
const client = new Pinecone({
apiKey: credentials.apiKey as string,
environment: credentials.environment as string,
});
const pineconeIndex = client.Index(index);

View File

@@ -5,10 +5,11 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import type { PineconeLibArgs } from 'langchain/vectorstores/pinecone';
import { PineconeStore } from 'langchain/vectorstores/pinecone';
import type { PineconeStoreParams } from '@langchain/pinecone';
import { PineconeStore } from '@langchain/pinecone';
import { Pinecone } from '@pinecone-database/pinecone';
import type { Embeddings } from 'langchain/embeddings/base';
import type { Embeddings } from '@langchain/core/embeddings';
import { logWrapper } from '../../../utils/logWrapper';
import { metadataFilterField } from '../../../utils/sharedFields';
import { getMetadataFiltersValues } from '../../../utils/helpers';
@@ -99,11 +100,10 @@ export class VectorStorePineconeLoad implements INodeType {
const client = new Pinecone({
apiKey: credentials.apiKey as string,
environment: credentials.environment as string,
});
const pineconeIndex = client.Index(index);
const config: PineconeLibArgs = {
const config: PineconeStoreParams = {
namespace: namespace || undefined,
pineconeIndex,
filter: getMetadataFiltersValues(this, itemIndex),

View File

@@ -1,6 +1,6 @@
import { type INodeProperties } from 'n8n-workflow';
import type { QdrantLibArgs } from 'langchain/vectorstores/qdrant';
import { QdrantVectorStore } from 'langchain/vectorstores/qdrant';
import type { QdrantLibArgs } from '@langchain/community/vectorstores/qdrant';
import { QdrantVectorStore } from '@langchain/community/vectorstores/qdrant';
import type { Schemas as QdrantSchemas } from '@qdrant/js-client-rest';
import { createVectorStoreNode } from '../shared/createVectorStoreNode';
import { qdrantCollectionRLC } from '../shared/descriptions';

View File

@@ -1,6 +1,6 @@
import { NodeOperationError, type INodeProperties } from 'n8n-workflow';
import { createClient } from '@supabase/supabase-js';
import { SupabaseVectorStore } from 'langchain/vectorstores/supabase';
import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase';
import { createVectorStoreNode } from '../shared/createVectorStoreNode';
import { metadataFilterField } from '../../../utils/sharedFields';
import { supabaseTableNameRLC } from '../shared/descriptions';

View File

@@ -5,10 +5,10 @@ import {
type INodeExecutionData,
NodeConnectionType,
} from 'n8n-workflow';
import type { Embeddings } from 'langchain/embeddings/base';
import type { Document } from 'langchain/document';
import type { Embeddings } from '@langchain/core/embeddings';
import type { Document } from '@langchain/core/documents';
import { createClient } from '@supabase/supabase-js';
import { SupabaseVectorStore } from 'langchain/vectorstores/supabase';
import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase';
import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
import { processDocuments } from '../shared/processDocuments';

View File

@@ -5,10 +5,10 @@ import {
type SupplyData,
NodeConnectionType,
} from 'n8n-workflow';
import type { Embeddings } from 'langchain/embeddings/base';
import type { Embeddings } from '@langchain/core/embeddings';
import { createClient } from '@supabase/supabase-js';
import type { SupabaseLibArgs } from 'langchain/vectorstores/supabase';
import { SupabaseVectorStore } from 'langchain/vectorstores/supabase';
import type { SupabaseLibArgs } from '@langchain/community/vectorstores/supabase';
import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase';
import { logWrapper } from '../../../utils/logWrapper';
import { metadataFilterField } from '../../../utils/sharedFields';
import { getMetadataFiltersValues } from '../../../utils/helpers';

View File

@@ -1,7 +1,7 @@
import type { IDataObject, INodeProperties } from 'n8n-workflow';
import { NodeOperationError } from 'n8n-workflow';
import type { IZepConfig } from 'langchain/vectorstores/zep';
import { ZepVectorStore } from 'langchain/vectorstores/zep';
import type { IZepConfig } from '@langchain/community/vectorstores/zep';
import { ZepVectorStore } from '@langchain/community/vectorstores/zep';
import { createVectorStoreNode } from '../shared/createVectorStoreNode';
import { metadataFilterField } from '../../../utils/sharedFields';

View File

@@ -5,9 +5,9 @@ import {
type INodeExecutionData,
NodeConnectionType,
} from 'n8n-workflow';
import { ZepVectorStore } from 'langchain/vectorstores/zep';
import type { Embeddings } from 'langchain/embeddings/base';
import type { Document } from 'langchain/document';
import { ZepVectorStore } from '@langchain/community/vectorstores/zep';
import type { Embeddings } from '@langchain/core/embeddings';
import type { Document } from '@langchain/core/documents';
import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
import { processDocuments } from '../shared/processDocuments';

View File

@@ -5,9 +5,9 @@ import {
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import type { IZepConfig } from 'langchain/vectorstores/zep';
import { ZepVectorStore } from 'langchain/vectorstores/zep';
import type { Embeddings } from 'langchain/embeddings/base';
import type { IZepConfig } from '@langchain/community/vectorstores/zep';
import { ZepVectorStore } from '@langchain/community/vectorstores/zep';
import type { Embeddings } from '@langchain/core/embeddings';
import { metadataFilterField } from '../../../utils/sharedFields';
import { getMetadataFiltersValues } from '../../../utils/helpers';
import { logWrapper } from '../../../utils/logWrapper';

View File

@@ -1,5 +1,5 @@
import type { Document } from 'langchain/document';
import type { Embeddings } from 'langchain/embeddings/base';
import type { Document } from '@langchain/core/documents';
import type { Embeddings } from '@langchain/core/embeddings';
import { MemoryVectorStore } from 'langchain/vectorstores/memory';
export class MemoryVectorStoreManager {

View File

@@ -1,6 +1,6 @@
/* eslint-disable n8n-nodes-base/node-filename-against-convention */
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import type { VectorStore } from 'langchain/vectorstores/base';
import type { VectorStore } from '@langchain/core/vectorstores';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
INodeCredentialDescription,
@@ -13,8 +13,8 @@ import type {
ILoadOptionsFunctions,
INodeListSearchResult,
} from 'n8n-workflow';
import type { Embeddings } from 'langchain/embeddings/base';
import type { Document } from 'langchain/document';
import type { Embeddings } from '@langchain/core/embeddings';
import type { Document } from '@langchain/core/documents';
import { logWrapper } from '../../../utils/logWrapper';
import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
import type { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';

View File

@@ -7,12 +7,11 @@ export async function pineconeIndexSearch(this: ILoadOptionsFunctions) {
const client = new Pinecone({
apiKey: credentials.apiKey as string,
environment: credentials.environment as string,
});
const indexes = await client.listIndexes();
const results = indexes.map((index) => ({
const results = (indexes.indexes ?? []).map((index) => ({
name: index.name,
value: index.name,
}));

View File

@@ -1,4 +1,4 @@
import type { Document } from 'langchain/document';
import type { Document } from '@langchain/core/documents';
import type { INodeExecutionData } from 'n8n-workflow';
import { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';

View File

@@ -170,7 +170,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
let after: string | undefined;
do {
const response = await apiRequest.call(this, 'GET', '/assistants', {
const response = (await apiRequest.call(this, 'GET', '/assistants', {
headers: {
'OpenAI-Beta': 'assistants=v1',
},
@@ -178,16 +178,16 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
limit: 100,
after,
},
});
})) as { data: IDataObject[]; has_more: boolean; last_id: string };
for (const assistant of response.data || []) {
assistants.push(assistant.name);
assistants.push(assistant.name as string);
}
has_more = response.has_more;
if (has_more) {
after = response.last_id as string;
after = response.last_id;
} else {
break;
}

View File

@@ -1,6 +1,6 @@
import { zodToJsonSchema } from 'zod-to-json-schema';
import type { OpenAI as OpenAIClient } from 'openai';
import type { StructuredTool } from 'langchain/tools';
import type { OpenAIClient } from '@langchain/openai';
import type { StructuredTool } from '@langchain/core/tools';
// Copied from langchain(`langchain/src/tools/convert_to_openai.ts`)
// since these functions are not exported