feat: Add AI tool building capabilities (#7336)

Github issue / Community forum post (link here to close automatically):
https://community.n8n.io/t/langchain-memory-chat/23733

---------

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Val <68596159+valya@users.noreply.github.com>
Co-authored-by: Alex Grozav <alex@grozav.com>
Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
Co-authored-by: Deborah <deborah@starfallprojects.co.uk>
Co-authored-by: Jesper Bylund <mail@jesperbylund.com>
Co-authored-by: Jon <jonathan.bennetts@gmail.com>
Co-authored-by: Michael Kret <88898367+michael-radency@users.noreply.github.com>
Co-authored-by: Giulio Andreini <andreini@netseven.it>
Co-authored-by: Mason Geloso <Mason.geloso@gmail.com>
Co-authored-by: Mason Geloso <hone@Masons-Mac-mini.local>
Co-authored-by: Mutasem Aldmour <mutasem@n8n.io>
Jan Oberhauser, 2023-11-29 12:13:55 +01:00 (committed by GitHub)
parent dbfd617ace
commit 87def60979
243 changed files with 21526 additions and 321 deletions


@@ -0,0 +1,82 @@
// Modified version of https://github.com/hwchase17/langchainjs/blob/main/langchain/src/document_loaders/fs/epub.ts
// to support loading of EPUB files from a Buffer
import { parseEpub } from '@gxl/epub-parser';
import { BaseDocumentLoader } from 'langchain/document_loaders/base';
import { Document } from 'langchain/document';
import { htmlToText } from 'html-to-text';
/**
* A class that extends the `BaseDocumentLoader` class. It represents a
* document loader that loads documents from EPUB files.
*/
export class N8nEPubLoader extends BaseDocumentLoader {
private splitChapters: boolean;
constructor(
public file: Buffer,
{ splitChapters = true } = {},
) {
super();
this.splitChapters = splitChapters;
}
/**
* A protected method that takes an EPUB object as a parameter and returns
* a promise that resolves to an array of objects representing the content
* and metadata of each chapter.
* @param epub The EPUB object to parse.
* @returns A promise that resolves to an array of objects representing the content and metadata of each chapter.
*/
protected async parse(
epub: ReturnType<typeof parseEpub>,
): Promise<Array<{ pageContent: string; metadata?: object }>> {
// We await it here because @gxl/epub-parser doesn't export a type for the
// return value of parseEpub.
const parsed = await epub;
const chapters = await Promise.all(
(parsed.sections ?? []).map(async (chapter) => {
if (!chapter.id) return null as never;
const html = chapter.htmlString;
if (!html) return null as never;
return {
html,
title: chapter.id,
};
}),
);
return chapters.filter(Boolean).map((chapter) => ({
pageContent: htmlToText(chapter.html),
metadata: {
...(chapter.title && { chapter: chapter.title }),
},
}));
}
/**
* A method that loads the EPUB file and returns a promise that resolves
* to an array of `Document` instances.
* @returns A promise that resolves to an array of `Document` instances.
*/
public async load(): Promise<Document[]> {
const epub = parseEpub(this.file, { type: 'buffer' });
const parsed = await this.parse(epub);
return this.splitChapters
? parsed.map(
(chapter) =>
new Document({
pageContent: chapter.pageContent,
metadata: {
...chapter.metadata,
},
}),
)
: [
new Document({
pageContent: parsed.map((chapter) => chapter.pageContent).join('\n\n'),
}),
];
}
}
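As a usage sketch (not part of this diff): the loader takes a Buffer and yields one Document per chapter when splitChapters is true. The file path below is illustrative.

import { readFile } from 'fs/promises';
import { N8nEPubLoader } from './EpubLoader';

async function loadBook() {
	// Read an EPUB into memory and hand the Buffer to the loader.
	const file = await readFile('/tmp/book.epub'); // illustrative path
	const loader = new N8nEPubLoader(file, { splitChapters: true });
	const documents = await loader.load();
	// With splitChapters enabled, each Document carries its chapter id in metadata.chapter.
	return documents;
}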


@@ -0,0 +1,168 @@
import type { IExecuteFunctions, INodeExecutionData, IBinaryData } from 'n8n-workflow';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import type { TextSplitter } from 'langchain/text_splitter';
import type { Document } from 'langchain/document';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { N8nEPubLoader } from './EpubLoader';
import { getMetadataFiltersValues } from './helpers';
const SUPPORTED_MIME_TYPES = {
auto: ['*/*'],
pdfLoader: ['application/pdf'],
csvLoader: ['text/csv'],
epubLoader: ['application/epub+zip'],
docxLoader: ['application/vnd.openxmlformats-officedocument.wordprocessingml.document'],
textLoader: ['text/plain', 'text/mdx', 'text/md'],
jsonLoader: ['application/json'],
};
export class N8nBinaryLoader {
private context: IExecuteFunctions;
private optionsPrefix: string;
constructor(context: IExecuteFunctions, optionsPrefix = '') {
this.context = context;
this.optionsPrefix = optionsPrefix;
}
async processAll(items?: INodeExecutionData[]): Promise<Document[]> {
const docs: Document[] = [];
if (!items) return [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
const processedDocuments = await this.processItem(items[itemIndex], itemIndex);
docs.push(...processedDocuments);
}
return docs;
}
async processItem(item: INodeExecutionData, itemIndex: number): Promise<Document[]> {
const selectedLoader: keyof typeof SUPPORTED_MIME_TYPES = this.context.getNodeParameter(
'loader',
itemIndex,
) as keyof typeof SUPPORTED_MIME_TYPES;
const binaryDataKey = this.context.getNodeParameter('binaryDataKey', itemIndex) as string;
const docs: Document[] = [];
const metadata = getMetadataFiltersValues(this.context, itemIndex);
if (!item) return [];
// TODO: Should we support traversing the object to find the binary data?
const binaryData = item.binary?.[binaryDataKey] as IBinaryData;
if (!binaryData) {
throw new NodeOperationError(this.context.getNode(), 'No binary data set.');
}
const { mimeType } = binaryData;
// Check if loader matches the mime-type of the data
if (selectedLoader !== 'auto' && !SUPPORTED_MIME_TYPES[selectedLoader].includes(mimeType)) {
const neededLoader = Object.keys(SUPPORTED_MIME_TYPES).find((loader) =>
SUPPORTED_MIME_TYPES[loader as keyof typeof SUPPORTED_MIME_TYPES].includes(mimeType),
);
throw new NodeOperationError(
this.context.getNode(),
`Mime type doesn't match selected loader. Please select under "Loader Type": ${neededLoader}`,
);
}
if (!Object.values(SUPPORTED_MIME_TYPES).flat().includes(mimeType)) {
throw new NodeOperationError(this.context.getNode(), `Unsupported mime type: ${mimeType}`);
}
if (
!SUPPORTED_MIME_TYPES[selectedLoader].includes(mimeType) &&
selectedLoader !== 'textLoader' &&
selectedLoader !== 'auto'
) {
throw new NodeOperationError(
this.context.getNode(),
`Unsupported mime type: ${mimeType} for selected loader: ${selectedLoader}`,
);
}
const bufferData = await this.context.helpers.getBinaryDataBuffer(itemIndex, binaryDataKey);
const itemBlob = new Blob([new Uint8Array(bufferData)], { type: mimeType });
let loader: PDFLoader | CSVLoader | N8nEPubLoader | DocxLoader | TextLoader | JSONLoader;
switch (mimeType) {
case 'application/pdf':
const splitPages = this.context.getNodeParameter(
`${this.optionsPrefix}splitPages`,
itemIndex,
false,
) as boolean;
loader = new PDFLoader(itemBlob, {
splitPages,
});
break;
case 'text/csv':
const column = this.context.getNodeParameter(
`${this.optionsPrefix}column`,
itemIndex,
null,
) as string;
const separator = this.context.getNodeParameter(
`${this.optionsPrefix}separator`,
itemIndex,
',',
) as string;
loader = new CSVLoader(itemBlob, {
column: column ?? undefined,
separator,
});
break;
case 'application/epub+zip':
loader = new N8nEPubLoader(Buffer.from(bufferData));
break;
case 'application/vnd.openxmlformats-officedocument.wordprocessingml.document':
loader = new DocxLoader(itemBlob);
break;
case 'text/plain':
loader = new TextLoader(itemBlob);
break;
case 'application/json':
const pointers = this.context.getNodeParameter(
`${this.optionsPrefix}pointers`,
itemIndex,
'',
) as string;
const pointersArray = pointers.split(',').map((pointer) => pointer.trim());
loader = new JSONLoader(itemBlob, pointersArray);
break;
default:
loader = new TextLoader(itemBlob);
}
const textSplitter = (await this.context.getInputConnectionData(
NodeConnectionType.AiTextSplitter,
0,
)) as TextSplitter | undefined;
const loadedDoc = textSplitter ? await loader.loadAndSplit(textSplitter) : await loader.load();
docs.push(...loadedDoc);
if (metadata) {
docs.forEach((document) => {
document.metadata = {
...document.metadata,
...metadata,
};
});
}
return docs;
}
}
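A rough sketch, not part of this diff, of how a node could drive this loader from its execute context; the helper function is hypothetical, but the constructor and processAll calls match the class above.

import type { IExecuteFunctions } from 'n8n-workflow';
import type { Document } from 'langchain/document';
import { N8nBinaryLoader } from './N8nBinaryLoader';

// Hypothetical helper, meant to run inside a node's execute(), where `context`
// is the IExecuteFunctions instance and the node exposes the `loader` and
// `binaryDataKey` parameters that processItem() reads.
async function loadBinaryDocuments(context: IExecuteFunctions): Promise<Document[]> {
	const loader = new N8nBinaryLoader(context);
	// Picks a LangChain loader per item based on its MIME type and, if an
	// AiTextSplitter sub-node is connected, splits the text before returning.
	return loader.processAll(context.getInputData());
}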


@@ -0,0 +1,98 @@
import {
type IExecuteFunctions,
type INodeExecutionData,
NodeConnectionType,
NodeOperationError,
} from 'n8n-workflow';
import type { CharacterTextSplitter } from 'langchain/text_splitter';
import type { Document } from 'langchain/document';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { getMetadataFiltersValues } from './helpers';
export class N8nJsonLoader {
private context: IExecuteFunctions;
private optionsPrefix: string;
constructor(context: IExecuteFunctions, optionsPrefix = '') {
this.context = context;
this.optionsPrefix = optionsPrefix;
}
async processAll(items?: INodeExecutionData[]): Promise<Document[]> {
const docs: Document[] = [];
if (!items) return [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
const processedDocuments = await this.processItem(items[itemIndex], itemIndex);
docs.push(...processedDocuments);
}
return docs;
}
async processItem(item: INodeExecutionData, itemIndex: number): Promise<Document[]> {
const mode = this.context.getNodeParameter('jsonMode', itemIndex, 'allInputData') as
| 'allInputData'
| 'expressionData';
const pointers = this.context.getNodeParameter(
`${this.optionsPrefix}pointers`,
itemIndex,
'',
) as string;
const pointersArray = pointers.split(',').map((pointer) => pointer.trim());
const textSplitter = (await this.context.getInputConnectionData(
NodeConnectionType.AiTextSplitter,
0,
)) as CharacterTextSplitter | undefined;
const metadata = getMetadataFiltersValues(this.context, itemIndex) ?? [];
if (!item) return [];
let documentLoader: JSONLoader | TextLoader | null = null;
if (mode === 'allInputData') {
const itemString = JSON.stringify(item.json);
const itemBlob = new Blob([itemString], { type: 'application/json' });
documentLoader = new JSONLoader(itemBlob, pointersArray);
}
if (mode === 'expressionData') {
const dataString = this.context.getNodeParameter('jsonData', itemIndex) as string | object;
if (typeof dataString === 'object') {
const itemBlob = new Blob([JSON.stringify(dataString)], { type: 'application/json' });
documentLoader = new JSONLoader(itemBlob, pointersArray);
}
if (typeof dataString === 'string') {
const itemBlob = new Blob([dataString], { type: 'text/plain' });
documentLoader = new TextLoader(itemBlob);
}
}
if (documentLoader === null) {
// This should never happen
throw new NodeOperationError(this.context.getNode(), 'Document loader is not initialized');
}
const docs = textSplitter
? await documentLoader.loadAndSplit(textSplitter)
: await documentLoader.load();
if (metadata) {
docs.forEach((doc) => {
doc.metadata = {
...doc.metadata,
...metadata,
};
});
}
return docs;
}
}
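A similar sketch for the JSON loader (again, not part of this diff); the helper name is hypothetical, and the jsonMode, jsonData, and pointers parameters are the ones read above.

import type { IExecuteFunctions } from 'n8n-workflow';
import type { Document } from 'langchain/document';
import { N8nJsonLoader } from './N8nJsonLoader';

// Hypothetical helper: turn every incoming item's JSON (or the jsonData
// expression, depending on jsonMode) into LangChain Documents.
async function loadJsonDocuments(context: IExecuteFunctions): Promise<Document[]> {
	const loader = new N8nJsonLoader(context);
	return loader.processAll(context.getInputData());
}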


@@ -0,0 +1,16 @@
import type { IExecuteFunctions } from 'n8n-workflow';
export function getMetadataFiltersValues(
ctx: IExecuteFunctions,
itemIndex: number,
): Record<string, string> | undefined {
const metadata = ctx.getNodeParameter('options.metadata.metadataValues', itemIndex, []) as Array<{
name: string;
value: string;
}>;
if (metadata.length > 0) {
return metadata.reduce((acc, { name, value }) => ({ ...acc, [name]: value }), {});
}
return undefined;
}
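To show what this helper produces, a hedged sketch mirroring how the loaders above merge the result into document metadata; applyMetadata and the example values are illustrative.

import type { IExecuteFunctions } from 'n8n-workflow';
import type { Document } from 'langchain/document';
import { getMetadataFiltersValues } from './helpers';

// Sketch: merge user-defined metadata filters into loaded documents.
// E.g. metadataValues [{ name: 'source', value: 'crm' }] folds into { source: 'crm' }.
function applyMetadata(ctx: IExecuteFunctions, itemIndex: number, docs: Document[]): void {
	const metadata = getMetadataFiltersValues(ctx, itemIndex);
	if (!metadata) return; // an empty collection yields undefined, nothing to merge
	for (const doc of docs) {
		doc.metadata = { ...doc.metadata, ...metadata };
	}
}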


@@ -0,0 +1,500 @@
import {
NodeOperationError,
type ConnectionTypes,
type IExecuteFunctions,
type INodeExecutionData,
NodeConnectionType,
} from 'n8n-workflow';
import { Tool } from 'langchain/tools';
import type { BaseMessage, ChatResult, InputValues } from 'langchain/schema';
import { BaseChatMessageHistory } from 'langchain/schema';
import { BaseChatModel } from 'langchain/chat_models/base';
import type { CallbackManagerForLLMRun } from 'langchain/callbacks';
import { Embeddings } from 'langchain/embeddings/base';
import { VectorStore } from 'langchain/vectorstores/base';
import type { Document } from 'langchain/document';
import { TextSplitter } from 'langchain/text_splitter';
import type { BaseDocumentLoader } from 'langchain/document_loaders/base';
import type { BaseCallbackConfig, Callbacks } from 'langchain/dist/callbacks/manager';
import { BaseLLM } from 'langchain/llms/base';
import { BaseChatMemory } from 'langchain/memory';
import type { MemoryVariables } from 'langchain/dist/memory/base';
import { BaseRetriever } from 'langchain/schema/retriever';
import type { FormatInstructionsOptions } from 'langchain/schema/output_parser';
import { BaseOutputParser } from 'langchain/schema/output_parser';
import { isObject } from 'lodash';
import { N8nJsonLoader } from './N8nJsonLoader';
import { N8nBinaryLoader } from './N8nBinaryLoader';
const errorsMap: { [key: string]: { message: string; description: string } } = {
'You exceeded your current quota, please check your plan and billing details.': {
message: 'OpenAI quota exceeded',
description: 'You exceeded your current quota, please check your plan and billing details.',
},
};
export async function callMethodAsync<T>(
this: T,
parameters: {
executeFunctions: IExecuteFunctions;
connectionType: ConnectionTypes;
currentNodeRunIndex: number;
method: (...args: any[]) => Promise<unknown>;
arguments: unknown[];
},
): Promise<unknown> {
try {
return await parameters.method.call(this, ...parameters.arguments);
} catch (e) {
const connectedNode = parameters.executeFunctions.getNode();
const error = new NodeOperationError(connectedNode, e, {
functionality: 'configuration-node',
});
if (errorsMap[error.message]) {
error.description = errorsMap[error.message].description;
error.message = errorsMap[error.message].message;
}
parameters.executeFunctions.addOutputData(
parameters.connectionType,
parameters.currentNodeRunIndex,
error,
);
if (error.message) {
error.description = error.message;
throw error;
}
throw new NodeOperationError(
connectedNode,
`Error on node "${connectedNode.name}" which is connected via input "${parameters.connectionType}"`,
{ functionality: 'configuration-node' },
);
}
}
export function callMethodSync<T>(
this: T,
parameters: {
executeFunctions: IExecuteFunctions;
connectionType: ConnectionTypes;
currentNodeRunIndex: number;
method: (...args: any[]) => T;
arguments: unknown[];
},
): unknown {
try {
return parameters.method.call(this, ...parameters.arguments);
} catch (e) {
const connectedNode = parameters.executeFunctions.getNode();
const error = new NodeOperationError(connectedNode, e);
parameters.executeFunctions.addOutputData(
parameters.connectionType,
parameters.currentNodeRunIndex,
error,
);
throw new NodeOperationError(
connectedNode,
`Error on node "${connectedNode.name}" which is connected via input "${parameters.connectionType}"`,
{ functionality: 'configuration-node' },
);
}
}
export function logWrapper(
originalInstance:
| Tool
| BaseChatModel
| BaseChatMemory
| BaseLLM
| BaseChatMessageHistory
| BaseOutputParser
| BaseRetriever
| Embeddings
| Document[]
| Document
| BaseDocumentLoader
| TextSplitter
| VectorStore
| N8nBinaryLoader
| N8nJsonLoader,
executeFunctions: IExecuteFunctions,
) {
return new Proxy(originalInstance, {
get: (target, prop) => {
let connectionType: ConnectionTypes | undefined;
// ========== BaseChatMemory ==========
if (originalInstance instanceof BaseChatMemory) {
if (prop === 'loadMemoryVariables' && 'loadMemoryVariables' in target) {
return async (values: InputValues): Promise<MemoryVariables> => {
connectionType = NodeConnectionType.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'loadMemoryVariables', values } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [values],
})) as MemoryVariables;
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'loadMemoryVariables', response } }],
]);
return response;
};
} else if (
prop === 'outputKey' &&
'outputKey' in target &&
target.constructor.name === 'BufferWindowMemory'
) {
connectionType = NodeConnectionType.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'chatHistory' } }],
]);
const response = target[prop];
target.chatHistory
.getMessages()
.then((messages) => {
executeFunctions.addOutputData(NodeConnectionType.AiMemory, index, [
[{ json: { action: 'chatHistory', chatHistory: messages } }],
]);
})
.catch((error: Error) => {
executeFunctions.addOutputData(NodeConnectionType.AiMemory, index, [
[{ json: { action: 'chatHistory', error } }],
]);
});
return response;
}
}
// ========== BaseChatMessageHistory ==========
if (originalInstance instanceof BaseChatMessageHistory) {
if (prop === 'getMessages' && 'getMessages' in target) {
return async (): Promise<BaseMessage[]> => {
connectionType = NodeConnectionType.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'getMessages' } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [],
})) as BaseMessage[];
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'getMessages', response } }],
]);
return response;
};
} else if (prop === 'addMessage' && 'addMessage' in target) {
return async (message: BaseMessage): Promise<void> => {
connectionType = NodeConnectionType.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'addMessage', message } }],
]);
await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [message],
});
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'addMessage' } }],
]);
};
}
}
// ========== BaseChatModel ==========
if (originalInstance instanceof BaseLLM || originalInstance instanceof BaseChatModel) {
if (prop === '_generate' && '_generate' in target) {
return async (
messages: BaseMessage[] & string[],
options: any,
runManager?: CallbackManagerForLLMRun,
): Promise<ChatResult> => {
connectionType = NodeConnectionType.AiLanguageModel;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { messages, options } }],
]);
try {
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [
messages,
{ ...options, signal: executeFunctions.getExecutionCancelSignal() },
runManager,
],
})) as ChatResult;
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
} catch (error) {
// Mute AbortError as they are expected
if (error?.name === 'AbortError') return { generations: [] };
throw error;
}
};
}
}
// ========== BaseOutputParser ==========
if (originalInstance instanceof BaseOutputParser) {
if (prop === 'getFormatInstructions' && 'getFormatInstructions' in target) {
return (options?: FormatInstructionsOptions): string => {
connectionType = NodeConnectionType.AiOutputParser;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'getFormatInstructions' } }],
]);
// @ts-ignore
const response = callMethodSync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [options],
}) as string;
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'getFormatInstructions', response } }],
]);
return response;
};
} else if (prop === 'parse' && 'parse' in target) {
return async (text: string | Record<string, unknown>): Promise<unknown> => {
connectionType = NodeConnectionType.AiOutputParser;
const stringifiedText = isObject(text) ? JSON.stringify(text) : text;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'parse', text: stringifiedText } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [stringifiedText],
})) as object;
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'parse', response } }],
]);
return response;
};
}
}
// ========== BaseRetriever ==========
if (originalInstance instanceof BaseRetriever) {
if (prop === 'getRelevantDocuments' && 'getRelevantDocuments' in target) {
return async (
query: string,
config?: Callbacks | BaseCallbackConfig,
): Promise<Document[]> => {
connectionType = NodeConnectionType.AiRetriever;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query, config } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [query, config],
})) as Array<Document<Record<string, any>>>;
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
}
// ========== Embeddings ==========
if (originalInstance instanceof Embeddings) {
// Docs -> Embeddings
if (prop === 'embedDocuments' && 'embedDocuments' in target) {
return async (documents: string[]): Promise<number[][]> => {
connectionType = NodeConnectionType.AiEmbedding;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { documents } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [documents],
})) as number[][];
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
// Query -> Embeddings
if (prop === 'embedQuery' && 'embedQuery' in target) {
return async (query: string): Promise<number[]> => {
connectionType = NodeConnectionType.AiEmbedding;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [query],
})) as number[];
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
}
// ========== N8n Loaders Process All ==========
if (
originalInstance instanceof N8nJsonLoader ||
originalInstance instanceof N8nBinaryLoader
) {
// Process All
if (prop === 'processAll' && 'processAll' in target) {
return async (items: INodeExecutionData[]): Promise<number[]> => {
connectionType = NodeConnectionType.AiDocument;
const { index } = executeFunctions.addInputData(connectionType, [items]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [items],
})) as number[];
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
// Process Each
if (prop === 'processItem' && 'processItem' in target) {
return async (item: INodeExecutionData, itemIndex: number): Promise<number[]> => {
connectionType = NodeConnectionType.AiDocument;
const { index } = executeFunctions.addInputData(connectionType, [[item]]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [item, itemIndex],
})) as number[];
executeFunctions.addOutputData(connectionType, index, [
[{ json: { response }, pairedItem: { item: itemIndex } }],
]);
return response;
};
}
}
// ========== TextSplitter ==========
if (originalInstance instanceof TextSplitter) {
if (prop === 'splitText' && 'splitText' in target) {
return async (text: string): Promise<string[]> => {
connectionType = NodeConnectionType.AiTextSplitter;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { textSplitter: text } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [text],
})) as string[];
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
}
// ========== Tool ==========
if (originalInstance instanceof Tool) {
if (prop === '_call' && '_call' in target) {
return async (query: string): Promise<string> => {
connectionType = NodeConnectionType.AiTool;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [query],
})) as string;
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
}
// ========== VectorStore ==========
if (originalInstance instanceof VectorStore) {
if (prop === 'similaritySearch' && 'similaritySearch' in target) {
return async (
query: string,
k?: number,
// The filter shape is defined by the concrete vector store implementation.
filter?: VectorStore['FilterType'] | undefined,
_callbacks?: Callbacks | undefined,
): Promise<Document[]> => {
connectionType = NodeConnectionType.AiVectorStore;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query, k, filter } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [query, k, filter, _callbacks],
})) as Array<Document<Record<string, any>>>;
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
}
return (target as any)[prop];
},
});
}
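A sketch of how a sub-node might return a wrapped instance from supplyData so the proxy above can mirror calls into the execution log; ChatOpenAI and its options are illustrative, and the SupplyData return shape is assumed from n8n-workflow rather than shown in this diff.

import type { IExecuteFunctions, SupplyData } from 'n8n-workflow';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { logWrapper } from './logWrapper';

// Hypothetical supplyData(): every _generate call on the returned model is
// logged as AiLanguageModel input/output data before being forwarded.
async function supplyData(this: IExecuteFunctions): Promise<SupplyData> {
	const model = new ChatOpenAI({ temperature: 0 });
	return { response: logWrapper(model, this) };
}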


@@ -0,0 +1,141 @@
import { NodeConnectionType, type INodeProperties } from 'n8n-workflow';
export const metadataFilterField: INodeProperties = {
displayName: 'Metadata Filter',
name: 'metadata',
type: 'fixedCollection',
description: 'Metadata to filter the document by',
typeOptions: {
multipleValues: true,
},
default: {},
placeholder: 'Add filter field',
options: [
{
name: 'metadataValues',
displayName: 'Fields to Set',
values: [
{
displayName: 'Name',
name: 'name',
type: 'string',
default: '',
required: true,
},
{
displayName: 'Value',
name: 'value',
type: 'string',
default: '',
},
],
},
],
};
export function getTemplateNoticeField(templateId: number): INodeProperties {
return {
displayName: `Save time with an <a href="/templates/${templateId}" target="_blank">example</a> of how this node works`,
name: 'notice',
type: 'notice',
default: '',
};
}
const connectionsString = {
[NodeConnectionType.AiAgent]: {
// Root AI view
connection: '',
locale: 'AI Agent',
},
[NodeConnectionType.AiChain]: {
// Root AI view
connection: '',
locale: 'AI Chain',
},
[NodeConnectionType.AiDocument]: {
connection: NodeConnectionType.AiDocument,
locale: 'Document Loader',
},
[NodeConnectionType.AiVectorStore]: {
connection: NodeConnectionType.AiVectorStore,
locale: 'Vector Store',
},
[NodeConnectionType.AiRetriever]: {
connection: NodeConnectionType.AiRetriever,
locale: 'Vector Store Retriever',
},
};
type AllowedConnectionTypes =
| NodeConnectionType.AiAgent
| NodeConnectionType.AiChain
| NodeConnectionType.AiDocument
| NodeConnectionType.AiVectorStore
| NodeConnectionType.AiRetriever;
function determineArticle(nextWord: string): string {
// check if the next word starts with a vowel sound
const vowels = /^[aeiouAEIOU]/;
return vowels.test(nextWord) ? 'an' : 'a';
}
const getAhref = (connectionType: { connection: string; locale: string }) =>
`<a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='${connectionType.connection}'>${connectionType.locale}</a>`;
export function getConnectionHintNoticeField(
connectionTypes: AllowedConnectionTypes[],
): INodeProperties {
const groupedConnections = new Map<string, string[]>();
// group connection types by their 'connection' value
// to not create multiple links
connectionTypes.forEach((connectionType) => {
const connectionString = connectionsString[connectionType].connection;
const localeString = connectionsString[connectionType].locale;
if (!groupedConnections.has(connectionString)) {
groupedConnections.set(connectionString, [localeString]);
return;
}
groupedConnections.get(connectionString)?.push(localeString);
});
let displayName;
if (groupedConnections.size === 1) {
const [[connection, locales]] = Array.from(groupedConnections);
displayName = `This node must be connected to ${determineArticle(
locales[0],
)} ${locales[0].toLowerCase()}. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='${connection}'>Insert one</a>`;
} else {
const ahrefs = Array.from(groupedConnections, ([connection, locales]) => {
// If there are multiple locales, join them with ' or '
// use determineArticle to insert the correct article
const locale =
locales.length > 1
? locales
.map((localeString, index, { length }) => {
return (
(index === 0 ? `${determineArticle(localeString)} ` : '') +
(index < length - 1 ? `${localeString} or ` : localeString)
);
})
.join('')
: `${determineArticle(locales[0])} ${locales[0]}`;
return getAhref({ connection, locale });
});
displayName = `This node needs to be connected to ${ahrefs.join(' or ')}.`;
}
return {
displayName,
name: 'notice',
type: 'notice',
default: '',
typeOptions: {
containerClass: 'ndv-connection-hint-notice',
},
};
}
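Finally, a sketch of how a node description could consume these shared fields; the './sharedFields' import path and the placement inside a properties array are assumptions, not part of this diff.

import { NodeConnectionType, type INodeProperties } from 'n8n-workflow';
import { getConnectionHintNoticeField, metadataFilterField } from './sharedFields';

// Hypothetical node properties: a hint that the node plugs into an AI Chain or
// AI Agent, plus the reusable metadata filter collection defined above.
const properties: INodeProperties[] = [
	getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
	metadataFilterField,
];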