Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-16 09:36:44 +00:00)
feat: Add Cohere reranking capability to vector stores (#16014)
Co-authored-by: Yiorgis Gozadinos <yiorgis@n8n.io>
Co-authored-by: Mutasem Aldmour <mutasem@n8n.io>
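The diff below teaches n8n's logWrapper about LangChain document compressors, the base class behind rerankers such as Cohere Rerank, so that reranker calls get the same input/output logging as other AI connections. For orientation, here is a minimal sketch of the contract the wrapper relies on: a compressDocuments(documents, query) method that resolves to the documents reordered by relevance. The class name and the keyword-overlap scoring are hypothetical stand-ins, not the Cohere implementation; only the base class, import path, and method signature are taken from the diff.

// Minimal sketch of the reranker contract the wrapper below depends on.
// KeywordOverlapReranker is a hypothetical stand-in for a real reranker
// such as Cohere Rerank.
import { Document } from '@langchain/core/documents';
import { BaseDocumentCompressor } from '@langchain/core/retrievers/document_compressors';

class KeywordOverlapReranker extends BaseDocumentCompressor {
	async compressDocuments(documents: Document[], query: string): Promise<Document[]> {
		const terms = query.toLowerCase().split(/\s+/).filter(Boolean);
		// Score each document by naive keyword overlap with the query.
		const scored = documents.map((doc) => ({
			doc,
			score: terms.filter((term) => doc.pageContent.toLowerCase().includes(term)).length,
		}));
		// Best-scoring documents first, which is what callers of a reranker expect.
		return scored.sort((a, b) => b.score - a.score).map(({ doc }) => doc);
	}
}

Any instance with this shape is what the new branch in logWrapper intercepts: its compressDocuments calls are routed through the AiReranker connection so the query, the candidate documents, and the reranked result all appear in the execution log.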
@@ -6,6 +6,7 @@ import { Embeddings } from '@langchain/core/embeddings';
 import type { InputValues, MemoryVariables, OutputValues } from '@langchain/core/memory';
 import type { BaseMessage } from '@langchain/core/messages';
 import { BaseRetriever } from '@langchain/core/retrievers';
+import { BaseDocumentCompressor } from '@langchain/core/retrievers/document_compressors';
 import type { StructuredTool, Tool } from '@langchain/core/tools';
 import { VectorStore } from '@langchain/core/vectorstores';
 import { TextSplitter } from '@langchain/textsplitters';
@@ -18,7 +19,12 @@ import type {
 	ITaskMetadata,
 	NodeConnectionType,
 } from 'n8n-workflow';
-import { NodeOperationError, NodeConnectionTypes, parseErrorMetadata } from 'n8n-workflow';
+import {
+	NodeOperationError,
+	NodeConnectionTypes,
+	parseErrorMetadata,
+	deepCopy,
+} from 'n8n-workflow';
 
 import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers';
 import { N8nBinaryLoader } from './N8nBinaryLoader';
@@ -102,6 +108,7 @@ export function logWrapper<
 	| BaseChatMemory
 	| BaseChatMessageHistory
 	| BaseRetriever
+	| BaseDocumentCompressor
 	| Embeddings
 	| Document[]
 	| Document
@@ -297,6 +304,32 @@ export function logWrapper<
 		}
 	}
 
+	// ========== Rerankers ==========
+	if (originalInstance instanceof BaseDocumentCompressor) {
+		if (prop === 'compressDocuments' && 'compressDocuments' in target) {
+			return async (documents: Document[], query: string): Promise<Document[]> => {
+				connectionType = NodeConnectionTypes.AiReranker;
+				const { index } = executeFunctions.addInputData(connectionType, [
+					[{ json: { query, documents } }],
+				]);
+
+				const response = (await callMethodAsync.call(target, {
+					executeFunctions,
+					connectionType,
+					currentNodeRunIndex: index,
+					method: target[prop],
+					// compressDocuments mutates the original object
+					// messing up the input data logging
+					arguments: [deepCopy(documents), query],
+				})) as Document[];
+
+				logAiEvent(executeFunctions, 'ai-document-reranked', { query });
+				executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
+				return response;
+			};
+		}
+	}
+
 	// ========== N8n Loaders Process All ==========
 	if (
 		originalInstance instanceof N8nJsonLoader ||
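Two details in the new Rerankers branch are worth noting: the call is recorded through addInputData/addOutputData under the new AiReranker connection type, and the wrapper hands deepCopy(documents) to the underlying method because, per the in-diff comment, compressDocuments mutates its input and would otherwise corrupt the logged input data. The standalone sketch below shows the same interception pattern with plain console logging; wrapReranker, logInput/logOutput, and jsonClone are illustrative names, not n8n APIs, and jsonClone is a JSON-based stand-in for n8n's deepCopy helper.

// Standalone sketch (assumed shape, not the actual logWrapper code): a Proxy
// traps compressDocuments, logs a snapshot of the inputs, delegates to the
// real reranker, then logs the output.
import { Document } from '@langchain/core/documents';
import { BaseDocumentCompressor } from '@langchain/core/retrievers/document_compressors';

// JSON-based clone standing in for n8n's deepCopy helper.
const jsonClone = <T>(value: T): T => JSON.parse(JSON.stringify(value)) as T;

const logInput = (data: unknown) => console.log('reranker input:', JSON.stringify(data));
const logOutput = (data: unknown) => console.log('reranker output:', JSON.stringify(data));

export function wrapReranker<T extends BaseDocumentCompressor>(reranker: T): T {
	return new Proxy(reranker, {
		get(target, prop, receiver) {
			if (prop === 'compressDocuments') {
				return async (documents: Document[], query: string): Promise<Document[]> => {
					// Snapshot the inputs before the call so later mutation by the
					// reranker cannot change what was recorded (the deepCopy rationale).
					logInput({ query, documents: jsonClone(documents) });
					const response = await target.compressDocuments(jsonClone(documents), query);
					logOutput({ query, response });
					return response;
				};
			}
			return Reflect.get(target, prop, receiver);
		},
	});
}

Either way, the point of the copy is that the object handed to the mutating callee is never the same reference that was captured for logging.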