feat(Postgres Chat Memory Node): Implement Postgres Chat Memory node (#10071)

This commit is contained in:
oleg
2024-07-17 08:25:37 +02:00
committed by GitHub
parent e5dda5731d
commit 9cbbb6335d
6 changed files with 200 additions and 6 deletions

View File

@@ -10,6 +10,18 @@ import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { BaseMessage } from '@langchain/core/messages';
import { DynamicTool, type Tool } from '@langchain/core/tools';
import type { BaseLLM } from '@langchain/core/language_models/llms';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseChatMessageHistory } from '@langchain/core/chat_history';
/**
 * Runtime duck-typing guard: reports whether `obj` is a non-null object that
 * exposes every named member as a callable function, narrowing it to `T`.
 * With no method names supplied it vacuously returns true (matches
 * `Array.prototype.every` on an empty list).
 */
function hasMethods<T>(obj: unknown, ...methodNames: Array<string | symbol>): obj is T {
	for (const methodName of methodNames) {
		// Re-checked per iteration so the empty-names case stays vacuously true.
		if (typeof obj !== 'object' || obj === null) return false;
		if (!(methodName in obj)) return false;
		if (typeof (obj as Record<string | symbol, unknown>)[methodName] !== 'function') {
			return false;
		}
	}
	return true;
}
export function getMetadataFiltersValues(
ctx: IExecuteFunctions,
@@ -38,8 +50,16 @@ export function getMetadataFiltersValues(
return undefined;
}
/**
 * Duck-typed guard for LangChain's `BaseChatMemory`: any object exposing
 * callable `loadMemoryVariables` and `saveContext` members qualifies.
 * The explicit `obj is BaseChatMemory` predicate (the original returned a
 * bare boolean) lets call sites narrow the checked value.
 */
export function isBaseChatMemory(obj: unknown): obj is BaseChatMemory {
	return hasMethods<BaseChatMemory>(obj, 'loadMemoryVariables', 'saveContext');
}
/**
 * Duck-typed guard for LangChain's `BaseChatMessageHistory`: any object
 * exposing callable `getMessages` and `addMessage` members qualifies.
 * The explicit `obj is BaseChatMessageHistory` predicate (the original
 * returned a bare boolean) lets call sites narrow the checked value.
 */
export function isBaseChatMessageHistory(obj: unknown): obj is BaseChatMessageHistory {
	return hasMethods<BaseChatMessageHistory>(obj, 'getMessages', 'addMessage');
}
/**
 * Heuristic guard for a LangChain chat model: inspects the instance's
 * `lc_namespace` (empty array when absent) for the `'chat_models'` segment.
 *
 * Fix: the block contained two adjacent `const namespace` declarations —
 * leftover old/new diff lines — which is a duplicate-declaration compile
 * error; only the updated declaration is kept.
 */
export function isChatInstance(model: unknown): model is BaseChatModel {
	const namespace = (model as BaseLLM)?.lc_namespace ?? [];
	return namespace.includes('chat_models');
}

View File

@@ -4,21 +4,21 @@ import type { ConnectionTypes, IExecuteFunctions, INodeExecutionData } from 'n8n
import type { Tool } from '@langchain/core/tools';
import type { BaseMessage } from '@langchain/core/messages';
import type { InputValues, MemoryVariables, OutputValues } from '@langchain/core/memory';
import { BaseChatMessageHistory } from '@langchain/core/chat_history';
import type { BaseChatMessageHistory } from '@langchain/core/chat_history';
import type { BaseCallbackConfig, Callbacks } from '@langchain/core/callbacks/manager';
import { Embeddings } from '@langchain/core/embeddings';
import { VectorStore } from '@langchain/core/vectorstores';
import type { Document } from '@langchain/core/documents';
import { TextSplitter } from '@langchain/textsplitters';
import { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import { BaseRetriever } from '@langchain/core/retrievers';
import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers';
import { isObject } from 'lodash';
import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base';
import { N8nJsonLoader } from './N8nJsonLoader';
import { N8nBinaryLoader } from './N8nBinaryLoader';
import { logAiEvent, isToolsInstance } from './helpers';
import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers';
const errorsMap: { [key: string]: { message: string; description: string } } = {
'You exceeded your current quota, please check your plan and billing details.': {
@@ -125,7 +125,7 @@ export function logWrapper(
get: (target, prop) => {
let connectionType: ConnectionTypes | undefined;
// ========== BaseChatMemory ==========
if (originalInstance instanceof BaseChatMemory) {
if (isBaseChatMemory(originalInstance)) {
if (prop === 'loadMemoryVariables' && 'loadMemoryVariables' in target) {
return async (values: InputValues): Promise<MemoryVariables> => {
connectionType = NodeConnectionType.AiMemory;
@@ -177,7 +177,7 @@ export function logWrapper(
}
// ========== BaseChatMessageHistory ==========
if (originalInstance instanceof BaseChatMessageHistory) {
if (isBaseChatMessageHistory(originalInstance)) {
if (prop === 'getMessages' && 'getMessages' in target) {
return async (): Promise<BaseMessage[]> => {
connectionType = NodeConnectionType.AiMemory;