Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-17 01:56:46 +00:00)

refactor(core): All calls to supplyData should use a distinct context type (no-changelog) (#11421)

committed by GitHub
parent 04c075a46b
commit 8f5fe05a92
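For reference, the pattern this commit applies across the langchain nodes: every `supplyData` implementation (and the helpers it calls) now receives the narrower `ISupplyDataFunctions` context instead of `IExecuteFunctions`. Below is a minimal sketch of what an affected sub-node looks like after the change; the class name, display name, and parameter are hypothetical, and only the `supplyData(this: ISupplyDataFunctions, ...)` signature and the `n8n-workflow` imports are taken from the diff itself.

```typescript
import {
	NodeConnectionType,
	type INodeType,
	type INodeTypeDescription,
	type ISupplyDataFunctions,
	type SupplyData,
} from 'n8n-workflow';

// Hypothetical sub-node; illustrates the new `this` context only.
export class ExampleSupplier implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Example Supplier',
		name: 'exampleSupplier',
		group: ['transform'],
		version: 1,
		description: 'Illustrates supplyData typed with ISupplyDataFunctions',
		defaults: { name: 'Example Supplier' },
		inputs: [],
		outputs: [NodeConnectionType.AiTool],
		properties: [],
	};

	// `this` is now ISupplyDataFunctions rather than IExecuteFunctions, so only
	// the helpers that are valid while supplying data to another node are exposed.
	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
		const options = this.getNodeParameter('options', itemIndex, {}) as object;
		return { response: options };
	}
}
```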
@@ -1,13 +1,13 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeOperationError,
type IExecuteFunctions,
type INodeExecutionData,
type INodeType,
type INodeTypeDescription,
type INodeOutputConfiguration,
type SupplyData,
NodeConnectionType,
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import type {
IExecuteFunctions,
INodeExecutionData,
INodeType,
INodeTypeDescription,
INodeOutputConfiguration,
SupplyData,
ISupplyDataFunctions,
} from 'n8n-workflow';

// TODO: Add support for execute function. Got already started but got commented out

@@ -72,7 +72,7 @@ export const vmResolver = makeResolverFromLegacyOptions({
});

function getSandbox(
this: IExecuteFunctions,
this: IExecuteFunctions | ISupplyDataFunctions,
code: string,
options?: { addItems?: boolean; itemIndex?: number },
) {

@@ -354,7 +354,7 @@ export class Code implements INodeType {
}
}

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const code = this.getNodeParameter('code', itemIndex) as { supplyData?: { code: string } };

if (!code.supplyData?.code) {
@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -177,7 +177,7 @@ export class DocumentBinaryInputLoader implements INodeType {
],
};

async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions): Promise<SupplyData> {
this.logger.debug('Supply Data for Binary Input Loader');
const textSplitter = (await this.getInputConnectionData(
NodeConnectionType.AiTextSplitter,

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -283,7 +283,7 @@ export class DocumentDefaultDataLoader implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const dataType = this.getNodeParameter('dataType', itemIndex, 'json') as 'json' | 'binary';
const textSplitter = (await this.getInputConnectionData(
NodeConnectionType.AiTextSplitter,

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { GithubRepoLoader } from '@langchain/community/document_loaders/web/github';

@@ -93,7 +93,7 @@ export class DocumentGithubLoader implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
console.log('Supplying data for Github Document Loader');

const repository = this.getNodeParameter('repository', itemIndex) as string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -79,7 +79,7 @@ export class DocumentJsonInputLoader implements INodeType {
],
};

async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions): Promise<SupplyData> {
this.logger.debug('Supply Data for JSON Input Loader');
const textSplitter = (await this.getInputConnectionData(
NodeConnectionType.AiTextSplitter,
@@ -2,9 +2,9 @@
import { BedrockEmbeddings } from '@langchain/aws';
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -104,7 +104,7 @@ export class EmbeddingsAwsBedrock implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('aws');
const modelName = this.getNodeParameter('model', itemIndex) as string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -92,7 +92,7 @@ export class EmbeddingsAzureOpenAi implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply data for embeddings');
const credentials = await this.getCredentials<{
apiKey: string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { CohereEmbeddings } from '@langchain/cohere';

@@ -99,7 +99,7 @@ export class EmbeddingsCohere implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply data for embeddings Cohere');
const modelName = this.getNodeParameter('modelName', itemIndex, 'embed-english-v2.0') as string;
const credentials = await this.getCredentials<{ apiKey: string }>('cohereApi');

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { GoogleGenerativeAIEmbeddings } from '@langchain/google-genai';

@@ -116,7 +116,7 @@ export class EmbeddingsGoogleGemini implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply data for embeddings Google Gemini');
const modelName = this.getNodeParameter(
'modelName',

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { HuggingFaceInferenceEmbeddings } from '@langchain/community/embeddings/hf';

@@ -81,7 +81,7 @@ export class EmbeddingsHuggingFaceInference implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply data for embeddings HF Inference');
const model = this.getNodeParameter(
'modelName',

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type { MistralAIEmbeddingsParams } from '@langchain/mistralai';

@@ -134,7 +134,7 @@ export class EmbeddingsMistralCloud implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('mistralCloudApi');
const modelName = this.getNodeParameter('model', itemIndex) as string;
const options = this.getNodeParameter(

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { OllamaEmbeddings } from '@langchain/ollama';

@@ -44,7 +44,7 @@ export class EmbeddingsOllama implements INodeType {
properties: [getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]), ollamaModel],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply data for embeddings Ollama');
const modelName = this.getNodeParameter('model', itemIndex) as string;
const credentials = await this.getCredentials('ollamaApi');
@@ -1,10 +1,10 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type SupplyData,
type ISupplyDataFunctions,
type INodeProperties,
} from 'n8n-workflow';

@@ -170,7 +170,7 @@ export class EmbeddingsOpenAi implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply data for embeddings');
const credentials = await this.getCredentials('openAiApi');

@@ -3,7 +3,7 @@ import {
NodeConnectionType,
type INodePropertyOptions,
type INodeProperties,
type IExecuteFunctions,
type ISupplyDataFunctions,
type INodeType,
type INodeTypeDescription,
type SupplyData,

@@ -175,7 +175,7 @@ export class LmChatAnthropic implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('anthropicApi');

const modelName = this.getNodeParameter('model', itemIndex) as string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -52,7 +52,7 @@ export class LmChatOllama implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('ollamaApi');

const modelName = this.getNodeParameter('model', itemIndex) as string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
type JsonObject,
NodeApiError,

@@ -242,7 +242,7 @@ export class LmChatOpenAi implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('openAiApi');

const modelName = this.getNodeParameter('model', itemIndex) as string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -90,7 +90,7 @@ export class LmCohere implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('cohereApi');

const options = this.getNodeParameter('options', itemIndex, {}) as object;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -51,7 +51,7 @@ export class LmOllama implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('ollamaApi');

const modelName = this.getNodeParameter('model', itemIndex) as string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { NodeConnectionType } from 'n8n-workflow';
import type {
IExecuteFunctions,
INodeType,
INodeTypeDescription,
ISupplyDataFunctions,
SupplyData,
ILoadOptionsFunctions,
} from 'n8n-workflow';

@@ -229,7 +229,7 @@ export class LmOpenAi implements INodeType {
},
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('openAiApi');

const modelName = this.getNodeParameter('model', itemIndex, '', {

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -132,7 +132,7 @@ export class LmOpenHuggingFaceInference implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('huggingFaceApi');

const modelName = this.getNodeParameter('model', itemIndex) as string;
@@ -2,9 +2,9 @@
import { ChatBedrockConverse } from '@langchain/aws';
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -132,7 +132,7 @@ export class LmChatAwsBedrock implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('aws');
const modelName = this.getNodeParameter('model', itemIndex) as string;
const options = this.getNodeParameter('options', itemIndex, {}) as {

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -162,7 +162,7 @@ export class LmChatAzureOpenAi implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials<{
apiKey: string;
resourceName: string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';

@@ -113,7 +113,7 @@ export class LmChatGoogleGemini implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('googlePalmApi');

const modelName = this.getNodeParameter('modelName', itemIndex) as string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
type ILoadOptionsFunctions,
type JsonObject,

@@ -124,7 +124,7 @@ export class LmChatGoogleVertex implements INodeType {
},
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('googleApi');
const privateKey = formatPrivateKey(credentials.privateKey as string);
const email = (credentials.email as string).trim();

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -129,7 +129,7 @@ export class LmChatGroq implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('groqApi');

const modelName = this.getNodeParameter('model', itemIndex) as string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -172,7 +172,7 @@ export class LmChatMistralCloud implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('mistralCloudApi');

const modelName = this.getNodeParameter('model', itemIndex) as string;
@@ -7,7 +7,7 @@ import type {
SerializedSecret,
} from '@langchain/core/load/serializable';
import type { LLMResult } from '@langchain/core/outputs';
import type { IDataObject, IExecuteFunctions } from 'n8n-workflow';
import type { IDataObject, ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import { pick } from 'lodash';
import type { BaseMessage } from '@langchain/core/messages';

@@ -30,8 +30,6 @@ const TIKTOKEN_ESTIMATE_MODEL = 'gpt-4o';
export class N8nLlmTracing extends BaseCallbackHandler {
name = 'N8nLlmTracing';

executionFunctions: IExecuteFunctions;

connectionType = NodeConnectionType.AiLanguageModel;

promptTokensEstimate = 0;

@@ -61,11 +59,10 @@ export class N8nLlmTracing extends BaseCallbackHandler {
};

constructor(
executionFunctions: IExecuteFunctions,
private executionFunctions: ISupplyDataFunctions,
options?: { tokensUsageParser: TokensUsageParser },
) {
super();
this.executionFunctions = executionFunctions;
this.options = { ...this.options, ...options };
}

@@ -138,7 +135,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
this.executionFunctions.addOutputData(this.connectionType, runDetails.index, [
[{ json: { ...response } }],
]);
void logAiEvent(this.executionFunctions, 'ai-llm-generated-output', {
logAiEvent(this.executionFunctions, 'ai-llm-generated-output', {
messages: parsedMessages,
options: runDetails.options,
response,

@@ -186,7 +183,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
});
}

void logAiEvent(this.executionFunctions, 'ai-llm-errored', {
logAiEvent(this.executionFunctions, 'ai-llm-errored', {
error: Object.keys(error).length === 0 ? error.toString() : error,
runId,
parentRunId,

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type { BufferWindowMemoryInput } from 'langchain/memory';

@@ -134,7 +134,7 @@ export class MemoryBufferWindow implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const contextWindowLength = this.getNodeParameter('contextWindowLength', itemIndex) as number;
const workflowId = this.getWorkflow().id;
const memoryInstance = MemoryChatBufferSingleton.getInstance();

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -86,7 +86,7 @@ export class MemoryMotorhead implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('motorheadApi');
const nodeVersion = this.getNode().typeVersion;
@@ -1,5 +1,10 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
import type {
ISupplyDataFunctions,
INodeType,
INodeTypeDescription,
SupplyData,
} from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import { BufferMemory, BufferWindowMemory } from 'langchain/memory';
import { PostgresChatMessageHistory } from '@langchain/community/stores/message/postgres';

@@ -73,7 +78,7 @@ export class MemoryPostgresChat implements INodeType {
},
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials<PostgresNodeCredentials>('postgres');
const tableName = this.getNodeParameter('tableName', itemIndex, 'n8n_chat_histories') as string;
const sessionId = getSessionId(this, itemIndex);

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeOperationError,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
NodeConnectionType,
} from 'n8n-workflow';

@@ -102,7 +102,7 @@ export class MemoryRedisChat implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('redis');
const nodeVersion = this.getNode().typeVersion;

@@ -1,6 +1,11 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
import type {
ISupplyDataFunctions,
INodeType,
INodeTypeDescription,
SupplyData,
} from 'n8n-workflow';
import { XataChatMessageHistory } from '@langchain/community/stores/message/xata';
import { BufferMemory, BufferWindowMemory } from 'langchain/memory';
import { BaseClient } from '@xata.io/client';

@@ -88,7 +93,7 @@ export class MemoryXata implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('xataApi');
const nodeVersion = this.getNode().typeVersion;

@@ -1,7 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type ISupplyDataFunctions,
type INodeType,
type INodeTypeDescription,
type SupplyData,

@@ -103,7 +103,7 @@ export class MemoryZep implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials<{
apiKey?: string;
apiUrl?: string;

@@ -1,6 +1,11 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { NodeConnectionType } from 'n8n-workflow';
import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
import type {
ISupplyDataFunctions,
INodeType,
INodeTypeDescription,
SupplyData,
} from 'n8n-workflow';

import {
N8nOutputFixingParser,

@@ -63,7 +68,7 @@ export class OutputParserAutofixing implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const model = (await this.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
itemIndex,
@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -80,7 +80,7 @@ export class OutputParserItemList implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const options = this.getNodeParameter('options', itemIndex, {}) as {
numberOfItems?: number;
separator?: string;

@@ -1,9 +1,9 @@
import type { JSONSchema7 } from 'json-schema';
import {
jsonParse,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
NodeOperationError,
NodeConnectionType,

@@ -122,7 +122,7 @@ export class OutputParserStructured implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const schemaType = this.getNodeParameter('schemaType', itemIndex, '') as 'fromJson' | 'manual';
// We initialize these even though one of them will always be empty
// it makes it easer to navigate the ternary operator

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -63,7 +63,7 @@ export class RetrieverContextualCompression implements INodeType {
properties: [],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supplying data for Contextual Compression Retriever');

const model = (await this.getInputConnectionData(

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';

@@ -82,7 +82,7 @@ export class RetrieverMultiQuery implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supplying data for MultiQuery Retriever');

const options = this.getNodeParameter('options', itemIndex, {}) as { queryCount?: number };

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type { VectorStore } from '@langchain/core/vectorstores';

@@ -56,7 +56,7 @@ export class RetrieverVectorStore implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supplying data for Vector Store Retriever');

const topK = this.getNodeParameter('topK', itemIndex, 4) as number;
@@ -5,7 +5,7 @@ import type {
IExecuteWorkflowInfo,
INodeExecutionData,
IWorkflowBase,
IExecuteFunctions,
ISupplyDataFunctions,
INodeType,
INodeTypeDescription,
SupplyData,

@@ -292,15 +292,15 @@ export class RetrieverWorkflow implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
class WorkflowRetriever extends BaseRetriever {
lc_namespace = ['n8n-nodes-langchain', 'retrievers', 'workflow'];

executeFunctions: IExecuteFunctions;

constructor(executeFunctions: IExecuteFunctions, fields: BaseRetrieverInput) {
constructor(
private executeFunctions: ISupplyDataFunctions,
fields: BaseRetrieverInput,
) {
super(fields);
this.executeFunctions = executeFunctions;
}

async _getRelevantDocuments(

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type { CharacterTextSplitterParams } from '@langchain/textsplitters';

@@ -63,7 +63,7 @@ export class TextSplitterCharacterTextSplitter implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply Data for Text Splitter');

const separator = this.getNodeParameter('separator', itemIndex) as string;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type {

@@ -94,7 +94,7 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply Data for Text Splitter');

const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { TokenTextSplitter } from '@langchain/textsplitters';

@@ -56,7 +56,7 @@ export class TextSplitterTokenSplitter implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply Data for Text Splitter');

const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { Calculator } from '@langchain/community/tools/calculator';

@@ -43,7 +43,7 @@ export class ToolCalculator implements INodeType {
properties: [getConnectionHintNoticeField([NodeConnectionType.AiAgent])],
};

async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions): Promise<SupplyData> {
return {
response: logWrapper(new Calculator(), this),
};
@@ -6,9 +6,9 @@ import { PythonSandbox } from 'n8n-nodes-base/dist/nodes/Code/PythonSandbox';
import type { Sandbox } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
import type {
IExecuteFunctions,
INodeType,
INodeTypeDescription,
ISupplyDataFunctions,
SupplyData,
ExecutionError,
IDataObject,

@@ -175,7 +175,7 @@ export class ToolCode implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const node = this.getNode();
const workflowMode = this.getMode();

@@ -1,8 +1,8 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import type {
IExecuteFunctions,
INodeType,
INodeTypeDescription,
ISupplyDataFunctions,
SupplyData,
IHttpRequestMethods,
IHttpRequestOptions,

@@ -250,7 +250,7 @@ export class ToolHttpRequest implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const name = this.getNode().name.replace(/ /g, '_');
try {
tryToParseAlphanumericString(name);

@@ -8,12 +8,12 @@ import unset from 'lodash/unset';
import * as mime from 'mime-types';
import { getOAuth2AdditionalParameters } from 'n8n-nodes-base/dist/nodes/HttpRequest/GenericFunctions';
import type {
IExecuteFunctions,
IDataObject,
IHttpRequestOptions,
IRequestOptionsSimplified,
ExecutionError,
NodeApiError,
ISupplyDataFunctions,
} from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow';
import { z } from 'zod';

@@ -28,7 +28,7 @@ import type {
} from './interfaces';
import type { DynamicZodObject } from '../../../types/zod.types';

const genericCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: number) => {
const genericCredentialRequest = async (ctx: ISupplyDataFunctions, itemIndex: number) => {
const genericType = ctx.getNodeParameter('genericAuthType', itemIndex) as string;

if (genericType === 'httpBasicAuth' || genericType === 'httpDigestAuth') {

@@ -104,7 +104,7 @@ const genericCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: numbe
});
};

const predefinedCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: number) => {
const predefinedCredentialRequest = async (ctx: ISupplyDataFunctions, itemIndex: number) => {
const predefinedType = ctx.getNodeParameter('nodeCredentialType', itemIndex) as string;
const additionalOptions = getOAuth2AdditionalParameters(predefinedType);

@@ -119,7 +119,7 @@ const predefinedCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: nu
};

export const configureHttpRequestFunction = async (
ctx: IExecuteFunctions,
ctx: ISupplyDataFunctions,
credentialsType: 'predefinedCredentialType' | 'genericCredentialType' | 'none',
itemIndex: number,
) => {

@@ -146,7 +146,7 @@ const defaultOptimizer = <T>(response: T) => {
return String(response);
};

const htmlOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: number) => {
const htmlOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number, maxLength: number) => {
const cssSelector = ctx.getNodeParameter('cssSelector', itemIndex, '') as string;
const onlyContent = ctx.getNodeParameter('onlyContent', itemIndex, false) as boolean;
let elementsToOmit: string[] = [];

@@ -214,7 +214,7 @@ const htmlOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: num
};
};

const textOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: number) => {
const textOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number, maxLength: number) => {
return (response: string | IDataObject) => {
if (typeof response === 'object') {
try {

@@ -245,7 +245,7 @@ const textOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: num
};
};

const jsonOptimizer = (ctx: IExecuteFunctions, itemIndex: number) => {
const jsonOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number) => {
return (response: string): string => {
let responseData: IDataObject | IDataObject[] | string = response;

@@ -324,7 +324,7 @@ const jsonOptimizer = (ctx: IExecuteFunctions, itemIndex: number) => {
};
};

export const configureResponseOptimizer = (ctx: IExecuteFunctions, itemIndex: number) => {
export const configureResponseOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number) => {
const optimizeResponse = ctx.getNodeParameter('optimizeResponse', itemIndex, false) as boolean;

if (optimizeResponse) {

@@ -469,7 +469,7 @@ const MODEL_INPUT_DESCRIPTION = {
};

export const updateParametersAndOptions = (options: {
ctx: IExecuteFunctions;
ctx: ISupplyDataFunctions;
itemIndex: number;
toolParameters: ToolParameter[];
placeholdersDefinitions: PlaceholderDefinition[];

@@ -558,7 +558,7 @@ export const prepareToolDescription = (
};

export const configureToolFunction = (
ctx: IExecuteFunctions,
ctx: ISupplyDataFunctions,
itemIndex: number,
toolParameters: ToolParameter[],
requestOptions: IHttpRequestOptions,
@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { SerpAPI } from '@langchain/community/tools/serpapi';

@@ -113,7 +113,7 @@ export class ToolSerpApi implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('serpApi');

const options = this.getNodeParameter('options', itemIndex) as object;

@@ -1,4 +1,9 @@
import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
import type {
INodeType,
INodeTypeDescription,
ISupplyDataFunctions,
SupplyData,
} from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';

import { VectorStoreQATool } from 'langchain/tools';

@@ -82,7 +87,7 @@ export class ToolVectorStore implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const name = this.getNodeParameter('name', itemIndex) as string;
const toolDescription = this.getNodeParameter('description', itemIndex) as string;
const topK = this.getNodeParameter('topK', itemIndex, 4) as number;

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { WikipediaQueryRun } from '@langchain/community/tools/wikipedia_query_run';

@@ -43,7 +43,7 @@ export class ToolWikipedia implements INodeType {
properties: [getConnectionHintNoticeField([NodeConnectionType.AiAgent])],
};

async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions): Promise<SupplyData> {
const WikiTool = new WikipediaQueryRun();

WikiTool.description =

@@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { WolframAlphaTool } from '@langchain/community/tools/wolframalpha';

@@ -49,7 +49,7 @@ export class ToolWolframAlpha implements INodeType {
properties: [getConnectionHintNoticeField([NodeConnectionType.AiAgent])],
};

async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions): Promise<SupplyData> {
const credentials = await this.getCredentials('wolframAlphaApi');

return {

@@ -6,12 +6,12 @@ import isObject from 'lodash/isObject';
import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';
import type {
IExecuteFunctions,
IExecuteWorkflowInfo,
INodeExecutionData,
INodeType,
INodeTypeDescription,
IWorkflowBase,
ISupplyDataFunctions,
SupplyData,
ExecutionError,
IDataObject,

@@ -357,7 +357,7 @@ export class ToolWorkflow implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const name = this.getNodeParameter('name', itemIndex) as string;
const description = this.getNodeParameter('description', itemIndex) as string;

@@ -1,10 +1,10 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type SupplyData,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type { Embeddings } from '@langchain/core/embeddings';
import { MemoryVectorStoreManager } from '../shared/MemoryVectorStoreManager';

@@ -59,7 +59,7 @@ export class VectorStoreInMemoryLoad implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const embeddings = (await this.getInputConnectionData(
NodeConnectionType.AiEmbedding,
itemIndex,
@@ -1,8 +1,8 @@
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type { PineconeStoreParams } from '@langchain/pinecone';

@@ -84,7 +84,7 @@ export class VectorStorePineconeLoad implements INodeType {
},
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supplying data for Pinecone Load Vector Store');

const namespace = this.getNodeParameter('pineconeNamespace', itemIndex) as string;

@@ -1,7 +1,7 @@
import {
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
NodeConnectionType,
} from 'n8n-workflow';

@@ -81,7 +81,7 @@ export class VectorStoreSupabaseLoad implements INodeType {

methods = { listSearch: { supabaseTableNameSearch } };

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply Supabase Load Vector Store');

const tableName = this.getNodeParameter('tableName', itemIndex, '', {

@@ -1,8 +1,8 @@
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type { IZepConfig } from '@langchain/community/vectorstores/zep';

@@ -83,7 +83,7 @@ export class VectorStoreZepLoad implements INodeType {
],
};

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supplying data for Zep Load Vector Store');

const collectionName = this.getNodeParameter('collectionName', itemIndex) as string;

@@ -7,11 +7,8 @@ export class MemoryVectorStoreManager {

private vectorStoreBuffer: Map<string, MemoryVectorStore>;

private embeddings: Embeddings;

private constructor(embeddings: Embeddings) {
private constructor(private embeddings: Embeddings) {
this.vectorStoreBuffer = new Map();
this.embeddings = embeddings;
}

public static getInstance(embeddings: Embeddings): MemoryVectorStoreManager {

@@ -5,12 +5,13 @@ import type { Embeddings } from '@langchain/core/embeddings';
import type { VectorStore } from '@langchain/core/vectorstores';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
IExecuteFunctions,
INodeCredentialDescription,
INodeProperties,
INodeExecutionData,
IExecuteFunctions,
INodeTypeDescription,
SupplyData,
ISupplyDataFunctions,
INodeType,
ILoadOptionsFunctions,
INodeListSearchResult,

@@ -57,13 +58,13 @@ interface VectorStoreNodeConstructorArgs {
retrieveFields?: INodeProperties[];
updateFields?: INodeProperties[];
populateVectorStore: (
context: IExecuteFunctions,
context: ISupplyDataFunctions,
embeddings: Embeddings,
documents: Array<Document<Record<string, unknown>>>,
itemIndex: number,
) => Promise<void>;
getVectorStoreClient: (
context: IExecuteFunctions,
context: ISupplyDataFunctions,
filter: Record<string, never> | undefined,
embeddings: Embeddings,
itemIndex: number,

@@ -281,7 +282,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
});

resultData.push(...serializedDocs);
void logAiEvent(this, 'ai-vector-store-searched', { query: prompt });
logAiEvent(this, 'ai-vector-store-searched', { query: prompt });
}

return [resultData];

@@ -311,7 +312,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
try {
await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex);

void logAiEvent(this, 'ai-vector-store-populated');
logAiEvent(this, 'ai-vector-store-populated');
} catch (error) {
throw error;
}

@@ -365,7 +366,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
ids: [documentId],
});

void logAiEvent(this, 'ai-vector-store-updated');
logAiEvent(this, 'ai-vector-store-updated');
} catch (error) {
throw error;
}

@@ -380,7 +381,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
);
}

async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const mode = this.getNodeParameter('mode', 0) as 'load' | 'insert' | 'retrieve';
const filter = getMetadataFiltersValues(this, itemIndex);
const embeddings = (await this.getInputConnectionData(
@@ -1,6 +1,11 @@
import { pipeline } from 'stream/promises';
import { createWriteStream } from 'fs';
import type { IBinaryData, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type {
IBinaryData,
IExecuteFunctions,
INodeExecutionData,
ISupplyDataFunctions,
} from 'n8n-workflow';
import { NodeOperationError, BINARY_ENCODING } from 'n8n-workflow';

import type { TextSplitter } from '@langchain/textsplitters';
@@ -26,25 +31,12 @@ const SUPPORTED_MIME_TYPES = {
};

export class N8nBinaryLoader {
private context: IExecuteFunctions;

private optionsPrefix: string;

private binaryDataKey: string;

private textSplitter?: TextSplitter;

constructor(
context: IExecuteFunctions,
optionsPrefix = '',
binaryDataKey = '',
textSplitter?: TextSplitter,
) {
this.context = context;
this.textSplitter = textSplitter;
this.optionsPrefix = optionsPrefix;
this.binaryDataKey = binaryDataKey;
}
private context: IExecuteFunctions | ISupplyDataFunctions,
private optionsPrefix = '',
private binaryDataKey = '',
private textSplitter?: TextSplitter,
) {}

async processAll(items?: INodeExecutionData[]): Promise<Document[]> {
const docs: Document[] = [];

@@ -1,4 +1,9 @@
import { type IExecuteFunctions, type INodeExecutionData, NodeOperationError } from 'n8n-workflow';
import {
type IExecuteFunctions,
type INodeExecutionData,
type ISupplyDataFunctions,
NodeOperationError,
} from 'n8n-workflow';

import type { TextSplitter } from '@langchain/textsplitters';
import type { Document } from '@langchain/core/documents';
@@ -7,17 +12,11 @@ import { TextLoader } from 'langchain/document_loaders/fs/text';
import { getMetadataFiltersValues } from './helpers';

export class N8nJsonLoader {
private context: IExecuteFunctions;

private optionsPrefix: string;

private textSplitter?: TextSplitter;

constructor(context: IExecuteFunctions, optionsPrefix = '', textSplitter?: TextSplitter) {
this.context = context;
this.textSplitter = textSplitter;
this.optionsPrefix = optionsPrefix;
}
constructor(
private context: IExecuteFunctions | ISupplyDataFunctions,
private optionsPrefix = '',
private textSplitter?: TextSplitter,
) {}

async processAll(items?: INodeExecutionData[]): Promise<Document[]> {
const docs: Document[] = [];

@@ -1,6 +1,6 @@
import type { DynamicStructuredToolInput } from '@langchain/core/tools';
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import type { IExecuteFunctions, IDataObject } from 'n8n-workflow';
import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow';
import { NodeConnectionType, jsonParse, NodeOperationError } from 'n8n-workflow';
import { StructuredOutputParser } from 'langchain/output_parsers';
import type { ZodTypeAny } from 'zod';
@@ -45,12 +45,11 @@ ALL parameters marked as required must be provided`;
};

export class N8nTool extends DynamicStructuredTool {
private context: IExecuteFunctions;

constructor(context: IExecuteFunctions, fields: DynamicStructuredToolInput) {
constructor(
private context: ISupplyDataFunctions,
fields: DynamicStructuredToolInput,
) {
super(fields);

this.context = context;
}

asDynamicTool(): DynamicTool {

@@ -5,7 +5,13 @@ import type { BaseMessage } from '@langchain/core/messages';
import type { Tool } from '@langchain/core/tools';
import type { BaseChatMemory } from 'langchain/memory';
import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
import type { AiEvent, IDataObject, IExecuteFunctions, IWebhookFunctions } from 'n8n-workflow';
import type {
AiEvent,
IDataObject,
IExecuteFunctions,
ISupplyDataFunctions,
IWebhookFunctions,
} from 'n8n-workflow';

import { N8nTool } from './N8nTool';

@@ -20,7 +26,7 @@ function hasMethods<T>(obj: unknown, ...methodNames: Array<string | symbol>): ob
}

export function getMetadataFiltersValues(
ctx: IExecuteFunctions,
ctx: IExecuteFunctions | ISupplyDataFunctions,
itemIndex: number,
): Record<string, never> | undefined {
const options = ctx.getNodeParameter('options', itemIndex, {});
@@ -93,7 +99,7 @@ export function getPromptInputByType(options: {
}

export function getSessionId(
ctx: IExecuteFunctions | IWebhookFunctions,
ctx: ISupplyDataFunctions | IWebhookFunctions,
itemIndex: number,
selectorKey = 'sessionIdType',
autoSelect = 'fromInput',
@@ -133,13 +139,13 @@ export function getSessionId(
return sessionId;
}

export async function logAiEvent(
executeFunctions: IExecuteFunctions,
export function logAiEvent(
executeFunctions: IExecuteFunctions | ISupplyDataFunctions,
event: AiEvent,
data?: IDataObject,
) {
try {
await executeFunctions.logAiEvent(event, data ? jsonStringify(data) : undefined);
executeFunctions.logAiEvent(event, data ? jsonStringify(data) : undefined);
} catch (error) {
executeFunctions.logger.debug(`Error logging AI event: ${event}`);
}

@@ -10,7 +10,7 @@ import type { Tool } from '@langchain/core/tools';
import { VectorStore } from '@langchain/core/vectorstores';
import { TextSplitter } from '@langchain/textsplitters';
import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData, ISupplyDataFunctions } from 'n8n-workflow';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';

import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers';
@@ -27,7 +27,7 @@ const errorsMap: { [key: string]: { message: string; description: string } } = {
export async function callMethodAsync<T>(
this: T,
parameters: {
executeFunctions: IExecuteFunctions;
executeFunctions: IExecuteFunctions | ISupplyDataFunctions;
connectionType: NodeConnectionType;
currentNodeRunIndex: number;
method: (...args: any[]) => Promise<unknown>;
@@ -113,7 +113,7 @@ export function logWrapper(
| VectorStore
| N8nBinaryLoader
| N8nJsonLoader,
executeFunctions: IExecuteFunctions,
executeFunctions: IExecuteFunctions | ISupplyDataFunctions,
) {
return new Proxy(originalInstance, {
get: (target, prop) => {
@@ -190,7 +190,7 @@ export function logWrapper(
const payload = { action: 'getMessages', response };
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);

void logAiEvent(executeFunctions, 'ai-messages-retrieved-from-memory', { response });
logAiEvent(executeFunctions, 'ai-messages-retrieved-from-memory', { response });
return response;
};
} else if (prop === 'addMessage' && 'addMessage' in target) {
@@ -207,7 +207,7 @@ export function logWrapper(
arguments: [message],
});

void logAiEvent(executeFunctions, 'ai-message-added-to-memory', { message });
logAiEvent(executeFunctions, 'ai-message-added-to-memory', { message });
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
};
}
@@ -233,7 +233,7 @@ export function logWrapper(
arguments: [query, config],
})) as Array<Document<Record<string, any>>>;

void logAiEvent(executeFunctions, 'ai-documents-retrieved', { query });
logAiEvent(executeFunctions, 'ai-documents-retrieved', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@@ -258,7 +258,7 @@ export function logWrapper(
arguments: [documents],
})) as number[][];

void logAiEvent(executeFunctions, 'ai-document-embedded');
logAiEvent(executeFunctions, 'ai-document-embedded');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@@ -278,7 +278,7 @@ export function logWrapper(
method: target[prop],
arguments: [query],
})) as number[];
void logAiEvent(executeFunctions, 'ai-query-embedded');
logAiEvent(executeFunctions, 'ai-query-embedded');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@@ -323,7 +323,7 @@ export function logWrapper(
arguments: [item, itemIndex],
})) as number[];

void logAiEvent(executeFunctions, 'ai-document-processed');
logAiEvent(executeFunctions, 'ai-document-processed');
executeFunctions.addOutputData(connectionType, index, [
[{ json: { response }, pairedItem: { item: itemIndex } }],
]);
@@ -349,7 +349,7 @@ export function logWrapper(
arguments: [text],
})) as string[];

void logAiEvent(executeFunctions, 'ai-text-split');
logAiEvent(executeFunctions, 'ai-text-split');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@@ -373,7 +373,7 @@ export function logWrapper(
arguments: [query],
})) as string;

void logAiEvent(executeFunctions, 'ai-tool-called', { query, response });
logAiEvent(executeFunctions, 'ai-tool-called', { query, response });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@@ -403,7 +403,7 @@ export function logWrapper(
arguments: [query, k, filter, _callbacks],
})) as Array<Document<Record<string, any>>>;

void logAiEvent(executeFunctions, 'ai-vector-store-searched', { query });
logAiEvent(executeFunctions, 'ai-vector-store-searched', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);

return response;

@@ -2,7 +2,7 @@ import type { Callbacks } from '@langchain/core/callbacks/manager';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { AIMessage } from '@langchain/core/messages';
import { BaseOutputParser } from '@langchain/core/output_parsers';
import type { IExecuteFunctions } from 'n8n-workflow';
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';

import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser';
@@ -10,23 +10,14 @@ import { NAIVE_FIX_PROMPT } from './prompt';
import { logAiEvent } from '../helpers';

export class N8nOutputFixingParser extends BaseOutputParser {
private context: IExecuteFunctions;

private model: BaseLanguageModel;

private outputParser: N8nStructuredOutputParser;

lc_namespace = ['langchain', 'output_parsers', 'fix'];

constructor(
context: IExecuteFunctions,
model: BaseLanguageModel,
outputParser: N8nStructuredOutputParser,
private context: ISupplyDataFunctions,
private model: BaseLanguageModel,
private outputParser: N8nStructuredOutputParser,
) {
super();
this.context = context;
this.model = model;
this.outputParser = outputParser;
}

getRetryChain() {
@@ -48,7 +39,7 @@ export class N8nOutputFixingParser extends BaseOutputParser {
try {
// First attempt to parse the completion
const response = await this.outputParser.parse(completion, callbacks, (e) => e);
void logAiEvent(this.context, 'ai-output-parsed', { text: completion, response });
logAiEvent(this.context, 'ai-output-parsed', { text: completion, response });

this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [
[{ json: { action: 'parse', response } }],

@@ -1,7 +1,7 @@
import type { Callbacks } from '@langchain/core/callbacks/manager';
import { StructuredOutputParser } from 'langchain/output_parsers';
import get from 'lodash/get';
import type { IExecuteFunctions } from 'n8n-workflow';
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { z } from 'zod';

@@ -14,11 +14,11 @@ const STRUCTURED_OUTPUT_ARRAY_KEY = '__structured__output__array';
export class N8nStructuredOutputParser extends StructuredOutputParser<
z.ZodType<object, z.ZodTypeDef, object>
> {
context: IExecuteFunctions;

constructor(context: IExecuteFunctions, zodSchema: z.ZodSchema<object>) {
constructor(
private context: ISupplyDataFunctions,
zodSchema: z.ZodSchema<object>,
) {
super(zodSchema);
this.context = context;
}

lc_namespace = ['langchain', 'output_parsers', 'structured'];
@@ -39,7 +39,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
get(parsed, STRUCTURED_OUTPUT_KEY) ??
parsed) as Record<string, unknown>;

void logAiEvent(this.context, 'ai-output-parsed', { text, response: result });
logAiEvent(this.context, 'ai-output-parsed', { text, response: result });

this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [
[{ json: { action: 'parse', response: result } }],
@@ -56,7 +56,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
},
);

void logAiEvent(this.context, 'ai-output-parsed', {
logAiEvent(this.context, 'ai-output-parsed', {
text,
response: e.message ?? e,
});
@@ -73,7 +73,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
static async fromZodJsonSchema(
zodSchema: z.ZodSchema<object>,
nodeVersion: number,
context: IExecuteFunctions,
context: ISupplyDataFunctions,
): Promise<N8nStructuredOutputParser> {
let returnSchema: z.ZodType<object, z.ZodTypeDef, object>;
if (nodeVersion === 1) {