refactor(core): All calls to supplyData should use a distinct context type (no-changelog) (#11421)

Author: कारतोफ्फेलस्क्रिप्ट™
Date: 2024-10-28 11:37:23 +01:00
Committed by: GitHub
Parent: 04c075a46b
Commit: 8f5fe05a92
70 changed files with 560 additions and 308 deletions
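
Every node file below carries the same two-line change: the `supplyData` method's `this` context is re-typed from the general-purpose IExecuteFunctions to the dedicated ISupplyDataFunctions, and the import list is updated to match. A minimal sketch of the before/after shape — the class name and credential name are illustrative placeholders, not taken from the diff:

import type { ISupplyDataFunctions, SupplyData } from 'n8n-workflow';

export class ExampleLmNode {
	// Before this commit: async supplyData(this: IExecuteFunctions, itemIndex: number)
	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
		// The helpers used in the diffs below are available on the new context as well,
		// so the method bodies stay untouched; only the `this` annotation changes.
		const credentials = await this.getCredentials('exampleApi'); // placeholder credential name
		const modelName = this.getNodeParameter('model', itemIndex) as string;
		return { response: { credentials, modelName } };
	}
}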

View File

@@ -3,7 +3,7 @@ import {
 	NodeConnectionType,
 	type INodePropertyOptions,
 	type INodeProperties,
-	type IExecuteFunctions,
+	type ISupplyDataFunctions,
 	type INodeType,
 	type INodeTypeDescription,
 	type SupplyData,
@@ -175,7 +175,7 @@ export class LmChatAnthropic implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('anthropicApi');
 		const modelName = this.getNodeParameter('model', itemIndex) as string;

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 } from 'n8n-workflow';
@@ -52,7 +52,7 @@ export class LmChatOllama implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('ollamaApi');
 		const modelName = this.getNodeParameter('model', itemIndex) as string;

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 	type JsonObject,
 	NodeApiError,
@@ -242,7 +242,7 @@ export class LmChatOpenAi implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('openAiApi');
 		const modelName = this.getNodeParameter('model', itemIndex) as string;

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 } from 'n8n-workflow';
@@ -90,7 +90,7 @@ export class LmCohere implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('cohereApi');
 		const options = this.getNodeParameter('options', itemIndex, {}) as object;

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 } from 'n8n-workflow';
@@ -51,7 +51,7 @@ export class LmOllama implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('ollamaApi');
 		const modelName = this.getNodeParameter('model', itemIndex) as string;

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import { NodeConnectionType } from 'n8n-workflow';
 import type {
-	IExecuteFunctions,
 	INodeType,
 	INodeTypeDescription,
+	ISupplyDataFunctions,
 	SupplyData,
 	ILoadOptionsFunctions,
 } from 'n8n-workflow';
@@ -229,7 +229,7 @@ export class LmOpenAi implements INodeType {
 		},
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('openAiApi');
 		const modelName = this.getNodeParameter('model', itemIndex, '', {

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 } from 'n8n-workflow';
@@ -132,7 +132,7 @@ export class LmOpenHuggingFaceInference implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('huggingFaceApi');
 		const modelName = this.getNodeParameter('model', itemIndex) as string;

View File

@@ -2,9 +2,9 @@
 import { ChatBedrockConverse } from '@langchain/aws';
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 } from 'n8n-workflow';
@@ -132,7 +132,7 @@ export class LmChatAwsBedrock implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('aws');
 		const modelName = this.getNodeParameter('model', itemIndex) as string;
 		const options = this.getNodeParameter('options', itemIndex, {}) as {

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 } from 'n8n-workflow';
@@ -162,7 +162,7 @@ export class LmChatAzureOpenAi implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials<{
 			apiKey: string;
 			resourceName: string;

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 } from 'n8n-workflow';
 import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
@@ -113,7 +113,7 @@ export class LmChatGoogleGemini implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('googlePalmApi');
 		const modelName = this.getNodeParameter('modelName', itemIndex) as string;

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 	type ILoadOptionsFunctions,
 	type JsonObject,
@@ -124,7 +124,7 @@ export class LmChatGoogleVertex implements INodeType {
 		},
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('googleApi');
 		const privateKey = formatPrivateKey(credentials.privateKey as string);
 		const email = (credentials.email as string).trim();

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 } from 'n8n-workflow';
@@ -129,7 +129,7 @@ export class LmChatGroq implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('groqApi');
 		const modelName = this.getNodeParameter('model', itemIndex) as string;

View File

@@ -1,9 +1,9 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 	NodeConnectionType,
-	type IExecuteFunctions,
 	type INodeType,
 	type INodeTypeDescription,
+	type ISupplyDataFunctions,
 	type SupplyData,
 } from 'n8n-workflow';
@@ -172,7 +172,7 @@ export class LmChatMistralCloud implements INodeType {
 		],
 	};
-	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
+	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
 		const credentials = await this.getCredentials('mistralCloudApi');
 		const modelName = this.getNodeParameter('model', itemIndex) as string;

View File

@@ -7,7 +7,7 @@ import type {
 	SerializedSecret,
 } from '@langchain/core/load/serializable';
 import type { LLMResult } from '@langchain/core/outputs';
-import type { IDataObject, IExecuteFunctions } from 'n8n-workflow';
+import type { IDataObject, ISupplyDataFunctions } from 'n8n-workflow';
 import { NodeConnectionType } from 'n8n-workflow';
 import { pick } from 'lodash';
 import type { BaseMessage } from '@langchain/core/messages';
@@ -30,8 +30,6 @@ const TIKTOKEN_ESTIMATE_MODEL = 'gpt-4o';
 export class N8nLlmTracing extends BaseCallbackHandler {
 	name = 'N8nLlmTracing';
-	executionFunctions: IExecuteFunctions;
 	connectionType = NodeConnectionType.AiLanguageModel;
 	promptTokensEstimate = 0;
@@ -61,11 +59,10 @@ export class N8nLlmTracing extends BaseCallbackHandler {
 	};
 	constructor(
-		executionFunctions: IExecuteFunctions,
+		private executionFunctions: ISupplyDataFunctions,
 		options?: { tokensUsageParser: TokensUsageParser },
 	) {
 		super();
-		this.executionFunctions = executionFunctions;
 		this.options = { ...this.options, ...options };
 	}
@@ -138,7 +135,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
 		this.executionFunctions.addOutputData(this.connectionType, runDetails.index, [
 			[{ json: { ...response } }],
 		]);
-		void logAiEvent(this.executionFunctions, 'ai-llm-generated-output', {
+		logAiEvent(this.executionFunctions, 'ai-llm-generated-output', {
 			messages: parsedMessages,
 			options: runDetails.options,
 			response,
@@ -186,7 +183,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
 			});
 		}
-		void logAiEvent(this.executionFunctions, 'ai-llm-errored', {
+		logAiEvent(this.executionFunctions, 'ai-llm-errored', {
 			error: Object.keys(error).length === 0 ? error.toString() : error,
 			runId,
 			parentRunId,
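
The N8nLlmTracing handler needs more than a type swap: the executionFunctions field becomes a private constructor parameter property typed as ISupplyDataFunctions, which makes the separate field declaration and the manual assignment in the constructor redundant, and the `void` operator is dropped at both logAiEvent call sites, suggesting the helper no longer returns a promise after this commit. A condensed sketch of the resulting shape, assuming a synchronous logAiEvent helper (its real import path is not shown in this diff, so a declaration stands in for it):

import { BaseCallbackHandler } from '@langchain/core/callbacks/base';
import { NodeConnectionType, type ISupplyDataFunctions } from 'n8n-workflow';

// Stand-in for the package's logAiEvent helper; assumed synchronous here,
// which is why the call sites in the diff no longer need `void`.
declare function logAiEvent(
	ctx: ISupplyDataFunctions,
	event: string,
	payload?: object,
): void;

export class TracingHandlerSketch extends BaseCallbackHandler {
	name = 'TracingHandlerSketch';

	connectionType = NodeConnectionType.AiLanguageModel;

	constructor(
		// The parameter property replaces both the removed field declaration
		// and the removed `this.executionFunctions = executionFunctions;` line.
		private executionFunctions: ISupplyDataFunctions,
	) {
		super();
	}

	reportGeneration(response: object) {
		// Same call shape as the diff, minus the dropped `void` operator.
		logAiEvent(this.executionFunctions, 'ai-llm-generated-output', { response });
	}
}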