refactor: Migrate NodeConnectionType to const object type (no-changelog) (#14078)

Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
Alex Grozav authored on 2025-03-21 14:01:26 +02:00, committed by GitHub
parent 7e8179b848
commit 8215e0b59f
703 changed files with 3104 additions and 3018 deletions
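For context on the change itself: the NodeConnectionType enum exported by n8n-workflow is replaced with a NodeConnectionTypes const object, and the NodeConnectionType name is kept as a union type derived from that object, so type-only usages keep compiling. The sketch below shows the general pattern only; the member names and string values are illustrative, not the package's actual definitions.

// Before (illustrative): an enum provided both a runtime value and a type.
// export enum NodeConnectionType {
//   AiTool = 'ai_tool',
//   AiMemory = 'ai_memory',
// }

// After (illustrative): a const object provides the runtime values...
export const NodeConnectionTypes = {
  AiTool: 'ai_tool',
  AiMemory: 'ai_memory',
} as const;

// ...and the old name survives as a union of the object's literal types.
export type NodeConnectionType =
  (typeof NodeConnectionTypes)[keyof typeof NodeConnectionTypes]; // 'ai_tool' | 'ai_memory'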

View File

@@ -2,7 +2,7 @@ import type { DynamicStructuredToolInput } from '@langchain/core/tools';
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import { StructuredOutputParser } from 'langchain/output_parsers';
import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow';
-import { NodeConnectionType, jsonParse, NodeOperationError } from 'n8n-workflow';
+import { NodeConnectionTypes, jsonParse, NodeOperationError } from 'n8n-workflow';
import type { ZodTypeAny } from 'zod';
import { ZodBoolean, ZodNullable, ZodNumber, ZodObject, ZodOptional } from 'zod';
@@ -96,8 +96,8 @@ export class N8nTool extends DynamicStructuredTool {
return result;
} catch (e) {
-const { index } = context.addInputData(NodeConnectionType.AiTool, [[{ json: { query } }]]);
-void context.addOutputData(NodeConnectionType.AiTool, index, e);
+const { index } = context.addInputData(NodeConnectionTypes.AiTool, [[{ json: { query } }]]);
+void context.addOutputData(NodeConnectionTypes.AiTool, index, e);
return e.toString();
}

View File

@@ -4,7 +4,7 @@ import type { BaseLLM } from '@langchain/core/language_models/llms';
import type { BaseMessage } from '@langchain/core/messages';
import type { Tool } from '@langchain/core/tools';
import type { BaseChatMemory } from 'langchain/memory';
-import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
+import { NodeConnectionTypes, NodeOperationError, jsonStringify } from 'n8n-workflow';
import type {
AiEvent,
IDataObject,
@@ -190,7 +190,7 @@ export const getConnectedTools = async (
escapeCurlyBrackets: boolean = false,
) => {
const connectedTools =
-((await ctx.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];
+((await ctx.getInputConnectionData(NodeConnectionTypes.AiTool, 0)) as Tool[]) || [];
if (!enforceUniqueNames) return connectedTools;

View File

@@ -15,8 +15,9 @@ import type {
INodeExecutionData,
ISupplyDataFunctions,
ITaskMetadata,
+NodeConnectionType,
} from 'n8n-workflow';
-import { NodeOperationError, NodeConnectionType, parseErrorMetadata } from 'n8n-workflow';
+import { NodeOperationError, NodeConnectionTypes, parseErrorMetadata } from 'n8n-workflow';
import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers';
import { N8nBinaryLoader } from './N8nBinaryLoader';
@@ -116,7 +117,7 @@ export function logWrapper(
if (isBaseChatMemory(originalInstance)) {
if (prop === 'loadMemoryVariables' && 'loadMemoryVariables' in target) {
return async (values: InputValues): Promise<MemoryVariables> => {
-connectionType = NodeConnectionType.AiMemory;
+connectionType = NodeConnectionTypes.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'loadMemoryVariables', values } }],
@@ -139,7 +140,7 @@ export function logWrapper(
};
} else if (prop === 'saveContext' && 'saveContext' in target) {
return async (input: InputValues, output: OutputValues): Promise<MemoryVariables> => {
-connectionType = NodeConnectionType.AiMemory;
+connectionType = NodeConnectionTypes.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'saveContext', input, output } }],
@@ -168,7 +169,7 @@ export function logWrapper(
if (isBaseChatMessageHistory(originalInstance)) {
if (prop === 'getMessages' && 'getMessages' in target) {
return async (): Promise<BaseMessage[]> => {
-connectionType = NodeConnectionType.AiMemory;
+connectionType = NodeConnectionTypes.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'getMessages' } }],
]);
@@ -189,7 +190,7 @@ export function logWrapper(
};
} else if (prop === 'addMessage' && 'addMessage' in target) {
return async (message: BaseMessage): Promise<void> => {
-connectionType = NodeConnectionType.AiMemory;
+connectionType = NodeConnectionTypes.AiMemory;
const payload = { action: 'addMessage', message };
const { index } = executeFunctions.addInputData(connectionType, [[{ json: payload }]]);
@@ -214,7 +215,7 @@ export function logWrapper(
query: string,
config?: Callbacks | BaseCallbackConfig,
): Promise<Document[]> => {
-connectionType = NodeConnectionType.AiRetriever;
+connectionType = NodeConnectionTypes.AiRetriever;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query, config } }],
]);
@@ -255,7 +256,7 @@ export function logWrapper(
// Docs -> Embeddings
if (prop === 'embedDocuments' && 'embedDocuments' in target) {
return async (documents: string[]): Promise<number[][]> => {
-connectionType = NodeConnectionType.AiEmbedding;
+connectionType = NodeConnectionTypes.AiEmbedding;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { documents } }],
]);
@@ -276,7 +277,7 @@ export function logWrapper(
// Query -> Embeddings
if (prop === 'embedQuery' && 'embedQuery' in target) {
return async (query: string): Promise<number[]> => {
-connectionType = NodeConnectionType.AiEmbedding;
+connectionType = NodeConnectionTypes.AiEmbedding;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query } }],
]);
@@ -303,7 +304,7 @@ export function logWrapper(
// Process All
if (prop === 'processAll' && 'processAll' in target) {
return async (items: INodeExecutionData[]): Promise<number[]> => {
-connectionType = NodeConnectionType.AiDocument;
+connectionType = NodeConnectionTypes.AiDocument;
const { index } = executeFunctions.addInputData(connectionType, [items]);
const response = (await callMethodAsync.call(target, {
@@ -322,7 +323,7 @@ export function logWrapper(
// Process Each
if (prop === 'processItem' && 'processItem' in target) {
return async (item: INodeExecutionData, itemIndex: number): Promise<number[]> => {
-connectionType = NodeConnectionType.AiDocument;
+connectionType = NodeConnectionTypes.AiDocument;
const { index } = executeFunctions.addInputData(connectionType, [[item]]);
const response = (await callMethodAsync.call(target, {
@@ -346,7 +347,7 @@ export function logWrapper(
if (originalInstance instanceof TextSplitter) {
if (prop === 'splitText' && 'splitText' in target) {
return async (text: string): Promise<string[]> => {
-connectionType = NodeConnectionType.AiTextSplitter;
+connectionType = NodeConnectionTypes.AiTextSplitter;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { textSplitter: text } }],
]);
@@ -370,7 +371,7 @@ export function logWrapper(
if (isToolsInstance(originalInstance)) {
if (prop === '_call' && '_call' in target) {
return async (query: string): Promise<string> => {
-connectionType = NodeConnectionType.AiTool;
+connectionType = NodeConnectionTypes.AiTool;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query } }],
]);
@@ -399,7 +400,7 @@ export function logWrapper(
filter?: BiquadFilterType | undefined,
_callbacks?: Callbacks | undefined,
): Promise<Document[]> => {
-connectionType = NodeConnectionType.AiVectorStore;
+connectionType = NodeConnectionTypes.AiVectorStore;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query, k, filter } }],
]);
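Note how this file keeps NodeConnectionType as a type-only import while every runtime value now comes from NodeConnectionTypes. Because the const object's members are assignable to the derived union type, the connectionType assignments above still type-check. A simplified sketch of that interplay, assuming the derived-union definition outlined earlier:

import type { NodeConnectionType } from 'n8n-workflow';
import { NodeConnectionTypes } from 'n8n-workflow';

// The variable keeps its NodeConnectionType annotation...
let connectionType: NodeConnectionType;

// ...and members of the const object remain valid values for it.
connectionType = NodeConnectionTypes.AiMemory;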

View File

@@ -4,7 +4,7 @@ import type { AIMessage } from '@langchain/core/messages';
import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers';
import type { PromptTemplate } from '@langchain/core/prompts';
import type { ISupplyDataFunctions } from 'n8n-workflow';
-import { NodeConnectionType } from 'n8n-workflow';
+import { NodeConnectionTypes } from 'n8n-workflow';
import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser';
import { logAiEvent } from '../helpers';
@@ -33,7 +33,7 @@ export class N8nOutputFixingParser extends BaseOutputParser {
* @throws Error if both parsing attempts fail
*/
async parse(completion: string, callbacks?: Callbacks) {
-const { index } = this.context.addInputData(NodeConnectionType.AiOutputParser, [
+const { index } = this.context.addInputData(NodeConnectionTypes.AiOutputParser, [
[{ json: { action: 'parse', text: completion } }],
]);
@@ -47,7 +47,7 @@ export class N8nOutputFixingParser extends BaseOutputParser {
});
logAiEvent(this.context, 'ai-output-parsed', { text: completion, response });
-this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [
+this.context.addOutputData(NodeConnectionTypes.AiOutputParser, index, [
[{ json: { action: 'parse', response } }],
]);
@@ -68,14 +68,14 @@ export class N8nOutputFixingParser extends BaseOutputParser {
const parsed = await this.outputParser.parse(resultText, callbacks);
// Add the successfully parsed output to the context
-this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [
+this.context.addOutputData(NodeConnectionTypes.AiOutputParser, index, [
[{ json: { action: 'parse', response: parsed } }],
]);
return parsed;
} catch (autoParseError) {
// If both attempts fail, add the error to the output and throw
-this.context.addOutputData(NodeConnectionType.AiOutputParser, index, autoParseError);
+this.context.addOutputData(NodeConnectionTypes.AiOutputParser, index, autoParseError);
throw autoParseError;
}
}

View File

@@ -1,5 +1,5 @@
import type { IExecuteFunctions } from 'n8n-workflow';
-import { NodeConnectionType } from 'n8n-workflow';
+import { NodeConnectionTypes } from 'n8n-workflow';
import { N8nItemListOutputParser } from './N8nItemListOutputParser';
import { N8nOutputFixingParser } from './N8nOutputFixingParser';
@@ -19,7 +19,7 @@ export async function getOptionalOutputParser(
if (ctx.getNodeParameter('hasOutputParser', 0, true) === true) {
outputParser = (await ctx.getInputConnectionData(
-NodeConnectionType.AiOutputParser,
+NodeConnectionTypes.AiOutputParser,
0,
)) as N8nOutputParser;
}

View File

@@ -2,7 +2,7 @@ import type { Callbacks } from '@langchain/core/callbacks/manager';
import { StructuredOutputParser } from 'langchain/output_parsers';
import get from 'lodash/get';
import type { ISupplyDataFunctions } from 'n8n-workflow';
-import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
+import { NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';
import { z } from 'zod';
import { logAiEvent, unwrapNestedOutput } from '../helpers';
@@ -28,7 +28,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
_callbacks?: Callbacks,
errorMapper?: (error: Error) => Error,
): Promise<object> {
-const { index } = this.context.addInputData(NodeConnectionType.AiOutputParser, [
+const { index } = this.context.addInputData(NodeConnectionTypes.AiOutputParser, [
[{ json: { action: 'parse', text } }],
]);
try {
@@ -46,7 +46,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
logAiEvent(this.context, 'ai-output-parsed', { text, response: result });
-this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [
+this.context.addOutputData(NodeConnectionTypes.AiOutputParser, index, [
[{ json: { action: 'parse', response: result } }],
]);
@@ -66,7 +66,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
response: e.message ?? e,
});
-this.context.addOutputData(NodeConnectionType.AiOutputParser, index, nodeError);
+this.context.addOutputData(NodeConnectionTypes.AiOutputParser, index, nodeError);
if (errorMapper) {
throw errorMapper(e);
}

View File

@@ -1,4 +1,4 @@
-import { NodeConnectionType, type INodeProperties } from 'n8n-workflow';
+import { NodeConnectionTypes, type INodeProperties } from 'n8n-workflow';
export const metadataFilterField: INodeProperties = {
displayName: 'Metadata Filter',
@@ -43,36 +43,36 @@ export function getTemplateNoticeField(templateId: number): INodeProperties {
}
const connectionsString = {
-[NodeConnectionType.AiAgent]: {
+[NodeConnectionTypes.AiAgent]: {
// Root AI view
connection: '',
locale: 'AI Agent',
},
-[NodeConnectionType.AiChain]: {
+[NodeConnectionTypes.AiChain]: {
// Root AI view
connection: '',
locale: 'AI Chain',
},
-[NodeConnectionType.AiDocument]: {
-connection: NodeConnectionType.AiDocument,
+[NodeConnectionTypes.AiDocument]: {
+connection: NodeConnectionTypes.AiDocument,
locale: 'Document Loader',
},
-[NodeConnectionType.AiVectorStore]: {
-connection: NodeConnectionType.AiVectorStore,
+[NodeConnectionTypes.AiVectorStore]: {
+connection: NodeConnectionTypes.AiVectorStore,
locale: 'Vector Store',
},
-[NodeConnectionType.AiRetriever]: {
-connection: NodeConnectionType.AiRetriever,
+[NodeConnectionTypes.AiRetriever]: {
+connection: NodeConnectionTypes.AiRetriever,
locale: 'Vector Store Retriever',
},
};
type AllowedConnectionTypes =
-| NodeConnectionType.AiAgent
-| NodeConnectionType.AiChain
-| NodeConnectionType.AiDocument
-| NodeConnectionType.AiVectorStore
-| NodeConnectionType.AiRetriever;
+| typeof NodeConnectionTypes.AiAgent
+| typeof NodeConnectionTypes.AiChain
+| typeof NodeConnectionTypes.AiDocument
+| typeof NodeConnectionTypes.AiVectorStore
+| typeof NodeConnectionTypes.AiRetriever;
function determineArticle(nextWord: string): string {
// check if the next word starts with a vowel sound
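The AllowedConnectionTypes union in this last file shows the one spot where the migration needs more than a rename: enum members double as types, but properties of a const object are values, so their literal types have to be referenced through typeof. A hedged sketch of why the typeof form is equivalent, again with illustrative literal values rather than the package's actual ones:

import { NodeConnectionTypes } from 'n8n-workflow';

// With the enum, NodeConnectionType.AiAgent was usable directly as a type.
// With the const object, typeof NodeConnectionTypes.AiAgent resolves to the
// member's string literal type (for example 'ai_agent').
type AllowedConnectionTypes =
  | typeof NodeConnectionTypes.AiAgent
  | typeof NodeConnectionTypes.AiChain;

// Assuming the members are string literals, this assignment compiles:
const allowed: AllowedConnectionTypes = NodeConnectionTypes.AiAgent;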