refactor(core): Reorganize n8n-core and enforce file-name casing (no-changelog) (#12667)

This commit is contained in:
कारतोफ्फेलस्क्रिप्ट™
2025-01-17 15:17:25 +01:00
committed by GitHub
parent e7f00bcb7f
commit 05858c2153
132 changed files with 459 additions and 441 deletions

View File

@@ -0,0 +1,296 @@
import { mock } from 'jest-mock-extended';
import type {
IGetExecuteTriggerFunctions,
INode,
ITriggerResponse,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
TriggerTime,
CronExpression,
} from 'n8n-workflow';
import { LoggerProxy, TriggerCloseError, WorkflowActivationError } from 'n8n-workflow';
import type { ErrorReporter } from '@/errors/error-reporter';
import { ActiveWorkflows } from '../active-workflows';
import type { PollContext } from '../node-execution-context';
import type { ScheduledTaskManager } from '../scheduled-task-manager';
import type { TriggersAndPollers } from '../triggers-and-pollers';
// Unit tests for ActiveWorkflows: activating workflows that contain trigger
// and/or polling nodes, deactivating them, and propagating errors raised
// during (de)activation.
describe('ActiveWorkflows', () => {
// Shared fixtures. All jest mocks are cleared in beforeEach, so per-test
// behavior is configured through the addWorkflow() helper below.
const workflowId = 'test-workflow-id';
const workflow = mock<Workflow>();
const additionalData = mock<IWorkflowExecuteAdditionalData>();
const mode: WorkflowExecuteMode = 'trigger';
const activation: WorkflowActivateMode = 'init';
const getTriggerFunctions = jest.fn() as IGetExecuteTriggerFunctions;
const triggerResponse = mock<ITriggerResponse>();
const pollFunctions = mock<PollContext>();
const getPollFunctions = jest.fn<PollContext, unknown[]>();
// The class under test logs via the LoggerProxy singleton; initialize it
// with a mock so log calls are no-ops.
LoggerProxy.init(mock());
const scheduledTaskManager = mock<ScheduledTaskManager>();
const triggersAndPollers = mock<TriggersAndPollers>();
const errorReporter = mock<ErrorReporter>();
const triggerNode = mock<INode>();
const pollNode = mock<INode>();
let activeWorkflows: ActiveWorkflows;
beforeEach(() => {
jest.clearAllMocks();
// Fresh instance per test so isActive()/get() state does not leak.
activeWorkflows = new ActiveWorkflows(
mock(),
scheduledTaskManager,
triggersAndPollers,
errorReporter,
);
});
type PollTimes = { item: TriggerTime[] };
// Options for addWorkflow(): which nodes the workflow reports, and whether
// the trigger/poll runners should fail on their first invocation.
type TestOptions = {
triggerNodes?: INode[];
pollNodes?: INode[];
triggerError?: Error;
pollError?: Error;
pollTimes?: PollTimes;
};
// Configures the workflow and runner mocks, then activates the workflow.
// Note: triggerError/pollError use mockRejectedValueOnce, so only the first
// runner call rejects — later calls fall back to the default mock behavior.
const addWorkflow = async ({
triggerNodes = [],
pollNodes = [],
triggerError,
pollError,
pollTimes = { item: [{ mode: 'everyMinute' }] },
}: TestOptions) => {
workflow.getTriggerNodes.mockReturnValue(triggerNodes);
workflow.getPollNodes.mockReturnValue(pollNodes);
// The poll node's 'pollTimes' parameter determines the cron schedule.
pollFunctions.getNodeParameter.calledWith('pollTimes').mockReturnValue(pollTimes);
if (triggerError) {
triggersAndPollers.runTrigger.mockRejectedValueOnce(triggerError);
} else {
triggersAndPollers.runTrigger.mockResolvedValue(triggerResponse);
}
if (pollError) {
triggersAndPollers.runPoll.mockRejectedValueOnce(pollError);
} else {
getPollFunctions.mockReturnValue(pollFunctions);
}
return await activeWorkflows.add(
workflowId,
workflow,
additionalData,
mode,
activation,
getTriggerFunctions,
getPollFunctions,
);
};
describe('add()', () => {
describe('should activate workflow', () => {
it('with trigger nodes', async () => {
await addWorkflow({ triggerNodes: [triggerNode] });
expect(activeWorkflows.isActive(workflowId)).toBe(true);
expect(workflow.getTriggerNodes).toHaveBeenCalled();
expect(triggersAndPollers.runTrigger).toHaveBeenCalledWith(
workflow,
triggerNode,
getTriggerFunctions,
additionalData,
mode,
activation,
);
});
it('with polling nodes', async () => {
await addWorkflow({ pollNodes: [pollNode] });
expect(activeWorkflows.isActive(workflowId)).toBe(true);
expect(workflow.getPollNodes).toHaveBeenCalled();
// Polling nodes are driven by a cron registered with the task manager.
expect(scheduledTaskManager.registerCron).toHaveBeenCalled();
});
it('with both trigger and polling nodes', async () => {
await addWorkflow({ triggerNodes: [triggerNode], pollNodes: [pollNode] });
expect(activeWorkflows.isActive(workflowId)).toBe(true);
expect(workflow.getTriggerNodes).toHaveBeenCalled();
expect(workflow.getPollNodes).toHaveBeenCalled();
expect(triggersAndPollers.runTrigger).toHaveBeenCalledWith(
workflow,
triggerNode,
getTriggerFunctions,
additionalData,
mode,
activation,
);
expect(scheduledTaskManager.registerCron).toHaveBeenCalled();
expect(triggersAndPollers.runPoll).toHaveBeenCalledWith(workflow, pollNode, pollFunctions);
});
});
describe('should throw error', () => {
it('if trigger activation fails', async () => {
const error = new Error('Trigger activation failed');
await expect(
addWorkflow({ triggerNodes: [triggerNode], triggerError: error }),
).rejects.toThrow(WorkflowActivationError);
// A failed activation must not leave the workflow marked active.
expect(activeWorkflows.isActive(workflowId)).toBe(false);
});
it('if polling activation fails', async () => {
const error = new Error('Failed to activate polling');
await expect(addWorkflow({ pollNodes: [pollNode], pollError: error })).rejects.toThrow(
WorkflowActivationError,
);
expect(activeWorkflows.isActive(workflowId)).toBe(false);
});
it('if the polling interval is too short', async () => {
// '* * * * *' with a leading seconds field means "every second" —
// below the minimum one-minute polling interval.
const pollTimes: PollTimes = {
item: [
{
mode: 'custom',
cronExpression: '* * * * *' as CronExpression,
},
],
};
await expect(addWorkflow({ pollNodes: [pollNode], pollTimes })).rejects.toThrow(
'The polling interval is too short. It has to be at least a minute.',
);
expect(scheduledTaskManager.registerCron).not.toHaveBeenCalled();
});
});
describe('should handle polling errors', () => {
it('should throw error when poll fails during initial testing', async () => {
const error = new Error('Poll function failed');
await expect(addWorkflow({ pollNodes: [pollNode], pollError: error })).rejects.toThrow(
WorkflowActivationError,
);
expect(triggersAndPollers.runPoll).toHaveBeenCalledWith(workflow, pollNode, pollFunctions);
// During the initial test poll, failures are thrown — not emitted.
expect(pollFunctions.__emit).not.toHaveBeenCalled();
expect(pollFunctions.__emitError).not.toHaveBeenCalled();
});
it('should emit error when poll fails during regular polling', async () => {
const error = new Error('Poll function failed');
triggersAndPollers.runPoll
.mockResolvedValueOnce(null) // Succeed on first call (testing)
.mockRejectedValueOnce(error); // Fail on second call (regular polling)
await addWorkflow({ pollNodes: [pollNode] });
// Get the executeTrigger function that was registered
const registerCronCall = scheduledTaskManager.registerCron.mock.calls[0];
const executeTrigger = registerCronCall[2] as () => Promise<void>;
// Execute the trigger function to simulate a regular poll
await executeTrigger();
expect(triggersAndPollers.runPoll).toHaveBeenCalledTimes(2);
// Regular-poll failures are surfaced via __emitError instead of throwing.
expect(pollFunctions.__emit).not.toHaveBeenCalled();
expect(pollFunctions.__emitError).toHaveBeenCalledWith(error);
});
});
});
describe('remove()', () => {
// Activates a trigger-based workflow, then removes it.
const setupForRemoval = async () => {
await addWorkflow({ triggerNodes: [triggerNode] });
return await activeWorkflows.remove(workflowId);
};
it('should remove an active workflow', async () => {
const result = await setupForRemoval();
expect(result).toBe(true);
expect(activeWorkflows.isActive(workflowId)).toBe(false);
expect(scheduledTaskManager.deregisterCrons).toHaveBeenCalledWith(workflowId);
expect(triggerResponse.closeFunction).toHaveBeenCalled();
});
it('should return false when removing non-existent workflow', async () => {
const result = await activeWorkflows.remove('non-existent');
expect(result).toBe(false);
expect(scheduledTaskManager.deregisterCrons).not.toHaveBeenCalled();
});
it('should handle TriggerCloseError when closing trigger', async () => {
// TriggerCloseError is reported (not rethrown), so removal still succeeds.
const triggerCloseError = new TriggerCloseError(triggerNode, { level: 'warning' });
(triggerResponse.closeFunction as jest.Mock).mockRejectedValueOnce(triggerCloseError);
const result = await setupForRemoval();
expect(result).toBe(true);
expect(activeWorkflows.isActive(workflowId)).toBe(false);
expect(triggerResponse.closeFunction).toHaveBeenCalled();
expect(errorReporter.error).toHaveBeenCalledWith(triggerCloseError, {
extra: { workflowId },
});
});
it('should throw WorkflowDeactivationError when closeFunction throws regular error', async () => {
const error = new Error('Close function failed');
(triggerResponse.closeFunction as jest.Mock).mockRejectedValueOnce(error);
await addWorkflow({ triggerNodes: [triggerNode] });
await expect(activeWorkflows.remove(workflowId)).rejects.toThrow(
`Failed to deactivate trigger of workflow ID "${workflowId}": "Close function failed"`,
);
expect(triggerResponse.closeFunction).toHaveBeenCalled();
// Non-TriggerCloseError failures are rethrown, not sent to the reporter.
expect(errorReporter.error).not.toHaveBeenCalled();
});
});
describe('get() and isActive()', () => {
it('should return workflow data for active workflow', async () => {
await addWorkflow({ triggerNodes: [triggerNode] });
expect(activeWorkflows.isActive(workflowId)).toBe(true);
expect(activeWorkflows.get(workflowId)).toBeDefined();
});
it('should return undefined for non-active workflow', () => {
expect(activeWorkflows.isActive('non-existent')).toBe(false);
expect(activeWorkflows.get('non-existent')).toBeUndefined();
});
});
describe('allActiveWorkflows()', () => {
it('should return all active workflow IDs', async () => {
await addWorkflow({ triggerNodes: [triggerNode] });
const activeIds = activeWorkflows.allActiveWorkflows();
expect(activeIds).toEqual([workflowId]);
});
});
describe('removeAllTriggerAndPollerBasedWorkflows()', () => {
it('should remove all active workflows', async () => {
await addWorkflow({ triggerNodes: [triggerNode] });
await activeWorkflows.removeAllTriggerAndPollerBasedWorkflows();
expect(activeWorkflows.allActiveWorkflows()).toEqual([]);
expect(scheduledTaskManager.deregisterCrons).toHaveBeenCalledWith(workflowId);
});
});
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,71 @@
import { mock } from 'jest-mock-extended';
import type { Workflow } from 'n8n-workflow';
import type { InstanceSettings } from '@/instance-settings';
import { ScheduledTaskManager } from '../scheduled-task-manager';
describe('ScheduledTaskManager', () => {
const instanceSettings = mock<InstanceSettings>({ isLeader: true });
const workflow = mock<Workflow>({ timezone: 'GMT' });
const everyMinute = '0 * * * * *';
const onTick = jest.fn();
let scheduledTaskManager: ScheduledTaskManager;
beforeEach(() => {
jest.clearAllMocks();
jest.useFakeTimers();
scheduledTaskManager = new ScheduledTaskManager(instanceSettings);
});
it('should throw when workflow timezone is invalid', () => {
expect(() =>
scheduledTaskManager.registerCron(
mock<Workflow>({ timezone: 'somewhere' }),
everyMinute,
onTick,
),
).toThrow('Invalid timezone.');
});
it('should throw when cron expression is invalid', () => {
expect(() =>
//@ts-expect-error invalid cron expression is a type-error
scheduledTaskManager.registerCron(workflow, 'invalid-cron-expression', onTick),
).toThrow();
});
it('should register valid CronJobs', async () => {
scheduledTaskManager.registerCron(workflow, everyMinute, onTick);
expect(onTick).not.toHaveBeenCalled();
jest.advanceTimersByTime(10 * 60 * 1000); // 10 minutes
expect(onTick).toHaveBeenCalledTimes(10);
});
it('should should not invoke on follower instances', async () => {
scheduledTaskManager = new ScheduledTaskManager(mock<InstanceSettings>({ isLeader: false }));
scheduledTaskManager.registerCron(workflow, everyMinute, onTick);
expect(onTick).not.toHaveBeenCalled();
jest.advanceTimersByTime(10 * 60 * 1000); // 10 minutes
expect(onTick).not.toHaveBeenCalled();
});
it('should deregister CronJobs for a workflow', async () => {
scheduledTaskManager.registerCron(workflow, everyMinute, onTick);
scheduledTaskManager.registerCron(workflow, everyMinute, onTick);
scheduledTaskManager.registerCron(workflow, everyMinute, onTick);
expect(scheduledTaskManager.cronJobs.get(workflow.id)?.length).toBe(3);
scheduledTaskManager.deregisterCrons(workflow.id);
expect(scheduledTaskManager.cronJobs.get(workflow.id)?.length).toBe(0);
expect(onTick).not.toHaveBeenCalled();
jest.advanceTimersByTime(10 * 60 * 1000); // 10 minutes
expect(onTick).not.toHaveBeenCalled();
});
});

View File

@@ -0,0 +1,68 @@
import type { SSHCredentials } from 'n8n-workflow';
import { Client } from 'ssh2';
import { SSHClientsManager } from '../ssh-clients-manager';
describe('SSHClientsManager', () => {
const credentials: SSHCredentials = {
sshAuthenticateWith: 'password',
sshHost: 'example.com',
sshPort: 22,
sshUser: 'username',
sshPassword: 'password',
};
let sshClientsManager: SSHClientsManager;
const connectSpy = jest.spyOn(Client.prototype, 'connect');
const endSpy = jest.spyOn(Client.prototype, 'end');
beforeEach(() => {
jest.clearAllMocks();
jest.useFakeTimers();
sshClientsManager = new SSHClientsManager();
connectSpy.mockImplementation(function (this: Client) {
this.emit('ready');
return this;
});
});
it('should create a new SSH client', async () => {
const client = await sshClientsManager.getClient(credentials);
expect(client).toBeInstanceOf(Client);
});
it('should not create a new SSH client when connect fails', async () => {
connectSpy.mockImplementation(function (this: Client) {
throw new Error('Failed to connect');
});
await expect(sshClientsManager.getClient(credentials)).rejects.toThrow('Failed to connect');
});
it('should reuse an existing SSH client', async () => {
const client1 = await sshClientsManager.getClient(credentials);
const client2 = await sshClientsManager.getClient(credentials);
expect(client1).toBe(client2);
});
it('should close all SSH connections on process exit', async () => {
await sshClientsManager.getClient(credentials);
sshClientsManager.onShutdown();
expect(endSpy).toHaveBeenCalledTimes(1);
});
it('should cleanup stale SSH connections', async () => {
await sshClientsManager.getClient({ ...credentials, sshHost: 'host1' });
await sshClientsManager.getClient({ ...credentials, sshHost: 'host2' });
await sshClientsManager.getClient({ ...credentials, sshHost: 'host3' });
jest.advanceTimersByTime(6 * 60 * 1000);
sshClientsManager.cleanupStaleConnections();
expect(endSpy).toHaveBeenCalledTimes(3);
expect(sshClientsManager.clients.size).toBe(0);
});
});

View File

@@ -0,0 +1,163 @@
import { mock } from 'jest-mock-extended';
import { ApplicationError } from 'n8n-workflow';
import type {
Workflow,
INode,
INodeExecutionData,
IPollFunctions,
IWorkflowExecuteAdditionalData,
INodeType,
INodeTypes,
ITriggerFunctions,
WorkflowHooks,
IRun,
} from 'n8n-workflow';
import { TriggersAndPollers } from '../triggers-and-pollers';
// Unit tests for TriggersAndPollers: running a node's trigger function
// (regular and manual mode, including manual-mode emit/error/response
// plumbing via workflow hooks) and running a node's poll function.
describe('TriggersAndPollers', () => {
const node = mock<INode>();
// trigger/poll start undefined so "missing function" paths can be tested;
// individual tests assign jest.fn() implementations as needed.
const nodeType = mock<INodeType>({
trigger: undefined,
poll: undefined,
});
const nodeTypes = mock<INodeTypes>();
const workflow = mock<Workflow>({ nodeTypes });
// Hook arrays start empty; manual-mode emits are expected to push into them.
const hookFunctions = mock<WorkflowHooks['hookFunctions']>({
sendResponse: [],
workflowExecuteAfter: [],
});
const additionalData = mock<IWorkflowExecuteAdditionalData>({
hooks: {
hookFunctions,
},
});
const triggersAndPollers = new TriggersAndPollers();
beforeEach(() => {
jest.clearAllMocks();
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
});
describe('runTrigger()', () => {
const triggerFunctions = mock<ITriggerFunctions>();
const getTriggerFunctions = jest.fn().mockReturnValue(triggerFunctions);
const triggerFn = jest.fn();
const mockEmitData: INodeExecutionData[][] = [[{ json: { data: 'test' } }]];
// Runs the trigger in the given mode with the shared fixtures.
const runTriggerHelper = async (mode: 'manual' | 'trigger' = 'trigger') =>
await triggersAndPollers.runTrigger(
workflow,
node,
getTriggerFunctions,
additionalData,
mode,
'init',
);
it('should throw error if node type does not have trigger function', async () => {
// nodeType.trigger is undefined here (reset in beforeEach of the suite).
await expect(runTriggerHelper()).rejects.toThrow(ApplicationError);
});
it('should call trigger function in regular mode', async () => {
nodeType.trigger = triggerFn;
triggerFn.mockResolvedValue({ test: true });
const result = await runTriggerHelper();
expect(triggerFn).toHaveBeenCalled();
expect(result).toEqual({ test: true });
});
describe('manual mode', () => {
// The trigger functions actually used by runTrigger(): the first value
// returned by the getTriggerFunctions factory mock.
const getMockTriggerFunctions = () => getTriggerFunctions.mock.results[0]?.value;
beforeEach(() => {
nodeType.trigger = triggerFn;
triggerFn.mockResolvedValue({ workflowId: '123' });
});
it('should handle promise resolution', async () => {
const result = await runTriggerHelper('manual');
expect(result?.manualTriggerResponse).toBeInstanceOf(Promise);
getMockTriggerFunctions()?.emit?.(mockEmitData);
});
it('should handle error emission', async () => {
const testError = new Error('Test error');
const result = await runTriggerHelper('manual');
// emitError must reject the manualTriggerResponse promise.
getMockTriggerFunctions()?.emitError?.(testError);
await expect(result?.manualTriggerResponse).rejects.toThrow(testError);
});
it('should handle response promise', async () => {
const responsePromise = { resolve: jest.fn(), reject: jest.fn() };
await runTriggerHelper('manual');
// Emitting with a responsePromise registers a sendResponse hook that
// forwards the response to responsePromise.resolve.
getMockTriggerFunctions()?.emit?.(mockEmitData, responsePromise);
expect(hookFunctions.sendResponse?.length).toBe(1);
await hookFunctions.sendResponse![0]?.({ testResponse: true });
expect(responsePromise.resolve).toHaveBeenCalledWith({ testResponse: true });
});
it('should handle both response and done promises', async () => {
const responsePromise = { resolve: jest.fn(), reject: jest.fn() };
const donePromise = { resolve: jest.fn(), reject: jest.fn() };
const mockRunData = mock<IRun>({ data: { resultData: { runData: {} } } });
await runTriggerHelper('manual');
getMockTriggerFunctions()?.emit?.(mockEmitData, responsePromise, donePromise);
await hookFunctions.sendResponse![0]?.({ testResponse: true });
expect(responsePromise.resolve).toHaveBeenCalledWith({ testResponse: true });
// The donePromise resolves once workflow execution finishes.
await hookFunctions.workflowExecuteAfter?.[0]?.(mockRunData, {});
expect(donePromise.resolve).toHaveBeenCalledWith(mockRunData);
});
});
});
describe('runPoll()', () => {
const pollFunctions = mock<IPollFunctions>();
const pollFn = jest.fn();
const runPollHelper = async () =>
await triggersAndPollers.runPoll(workflow, node, pollFunctions);
it('should throw error if node type does not have poll function', async () => {
// nodeType.poll is undefined here.
await expect(runPollHelper()).rejects.toThrow(ApplicationError);
});
it('should call poll function and return result', async () => {
const mockPollResult: INodeExecutionData[][] = [[{ json: { data: 'test' } }]];
nodeType.poll = pollFn;
pollFn.mockResolvedValue(mockPollResult);
const result = await runPollHelper();
expect(pollFn).toHaveBeenCalled();
expect(result).toBe(mockPollResult);
});
it('should return null if poll function returns no data', async () => {
nodeType.poll = pollFn;
pollFn.mockResolvedValue(null);
const result = await runPollHelper();
expect(pollFn).toHaveBeenCalled();
expect(result).toBeNull();
});
it('should propagate errors from poll function', async () => {
nodeType.poll = pollFn;
pollFn.mockRejectedValue(new Error('Poll function failed'));
await expect(runPollHelper()).rejects.toThrow('Poll function failed');
expect(pollFn).toHaveBeenCalled();
});
});
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,669 @@
{
"name": "My workflow 105",
"nodes": [
{
"parameters": {},
"id": "a94bc1fb-1f39-404b-b149-a76c4fbaed25",
"name": "When clicking \"Execute Workflow\"",
"type": "n8n-nodes-base.manualTrigger",
"typeVersion": 1,
"position": [-60, 780]
},
{
"parameters": {
"fields": {
"values": [
{
"name": "originalName",
"stringValue": "={{ $('Mock Data').item.json.name }}"
}
]
},
"options": {}
},
"id": "6ba26bdf-91e2-4f18-8f4c-09e98aa4a9df",
"name": "Success",
"type": "n8n-nodes-base.set",
"typeVersion": 3.2,
"position": [820, 1180]
},
{
"parameters": {
"fields": {
"values": [
{
"name": "originalName",
"stringValue": "={{ $('Mock Data').item.json.name }}"
}
]
},
"options": {}
},
"id": "e3d1eadf-0994-4806-97ce-c5c5f673c624",
"name": "Error",
"type": "n8n-nodes-base.set",
"typeVersion": 3.2,
"position": [820, 1360]
},
{
"parameters": {
"jsCode": "return [\n {\n \"id\": \"23423532\",\n \"name\": \"Jay Gatsby\",\n \"email\": \"gatsby@west-egg.com\",\n \"notes\": \"Keeps asking about a green light??\",\n \"country\": \"US\",\n \"created\": \"1925-04-10\"\n },\n {\n \"id\": \"23423533\",\n \"name\": \"José Arcadio Buendía\",\n \"email\": \"jab@macondo.co\",\n \"notes\": \"Lots of people named after him. Very confusing\",\n \"country\": \"CO\",\n \"created\": \"1967-05-05\"\n },\n {\n \"id\": \"23423534\",\n \"name\": \"Max Sendak\",\n \"email\": \"info@in-and-out-of-weeks.org\",\n \"notes\": \"Keeps rolling his terrible eyes\",\n \"country\": \"US\",\n \"created\": \"1963-04-09\"\n },\n {\n \"id\": \"23423535\",\n \"name\": \"Zaphod Beeblebrox\",\n \"email\": \"captain@heartofgold.com\",\n \"notes\": \"Felt like I was talking to more than one person\",\n \"country\": null,\n \"created\": \"1979-10-12\"\n },\n {\n \"id\": \"23423536\",\n \"name\": \"Edmund Pevensie\",\n \"email\": \"edmund@narnia.gov\",\n \"notes\": \"Passionate sailor\",\n \"country\": \"UK\",\n \"created\": \"1950-10-16\"\n }\n]"
},
"id": "01adfc2d-141d-4843-b2d6-04115a476bc1",
"name": "Mock Data",
"type": "n8n-nodes-base.code",
"typeVersion": 2,
"position": [160, 780]
},
{
"parameters": {
"content": "## On Error: Continue (using error output)",
"height": 414,
"width": 564
},
"id": "8ca689eb-7910-43ad-bd10-fae35a8fc203",
"name": "Sticky Note",
"type": "n8n-nodes-base.stickyNote",
"typeVersion": 1,
"position": [460, 1100]
},
{
"parameters": {
"content": "## Continue On Fail (deprecated)",
"height": 279,
"width": 564
},
"id": "a17460d6-b0c0-432d-ac6f-8ff684357c8d",
"name": "Sticky Note1",
"type": "n8n-nodes-base.stickyNote",
"typeVersion": 1,
"position": [460, 460]
},
{
"parameters": {
"fields": {
"values": [
{
"name": "originalName",
"stringValue": "={{ $('Mock Data').item.json.name }}"
}
]
},
"options": {}
},
"id": "46df5463-4289-4e61-9f80-87e035931bda",
"name": "Combined",
"type": "n8n-nodes-base.set",
"typeVersion": 3.2,
"position": [800, 560]
},
{
"parameters": {
"mode": "runOnceForEachItem",
"jsCode": "// Add a new field called 'myNewField' to the JSON of the item\n$input.item.json.myNewField = 1;\n\nif ($input.item.json.country === 'US') {\n throw new Error('This is an error');\n}\n\nreturn $input.item;"
},
"id": "a4708520-aaca-4618-b7a2-94da268fba37",
"name": "Throw Error",
"type": "n8n-nodes-base.code",
"typeVersion": 2,
"position": [480, 1280],
"errorOutput": true,
"onError": "continueErrorOutput"
},
{
"parameters": {
"content": "## On Error: Continue",
"height": 279,
"width": 564
},
"id": "f0a450cd-4124-490d-964f-a71b645f770c",
"name": "Sticky Note2",
"type": "n8n-nodes-base.stickyNote",
"typeVersion": 1,
"position": [460, 780]
},
{
"parameters": {
"mode": "runOnceForEachItem",
"jsCode": "// Add a new field called 'myNewField' to the JSON of the item\n$input.item.json.myNewField = 1;\n\nif ($input.item.json.country === 'US') {\n throw new Error('This is an error');\n}\n\nreturn $input.item;"
},
"id": "823f12e6-cbfc-4545-8505-fab158d1effe",
"name": "Throw Error2",
"type": "n8n-nodes-base.code",
"typeVersion": 2,
"position": [500, 880],
"onError": "continueRegularOutput"
},
{
"parameters": {
"fields": {
"values": [
{
"name": "originalName",
"stringValue": "={{ $('Mock Data').item.json.name }}"
}
]
},
"options": {}
},
"id": "8f88d130-9a13-4236-81c0-157f8a8990c0",
"name": "Combined1",
"type": "n8n-nodes-base.set",
"typeVersion": 3.2,
"position": [800, 880]
},
{
"parameters": {
"mode": "runOnceForEachItem",
"jsCode": "// Add a new field called 'myNewField' to the JSON of the item\n$input.item.json.myNewField = 1;\n\nif ($input.item.json.country === 'US') {\n throw new Error('This is an error');\n}\n\nreturn $input.item;"
},
"id": "1a3f4beb-0d1e-44fe-a411-5bd1096ffd74",
"name": "Throw Error1",
"type": "n8n-nodes-base.code",
"typeVersion": 2,
"position": [500, 560],
"continueOnFail": true
},
{
"parameters": {
"fields": {
"values": [
{
"name": "originalName",
"stringValue": "={{ $('Mock Data').item.json.name }}"
}
]
},
"options": {}
},
"id": "c617a3d7-15e3-49b4-a7dd-d45c5e059a22",
"name": "Success1",
"type": "n8n-nodes-base.set",
"typeVersion": 3.2,
"position": [820, 1640]
},
{
"parameters": {
"content": "## On Error: Continue (using error output) + Make sure error data gets removed",
"height": 509.71047006830065,
"width": 1183.725293692246
},
"id": "046de2cf-970a-4925-b87d-16e8cca511fd",
"name": "Sticky Note3",
"type": "n8n-nodes-base.stickyNote",
"typeVersion": 1,
"position": [460, 1560]
},
{
"parameters": {
"mode": "runOnceForEachItem",
"jsCode": "// Add a new field called 'myNewField' to the JSON of the item\n$input.item.json.myNewField = 1;\n\nif ($input.item.json.country === 'US') {\n throw new Error('This is an error');\n}\n\nreturn $input.item;"
},
"id": "9ec21de1-dfca-4fff-b5a7-a56364239d7b",
"name": "Throw Error3",
"type": "n8n-nodes-base.code",
"typeVersion": 2,
"position": [480, 1740],
"errorOutput": true,
"onError": "continueErrorOutput"
},
{
"parameters": {
"options": {}
},
"id": "e3605953-75cf-4036-99f7-05e3971a6a75",
"name": "Edit Fields",
"type": "n8n-nodes-base.set",
"typeVersion": 3.2,
"position": [1040, 1820],
"onError": "continueErrorOutput"
},
{
"parameters": {
"fields": {
"values": [
{
"name": "originalName",
"stringValue": "={{ $('Mock Data').item.json.name }}"
}
]
},
"options": {}
},
"id": "a71cfb77-adfd-4c77-9a8e-7e58cbd0931b",
"name": "Success2",
"type": "n8n-nodes-base.set",
"typeVersion": 3.2,
"position": [1320, 1680]
},
{
"parameters": {
"fields": {
"values": [
{
"name": "originalName",
"stringValue": "={{ $('Mock Data').item.json.name }}"
}
]
},
"options": {}
},
"id": "ea9d02e9-1716-4f69-a14a-9133f5184886",
"name": "Error2",
"type": "n8n-nodes-base.set",
"typeVersion": 3.2,
"position": [1320, 1900]
},
{
"parameters": {
"fields": {
"values": [
{
"name": "originalName",
"stringValue": "={{ $('Mock Data').item.json.name }}"
}
]
},
"options": {}
},
"id": "17780679-f7a3-4b1b-b6ee-f3f61e0843ad",
"name": "Error1",
"type": "n8n-nodes-base.set",
"typeVersion": 3.2,
"position": [820, 1820]
}
],
"pinData": {
"Combined": [
{
"json": {
"error": "This is an error [line 5, for item 0]",
"originalName": "Jay Gatsby"
}
},
{
"json": {
"id": "23423533",
"name": "José Arcadio Buendía",
"email": "jab@macondo.co",
"notes": "Lots of people named after him. Very confusing",
"country": "CO",
"created": "1967-05-05",
"myNewField": 1,
"originalName": "José Arcadio Buendía"
}
},
{
"json": {
"error": "This is an error [line 5, for item 2]",
"originalName": "Max Sendak"
}
},
{
"json": {
"id": "23423535",
"name": "Zaphod Beeblebrox",
"email": "captain@heartofgold.com",
"notes": "Felt like I was talking to more than one person",
"country": null,
"created": "1979-10-12",
"myNewField": 1,
"originalName": "Zaphod Beeblebrox"
}
},
{
"json": {
"id": "23423536",
"name": "Edmund Pevensie",
"email": "edmund@narnia.gov",
"notes": "Passionate sailor",
"country": "UK",
"created": "1950-10-16",
"myNewField": 1,
"originalName": "Edmund Pevensie"
}
}
],
"Combined1": [
{
"json": {
"error": "This is an error [line 5, for item 0]",
"originalName": "Jay Gatsby"
}
},
{
"json": {
"id": "23423533",
"name": "José Arcadio Buendía",
"email": "jab@macondo.co",
"notes": "Lots of people named after him. Very confusing",
"country": "CO",
"created": "1967-05-05",
"myNewField": 1,
"originalName": "José Arcadio Buendía"
}
},
{
"json": {
"error": "This is an error [line 5, for item 2]",
"originalName": "Max Sendak"
}
},
{
"json": {
"id": "23423535",
"name": "Zaphod Beeblebrox",
"email": "captain@heartofgold.com",
"notes": "Felt like I was talking to more than one person",
"country": null,
"created": "1979-10-12",
"myNewField": 1,
"originalName": "Zaphod Beeblebrox"
}
},
{
"json": {
"id": "23423536",
"name": "Edmund Pevensie",
"email": "edmund@narnia.gov",
"notes": "Passionate sailor",
"country": "UK",
"created": "1950-10-16",
"myNewField": 1,
"originalName": "Edmund Pevensie"
}
}
],
"Success1": [
{
"json": {
"id": "23423533",
"name": "José Arcadio Buendía",
"email": "jab@macondo.co",
"notes": "Lots of people named after him. Very confusing",
"country": "CO",
"created": "1967-05-05",
"myNewField": 1,
"originalName": "José Arcadio Buendía"
}
},
{
"json": {
"id": "23423535",
"name": "Zaphod Beeblebrox",
"email": "captain@heartofgold.com",
"notes": "Felt like I was talking to more than one person",
"country": null,
"created": "1979-10-12",
"myNewField": 1,
"originalName": "Zaphod Beeblebrox"
}
},
{
"json": {
"id": "23423536",
"name": "Edmund Pevensie",
"email": "edmund@narnia.gov",
"notes": "Passionate sailor",
"country": "UK",
"created": "1950-10-16",
"myNewField": 1,
"originalName": "Edmund Pevensie"
}
}
],
"Error1": [
{
"json": {
"id": "23423532",
"name": "Jay Gatsby",
"email": "gatsby@west-egg.com",
"notes": "Keeps asking about a green light??",
"country": "US",
"created": "1925-04-10",
"error": "This is an error [line 5, for item 0]",
"originalName": "Jay Gatsby"
}
},
{
"json": {
"id": "23423534",
"name": "Max Sendak",
"email": "info@in-and-out-of-weeks.org",
"notes": "Keeps rolling his terrible eyes",
"country": "US",
"created": "1963-04-09",
"error": "This is an error [line 5, for item 2]",
"originalName": "Max Sendak"
}
}
],
"Success2": [
{
"json": {
"id": "23423532",
"name": "Jay Gatsby",
"email": "gatsby@west-egg.com",
"notes": "Keeps asking about a green light??",
"country": "US",
"created": "1925-04-10",
"error": "This is an error [line 5, for item 0]",
"originalName": "Jay Gatsby"
}
},
{
"json": {
"id": "23423534",
"name": "Max Sendak",
"email": "info@in-and-out-of-weeks.org",
"notes": "Keeps rolling his terrible eyes",
"country": "US",
"created": "1963-04-09",
"error": "This is an error [line 5, for item 2]",
"originalName": "Max Sendak"
}
}
],
"Error": [
{
"json": {
"id": "23423532",
"name": "Jay Gatsby",
"email": "gatsby@west-egg.com",
"notes": "Keeps asking about a green light??",
"country": "US",
"created": "1925-04-10",
"error": "This is an error [line 5, for item 0]",
"originalName": "Jay Gatsby"
}
},
{
"json": {
"id": "23423534",
"name": "Max Sendak",
"email": "info@in-and-out-of-weeks.org",
"notes": "Keeps rolling his terrible eyes",
"country": "US",
"created": "1963-04-09",
"error": "This is an error [line 5, for item 2]",
"originalName": "Max Sendak"
}
}
],
"Success": [
{
"json": {
"id": "23423533",
"name": "José Arcadio Buendía",
"email": "jab@macondo.co",
"notes": "Lots of people named after him. Very confusing",
"country": "CO",
"created": "1967-05-05",
"myNewField": 1,
"originalName": "José Arcadio Buendía"
}
},
{
"json": {
"id": "23423535",
"name": "Zaphod Beeblebrox",
"email": "captain@heartofgold.com",
"notes": "Felt like I was talking to more than one person",
"country": null,
"created": "1979-10-12",
"myNewField": 1,
"originalName": "Zaphod Beeblebrox"
}
},
{
"json": {
"id": "23423536",
"name": "Edmund Pevensie",
"email": "edmund@narnia.gov",
"notes": "Passionate sailor",
"country": "UK",
"created": "1950-10-16",
"myNewField": 1,
"originalName": "Edmund Pevensie"
}
}
]
},
"connections": {
"When clicking \"Execute Workflow\"": {
"main": [
[
{
"node": "Mock Data",
"type": "main",
"index": 0
}
]
]
},
"Mock Data": {
"main": [
[
{
"node": "Throw Error",
"type": "main",
"index": 0
},
{
"node": "Throw Error2",
"type": "main",
"index": 0
},
{
"node": "Throw Error1",
"type": "main",
"index": 0
},
{
"node": "Throw Error3",
"type": "main",
"index": 0
}
]
]
},
"Throw Error": {
"main": [
[
{
"node": "Success",
"type": "main",
"index": 0
}
],
[
{
"node": "Error",
"type": "main",
"index": 0
}
]
]
},
"Throw Error2": {
"main": [
[
{
"node": "Combined1",
"type": "main",
"index": 0
}
]
]
},
"Throw Error1": {
"main": [
[
{
"node": "Combined",
"type": "main",
"index": 0
}
]
]
},
"Throw Error3": {
"main": [
[
{
"node": "Success1",
"type": "main",
"index": 0
}
],
[
{
"node": "Error1",
"type": "main",
"index": 0
}
]
]
},
"Edit Fields": {
"main": [
[
{
"node": "Success2",
"type": "main",
"index": 0
}
],
[
{
"node": "Error2",
"type": "main",
"index": 0
}
]
]
},
"Error1": {
"main": [
[
{
"node": "Edit Fields",
"type": "main",
"index": 0
}
]
]
}
},
"active": false,
"settings": {
"executionOrder": "v1"
},
"versionId": "94aaa2ce-558a-4fed-948a-09860174272a",
"meta": {
"templateCredsSetupCompleted": true,
"instanceId": "27cc9b56542ad45b38725555722c50a1c3fee1670bbb67980558314ee08517c4"
},
"id": "FJvJXVvjM5rw3sUM",
"tags": []
}

View File

@@ -0,0 +1,565 @@
{
"name": "paired items fix",
"nodes": [
{
"parameters": {
"values": {
"string": [
{
"name": "setting",
"value": "hello"
}
]
},
"options": {}
},
"name": "Set",
"type": "n8n-nodes-base.set",
"typeVersion": 1,
"position": [500, 680],
"id": "18333790-db22-4235-92e6-b7dec8c20b77"
},
{
"parameters": {
"conditions": {
"boolean": [
{
"value1": true
}
]
}
},
"id": "4d4af5e5-860d-416f-b2d7-f0f87f380355",
"name": "IF",
"type": "n8n-nodes-base.if",
"typeVersion": 1,
"position": [1080, 500],
"alwaysOutputData": true
},
{
"parameters": {
"values": {
"string": [
{
"value": "={{ $('Set').item.json }}"
}
]
},
"options": {}
},
"id": "26569caf-084d-4d5b-a575-c8e439358d10",
"name": "Set1",
"type": "n8n-nodes-base.set",
"typeVersion": 2,
"position": [1340, 480]
},
{
"parameters": {},
"id": "f4f91c8c-e695-422b-97ad-802b10c7d868",
"name": "When clicking \"Execute Workflow\"",
"type": "n8n-nodes-base.manualTrigger",
"typeVersion": 1,
"position": [200, 980]
},
{
"parameters": {
"jsCode": "return [\n {\n 'thing': 1,\n 'letter': 'a'\n },\n {\n 'thing': 2,\n 'letter': 'b'\n },\n {\n 'thing': 3,\n 'letter': 'c'\n }\n]"
},
"id": "5eb81a1f-b845-408a-9fcc-e75e607212fa",
"name": "Code",
"type": "n8n-nodes-base.code",
"typeVersion": 1,
"position": [840, 500]
},
{
"parameters": {
"functionCode": "return [\n {\n 'number': 1,\n 'letter': 'a'\n },\n {\n 'number': 2,\n 'letter': 'b'\n },\n {\n 'number': 3,\n 'letter': 'c'\n }\n]"
},
"name": "Generate new items",
"type": "n8n-nodes-base.function",
"typeVersion": 1,
"position": [840, 860],
"id": "dd5d92f2-5893-4591-9f22-051f50e1b348"
},
{
"parameters": {
"values": {
"number": [
{
"name": "numberOriginal",
"value": "={{ $('Generate new items').item.json.number }}"
}
],
"string": [
{
"name": "letterOriginal",
"value": "={{ $('Generate new items').item.json.letter }}"
}
]
},
"options": {}
},
"id": "ebb23410-831b-4f8f-834a-0ca22eb7c050",
"name": "Set3",
"type": "n8n-nodes-base.set",
"typeVersion": 2,
"position": [1320, 860]
},
{
"parameters": {
"functionCode": "return [\n {\n 'json': {\n 'originalItem': 'third'\n },\n 'pairedItem': 2\n },\n {\n 'json': {\n 'originalItem': 'first'\n },\n 'pairedItem': 0\n },\n {\n 'json': {\n 'originalItem': 'second'\n },\n 'pairedItem': 1\n }\n]"
},
"name": "Mix up pairing",
"type": "n8n-nodes-base.function",
"typeVersion": 1,
"position": [1080, 860],
"id": "33ee2a0e-edc9-4197-94a2-4f77735240ff"
},
{
"parameters": {
"content": "### Always output data & multiple possible output resolve which are identical",
"height": 258,
"width": 855
},
"id": "3dc9ccfa-ef78-4022-8bbc-45ef8eb3a207",
"name": "Sticky Note1",
"type": "n8n-nodes-base.stickyNote",
"typeVersion": 1,
"position": [780, 400]
},
{
"parameters": {
"operation": "getAllPeople"
},
"id": "5ba8d43b-9fa3-4ba0-9c08-3199a9d2d602",
"name": "cuctomers",
"type": "n8n-nodes-base.n8nTrainingCustomerDatastore",
"typeVersion": 1,
"position": [640, 1340]
},
{
"parameters": {
"options": {}
},
"id": "00114764-691d-40b4-ae11-c5206a9448e3",
"name": "result",
"type": "n8n-nodes-base.set",
"typeVersion": 2,
"position": [1380, 1180],
"alwaysOutputData": true
},
{
"parameters": {
"jsCode": "// Loop over input items and add a new field called 'myNewField' to the JSON of each one\nconst data = [];\nfor (const [index, entry] of $input.all().entries()) {\n entry.json.myNewField = index;\n entry.pairedItem = 0;\n data.push(entry);\n}\n\nreturn data;"
},
"id": "d3aa3bc3-3e5a-42d2-a26d-ea0c273ea3e8",
"name": "changePairedindex",
"type": "n8n-nodes-base.code",
"typeVersion": 1,
"position": [920, 1180]
},
{
"parameters": {
"keepOnlySet": true,
"values": {
"string": [
{
"name": "=nameOriginalItem",
"value": "={{ $('cuctomers').item.json.name }}"
},
{
"name": "name",
"value": "={{ $json.name }}"
}
],
"boolean": [
{
"name": "test",
"value": "={{ $('cuctomers').item.json.id === $json.id }}"
}
]
},
"options": {}
},
"id": "af18482d-4a88-4ffb-b6e3-67be45cdfad1",
"name": "checkWithOriginal",
"type": "n8n-nodes-base.set",
"typeVersion": 2,
"position": [1140, 1180]
},
{
"parameters": {
"options": {}
},
"id": "3bad8f81-2fac-4e6e-bb7c-3a4921674005",
"name": "loop",
"type": "n8n-nodes-base.splitInBatches",
"typeVersion": 2,
"position": [920, 1420]
},
{
"parameters": {
"keepOnlySet": true,
"values": {
"string": [
{
"name": "=nameOriginalItem",
"value": "={{ $('cuctomers').item.json.name }}"
},
{
"name": "name",
"value": "={{ $json.name }}"
}
],
"boolean": [
{
"name": "test",
"value": "={{ $('cuctomers').item.json.id === $json.id }}"
}
]
},
"options": {}
},
"id": "865691b7-e4b8-487e-a5ec-80387118ea61",
"name": "testAfterLoop",
"type": "n8n-nodes-base.set",
"typeVersion": 2,
"position": [1180, 1620]
},
{
"parameters": {
"options": {}
},
"id": "8c5d3a1c-e34b-4937-bc22-88b418391002",
"name": "result1",
"type": "n8n-nodes-base.set",
"typeVersion": 2,
"position": [1380, 1620],
"alwaysOutputData": true
},
{
"parameters": {
"jsCode": "// Loop over input items and add a new field called 'myNewField' to the JSON of each one\nconst data = [];\nfor (const [index, entry] of $input.all().entries()) {\n entry.json.myNewField = index;\n entry.pairedItem = 0;\n data.push(entry);\n}\n\nreturn data;"
},
"id": "ad476a3a-d491-406f-903d-022cb0f0ef3c",
"name": "changePairedindex1",
"type": "n8n-nodes-base.code",
"typeVersion": 1,
"position": [1380, 1400]
}
],
"pinData": {
"Set3": [
{
"json": {
"originalItem": "third",
"numberOriginal": 3,
"letterOriginal": "c"
},
"pairedItem": {
"item": 0
}
},
{
"json": {
"originalItem": "first",
"numberOriginal": 1,
"letterOriginal": "a"
},
"pairedItem": {
"item": 1
}
},
{
"json": {
"originalItem": "second",
"numberOriginal": 2,
"letterOriginal": "b"
},
"pairedItem": {
"item": 2
}
}
],
"Set1": [
{
"json": {
"propertyName": {
"setting": "hello"
}
},
"pairedItem": {
"item": 0
}
}
],
"result": [
{
"json": {
"test": true,
"nameOriginalItem": "Jay Gatsby",
"name": "Jay Gatsby"
},
"pairedItem": {
"item": 0
}
},
{
"json": {
"test": false,
"nameOriginalItem": "Jay Gatsby",
"name": "José Arcadio Buendía"
},
"pairedItem": {
"item": 1
}
},
{
"json": {
"test": false,
"nameOriginalItem": "Jay Gatsby",
"name": "Max Sendak"
},
"pairedItem": {
"item": 2
}
},
{
"json": {
"test": false,
"nameOriginalItem": "Jay Gatsby",
"name": "Zaphod Beeblebrox"
},
"pairedItem": {
"item": 3
}
},
{
"json": {
"test": false,
"nameOriginalItem": "Jay Gatsby",
"name": "Edmund Pevensie"
},
"pairedItem": {
"item": 4
}
}
],
"result1": [
{
"json": {
"test": true,
"nameOriginalItem": "Jay Gatsby",
"name": "Jay Gatsby"
},
"pairedItem": {
"item": 0
}
},
{
"json": {
"test": false,
"nameOriginalItem": "Jay Gatsby",
"name": "José Arcadio Buendía"
},
"pairedItem": {
"item": 1
}
},
{
"json": {
"test": false,
"nameOriginalItem": "Jay Gatsby",
"name": "Max Sendak"
},
"pairedItem": {
"item": 2
}
},
{
"json": {
"test": false,
"nameOriginalItem": "Jay Gatsby",
"name": "Zaphod Beeblebrox"
},
"pairedItem": {
"item": 3
}
},
{
"json": {
"test": false,
"nameOriginalItem": "Jay Gatsby",
"name": "Edmund Pevensie"
},
"pairedItem": {
"item": 4
}
}
]
},
"connections": {
"Set": {
"main": [
[
{
"node": "Code",
"type": "main",
"index": 0
},
{
"node": "Generate new items",
"type": "main",
"index": 0
}
]
]
},
"IF": {
"main": [
[
{
"node": "Set1",
"type": "main",
"index": 0
}
]
]
},
"When clicking \"Execute Workflow\"": {
"main": [
[
{
"node": "Set",
"type": "main",
"index": 0
},
{
"node": "cuctomers",
"type": "main",
"index": 0
}
]
]
},
"Code": {
"main": [
[
{
"node": "IF",
"type": "main",
"index": 0
}
]
]
},
"Generate new items": {
"main": [
[
{
"node": "Mix up pairing",
"type": "main",
"index": 0
}
]
]
},
"Mix up pairing": {
"main": [
[
{
"node": "Set3",
"type": "main",
"index": 0
}
]
]
},
"cuctomers": {
"main": [
[
{
"node": "changePairedindex",
"type": "main",
"index": 0
},
{
"node": "loop",
"type": "main",
"index": 0
}
]
]
},
"changePairedindex": {
"main": [
[
{
"node": "checkWithOriginal",
"type": "main",
"index": 0
}
]
]
},
"checkWithOriginal": {
"main": [
[
{
"node": "result",
"type": "main",
"index": 0
}
]
]
},
"loop": {
"main": [
[
{
"node": "changePairedindex1",
"type": "main",
"index": 0
}
],
[
{
"node": "testAfterLoop",
"type": "main",
"index": 0
}
]
]
},
"testAfterLoop": {
"main": [
[
{
"node": "result1",
"type": "main",
"index": 0
}
]
]
},
"changePairedindex1": {
"main": [
[
{
"node": "loop",
"type": "main",
"index": 0
}
]
]
}
},
"active": false,
"settings": {},
"versionId": "6f6ee01c-8c99-493f-a30c-6a5ed2b71750",
"id": "169",
"meta": {
"instanceId": "36203ea1ce3cef713fa25999bd9874ae26b9e4c2c3a90a365f2882a154d031d0"
},
"tags": []
}

View File

@@ -0,0 +1,242 @@
import { Service } from '@n8n/di';
import type {
IGetExecutePollFunctions,
IGetExecuteTriggerFunctions,
INode,
ITriggerResponse,
IWorkflowExecuteAdditionalData,
TriggerTime,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import {
ApplicationError,
toCronExpression,
TriggerCloseError,
WorkflowActivationError,
WorkflowDeactivationError,
} from 'n8n-workflow';
import { ErrorReporter } from '@/errors/error-reporter';
import type { IWorkflowData } from '@/interfaces';
import { Logger } from '@/logging/logger';
import { ScheduledTaskManager } from './scheduled-task-manager';
import { TriggersAndPollers } from './triggers-and-pollers';
@Service()
export class ActiveWorkflows {
	constructor(
		private readonly logger: Logger,
		private readonly scheduledTaskManager: ScheduledTaskManager,
		private readonly triggersAndPollers: TriggersAndPollers,
		private readonly errorReporter: ErrorReporter,
	) {}

	// In-memory registry of workflows activated in this process, keyed by workflow ID.
	// Each entry keeps the trigger responses so their close functions can be called on removal.
	private activeWorkflows: { [workflowId: string]: IWorkflowData } = {};

	/**
	 * Returns if the workflow is active in memory.
	 */
	isActive(workflowId: string) {
		return this.activeWorkflows.hasOwnProperty(workflowId);
	}

	/**
	 * Returns the IDs of the currently active workflows in memory.
	 */
	allActiveWorkflows() {
		return Object.keys(this.activeWorkflows);
	}

	/**
	 * Returns the workflow data for the given ID if currently active in memory.
	 */
	get(workflowId: string) {
		return this.activeWorkflows[workflowId];
	}

	/**
	 * Makes a workflow active: activates all trigger nodes, registers the
	 * workflow in memory, then activates polling for all poll nodes.
	 *
	 * @param workflowId The id of the workflow to activate
	 * @param workflow The workflow to activate
	 * @param additionalData The additional data which is needed to run workflows
	 * @param mode The execution mode the triggers/pollers are activated with
	 * @param activation The way the workflow is being activated
	 * @param getTriggerFunctions Factory for the context passed to trigger nodes
	 * @param getPollFunctions Factory for the context passed to poll nodes
	 * @throws WorkflowActivationError if any trigger or poller fails to activate
	 */
	async add(
		workflowId: string,
		workflow: Workflow,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		activation: WorkflowActivateMode,
		getTriggerFunctions: IGetExecuteTriggerFunctions,
		getPollFunctions: IGetExecutePollFunctions,
	) {
		const triggerNodes = workflow.getTriggerNodes();
		const triggerResponses: ITriggerResponse[] = [];
		// Activate every trigger node first; a failure on any of them aborts the whole
		// activation before the workflow is registered as active.
		// NOTE(review): triggers activated earlier in this loop are not closed when a
		// later one fails — verify cleanup is handled upstream.
		for (const triggerNode of triggerNodes) {
			try {
				const triggerResponse = await this.triggersAndPollers.runTrigger(
					workflow,
					triggerNode,
					getTriggerFunctions,
					additionalData,
					mode,
					activation,
				);
				// Not every trigger returns a response object
				if (triggerResponse !== undefined) {
					triggerResponses.push(triggerResponse);
				}
			} catch (e) {
				// Normalize non-Error throwables before wrapping
				const error = e instanceof Error ? e : new Error(`${e}`);
				throw new WorkflowActivationError(
					`There was a problem activating the workflow: "${error.message}"`,
					{ cause: error, node: triggerNode },
				);
			}
		}
		// Register as active before setting up polling, so the workflow is already
		// visible as active while pollers start up.
		this.activeWorkflows[workflowId] = { triggerResponses };
		const pollingNodes = workflow.getPollNodes();
		if (pollingNodes.length === 0) return;
		for (const pollNode of pollingNodes) {
			try {
				await this.activatePolling(
					pollNode,
					workflow,
					additionalData,
					getPollFunctions,
					mode,
					activation,
				);
			} catch (e) {
				// Do not mark this workflow as active if there are no triggerResponses, and any polling activation failed
				if (triggerResponses.length === 0) {
					delete this.activeWorkflows[workflowId];
				}
				const error = e instanceof Error ? e : new Error(`${e}`);
				throw new WorkflowActivationError(
					`There was a problem activating the workflow: "${error.message}"`,
					{ cause: error, node: pollNode },
				);
			}
		}
	}

	/**
	 * Activates polling for the given node: runs the poll once immediately (so
	 * activation fails loudly if the poll is broken), then registers a cron job
	 * for every configured poll time.
	 *
	 * @throws ApplicationError if a cron expression would fire more than once a minute
	 */
	private async activatePolling(
		node: INode,
		workflow: Workflow,
		additionalData: IWorkflowExecuteAdditionalData,
		getPollFunctions: IGetExecutePollFunctions,
		mode: WorkflowExecuteMode,
		activation: WorkflowActivateMode,
	): Promise<void> {
		const pollFunctions = getPollFunctions(workflow, node, additionalData, mode, activation);
		const pollTimes = pollFunctions.getNodeParameter('pollTimes') as unknown as {
			item: TriggerTime[];
		};
		// Get all the trigger times
		const cronTimes = (pollTimes.item || []).map(toCronExpression);
		// The trigger function to execute when the cron-time got reached
		const executeTrigger = async (testingTrigger = false) => {
			this.logger.debug(`Polling trigger initiated for workflow "${workflow.name}"`, {
				workflowName: workflow.name,
				workflowId: workflow.id,
			});
			try {
				const pollResponse = await this.triggersAndPollers.runPoll(workflow, node, pollFunctions);
				// A null response means there is nothing new to emit
				if (pollResponse !== null) {
					pollFunctions.__emit(pollResponse);
				}
			} catch (error) {
				// If the poll function fails in the first activation
				// throw the error back so we let the user know there is
				// an issue with the trigger.
				if (testingTrigger) {
					throw error;
				}
				// On scheduled runs, surface the error through the poll context instead
				pollFunctions.__emitError(error as Error);
			}
		};
		// Execute the trigger directly to be able to know if it works
		await executeTrigger(true);
		for (const cronTime of cronTimes) {
			const cronTimeParts = cronTime.split(' ');
			// Reject sub-minute schedules: a '*' in the seconds field would poll every second
			if (cronTimeParts.length > 0 && cronTimeParts[0].includes('*')) {
				throw new ApplicationError(
					'The polling interval is too short. It has to be at least a minute.',
				);
			}
			this.scheduledTaskManager.registerCron(workflow, cronTime, executeTrigger);
		}
	}

	/**
	 * Makes a workflow inactive in memory: deregisters its cron jobs, closes all
	 * trigger responses, and removes it from the registry.
	 *
	 * @returns true if the workflow was active and has been removed, false otherwise
	 */
	async remove(workflowId: string) {
		if (!this.isActive(workflowId)) {
			this.logger.warn(`Cannot deactivate already inactive workflow ID "${workflowId}"`);
			return false;
		}
		this.scheduledTaskManager.deregisterCrons(workflowId);
		const w = this.activeWorkflows[workflowId];
		for (const r of w.triggerResponses ?? []) {
			await this.closeTrigger(r, workflowId);
		}
		delete this.activeWorkflows[workflowId];
		return true;
	}

	/** Deactivates every workflow currently active in memory. */
	async removeAllTriggerAndPollerBasedWorkflows() {
		for (const workflowId of Object.keys(this.activeWorkflows)) {
			await this.remove(workflowId);
		}
	}

	/**
	 * Invokes the trigger's close function, if any. A TriggerCloseError is logged
	 * and reported but swallowed so deactivation can continue; any other error is
	 * rethrown as a WorkflowDeactivationError.
	 */
	private async closeTrigger(response: ITriggerResponse, workflowId: string) {
		if (!response.closeFunction) return;
		try {
			await response.closeFunction();
		} catch (e) {
			if (e instanceof TriggerCloseError) {
				this.logger.error(
					`There was a problem calling "closeFunction" on "${e.node.name}" in workflow "${workflowId}"`,
				);
				this.errorReporter.error(e, { extra: { workflowId } });
				return;
			}
			const error = e instanceof Error ? e : new Error(`${e}`);
			throw new WorkflowDeactivationError(
				`Failed to deactivate trigger of workflow ID "${workflowId}": "${error.message}"`,
				{ cause: error, workflowId },
			);
		}
	}
}

View File

@@ -0,0 +1,6 @@
export * from './active-workflows';
export * from './routing-node';
export * from './node-execution-context';
export * from './partial-execution-utils';
export * from './node-execution-context/utils/execution-metadata';
export * from './workflow-execute';

View File

@@ -0,0 +1,218 @@
import { mock } from 'jest-mock-extended';
import type {
INode,
IWorkflowExecuteAdditionalData,
IRunExecutionData,
INodeExecutionData,
ITaskDataConnections,
IExecuteData,
Workflow,
WorkflowExecuteMode,
ICredentialsHelper,
Expression,
INodeType,
INodeTypes,
ICredentialDataDecryptedObject,
} from 'n8n-workflow';
import { ApplicationError, ExpressionError, NodeConnectionType } from 'n8n-workflow';
import { describeCommonTests } from './shared-tests';
import { ExecuteContext } from '../execute-context';
describe('ExecuteContext', () => {
	const testCredentialType = 'testCredential';

	// Node type declaring one required credential and one required parameter,
	// so credential and parameter lookups on the context can resolve.
	const nodeType = mock<INodeType>({
		description: {
			credentials: [
				{
					name: testCredentialType,
					required: true,
				},
			],
			properties: [
				{
					name: 'testParameter',
					required: true,
				},
			],
		},
	});
	const nodeTypes = mock<INodeTypes>();
	const expression = mock<Expression>();
	const workflow = mock<Workflow>({ expression, nodeTypes });

	// Node under test, wired to the credential type declared above
	const node = mock<INode>({
		name: 'Test Node',
		credentials: {
			[testCredentialType]: {
				id: 'testCredentialId',
			},
		},
	});
	// Assigned after mock creation so the parameters are plain data, not mock stubs
	node.parameters = {
		testParameter: 'testValue',
		nullParameter: null,
	};

	const credentialsHelper = mock<ICredentialsHelper>();
	const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
	const mode: WorkflowExecuteMode = 'manual';
	const runExecutionData = mock<IRunExecutionData>();
	const connectionInputData: INodeExecutionData[] = [];
	const inputData: ITaskDataConnections = { main: [[{ json: { test: 'data' } }]] };
	const executeData = mock<IExecuteData>();
	const runIndex = 0;
	const closeFn = jest.fn();
	const abortSignal = mock<AbortSignal>();

	// Single context instance shared by all tests below
	const executeContext = new ExecuteContext(
		workflow,
		node,
		additionalData,
		mode,
		runExecutionData,
		runIndex,
		connectionInputData,
		inputData,
		executeData,
		[closeFn],
		abortSignal,
	);

	beforeEach(() => {
		// Resolve the node type and make expression evaluation an identity function
		nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
		expression.getParameterValue.mockImplementation((value) => value);
	});

	// Behaviors shared by all node-execution contexts
	describeCommonTests(executeContext, {
		abortSignal,
		node,
		workflow,
		executeData,
		runExecutionData,
	});

	describe('getInputData', () => {
		const inputIndex = 0;
		const connectionType = NodeConnectionType.Main;

		afterEach(() => {
			// Restore the shared input data that some tests below mutate
			inputData[connectionType] = [[{ json: { test: 'data' } }]];
		});

		it('should return the input data correctly', () => {
			const expectedData = [{ json: { test: 'data' } }];
			expect(executeContext.getInputData(inputIndex, connectionType)).toEqual(expectedData);
		});

		it('should return an empty array if the input name does not exist', () => {
			const connectionType = 'nonExistent';
			expect(executeContext.getInputData(inputIndex, connectionType as NodeConnectionType)).toEqual(
				[],
			);
		});

		it('should throw an error if the input index is out of range', () => {
			const inputIndex = 2;
			expect(() => executeContext.getInputData(inputIndex, connectionType)).toThrow(
				ApplicationError,
			);
		});

		it('should throw an error if the input index was not set', () => {
			inputData.main[inputIndex] = null;
			expect(() => executeContext.getInputData(inputIndex, connectionType)).toThrow(
				ApplicationError,
			);
		});
	});

	describe('getNodeParameter', () => {
		beforeEach(() => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			expression.getParameterValue.mockImplementation((value) => value);
		});

		it('should throw if parameter is not defined on the node.parameters', () => {
			expect(() => executeContext.getNodeParameter('invalidParameter', 0)).toThrow(
				'Could not get parameter',
			);
		});

		it('should return null if the parameter exists but has a null value', () => {
			const parameter = executeContext.getNodeParameter('nullParameter', 0);
			expect(parameter).toBeNull();
		});

		it('should return parameter value when it exists', () => {
			const parameter = executeContext.getNodeParameter('testParameter', 0);
			expect(parameter).toBe('testValue');
		});

		it('should return the fallback value when the parameter does not exist', () => {
			const parameter = executeContext.getNodeParameter('otherParameter', 0, 'fallback');
			expect(parameter).toBe('fallback');
		});

		it('should handle expression evaluation errors', () => {
			const error = new ExpressionError('Invalid expression');
			expression.getParameterValue.mockImplementationOnce(() => {
				throw error;
			});
			expect(() => executeContext.getNodeParameter('testParameter', 0)).toThrow(error);
			// The failing parameter name is attached to the error context
			expect(error.context.parameter).toEqual('testParameter');
		});

		it('should handle expression errors on Set nodes (Ticket #PAY-684)', () => {
			// NOTE: mutates the shared node mock; later tests see a Set node with continueOnFail
			node.type = 'n8n-nodes-base.set';
			node.continueOnFail = true;
			expression.getParameterValue.mockImplementationOnce(() => {
				throw new ExpressionError('Invalid expression');
			});
			const parameter = executeContext.getNodeParameter('testParameter', 0);
			expect(parameter).toEqual([{ name: undefined, value: undefined }]);
		});
	});

	describe('getCredentials', () => {
		it('should get decrypted credentials', async () => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });

			const credentials = await executeContext.getCredentials<ICredentialDataDecryptedObject>(
				testCredentialType,
				0,
			);

			expect(credentials).toEqual({ secret: 'token' });
		});
	});

	describe('getExecuteData', () => {
		it('should return the execute data correctly', () => {
			expect(executeContext.getExecuteData()).toEqual(executeData);
		});
	});

	describe('getWorkflowDataProxy', () => {
		it('should return the workflow data proxy correctly', () => {
			const workflowDataProxy = executeContext.getWorkflowDataProxy(0);
			expect(workflowDataProxy.isProxy).toBe(true);
			expect(Object.keys(workflowDataProxy.$input)).toEqual([
				'all',
				'context',
				'first',
				'item',
				'last',
				'params',
			]);
		});
	});
});

View File

@@ -0,0 +1,199 @@
import { mock } from 'jest-mock-extended';
import type {
INode,
IWorkflowExecuteAdditionalData,
IRunExecutionData,
INodeExecutionData,
ITaskDataConnections,
IExecuteData,
Workflow,
WorkflowExecuteMode,
ICredentialsHelper,
Expression,
INodeType,
INodeTypes,
ICredentialDataDecryptedObject,
} from 'n8n-workflow';
import { ApplicationError, NodeConnectionType } from 'n8n-workflow';
import { describeCommonTests } from './shared-tests';
import { ExecuteSingleContext } from '../execute-single-context';
describe('ExecuteSingleContext', () => {
	const testCredentialType = 'testCredential';

	// Node type declaring one required credential and one required parameter,
	// so credential and parameter lookups on the context can resolve.
	const nodeType = mock<INodeType>({
		description: {
			credentials: [
				{
					name: testCredentialType,
					required: true,
				},
			],
			properties: [
				{
					name: 'testParameter',
					required: true,
				},
			],
		},
	});
	const nodeTypes = mock<INodeTypes>();
	const expression = mock<Expression>();
	const workflow = mock<Workflow>({ expression, nodeTypes });

	// Node under test, wired to the credential type declared above
	const node = mock<INode>({
		name: 'Test Node',
		credentials: {
			[testCredentialType]: {
				id: 'testCredentialId',
			},
		},
	});
	// Assigned after mock creation so the parameters are plain data, not mock stubs
	node.parameters = {
		testParameter: 'testValue',
	};

	const credentialsHelper = mock<ICredentialsHelper>();
	const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
	const mode: WorkflowExecuteMode = 'manual';
	const runExecutionData = mock<IRunExecutionData>();
	const connectionInputData: INodeExecutionData[] = [];
	const inputData: ITaskDataConnections = { main: [[{ json: { test: 'data' } }]] };
	const executeData = mock<IExecuteData>();
	const runIndex = 0;
	const itemIndex = 0;
	const abortSignal = mock<AbortSignal>();

	// Single-item context instance shared by all tests below
	const executeSingleContext = new ExecuteSingleContext(
		workflow,
		node,
		additionalData,
		mode,
		runExecutionData,
		runIndex,
		connectionInputData,
		inputData,
		itemIndex,
		executeData,
		abortSignal,
	);

	beforeEach(() => {
		// Resolve the node type and make expression evaluation an identity function
		nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
		expression.getParameterValue.mockImplementation((value) => value);
	});

	// Behaviors shared by all node-execution contexts
	describeCommonTests(executeSingleContext, {
		abortSignal,
		node,
		workflow,
		executeData,
		runExecutionData,
	});

	describe('getInputData', () => {
		const inputIndex = 0;
		const connectionType = NodeConnectionType.Main;

		afterEach(() => {
			// Restore the shared input data that some tests below mutate
			inputData[connectionType] = [[{ json: { test: 'data' } }]];
		});

		it('should return the input data correctly', () => {
			// Unlike ExecuteContext, a single item is returned rather than an array
			const expectedData = { json: { test: 'data' } };
			expect(executeSingleContext.getInputData(inputIndex, connectionType)).toEqual(expectedData);
		});

		it('should return an empty object if the input name does not exist', () => {
			const connectionType = 'nonExistent';
			const expectedData = { json: {} };
			expect(
				executeSingleContext.getInputData(inputIndex, connectionType as NodeConnectionType),
			).toEqual(expectedData);
		});

		it('should throw an error if the input index is out of range', () => {
			const inputIndex = 1;
			expect(() => executeSingleContext.getInputData(inputIndex, connectionType)).toThrow(
				ApplicationError,
			);
		});

		it('should throw an error if the input index was not set', () => {
			inputData.main[inputIndex] = null;
			expect(() => executeSingleContext.getInputData(inputIndex, connectionType)).toThrow(
				ApplicationError,
			);
		});

		it('should throw an error if the value of input with given index was not set', () => {
			delete inputData.main[inputIndex]![itemIndex];
			expect(() => executeSingleContext.getInputData(inputIndex, connectionType)).toThrow(
				ApplicationError,
			);
		});
	});

	describe('getItemIndex', () => {
		it('should return the item index correctly', () => {
			expect(executeSingleContext.getItemIndex()).toEqual(itemIndex);
		});
	});

	describe('getNodeParameter', () => {
		beforeEach(() => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			expression.getParameterValue.mockImplementation((value) => value);
		});

		it('should return parameter value when it exists', () => {
			const parameter = executeSingleContext.getNodeParameter('testParameter');
			expect(parameter).toBe('testValue');
		});

		it('should return the fallback value when the parameter does not exist', () => {
			const parameter = executeSingleContext.getNodeParameter('otherParameter', 'fallback');
			expect(parameter).toBe('fallback');
		});
	});

	describe('getCredentials', () => {
		it('should get decrypted credentials', async () => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });

			const credentials =
				await executeSingleContext.getCredentials<ICredentialDataDecryptedObject>(
					testCredentialType,
				);

			expect(credentials).toEqual({ secret: 'token' });
		});
	});

	describe('getExecuteData', () => {
		it('should return the execute data correctly', () => {
			expect(executeSingleContext.getExecuteData()).toEqual(executeData);
		});
	});

	describe('getWorkflowDataProxy', () => {
		it('should return the workflow data proxy correctly', () => {
			const workflowDataProxy = executeSingleContext.getWorkflowDataProxy();
			expect(workflowDataProxy.isProxy).toBe(true);
			expect(Object.keys(workflowDataProxy.$input)).toEqual([
				'all',
				'context',
				'first',
				'item',
				'last',
				'params',
			]);
		});
	});
});

View File

@@ -0,0 +1,147 @@
import { mock } from 'jest-mock-extended';
import type {
Expression,
ICredentialDataDecryptedObject,
ICredentialsHelper,
INode,
INodeType,
INodeTypes,
IWebhookDescription,
IWebhookData,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { ApplicationError } from 'n8n-workflow';
import { HookContext } from '../hook-context';
describe('HookContext', () => {
	const testCredentialType = 'testCredential';

	// Webhook description looked up via getWebhookDescription('default') below
	const webhookDescription: IWebhookDescription = {
		name: 'default',
		httpMethod: 'GET',
		responseMode: 'onReceived',
		path: 'testPath',
	};

	// Node type declaring one required credential and one required parameter
	const nodeType = mock<INodeType>({
		description: {
			credentials: [
				{
					name: testCredentialType,
					required: true,
				},
			],
			properties: [
				{
					name: 'testParameter',
					required: true,
				},
			],
		},
	});
	// Assigned after mock creation so the webhooks array is plain data, not a mock stub
	nodeType.description.webhooks = [webhookDescription];

	const nodeTypes = mock<INodeTypes>();
	const expression = mock<Expression>();
	const workflow = mock<Workflow>({ expression, nodeTypes });

	// Node under test, wired to the credential type declared above
	const node = mock<INode>({
		credentials: {
			[testCredentialType]: {
				id: 'testCredentialId',
			},
		},
	});
	node.parameters = {
		testParameter: 'testValue',
	};

	const credentialsHelper = mock<ICredentialsHelper>();
	const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
	const mode: WorkflowExecuteMode = 'manual';
	const activation: WorkflowActivateMode = 'init';
	const webhookData = mock<IWebhookData>({
		webhookDescription: {
			name: 'default',
			isFullPath: true,
		},
	});

	// Context instance shared by all tests below
	const hookContext = new HookContext(
		workflow,
		node,
		additionalData,
		mode,
		activation,
		webhookData,
	);

	beforeEach(() => {
		jest.clearAllMocks();
		// Resolve the node type and make expression evaluation an identity function
		nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
		expression.getParameterValue.mockImplementation((value) => value);
		expression.getSimpleParameterValue.mockImplementation((_, value) => value);
	});

	describe('getActivationMode', () => {
		it('should return the activation property', () => {
			const result = hookContext.getActivationMode();
			expect(result).toBe(activation);
		});
	});

	describe('getCredentials', () => {
		it('should get decrypted credentials', async () => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });

			const credentials =
				await hookContext.getCredentials<ICredentialDataDecryptedObject>(testCredentialType);

			expect(credentials).toEqual({ secret: 'token' });
		});
	});

	describe('getNodeParameter', () => {
		it('should return parameter value when it exists', () => {
			const parameter = hookContext.getNodeParameter('testParameter');
			expect(parameter).toBe('testValue');
		});
	});

	describe('getNodeWebhookUrl', () => {
		it('should return node webhook url', () => {
			const url = hookContext.getNodeWebhookUrl('default');
			// Only assert the path segment; the base URL comes from additionalData
			expect(url).toContain('testPath');
		});
	});

	describe('getWebhookName', () => {
		it('should return webhook name', () => {
			const name = hookContext.getWebhookName();
			expect(name).toBe('default');
		});

		it('should throw an error if webhookData is undefined', () => {
			// webhookData is an optional constructor argument; omit it here
			const hookContextWithoutWebhookData = new HookContext(
				workflow,
				node,
				additionalData,
				mode,
				activation,
			);

			expect(() => hookContextWithoutWebhookData.getWebhookName()).toThrow(ApplicationError);
		});
	});

	describe('getWebhookDescription', () => {
		it('should return webhook description', () => {
			const description = hookContext.getWebhookDescription('default');
			expect(description).toEqual<IWebhookDescription>(webhookDescription);
		});
	});
});

View File

@@ -0,0 +1,102 @@
import { mock } from 'jest-mock-extended';
import type {
Expression,
ICredentialDataDecryptedObject,
ICredentialsHelper,
INode,
INodeType,
INodeTypes,
IWorkflowExecuteAdditionalData,
Workflow,
} from 'n8n-workflow';
import { LoadOptionsContext } from '../load-options-context';
describe('LoadOptionsContext', () => {
	const credentialTypeName = 'testCredential';

	// Node type description declaring one required credential and one required parameter.
	const mockNodeType = mock<INodeType>({
		description: {
			credentials: [{ name: credentialTypeName, required: true }],
			properties: [{ name: 'testParameter', required: true }],
		},
	});

	const mockNodeTypes = mock<INodeTypes>();
	const mockExpression = mock<Expression>();
	const mockWorkflow = mock<Workflow>({ expression: mockExpression, nodeTypes: mockNodeTypes });

	// Node under test, wired to the credential type declared above.
	const mockNode = mock<INode>({
		credentials: { [credentialTypeName]: { id: 'testCredentialId' } },
	});
	mockNode.parameters = { testParameter: 'testValue' };

	const mockCredentialsHelper = mock<ICredentialsHelper>();
	const mockAdditionalData = mock<IWorkflowExecuteAdditionalData>({
		credentialsHelper: mockCredentialsHelper,
	});

	const parameterPath = 'testPath';
	const context = new LoadOptionsContext(mockWorkflow, mockNode, mockAdditionalData, parameterPath);

	// Makes workflow.nodeTypes resolve to the mocked node type above.
	const stubNodeTypeLookup = () => {
		mockNodeTypes.getByNameAndVersion.mockReturnValue(mockNodeType);
	};

	beforeEach(() => {
		jest.clearAllMocks();
	});

	describe('getCredentials', () => {
		it('should get decrypted credentials', async () => {
			stubNodeTypeLookup();
			mockCredentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });

			const credentials =
				await context.getCredentials<ICredentialDataDecryptedObject>(credentialTypeName);

			expect(credentials).toEqual({ secret: 'token' });
		});
	});

	describe('getCurrentNodeParameter', () => {
		beforeEach(stubNodeTypeLookup);

		it('should return parameter value when it exists', () => {
			mockAdditionalData.currentNodeParameters = { testParameter: 'testValue' };

			expect(context.getCurrentNodeParameter('testParameter')).toBe('testValue');
		});
	});

	describe('getNodeParameter', () => {
		beforeEach(() => {
			stubNodeTypeLookup();
			mockExpression.getParameterValue.mockImplementation((value) => value);
		});

		it('should return parameter value when it exists', () => {
			expect(context.getNodeParameter('testParameter')).toBe('testValue');
		});

		it('should return the fallback value when the parameter does not exist', () => {
			expect(context.getNodeParameter('otherParameter', 'fallback')).toBe('fallback');
		});
	});
});

View File

@@ -0,0 +1,338 @@
import { Container } from '@n8n/di';
import { mock } from 'jest-mock-extended';
import type {
Expression,
INode,
INodeType,
INodeTypes,
INodeExecutionData,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import { InstanceSettings } from '@/instance-settings';
import { NodeExecutionContext } from '../node-execution-context';
// Minimal concrete subclass so the (abstract-style) base context can be
// instantiated in tests without any subclass behavior of its own.
class TestContext extends NodeExecutionContext {}

describe('NodeExecutionContext', () => {
	// The context resolves InstanceSettings from the DI container, so register
	// a mock with a known instanceId before constructing any context.
	const instanceSettings = mock<InstanceSettings>({ instanceId: 'abc123' });
	Container.set(InstanceSettings, instanceSettings);

	const node = mock<INode>();
	const nodeType = mock<INodeType>({ description: mock() });
	const nodeTypes = mock<INodeTypes>();
	const expression = mock<Expression>();
	const workflow = mock<Workflow>({
		id: '123',
		name: 'Test Workflow',
		active: true,
		nodeTypes,
		timezone: 'UTC',
		expression,
	});
	let additionalData = mock<IWorkflowExecuteAdditionalData>({
		credentialsHelper: mock(),
	});
	const mode: WorkflowExecuteMode = 'manual';

	let testContext: TestContext;

	beforeEach(() => {
		jest.clearAllMocks();
		// Fresh context per test; node-type lookups always resolve to the mock type.
		testContext = new TestContext(workflow, node, additionalData, mode);
		nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
	});

	describe('getNode', () => {
		it('should return a deep copy of the node', () => {
			const result = testContext.getNode();

			// Same content, different object identity.
			expect(result).not.toBe(node);
			expect(JSON.stringify(result)).toEqual(JSON.stringify(node));
		});
	});

	describe('getWorkflow', () => {
		it('should return the id, name, and active properties of the workflow', () => {
			const result = testContext.getWorkflow();
			expect(result).toEqual({ id: '123', name: 'Test Workflow', active: true });
		});
	});

	describe('getMode', () => {
		it('should return the mode property', () => {
			const result = testContext.getMode();
			expect(result).toBe(mode);
		});
	});

	describe('getWorkflowStaticData', () => {
		it('should call getStaticData method of workflow', () => {
			testContext.getWorkflowStaticData('testType');
			expect(workflow.getStaticData).toHaveBeenCalledWith('testType', node);
		});
	});

	describe('getChildNodes', () => {
		it('should return an array of NodeTypeAndVersion objects for the child nodes of the given node', () => {
			const childNode1 = mock<INode>({ name: 'Child Node 1', type: 'testType1', typeVersion: 1 });
			const childNode2 = mock<INode>({ name: 'Child Node 2', type: 'testType2', typeVersion: 2 });
			// getChildNodes returns names; the context resolves them via workflow.nodes.
			workflow.getChildNodes.mockReturnValue(['Child Node 1', 'Child Node 2']);
			workflow.nodes = {
				'Child Node 1': childNode1,
				'Child Node 2': childNode2,
			};

			const result = testContext.getChildNodes('Test Node');

			expect(result).toMatchObject([
				{ name: 'Child Node 1', type: 'testType1', typeVersion: 1 },
				{ name: 'Child Node 2', type: 'testType2', typeVersion: 2 },
			]);
		});
	});

	describe('getParentNodes', () => {
		it('should return an array of NodeTypeAndVersion objects for the parent nodes of the given node', () => {
			const parentNode1 = mock<INode>({ name: 'Parent Node 1', type: 'testType1', typeVersion: 1 });
			const parentNode2 = mock<INode>({ name: 'Parent Node 2', type: 'testType2', typeVersion: 2 });
			workflow.getParentNodes.mockReturnValue(['Parent Node 1', 'Parent Node 2']);
			workflow.nodes = {
				'Parent Node 1': parentNode1,
				'Parent Node 2': parentNode2,
			};

			const result = testContext.getParentNodes('Test Node');

			expect(result).toMatchObject([
				{ name: 'Parent Node 1', type: 'testType1', typeVersion: 1 },
				{ name: 'Parent Node 2', type: 'testType2', typeVersion: 2 },
			]);
		});
	});

	describe('getKnownNodeTypes', () => {
		it('should call getKnownTypes method of nodeTypes', () => {
			testContext.getKnownNodeTypes();
			expect(nodeTypes.getKnownTypes).toHaveBeenCalled();
		});
	});

	describe('getRestApiUrl', () => {
		it('should return the restApiUrl property of additionalData', () => {
			additionalData.restApiUrl = 'https://example.com/api';
			const result = testContext.getRestApiUrl();
			expect(result).toBe('https://example.com/api');
		});
	});

	describe('getInstanceBaseUrl', () => {
		it('should return the instanceBaseUrl property of additionalData', () => {
			additionalData.instanceBaseUrl = 'https://example.com';
			const result = testContext.getInstanceBaseUrl();
			expect(result).toBe('https://example.com');
		});
	});

	describe('getInstanceId', () => {
		it('should return the instanceId property of instanceSettings', () => {
			const result = testContext.getInstanceId();
			expect(result).toBe('abc123');
		});
	});

	describe('getTimezone', () => {
		it('should return the timezone property of workflow', () => {
			const result = testContext.getTimezone();
			expect(result).toBe('UTC');
		});
	});

	describe('getCredentialsProperties', () => {
		it('should call getCredentialsProperties method of additionalData.credentialsHelper', () => {
			testContext.getCredentialsProperties('testType');
			expect(additionalData.credentialsHelper.getCredentialsProperties).toHaveBeenCalledWith(
				'testType',
			);
		});
	});

	describe('prepareOutputData', () => {
		it('should return the input array wrapped in another array', async () => {
			const outputData = [mock<INodeExecutionData>(), mock<INodeExecutionData>()];
			const result = await testContext.prepareOutputData(outputData);
			expect(result).toEqual([outputData]);
		});
	});

	describe('getNodeInputs', () => {
		it('should return static inputs array when inputs is an array', () => {
			// Plain connection-type entries are normalized to `{ type }` objects.
			nodeType.description.inputs = [NodeConnectionType.Main, NodeConnectionType.AiLanguageModel];
			const result = testContext.getNodeInputs();
			expect(result).toEqual([
				{ type: NodeConnectionType.Main },
				{ type: NodeConnectionType.AiLanguageModel },
			]);
		});

		it('should return input objects when inputs contains configurations', () => {
			nodeType.description.inputs = [
				{ type: NodeConnectionType.Main },
				{ type: NodeConnectionType.AiLanguageModel, required: true },
			];
			const result = testContext.getNodeInputs();
			expect(result).toEqual([
				{ type: NodeConnectionType.Main },
				{ type: NodeConnectionType.AiLanguageModel, required: true },
			]);
		});

		it('should evaluate dynamic inputs when inputs is a function', () => {
			// Expression-valued inputs are resolved through the workflow expression engine.
			const inputsExpressions = '={{ ["main", "ai_languageModel"] }}';
			nodeType.description.inputs = inputsExpressions;
			expression.getSimpleParameterValue.mockReturnValue([
				NodeConnectionType.Main,
				NodeConnectionType.AiLanguageModel,
			]);

			const result = testContext.getNodeInputs();

			expect(result).toEqual([
				{ type: NodeConnectionType.Main },
				{ type: NodeConnectionType.AiLanguageModel },
			]);
			expect(expression.getSimpleParameterValue).toHaveBeenCalledWith(
				node,
				inputsExpressions,
				'internal',
				{},
			);
		});
	});

	describe('getNodeOutputs', () => {
		it('should return static outputs array when outputs is an array', () => {
			nodeType.description.outputs = [NodeConnectionType.Main, NodeConnectionType.AiLanguageModel];
			const result = testContext.getNodeOutputs();
			expect(result).toEqual([
				{ type: NodeConnectionType.Main },
				{ type: NodeConnectionType.AiLanguageModel },
			]);
		});

		it('should return output objects when outputs contains configurations', () => {
			nodeType.description.outputs = [
				{ type: NodeConnectionType.Main },
				{ type: NodeConnectionType.AiLanguageModel, required: true },
			];
			const result = testContext.getNodeOutputs();
			expect(result).toEqual([
				{ type: NodeConnectionType.Main },
				{ type: NodeConnectionType.AiLanguageModel, required: true },
			]);
		});

		it('should evaluate dynamic outputs when outputs is a function', () => {
			const outputsExpressions = '={{ ["main", "ai_languageModel"] }}';
			nodeType.description.outputs = outputsExpressions;
			expression.getSimpleParameterValue.mockReturnValue([
				NodeConnectionType.Main,
				NodeConnectionType.AiLanguageModel,
			]);

			const result = testContext.getNodeOutputs();

			expect(result).toEqual([
				{ type: NodeConnectionType.Main },
				{ type: NodeConnectionType.AiLanguageModel },
			]);
			expect(expression.getSimpleParameterValue).toHaveBeenCalledWith(
				node,
				outputsExpressions,
				'internal',
				{},
			);
		});

		it('should add error output when node has continueOnFail error handling', () => {
			// Nodes with onError = 'continueErrorOutput' get an extra labelled error branch.
			const nodeWithError = mock<INode>({ onError: 'continueErrorOutput' });
			const contextWithError = new TestContext(workflow, nodeWithError, additionalData, mode);
			nodeType.description.outputs = [NodeConnectionType.Main];

			const result = contextWithError.getNodeOutputs();

			expect(result).toEqual([
				{ type: NodeConnectionType.Main, displayName: 'Success' },
				{ type: NodeConnectionType.Main, displayName: 'Error', category: 'error' },
			]);
		});
	});

	describe('getConnectedNodes', () => {
		it('should return connected nodes of given type', () => {
			const node1 = mock<INode>({ name: 'Node 1', type: 'test', disabled: false });
			const node2 = mock<INode>({ name: 'Node 2', type: 'test', disabled: false });
			workflow.getParentNodes.mockReturnValue(['Node 1', 'Node 2']);
			workflow.getNode.mockImplementation((name) => {
				if (name === 'Node 1') return node1;
				if (name === 'Node 2') return node2;
				return null;
			});

			const result = testContext.getConnectedNodes(NodeConnectionType.Main);

			expect(result).toEqual([node1, node2]);
			// Depth 1: only direct parents are considered.
			expect(workflow.getParentNodes).toHaveBeenCalledWith(node.name, NodeConnectionType.Main, 1);
		});

		it('should filter out disabled nodes', () => {
			const node1 = mock<INode>({ name: 'Node 1', type: 'test', disabled: false });
			const node2 = mock<INode>({ name: 'Node 2', type: 'test', disabled: true });
			workflow.getParentNodes.mockReturnValue(['Node 1', 'Node 2']);
			workflow.getNode.mockImplementation((name) => {
				if (name === 'Node 1') return node1;
				if (name === 'Node 2') return node2;
				return null;
			});

			const result = testContext.getConnectedNodes(NodeConnectionType.Main);

			expect(result).toEqual([node1]);
		});

		it('should filter out non-existent nodes', () => {
			const node1 = mock<INode>({ name: 'Node 1', type: 'test', disabled: false });
			workflow.getParentNodes.mockReturnValue(['Node 1', 'NonExistent']);
			workflow.getNode.mockImplementation((name) => {
				if (name === 'Node 1') return node1;
				return null;
			});

			const result = testContext.getConnectedNodes(NodeConnectionType.Main);

			expect(result).toEqual([node1]);
		});
	});
});

View File

@@ -0,0 +1,96 @@
import { mock } from 'jest-mock-extended';
import type {
Expression,
ICredentialDataDecryptedObject,
ICredentialsHelper,
INode,
INodeType,
INodeTypes,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { PollContext } from '../poll-context';
describe('PollContext', () => {
	const testCredentialType = 'testCredential';
	// Node type declaring one required credential and one required parameter,
	// matching the fixtures below.
	const nodeType = mock<INodeType>({
		description: {
			credentials: [
				{
					name: testCredentialType,
					required: true,
				},
			],
			properties: [
				{
					name: 'testParameter',
					required: true,
				},
			],
		},
	});
	const nodeTypes = mock<INodeTypes>();
	const expression = mock<Expression>();
	const workflow = mock<Workflow>({ expression, nodeTypes });
	const node = mock<INode>({
		credentials: {
			[testCredentialType]: {
				id: 'testCredentialId',
			},
		},
	});
	// Assigned after mock() so the parameters object is a plain value, not a mock.
	node.parameters = {
		testParameter: 'testValue',
	};
	const credentialsHelper = mock<ICredentialsHelper>();
	const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
	const mode: WorkflowExecuteMode = 'manual';
	const activation: WorkflowActivateMode = 'init';
	// Instance under test; shared across tests, mocks cleared in beforeEach.
	const pollContext = new PollContext(workflow, node, additionalData, mode, activation);

	beforeEach(() => {
		jest.clearAllMocks();
	});

	describe('getActivationMode', () => {
		it('should return the activation property', () => {
			const result = pollContext.getActivationMode();
			expect(result).toBe(activation);
		});
	});

	describe('getCredentials', () => {
		it('should get decrypted credentials', async () => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });

			const credentials =
				await pollContext.getCredentials<ICredentialDataDecryptedObject>(testCredentialType);

			expect(credentials).toEqual({ secret: 'token' });
		});
	});

	describe('getNodeParameter', () => {
		beforeEach(() => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			// Pass parameter values through unchanged instead of evaluating expressions.
			expression.getParameterValue.mockImplementation((value) => value);
		});

		it('should return parameter value when it exists', () => {
			const parameter = pollContext.getNodeParameter('testParameter');
			expect(parameter).toBe('testValue');
		});

		it('should return the fallback value when the parameter does not exist', () => {
			const parameter = pollContext.getNodeParameter('otherParameter', 'fallback');
			expect(parameter).toBe('fallback');
		});
	});
});

View File

@@ -0,0 +1,242 @@
import { Container } from '@n8n/di';
import { captor, mock, type MockProxy } from 'jest-mock-extended';
import type {
IRunExecutionData,
ContextType,
IContextObject,
INode,
OnError,
Workflow,
ITaskMetadata,
ISourceData,
IExecuteData,
IWorkflowExecuteAdditionalData,
ExecuteWorkflowData,
RelatedExecution,
IExecuteWorkflowInfo,
} from 'n8n-workflow';
import { ApplicationError, NodeHelpers, WAIT_INDEFINITELY } from 'n8n-workflow';
import { BinaryDataService } from '@/binary-data/binary-data.service';
import type { BaseExecuteContext } from '../base-execute-context';
const binaryDataService = mock<BinaryDataService>();
Container.set(BinaryDataService, binaryDataService);
export const describeCommonTests = (
context: BaseExecuteContext,
{
abortSignal,
node,
workflow,
runExecutionData,
executeData,
}: {
abortSignal: AbortSignal;
node: INode;
workflow: Workflow;
runExecutionData: IRunExecutionData;
executeData: IExecuteData;
},
) => {
// @ts-expect-error `additionalData` is private
const additionalData = context.additionalData as MockProxy<IWorkflowExecuteAdditionalData>;
describe('getExecutionCancelSignal', () => {
it('should return the abort signal', () => {
expect(context.getExecutionCancelSignal()).toBe(abortSignal);
});
});
describe('onExecutionCancellation', () => {
const handler = jest.fn();
context.onExecutionCancellation(handler);
const fnCaptor = captor<() => void>();
expect(abortSignal.addEventListener).toHaveBeenCalledWith('abort', fnCaptor);
expect(handler).not.toHaveBeenCalled();
fnCaptor.value();
expect(abortSignal.removeEventListener).toHaveBeenCalledWith('abort', fnCaptor);
expect(handler).toHaveBeenCalled();
});
describe('continueOnFail', () => {
afterEach(() => {
node.onError = undefined;
node.continueOnFail = false;
});
it('should return false for nodes by default', () => {
expect(context.continueOnFail()).toEqual(false);
});
it('should return true if node has continueOnFail set to true', () => {
node.continueOnFail = true;
expect(context.continueOnFail()).toEqual(true);
});
test.each([
['continueRegularOutput', true],
['continueErrorOutput', true],
['stopWorkflow', false],
])('if node has onError set to %s, it should return %s', (onError, expected) => {
node.onError = onError as OnError;
expect(context.continueOnFail()).toEqual(expected);
});
});
describe('getContext', () => {
it('should return the context object', () => {
const contextType: ContextType = 'node';
const expectedContext = mock<IContextObject>();
const getContextSpy = jest.spyOn(NodeHelpers, 'getContext');
getContextSpy.mockReturnValue(expectedContext);
expect(context.getContext(contextType)).toEqual(expectedContext);
expect(getContextSpy).toHaveBeenCalledWith(runExecutionData, contextType, node);
getContextSpy.mockRestore();
});
});
describe('sendMessageToUI', () => {
it('should send console messages to the frontend', () => {
context.sendMessageToUI('Testing', 1, 2, {});
expect(additionalData.sendDataToUI).toHaveBeenCalledWith('sendConsoleMessage', {
source: '[Node: "Test Node"]',
messages: ['Testing', 1, 2, {}],
});
});
});
describe('logAiEvent', () => {
it('should log the AI event correctly', () => {
const eventName = 'ai-tool-called';
const msg = 'test message';
context.logAiEvent(eventName, msg);
expect(additionalData.logAiEvent).toHaveBeenCalledWith(eventName, {
executionId: additionalData.executionId,
nodeName: node.name,
workflowName: workflow.name,
nodeType: node.type,
workflowId: workflow.id,
msg,
});
});
});
describe('getInputSourceData', () => {
it('should return the input source data correctly', () => {
const inputSourceData = mock<ISourceData>();
executeData.source = { main: [inputSourceData] };
expect(context.getInputSourceData()).toEqual(inputSourceData);
});
it('should throw an error if the source data is missing', () => {
executeData.source = null;
expect(() => context.getInputSourceData()).toThrow(ApplicationError);
});
});
describe('setMetadata', () => {
it('sets metadata on execution data', () => {
const metadata: ITaskMetadata = {
subExecution: {
workflowId: '123',
executionId: 'xyz',
},
};
expect(context.getExecuteData().metadata?.subExecution).toEqual(undefined);
context.setMetadata(metadata);
expect(context.getExecuteData().metadata?.subExecution).toEqual(metadata.subExecution);
});
});
describe('evaluateExpression', () => {
it('should evaluate the expression correctly', () => {
const expression = '$json.test';
const expectedResult = 'data';
const resolveSimpleParameterValueSpy = jest.spyOn(
workflow.expression,
'resolveSimpleParameterValue',
);
resolveSimpleParameterValueSpy.mockReturnValue(expectedResult);
expect(context.evaluateExpression(expression, 0)).toEqual(expectedResult);
expect(resolveSimpleParameterValueSpy).toHaveBeenCalledWith(
`=${expression}`,
{},
runExecutionData,
0,
0,
node.name,
[],
'manual',
expect.objectContaining({}),
executeData,
);
resolveSimpleParameterValueSpy.mockRestore();
});
});
describe('putExecutionToWait', () => {
it('should set waitTill and execution status', async () => {
const waitTill = new Date();
await context.putExecutionToWait(waitTill);
expect(runExecutionData.waitTill).toEqual(waitTill);
expect(additionalData.setExecutionStatus).toHaveBeenCalledWith('waiting');
});
});
describe('executeWorkflow', () => {
const data = [[{ json: { test: true } }]];
const executeWorkflowData = mock<ExecuteWorkflowData>();
const workflowInfo = mock<IExecuteWorkflowInfo>();
const parentExecution: RelatedExecution = {
executionId: 'parent_execution_id',
workflowId: 'parent_workflow_id',
};
it('should execute workflow and return data', async () => {
additionalData.executeWorkflow.mockResolvedValue(executeWorkflowData);
binaryDataService.duplicateBinaryData.mockResolvedValue(data);
const result = await context.executeWorkflow(workflowInfo, undefined, undefined, {
parentExecution,
});
expect(result.data).toEqual(data);
expect(binaryDataService.duplicateBinaryData).toHaveBeenCalledWith(
workflow.id,
additionalData.executionId,
executeWorkflowData.data,
);
});
it('should put execution to wait if waitTill is returned', async () => {
const waitTill = new Date();
additionalData.executeWorkflow.mockResolvedValue({ ...executeWorkflowData, waitTill });
binaryDataService.duplicateBinaryData.mockResolvedValue(data);
const result = await context.executeWorkflow(workflowInfo, undefined, undefined, {
parentExecution,
});
expect(additionalData.setExecutionStatus).toHaveBeenCalledWith('waiting');
expect(runExecutionData.waitTill).toEqual(WAIT_INDEFINITELY);
expect(result.waitTill).toBe(waitTill);
});
});
};

View File

@@ -0,0 +1,178 @@
import { mock } from 'jest-mock-extended';
import type {
INode,
IWorkflowExecuteAdditionalData,
IRunExecutionData,
INodeExecutionData,
ITaskDataConnections,
IExecuteData,
Workflow,
WorkflowExecuteMode,
ICredentialsHelper,
Expression,
INodeType,
INodeTypes,
ICredentialDataDecryptedObject,
} from 'n8n-workflow';
import { ApplicationError, NodeConnectionType } from 'n8n-workflow';
import { describeCommonTests } from './shared-tests';
import { SupplyDataContext } from '../supply-data-context';
describe('SupplyDataContext', () => {
	const testCredentialType = 'testCredential';
	// Node type declaring one required credential and one required parameter,
	// matching the fixtures below.
	const nodeType = mock<INodeType>({
		description: {
			credentials: [
				{
					name: testCredentialType,
					required: true,
				},
			],
			properties: [
				{
					name: 'testParameter',
					required: true,
				},
			],
		},
	});
	const nodeTypes = mock<INodeTypes>();
	const expression = mock<Expression>();
	const workflow = mock<Workflow>({ expression, nodeTypes });
	const node = mock<INode>({
		name: 'Test Node',
		credentials: {
			[testCredentialType]: {
				id: 'testCredentialId',
			},
		},
	});
	// Assigned after mock() so the parameters object is a plain value, not a mock.
	node.parameters = {
		testParameter: 'testValue',
	};
	const credentialsHelper = mock<ICredentialsHelper>();
	const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
	const mode: WorkflowExecuteMode = 'manual';
	const runExecutionData = mock<IRunExecutionData>();
	const connectionInputData: INodeExecutionData[] = [];
	const connectionType = NodeConnectionType.Main;
	// One input connection of type 'main' holding a single item.
	const inputData: ITaskDataConnections = { [connectionType]: [[{ json: { test: 'data' } }]] };
	const executeData = mock<IExecuteData>();
	const runIndex = 0;
	const closeFn = jest.fn();
	const abortSignal = mock<AbortSignal>();

	// Instance under test, shared across all tests in this file.
	const supplyDataContext = new SupplyDataContext(
		workflow,
		node,
		additionalData,
		mode,
		runExecutionData,
		runIndex,
		connectionInputData,
		inputData,
		connectionType,
		executeData,
		[closeFn],
		abortSignal,
	);

	beforeEach(() => {
		nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
		// Pass parameter values through unchanged instead of evaluating expressions.
		expression.getParameterValue.mockImplementation((value) => value);
	});

	// Run the suite shared by all BaseExecuteContext subclasses.
	describeCommonTests(supplyDataContext, {
		abortSignal,
		node,
		workflow,
		executeData,
		runExecutionData,
	});

	describe('getInputData', () => {
		const inputIndex = 0;

		afterEach(() => {
			// Restore the shared input fixture mutated by individual tests.
			inputData[connectionType] = [[{ json: { test: 'data' } }]];
		});

		it('should return the input data correctly', () => {
			const expectedData = [{ json: { test: 'data' } }];
			expect(supplyDataContext.getInputData(inputIndex, connectionType)).toEqual(expectedData);
		});

		it('should return an empty array if the input name does not exist', () => {
			const connectionType = 'nonExistent';
			expect(
				supplyDataContext.getInputData(inputIndex, connectionType as NodeConnectionType),
			).toEqual([]);
		});

		it('should throw an error if the input index is out of range', () => {
			const inputIndex = 2;
			expect(() => supplyDataContext.getInputData(inputIndex, connectionType)).toThrow(
				ApplicationError,
			);
		});

		it('should throw an error if the input index was not set', () => {
			inputData.main[inputIndex] = null;
			expect(() => supplyDataContext.getInputData(inputIndex, connectionType)).toThrow(
				ApplicationError,
			);
		});
	});

	describe('getNodeParameter', () => {
		beforeEach(() => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			expression.getParameterValue.mockImplementation((value) => value);
		});

		it('should return parameter value when it exists', () => {
			const parameter = supplyDataContext.getNodeParameter('testParameter', 0);
			expect(parameter).toBe('testValue');
		});

		it('should return the fallback value when the parameter does not exist', () => {
			const parameter = supplyDataContext.getNodeParameter('otherParameter', 0, 'fallback');
			expect(parameter).toBe('fallback');
		});
	});

	describe('getCredentials', () => {
		it('should get decrypted credentials', async () => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });

			const credentials = await supplyDataContext.getCredentials<ICredentialDataDecryptedObject>(
				testCredentialType,
				0,
			);

			expect(credentials).toEqual({ secret: 'token' });
		});
	});

	describe('getWorkflowDataProxy', () => {
		it('should return the workflow data proxy correctly', () => {
			const workflowDataProxy = supplyDataContext.getWorkflowDataProxy(0);
			expect(workflowDataProxy.isProxy).toBe(true);
			expect(Object.keys(workflowDataProxy.$input)).toEqual([
				'all',
				'context',
				'first',
				'item',
				'last',
				'params',
			]);
		});
	});
});

View File

@@ -0,0 +1,96 @@
import { mock } from 'jest-mock-extended';
import type {
Expression,
ICredentialDataDecryptedObject,
ICredentialsHelper,
INode,
INodeType,
INodeTypes,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { TriggerContext } from '../trigger-context';
describe('TriggerContext', () => {
	const testCredentialType = 'testCredential';
	// Node type declaring one required credential and one required parameter,
	// matching the fixtures below.
	const nodeType = mock<INodeType>({
		description: {
			credentials: [
				{
					name: testCredentialType,
					required: true,
				},
			],
			properties: [
				{
					name: 'testParameter',
					required: true,
				},
			],
		},
	});
	const nodeTypes = mock<INodeTypes>();
	const expression = mock<Expression>();
	const workflow = mock<Workflow>({ expression, nodeTypes });
	const node = mock<INode>({
		credentials: {
			[testCredentialType]: {
				id: 'testCredentialId',
			},
		},
	});
	// Assigned after mock() so the parameters object is a plain value, not a mock.
	node.parameters = {
		testParameter: 'testValue',
	};
	const credentialsHelper = mock<ICredentialsHelper>();
	const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
	const mode: WorkflowExecuteMode = 'manual';
	const activation: WorkflowActivateMode = 'init';
	// Instance under test; shared across tests, mocks cleared in beforeEach.
	const triggerContext = new TriggerContext(workflow, node, additionalData, mode, activation);

	beforeEach(() => {
		jest.clearAllMocks();
	});

	describe('getActivationMode', () => {
		it('should return the activation property', () => {
			const result = triggerContext.getActivationMode();
			expect(result).toBe(activation);
		});
	});

	describe('getCredentials', () => {
		it('should get decrypted credentials', async () => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });

			const credentials =
				await triggerContext.getCredentials<ICredentialDataDecryptedObject>(testCredentialType);

			expect(credentials).toEqual({ secret: 'token' });
		});
	});

	describe('getNodeParameter', () => {
		beforeEach(() => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			// Pass parameter values through unchanged instead of evaluating expressions.
			expression.getParameterValue.mockImplementation((value) => value);
		});

		it('should return parameter value when it exists', () => {
			const parameter = triggerContext.getNodeParameter('testParameter');
			expect(parameter).toBe('testValue');
		});

		it('should return the fallback value when the parameter does not exist', () => {
			const parameter = triggerContext.getNodeParameter('otherParameter', 'fallback');
			expect(parameter).toBe('fallback');
		});
	});
});

View File

@@ -0,0 +1,161 @@
import type { Request, Response } from 'express';
import { mock } from 'jest-mock-extended';
import type {
Expression,
ICredentialDataDecryptedObject,
ICredentialsHelper,
INode,
INodeType,
INodeTypes,
IWebhookData,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { WebhookContext } from '../webhook-context';
describe('WebhookContext', () => {
	const testCredentialType = 'testCredential';
	// Node type declaring one required credential and one required parameter,
	// matching the fixtures below.
	const nodeType = mock<INodeType>({
		description: {
			credentials: [
				{
					name: testCredentialType,
					required: true,
				},
			],
			properties: [
				{
					name: 'testParameter',
					required: true,
				},
			],
		},
	});
	const nodeTypes = mock<INodeTypes>();
	const expression = mock<Expression>();
	const workflow = mock<Workflow>({ expression, nodeTypes });
	const node = mock<INode>({
		credentials: {
			[testCredentialType]: {
				id: 'testCredentialId',
			},
		},
	});
	// Assigned after mock() so the parameters object is a plain value, not a mock.
	node.parameters = {
		testParameter: 'testValue',
	};
	const credentialsHelper = mock<ICredentialsHelper>();
	const additionalData = mock<IWorkflowExecuteAdditionalData>({
		credentialsHelper,
	});
	// Fake incoming HTTP request/response pair the webhook accessors read from.
	additionalData.httpRequest = {
		body: { test: 'body' },
		headers: { test: 'header' },
		params: { test: 'param' },
		query: { test: 'query' },
	} as unknown as Request;
	additionalData.httpResponse = mock<Response>();
	const mode: WorkflowExecuteMode = 'manual';
	const webhookData = mock<IWebhookData>({
		webhookDescription: {
			name: 'default',
		},
	});
	const runExecutionData = null;
	// Instance under test; shared across tests, mocks cleared in beforeEach.
	const webhookContext = new WebhookContext(
		workflow,
		node,
		additionalData,
		mode,
		webhookData,
		[],
		runExecutionData,
	);

	beforeEach(() => {
		jest.clearAllMocks();
	});

	describe('getCredentials', () => {
		it('should get decrypted credentials', async () => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });

			const credentials =
				await webhookContext.getCredentials<ICredentialDataDecryptedObject>(testCredentialType);

			expect(credentials).toEqual({ secret: 'token' });
		});
	});

	describe('getBodyData', () => {
		it('should return the body data of the request', () => {
			const bodyData = webhookContext.getBodyData();
			expect(bodyData).toEqual({ test: 'body' });
		});
	});

	describe('getHeaderData', () => {
		it('should return the header data of the request', () => {
			const headerData = webhookContext.getHeaderData();
			expect(headerData).toEqual({ test: 'header' });
		});
	});

	describe('getParamsData', () => {
		it('should return the params data of the request', () => {
			const paramsData = webhookContext.getParamsData();
			expect(paramsData).toEqual({ test: 'param' });
		});
	});

	describe('getQueryData', () => {
		it('should return the query data of the request', () => {
			const queryData = webhookContext.getQueryData();
			expect(queryData).toEqual({ test: 'query' });
		});
	});

	describe('getRequestObject', () => {
		it('should return the request object', () => {
			const request = webhookContext.getRequestObject();
			expect(request).toBe(additionalData.httpRequest);
		});
	});

	describe('getResponseObject', () => {
		it('should return the response object', () => {
			const response = webhookContext.getResponseObject();
			expect(response).toBe(additionalData.httpResponse);
		});
	});

	describe('getWebhookName', () => {
		it('should return the name of the webhook', () => {
			const webhookName = webhookContext.getWebhookName();
			expect(webhookName).toBe('default');
		});
	});

	describe('getNodeParameter', () => {
		beforeEach(() => {
			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
			// Pass parameter values through unchanged instead of evaluating expressions.
			expression.getParameterValue.mockImplementation((value) => value);
		});

		it('should return parameter value when it exists', () => {
			const parameter = webhookContext.getNodeParameter('testParameter');
			expect(parameter).toBe('testValue');
		});

		it('should return the fallback value when the parameter does not exist', () => {
			const parameter = webhookContext.getNodeParameter('otherParameter', 'fallback');
			expect(parameter).toBe('fallback');
		});
	});
});

View File

@@ -0,0 +1,227 @@
import { Container } from '@n8n/di';
import { get } from 'lodash';
import type {
Workflow,
INode,
IWorkflowExecuteAdditionalData,
WorkflowExecuteMode,
IRunExecutionData,
INodeExecutionData,
ITaskDataConnections,
IExecuteData,
ICredentialDataDecryptedObject,
CallbackManager,
IExecuteWorkflowInfo,
RelatedExecution,
ExecuteWorkflowData,
ITaskMetadata,
ContextType,
IContextObject,
IWorkflowDataProxyData,
ISourceData,
AiEvent,
} from 'n8n-workflow';
import {
ApplicationError,
NodeHelpers,
NodeConnectionType,
WAIT_INDEFINITELY,
WorkflowDataProxy,
} from 'n8n-workflow';
import { BinaryDataService } from '@/binary-data/binary-data.service';
import { NodeExecutionContext } from './node-execution-context';
export class BaseExecuteContext extends NodeExecutionContext {
	// Shared service used to re-key binary data to this execution (see executeWorkflow).
	protected readonly binaryDataService = Container.get(BinaryDataService);

	/**
	 * Captures the per-run execution state shared by all executing contexts
	 * (run data, input items, execute data, optional cancellation signal) and
	 * forwards the node/workflow basics to {@link NodeExecutionContext}.
	 */
	constructor(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		protected readonly runExecutionData: IRunExecutionData,
		runIndex: number,
		protected readonly connectionInputData: INodeExecutionData[],
		protected readonly inputData: ITaskDataConnections,
		protected readonly executeData: IExecuteData,
		protected readonly abortSignal?: AbortSignal,
	) {
		super(workflow, node, additionalData, mode, runExecutionData, runIndex);
	}
	/** Returns the signal that fires when this execution is cancelled, if any. */
	getExecutionCancelSignal() {
		return this.abortSignal;
	}
onExecutionCancellation(handler: () => unknown) {
const fn = () => {
this.abortSignal?.removeEventListener('abort', fn);
handler();
};
this.abortSignal?.addEventListener('abort', fn);
}
	/** Returns the execute data (node, input data, source, metadata) for this run. */
	getExecuteData() {
		return this.executeData;
	}
setMetadata(metadata: ITaskMetadata): void {
this.executeData.metadata = {
...(this.executeData.metadata ?? {}),
...metadata,
};
}
	/** Returns the context object of the given type for this node's run data. */
	getContext(type: ContextType): IContextObject {
		return NodeHelpers.getContext(this.runExecutionData, type, this.node);
	}
/** Returns if execution should be continued even if there was an error */
continueOnFail(): boolean {
const onError = get(this.node, 'onError', undefined);
if (onError === undefined) {
return get(this.node, 'continueOnFail', false);
}
return ['continueRegularOutput', 'continueErrorOutput'].includes(onError);
}
	/**
	 * Returns the decrypted credentials of the given type for this node,
	 * resolved against the item at `itemIndex` (expressions in credential
	 * fields may reference item data).
	 */
	async getCredentials<T extends object = ICredentialDataDecryptedObject>(
		type: string,
		itemIndex: number,
	) {
		return await this._getCredentials<T>(
			type,
			this.executeData,
			this.connectionInputData,
			itemIndex,
		);
	}
async putExecutionToWait(waitTill: Date): Promise<void> {
this.runExecutionData.waitTill = waitTill;
if (this.additionalData.setExecutionStatus) {
this.additionalData.setExecutionStatus('waiting');
}
}
	/**
	 * Executes a sub-workflow from this node and returns its result, with any
	 * binary data duplicated under this (parent) execution's id.
	 *
	 * @param workflowInfo - Identifies the sub-workflow to run.
	 * @param inputData - Optional items passed to the sub-workflow.
	 * @param parentCallbackManager - Optional callback manager forwarded to the sub-run.
	 * @param options - `doNotWaitToFinish` / `parentExecution` passthrough options.
	 */
	async executeWorkflow(
		workflowInfo: IExecuteWorkflowInfo,
		inputData?: INodeExecutionData[],
		parentCallbackManager?: CallbackManager,
		options?: {
			doNotWaitToFinish?: boolean;
			parentExecution?: RelatedExecution;
		},
	): Promise<ExecuteWorkflowData> {
		const result = await this.additionalData.executeWorkflow(workflowInfo, this.additionalData, {
			...options,
			parentWorkflowId: this.workflow.id,
			inputData,
			parentWorkflowSettings: this.workflow.settings,
			node: this.node,
			parentCallbackManager,
		});

		// If a sub-workflow execution goes into the waiting state
		if (result.waitTill) {
			// then put the parent workflow execution also into the waiting state,
			// but do not use the sub-workflow `waitTill` to avoid WaitTracker resuming the parent execution at the same time as the sub-workflow
			await this.putExecutionToWait(WAIT_INDEFINITELY);
		}

		// Re-key the sub-workflow's binary data to this execution so it stays
		// accessible after the sub-execution's data is pruned.
		const data = await this.binaryDataService.duplicateBinaryData(
			this.workflow.id,
			this.additionalData.executionId!,
			result.data,
		);
		return { ...result, data };
	}
/**
 * Returns the items that arrived on the given input of the node.
 *
 * Assumes callers have verified that `this.inputData[connectionType]` exists
 * (public callers guard with `hasOwnProperty` first) — TODO confirm.
 *
 * @param inputIndex Zero-based index of the input within `connectionType`
 * @param connectionType Connection type (e.g. `main`) to read from
 * @returns The items on that input (possibly `undefined` when never populated)
 * @throws ApplicationError when the index is out of range or the input is unset
 */
protected getInputItems(inputIndex: number, connectionType: NodeConnectionType) {
	const inputData = this.inputData[connectionType];
	// Fix: the previous check (`inputData.length < inputIndex`) was off by one —
	// `inputIndex === inputData.length` slipped through and silently yielded
	// `undefined` instead of the intended out-of-range error.
	if (inputIndex >= inputData.length) {
		throw new ApplicationError('Could not get input with given index', {
			extra: { inputIndex, connectionType },
		});
	}
	const allItems = inputData[inputIndex] as INodeExecutionData[] | null | undefined;
	if (allItems === null) {
		throw new ApplicationError('Input index was not set', {
			extra: { inputIndex, connectionType },
		});
	}
	return allItems;
}
/**
 * Returns the source metadata (which node/output fed this input) for the
 * given input index and connection type.
 *
 * NOTE(review): `executeData` is a non-optional field on this class, so the
 * optional chain below is effectively redundant; the guard only handles the
 * explicit `source === null` case — confirm whether `source` can be null here.
 */
getInputSourceData(inputIndex = 0, connectionType = NodeConnectionType.Main): ISourceData {
	if (this.executeData?.source === null) {
		// Should never happen as n8n sets it automatically
		throw new ApplicationError('Source data is missing');
	}
	return this.executeData.source[connectionType][inputIndex]!;
}
/**
 * Builds the expression data proxy (`$json`, `$node`, …) scoped to the given
 * item index. Arguments to `WorkflowDataProxy` are positional — keep order.
 */
getWorkflowDataProxy(itemIndex: number): IWorkflowDataProxyData {
	return new WorkflowDataProxy(
		this.workflow,
		this.runExecutionData,
		this.runIndex,
		itemIndex,
		this.node.name,
		this.connectionInputData,
		{}, // presumably sibling parameters; verify against WorkflowDataProxy's signature
		this.mode,
		this.additionalKeys,
		this.executeData,
	).getDataProxy();
}
/**
 * Forwards console-style messages from a node to the editor UI.
 * Only active in manual executions; any failure to deliver is logged as a
 * warning rather than thrown, so node execution is never interrupted.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
sendMessageToUI(...args: any[]): void {
	if (this.mode !== 'manual') {
		return;
	}
	try {
		if (this.additionalData.sendDataToUI) {
			// Normalize arguments so they render sensibly in the browser console.
			args = args.map((arg) => {
				// prevent invalid dates from being logged as null
				// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-return
				if (arg.isLuxonDateTime && arg.invalidReason) return { ...arg };
				// log valid dates in human readable format, as in browser
				// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-argument
				if (arg.isLuxonDateTime) return new Date(arg.ts).toString();
				if (arg instanceof Date) return arg.toString();
				// eslint-disable-next-line @typescript-eslint/no-unsafe-return
				return arg;
			});
			this.additionalData.sendDataToUI('sendConsoleMessage', {
				source: `[Node: "${this.node.name}"]`,
				messages: args,
			});
		}
	} catch (error) {
		// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
		this.logger.warn(`There was a problem sending message to UI: ${error.message}`);
	}
}
/**
 * Emits an AI telemetry event enriched with execution/workflow/node context.
 * Falls back to placeholder identifiers when the execution or workflow has
 * not been persisted yet.
 */
logAiEvent(eventName: AiEvent, msg: string) {
	const { additionalData, node, workflow } = this;
	const payload = {
		executionId: additionalData.executionId ?? 'unsaved-execution',
		nodeName: node.name,
		workflowName: workflow.name ?? 'Unnamed workflow',
		nodeType: node.type,
		workflowId: workflow.id ?? 'unsaved-workflow',
		msg,
	};
	return additionalData.logAiEvent(eventName, payload);
}
}

View File

@@ -0,0 +1,211 @@
import type {
AINodeConnectionType,
CallbackManager,
CloseFunction,
IExecuteData,
IExecuteFunctions,
IExecuteResponsePromiseData,
IGetNodeParameterOptions,
INode,
INodeExecutionData,
IRunExecutionData,
ITaskDataConnections,
IWorkflowExecuteAdditionalData,
Result,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import {
ApplicationError,
createDeferredPromise,
createEnvProviderState,
NodeConnectionType,
} from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
returnJsonArray,
copyInputItems,
normalizeItems,
constructExecutionMetaData,
assertBinaryData,
getBinaryDataBuffer,
copyBinaryFile,
getRequestHelperFunctions,
getBinaryHelperFunctions,
getSSHTunnelFunctions,
getFileSystemHelperFunctions,
getCheckProcessedHelperFunctions,
detectBinaryEncoding,
} from '@/node-execute-functions';
import { BaseExecuteContext } from './base-execute-context';
import { getInputConnectionData } from './utils/get-input-connection-data';
/**
 * Execution context handed to regular nodes (`IExecuteFunctions`).
 * Wires up request/binary/filesystem/dedupe helper functions in the
 * constructor and exposes input items, sub-connection data and task-runner
 * job support during a node run.
 */
export class ExecuteContext extends BaseExecuteContext implements IExecuteFunctions {
	readonly helpers: IExecuteFunctions['helpers'];

	readonly nodeHelpers: IExecuteFunctions['nodeHelpers'];

	readonly getNodeParameter: IExecuteFunctions['getNodeParameter'];

	constructor(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		runExecutionData: IRunExecutionData,
		runIndex: number,
		connectionInputData: INodeExecutionData[],
		inputData: ITaskDataConnections,
		executeData: IExecuteData,
		private readonly closeFunctions: CloseFunction[],
		abortSignal?: AbortSignal,
	) {
		super(
			workflow,
			node,
			additionalData,
			mode,
			runExecutionData,
			runIndex,
			connectionInputData,
			inputData,
			executeData,
			abortSignal,
		);
		// Helper functions available to node code via `this.helpers`.
		// Later spreads intentionally override earlier keys — keep this order.
		this.helpers = {
			createDeferredPromise,
			returnJsonArray,
			copyInputItems,
			normalizeItems,
			constructExecutionMetaData,
			...getRequestHelperFunctions(
				workflow,
				node,
				additionalData,
				runExecutionData,
				connectionInputData,
			),
			...getBinaryHelperFunctions(additionalData, workflow.id),
			...getSSHTunnelFunctions(),
			...getFileSystemHelperFunctions(node),
			...getCheckProcessedHelperFunctions(workflow, node),
			// Binary accessors are pre-bound to this run's input data (inputIndex 0).
			assertBinaryData: (itemIndex, propertyName) =>
				assertBinaryData(inputData, node, itemIndex, propertyName, 0),
			getBinaryDataBuffer: async (itemIndex, propertyName) =>
				await getBinaryDataBuffer(inputData, itemIndex, propertyName, 0),
			detectBinaryEncoding: (buffer: Buffer) => detectBinaryEncoding(buffer),
		};
		this.nodeHelpers = {
			copyBinaryFile: async (filePath, fileName, mimeType) =>
				await copyBinaryFile(
					this.workflow.id,
					this.additionalData.executionId!,
					filePath,
					fileName,
					mimeType,
				),
			};
		// Bound wrapper so node code can resolve parameters per item index.
		this.getNodeParameter = ((
			parameterName: string,
			itemIndex: number,
			// eslint-disable-next-line @typescript-eslint/no-explicit-any
			fallbackValue?: any,
			options?: IGetNodeParameterOptions,
		) =>
			this._getNodeParameter(
				parameterName,
				itemIndex,
				fallbackValue,
				options,
			)) as IExecuteFunctions['getNodeParameter'];
	}

	/**
	 * Offloads a job to the task runner. Arguments are positional — keep order
	 * in sync with `startRunnerTask`'s signature.
	 */
	async startJob<T = unknown, E = unknown>(
		jobType: string,
		settings: unknown,
		itemIndex: number,
	): Promise<Result<T, E>> {
		return await this.additionalData.startRunnerTask<T, E>(
			this.additionalData,
			jobType,
			settings,
			this,
			this.inputData,
			this.node,
			this.workflow,
			this.runExecutionData,
			this.runIndex,
			itemIndex,
			this.node.name,
			this.connectionInputData,
			{},
			this.mode,
			createEnvProviderState(),
			this.executeData,
		);
	}

	/** Resolves data supplied on an AI sub-connection (e.g. a connected tool/model). */
	async getInputConnectionData(
		connectionType: AINodeConnectionType,
		itemIndex: number,
	): Promise<unknown> {
		return await getInputConnectionData.call(
			this,
			this.workflow,
			this.runExecutionData,
			this.runIndex,
			this.connectionInputData,
			this.inputData,
			this.additionalData,
			this.executeData,
			this.mode,
			this.closeFunctions,
			connectionType,
			itemIndex,
			this.abortSignal,
		);
	}

	/** Returns the items on the given input, or `[]` when nothing is connected. */
	getInputData(inputIndex = 0, connectionType = NodeConnectionType.Main) {
		if (!this.inputData.hasOwnProperty(connectionType)) {
			// Return empty array because else it would throw error when nothing is connected to input
			return [];
		}
		return super.getInputItems(inputIndex, connectionType) ?? [];
	}

	/**
	 * Routes node console output: to the editor UI in manual mode, to stdout
	 * (when CODE_ENABLE_STDOUT is set) otherwise.
	 */
	logNodeOutput(...args: unknown[]): void {
		if (this.mode === 'manual') {
			this.sendMessageToUI(...args);
			return;
		}

		if (process.env.CODE_ENABLE_STDOUT === 'true') {
			console.log(`[Workflow "${this.getWorkflow().id}"][Node "${this.node.name}"]`, ...args);
		}
	}

	/** Sends an early response (e.g. for webhook nodes) via the execution hooks. */
	async sendResponse(response: IExecuteResponsePromiseData): Promise<void> {
		await this.additionalData.hooks?.executeHookFunctions('sendResponse', [response]);
	}

	/** @deprecated use ISupplyDataFunctions.addInputData */
	addInputData(): { index: number } {
		throw new ApplicationError('addInputData should not be called on IExecuteFunctions');
	}

	/** @deprecated use ISupplyDataFunctions.addOutputData */
	addOutputData(): void {
		throw new ApplicationError('addOutputData should not be called on IExecuteFunctions');
	}

	/** Returns the callback manager inherited from a parent execution, if any. */
	getParentCallbackManager(): CallbackManager | undefined {
		return this.additionalData.parentCallbackManager;
	}
}

View File

@@ -0,0 +1,115 @@
import type {
ICredentialDataDecryptedObject,
IGetNodeParameterOptions,
INode,
INodeExecutionData,
IRunExecutionData,
IExecuteSingleFunctions,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
ITaskDataConnections,
IExecuteData,
} from 'n8n-workflow';
import { ApplicationError, createDeferredPromise, NodeConnectionType } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
assertBinaryData,
detectBinaryEncoding,
getBinaryDataBuffer,
getBinaryHelperFunctions,
getRequestHelperFunctions,
returnJsonArray,
} from '@/node-execute-functions';
import { BaseExecuteContext } from './base-execute-context';
/**
 * Execution context for nodes that run once per input item
 * (`IExecuteSingleFunctions`). A fixed `itemIndex` is baked into the context,
 * so parameter resolution, credentials and binary access all implicitly
 * target that single item.
 */
export class ExecuteSingleContext extends BaseExecuteContext implements IExecuteSingleFunctions {
	readonly helpers: IExecuteSingleFunctions['helpers'];

	constructor(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		runExecutionData: IRunExecutionData,
		runIndex: number,
		connectionInputData: INodeExecutionData[],
		inputData: ITaskDataConnections,
		private readonly itemIndex: number,
		executeData: IExecuteData,
		abortSignal?: AbortSignal,
	) {
		super(
			workflow,
			node,
			additionalData,
			mode,
			runExecutionData,
			runIndex,
			connectionInputData,
			inputData,
			executeData,
			abortSignal,
		);
		// Binary helpers are pre-bound to this context's item index; only the
		// input index remains configurable by the caller.
		this.helpers = {
			createDeferredPromise,
			returnJsonArray,
			...getRequestHelperFunctions(
				workflow,
				node,
				additionalData,
				runExecutionData,
				connectionInputData,
			),
			...getBinaryHelperFunctions(additionalData, workflow.id),
			assertBinaryData: (propertyName, inputIndex = 0) =>
				assertBinaryData(inputData, node, itemIndex, propertyName, inputIndex),
			getBinaryDataBuffer: async (propertyName, inputIndex = 0) =>
				await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex),
			detectBinaryEncoding: (buffer) => detectBinaryEncoding(buffer),
		};
	}

	/** Evaluates an expression, defaulting the item index to this context's item. */
	evaluateExpression(expression: string, itemIndex: number = this.itemIndex) {
		return super.evaluateExpression(expression, itemIndex);
	}

	/**
	 * Returns this context's single item from the given input. When nothing is
	 * connected an empty item (`{ json: {} }`) is returned instead of throwing.
	 */
	getInputData(inputIndex = 0, connectionType = NodeConnectionType.Main) {
		if (!this.inputData.hasOwnProperty(connectionType)) {
			// Return empty array because else it would throw error when nothing is connected to input
			return { json: {} };
		}

		const allItems = super.getInputItems(inputIndex, connectionType);

		const data = allItems?.[this.itemIndex];
		if (data === undefined) {
			throw new ApplicationError('Value of input with given index was not set', {
				extra: { inputIndex, connectionType, itemIndex: this.itemIndex },
			});
		}

		return data;
	}

	/** Returns the fixed item index this context operates on. */
	getItemIndex() {
		return this.itemIndex;
	}

	/** Resolves a node parameter for this context's item index. */
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	getNodeParameter(parameterName: string, fallbackValue?: any, options?: IGetNodeParameterOptions) {
		return this._getNodeParameter(parameterName, this.itemIndex, fallbackValue, options);
	}

	/** Returns decrypted credentials, resolved against this context's item. */
	async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
		return await super.getCredentials<T>(type, this.itemIndex);
	}

	/** Returns the expression data proxy scoped to this context's item. */
	getWorkflowDataProxy() {
		return super.getWorkflowDataProxy(this.itemIndex);
	}
}

View File

@@ -0,0 +1,69 @@
import type {
ICredentialDataDecryptedObject,
INode,
IHookFunctions,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
IWebhookData,
WebhookType,
} from 'n8n-workflow';
import { ApplicationError } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
getNodeWebhookUrl,
getRequestHelperFunctions,
getWebhookDescription,
} from '@/node-execute-functions';
import { NodeExecutionContext } from './node-execution-context';
/**
 * Context for webhook lifecycle hooks (`IHookFunctions`) — e.g. a trigger
 * node's `checkExists`/`create`/`delete` webhook methods. Carries the
 * activation mode and, optionally, the webhook being operated on.
 */
export class HookContext extends NodeExecutionContext implements IHookFunctions {
	readonly helpers: IHookFunctions['helpers'];

	constructor(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		private readonly activation: WorkflowActivateMode,
		private readonly webhookData?: IWebhookData,
	) {
		super(workflow, node, additionalData, mode);
		this.helpers = getRequestHelperFunctions(workflow, node, additionalData);
	}

	/** Returns how the workflow was activated (e.g. `init`, `activate`). */
	getActivationMode() {
		return this.activation;
	}

	/** Returns the decrypted credentials of the given type for this node. */
	async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
		return await this._getCredentials<T>(type);
	}

	/**
	 * Returns the URL of this node's webhook with the given name; the test URL
	 * is used when `webhookData.isTest` is set.
	 */
	getNodeWebhookUrl(name: WebhookType): string | undefined {
		return getNodeWebhookUrl(
			name,
			this.workflow,
			this.node,
			this.additionalData,
			this.mode,
			this.additionalKeys,
			this.webhookData?.isTest,
		);
	}

	/**
	 * Returns the name of the webhook this hook call is about.
	 * @throws ApplicationError when the context was created without webhook data
	 */
	getWebhookName(): string {
		if (this.webhookData === undefined) {
			throw new ApplicationError('Only supported in webhook functions');
		}
		return this.webhookData.webhookDescription.name;
	}

	/** Returns the webhook description of the given name from the node type. */
	getWebhookDescription(name: WebhookType) {
		return getWebhookDescription(name, this.workflow, this.node);
	}
}

View File

@@ -0,0 +1,13 @@
// Public surface of the node execution context implementations.
// The `import/no-cycle` suppressions mark known cycles with node-execute-functions.
// eslint-disable-next-line import/no-cycle
export { ExecuteContext } from './execute-context';
export { ExecuteSingleContext } from './execute-single-context';
export { HookContext } from './hook-context';
export { LoadOptionsContext } from './load-options-context';
export { LocalLoadOptionsContext } from './local-load-options-context';
export { PollContext } from './poll-context';
// eslint-disable-next-line import/no-cycle
export { SupplyDataContext } from './supply-data-context';
export { TriggerContext } from './trigger-context';
export { WebhookContext } from './webhook-context';
export { getAdditionalKeys } from './utils/get-additional-keys';

View File

@@ -0,0 +1,71 @@
import { get } from 'lodash';
import type {
ICredentialDataDecryptedObject,
IGetNodeParameterOptions,
INode,
ILoadOptionsFunctions,
IWorkflowExecuteAdditionalData,
NodeParameterValueType,
Workflow,
} from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import { getRequestHelperFunctions, getSSHTunnelFunctions } from '@/node-execute-functions';
import { NodeExecutionContext } from './node-execution-context';
import { extractValue } from './utils/extract-value';
/**
 * Context used while a node's `loadOptions` methods run (e.g. populating
 * dropdowns in the editor). Always runs in `internal` mode, outside any
 * workflow execution.
 */
export class LoadOptionsContext extends NodeExecutionContext implements ILoadOptionsFunctions {
	readonly helpers: ILoadOptionsFunctions['helpers'];

	constructor(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		private readonly path: string,
	) {
		super(workflow, node, additionalData, 'internal');
		this.helpers = {
			...getSSHTunnelFunctions(),
			...getRequestHelperFunctions(workflow, node, additionalData),
		};
	}

	/** Returns the decrypted credentials of the given type for this node. */
	async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
		return await this._getCredentials<T>(type);
	}

	/**
	 * Returns the (unresolved) value of a parameter on the node currently being
	 * edited. A leading `&` makes `parameterPath` relative to `this.path`.
	 *
	 * @param options `extractValue` resolves resource-locator style values
	 */
	getCurrentNodeParameter(
		parameterPath: string,
		options?: IGetNodeParameterOptions,
	): NodeParameterValueType | object | undefined {
		const nodeParameters = this.additionalData.currentNodeParameters;

		// Consistency: use `startsWith` like LocalLoadOptionsContext does
		// (identical behavior to the previous `charAt(0) === '&'` check).
		if (parameterPath.startsWith('&')) {
			parameterPath = `${this.path.split('.').slice(1, -1).join('.')}.${parameterPath.slice(1)}`;
		}

		let returnData = get(nodeParameters, parameterPath);

		// This is outside the try/catch because it throws errors with proper messages
		if (options?.extractValue) {
			const nodeType = this.workflow.nodeTypes.getByNameAndVersion(
				this.node.type,
				this.node.typeVersion,
			);
			returnData = extractValue(
				returnData,
				parameterPath,
				this.node,
				nodeType,
			) as NodeParameterValueType;
		}

		return returnData;
	}

	/** Returns all current (unresolved) parameters of the node being edited. */
	getCurrentNodeParameters() {
		return this.additionalData.currentNodeParameters;
	}
}

View File

@@ -0,0 +1,70 @@
import lodash from 'lodash';
import { ApplicationError, Workflow } from 'n8n-workflow';
import type {
INodeParameterResourceLocator,
IWorkflowExecuteAdditionalData,
NodeParameterValueType,
ILocalLoadOptionsFunctions,
IWorkflowLoader,
IWorkflowNodeContext,
INodeTypes,
} from 'n8n-workflow';
import { LoadWorkflowNodeContext } from './workflow-node-context';
/**
 * Load-options context that resolves options from *another* workflow: it
 * loads the workflow referenced by the current node's `workflowId` parameter
 * and builds a minimal single-node workflow context around the matching node.
 */
export class LocalLoadOptionsContext implements ILocalLoadOptionsFunctions {
	constructor(
		private nodeTypes: INodeTypes,
		private additionalData: IWorkflowExecuteAdditionalData,
		private path: string,
		private workflowLoader: IWorkflowLoader,
	) {}

	/**
	 * Loads the workflow referenced by the node's `workflowId` parameter and
	 * returns a node context for the first node of `nodeType` found in it,
	 * or `null` when that workflow contains no such node.
	 *
	 * @throws ApplicationError when `workflowId` is missing or not a string
	 */
	async getWorkflowNodeContext(nodeType: string): Promise<IWorkflowNodeContext | null> {
		const { value: workflowId } = this.getCurrentNodeParameter(
			'workflowId',
		) as INodeParameterResourceLocator;

		if (typeof workflowId !== 'string' || !workflowId) {
			throw new ApplicationError(`No workflowId parameter defined on node of type "${nodeType}"!`);
		}

		const dbWorkflow = await this.workflowLoader.get(workflowId);

		const selectedWorkflowNode = dbWorkflow.nodes.find((node) => node.type === nodeType);

		if (selectedWorkflowNode) {
			// Wrap just the selected node in a throwaway workflow so its
			// parameters can be inspected in isolation.
			const selectedSingleNodeWorkflow = new Workflow({
				nodes: [selectedWorkflowNode],
				connections: {},
				active: false,
				nodeTypes: this.nodeTypes,
			});

			const workflowAdditionalData = {
				...this.additionalData,
				currentNodeParameters: selectedWorkflowNode.parameters,
			};

			return new LoadWorkflowNodeContext(
				selectedSingleNodeWorkflow,
				selectedWorkflowNode,
				workflowAdditionalData,
			);
		}

		return null;
	}

	/**
	 * Returns the (unresolved) value of a parameter on the node being edited.
	 * A leading `&` makes `parameterPath` relative to `this.path`.
	 */
	getCurrentNodeParameter(parameterPath: string): NodeParameterValueType | object | undefined {
		const nodeParameters = this.additionalData.currentNodeParameters;

		if (parameterPath.startsWith('&')) {
			parameterPath = `${this.path.split('.').slice(1, -1).join('.')}.${parameterPath.slice(1)}`;
		}

		const returnData = lodash.get(nodeParameters, parameterPath);
		return returnData;
	}
}

View File

@@ -0,0 +1,427 @@
import { Container } from '@n8n/di';
import { get } from 'lodash';
import type {
FunctionsBase,
ICredentialDataDecryptedObject,
ICredentialsExpressionResolveValues,
IExecuteData,
IGetNodeParameterOptions,
INode,
INodeCredentialDescription,
INodeCredentialsDetails,
INodeExecutionData,
INodeInputConfiguration,
INodeOutputConfiguration,
IRunExecutionData,
IWorkflowExecuteAdditionalData,
NodeConnectionType,
NodeParameterValueType,
NodeTypeAndVersion,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import {
ApplicationError,
deepCopy,
ExpressionError,
NodeHelpers,
NodeOperationError,
} from 'n8n-workflow';
import { HTTP_REQUEST_NODE_TYPE, HTTP_REQUEST_TOOL_NODE_TYPE } from '@/constants';
import { Memoized } from '@/decorators';
import { InstanceSettings } from '@/instance-settings';
import { Logger } from '@/logging/logger';
import { cleanupParameterData } from './utils/cleanup-parameter-data';
import { ensureType } from './utils/ensure-type';
import { extractValue } from './utils/extract-value';
import { getAdditionalKeys } from './utils/get-additional-keys';
import { validateValueAgainstSchema } from './utils/validate-value-against-schema';
/**
 * Abstract base of all node execution contexts. Provides read access to the
 * workflow/node/run state, memoized node-type metadata, credential
 * resolution and node-parameter resolution (expression evaluation included).
 */
export abstract class NodeExecutionContext implements Omit<FunctionsBase, 'getCredentials'> {
	protected readonly instanceSettings = Container.get(InstanceSettings);

	constructor(
		protected readonly workflow: Workflow,
		protected readonly node: INode,
		protected readonly additionalData: IWorkflowExecuteAdditionalData,
		protected readonly mode: WorkflowExecuteMode,
		protected readonly runExecutionData: IRunExecutionData | null = null,
		protected readonly runIndex = 0,
		protected readonly connectionInputData: INodeExecutionData[] = [],
		protected readonly executeData?: IExecuteData,
	) {}

	// Lazily resolved and cached via @Memoized.
	@Memoized
	get logger() {
		return Container.get(Logger);
	}

	getExecutionId() {
		return this.additionalData.executionId!;
	}

	/** Returns a deep copy of the node, so callers cannot mutate the original. */
	getNode(): INode {
		return deepCopy(this.node);
	}

	/** Returns a minimal summary (id/name/active) of the workflow. */
	getWorkflow() {
		const { id, name, active } = this.workflow;
		return { id, name, active };
	}

	getMode() {
		return this.mode;
	}

	getWorkflowStaticData(type: string) {
		return this.workflow.getStaticData(type, this.node);
	}

	/** Returns name/type/version/disabled info for all downstream nodes of `nodeName`. */
	getChildNodes(nodeName: string) {
		const output: NodeTypeAndVersion[] = [];
		const nodeNames = this.workflow.getChildNodes(nodeName);

		for (const n of nodeNames) {
			const node = this.workflow.nodes[n];
			output.push({
				name: node.name,
				type: node.type,
				typeVersion: node.typeVersion,
				disabled: node.disabled ?? false,
			});
		}
		return output;
	}

	/** Returns name/type/version/disabled info for all upstream nodes of `nodeName`. */
	getParentNodes(nodeName: string) {
		const output: NodeTypeAndVersion[] = [];
		const nodeNames = this.workflow.getParentNodes(nodeName);

		for (const n of nodeNames) {
			const node = this.workflow.nodes[n];
			output.push({
				name: node.name,
				type: node.type,
				typeVersion: node.typeVersion,
				disabled: node.disabled ?? false,
			});
		}
		return output;
	}

	// Memoized: resolved once per context from the node's type and version.
	@Memoized
	get nodeType() {
		const { type, typeVersion } = this.node;
		return this.workflow.nodeTypes.getByNameAndVersion(type, typeVersion);
	}

	// Memoized: input descriptions normalized to object form.
	@Memoized
	get nodeInputs() {
		return NodeHelpers.getNodeInputs(this.workflow, this.node, this.nodeType.description).map(
			(input) => (typeof input === 'string' ? { type: input } : input),
		);
	}

	getNodeInputs(): INodeInputConfiguration[] {
		return this.nodeInputs;
	}

	// Memoized: output descriptions normalized to object form.
	@Memoized
	get nodeOutputs() {
		return NodeHelpers.getNodeOutputs(this.workflow, this.node, this.nodeType.description).map(
			(output) => (typeof output === 'string' ? { type: output } : output),
		);
	}

	/** Returns the enabled nodes directly connected to this node via `connectionType`. */
	getConnectedNodes(connectionType: NodeConnectionType): INode[] {
		return this.workflow
			.getParentNodes(this.node.name, connectionType, 1)
			.map((nodeName) => this.workflow.getNode(nodeName))
			.filter((node) => !!node)
			.filter((node) => node.disabled !== true);
	}

	getNodeOutputs(): INodeOutputConfiguration[] {
		return this.nodeOutputs;
	}

	getKnownNodeTypes() {
		return this.workflow.nodeTypes.getKnownTypes();
	}

	getRestApiUrl() {
		return this.additionalData.restApiUrl;
	}

	getInstanceBaseUrl() {
		return this.additionalData.instanceBaseUrl;
	}

	getInstanceId() {
		return this.instanceSettings.instanceId;
	}

	getTimezone() {
		return this.workflow.timezone;
	}

	getCredentialsProperties(type: string) {
		return this.additionalData.credentialsHelper.getCredentialsProperties(type);
	}

	/** Returns the requested decrypted credentials if the node has access to them */
	protected async _getCredentials<T extends object = ICredentialDataDecryptedObject>(
		type: string,
		executeData?: IExecuteData,
		connectionInputData?: INodeExecutionData[],
		itemIndex?: number,
	): Promise<T> {
		const { workflow, node, additionalData, mode, runExecutionData, runIndex } = this;
		// Get the NodeType as it has the information if the credentials are required
		const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion);

		// Hardcode for now for security reasons that only a single node can access
		// all credentials
		const fullAccess = [HTTP_REQUEST_NODE_TYPE, HTTP_REQUEST_TOOL_NODE_TYPE].includes(node.type);

		let nodeCredentialDescription: INodeCredentialDescription | undefined;
		if (!fullAccess) {
			if (nodeType.description.credentials === undefined) {
				throw new NodeOperationError(
					node,
					`Node type "${node.type}" does not have any credentials defined`,
					{ level: 'warning' },
				);
			}

			nodeCredentialDescription = nodeType.description.credentials.find(
				(credentialTypeDescription) => credentialTypeDescription.name === type,
			);
			if (nodeCredentialDescription === undefined) {
				throw new NodeOperationError(
					node,
					`Node type "${node.type}" does not have any credentials of type "${type}" defined`,
					{ level: 'warning' },
				);
			}

			if (
				!NodeHelpers.displayParameter(
					// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
					additionalData.currentNodeParameters || node.parameters,
					nodeCredentialDescription,
					node,
					node.parameters,
				)
			) {
				// Credentials should not be displayed even if they would be defined
				throw new NodeOperationError(node, 'Credentials not found');
			}
		}

		// Check if node has any credentials defined
		if (!fullAccess && !node.credentials?.[type]) {
			// If none are defined check if the credentials are required or not

			if (nodeCredentialDescription?.required === true) {
				// Credentials are required so error
				if (!node.credentials) {
					throw new NodeOperationError(node, 'Node does not have any credentials set', {
						level: 'warning',
					});
				}
				if (!node.credentials[type]) {
					throw new NodeOperationError(
						node,
						`Node does not have any credentials set for "${type}"`,
						{
							level: 'warning',
						},
					);
				}
			} else {
				// Credentials are not required
				throw new NodeOperationError(node, 'Node does not require credentials');
			}
		}

		if (fullAccess && !node.credentials?.[type]) {
			// Make sure that fullAccess nodes still behave like before that if they
			// request access to credentials that are currently not set it returns undefined
			throw new NodeOperationError(node, 'Credentials not found');
		}

		let expressionResolveValues: ICredentialsExpressionResolveValues | undefined;
		if (connectionInputData && runExecutionData && runIndex !== undefined) {
			expressionResolveValues = {
				connectionInputData,
				// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
				itemIndex: itemIndex || 0,
				node,
				runExecutionData,
				runIndex,
				workflow,
			} as ICredentialsExpressionResolveValues;
		}

		const nodeCredentials = node.credentials
			? node.credentials[type]
			: ({} as INodeCredentialsDetails);

		// TODO: solve using credentials via expression
		// if (name.charAt(0) === '=') {
		// 	// If the credential name is an expression resolve it
		// 	const additionalKeys = getAdditionalKeys(additionalData, mode);
		// 	name = workflow.expression.getParameterValue(
		// 		name,
		// 		runExecutionData || null,
		// 		runIndex || 0,
		// 		itemIndex || 0,
		// 		node.name,
		// 		connectionInputData || [],
		// 		mode,
		// 		additionalKeys,
		// 	) as string;
		// }

		const decryptedDataObject = await additionalData.credentialsHelper.getDecrypted(
			additionalData,
			nodeCredentials,
			type,
			mode,
			executeData,
			false,
			expressionResolveValues,
		);

		return decryptedDataObject as T;
	}

	// Memoized: extra expression keys ($execution, $vars, …) for this run.
	@Memoized
	protected get additionalKeys() {
		return getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData);
	}

	/** Returns the requested resolved (all expressions replaced) node parameters. */
	getNodeParameter(
		parameterName: string,
		// eslint-disable-next-line @typescript-eslint/no-explicit-any
		fallbackValue?: any,
		options?: IGetNodeParameterOptions,
	): NodeParameterValueType | object {
		const itemIndex = 0;
		return this._getNodeParameter(parameterName, itemIndex, fallbackValue, options);
	}

	/**
	 * Core parameter resolution: reads the raw value, evaluates expressions,
	 * optionally extracts resource-locator values, coerces to `ensureType`
	 * and validates against the parameter's schema.
	 *
	 * @throws ApplicationError when the parameter is missing and no fallback given
	 */
	protected _getNodeParameter(
		parameterName: string,
		itemIndex: number,
		// eslint-disable-next-line @typescript-eslint/no-explicit-any
		fallbackValue?: any,
		options?: IGetNodeParameterOptions,
	): NodeParameterValueType | object {
		const { workflow, node, mode, runExecutionData, runIndex, connectionInputData, executeData } =
			this;

		const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion);

		// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
		const value = get(node.parameters, parameterName, fallbackValue);

		if (value === undefined) {
			throw new ApplicationError('Could not get parameter', { extra: { parameterName } });
		}

		if (options?.rawExpressions) {
			// eslint-disable-next-line @typescript-eslint/no-unsafe-return
			return value;
		}

		const { additionalKeys } = this;

		let returnData;

		try {
			returnData = workflow.expression.getParameterValue(
				// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
				value,
				runExecutionData,
				runIndex,
				itemIndex,
				node.name,
				connectionInputData,
				mode,
				additionalKeys,
				executeData,
				false,
				{},
				options?.contextNode?.name,
			);
			cleanupParameterData(returnData);
		} catch (e) {
			if (
				e instanceof ExpressionError &&
				node.continueOnFail &&
				node.type === 'n8n-nodes-base.set'
			) {
				// https://linear.app/n8n/issue/PAY-684
				returnData = [{ name: undefined, value: undefined }];
			} else {
				// Attach the failing parameter and raw value for better error messages.
				// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
				if (e.context) e.context.parameter = parameterName;
				// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access
				e.cause = value;
				throw e;
			}
		}

		// This is outside the try/catch because it throws errors with proper messages
		if (options?.extractValue) {
			returnData = extractValue(returnData, parameterName, node, nodeType, itemIndex);
		}

		// Make sure parameter value is the type specified in the ensureType option, if needed convert it
		if (options?.ensureType) {
			returnData = ensureType(options.ensureType, returnData, parameterName, {
				itemIndex,
				runIndex,
				nodeCause: node.name,
			});
		}

		// Validate parameter value if it has a schema defined(RMC) or validateType defined
		// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
		returnData = validateValueAgainstSchema(
			node,
			nodeType,
			returnData,
			parameterName,
			runIndex,
			itemIndex,
		);

		// eslint-disable-next-line @typescript-eslint/no-unsafe-return
		return returnData;
	}

	/** Resolves a single expression string against the current run state. */
	evaluateExpression(expression: string, itemIndex: number = 0) {
		return this.workflow.expression.resolveSimpleParameterValue(
			`=${expression}`,
			{},
			this.runExecutionData,
			this.runIndex,
			itemIndex,
			this.node.name,
			this.connectionInputData,
			this.mode,
			this.additionalKeys,
			this.executeData,
		);
	}

	/** Wraps output items in the per-output array structure n8n expects. */
	async prepareOutputData(outputData: INodeExecutionData[]) {
		return [outputData];
	}
}

View File

@@ -0,0 +1,60 @@
import type {
ICredentialDataDecryptedObject,
INode,
IPollFunctions,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
getBinaryHelperFunctions,
getRequestHelperFunctions,
getSchedulingFunctions,
returnJsonArray,
} from '@/node-execute-functions';
import { NodeExecutionContext } from './node-execution-context';
/** Placeholder emitter: must be overwritten before the poll trigger emits data. */
const throwOnEmit = () => {
	throw new ApplicationError('Overwrite PollContext.__emit function');
};

/** Placeholder error emitter: must be overwritten before errors are emitted. */
const throwOnEmitError = () => {
	throw new ApplicationError('Overwrite PollContext.__emitError function');
};
/**
 * Context for polling trigger nodes (`IPollFunctions`). The `__emit` /
 * `__emitError` callbacks default to throwing placeholders and are expected
 * to be replaced by the poll scheduler before the trigger runs.
 */
export class PollContext extends NodeExecutionContext implements IPollFunctions {
	readonly helpers: IPollFunctions['helpers'];

	constructor(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		private readonly activation: WorkflowActivateMode,
		readonly __emit: IPollFunctions['__emit'] = throwOnEmit,
		readonly __emitError: IPollFunctions['__emitError'] = throwOnEmitError,
	) {
		super(workflow, node, additionalData, mode);
		// Later spreads override earlier keys — keep this order.
		this.helpers = {
			createDeferredPromise,
			returnJsonArray,
			...getRequestHelperFunctions(workflow, node, additionalData),
			...getBinaryHelperFunctions(additionalData, workflow.id),
			...getSchedulingFunctions(workflow),
		};
	}

	/** Returns how the workflow was activated (e.g. `init`, `activate`). */
	getActivationMode() {
		return this.activation;
	}

	/** Returns the decrypted credentials of the given type for this node. */
	async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
		return await this._getCredentials<T>(type);
	}
}

View File

@@ -0,0 +1,293 @@
import get from 'lodash/get';
import type {
AINodeConnectionType,
CloseFunction,
ExecutionBaseError,
IExecuteData,
IGetNodeParameterOptions,
INode,
INodeExecutionData,
IRunExecutionData,
ISupplyDataFunctions,
ITaskData,
ITaskDataConnections,
ITaskMetadata,
IWorkflowExecuteAdditionalData,
NodeConnectionType,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { createDeferredPromise } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
assertBinaryData,
constructExecutionMetaData,
copyInputItems,
detectBinaryEncoding,
getBinaryDataBuffer,
getBinaryHelperFunctions,
getCheckProcessedHelperFunctions,
getFileSystemHelperFunctions,
getRequestHelperFunctions,
getSSHTunnelFunctions,
normalizeItems,
returnJsonArray,
} from '@/node-execute-functions';
import { BaseExecuteContext } from './base-execute-context';
import { getInputConnectionData } from './utils/get-input-connection-data';
/**
 * Execution context handed to nodes that *supply data* to another node
 * (e.g. AI sub-nodes feeding a root node), implementing `ISupplyDataFunctions`.
 *
 * On top of the regular execute-context behavior it exposes a bound set of
 * helpers and records sub-node input/output runs into `runExecutionData` so
 * they appear in the execution log.
 */
export class SupplyDataContext extends BaseExecuteContext implements ISupplyDataFunctions {
	readonly helpers: ISupplyDataFunctions['helpers'];

	readonly getNodeParameter: ISupplyDataFunctions['getNodeParameter'];

	constructor(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		runExecutionData: IRunExecutionData,
		runIndex: number,
		connectionInputData: INodeExecutionData[],
		inputData: ITaskDataConnections,
		// Connection type this context supplies data for (used as the default in getInputData)
		private readonly connectionType: NodeConnectionType,
		executeData: IExecuteData,
		// Cleanup callbacks collected while resolving input connections
		private readonly closeFunctions: CloseFunction[],
		abortSignal?: AbortSignal,
	) {
		super(
			workflow,
			node,
			additionalData,
			mode,
			runExecutionData,
			runIndex,
			connectionInputData,
			inputData,
			executeData,
			abortSignal,
		);

		this.helpers = {
			createDeferredPromise,
			copyInputItems,
			...getRequestHelperFunctions(
				workflow,
				node,
				additionalData,
				runExecutionData,
				connectionInputData,
			),
			...getSSHTunnelFunctions(),
			...getFileSystemHelperFunctions(node),
			...getBinaryHelperFunctions(additionalData, workflow.id),
			...getCheckProcessedHelperFunctions(workflow, node),
			// Binary helpers are bound to this context's inputData; input index is fixed to 0
			assertBinaryData: (itemIndex, propertyName) =>
				assertBinaryData(inputData, node, itemIndex, propertyName, 0),
			getBinaryDataBuffer: async (itemIndex, propertyName) =>
				await getBinaryDataBuffer(inputData, itemIndex, propertyName, 0),
			detectBinaryEncoding: (buffer: Buffer) => detectBinaryEncoding(buffer),
			returnJsonArray,
			normalizeItems,
			constructExecutionMetaData,
		};

		// Delegate to the base-class parameter resolver; the cast narrows the
		// generic overloads down to the ISupplyDataFunctions signature.
		this.getNodeParameter = ((
			parameterName: string,
			itemIndex: number,
			// eslint-disable-next-line @typescript-eslint/no-explicit-any
			fallbackValue?: any,
			options?: IGetNodeParameterOptions,
		) =>
			this._getNodeParameter(
				parameterName,
				itemIndex,
				fallbackValue,
				options,
			)) as ISupplyDataFunctions['getNodeParameter'];
	}

	/** Resolves the data supplied by nodes connected on the given input connection type. */
	async getInputConnectionData(
		connectionType: AINodeConnectionType,
		itemIndex: number,
	): Promise<unknown> {
		return await getInputConnectionData.call(
			this,
			this.workflow,
			this.runExecutionData,
			this.runIndex,
			this.connectionInputData,
			this.inputData,
			this.additionalData,
			this.executeData,
			this.mode,
			this.closeFunctions,
			connectionType,
			itemIndex,
			this.abortSignal,
		);
	}

	/** Returns the items on the given input; empty array when nothing is connected. */
	getInputData(inputIndex = 0, connectionType = this.connectionType) {
		if (!this.inputData.hasOwnProperty(connectionType)) {
			// Return empty array because else it would throw error when nothing is connected to input
			return [];
		}
		return super.getInputItems(inputIndex, connectionType) ?? [];
	}

	/** @deprecated create a context object with inputData for every runIndex */
	addInputData(
		connectionType: AINodeConnectionType,
		data: INodeExecutionData[][],
	): { index: number } {
		const nodeName = this.node.name;
		// The new run's index is the count of runs already recorded for this node
		let currentNodeRunIndex = 0;
		if (this.runExecutionData.resultData.runData.hasOwnProperty(nodeName)) {
			currentNodeRunIndex = this.runExecutionData.resultData.runData[nodeName].length;
		}

		// Fire-and-forget: logging failures must not break the execution itself
		this.addExecutionDataFunctions(
			'input',
			data,
			connectionType,
			nodeName,
			currentNodeRunIndex,
		).catch((error) => {
			this.logger.warn(
				`There was a problem logging input data of node "${nodeName}": ${
					// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
					error.message
				}`,
			);
		});

		return { index: currentNodeRunIndex };
	}

	/** @deprecated Switch to WorkflowExecute to store output on runExecutionData.resultData.runData */
	addOutputData(
		connectionType: AINodeConnectionType,
		currentNodeRunIndex: number,
		data: INodeExecutionData[][] | ExecutionBaseError,
		metadata?: ITaskMetadata,
	): void {
		const nodeName = this.node.name;
		// Fire-and-forget: logging failures must not break the execution itself
		this.addExecutionDataFunctions(
			'output',
			data,
			connectionType,
			nodeName,
			currentNodeRunIndex,
			metadata,
		).catch((error) => {
			this.logger.warn(
				`There was a problem logging output data of node "${nodeName}": ${
					// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
					error.message
				}`,
			);
		});
	}

	/**
	 * Records a sub-node run into `runExecutionData`.
	 *
	 * For `type === 'input'` a fresh running `ITaskData` entry is created at
	 * `currentNodeRunIndex` and the `nodeExecuteBefore` hook fires. For
	 * `type === 'output'` the entry created by the matching input call is
	 * finalized (status, timing, data), the `nodeExecuteAfter` hook fires, and a
	 * subRun reference is appended under the source node's execution metadata.
	 */
	async addExecutionDataFunctions(
		type: 'input' | 'output',
		data: INodeExecutionData[][] | ExecutionBaseError,
		connectionType: AINodeConnectionType,
		sourceNodeName: string,
		currentNodeRunIndex: number,
		metadata?: ITaskMetadata,
	): Promise<void> {
		const {
			additionalData,
			runExecutionData,
			runIndex: sourceNodeRunIndex,
			node: { name: nodeName },
		} = this;

		let taskData: ITaskData | undefined;
		if (type === 'input') {
			taskData = {
				startTime: new Date().getTime(),
				executionTime: 0,
				executionStatus: 'running',
				source: [null],
			};
		} else {
			// At the moment we expect that there is always an input sent before the output
			taskData = get(
				runExecutionData,
				['resultData', 'runData', nodeName, currentNodeRunIndex],
				undefined,
			);
			if (taskData === undefined) {
				return;
			}
			taskData.metadata = metadata;
		}
		taskData = taskData!;

		if (data instanceof Error) {
			taskData.executionStatus = 'error';
			taskData.error = data;
		} else {
			if (type === 'output') {
				taskData.executionStatus = 'success';
			}
			taskData.data = {
				[connectionType]: data,
			} as ITaskDataConnections;
		}

		if (type === 'input') {
			if (!(data instanceof Error)) {
				this.inputData[connectionType] = data;
				// TODO: remove inputOverride
				taskData.inputOverride = {
					[connectionType]: data,
				} as ITaskDataConnections;
			}

			if (!runExecutionData.resultData.runData.hasOwnProperty(nodeName)) {
				runExecutionData.resultData.runData[nodeName] = [];
			}

			runExecutionData.resultData.runData[nodeName][currentNodeRunIndex] = taskData;
			await additionalData.hooks?.executeHookFunctions('nodeExecuteBefore', [nodeName]);
		} else {
			// Outputs
			taskData.executionTime = new Date().getTime() - taskData.startTime;
			await additionalData.hooks?.executeHookFunctions('nodeExecuteAfter', [
				nodeName,
				taskData,
				this.runExecutionData,
			]);

			// Ensure the metadata container exists before linking this sub-run
			if (get(runExecutionData, 'executionData.metadata', undefined) === undefined) {
				runExecutionData.executionData!.metadata = {};
			}

			let sourceTaskData = runExecutionData.executionData?.metadata?.[sourceNodeName];
			if (!sourceTaskData) {
				runExecutionData.executionData!.metadata[sourceNodeName] = [];
				sourceTaskData = runExecutionData.executionData!.metadata[sourceNodeName];
			}

			if (!sourceTaskData[sourceNodeRunIndex]) {
				sourceTaskData[sourceNodeRunIndex] = {
					subRun: [],
				};
			}

			// Link this sub-node run to the source node's run for the execution log
			sourceTaskData[sourceNodeRunIndex].subRun!.push({
				node: nodeName,
				runIndex: currentNodeRunIndex,
			});
		}
	}
}

View File

@@ -0,0 +1,62 @@
import type {
ICredentialDataDecryptedObject,
INode,
ITriggerFunctions,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
getBinaryHelperFunctions,
getRequestHelperFunctions,
getSchedulingFunctions,
getSSHTunnelFunctions,
returnJsonArray,
} from '@/node-execute-functions';
import { NodeExecutionContext } from './node-execution-context';
/** Default `emit` handler: fails loudly until callers supply a real one. */
function throwOnEmit(): never {
	throw new ApplicationError('Overwrite TriggerContext.emit function');
}
/** Default `emitError` handler: fails loudly until callers supply a real one. */
function throwOnEmitError(): never {
	throw new ApplicationError('Overwrite TriggerContext.emitError function');
}
/**
 * Execution context handed to trigger nodes (`ITriggerFunctions`): exposes
 * request/binary/scheduling helpers plus the `emit`/`emitError` callbacks a
 * trigger uses to start workflow executions.
 */
export class TriggerContext extends NodeExecutionContext implements ITriggerFunctions {
	readonly helpers: ITriggerFunctions['helpers'];

	constructor(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		// How the workflow was activated (e.g. 'init' vs. manual activation)
		private readonly activation: WorkflowActivateMode,
		// Defaults throw on purpose: callers must overwrite these before the trigger emits
		readonly emit: ITriggerFunctions['emit'] = throwOnEmit,
		readonly emitError: ITriggerFunctions['emitError'] = throwOnEmitError,
	) {
		super(workflow, node, additionalData, mode);

		this.helpers = {
			createDeferredPromise,
			returnJsonArray,
			...getSSHTunnelFunctions(),
			...getRequestHelperFunctions(workflow, node, additionalData),
			...getBinaryHelperFunctions(additionalData, workflow.id),
			...getSchedulingFunctions(workflow),
		};
	}

	/** Returns the mode the workflow was activated in. */
	getActivationMode() {
		return this.activation;
	}

	/** Resolves decrypted credentials of the given type for this node. */
	async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
		return await this._getCredentials<T>(type);
	}
}

View File

@@ -0,0 +1,38 @@
import toPlainObject from 'lodash/toPlainObject';
import { DateTime } from 'luxon';
import type { NodeParameterValue } from 'n8n-workflow';
import { cleanupParameterData } from '../cleanup-parameter-data';
describe('cleanupParameterData', () => {
	/**
	 * Runs cleanupParameterData over `{ x: 1, y: value }` and asserts the
	 * `typeof y` before and after the in-place mutation.
	 */
	const expectTypeOfY = (value: NodeParameterValue, before: string, after: string) => {
		const data = { x: 1, y: value };
		expect(typeof data.y).toBe(before);
		cleanupParameterData(data);
		expect(typeof data.y).toBe(after);
	};

	it('should stringify Luxon dates in-place', () => {
		expectTypeOfY(DateTime.now() as unknown as NodeParameterValue, 'object', 'string');
	});

	it('should stringify plain Luxon dates in-place', () => {
		expectTypeOfY(toPlainObject(DateTime.now()), 'object', 'string');
	});

	it('should handle objects with nameless constructors', () => {
		expectTypeOfY({ constructor: {} } as NodeParameterValue, 'object', 'object');
	});

	it('should handle objects without a constructor', () => {
		expectTypeOfY(
			{ constructor: undefined } as unknown as NodeParameterValue,
			'object',
			'object',
		);
	});
});

View File

@@ -0,0 +1,481 @@
import { mock } from 'jest-mock-extended';
import type { INodeType, ISupplyDataFunctions, INode } from 'n8n-workflow';
import { z } from 'zod';
import { createNodeAsTool } from '../create-node-as-tool';
// Replace DynamicStructuredTool with a plain object so the tests can inspect
// the config (name/description/schema/func) that createNodeAsTool passes in.
jest.mock('@langchain/core/tools', () => ({
	DynamicStructuredTool: jest.fn().mockImplementation((config) => ({
		name: config.name,
		description: config.description,
		schema: config.schema,
		func: config.func,
	})),
}));

describe('createNodeAsTool', () => {
	// Shared mocks for the supply-data context and the wrapped node/node type
	const context = mock<ISupplyDataFunctions>({
		getNodeParameter: jest.fn(),
		addInputData: jest.fn(),
		addOutputData: jest.fn(),
		getNode: jest.fn(),
	});
	const handleToolInvocation = jest.fn();
	const nodeType = mock<INodeType>({
		description: {
			name: 'TestNode',
			description: 'Test node description',
		},
	});
	const node = mock<INode>({ name: 'Test_Node' });
	const options = { node, nodeType, handleToolInvocation };

	beforeEach(() => {
		jest.clearAllMocks();
		(context.addInputData as jest.Mock).mockReturnValue({ index: 0 });
		(context.getNode as jest.Mock).mockReturnValue(node);
		(nodeType.execute as jest.Mock).mockResolvedValue([[{ json: { result: 'test' } }]]);

		// Baseline parameters: one top-level and one nested $fromAI placeholder
		node.parameters = {
			param1: "={{$fromAI('param1', 'Test parameter', 'string') }}",
			param2: 'static value',
			nestedParam: {
				subParam: "={{ $fromAI('subparam', 'Nested parameter', 'string') }}",
			},
			descriptionType: 'auto',
			resource: 'testResource',
			operation: 'testOperation',
		};
	});

	describe('Tool Creation and Basic Properties', () => {
		it('should create a DynamicStructuredTool with correct properties', () => {
			const tool = createNodeAsTool(options).response;

			expect(tool).toBeDefined();
			expect(tool.name).toBe('Test_Node');
			expect(tool.description).toBe(
				'Test node description\n Resource: testResource\n Operation: testOperation',
			);
			expect(tool.schema).toBeDefined();
		});

		it('should use toolDescription if provided', () => {
			node.parameters.descriptionType = 'manual';
			node.parameters.toolDescription = 'Custom tool description';

			const tool = createNodeAsTool(options).response;

			expect(tool.description).toBe('Custom tool description');
		});
	});

	describe('Schema Creation and Parameter Handling', () => {
		it('should create a schema based on fromAI arguments in nodeParameters', () => {
			const tool = createNodeAsTool(options).response;

			// Only $fromAI placeholders contribute schema keys; static values do not
			expect(tool.schema).toBeDefined();
			expect(tool.schema.shape).toHaveProperty('param1');
			expect(tool.schema.shape).toHaveProperty('subparam');
			expect(tool.schema.shape).not.toHaveProperty('param2');
		});

		it('should handle fromAI arguments correctly', () => {
			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.subparam).toBeInstanceOf(z.ZodString);
		});

		it('should handle default values correctly', () => {
			node.parameters = {
				paramWithDefault:
					"={{ $fromAI('paramWithDefault', 'Parameter with default', 'string', 'default value') }}",
				numberWithDefault:
					"={{ $fromAI('numberWithDefault', 'Number with default', 'number', 42) }}",
				booleanWithDefault:
					"={{ $fromAI('booleanWithDefault', 'Boolean with default', 'boolean', true) }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.paramWithDefault.description).toBe('Parameter with default');
			expect(tool.schema.shape.numberWithDefault.description).toBe('Number with default');
			expect(tool.schema.shape.booleanWithDefault.description).toBe('Boolean with default');
		});

		it('should handle nested parameters correctly', () => {
			// Keys from any nesting depth are flattened into one schema
			node.parameters = {
				topLevel: "={{ $fromAI('topLevel', 'Top level parameter', 'string') }}",
				nested: {
					level1: "={{ $fromAI('level1', 'Nested level 1', 'string') }}",
					deeperNested: {
						level2: "={{ $fromAI('level2', 'Nested level 2', 'number') }}",
					},
				},
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.topLevel).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.level1).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.level2).toBeInstanceOf(z.ZodNumber);
		});

		it('should handle array parameters correctly', () => {
			node.parameters = {
				arrayParam: [
					"={{ $fromAI('item1', 'First item', 'string') }}",
					"={{ $fromAI('item2', 'Second item', 'number') }}",
				],
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.item1).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.item2).toBeInstanceOf(z.ZodNumber);
		});
	});

	describe('Error Handling and Edge Cases', () => {
		it('should handle error during node execution', async () => {
			nodeType.execute = jest.fn().mockRejectedValue(new Error('Execution failed'));
			const tool = createNodeAsTool(options).response;
			handleToolInvocation.mockReturnValue('Error during node execution: some random issue.');

			const result = await tool.func({ param1: 'test value' });

			expect(result).toContain('Error during node execution:');
		});

		it('should throw an error for invalid parameter names', () => {
			node.parameters.invalidParam = "$fromAI('invalid param', 'Invalid parameter', 'string')";

			expect(() => createNodeAsTool(options)).toThrow('Parameter key `invalid param` is invalid');
		});

		it('should throw an error for $fromAI calls with unsupported types', () => {
			node.parameters = {
				invalidTypeParam:
					"={{ $fromAI('invalidType', 'Param with unsupported type', 'unsupportedType') }}",
			};

			expect(() => createNodeAsTool(options)).toThrow('Invalid type: unsupportedType');
		});

		it('should handle empty parameters and parameters with no fromAI calls', () => {
			node.parameters = {
				param1: 'static value 1',
				param2: 'static value 2',
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape).toEqual({});
		});
	});

	describe('Parameter Name and Description Handling', () => {
		it('should accept parameter names with underscores and hyphens', () => {
			node.parameters = {
				validName1:
					"={{ $fromAI('param_name-1', 'Valid name with underscore and hyphen', 'string') }}",
				validName2: "={{ $fromAI('param_name_2', 'Another valid name', 'number') }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape['param_name-1']).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape['param_name-1'].description).toBe(
				'Valid name with underscore and hyphen',
			);
			expect(tool.schema.shape.param_name_2).toBeInstanceOf(z.ZodNumber);
			expect(tool.schema.shape.param_name_2.description).toBe('Another valid name');
		});

		it('should throw an error for parameter names with invalid special characters', () => {
			node.parameters = {
				invalidNameParam:
					"={{ $fromAI('param@name!', 'Invalid name with special characters', 'string') }}",
			};

			expect(() => createNodeAsTool(options)).toThrow('Parameter key `param@name!` is invalid');
		});

		it('should throw an error for empty parameter name', () => {
			node.parameters = {
				invalidNameParam: "={{ $fromAI('', 'Invalid name with special characters', 'string') }}",
			};

			expect(() => createNodeAsTool(options)).toThrow(
				'You must specify a key when using $fromAI()',
			);
		});

		it('should handle parameter names with exact and exceeding character limits', () => {
			// 64 characters is accepted; 65 is rejected
			const longName = 'a'.repeat(64);
			const tooLongName = 'a'.repeat(65);
			node.parameters = {
				longNameParam: `={{ $fromAI('${longName}', 'Param with 64 character name', 'string') }}`,
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape[longName]).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape[longName].description).toBe('Param with 64 character name');

			node.parameters = {
				tooLongNameParam: `={{ $fromAI('${tooLongName}', 'Param with 65 character name', 'string') }}`,
			};

			expect(() => createNodeAsTool(options)).toThrow(
				`Parameter key \`${tooLongName}\` is invalid`,
			);
		});

		it('should handle $fromAI calls with empty description', () => {
			node.parameters = {
				emptyDescriptionParam: "={{ $fromAI('emptyDescription', '', 'number') }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.emptyDescription).toBeInstanceOf(z.ZodNumber);
			expect(tool.schema.shape.emptyDescription.description).toBeUndefined();
		});

		it('should throw an error for calls with the same parameter but different descriptions', () => {
			node.parameters = {
				duplicateParam1: "={{ $fromAI('duplicate', 'First duplicate', 'string') }}",
				duplicateParam2: "={{ $fromAI('duplicate', 'Second duplicate', 'number') }}",
			};

			expect(() => createNodeAsTool(options)).toThrow(
				"Duplicate key 'duplicate' found with different description or type",
			);
		});

		it('should throw an error for calls with the same parameter but different types', () => {
			node.parameters = {
				duplicateParam1: "={{ $fromAI('duplicate', 'First duplicate', 'string') }}",
				duplicateParam2: "={{ $fromAI('duplicate', 'First duplicate', 'number') }}",
			};

			expect(() => createNodeAsTool(options)).toThrow(
				"Duplicate key 'duplicate' found with different description or type",
			);
		});
	});

	describe('Complex Parsing Scenarios', () => {
		it('should correctly parse $fromAI calls with varying spaces, capitalization, and within template literals', () => {
			node.parameters = {
				varyingSpacing1: "={{$fromAI('param1','Description1','string')}}",
				varyingSpacing2: "={{ $fromAI ( 'param2' , 'Description2' , 'number' ) }}",
				varyingSpacing3: "={{ $FROMai('param3', 'Description3', 'boolean') }}",
				wrongCapitalization: "={{$fromai('param4','Description4','number')}}",
				templateLiteralParam:
					// eslint-disable-next-line n8n-local-rules/no-interpolation-in-regular-string
					"={{ `Value is: ${$fromAI('templatedParam', 'Templated param description', 'string')}` }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.param1.description).toBe('Description1');
			expect(tool.schema.shape.param2).toBeInstanceOf(z.ZodNumber);
			expect(tool.schema.shape.param2.description).toBe('Description2');
			expect(tool.schema.shape.param3).toBeInstanceOf(z.ZodBoolean);
			expect(tool.schema.shape.param3.description).toBe('Description3');
			expect(tool.schema.shape.param4).toBeInstanceOf(z.ZodNumber);
			expect(tool.schema.shape.param4.description).toBe('Description4');
			expect(tool.schema.shape.templatedParam).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.templatedParam.description).toBe('Templated param description');
		});

		it('should correctly parse multiple $fromAI calls interleaved with regular text', () => {
			node.parameters = {
				interleavedParams:
					"={{ 'Start ' + $fromAI('param1', 'First param', 'string') + ' Middle ' + $fromAI('param2', 'Second param', 'number') + ' End' }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.param1.description).toBe('First param');
			expect(tool.schema.shape.param2).toBeInstanceOf(z.ZodNumber);
			expect(tool.schema.shape.param2.description).toBe('Second param');
		});

		it('should correctly parse $fromAI calls with complex JSON default values', () => {
			node.parameters = {
				complexJsonDefault:
					'={{ $fromAI(\'complexJson\', \'Param with complex JSON default\', \'json\', \'{"nested": {"key": "value"}, "array": [1, 2, 3]}\') }}',
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.complexJson._def.innerType).toBeInstanceOf(z.ZodRecord);
			expect(tool.schema.shape.complexJson.description).toBe('Param with complex JSON default');
			expect(tool.schema.shape.complexJson._def.defaultValue()).toEqual({
				nested: { key: 'value' },
				array: [1, 2, 3],
			});
		});

		it('should ignore $fromAI calls embedded in non-string node parameters', () => {
			// $fromAI is only parsed out of string values, at any nesting level
			node.parameters = {
				numberParam: 42,
				booleanParam: false,
				objectParam: {
					innerString: "={{ $fromAI('innerParam', 'Inner param', 'string') }}",
					innerNumber: 100,
					innerObject: {
						deepParam: "={{ $fromAI('deepParam', 'Deep param', 'number') }}",
					},
				},
				arrayParam: [
					"={{ $fromAI('arrayParam1', 'First array param', 'string') }}",
					200,
					"={{ $fromAI('nestedArrayParam', 'Nested array param', 'boolean') }}",
				],
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.innerParam).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.innerParam.description).toBe('Inner param');
			expect(tool.schema.shape.deepParam).toBeInstanceOf(z.ZodNumber);
			expect(tool.schema.shape.deepParam.description).toBe('Deep param');
			expect(tool.schema.shape.arrayParam1).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.arrayParam1.description).toBe('First array param');
			expect(tool.schema.shape.nestedArrayParam).toBeInstanceOf(z.ZodBoolean);
			expect(tool.schema.shape.nestedArrayParam.description).toBe('Nested array param');
		});
	});

	describe('Escaping and Special Characters', () => {
		it('should handle escaped single quotes in parameter names and descriptions', () => {
			node.parameters = {
				escapedQuotesParam:
					"={{ $fromAI('paramName', 'Description with \\'escaped\\' quotes', 'string') }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.paramName.description).toBe("Description with 'escaped' quotes");
		});

		it('should handle escaped double quotes in parameter names and descriptions', () => {
			node.parameters = {
				escapedQuotesParam:
					'={{ $fromAI("paramName", "Description with \\"escaped\\" quotes", "string") }}',
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.paramName.description).toBe('Description with "escaped" quotes');
		});

		it('should handle escaped backslashes in parameter names and descriptions', () => {
			node.parameters = {
				escapedBackslashesParam:
					"={{ $fromAI('paramName', 'Description with \\\\ backslashes', 'string') }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.paramName.description).toBe('Description with \\ backslashes');
		});

		it('should handle mixed escaped characters in parameter names and descriptions', () => {
			node.parameters = {
				mixedEscapesParam:
					'={{ $fromAI(`paramName`, \'Description with \\\'mixed" characters\', "number") }}',
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodNumber);
			expect(tool.schema.shape.paramName.description).toBe('Description with \'mixed" characters');
		});
	});

	describe('Edge Cases and Limitations', () => {
		it('should ignore excess arguments in $fromAI calls beyond the fourth argument', () => {
			node.parameters = {
				excessArgsParam:
					"={{ $fromAI('excessArgs', 'Param with excess arguments', 'string', 'default', 'extraArg1', 'extraArg2') }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.excessArgs._def.innerType).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.excessArgs.description).toBe('Param with excess arguments');
			expect(tool.schema.shape.excessArgs._def.defaultValue()).toBe('default');
		});

		it('should correctly parse $fromAI calls with nested parentheses', () => {
			node.parameters = {
				nestedParenthesesParam:
					"={{ $fromAI('paramWithNested', 'Description with ((nested)) parentheses', 'string') }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.paramWithNested).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.paramWithNested.description).toBe(
				'Description with ((nested)) parentheses',
			);
		});

		it('should handle $fromAI calls with very long descriptions', () => {
			const longDescription = 'A'.repeat(1000);
			node.parameters = {
				longParam: `={{ $fromAI('longParam', '${longDescription}', 'string') }}`,
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.longParam).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.longParam.description).toBe(longDescription);
		});

		it('should handle $fromAI calls with only some parameters', () => {
			// Omitted type defaults to string
			node.parameters = {
				partialParam1: "={{ $fromAI('partial1') }}",
				partialParam2: "={{ $fromAI('partial2', 'Description only') }}",
				partialParam3: "={{ $fromAI('partial3', '', 'number') }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.partial1).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.partial2).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.partial3).toBeInstanceOf(z.ZodNumber);
		});
	});

	describe('Unicode and Internationalization', () => {
		it('should handle $fromAI calls with unicode characters', () => {
			node.parameters = {
				unicodeParam: "={{ $fromAI('unicodeParam', '🌈 Unicode parameter 你好', 'string') }}",
			};

			const tool = createNodeAsTool(options).response;

			expect(tool.schema.shape.unicodeParam).toBeInstanceOf(z.ZodString);
			expect(tool.schema.shape.unicodeParam.description).toBe('🌈 Unicode parameter 你好');
		});
	});
});

View File

@@ -0,0 +1,80 @@
import { ExpressionError } from 'n8n-workflow';
import { ensureType } from '../ensure-type';
describe('ensureType', () => {
	it('throws error for null value', () => {
		expect(() => ensureType('string', null, 'myParam')).toThrowError(
			new ExpressionError("Parameter 'myParam' must not be null"),
		);
	});

	it('throws error for undefined value', () => {
		expect(() => ensureType('string', undefined, 'myParam')).toThrowError(
			new ExpressionError("Parameter 'myParam' could not be 'undefined'"),
		);
	});

	it('returns string value without modification', () => {
		expect(ensureType('string', 'hello', 'myParam')).toBe('hello');
	});

	it('returns number value without modification', () => {
		expect(ensureType('number', 42, 'myParam')).toBe(42);
	});

	it('returns boolean value without modification', () => {
		expect(ensureType('boolean', true, 'myParam')).toBe(true);
	});

	it('converts object to string if toType is string', () => {
		const obj = { name: 'John' };
		expect(ensureType('string', obj, 'myParam')).toBe(JSON.stringify(obj));
	});

	it('converts string to number if toType is number', () => {
		expect(ensureType('number', '10', 'myParam')).toBe(10);
	});

	it('throws error for invalid conversion to number', () => {
		expect(() => ensureType('number', 'invalid', 'myParam')).toThrowError(
			new ExpressionError("Parameter 'myParam' must be a number, but we got 'invalid'"),
		);
	});

	it('parses valid JSON string to object if toType is object', () => {
		const json = '{"name": "Alice"}';
		expect(ensureType('object', json, 'myParam')).toEqual(JSON.parse(json));
	});

	it('throws error for invalid JSON string to object conversion', () => {
		expect(() => ensureType('object', 'invalid_json', 'myParam')).toThrowError(
			new ExpressionError("Parameter 'myParam' could not be parsed"),
		);
	});

	it('throws error for non-array value if toType is array', () => {
		expect(() => ensureType('array', { name: 'Alice' }, 'myParam')).toThrowError(
			new ExpressionError("Parameter 'myParam' must be an array, but we got object"),
		);
	});
});

View File

@@ -0,0 +1,219 @@
import type { IRunExecutionData } from 'n8n-workflow';
import { InvalidExecutionMetadataError } from '@/errors/invalid-execution-metadata.error';
import {
setWorkflowExecutionMetadata,
setAllWorkflowExecutionMetadata,
KV_LIMIT,
getWorkflowExecutionMetadata,
getAllWorkflowExecutionMetadata,
} from '../execution-metadata';
describe('Execution Metadata functions', () => {
	/** Wraps a metadata store in the minimal IRunExecutionData shape the helpers read/write. */
	const asExecutionData = (metadata: Record<string, unknown>) =>
		({
			resultData: {
				metadata,
			},
		}) as IRunExecutionData;

	test('setWorkflowExecutionMetadata will set a value', () => {
		const metadata = {};
		const executionData = asExecutionData(metadata);

		setWorkflowExecutionMetadata(executionData, 'test1', 'value1');

		expect(metadata).toEqual({
			test1: 'value1',
		});
	});

	test('setAllWorkflowExecutionMetadata will set multiple values', () => {
		const metadata = {};
		const executionData = asExecutionData(metadata);

		setAllWorkflowExecutionMetadata(executionData, {
			test1: 'value1',
			test2: 'value2',
		});

		expect(metadata).toEqual({
			test1: 'value1',
			test2: 'value2',
		});
	});

	test('setWorkflowExecutionMetadata should only convert numbers to strings', () => {
		const metadata = {};
		const executionData = asExecutionData(metadata);

		// Numbers are coerced to strings; other non-string values are rejected
		expect(() => setWorkflowExecutionMetadata(executionData, 'test1', 1234)).not.toThrow(
			InvalidExecutionMetadataError,
		);

		expect(metadata).toEqual({
			test1: '1234',
		});

		expect(() => setWorkflowExecutionMetadata(executionData, 'test2', {})).toThrow(
			InvalidExecutionMetadataError,
		);

		expect(metadata).not.toEqual({
			test1: '1234',
			test2: {},
		});
	});

	test('setAllWorkflowExecutionMetadata should not convert values to strings and should set other values correctly', () => {
		const metadata = {};
		const executionData = asExecutionData(metadata);

		// Invalid entries throw, but the valid entries in the same call are still applied
		expect(() =>
			setAllWorkflowExecutionMetadata(executionData, {
				test1: {} as unknown as string,
				test2: [] as unknown as string,
				test3: 'value3',
				test4: 'value4',
			}),
		).toThrow(InvalidExecutionMetadataError);

		expect(metadata).toEqual({
			test3: 'value3',
			test4: 'value4',
		});
	});

	test('setWorkflowExecutionMetadata should validate key characters', () => {
		const metadata = {};
		const executionData = asExecutionData(metadata);

		expect(() => setWorkflowExecutionMetadata(executionData, 'te$t1$', 1234)).toThrow(
			InvalidExecutionMetadataError,
		);

		expect(metadata).not.toEqual({
			test1: '1234',
		});
	});

	test('setWorkflowExecutionMetadata should limit the number of metadata entries', () => {
		const metadata = {};
		const executionData = asExecutionData(metadata);

		// Only the first KV_LIMIT entries are kept; the rest are silently dropped
		const expected: Record<string, string> = {};
		for (let i = 0; i < KV_LIMIT; i++) {
			expected[`test${i + 1}`] = `value${i + 1}`;
		}

		for (let i = 0; i < KV_LIMIT + 10; i++) {
			setWorkflowExecutionMetadata(executionData, `test${i + 1}`, `value${i + 1}`);
		}

		expect(metadata).toEqual(expected);
	});

	test('getWorkflowExecutionMetadata should return a single value for an existing key', () => {
		const metadata: Record<string, string> = { test1: 'value1' };
		const executionData = asExecutionData(metadata);

		expect(getWorkflowExecutionMetadata(executionData, 'test1')).toBe('value1');
	});

	test('getWorkflowExecutionMetadata should return undefined for an unset key', () => {
		const metadata: Record<string, string> = { test1: 'value1' };
		const executionData = asExecutionData(metadata);

		expect(getWorkflowExecutionMetadata(executionData, 'test2')).toBeUndefined();
	});

	test('getAllWorkflowExecutionMetadata should return all metadata', () => {
		const metadata: Record<string, string> = { test1: 'value1', test2: 'value2' };
		const executionData = asExecutionData(metadata);

		expect(getAllWorkflowExecutionMetadata(executionData)).toEqual(metadata);
	});

	test('getAllWorkflowExecutionMetadata should not return an object that modifies internal state', () => {
		const metadata: Record<string, string> = { test1: 'value1', test2: 'value2' };
		const executionData = asExecutionData(metadata);

		// The returned object must be a copy, not the internal store
		getAllWorkflowExecutionMetadata(executionData).test1 = 'changed';

		expect(metadata.test1).not.toBe('changed');
		expect(metadata.test1).toBe('value1');
	});

	test('setWorkflowExecutionMetadata should truncate long keys', () => {
		const metadata = {};
		const executionData = asExecutionData(metadata);

		// Keys are truncated to 50 characters
		setWorkflowExecutionMetadata(
			executionData,
			'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab',
			'value1',
		);

		expect(metadata).toEqual({
			aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa: 'value1',
		});
	});

	test('setWorkflowExecutionMetadata should truncate long values', () => {
		const metadata = {};
		const executionData = asExecutionData(metadata);

		// Values are truncated to 255 characters
		setWorkflowExecutionMetadata(
			executionData,
			'test1',
			'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab',
		);

		expect(metadata).toEqual({
			test1:
				'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
		});
	});
});

View File

@@ -0,0 +1,146 @@
import { mock } from 'jest-mock-extended';
import { LoggerProxy } from 'n8n-workflow';
import type {
IDataObject,
IRunExecutionData,
IWorkflowExecuteAdditionalData,
SecretsHelpersBase,
} from 'n8n-workflow';
import { PLACEHOLDER_EMPTY_EXECUTION_ID } from '@/constants';
import { getAdditionalKeys } from '../get-additional-keys';
// Tests for getAdditionalKeys, which builds the $execution / $vars / $secrets /
// customData expression keys available inside workflow expressions.
describe('getAdditionalKeys', () => {
	const secretsHelpers = mock<SecretsHelpersBase>();
	const additionalData = mock<IWorkflowExecuteAdditionalData>({
		executionId: '123',
		webhookWaitingBaseUrl: 'https://webhook.test',
		formWaitingBaseUrl: 'https://form.test',
		variables: { testVar: 'value' },
		secretsHelpers,
	});
	const runExecutionData = mock<IRunExecutionData>({
		resultData: {
			runData: {},
			metadata: {},
		},
	});

	beforeAll(() => {
		LoggerProxy.init(mock());

		// Stub the external-secrets backend so every $secrets lookup resolves.
		secretsHelpers.hasProvider.mockReturnValue(true);
		secretsHelpers.hasSecret.mockReturnValue(true);
		secretsHelpers.getSecret.mockReturnValue('secret-value');
		secretsHelpers.listSecrets.mockReturnValue(['secret1']);
		secretsHelpers.listProviders.mockReturnValue(['provider1']);
	});

	it('should use placeholder execution ID when none provided', () => {
		const noIdData = { ...additionalData, executionId: undefined };
		const result = getAdditionalKeys(noIdData, 'manual', null);

		expect(result.$execution?.id).toBe(PLACEHOLDER_EMPTY_EXECUTION_ID);
	});

	it('should return production mode when not manual', () => {
		const result = getAdditionalKeys(additionalData, 'internal', null);

		expect(result.$execution?.mode).toBe('production');
	});

	it('should include customData methods when runExecutionData is provided', () => {
		const result = getAdditionalKeys(additionalData, 'manual', runExecutionData);

		expect(result.$execution?.customData).toBeDefined();
		expect(typeof result.$execution?.customData?.set).toBe('function');
		expect(typeof result.$execution?.customData?.setAll).toBe('function');
		expect(typeof result.$execution?.customData?.get).toBe('function');
		expect(typeof result.$execution?.customData?.getAll).toBe('function');
	});

	it('should handle customData operations correctly', () => {
		const result = getAdditionalKeys(additionalData, 'manual', runExecutionData);
		const customData = result.$execution?.customData;

		// set/get round-trip, and setAll merges rather than replaces.
		customData?.set('testKey', 'testValue');
		expect(customData?.get('testKey')).toBe('testValue');

		customData?.setAll({ key1: 'value1', key2: 'value2' });
		const allData = customData?.getAll();
		expect(allData).toEqual({
			testKey: 'testValue',
			key1: 'value1',
			key2: 'value2',
		});
	});

	it('should include secrets when enabled', () => {
		const result = getAdditionalKeys(additionalData, 'manual', null, { secretsEnabled: true });

		expect(result.$secrets).toBeDefined();
		expect((result.$secrets?.provider1 as IDataObject).secret1).toEqual('secret-value');
	});

	it('should not include secrets when disabled', () => {
		const result = getAdditionalKeys(additionalData, 'manual', null, { secretsEnabled: false });

		expect(result.$secrets).toBeUndefined();
	});

	it('should throw errors in manual mode', () => {
		const result = getAdditionalKeys(additionalData, 'manual', runExecutionData);

		// '*' is not allowed in customData keys; manual runs surface the error.
		expect(() => {
			result.$execution?.customData?.set('invalid*key', 'value');
		}).toThrow();
	});

	it('should correctly set resume URLs', () => {
		const result = getAdditionalKeys(additionalData, 'manual', null);

		expect(result.$execution?.resumeUrl).toBe('https://webhook.test/123');
		expect(result.$execution?.resumeFormUrl).toBe('https://form.test/123');
		expect(result.$resumeWebhookUrl).toBe('https://webhook.test/123'); // Test deprecated property
	});

	it('should return test mode when manual', () => {
		const result = getAdditionalKeys(additionalData, 'manual', null);

		expect(result.$execution?.mode).toBe('test');
	});

	it('should return variables from additionalData', () => {
		const result = getAdditionalKeys(additionalData, 'manual', null);

		expect(result.$vars?.testVar).toEqual('value');
	});

	it('should handle errors in non-manual mode without throwing', () => {
		const result = getAdditionalKeys(additionalData, 'internal', runExecutionData);
		const customData = result.$execution?.customData;

		// Production runs swallow customData validation errors instead of failing.
		expect(() => {
			customData?.set('invalid*key', 'value');
		}).not.toThrow();
	});

	it('should return undefined customData when runExecutionData is null', () => {
		const result = getAdditionalKeys(additionalData, 'manual', null);

		expect(result.$execution?.customData).toBeUndefined();
	});

	it('should respect metadata KV limit', () => {
		const result = getAdditionalKeys(additionalData, 'manual', runExecutionData);
		const customData = result.$execution?.customData;

		// Add 11 key-value pairs (exceeding the limit of 10)
		for (let i = 0; i < 11; i++) {
			customData?.set(`key${i}`, `value${i}`);
		}

		const allData = customData?.getAll() ?? {};
		expect(Object.keys(allData)).toHaveLength(10);
	});
});

View File

@@ -0,0 +1,366 @@
import type { Tool } from '@langchain/core/tools';
import { mock } from 'jest-mock-extended';
import type {
INode,
ITaskDataConnections,
IRunExecutionData,
INodeExecutionData,
IExecuteData,
IWorkflowExecuteAdditionalData,
Workflow,
INodeType,
INodeTypes,
} from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { ExecuteContext } from '../../execute-context';
// Tests for ExecuteContext.getInputConnectionData, which resolves AI sub-node
// connections (models, memories, tools, …) by calling each connected node's
// supplyData and enforcing the input declarations of the consuming node.
describe('getInputConnectionData', () => {
	const agentNode = mock<INode>({
		name: 'Test Agent',
		type: 'test.agent',
		parameters: {},
	});
	// inputs is mutated per test to describe the agent's accepted connections.
	const agentNodeType = mock<INodeType>({
		description: {
			inputs: [],
		},
	});
	const nodeTypes = mock<INodeTypes>();
	const workflow = mock<Workflow>({
		id: 'test-workflow',
		active: false,
		nodeTypes,
	});
	const runExecutionData = mock<IRunExecutionData>({
		resultData: { runData: {} },
	});
	const connectionInputData = [] as INodeExecutionData[];
	const inputData = {} as ITaskDataConnections;
	const executeData = {} as IExecuteData;
	const hooks = mock<Required<IWorkflowExecuteAdditionalData['hooks']>>();
	const additionalData = mock<IWorkflowExecuteAdditionalData>({ hooks });

	let executeContext: ExecuteContext;

	beforeEach(() => {
		jest.clearAllMocks();
		// Fresh context per test so closeFunctions and mocks don't leak across tests.
		executeContext = new ExecuteContext(
			workflow,
			agentNode,
			additionalData,
			'internal',
			runExecutionData,
			0,
			connectionInputData,
			inputData,
			executeData,
			[],
		);
		jest.spyOn(executeContext, 'getNode').mockReturnValue(agentNode);
		nodeTypes.getByNameAndVersion
			.calledWith(agentNode.type, expect.anything())
			.mockReturnValue(agentNodeType);
	});

	// All single-response connection types share the same resolution logic,
	// so the same suite runs once per type.
	describe.each([
		NodeConnectionType.AiAgent,
		NodeConnectionType.AiChain,
		NodeConnectionType.AiDocument,
		NodeConnectionType.AiEmbedding,
		NodeConnectionType.AiLanguageModel,
		NodeConnectionType.AiMemory,
		NodeConnectionType.AiOutputParser,
		NodeConnectionType.AiRetriever,
		NodeConnectionType.AiTextSplitter,
		NodeConnectionType.AiVectorStore,
	] as const)('%s', (connectionType) => {
		const response = mock();
		const node = mock<INode>({
			name: 'First Node',
			type: 'test.type',
			disabled: false,
		});
		const secondNode = mock<INode>({ name: 'Second Node', disabled: false });
		const supplyData = jest.fn().mockResolvedValue({ response });
		const nodeType = mock<INodeType>({ supplyData });

		beforeEach(() => {
			nodeTypes.getByNameAndVersion
				.calledWith(node.type, expect.anything())
				.mockReturnValue(nodeType);
			workflow.getParentNodes
				.calledWith(agentNode.name, connectionType)
				.mockReturnValue([node.name]);
			workflow.getNode.calledWith(node.name).mockReturnValue(node);
			workflow.getNode.calledWith(secondNode.name).mockReturnValue(secondNode);
		});

		it('should throw when no inputs are defined', async () => {
			agentNodeType.description.inputs = [];
			await expect(executeContext.getInputConnectionData(connectionType, 0)).rejects.toThrow(
				'Node does not have input of type',
			);
			expect(supplyData).not.toHaveBeenCalled();
		});

		it('should return undefined when no nodes are connected and input is not required', async () => {
			agentNodeType.description.inputs = [
				{
					type: connectionType,
					maxConnections: 1,
					required: false,
				},
			];
			workflow.getParentNodes.mockReturnValueOnce([]);

			const result = await executeContext.getInputConnectionData(connectionType, 0);
			expect(result).toBeUndefined();
			expect(supplyData).not.toHaveBeenCalled();
		});

		it('should throw when too many nodes are connected', async () => {
			agentNodeType.description.inputs = [
				{
					type: connectionType,
					maxConnections: 1,
					required: true,
				},
			];
			// Two parents but maxConnections is 1.
			workflow.getParentNodes.mockReturnValueOnce([node.name, secondNode.name]);

			await expect(executeContext.getInputConnectionData(connectionType, 0)).rejects.toThrow(
				`Only 1 ${connectionType} sub-nodes are/is allowed to be connected`,
			);
			expect(supplyData).not.toHaveBeenCalled();
		});

		it('should throw when required node is not connected', async () => {
			agentNodeType.description.inputs = [
				{
					type: connectionType,
					required: true,
				},
			];
			workflow.getParentNodes.mockReturnValueOnce([]);

			await expect(executeContext.getInputConnectionData(connectionType, 0)).rejects.toThrow(
				'must be connected and enabled',
			);
			expect(supplyData).not.toHaveBeenCalled();
		});

		it('should handle disabled nodes', async () => {
			agentNodeType.description.inputs = [
				{
					type: connectionType,
					required: true,
				},
			];
			// A disabled parent counts as "not connected" for a required input.
			const disabledNode = mock<INode>({
				name: 'Disabled Node',
				type: 'test.type',
				disabled: true,
			});
			workflow.getParentNodes.mockReturnValueOnce([disabledNode.name]);
			workflow.getNode.calledWith(disabledNode.name).mockReturnValue(disabledNode);

			await expect(executeContext.getInputConnectionData(connectionType, 0)).rejects.toThrow(
				'must be connected and enabled',
			);
			expect(supplyData).not.toHaveBeenCalled();
		});

		it('should handle node execution errors', async () => {
			agentNodeType.description.inputs = [
				{
					type: connectionType,
					required: true,
				},
			];
			supplyData.mockRejectedValueOnce(new Error('supplyData error'));

			// Generic supplyData failures are wrapped with the sub-node's name.
			await expect(executeContext.getInputConnectionData(connectionType, 0)).rejects.toThrow(
				`Error in sub-node ${node.name}`,
			);
			expect(supplyData).toHaveBeenCalled();
		});

		it('should propagate configuration errors', async () => {
			agentNodeType.description.inputs = [
				{
					type: connectionType,
					required: true,
				},
			];
			// Configuration-node errors must keep their original message (not wrapped).
			const configError = new NodeOperationError(node, 'Config Error in node', {
				functionality: 'configuration-node',
			});
			supplyData.mockRejectedValueOnce(configError);

			await expect(executeContext.getInputConnectionData(connectionType, 0)).rejects.toThrow(
				configError.message,
			);
			expect(nodeType.supplyData).toHaveBeenCalled();
		});

		it('should handle close functions', async () => {
			agentNodeType.description.inputs = [
				{
					type: connectionType,
					maxConnections: 1,
					required: true,
				},
			];
			const closeFunction = jest.fn();
			supplyData.mockResolvedValueOnce({ response, closeFunction });

			const result = await executeContext.getInputConnectionData(connectionType, 0);
			expect(result).toBe(response);
			expect(supplyData).toHaveBeenCalled();

			// closeFunction must be registered for teardown after execution.
			// @ts-expect-error private property
			expect(executeContext.closeFunctions).toContain(closeFunction);
		});
	});

	// AiTool connections differ: the result is always an array of tools.
	describe(NodeConnectionType.AiTool, () => {
		const mockTool = mock<Tool>();
		const toolNode = mock<INode>({
			name: 'Test Tool',
			type: 'test.tool',
			disabled: false,
		});
		const supplyData = jest.fn().mockResolvedValue({ response: mockTool });
		const toolNodeType = mock<INodeType>({ supplyData });
		const secondToolNode = mock<INode>({ name: 'test.secondTool', disabled: false });
		const secondMockTool = mock<Tool>();
		const secondToolNodeType = mock<INodeType>({
			supplyData: jest.fn().mockResolvedValue({ response: secondMockTool }),
		});

		beforeEach(() => {
			nodeTypes.getByNameAndVersion
				.calledWith(toolNode.type, expect.anything())
				.mockReturnValue(toolNodeType);
			workflow.getParentNodes
				.calledWith(agentNode.name, NodeConnectionType.AiTool)
				.mockReturnValue([toolNode.name]);
			workflow.getNode.calledWith(toolNode.name).mockReturnValue(toolNode);
			workflow.getNode.calledWith(secondToolNode.name).mockReturnValue(secondToolNode);
		});

		it('should return empty array when no tools are connected and input is not required', async () => {
			agentNodeType.description.inputs = [
				{
					type: NodeConnectionType.AiTool,
					required: false,
				},
			];
			workflow.getParentNodes.mockReturnValueOnce([]);

			const result = await executeContext.getInputConnectionData(NodeConnectionType.AiTool, 0);
			expect(result).toEqual([]);
			expect(supplyData).not.toHaveBeenCalled();
		});

		it('should throw when required tool node is not connected', async () => {
			agentNodeType.description.inputs = [
				{
					type: NodeConnectionType.AiTool,
					required: true,
				},
			];
			workflow.getParentNodes.mockReturnValueOnce([]);

			await expect(
				executeContext.getInputConnectionData(NodeConnectionType.AiTool, 0),
			).rejects.toThrow('must be connected and enabled');
			expect(supplyData).not.toHaveBeenCalled();
		});

		it('should handle disabled tool nodes', async () => {
			const disabledToolNode = mock<INode>({
				name: 'Disabled Tool',
				type: 'test.tool',
				disabled: true,
			});
			agentNodeType.description.inputs = [
				{
					type: NodeConnectionType.AiTool,
					required: true,
				},
			];
			workflow.getParentNodes
				.calledWith(agentNode.name, NodeConnectionType.AiTool)
				.mockReturnValue([disabledToolNode.name]);
			workflow.getNode.calledWith(disabledToolNode.name).mockReturnValue(disabledToolNode);

			await expect(
				executeContext.getInputConnectionData(NodeConnectionType.AiTool, 0),
			).rejects.toThrow('must be connected and enabled');
			expect(supplyData).not.toHaveBeenCalled();
		});

		it('should handle multiple connected tools', async () => {
			agentNodeType.description.inputs = [
				{
					type: NodeConnectionType.AiTool,
					required: true,
				},
			];
			nodeTypes.getByNameAndVersion
				.calledWith(secondToolNode.type, expect.anything())
				.mockReturnValue(secondToolNodeType);
			workflow.getParentNodes
				.calledWith(agentNode.name, NodeConnectionType.AiTool)
				.mockReturnValue([toolNode.name, secondToolNode.name]);

			// Tools are collected in connection order.
			const result = await executeContext.getInputConnectionData(NodeConnectionType.AiTool, 0);
			expect(result).toEqual([mockTool, secondMockTool]);
			expect(supplyData).toHaveBeenCalled();
			expect(secondToolNodeType.supplyData).toHaveBeenCalled();
		});

		it('should handle tool execution errors', async () => {
			supplyData.mockRejectedValueOnce(new Error('Tool execution error'));
			agentNodeType.description.inputs = [
				{
					type: NodeConnectionType.AiTool,
					required: true,
				},
			];

			await expect(
				executeContext.getInputConnectionData(NodeConnectionType.AiTool, 0),
			).rejects.toThrow(`Error in sub-node ${toolNode.name}`);
			expect(supplyData).toHaveBeenCalled();
		});

		it('should return the tool when there are no issues', async () => {
			agentNodeType.description.inputs = [
				{
					type: NodeConnectionType.AiTool,
					required: true,
				},
			];

			const result = await executeContext.getInputConnectionData(NodeConnectionType.AiTool, 0);
			expect(result).toEqual([mockTool]);
			expect(supplyData).toHaveBeenCalled();
		});
	});
});

View File

@@ -0,0 +1,312 @@
import type { IDataObject, INode, INodeType } from 'n8n-workflow';
import { validateValueAgainstSchema } from '../validate-value-against-schema';
// Tests for validateValueAgainstSchema, which coerces/validates node parameter
// values against the node type's property schema (including `validateType`
// hints and resource-mapper schemas).
describe('validateValueAgainstSchema', () => {
	test('should validate fixedCollection values parameter', () => {
		// A trimmed-down "Set" node description: a fixedCollection of typed fields.
		const nodeType = {
			description: {
				properties: [
					{
						displayName: 'Fields to Set',
						name: 'fields',
						placeholder: 'Add Field',
						type: 'fixedCollection',
						description: 'Edit existing fields or add new ones to modify the output data',
						typeOptions: {
							multipleValues: true,
							sortable: true,
						},
						default: {},
						options: [
							{
								name: 'values',
								displayName: 'Values',
								values: [
									{
										displayName: 'Name',
										name: 'name',
										type: 'string',
										default: '',
										placeholder: 'e.g. fieldName',
										description:
											'Name of the field to set the value of. Supports dot-notation. Example: data.person[0].name.',
										requiresDataPath: 'single',
									},
									{
										displayName: 'Type',
										name: 'type',
										type: 'options',
										description: 'The field value type',
										options: [
											{
												name: 'String',
												value: 'stringValue',
											},
											{
												name: 'Number',
												value: 'numberValue',
											},
											{
												name: 'Boolean',
												value: 'booleanValue',
											},
											{
												name: 'Array',
												value: 'arrayValue',
											},
											{
												name: 'Object',
												value: 'objectValue',
											},
										],
										default: 'stringValue',
									},
									{
										displayName: 'Value',
										name: 'stringValue',
										type: 'string',
										default: '',
										displayOptions: {
											show: {
												type: ['stringValue'],
											},
										},
										validateType: 'string',
									},
									{
										displayName: 'Value',
										name: 'numberValue',
										type: 'number',
										default: 0,
										displayOptions: {
											show: {
												type: ['numberValue'],
											},
										},
										validateType: 'number',
									},
									{
										displayName: 'Value',
										name: 'booleanValue',
										type: 'options',
										default: 'true',
										options: [
											{
												name: 'True',
												value: 'true',
											},
											{
												name: 'False',
												value: 'false',
											},
										],
										displayOptions: {
											show: {
												type: ['booleanValue'],
											},
										},
										validateType: 'boolean',
									},
									{
										displayName: 'Value',
										name: 'arrayValue',
										type: 'string',
										default: '',
										placeholder: 'e.g. [ arrayItem1, arrayItem2, arrayItem3 ]',
										displayOptions: {
											show: {
												type: ['arrayValue'],
											},
										},
										validateType: 'array',
									},
									{
										displayName: 'Value',
										name: 'objectValue',
										type: 'json',
										default: '={}',
										typeOptions: {
											rows: 2,
										},
										displayOptions: {
											show: {
												type: ['objectValue'],
											},
										},
										validateType: 'object',
									},
								],
							},
						],
						displayOptions: {
							show: {
								mode: ['manual'],
							},
						},
					},
				],
			},
		} as unknown as INodeType;

		const node = {
			parameters: {
				mode: 'manual',
				duplicateItem: false,
				fields: {
					values: [
						{
							name: 'num1',
							type: 'numberValue',
							numberValue: '=str',
						},
					],
				},
				include: 'none',
				options: {},
			},
			name: 'Edit Fields2',
			type: 'n8n-nodes-base.set',
			typeVersion: 3,
		} as unknown as INode;

		// Each entry exercises one validateType coercion.
		const values = [
			{
				name: 'num1',
				type: 'numberValue',
				numberValue: '55',
			},
			{
				name: 'str1',
				type: 'stringValue',
				stringValue: 42, //validateFieldType does not change the type of string value
			},
			{
				name: 'arr1',
				type: 'arrayValue',
				arrayValue: "['foo', 'bar']",
			},
			{
				name: 'obj',
				type: 'objectValue',
				objectValue: '{ "key": "value" }',
			},
		];

		const parameterName = 'fields.values';

		const result = validateValueAgainstSchema(node, nodeType, values, parameterName, 0, 0);

		// value should be type number
		expect(typeof (result as IDataObject[])[0].numberValue).toEqual('number');
		// string value should remain unchanged
		expect(typeof (result as IDataObject[])[1].stringValue).toEqual('number');
		// value should be type array
		expect(typeof (result as IDataObject[])[2].arrayValue).toEqual('object');
		expect(Array.isArray((result as IDataObject[])[2].arrayValue)).toEqual(true);
		// value should be type object
		expect(typeof (result as IDataObject[])[3].objectValue).toEqual('object');
		expect(((result as IDataObject[])[3].objectValue as IDataObject).key).toEqual('value');
	});

	test('should validate single value parameter', () => {
		const nodeType = {
			description: {
				properties: [
					{
						displayName: 'Value',
						name: 'numberValue',
						type: 'number',
						default: 0,
						validateType: 'number',
					},
				],
			},
		} as unknown as INodeType;

		const node = {
			parameters: {
				mode: 'manual',
				duplicateItem: false,
				numberValue: '777',
				include: 'none',
				options: {},
			},
			name: 'Edit Fields2',
			type: 'n8n-nodes-base.set',
			typeVersion: 3,
		} as unknown as INode;

		const value = '777';

		const parameterName = 'numberValue';

		const result = validateValueAgainstSchema(node, nodeType, value, parameterName, 0, 0);

		// value should be type number
		expect(typeof result).toEqual('number');
	});

	describe('when the mode is in Fixed mode, and the node is a resource mapper', () => {
		const nodeType = {
			description: {
				properties: [
					{
						name: 'operation',
						type: 'resourceMapper',
						typeOptions: {
							resourceMapper: {
								mode: 'add',
							},
						},
					},
				],
			},
		} as unknown as INodeType;

		// Resource-mapper schema with one required field per primitive kind.
		const node = {
			parameters: {
				operation: {
					schema: [
						{ id: 'num', type: 'number', required: true },
						{ id: 'str', type: 'string', required: true },
						{ id: 'obj', type: 'object', required: true },
						{ id: 'arr', type: 'array', required: true },
					],
					attemptToConvertTypes: true,
					mappingMode: '',
					value: '',
				},
			},
		} as unknown as INode;

		const parameterName = 'operation.value';

		describe('should correctly validate values for', () => {
			test.each([
				{ num: 0 },
				{ num: 23 },
				{ num: -0 },
				{ num: -Infinity },
				{ num: Infinity },
				{ str: '' },
				{ str: ' ' },
				{ str: 'hello' },
				{ arr: [] },
				{ obj: {} },
			])('%s', (value) => {
				expect(() =>
					validateValueAgainstSchema(node, nodeType, value, parameterName, 0, 0),
				).not.toThrow();
			});
		});

		describe('should throw an error for', () => {
			// NaN and nullish values are not valid numbers for a required field.
			test.each([{ num: NaN }, { num: undefined }, { num: null }])('%s', (value) => {
				expect(() =>
					validateValueAgainstSchema(node, nodeType, value, parameterName, 0, 0),
				).toThrow();
			});
		});
	});
});

View File

@@ -0,0 +1,31 @@
import { DateTime } from 'luxon';
import type { INodeParameters, NodeParameterValueType } from 'n8n-workflow';
/**
 * Clean up parameter data to make sure that only valid data gets returned.
 *
 * Mutates `inputData` in place, recursively replacing any Luxon `DateTime`
 * instance with its string representation.
 * INFO: Currently only converts Luxon Dates as we know for sure it will not be breaking
 */
export function cleanupParameterData(inputData: NodeParameterValueType): void {
	// Primitives and null need no conversion.
	if (typeof inputData !== 'object' || inputData === null) {
		return;
	}

	if (Array.isArray(inputData)) {
		inputData.forEach((value) => cleanupParameterData(value as NodeParameterValueType));
		return;
	}

	// At this point inputData is guaranteed to be a plain (non-array) object,
	// so the previous redundant `typeof inputData === 'object'` guard is removed.
	Object.keys(inputData).forEach((key) => {
		const value = (inputData as INodeParameters)[key];
		if (typeof value === 'object') {
			if (DateTime.isDateTime(value)) {
				// Is a special luxon date so convert to string
				(inputData as INodeParameters)[key] = value.toString();
			} else {
				cleanupParameterData(value);
			}
		}
	});
}

View File

@@ -0,0 +1,419 @@
import { DynamicStructuredTool } from '@langchain/core/tools';
import type { IDataObject, INode, INodeType } from 'n8n-workflow';
import { jsonParse, NodeOperationError } from 'n8n-workflow';
import { z } from 'zod';
/** Parameter types a `$fromAI()` placeholder may declare. */
type AllowedTypes = 'string' | 'number' | 'boolean' | 'json';

/** One parsed `$fromAI(key, description?, type?, defaultValue?)` call. */
interface FromAIArgument {
	key: string;
	description?: string;
	type?: AllowedTypes;
	defaultValue?: string | number | boolean | Record<string, unknown>;
}

/** Dependencies required to expose a node as a LangChain tool. */
type ParserOptions = {
	node: INode;
	nodeType: INodeType;
	// Invoked with the AI-provided arguments when the generated tool runs.
	handleToolInvocation: (toolArgs: IDataObject) => Promise<unknown>;
};
// This file is temporarily duplicated in `packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/v2/utils/FromAIParser.ts`
// Please apply any changes in both files
/**
 * AIParametersParser
 *
 * This class encapsulates the logic for parsing node parameters, extracting $fromAI calls,
 * generating Zod schemas, and creating LangChain tools.
 */
class AIParametersParser {
	/**
	 * Constructs an instance of AIParametersParser.
	 */
	constructor(private readonly options: ParserOptions) {}

	/**
	 * Generates a Zod schema based on the provided FromAIArgument placeholder.
	 * @param placeholder The FromAIArgument object containing key, type, description, and defaultValue.
	 * @returns A Zod schema corresponding to the placeholder's type and constraints.
	 */
	private generateZodSchema(placeholder: FromAIArgument): z.ZodTypeAny {
		let schema: z.ZodTypeAny;

		switch (placeholder.type?.toLowerCase()) {
			case 'string':
				schema = z.string();
				break;
			case 'number':
				schema = z.number();
				break;
			case 'boolean':
				schema = z.boolean();
				break;
			case 'json':
				schema = z.record(z.any());
				break;
			default:
				schema = z.string();
		}

		if (placeholder.description) {
			schema = schema.describe(`${schema.description ?? ''} ${placeholder.description}`.trim());
		}

		if (placeholder.defaultValue !== undefined) {
			schema = schema.default(placeholder.defaultValue);
		}

		return schema;
	}

	/**
	 * Recursively traverses the nodeParameters object to find all $fromAI calls.
	 * @param payload The current object or value being traversed.
	 * @param collectedArgs The array collecting FromAIArgument objects.
	 */
	private traverseNodeParameters(payload: unknown, collectedArgs: FromAIArgument[]) {
		if (typeof payload === 'string') {
			const fromAICalls = this.extractFromAICalls(payload);
			fromAICalls.forEach((call) => collectedArgs.push(call));
		} else if (Array.isArray(payload)) {
			payload.forEach((item: unknown) => this.traverseNodeParameters(item, collectedArgs));
		} else if (typeof payload === 'object' && payload !== null) {
			Object.values(payload).forEach((value) => this.traverseNodeParameters(value, collectedArgs));
		}
	}

	/**
	 * Extracts all $fromAI calls from a given string
	 * @param str The string to search for $fromAI calls.
	 * @returns An array of FromAIArgument objects.
	 *
	 * This method uses a regular expression to find the start of each $fromAI function call
	 * in the input string. It then employs a character-by-character parsing approach to
	 * accurately extract the arguments of each call, handling nested parentheses and quoted strings.
	 *
	 * The parsing process:
	 * 1. Finds the starting position of a $fromAI call using regex.
	 * 2. Iterates through characters, keeping track of parentheses depth and quote status.
	 * 3. Handles escaped characters within quotes to avoid premature quote closing.
	 * 4. Builds the argument string until the matching closing parenthesis is found.
	 * 5. Parses the extracted argument string into a FromAIArgument object.
	 * 6. Repeats the process for all $fromAI calls in the input string.
	 *
	 */
	private extractFromAICalls(str: string): FromAIArgument[] {
		const args: FromAIArgument[] = [];
		// Regular expression to match the start of a $fromAI function call
		const pattern = /\$fromAI\s*\(\s*/gi;
		let match: RegExpExecArray | null;

		while ((match = pattern.exec(str)) !== null) {
			const startIndex = match.index + match[0].length;
			let current = startIndex;
			let inQuotes = false;
			let quoteChar = '';
			let parenthesesCount = 1;
			let argsString = '';

			// Parse the arguments string, handling nested parentheses and quotes
			while (current < str.length && parenthesesCount > 0) {
				const char = str[current];

				if (inQuotes) {
					// Handle characters inside quotes, including escaped characters
					if (char === '\\' && current + 1 < str.length) {
						argsString += char + str[current + 1];
						current += 2;
						continue;
					}

					if (char === quoteChar) {
						inQuotes = false;
						quoteChar = '';
					}
					argsString += char;
				} else {
					// Handle characters outside quotes
					if (['"', "'", '`'].includes(char)) {
						inQuotes = true;
						quoteChar = char;
					} else if (char === '(') {
						parenthesesCount++;
					} else if (char === ')') {
						parenthesesCount--;
					}

					// Only add characters if we're still inside the main parentheses
					if (parenthesesCount > 0 || char !== ')') {
						argsString += char;
					}
				}

				current++;
			}

			// If parentheses are balanced, parse the arguments
			if (parenthesesCount === 0) {
				try {
					const parsedArgs = this.parseArguments(argsString);
					args.push(parsedArgs);
				} catch (error) {
					// If parsing fails, throw an ApplicationError with details
					throw new NodeOperationError(
						this.options.node,
						`Failed to parse $fromAI arguments: ${argsString}: ${error}`,
					);
				}
			} else {
				// Log an error if parentheses are unbalanced
				throw new NodeOperationError(
					this.options.node,
					`Unbalanced parentheses while parsing $fromAI call: ${str.slice(startIndex)}`,
				);
			}
		}

		return args;
	}

	/**
	 * Parses the arguments of a single $fromAI function call.
	 * @param argsString The string containing the function arguments.
	 * @returns A FromAIArgument object.
	 */
	private parseArguments(argsString: string): FromAIArgument {
		// Split arguments by commas not inside quotes
		const args: string[] = [];
		let currentArg = '';
		let inQuotes = false;
		let quoteChar = '';
		let escapeNext = false;

		for (let i = 0; i < argsString.length; i++) {
			const char = argsString[i];

			if (escapeNext) {
				currentArg += char;
				escapeNext = false;
				continue;
			}

			if (char === '\\') {
				escapeNext = true;
				continue;
			}

			if (['"', "'", '`'].includes(char)) {
				if (!inQuotes) {
					inQuotes = true;
					quoteChar = char;
					currentArg += char;
				} else if (char === quoteChar) {
					inQuotes = false;
					quoteChar = '';
					currentArg += char;
				} else {
					currentArg += char;
				}
				continue;
			}

			if (char === ',' && !inQuotes) {
				args.push(currentArg.trim());
				currentArg = '';
				continue;
			}

			currentArg += char;
		}

		if (currentArg) {
			args.push(currentArg.trim());
		}

		// Remove surrounding quotes if present
		const cleanArgs = args.map((arg) => {
			const trimmed = arg.trim();
			if (
				(trimmed.startsWith("'") && trimmed.endsWith("'")) ||
				(trimmed.startsWith('`') && trimmed.endsWith('`')) ||
				(trimmed.startsWith('"') && trimmed.endsWith('"'))
			) {
				return trimmed
					.slice(1, -1)
					.replace(/\\'/g, "'")
					.replace(/\\`/g, '`')
					.replace(/\\"/g, '"')
					.replace(/\\\\/g, '\\');
			}
			return trimmed;
		});

		const type = cleanArgs?.[2] || 'string';

		if (!['string', 'number', 'boolean', 'json'].includes(type.toLowerCase())) {
			throw new NodeOperationError(this.options.node, `Invalid type: ${type}`);
		}

		return {
			key: cleanArgs[0] || '',
			description: cleanArgs[1],
			// Fix: return the same `type` value that was validated above. Previously
			// this used `cleanArgs?.[2] ?? 'string'`, whose `??` defaulting diverged
			// from the `||` defaulting used for validation when the type argument was
			// an empty string.
			type: type as AllowedTypes,
			defaultValue: this.parseDefaultValue(cleanArgs[3]),
		};
	}

	/**
	 * Parses the default value, preserving its original type.
	 * @param value The default value as a string.
	 * @returns The parsed default value in its appropriate type.
	 */
	private parseDefaultValue(
		value: string | undefined,
	): string | number | boolean | Record<string, unknown> | undefined {
		if (value === undefined || value === '') return undefined;
		const lowerValue = value.toLowerCase();
		if (lowerValue === 'true') return true;
		if (lowerValue === 'false') return false;
		if (!isNaN(Number(value))) return Number(value);
		try {
			return jsonParse(value);
		} catch {
			return value;
		}
	}

	/**
	 * Retrieves and validates the Zod schema for the tool.
	 *
	 * This method:
	 * 1. Collects all $fromAI arguments from node parameters
	 * 2. Validates parameter keys against naming rules
	 * 3. Checks for duplicate keys and ensures consistency
	 * 4. Generates a Zod schema from the validated arguments
	 *
	 * @throws {NodeOperationError} When parameter keys are invalid or when duplicate keys have inconsistent definitions
	 * @returns {z.ZodObject} A Zod schema object representing the structure and validation rules for the node parameters
	 */
	private getSchema() {
		const { node } = this.options;
		const collectedArguments: FromAIArgument[] = [];
		this.traverseNodeParameters(node.parameters, collectedArguments);

		// Validate each collected argument
		const nameValidationRegex = /^[a-zA-Z0-9_-]{1,64}$/;
		const keyMap = new Map<string, FromAIArgument>();
		for (const argument of collectedArguments) {
			if (argument.key.length === 0 || !nameValidationRegex.test(argument.key)) {
				const isEmptyError = 'You must specify a key when using $fromAI()';
				const isInvalidError = `Parameter key \`${argument.key}\` is invalid`;
				const error = new Error(argument.key.length === 0 ? isEmptyError : isInvalidError);
				throw new NodeOperationError(node, error, {
					description:
						'Invalid parameter key, must be between 1 and 64 characters long and only contain letters, numbers, underscores, and hyphens',
				});
			}

			if (keyMap.has(argument.key)) {
				// If the key already exists in the Map
				const existingArg = keyMap.get(argument.key)!;

				// Check if the existing argument has the same description and type
				if (
					existingArg.description !== argument.description ||
					existingArg.type !== argument.type
				) {
					// If not, throw an error for inconsistent duplicate keys
					throw new NodeOperationError(
						node,
						`Duplicate key '${argument.key}' found with different description or type`,
						{
							description:
								'Ensure all $fromAI() calls with the same key have consistent descriptions and types',
						},
					);
				}
				// If the duplicate key has consistent description and type, it's allowed (no action needed)
			} else {
				// If the key doesn't exist in the Map, add it
				keyMap.set(argument.key, argument);
			}
		}

		// Remove duplicate keys, latest occurrence takes precedence
		const uniqueArgsMap = collectedArguments.reduce((map, arg) => {
			map.set(arg.key, arg);
			return map;
		}, new Map<string, FromAIArgument>());

		const uniqueArguments = Array.from(uniqueArgsMap.values());

		// Generate Zod schema from unique arguments
		const schemaObj = uniqueArguments.reduce((acc: Record<string, z.ZodTypeAny>, placeholder) => {
			acc[placeholder.key] = this.generateZodSchema(placeholder);
			return acc;
		}, {});

		return z.object(schemaObj).required();
	}

	/**
	 * Generates a description for a node based on the provided parameters.
	 * @returns A string description for the node.
	 */
	private getDescription(): string {
		const { node, nodeType } = this.options;
		const manualDescription = node.parameters.toolDescription as string;

		if (node.parameters.descriptionType === 'auto') {
			const resource = node.parameters.resource as string;
			const operation = node.parameters.operation as string;
			let description = nodeType.description.description;
			if (resource) {
				description += `\n Resource: ${resource}`;
			}
			if (operation) {
				description += `\n Operation: ${operation}`;
			}
			return description.trim();
		}
		if (node.parameters.descriptionType === 'manual') {
			return manualDescription ?? nodeType.description.description;
		}

		return nodeType.description.description;
	}

	/**
	 * Creates a DynamicStructuredTool from a node.
	 * @returns A DynamicStructuredTool instance.
	 */
	createTool(): DynamicStructuredTool {
		const { node, nodeType } = this.options;
		const schema = this.getSchema();
		const description = this.getDescription();
		const nodeName = node.name.replace(/ /g, '_');
		const name = nodeName || nodeType.description.name;

		return new DynamicStructuredTool({
			name,
			description,
			schema,
			func: async (toolArgs: z.infer<typeof schema>) =>
				await this.options.handleToolInvocation(toolArgs),
		});
	}
}
/**
 * Converts a node into a LangChain tool by analyzing its parameters,
 * identifying `$fromAI()` placeholders, and generating a Zod schema, then
 * wraps it in a DynamicStructuredTool usable in LangChain workflows.
 *
 * @param options Parser options carrying the node, its type, and the
 *   invocation handler.
 * @returns An object whose `response` is the generated tool.
 */
export function createNodeAsTool(options: ParserOptions) {
	return { response: new AIParametersParser(options).createTool() };
}

View File

@@ -0,0 +1,103 @@
import type { EnsureTypeOptions } from 'n8n-workflow';
import { ExpressionError } from 'n8n-workflow';
/**
 * Validates and coerces a resolved parameter value into the requested type.
 *
 * - `object`/`array`/`json`: non-object strings are JSON-parsed; arrays are
 *   additionally checked with `Array.isArray`, and objects requested as
 *   `json` must survive `JSON.stringify`.
 * - `string`: objects are stringified, everything else goes through `String`.
 * - `number`: converted via `Number`, rejecting `NaN`.
 * - `boolean`: converted via `Boolean`.
 *
 * @param toType Target type to enforce.
 * @param parameterValue Raw resolved value.
 * @param parameterName Parameter name, used in error messages.
 * @param errorOptions Extra context attached to thrown ExpressionErrors.
 * @returns The validated (and possibly converted) value.
 * @throws ExpressionError when the value is null/undefined or cannot be
 *   parsed/converted to `toType`.
 */
export function ensureType(
	toType: EnsureTypeOptions,
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	parameterValue: any,
	parameterName: string,
	errorOptions?: { itemIndex?: number; runIndex?: number; nodeCause?: string },
): string | number | boolean | object {
	if (parameterValue === null) {
		throw new ExpressionError(`Parameter '${parameterName}' must not be null`, errorOptions);
	}
	if (parameterValue === undefined) {
		throw new ExpressionError(
			`Parameter '${parameterName}' could not be 'undefined'`,
			errorOptions,
		);
	}

	// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
	let result = parameterValue;

	const wantsObjectLike = toType === 'object' || toType === 'array' || toType === 'json';
	if (wantsObjectLike) {
		if (typeof result === 'object') {
			// Already an object; for 'json' additionally verify it is serializable
			if (toType === 'json') {
				try {
					JSON.stringify(result);
				} catch (error) {
					throw new ExpressionError(`Parameter '${parameterName}' is not valid JSON`, {
						...errorOptions,
						// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access
						description: error.message,
					});
				}
			}
		} else if (typeof result === 'string' && result.length) {
			// Non-empty strings may encode the requested structure as JSON
			try {
				// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
				result = JSON.parse(result);
			} catch (error) {
				throw new ExpressionError(`Parameter '${parameterName}' could not be parsed`, {
					...errorOptions,
					// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access
					description: error.message,
				});
			}
		} else {
			throw new ExpressionError(
				`Parameter '${parameterName}' must be an ${toType}, but we got '${String(parameterValue)}'`,
				errorOptions,
			);
		}

		if (toType === 'array' && !Array.isArray(result)) {
			// Parsed/provided value is object-like but not an array
			throw new ExpressionError(
				`Parameter '${parameterName}' must be an array, but we got object`,
				errorOptions,
			);
		}
	}

	try {
		if (toType === 'string') {
			result = typeof result === 'object' ? JSON.stringify(result) : String(result);
		} else if (toType === 'number') {
			result = Number(result);
			if (Number.isNaN(result)) {
				throw new ExpressionError(
					`Parameter '${parameterName}' must be a number, but we got '${parameterValue}'`,
					errorOptions,
				);
			}
		} else if (toType === 'boolean') {
			result = Boolean(result);
		}
	} catch (error) {
		if (error instanceof ExpressionError) throw error;
		throw new ExpressionError(`Parameter '${parameterName}' could not be converted to ${toType}`, {
			...errorOptions,
			// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access
			description: error.message,
		});
	}

	// eslint-disable-next-line @typescript-eslint/no-unsafe-return
	return result;
}

View File

@@ -0,0 +1,75 @@
import type { IRunExecutionData } from 'n8n-workflow';
import { LoggerProxy as Logger } from 'n8n-workflow';
import { InvalidExecutionMetadataError } from '@/errors/invalid-execution-metadata.error';
/** Maximum number of custom metadata key/value pairs per execution. */
export const KV_LIMIT = 10;

/**
 * Stores a single custom key/value metadata pair on the execution.
 *
 * Keys are restricted to `A-Za-z0-9_`, truncated to 50 characters; values
 * must be string/number/bigint and are stored as strings truncated to 255
 * characters. At most {@link KV_LIMIT} entries are kept — additional NEW
 * keys are silently ignored, while existing keys can always be updated.
 *
 * @throws InvalidExecutionMetadataError for non-string/malformed keys or
 *   unsupported value types.
 */
export function setWorkflowExecutionMetadata(
	executionData: IRunExecutionData,
	key: string,
	value: unknown,
) {
	if (!executionData.resultData.metadata) {
		executionData.resultData.metadata = {};
	}

	// Validate inputs BEFORE the KV-limit check, so malformed keys/values
	// always raise instead of being silently dropped once the limit is hit.
	if (typeof key !== 'string') {
		throw new InvalidExecutionMetadataError('key', key);
	}
	if (key.replace(/[A-Za-z0-9_]/g, '').length !== 0) {
		throw new InvalidExecutionMetadataError(
			'key',
			key,
			`Custom data key can only contain characters "A-Za-z0-9_" (key "${key}")`,
		);
	}
	if (typeof value !== 'string' && typeof value !== 'number' && typeof value !== 'bigint') {
		throw new InvalidExecutionMetadataError('value', key);
	}

	// Keys are truncated on storage, so the "is this a new key" check must use
	// the truncated form too; otherwise a long key whose truncation already
	// exists would wrongly count against the limit as a new entry.
	const storedKey = key.slice(0, 50);

	// Currently limited to KV_LIMIT metadata KVs; new keys beyond it are ignored
	if (
		!(storedKey in executionData.resultData.metadata) &&
		Object.keys(executionData.resultData.metadata).length >= KV_LIMIT
	) {
		return;
	}

	const val = String(value);
	if (key.length > 50) {
		Logger.error('Custom data key over 50 characters long. Truncating to 50 characters.');
	}
	if (val.length > 255) {
		Logger.error('Custom data value over 255 characters long. Truncating to 255 characters.');
	}

	executionData.resultData.metadata[storedKey] = val.slice(0, 255);
}
/**
 * Stores every key/value pair of `obj` as execution metadata.
 *
 * All entries are attempted even when some fail; afterwards the FIRST
 * collected error (if any) is rethrown.
 *
 * @throws The first error raised by `setWorkflowExecutionMetadata`.
 */
export function setAllWorkflowExecutionMetadata(
	executionData: IRunExecutionData,
	obj: Record<string, string>,
) {
	const failures: Error[] = [];
	for (const [key, value] of Object.entries(obj)) {
		try {
			setWorkflowExecutionMetadata(executionData, key, value);
		} catch (error) {
			failures.push(error as Error);
		}
	}
	if (failures.length > 0) {
		throw failures[0];
	}
}
/**
 * Returns all custom execution metadata as a shallow copy, so callers
 * cannot mutate the stored metadata object directly.
 */
export function getAllWorkflowExecutionMetadata(
	executionData: IRunExecutionData,
): Record<string, string> {
	const { metadata } = executionData.resultData;
	return metadata ? Object.assign({}, metadata) : {};
}
/**
 * Looks up one custom metadata value by key.
 *
 * The key is truncated to 50 characters to mirror how keys are stored.
 * Returns `undefined` (typed as string) for unknown keys.
 */
export function getWorkflowExecutionMetadata(
	executionData: IRunExecutionData,
	key: string,
): string {
	const storedKey = String(key).slice(0, 50);
	return getAllWorkflowExecutionMetadata(executionData)[storedKey];
}

View File

@@ -0,0 +1,207 @@
import get from 'lodash/get';
import {
ApplicationError,
LoggerProxy,
NodeHelpers,
NodeOperationError,
WorkflowOperationError,
executeFilter,
isFilterValue,
type INode,
type INodeParameters,
type INodeProperties,
type INodePropertyCollection,
type INodePropertyOptions,
type INodeType,
type NodeParameterValueType,
} from 'n8n-workflow';
/**
 * Walks a dotted parameter path (e.g. `collection.field`) through the node
 * type's property descriptions and returns the description of the final
 * segment. Array suffixes (`foo[0]`) are stripped before matching, and only
 * properties currently displayed (per `NodeHelpers.displayParameterPath`) are
 * considered.
 *
 * @throws ApplicationError when any segment cannot be resolved.
 */
function findPropertyFromParameterName(
	parameterName: string,
	nodeType: INodeType,
	node: INode,
	nodeParameters: INodeParameters,
): INodePropertyOptions | INodeProperties | INodePropertyCollection {
	let property: INodePropertyOptions | INodeProperties | INodePropertyCollection | undefined;
	const paramParts = parameterName.split('.');
	// Path of segments resolved so far, passed to displayParameterPath.
	// NOTE(review): the first segment is never appended, so nested lookups use
	// a path like ".child" — presumably what displayParameterPath expects; confirm.
	let currentParamPath = '';
	// Finds a property by name among `options`, restricted to ones visible
	// at the current path for this node's parameter values.
	const findProp = (
		name: string,
		options: Array<INodePropertyOptions | INodeProperties | INodePropertyCollection>,
	): INodePropertyOptions | INodeProperties | INodePropertyCollection | undefined => {
		return options.find(
			(i) =>
				i.name === name &&
				NodeHelpers.displayParameterPath(nodeParameters, i, currentParamPath, node),
		);
	};
	for (const p of paramParts) {
		// Strip array index suffixes, e.g. "items[2]" -> "items"
		const param = p.split('[')[0];
		if (!property) {
			// First segment: search the node type's top-level properties
			property = findProp(param, nodeType.description.properties);
		} else if ('options' in property && property.options) {
			// Collection / options-style property: descend into its options
			property = findProp(param, property.options);
			currentParamPath += `.${param}`;
		} else if ('values' in property) {
			// fixedCollection entry: descend into its values
			property = findProp(param, property.values);
			currentParamPath += `.${param}`;
		} else {
			// Current property has no children to descend into
			throw new ApplicationError('Could not find property', { extra: { parameterName } });
		}
		if (!property) {
			throw new ApplicationError('Could not find property', { extra: { parameterName } });
		}
	}
	if (!property) {
		throw new ApplicationError('Could not find property', { extra: { parameterName } });
	}
	return property;
}
/**
 * Applies an `extractValue` regex to a string parameter value and returns
 * the content of its single capture group.
 *
 * @throws WorkflowOperationError when the regex does not match, or when it
 *   does not define exactly one capture group.
 */
function executeRegexExtractValue(
	value: string,
	regex: RegExp,
	parameterName: string,
	parameterDisplayName: string,
): NodeParameterValueType | object {
	const match = regex.exec(value);
	if (!match) {
		throw new WorkflowOperationError(
			`ERROR: ${parameterDisplayName} parameter's value is invalid. This is likely because the URL entered is incorrect`,
		);
	}
	// match[0] is the full match, so exactly one capture group yields length 2
	if (match.length !== 2) {
		throw new WorkflowOperationError(
			`Property "${parameterName}" has an invalid extractValue regex "${regex.source}". extractValue expects exactly one group to be returned.`,
		);
	}
	return match[1];
}
/**
 * Resolves a resource-locator (RLC) parameter value to its inner value,
 * applying the selected mode's `extractValue` regex when one is configured.
 * Non-RLC values pass through untouched.
 *
 * @throws ApplicationError when the inner value is not a string while an
 *   extractor is configured, or when the extractor type is unsupported.
 */
function extractValueRLC(
	value: NodeParameterValueType | object,
	property: INodeProperties,
	parameterName: string,
): NodeParameterValueType | object {
	// Not an RLC value — pass through unchanged
	if (typeof value !== 'object' || !value || !('mode' in value) || !('value' in value)) {
		return value;
	}

	const modeProp = (property.modes ?? []).find((mode) => mode.name === value.mode);
	// Unknown mode, or mode without an extractor: return the raw inner value
	if (!modeProp || !('extractValue' in modeProp) || !modeProp.extractValue) {
		return value.value;
	}

	if (typeof value.value !== 'string') {
		let typeName: string | undefined = value.value?.constructor.name;
		if (value.value === null) {
			typeName = 'null';
		} else if (typeName === undefined) {
			typeName = 'undefined';
		}
		LoggerProxy.error(
			`Only strings can be passed to extractValue. Parameter "${parameterName}" passed "${typeName}"`,
		);
		throw new ApplicationError(
			"ERROR: This parameter's value is invalid. Please enter a valid mode.",
			{ extra: { parameter: property.displayName, modeProp: modeProp.displayName } },
		);
	}

	if (modeProp.extractValue.type !== 'regex') {
		throw new ApplicationError('Property with unknown `extractValue`', {
			extra: { parameter: parameterName, extractValueType: modeProp.extractValue.type },
		});
	}

	return executeRegexExtractValue(
		value.value,
		new RegExp(modeProp.extractValue.regex),
		parameterName,
		property.displayName,
	);
}
/**
 * Evaluates a filter-type parameter value for the given item index.
 * Non-filter values pass through untouched.
 *
 * @throws ApplicationError when an `extractValue` with a `type` is configured
 *   — filter parameters only support the boolean form `extractValue: true`.
 */
function extractValueFilter(
	value: NodeParameterValueType | object,
	property: INodeProperties,
	parameterName: string,
	itemIndex: number,
): NodeParameterValueType | object {
	if (!isFilterValue(value)) return value;

	if (property.extractValue?.type) {
		throw new ApplicationError(
			`Property "${parameterName}" has an invalid extractValue type. Filter parameters only support extractValue: true`,
			{ extra: { parameter: parameterName } },
		);
	}

	return executeFilter(value, { itemIndex });
}
/**
 * Applies a property's `extractValue` regex to a plain (non-RLC, non-filter)
 * parameter value. Values for properties without an extractor pass through
 * untouched.
 *
 * @throws ApplicationError when the value is not a string while an extractor
 *   is configured, or when the extractor type is unsupported.
 */
function extractValueOther(
	value: NodeParameterValueType | object,
	property: INodeProperties | INodePropertyCollection,
	parameterName: string,
): NodeParameterValueType | object {
	// No extractor configured — pass through unchanged
	if (!('extractValue' in property) || !property.extractValue) {
		return value;
	}

	if (typeof value !== 'string') {
		let typeName: string | undefined = value?.constructor.name;
		if (value === null) {
			typeName = 'null';
		} else if (typeName === undefined) {
			typeName = 'undefined';
		}
		LoggerProxy.error(
			`Only strings can be passed to extractValue. Parameter "${parameterName}" passed "${typeName}"`,
		);
		throw new ApplicationError("This parameter's value is invalid", {
			extra: { parameter: property.displayName },
		});
	}

	if (property.extractValue.type !== 'regex') {
		throw new ApplicationError('Property with unknown `extractValue`', {
			extra: { parameter: parameterName, extractValueType: property.extractValue.type },
		});
	}

	return executeRegexExtractValue(
		value,
		new RegExp(property.extractValue.regex),
		parameterName,
		property.displayName,
	);
}
export function extractValue(
value: NodeParameterValueType | object,
parameterName: string,
node: INode,
nodeType: INodeType,
itemIndex = 0,
): NodeParameterValueType | object {
let property: INodePropertyOptions | INodeProperties | INodePropertyCollection;
try {
property = findPropertyFromParameterName(parameterName, nodeType, node, node.parameters);
// Definitely doesn't have value extractor
if (!('type' in property)) {
return value;
}
if (property.type === 'resourceLocator') {
return extractValueRLC(value, property, parameterName);
} else if (property.type === 'filter') {
return extractValueFilter(value, property, parameterName, itemIndex);
}
return extractValueOther(value, property, parameterName);
} catch (error) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-assignment
throw new NodeOperationError(node, error, { description: get(error, 'description') });
}
}

View File

@@ -0,0 +1,75 @@
import type {
IRunExecutionData,
IWorkflowDataProxyAdditionalKeys,
IWorkflowExecuteAdditionalData,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';
import { PLACEHOLDER_EMPTY_EXECUTION_ID } from '@/constants';
import {
setWorkflowExecutionMetadata,
setAllWorkflowExecutionMetadata,
getWorkflowExecutionMetadata,
getAllWorkflowExecutionMetadata,
} from './execution-metadata';
import { getSecretsProxy } from './get-secrets-proxy';
/**
 * Returns the additional keys for Expressions and Function-Nodes.
 *
 * Exposes `$execution` (id, mode, resume URLs, and a `customData` accessor
 * when run-execution data is available), `$vars`, optional `$secrets`, plus
 * the deprecated `$executionId` / `$resumeWebhookUrl` aliases.
 */
export function getAdditionalKeys(
	additionalData: IWorkflowExecuteAdditionalData,
	mode: WorkflowExecuteMode,
	runExecutionData: IRunExecutionData | null,
	options?: { secretsEnabled?: boolean },
): IWorkflowDataProxyAdditionalKeys {
	const executionId = additionalData.executionId ?? PLACEHOLDER_EMPTY_EXECUTION_ID;
	const resumeUrl = `${additionalData.webhookWaitingBaseUrl}/${executionId}`;
	const resumeFormUrl = `${additionalData.formWaitingBaseUrl}/${executionId}`;

	// In manual mode metadata errors surface to the user; in production runs
	// they are only logged so the execution keeps going.
	const logOrRethrow = (e: Error) => {
		if (mode === 'manual') throw e;
		LoggerProxy.debug(e.message);
	};

	const customData = runExecutionData
		? {
				set(key: string, value: string): void {
					try {
						setWorkflowExecutionMetadata(runExecutionData, key, value);
					} catch (e) {
						logOrRethrow(e as Error);
					}
				},
				setAll(obj: Record<string, string>): void {
					try {
						setAllWorkflowExecutionMetadata(runExecutionData, obj);
					} catch (e) {
						logOrRethrow(e as Error);
					}
				},
				get: (key: string): string => getWorkflowExecutionMetadata(runExecutionData, key),
				getAll: (): Record<string, string> => getAllWorkflowExecutionMetadata(runExecutionData),
			}
		: undefined;

	return {
		$execution: {
			id: executionId,
			mode: mode === 'manual' ? 'test' : 'production',
			resumeUrl,
			resumeFormUrl,
			customData,
		},
		$vars: additionalData.variables,
		$secrets: options?.secretsEnabled ? getSecretsProxy(additionalData) : undefined,
		// deprecated
		$executionId: executionId,
		$resumeWebhookUrl: resumeUrl,
	};
}

View File

@@ -0,0 +1,184 @@
/* eslint-disable @typescript-eslint/no-unsafe-argument */
import type {
CloseFunction,
IExecuteData,
IExecuteFunctions,
INodeExecutionData,
IRunExecutionData,
ITaskDataConnections,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
SupplyData,
AINodeConnectionType,
} from 'n8n-workflow';
import {
NodeConnectionType,
NodeOperationError,
ExecutionBaseError,
ApplicationError,
} from 'n8n-workflow';
import { createNodeAsTool } from './create-node-as-tool';
// eslint-disable-next-line import/no-cycle
import { SupplyDataContext } from '../../node-execution-context';
import type { ExecuteContext, WebhookContext } from '../../node-execution-context';
/**
 * Resolves the data supplied to a parent node by the sub-nodes connected to
 * one of its AI-style inputs (tools, memory, language models, …).
 *
 * For each connected sub-node this either calls its `supplyData` method, or —
 * for AI tool nodes without one — wraps the node itself as a tool via
 * `createNodeAsTool`. Sub-node errors are recorded on both the sub-node and
 * the calling node before being rethrown.
 *
 * @returns For inputs with `maxConnections === 1`, the single response (or
 *   `undefined` when nothing is connected); otherwise an array of responses.
 * @throws NodeOperationError when a required input has no connection, too
 *   many connections exist, or a sub-node fails.
 * @throws ApplicationError when the node lacks an input of `connectionType`
 *   or a non-tool sub-node has no `supplyData` method.
 */
export async function getInputConnectionData(
	this: ExecuteContext | WebhookContext | SupplyDataContext,
	workflow: Workflow,
	runExecutionData: IRunExecutionData,
	parentRunIndex: number,
	connectionInputData: INodeExecutionData[],
	parentInputData: ITaskDataConnections,
	additionalData: IWorkflowExecuteAdditionalData,
	executeData: IExecuteData,
	mode: WorkflowExecuteMode,
	closeFunctions: CloseFunction[],
	connectionType: AINodeConnectionType,
	itemIndex: number,
	abortSignal?: AbortSignal,
): Promise<unknown> {
	const parentNode = this.getNode();
	const inputConfiguration = this.nodeInputs.find((input) => input.type === connectionType);
	if (inputConfiguration === undefined) {
		throw new ApplicationError('Node does not have input of type', {
			extra: { nodeName: parentNode.name, connectionType },
		});
	}
	const connectedNodes = this.getConnectedNodes(connectionType);
	if (connectedNodes.length === 0) {
		if (inputConfiguration.required) {
			throw new NodeOperationError(
				parentNode,
				`A ${inputConfiguration?.displayName ?? connectionType} sub-node must be connected and enabled`,
			);
		}
		// Optional input with nothing connected: single-connection inputs
		// resolve to undefined, multi-connection inputs to an empty array
		return inputConfiguration.maxConnections === 1 ? undefined : [];
	}
	if (
		inputConfiguration.maxConnections !== undefined &&
		connectedNodes.length > inputConfiguration.maxConnections
	) {
		throw new NodeOperationError(
			parentNode,
			`Only ${inputConfiguration.maxConnections} ${connectionType} sub-nodes are/is allowed to be connected`,
		);
	}
	const nodes: SupplyData[] = [];
	for (const connectedNode of connectedNodes) {
		const connectedNodeType = workflow.nodeTypes.getByNameAndVersion(
			connectedNode.type,
			connectedNode.typeVersion,
		);
		// Each invocation gets its own SupplyDataContext so run indices and
		// input data stay isolated per call
		const contextFactory = (runIndex: number, inputData: ITaskDataConnections) =>
			new SupplyDataContext(
				workflow,
				connectedNode,
				additionalData,
				mode,
				runExecutionData,
				runIndex,
				connectionInputData,
				inputData,
				connectionType,
				executeData,
				closeFunctions,
				abortSignal,
			);
		if (!connectedNodeType.supplyData) {
			if (connectedNodeType.description.outputs.includes(NodeConnectionType.AiTool)) {
				/**
				 * This keeps track of how many times this specific AI tool node has been invoked.
				 * It is incremented on every invocation of the tool to keep the output of each invocation separate from each other.
				 */
				let toolRunIndex = 0;
				const supplyData = createNodeAsTool({
					node: connectedNode,
					nodeType: connectedNodeType,
					handleToolInvocation: async (toolArgs) => {
						const runIndex = toolRunIndex++;
						const context = contextFactory(runIndex, {});
						context.addInputData(NodeConnectionType.AiTool, [[{ json: toolArgs }]]);
						try {
							// Execute the sub-node with the proxied context
							const result = await connectedNodeType.execute?.call(
								context as unknown as IExecuteFunctions,
							);
							// Process and map the results
							const mappedResults = result?.[0]?.flatMap((item) => item.json);
							// Add output data to the context
							context.addOutputData(NodeConnectionType.AiTool, runIndex, [
								[{ json: { response: mappedResults } }],
							]);
							// Return the stringified results
							return JSON.stringify(mappedResults);
						} catch (error) {
							// Record the failure as the tool's output and report it back
							// to the model as text instead of aborting the run
							const nodeError = new NodeOperationError(connectedNode, error as Error);
							context.addOutputData(NodeConnectionType.AiTool, runIndex, nodeError);
							return 'Error during node execution: ' + nodeError.description;
						}
					},
				});
				nodes.push(supplyData);
			} else {
				throw new ApplicationError('Node does not have a `supplyData` method defined', {
					extra: { nodeName: connectedNode.name },
				});
			}
		} else {
			const context = contextFactory(parentRunIndex, parentInputData);
			try {
				const supplyData = await connectedNodeType.supplyData.call(context, itemIndex);
				if (supplyData.closeFunction) {
					// Defer teardown to the caller, which runs all close functions
					closeFunctions.push(supplyData.closeFunction);
				}
				nodes.push(supplyData);
			} catch (error) {
				// Propagate errors from sub-nodes
				if (error instanceof ExecutionBaseError) {
					if (error.functionality === 'configuration-node') throw error;
				} else {
					error = new NodeOperationError(connectedNode, error, {
						itemIndex,
					});
				}
				let currentNodeRunIndex = 0;
				if (runExecutionData.resultData.runData.hasOwnProperty(parentNode.name)) {
					currentNodeRunIndex = runExecutionData.resultData.runData[parentNode.name].length;
				}
				// Display the error on the node which is causing it
				await context.addExecutionDataFunctions(
					'input',
					error,
					connectionType,
					parentNode.name,
					currentNodeRunIndex,
				);
				// Display on the calling node which node has the error
				throw new NodeOperationError(connectedNode, `Error in sub-node ${connectedNode.name}`, {
					itemIndex,
					functionality: 'configuration-node',
					// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access
					description: error.message,
				});
			}
		}
	}
	return inputConfiguration.maxConnections === 1
		? (nodes || [])[0]?.response
		: nodes.map((node) => node.response);
}

View File

@@ -0,0 +1,76 @@
import type { IDataObject, IWorkflowExecuteAdditionalData } from 'n8n-workflow';
import { ExpressionError } from 'n8n-workflow';
/**
 * Wraps a secrets object in a read-only Proxy that throws a descriptive
 * ExpressionError when an unknown key is accessed. Nested objects are
 * wrapped recursively so deep lookups stay guarded.
 */
function buildSecretsValueProxy(value: IDataObject): unknown {
	return new Proxy(value, {
		get(_target, valueName) {
			// Ignore symbol lookups (e.g. inspection hooks)
			if (typeof valueName !== 'string') return;

			if (!(valueName in value)) {
				throw new ExpressionError('Could not load secrets', {
					description:
						'The credential in use tries to use secret from an external store that could not be found',
				});
			}

			const resolved = value[valueName];
			return typeof resolved === 'object' && resolved !== null
				? buildSecretsValueProxy(resolved as IDataObject)
				: resolved;
		},
	});
}
/**
 * Builds the `$secrets` proxy over the external-secrets helpers.
 *
 * Access pattern is `$secrets.<provider>.<secret>`; unknown providers or
 * secrets throw an ExpressionError, writes are rejected, and key
 * enumeration delegates to the helpers.
 */
export function getSecretsProxy(additionalData: IWorkflowExecuteAdditionalData): IDataObject {
	const { secretsHelpers } = additionalData;

	// Second-level proxy resolving secret names within one provider
	const buildProviderProxy = (providerName: string) =>
		new Proxy(
			{},
			{
				get(_target, secretName) {
					if (typeof secretName !== 'string') {
						return;
					}
					if (!secretsHelpers.hasSecret(providerName, secretName)) {
						throw new ExpressionError('Could not load secrets', {
							description:
								'The credential in use tries to use secret from an external store that could not be found',
						});
					}
					const retValue = secretsHelpers.getSecret(providerName, secretName);
					if (typeof retValue === 'object' && retValue !== null) {
						return buildSecretsValueProxy(retValue as IDataObject);
					}
					return retValue;
				},
				set() {
					return false;
				},
				ownKeys() {
					return secretsHelpers.listSecrets(providerName);
				},
			},
		);

	return new Proxy(
		{},
		{
			get(_target, providerName) {
				if (typeof providerName !== 'string') {
					return {};
				}
				if (!secretsHelpers.hasProvider(providerName)) {
					throw new ExpressionError('Could not load secrets', {
						description:
							'The credential in use pulls secrets from an external store that is not reachable',
					});
				}
				return buildProviderProxy(providerName);
			},
			set() {
				return false;
			},
			ownKeys() {
				return secretsHelpers.listProviders();
			},
		},
	);
}

View File

@@ -0,0 +1,218 @@
import type {
FieldType,
IDataObject,
INode,
INodeProperties,
INodePropertyCollection,
INodePropertyOptions,
INodeType,
} from 'n8n-workflow';
import {
ExpressionError,
isResourceMapperValue,
NodeHelpers,
validateFieldType,
} from 'n8n-workflow';
import type { ExtendedValidationResult } from '@/interfaces';
/**
 * Validates (and casts in place) the values of a resource-mapper parameter
 * against the schema stored on the node's resource-mapper field.
 *
 * `parameterName` is expected as `<mapperParam>.value`; anything else (or a
 * missing/non-resource-mapper field) passes through unvalidated. Required,
 * non-boolean schema entries must not be null/undefined unless
 * `skipRequiredCheck` is set. Valid values are replaced with their casted
 * form by mutating `paramValues`.
 *
 * @returns `{ valid: true, newValue }` on success, otherwise the first
 *   failing field's validation result with `fieldName` set.
 */
const validateResourceMapperValue = (
	parameterName: string,
	paramValues: { [key: string]: unknown },
	node: INode,
	skipRequiredCheck = false,
): ExtendedValidationResult => {
	const result: ExtendedValidationResult = { valid: true, newValue: paramValues };
	const paramNameParts = parameterName.split('.');
	// Only paths of the exact shape "<param>.value" are validated
	if (paramNameParts.length !== 2) {
		return result;
	}
	const resourceMapperParamName = paramNameParts[0];
	const resourceMapperField = node.parameters[resourceMapperParamName];
	if (!resourceMapperField || !isResourceMapperValue(resourceMapperField)) {
		return result;
	}
	const schema = resourceMapperField.schema;
	const paramValueNames = Object.keys(paramValues);
	for (let i = 0; i < paramValueNames.length; i++) {
		const key = paramValueNames[i];
		const resolvedValue = paramValues[key];
		// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
		const schemaEntry = schema.find((s) => s.id === key);
		// Required fields (except booleans) must be present unless skipped
		if (
			!skipRequiredCheck &&
			schemaEntry?.required === true &&
			schemaEntry.type !== 'boolean' &&
			(resolvedValue === undefined || resolvedValue === null)
		) {
			return {
				valid: false,
				errorMessage: `The value "${String(key)}" is required but not set`,
				fieldName: key,
			};
		}
		if (schemaEntry?.type) {
			const validationResult = validateFieldType(key, resolvedValue, schemaEntry.type, {
				valueOptions: schemaEntry.options,
				strict: !resourceMapperField.attemptToConvertTypes,
				parseStrings: !!resourceMapperField.convertFieldsToString,
			});
			if (!validationResult.valid) {
				return { ...validationResult, fieldName: key };
			} else {
				// If it's valid, set the casted value
				paramValues[key] = validationResult.newValue;
			}
		}
	}
	return result;
};
/**
 * Validates the entries of a `collection` / `fixedCollection` parameter
 * against the `validateType` declarations of its nested property
 * descriptions, casting valid values in place.
 *
 * Returns `validationResult` unchanged when there are no nested
 * descriptions or nothing declares `validateType`. Unlike other validators
 * here, a failing nested field THROWS an ExpressionError (with run/item
 * context) rather than returning an invalid result.
 */
const validateCollection = (
	node: INode,
	runIndex: number,
	itemIndex: number,
	propertyDescription: INodeProperties,
	parameterPath: string[],
	validationResult: ExtendedValidationResult,
): ExtendedValidationResult => {
	let nestedDescriptions: INodeProperties[] | undefined;
	if (propertyDescription.type === 'fixedCollection') {
		// For fixedCollections the second path segment selects the entry group
		nestedDescriptions = (propertyDescription.options as INodePropertyCollection[]).find(
			(entry) => entry.name === parameterPath[1],
		)?.values;
	}
	if (propertyDescription.type === 'collection') {
		nestedDescriptions = propertyDescription.options as INodeProperties[];
	}
	if (!nestedDescriptions) {
		return validationResult;
	}
	// Build a lookup of the nested fields that actually request validation
	const validationMap: {
		[key: string]: { type: FieldType; displayName: string; options?: INodePropertyOptions[] };
	} = {};
	for (const prop of nestedDescriptions) {
		if (!prop.validateType || prop.ignoreValidationDuringExecution) continue;
		validationMap[prop.name] = {
			type: prop.validateType,
			displayName: prop.displayName,
			options:
				prop.validateType === 'options' ? (prop.options as INodePropertyOptions[]) : undefined,
		};
	}
	if (!Object.keys(validationMap).length) {
		return validationResult;
	}
	if (validationResult.valid) {
		// The value may be a single entry object or an array of them
		for (const value of Array.isArray(validationResult.newValue)
			? (validationResult.newValue as IDataObject[])
			: [validationResult.newValue as IDataObject]) {
			for (const key of Object.keys(value)) {
				if (!validationMap[key]) continue;
				const fieldValidationResult = validateFieldType(key, value[key], validationMap[key].type, {
					valueOptions: validationMap[key].options,
				});
				if (!fieldValidationResult.valid) {
					throw new ExpressionError(
						`Invalid input for field '${validationMap[key].displayName}' inside '${propertyDescription.displayName}' in [item ${itemIndex}]`,
						{
							description: fieldValidationResult.errorMessage,
							runIndex,
							itemIndex,
							nodeCause: node.name,
						},
					);
				}
				// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
				value[key] = fieldValidationResult.newValue;
			}
		}
	}
	return validationResult;
};
/**
 * Validates (and possibly casts) a resolved parameter value against the
 * property description declared by the node type, dispatching to the
 * matching validator: direct `validateType` for simple top-level
 * properties, resource-mapper validation for `<param>.value` paths, or
 * collection validation for (fixed)collections.
 *
 * @returns The (possibly casted) value; values without a matching,
 *   currently-displayed property description pass through unchanged.
 * @throws ExpressionError when validation fails.
 */
export const validateValueAgainstSchema = (
	node: INode,
	nodeType: INodeType,
	parameterValue: string | number | boolean | object | null | undefined,
	parameterName: string,
	runIndex: number,
	itemIndex: number,
) => {
	const parameterPath = parameterName.split('.');
	// Match on the top-level segment only, and only when the property is
	// currently displayed for this node's parameter values
	const propertyDescription = nodeType.description.properties.find(
		(prop) =>
			parameterPath[0] === prop.name && NodeHelpers.displayParameter(node.parameters, prop, node),
	);
	if (!propertyDescription) {
		return parameterValue;
	}
	let validationResult: ExtendedValidationResult = { valid: true, newValue: parameterValue };
	if (
		parameterPath.length === 1 &&
		propertyDescription.validateType &&
		!propertyDescription.ignoreValidationDuringExecution
	) {
		// Simple top-level property with an explicit validateType
		validationResult = validateFieldType(
			parameterName,
			parameterValue,
			propertyDescription.validateType,
		);
	} else if (
		propertyDescription.type === 'resourceMapper' &&
		parameterPath[1] === 'value' &&
		typeof parameterValue === 'object'
	) {
		// Required-field checks are skipped for all mapper modes except 'add'
		validationResult = validateResourceMapperValue(
			parameterName,
			parameterValue as { [key: string]: unknown },
			node,
			propertyDescription.typeOptions?.resourceMapper?.mode !== 'add',
		);
	} else if (['fixedCollection', 'collection'].includes(propertyDescription.type)) {
		validationResult = validateCollection(
			node,
			runIndex,
			itemIndex,
			propertyDescription,
			parameterPath,
			validationResult,
		);
	}
	if (!validationResult.valid) {
		throw new ExpressionError(
			`Invalid input for '${
				validationResult.fieldName
					? String(validationResult.fieldName)
					: propertyDescription.displayName
			}' [item ${itemIndex}]`,
			{
				description: validationResult.errorMessage,
				runIndex,
				itemIndex,
				nodeCause: node.name,
			},
		);
	}
	// eslint-disable-next-line @typescript-eslint/no-unsafe-return
	return validationResult.newValue;
};

View File

@@ -0,0 +1,180 @@
import type { Request, Response } from 'express';
import type {
AINodeConnectionType,
CloseFunction,
ICredentialDataDecryptedObject,
IDataObject,
IExecuteData,
INode,
INodeExecutionData,
IRunExecutionData,
ITaskDataConnections,
IWebhookData,
IWebhookFunctions,
IWorkflowExecuteAdditionalData,
WebhookType,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
copyBinaryFile,
getBinaryHelperFunctions,
getNodeWebhookUrl,
getRequestHelperFunctions,
returnJsonArray,
} from '@/node-execute-functions';
import { NodeExecutionContext } from './node-execution-context';
import { getInputConnectionData } from './utils/get-input-connection-data';
/**
 * Execution context handed to a node's webhook method.
 *
 * Exposes the incoming HTTP request/response (via `additionalData`), binary
 * and request helpers, and — for AI-capable webhook nodes — access to
 * connected sub-node data via `getInputConnectionData`.
 */
export class WebhookContext extends NodeExecutionContext implements IWebhookFunctions {
	readonly helpers: IWebhookFunctions['helpers'];

	readonly nodeHelpers: IWebhookFunctions['nodeHelpers'];

	constructor(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		private readonly webhookData: IWebhookData,
		private readonly closeFunctions: CloseFunction[],
		runExecutionData: IRunExecutionData | null,
	) {
		// When resuming an execution, seed the connection input data from the
		// first entry of the node execution stack so expressions can reference it
		let connectionInputData: INodeExecutionData[] = [];
		let executionData: IExecuteData | undefined;
		if (runExecutionData?.executionData !== undefined) {
			executionData = runExecutionData.executionData.nodeExecutionStack[0];
			if (executionData !== undefined) {
				connectionInputData = executionData.data.main[0]!;
			}
		}
		super(
			workflow,
			node,
			additionalData,
			mode,
			runExecutionData,
			0,
			connectionInputData,
			executionData,
		);
		this.helpers = {
			createDeferredPromise,
			returnJsonArray,
			...getRequestHelperFunctions(workflow, node, additionalData),
			...getBinaryHelperFunctions(additionalData, workflow.id),
		};
		this.nodeHelpers = {
			copyBinaryFile: async (filePath, fileName, mimeType) =>
				await copyBinaryFile(
					this.workflow.id,
					this.additionalData.executionId!,
					filePath,
					fileName,
					mimeType,
				),
		};
	}

	/** Returns the decrypted credentials of the given type for this node. */
	async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
		return await this._getCredentials<T>(type);
	}

	/** Returns the parsed body of the incoming HTTP request. */
	getBodyData() {
		return this.assertHttpRequest().body as IDataObject;
	}

	/** Returns the headers of the incoming HTTP request. */
	getHeaderData() {
		return this.assertHttpRequest().headers;
	}

	/** Returns the path parameters of the incoming HTTP request. */
	getParamsData(): object {
		return this.assertHttpRequest().params;
	}

	/** Returns the query parameters of the incoming HTTP request. */
	getQueryData(): object {
		return this.assertHttpRequest().query;
	}

	/** Returns the raw Express request object. */
	getRequestObject(): Request {
		return this.assertHttpRequest();
	}

	/** Returns the raw Express response object. */
	getResponseObject(): Response {
		if (this.additionalData.httpResponse === undefined) {
			throw new ApplicationError('Response is missing');
		}
		return this.additionalData.httpResponse;
	}

	/** Returns the incoming request, throwing when none is attached. */
	private assertHttpRequest() {
		const { httpRequest } = this.additionalData;
		if (httpRequest === undefined) {
			throw new ApplicationError('Request is missing');
		}
		return httpRequest;
	}

	/** Resolves the URL this node's webhook of the given name listens on. */
	getNodeWebhookUrl(name: WebhookType): string | undefined {
		return getNodeWebhookUrl(
			name,
			this.workflow,
			this.node,
			this.additionalData,
			this.mode,
			this.additionalKeys,
		);
	}

	/** Returns the name of the webhook this context was created for. */
	getWebhookName() {
		return this.webhookData.webhookDescription.name;
	}

	/**
	 * Resolves data supplied by sub-nodes connected to the given AI input,
	 * using the incoming request body as the expression input data.
	 */
	async getInputConnectionData(
		connectionType: AINodeConnectionType,
		itemIndex: number,
	): Promise<unknown> {
		// To be able to use expressions like "$json.sessionId" set the
		// body data the webhook received to what is normally used for
		// incoming node data.
		const connectionInputData: INodeExecutionData[] = [
			// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
			{ json: this.additionalData.httpRequest?.body || {} },
		];
		const runExecutionData: IRunExecutionData = {
			resultData: {
				runData: {},
			},
		};
		const executeData: IExecuteData = {
			data: {
				main: [connectionInputData],
			},
			node: this.node,
			source: null,
		};
		return await getInputConnectionData.call(
			this,
			this.workflow,
			runExecutionData,
			this.runIndex,
			connectionInputData,
			{} as ITaskDataConnections,
			this.additionalData,
			executeData,
			this.mode,
			this.closeFunctions,
			connectionType,
			itemIndex,
		);
	}
}

View File

@@ -0,0 +1,36 @@
import type {
IGetNodeParameterOptions,
INode,
IWorkflowExecuteAdditionalData,
Workflow,
IWorkflowNodeContext,
} from 'n8n-workflow';
import { NodeExecutionContext } from './node-execution-context';
/**
 * Minimal execution context used when loading a workflow's nodes outside a
 * run (mode `'internal'`), exposing an item-index-aware `getNodeParameter`.
 */
export class LoadWorkflowNodeContext extends NodeExecutionContext implements IWorkflowNodeContext {
	// Note that this differs from and does not shadow the function with the
	// same name in `NodeExecutionContext`, as it has the `itemIndex` parameter
	readonly getNodeParameter: IWorkflowNodeContext['getNodeParameter'];

	constructor(workflow: Workflow, node: INode, additionalData: IWorkflowExecuteAdditionalData) {
		super(workflow, node, additionalData, 'internal');
		{
			// We need to cast due to the overloaded IWorkflowNodeContext::getNodeParameter function
			// Which would require us to replicate all overload return types, as TypeScript offers
			// no convenient solution to refer to a set of overloads.
			this.getNodeParameter = ((
				parameterName: string,
				itemIndex: number,
				fallbackValue?: unknown,
				options?: IGetNodeParameterOptions,
			) =>
				this._getNodeParameter(
					parameterName,
					itemIndex,
					fallbackValue,
					options,
				)) as IWorkflowNodeContext['getNodeParameter'];
		}
	}
}

View File

@@ -0,0 +1,114 @@
import type { IRunData } from 'n8n-workflow';
import { createNodeData, toITaskData } from './helpers';
import { cleanRunData } from '../clean-run-data';
import { DirectedGraph } from '../directed-graph';
// Tests for cleanRunData: given a graph, existing run data, and a set of nodes,
// it drops the run data of those nodes and of every graph-reachable child,
// while retaining run data of parents (and dropping data for nodes not in the graph).
describe('cleanRunData', () => {
	// ┌─────┐ ┌─────┐ ┌─────┐
	// │node1├───►│node2├──►│node3│
	// └─────┘ └─────┘ └─────┘
	test('deletes all run data of all children and the node being passed in', () => {
		// ARRANGE
		const node1 = createNodeData({ name: 'Node1' });
		const node2 = createNodeData({ name: 'Node2' });
		const node3 = createNodeData({ name: 'Node3' });
		const graph = new DirectedGraph()
			.addNodes(node1, node2, node3)
			.addConnections({ from: node1, to: node2 }, { from: node2, to: node3 });
		const runData: IRunData = {
			[node1.name]: [toITaskData([{ data: { value: 1 } }])],
			[node2.name]: [toITaskData([{ data: { value: 2 } }])],
			[node3.name]: [toITaskData([{ data: { value: 3 } }])],
		};
		// ACT
		const newRunData = cleanRunData(runData, graph, new Set([node1]));
		// ASSERT
		// node1 is the root, so its own data and that of both children is removed.
		expect(newRunData).toEqual({});
	});
	// ┌─────┐ ┌─────┐ ┌─────┐
	// │node1├───►│node2├──►│node3│
	// └─────┘ └─────┘ └─────┘
	test('retains the run data of parent nodes of the node being passed in', () => {
		// ARRANGE
		const node1 = createNodeData({ name: 'Node1' });
		const node2 = createNodeData({ name: 'Node2' });
		const node3 = createNodeData({ name: 'Node3' });
		const graph = new DirectedGraph()
			.addNodes(node1, node2, node3)
			.addConnections({ from: node1, to: node2 }, { from: node2, to: node3 });
		const runData: IRunData = {
			[node1.name]: [toITaskData([{ data: { value: 1 } }])],
			[node2.name]: [toITaskData([{ data: { value: 2 } }])],
			[node3.name]: [toITaskData([{ data: { value: 3 } }])],
		};
		// ACT
		const newRunData = cleanRunData(runData, graph, new Set([node2]));
		// ASSERT
		// Only node1's data survives: it is upstream of node2.
		expect(newRunData).toEqual({ [node1.name]: runData[node1.name] });
	});
	// ┌─────┐ ┌─────┐ ┌─────┐
	// ┌─►│node1├───►│node2├──►│node3├─┐
	// │ └─────┘ └─────┘ └─────┘ │
	// │ │
	// └───────────────────────────────┘
	test('terminates when finding a cycle', () => {
		// ARRANGE
		const node1 = createNodeData({ name: 'Node1' });
		const node2 = createNodeData({ name: 'Node2' });
		const node3 = createNodeData({ name: 'Node3' });
		const graph = new DirectedGraph()
			.addNodes(node1, node2, node3)
			.addConnections(
				{ from: node1, to: node2 },
				{ from: node2, to: node3 },
				{ from: node3, to: node1 },
			);
		const runData: IRunData = {
			[node1.name]: [toITaskData([{ data: { value: 1 } }])],
			[node2.name]: [toITaskData([{ data: { value: 2 } }])],
			[node3.name]: [toITaskData([{ data: { value: 3 } }])],
		};
		// ACT
		const newRunData = cleanRunData(runData, graph, new Set([node2]));
		// ASSERT
		// In a cycle every node is a child of every other node, so all data is removed.
		// TODO: Find out if this is a desirable result in milestone 2
		expect(newRunData).toEqual({});
	});
	// ┌─────┐ ┌─────┐
	// │node1├───►│node2│
	// └─────┘ └─────┘
	test('removes run data of nodes that are not in the subgraph', () => {
		// ARRANGE
		const node1 = createNodeData({ name: 'Node1' });
		const node2 = createNodeData({ name: 'Node2' });
		const graph = new DirectedGraph()
			.addNodes(node1, node2)
			.addConnections({ from: node1, to: node2 });
		// not part of the graph
		const node3 = createNodeData({ name: 'Node3' });
		const runData: IRunData = {
			[node1.name]: [toITaskData([{ data: { value: 1 } }])],
			[node2.name]: [toITaskData([{ data: { value: 2 } }])],
			[node3.name]: [toITaskData([{ data: { value: 3 } }])],
		};
		// ACT
		const newRunData = cleanRunData(runData, graph, new Set([node2]));
		// ASSERT
		// node2's data is removed (it was passed in) and node3's is removed
		// because it is not part of the graph at all.
		expect(newRunData).toEqual({
			[node1.name]: [toITaskData([{ data: { value: 1 } }])],
		});
	});
});

View File

@@ -0,0 +1,495 @@
// NOTE: Diagrams in this file have been created with https://asciiflow.com/#/
// If you update the tests, please update the diagrams as well.
// If you add a test, please create a new diagram.
//
// Map
// 0 means the output has no run data
// 1 means the output has run data
// ►► denotes the node that the user wants to execute to
// XX denotes that the node is disabled
// PD denotes that the node has pinned data
import type { INode } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import { createNodeData, defaultWorkflowParameter } from './helpers';
import { DirectedGraph } from '../directed-graph';
describe('DirectedGraph', () => {
// ┌─────┐ ┌─────┐ ┌─────┐
// ┌─►│node1├───►│node2├──►│node3├─┐
// │ └─────┘ └─────┘ └─────┘ │
// │ │
// └───────────────────────────────┘
test('roundtrip', () => {
// ARRANGE
const node1 = createNodeData({ name: 'Node1' });
const node2 = createNodeData({ name: 'Node2' });
const node3 = createNodeData({ name: 'Node3' });
// ACT
const graph = new DirectedGraph()
.addNodes(node1, node2, node3)
.addConnections(
{ from: node1, to: node2 },
{ from: node2, to: node3 },
{ from: node3, to: node1 },
);
// ASSERT
expect(DirectedGraph.fromWorkflow(graph.toWorkflow({ ...defaultWorkflowParameter }))).toEqual(
graph,
);
});
// ┌─────┐ ┌─────┐──► null
// │node1├───►│node2│ ┌─────┐
// └─────┘ └─────┘──►│node3│
// └─────┘
//
test('linear workflow with null connections', () => {
// ARRANGE
const node1 = createNodeData({ name: 'Node1' });
const node2 = createNodeData({ name: 'Node2' });
const node3 = createNodeData({ name: 'Node3' });
// ACT
const graph = new DirectedGraph()
.addNodes(node1, node2, node3)
.addConnections({ from: node1, to: node2 }, { from: node2, to: node3, outputIndex: 1 });
// ASSERT
expect(DirectedGraph.fromWorkflow(graph.toWorkflow({ ...defaultWorkflowParameter }))).toEqual(
graph,
);
});
describe('getChildren', () => {
// ┌─────┐ ┌─────┐ ┌─────┐
// │node1├───►│node2├──►│node3│
// └─────┘ └─────┘ └─────┘
test('returns all children', () => {
// ARRANGE
const node1 = createNodeData({ name: 'Node1' });
const node2 = createNodeData({ name: 'Node2' });
const node3 = createNodeData({ name: 'Node3' });
const graph = new DirectedGraph()
.addNodes(node1, node2, node3)
.addConnections({ from: node1, to: node2 }, { from: node2, to: node3 });
// ACT
const children = graph.getChildren(node1);
// ASSERT
expect(children.size).toBe(2);
expect(children).toEqual(new Set([node2, node3]));
});
// ┌─────┐ ┌─────┐ ┌─────┐
// ┌─►│node1├───►│node2├──►│node3├─┐
// │ └─────┘ └─────┘ └─────┘ │
// │ │
// └───────────────────────────────┘
test('terminates when finding a cycle', () => {
// ARRANGE
const node1 = createNodeData({ name: 'Node1' });
const node2 = createNodeData({ name: 'Node2' });
const node3 = createNodeData({ name: 'Node3' });
const graph = new DirectedGraph()
.addNodes(node1, node2, node3)
.addConnections(
{ from: node1, to: node2 },
{ from: node2, to: node3 },
{ from: node3, to: node1 },
);
// ACT
const children = graph.getChildren(node1);
// ASSERT
expect(children.size).toBe(3);
expect(children).toEqual(new Set([node1, node2, node3]));
});
});
describe('getStronglyConnectedComponents', () => {
// ┌─────┐ ┌─────┐ ┌─────┐
// │node1├───►│node2├───►│node4│
// └─────┘ └──┬──┘ └─────┘
// ▲ │
// │ │
// ┌──┴──┐ │
// │node3│◄──────┘
// └─────┘
test('find strongly connected components', () => {
// ARRANGE
const node1 = createNodeData({ name: 'Node1' });
const node2 = createNodeData({ name: 'Node2' });
const node3 = createNodeData({ name: 'Node3' });
const node4 = createNodeData({ name: 'Node4' });
const graph = new DirectedGraph()
.addNodes(node1, node2, node3, node4)
.addConnections(
{ from: node1, to: node2 },
{ from: node2, to: node3 },
{ from: node3, to: node1 },
{ from: node2, to: node4 },
);
// ACT
const stronglyConnectedComponents = graph.getStronglyConnectedComponents();
// ASSERT
expect(stronglyConnectedComponents).toHaveLength(2);
expect(stronglyConnectedComponents).toContainEqual(new Set([node4]));
expect(stronglyConnectedComponents).toContainEqual(new Set([node3, node2, node1]));
});
// ┌────┐
// ┌───────┐ │ ├─
// │trigger├──┬──►loop│
// └───────┘ │ │ ├────┐
// │ └────┘ │
// └─────────┐ │
// ┌────┐ │ │
// ┌───►node├─┘ │
// │ └────┘ │
// │ │
// └─────────────┘
test('find strongly connected components even if they use different output indexes', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const loop = createNodeData({ name: 'loop' });
const node = createNodeData({ name: 'node' });
const graph = new DirectedGraph()
.addNodes(trigger, loop, node)
.addConnections(
{ from: trigger, to: loop },
{ from: loop, outputIndex: 1, to: node },
{ from: node, to: loop },
);
// ACT
const stronglyConnectedComponents = graph.getStronglyConnectedComponents();
// ASSERT
expect(stronglyConnectedComponents).toHaveLength(2);
expect(stronglyConnectedComponents).toContainEqual(new Set([trigger]));
expect(stronglyConnectedComponents).toContainEqual(new Set([node, loop]));
});
});
describe('depthFirstSearch', () => {
// ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐
// │node0├───►│node1├───►│node2├───►│node4│───►│node5│
// └─────┘ └─────┘ └──┬──┘ └─────┘ └─────┘
// ▲ │
// │ │
// ┌──┴──┐ │
// │node3│◄──────┘
// └─────┘
test('calls nodes in the correct order and stops when it found the node', () => {
// ARRANGE
const node0 = createNodeData({ name: 'Node0' });
const node1 = createNodeData({ name: 'Node1' });
const node2 = createNodeData({ name: 'Node2' });
const node3 = createNodeData({ name: 'Node3' });
const node4 = createNodeData({ name: 'Node4' });
const node5 = createNodeData({ name: 'Node5' });
const graph = new DirectedGraph()
.addNodes(node0, node1, node2, node3, node4, node5)
.addConnections(
{ from: node0, to: node1 },
{ from: node1, to: node2 },
{ from: node2, to: node3 },
{ from: node3, to: node1 },
{ from: node2, to: node4 },
{ from: node4, to: node5 },
);
const fn = jest.fn().mockImplementation((node: INode) => node === node4);
// ACT
const foundNode = graph.depthFirstSearch({
from: node0,
fn,
});
// ASSERT
expect(foundNode).toBe(node4);
expect(fn).toHaveBeenCalledTimes(5);
expect(fn.mock.calls).toEqual([[node0], [node1], [node2], [node3], [node4]]);
});
});
describe('getParentConnections', () => {
// ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐
// │node1├──►│node2├──►│node3│──►│node4│
// └─────┘ └─────┘ └─────┘ └─────┘
test('returns all parent connections', () => {
// ARRANGE
const node1 = createNodeData({ name: 'Node1' });
const node2 = createNodeData({ name: 'Node2' });
const node3 = createNodeData({ name: 'Node3' });
const node4 = createNodeData({ name: 'Node4' });
const graph = new DirectedGraph()
.addNodes(node1, node2, node3, node4)
.addConnections(
{ from: node1, to: node2 },
{ from: node2, to: node3 },
{ from: node3, to: node4 },
);
// ACT
const connections = graph.getParentConnections(node3);
// ASSERT
const expectedConnections = graph.getConnections().filter((c) => c.to !== node4);
expect(connections.size).toBe(2);
expect(connections).toEqual(new Set(expectedConnections));
});
// ┌─────┐ ┌─────┐ ┌─────┐
// ┌─►│node1├───►│node2├──►│node3├─┐
// │ └─────┘ └─────┘ └─────┘ │
// │ │
// └───────────────────────────────┘
test('terminates when finding a cycle', () => {
// ARRANGE
const node1 = createNodeData({ name: 'Node1' });
const node2 = createNodeData({ name: 'Node2' });
const node3 = createNodeData({ name: 'Node3' });
const graph = new DirectedGraph()
.addNodes(node1, node2, node3)
.addConnections(
{ from: node1, to: node2 },
{ from: node2, to: node3 },
{ from: node3, to: node1 },
);
// ACT
const connections = graph.getParentConnections(node3);
// ASSERT
expect(connections.size).toBe(3);
expect(connections).toEqual(new Set(graph.getConnections()));
});
});
describe('removeNode', () => {
// XX
// ┌─────┐ ┌─────┐ ┌─────┐
// │node0├───►│node1├──►│node2│
// └─────┘ └─────┘ └─────┘
// turns into
// ┌─────┐ ┌─────┐
// │node0│ │node2│
// └─────┘ └─────┘
test('remove node and all connections', () => {
// ARRANGE
const node0 = createNodeData({ name: 'node0' });
const node1 = createNodeData({ name: 'node1' });
const node2 = createNodeData({ name: 'node2' });
const graph = new DirectedGraph()
.addNodes(node0, node1, node2)
.addConnections({ from: node0, to: node1 }, { from: node0, to: node2 });
// ACT
graph.removeNode(node1);
// ASSERT
expect(graph).toEqual(
new DirectedGraph().addNodes(node0, node2).addConnections({ from: node0, to: node2 }),
);
});
// XX
// ┌─────┐ ┌─────┐ ┌─────┐
// │node0├───►│node1├──►│node2│
// └─────┘ └─────┘ └─────┘
// turns into
// ┌─────┐ ┌─────┐
// │node0├──►│node2│
// └─────┘ └─────┘
test('remove node, but reconnect connections', () => {
// ARRANGE
const node0 = createNodeData({ name: 'node0' });
const node1 = createNodeData({ name: 'node1' });
const node2 = createNodeData({ name: 'node2' });
const graph = new DirectedGraph()
.addNodes(node0, node1, node2)
.addConnections({ from: node0, to: node1 }, { from: node1, to: node2 });
// ACT
const newConnections = graph.removeNode(node1, { reconnectConnections: true });
// ASSERT
expect(newConnections).toHaveLength(1);
expect(newConnections[0]).toEqual({
from: node0,
outputIndex: 0,
type: NodeConnectionType.Main,
inputIndex: 0,
to: node2,
});
expect(graph).toEqual(
new DirectedGraph().addNodes(node0, node2).addConnections({ from: node0, to: node2 }),
);
});
// XX
// ┌─────┐ ┌─────┐ ┌─────┐
// │ │o o│ │o o│ │
// │ │o─┐ o│ │o o│ │
// │node0│o └►o│node1│o o│node2│
// │ │o o│ │o─┐ o│ │
// │ │o o│ │o └►o│ │
// └─────┘ └─────┘ └─────┘
// turns into
// ┌─────┐ ┌─────┐
// │ │o o│ │
// │ │o───────┐ o│ │
// │node0│o │ o│node2│
// │ │o │ o│ │
// │ │o └──────►o│ │
// └─────┘ └─────┘
test('remove node, reconnect connections and retaining the input indexes', () => {
// ARRANGE
const node0 = createNodeData({ name: 'node0' });
const node1 = createNodeData({ name: 'node1' });
const node2 = createNodeData({ name: 'node2' });
const graph = new DirectedGraph()
.addNodes(node0, node1, node2)
.addConnections(
{ from: node0, outputIndex: 1, inputIndex: 2, to: node1 },
{ from: node1, outputIndex: 3, inputIndex: 4, to: node2 },
);
// ACT
const newConnections = graph.removeNode(node1, { reconnectConnections: true });
// ASSERT
expect(newConnections).toHaveLength(1);
expect(newConnections[0]).toEqual({
from: node0,
outputIndex: 1,
type: NodeConnectionType.Main,
inputIndex: 4,
to: node2,
});
expect(graph).toEqual(
new DirectedGraph()
.addNodes(node0, node2)
.addConnections({ from: node0, outputIndex: 1, inputIndex: 4, to: node2 }),
);
});
// XX
// ┌─────┐ ┌─────┐ ┌─────┐
// │ │o o│ │o │ │
// │ │o─┐ o│ │o │ │
// │node0│ └►o│node1│o ┌►o│node2│
// │ │ │ │o─┘ │ │
// │ │ │ │ │ │
// └─────┘ └─────┘ └─────┘
// turns into
// ┌─────┐ ┌─────┐
// │ │o │ │
// │ │o───────┐ │ │
// │node0│ └──────►o│node2│
// │ │ │ │
// │ │ │ │
// └─────┘ └─────┘
test('remove node, reconnect connections and retaining the input indexes, even if the child has less inputs than the than the removed node had', () => {
// ARRANGE
const node0 = createNodeData({ name: 'node0' });
const node1 = createNodeData({ name: 'node1' });
const node2 = createNodeData({ name: 'node2' });
const graph = new DirectedGraph()
.addNodes(node0, node1, node2)
.addConnections(
{ from: node0, outputIndex: 1, inputIndex: 2, to: node1 },
{ from: node1, outputIndex: 3, inputIndex: 0, to: node2 },
);
// ACT
const newConnections = graph.removeNode(node1, { reconnectConnections: true });
// ASSERT
const expectedGraph = new DirectedGraph()
.addNodes(node0, node2)
.addConnections({ from: node0, outputIndex: 1, inputIndex: 0, to: node2 });
expect(newConnections).toHaveLength(1);
expect(newConnections).toEqual(expectedGraph.getConnections());
expect(graph).toEqual(expectedGraph);
});
// ┌─────┐ ┌──────┐
// │left0├─┐ XX ┌►│right0│
// └─────┘ │ ┌──────┐ │ └──────┘
// ├─►│center├──┤
// ┌─────┐ │ └──────┘ │ ┌──────┐
// │left1├─┘ └►│right1│
// └─────┘ └──────┘
// turns into
//
// ┌─────┐ ┌──────┐
// │left0├─┐ ┌─►│right0│
// └─────┘ │ │ └──────┘
// ├───────────┤
// ┌─────┐ │ │ ┌──────┐
// │left1├─┘ └─►│right1│
// └─────┘ └──────┘
test('remove node, reconnect connections and multiplexes them', () => {
// ARRANGE
const left0 = createNodeData({ name: 'left0' });
const left1 = createNodeData({ name: 'left1' });
const center = createNodeData({ name: 'center' });
const right0 = createNodeData({ name: 'right0' });
const right1 = createNodeData({ name: 'right1' });
const graph = new DirectedGraph()
.addNodes(left0, left1, center, right0, right1)
.addConnections(
{ from: left0, to: center },
{ from: left1, to: center },
{ from: center, to: right0 },
{ from: center, to: right1 },
);
// ACT
const newConnections = graph.removeNode(center, { reconnectConnections: true });
// ASSERT
const expectedGraph = new DirectedGraph()
.addNodes(left0, left1, right0, right1)
.addConnections(
{ from: left0, to: right0 },
{ from: left0, to: right1 },
{ from: left1, to: right0 },
{ from: left1, to: right1 },
);
expect(newConnections).toHaveLength(4);
expect(newConnections).toEqual(expectedGraph.getConnections());
expect(graph).toEqual(expectedGraph);
});
});
describe('hasNode', () => {
test("returns node if it's part of the graph", () => {
// ARRANGE
const node = createNodeData({ name: 'node' });
const graph = new DirectedGraph().addNodes(node);
// ACT & ASSERT
expect(graph.hasNode(node.name)).toBe(true);
});
test('returns undefined if there is no node with that name in the graph', () => {
// ARRANGE
const node = createNodeData({ name: 'node' });
const graph = new DirectedGraph().addNodes(node);
// ACT & ASSERT
expect(graph.hasNode(node.name + 'foo')).toBe(false);
});
});
});

View File

@@ -0,0 +1,124 @@
// NOTE: Diagrams in this file have been created with https://asciiflow.com/#/
// If you update the tests, please update the diagrams as well.
// If you add a test, please create a new diagram.
//
// Map
// 0 means the output has no run data
// 1 means the output has run data
// ►► denotes the node that the user wants to execute to
// XX denotes that the node is disabled
// PD denotes that the node has pinned data
import { NodeConnectionType } from 'n8n-workflow';
import { createNodeData } from './helpers';
import { DirectedGraph } from '../directed-graph';
import { filterDisabledNodes } from '../filter-disabled-nodes';
// Tests for filterDisabledNodes: returns a new graph with disabled nodes
// removed and their incoming main connections reconnected to their children.
describe('filterDisabledNodes', () => {
	// XX
	// ┌───────┐ ┌────────┐ ►►
	// │ ├────────►│ │ ┌───────────┐
	// │trigger│ │disabled├─────►│destination│
	// │ ├────────►│ │ └───────────┘
	// └───────┘ └────────┘
	// turns into
	// ┌───────┐ ►►
	// │ │ ┌───────────┐
	// │trigger├─────►│destination│
	// │ │ └───────────┘
	// └───────┘
	test('filter disabled nodes', () => {
		const trigger = createNodeData({ name: 'trigger' });
		const disabled = createNodeData({ name: 'disabled', disabled: true });
		const destination = createNodeData({ name: 'destination' });
		const graph = new DirectedGraph()
			.addNodes(trigger, disabled, destination)
			.addConnections({ from: trigger, to: disabled }, { from: disabled, to: destination });
		const subgraph = filterDisabledNodes(graph);
		expect(subgraph).toEqual(
			new DirectedGraph()
				.addNodes(trigger, destination)
				.addConnections({ from: trigger, to: destination }),
		);
	});
	// XX XX
	// ┌───────┐ ┌─────┐ ┌─────┐ ┌───────────┐
	// │trigger├────►│node1├────►│node2├────►│destination│
	// └───────┘ └─────┘ └─────┘ └───────────┘
	// turns into
	// ┌───────┐ ┌───────────┐
	// │trigger├────►│destination│
	// └───────┘ └───────────┘
	test('filter multiple disabled nodes in a row', () => {
		// ARRANGE
		const trigger = createNodeData({ name: 'trigger' });
		const disabledNode1 = createNodeData({ name: 'disabledNode1', disabled: true });
		const disabledNode2 = createNodeData({ name: 'disabledNode2', disabled: true });
		const destination = createNodeData({ name: 'destination' });
		const graph = new DirectedGraph()
			.addNodes(trigger, disabledNode1, disabledNode2, destination)
			.addConnections(
				{ from: trigger, to: disabledNode1 },
				{ from: disabledNode1, to: disabledNode2 },
				{ from: disabledNode2, to: destination },
			);
		// ACT
		const subgraph = filterDisabledNodes(graph);
		// ASSERT
		// Both disabled nodes are collapsed into a single trigger→destination connection.
		expect(subgraph).toEqual(
			new DirectedGraph()
				.addNodes(trigger, destination)
				.addConnections({ from: trigger, to: destination }),
		);
	});
	describe('root nodes', () => {
		// XX
		// ┌───────┐ ┌────┐ ┌───────────┐
		// │trigger├───►root├───►destination│
		// └───────┘ └──▲─┘ └───────────┘
		// │AiLanguageModel
		// ┌┴──────┐
		// │aiModel│
		// └───────┘
		// turns into
		// ┌───────┐ ┌───────────┐
		// │trigger├────────────►destination│
		// └───────┘ └───────────┘
		test('filter disabled root nodes', () => {
			// ARRANGE
			const trigger = createNodeData({ name: 'trigger' });
			const root = createNodeData({ name: 'root', disabled: true });
			const aiModel = createNodeData({ name: 'ai_model' });
			const destination = createNodeData({ name: 'destination' });
			const graph = new DirectedGraph()
				.addNodes(trigger, root, aiModel, destination)
				.addConnections(
					{ from: trigger, to: root },
					{ from: aiModel, type: NodeConnectionType.AiLanguageModel, to: root },
					{ from: root, to: destination },
				);
			// ACT
			const subgraph = filterDisabledNodes(graph);
			// ASSERT
			expect(subgraph).toEqual(
				new DirectedGraph()
					// The model is still in the graph, but orphaned. This is ok for
					// partial executions as findSubgraph will remove orphaned nodes.
					.addNodes(trigger, destination, aiModel)
					.addConnections({ from: trigger, to: destination }),
			);
		});
	});
});

View File

@@ -0,0 +1,567 @@
// NOTE: Diagrams in this file have been created with https://asciiflow.com/#/
// If you update the tests, please update the diagrams as well.
// If you add a test, please create a new diagram.
//
// Map
// 0 means the output has no run data
// 1 means the output has run data
// ►► denotes the node that the user wants to execute to
// XX denotes that the node is disabled
// PD denotes that the node has pinned data
import { type IPinData, type IRunData } from 'n8n-workflow';
import { createNodeData, toITaskData } from './helpers';
import { DirectedGraph } from '../directed-graph';
import { findStartNodes, isDirty } from '../find-start-nodes';
// Tests for isDirty: a node is not considered dirty when it already has
// pinned data or run data available.
describe('isDirty', () => {
	test("if the node has pinned data it's not dirty", () => {
		const target = createNodeData({ name: 'Basic Node' });
		const pinned: IPinData = { [target.name]: [{ json: { value: 1 } }] };

		expect(isDirty(target, undefined, pinned)).toBe(false);
	});

	test("if the node has run data it's not dirty", () => {
		const target = createNodeData({ name: 'Basic Node' });
		const run: IRunData = { [target.name]: [toITaskData([{ data: { value: 1 } }])] };

		expect(isDirty(target, run)).toBe(false);
	});
});
describe('findStartNodes', () => {
// ►►
// ┌───────┐
// │trigger│
// └───────┘
test('finds the start node if there is only a trigger', () => {
const node = createNodeData({ name: 'Basic Node' });
const graph = new DirectedGraph().addNode(node);
const startNodes = findStartNodes({
graph,
trigger: node,
destination: node,
pinData: {},
runData: {},
});
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(node);
});
// ►►
// ┌───────┐ ┌───────────┐
// │trigger├────►│destination│
// └───────┘ └───────────┘
test('finds the start node in a simple graph', () => {
const trigger = createNodeData({ name: 'trigger' });
const destination = createNodeData({ name: 'destination' });
const graph = new DirectedGraph()
.addNodes(trigger, destination)
.addConnection({ from: trigger, to: destination });
// if the trigger has no run data
{
const startNodes = findStartNodes({
graph,
trigger,
destination,
pinData: {},
runData: {},
});
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(trigger);
}
// if the trigger has run data
{
const runData: IRunData = {
[trigger.name]: [toITaskData([{ data: { value: 1 } }])],
};
const startNodes = findStartNodes({
graph,
trigger,
destination,
runData,
pinData: {},
});
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(destination);
}
});
// ┌───────┐ ►►
// │ │1──┐ ┌────┐
// │trigger│ ├─►│node│
// │ │1──┘ └────┘
// └───────┘
// All nodes have run data. `findStartNodes` should return node twice
// because it has 2 input connections.
test('multiple outputs', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node = createNodeData({ name: 'node' });
const graph = new DirectedGraph()
.addNodes(trigger, node)
.addConnections(
{ from: trigger, to: node, outputIndex: 0, inputIndex: 0 },
{ from: trigger, to: node, outputIndex: 1, inputIndex: 0 },
);
const runData: IRunData = {
[trigger.name]: [
toITaskData([
{ data: { value: 1 }, outputIndex: 0 },
{ data: { value: 1 }, outputIndex: 1 },
]),
],
[node.name]: [toITaskData([{ data: { value: 1 } }])],
};
// ACT
const startNodes = findStartNodes({
graph,
trigger,
destination: node,
runData,
pinData: {},
});
// ASSERT
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(node);
});
// ┌─────┐ ┌─────┐ ►►
//┌───────┐ │ ├────┬────────►│ │ ┌─────┐
//│trigger├───►│node1│ │ │node2├────┬───►│node4│
//└───────┘ │ ├────┼────┬───►│ │ │ └─────┘
// └─────┘ │ │ └─────┘ │
// │ │ │
// │ │ │
// │ │ │
// │ │ ┌─────┐ │
// │ └───►│ │ │
// │ │node3├────┘
// └────────►│ │
// └─────┘
test('complex example with multiple outputs and inputs', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node1 = createNodeData({ name: 'node1' });
const node2 = createNodeData({ name: 'node2' });
const node3 = createNodeData({ name: 'node3' });
const node4 = createNodeData({ name: 'node4' });
const graph = new DirectedGraph()
.addNodes(trigger, node1, node2, node3, node4)
.addConnections(
{ from: trigger, to: node1 },
{ from: node1, to: node2, outputIndex: 0, inputIndex: 0 },
{ from: node1, to: node2, outputIndex: 1, inputIndex: 1 },
{ from: node1, to: node3, outputIndex: 0, inputIndex: 1 },
{ from: node1, to: node3, outputIndex: 1, inputIndex: 0 },
{ from: node2, to: node4 },
{ from: node3, to: node4 },
);
{
// ACT
const startNodes = findStartNodes({
graph,
trigger,
destination: node4,
pinData: {},
runData: {},
});
// ASSERT
expect(startNodes.size).toBe(1);
// no run data means the trigger is the start node
expect(startNodes).toContainEqual(trigger);
}
{
// run data for everything
const runData: IRunData = {
[trigger.name]: [toITaskData([{ data: { value: 1 } }])],
[node1.name]: [toITaskData([{ data: { value: 1 } }])],
[node2.name]: [toITaskData([{ data: { value: 1 } }])],
[node3.name]: [toITaskData([{ data: { value: 1 } }])],
[node4.name]: [toITaskData([{ data: { value: 1 } }])],
};
// ACT
const startNodes = findStartNodes({
graph,
trigger,
destination: node4,
runData,
pinData: {},
});
// ASSERT
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(node4);
}
});
// ►►
// ┌───────┐1 ┌────┐
// │ ├────────►│ │
// │trigger│ │node│
// │ ├────────►│ │
// └───────┘0 └────┘
// The merge node only gets data on one input, so the it should only be once
// in the start nodes
test('multiple connections with the first one having data', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node = createNodeData({ name: 'node' });
const graph = new DirectedGraph()
.addNodes(trigger, node)
.addConnections(
{ from: trigger, to: node, inputIndex: 0, outputIndex: 0 },
{ from: trigger, to: node, inputIndex: 1, outputIndex: 1 },
);
// ACT
const startNodes = findStartNodes({
graph,
trigger,
destination: node,
runData: {
[trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])],
},
pinData: {},
});
// ASSERT
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(node);
});
// ►►
// ┌───────┐0 ┌────┐
// │ ├────────►│ │
// │trigger│ │node│
// │ ├────────►│ │
// └───────┘1 └────┘
// The merge node only gets data on one input, so the it should only be once
// in the start nodes
test('multiple connections with the second one having data', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node = createNodeData({ name: 'node' });
const graph = new DirectedGraph()
.addNodes(trigger, node)
.addConnections(
{ from: trigger, to: node, inputIndex: 0, outputIndex: 0 },
{ from: trigger, to: node, inputIndex: 1, outputIndex: 1 },
);
// ACT
const startNodes = findStartNodes({
graph,
trigger,
destination: node,
runData: {
[trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 1 }])],
},
pinData: {},
});
// ASSERT
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(node);
});
// ►►
// ┌───────┐1 ┌────┐
// │ ├────────►│ │
// │trigger│ │node│
// │ ├────────►│ │
// └───────┘1 └────┘
// The merge node gets data on both inputs, so the it should be in the start
// nodes twice.
test('multiple connections with both having data', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node = createNodeData({ name: 'node' });
const graph = new DirectedGraph()
.addNodes(trigger, node)
.addConnections(
{ from: trigger, to: node, inputIndex: 0, outputIndex: 0 },
{ from: trigger, to: node, inputIndex: 1, outputIndex: 1 },
);
// ACT
const startNodes = findStartNodes({
graph,
trigger,
destination: node,
runData: {
[trigger.name]: [
toITaskData([
{ data: { value: 1 }, outputIndex: 0 },
{ data: { value: 1 }, outputIndex: 1 },
]),
],
},
pinData: {},
});
// ASSERT
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(node);
});
// ►►
// ┌───────┐ ┌────┐
// │ │1 ┌────►│ │
// │trigger├───┤ │node│
// │ │ └────►│ │
// └───────┘ └────┘
test('multiple connections with both having data', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node1 = createNodeData({ name: 'node1' });
const node2 = createNodeData({ name: 'node2' });
const node3 = createNodeData({ name: 'node3' });
const graph = new DirectedGraph()
.addNodes(trigger, node1, node2, node3)
.addConnections(
{ from: trigger, to: node1 },
{ from: trigger, to: node2 },
{ from: node1, to: node3 },
{ from: node2, to: node3 },
);
// ACT
const startNodes = findStartNodes({
graph,
trigger,
destination: node3,
runData: {
[trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])],
[node1.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])],
[node2.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])],
},
pinData: {},
});
// ASSERT
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(node3);
});
// ►►
// ┌───────┐ ┌─────┐0 ┌─────┐
// │ │1 │ ├────────►│ │
// │trigger├───────►│node1│ │node2│
// │ │ │ ├────────►│ │
// └───────┘ └─────┘1 └─────┘
test('multiple connections with trigger', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node1 = createNodeData({ name: 'node1' });
const node2 = createNodeData({ name: 'node2' });
const graph = new DirectedGraph()
.addNodes(trigger, node1, node2)
.addConnections(
{ from: trigger, to: node1 },
{ from: node1, to: node2, outputIndex: 0 },
{ from: node1, to: node2, outputIndex: 1 },
);
// ACT
const startNodes = findStartNodes({
graph,
trigger: node1,
destination: node2,
runData: {
[trigger.name]: [toITaskData([{ data: { value: 1 } }])],
[node1.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 1 }])],
},
pinData: {},
});
// ASSERT
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(node2);
});
//                             ►►
//┌───────┐1     ┌─────┐1     ┌─────┐
//│Trigger├───┬─►│Node1├───┬─►│Node2│
//└───────┘   │  └─────┘   │  └─────┘
//            │            │
//            └────────────┘
test('terminates when called with graph that contains cycles', () => {
	// ARRANGE: node1 connects back into itself; traversal must not loop forever.
	const start = createNodeData({ name: 'trigger' });
	const looping = createNodeData({ name: 'node1' });
	const end = createNodeData({ name: 'node2' });

	const graph = new DirectedGraph();
	graph.addNodes(start, looping, end);
	graph.addConnections(
		{ from: start, to: looping },
		{ from: looping, to: looping },
		{ from: looping, to: end },
	);

	const runData: IRunData = {
		[start.name]: [toITaskData([{ data: { value: 1 } }])],
		[looping.name]: [toITaskData([{ data: { value: 1 } }])],
	};
	const pinData: IPinData = {};

	// ACT
	const startNodes = findStartNodes({
		graph,
		trigger: start,
		destination: end,
		runData,
		pinData,
	});

	// ASSERT
	expect(startNodes.size).toBe(1);
	expect(startNodes).toContainEqual(end);
});
describe('custom loop logic', () => {
	// The SplitInBatches ("loop") node is special-cased by findStartNodes:
	// the loop only counts as finished when its `done` output (index 0)
	// produced data in its most recent run.
	test('if the last run of loop node has no data (null) on the done output, then the loop is the start node', () => {
		// ARRANGE
		const trigger = createNodeData({ name: 'trigger' });
		const loop = createNodeData({ name: 'loop', type: 'n8n-nodes-base.splitInBatches' });
		const inLoop = createNodeData({ name: 'inLoop' });
		const afterLoop = createNodeData({ name: 'afterLoop' });
		const graph = new DirectedGraph()
			.addNodes(trigger, loop, inLoop, afterLoop)
			.addConnections(
				{ from: trigger, to: loop },
				{ from: loop, outputIndex: 1, to: inLoop },
				{ from: inLoop, to: loop },
				{ from: loop, to: afterLoop },
			);
		const runData: IRunData = {
			[trigger.name]: [toITaskData([{ data: { name: 'trigger' } }])],
			[loop.name]: [
				// only output on the `loop` branch, but no output on the `done`
				// branch
				toITaskData([{ outputIndex: 1, data: { name: 'loop' } }]),
			],
			[inLoop.name]: [toITaskData([{ data: { name: 'inLoop' } }])],
		};

		// ACT
		const startNodes = findStartNodes({
			graph,
			trigger,
			destination: afterLoop,
			runData,
			pinData: {},
		});

		// ASSERT: the loop itself must be re-entered, not afterLoop.
		expect(startNodes.size).toBe(1);
		expect(startNodes).toContainEqual(loop);
	});

	// Same as above, but the `done` output is an empty array instead of
	// null — both represent "no data" and must be treated identically.
	test('if the last run of loop node has no data (empty array) on the done output, then the loop is the start node', () => {
		// ARRANGE
		const trigger = createNodeData({ name: 'trigger' });
		const loop = createNodeData({ name: 'loop', type: 'n8n-nodes-base.splitInBatches' });
		const inLoop = createNodeData({ name: 'inLoop' });
		const afterLoop = createNodeData({ name: 'afterLoop' });
		const graph = new DirectedGraph()
			.addNodes(trigger, loop, inLoop, afterLoop)
			.addConnections(
				{ from: trigger, to: loop },
				{ from: loop, outputIndex: 1, to: inLoop },
				{ from: inLoop, to: loop },
				{ from: loop, to: afterLoop },
			);
		const runData: IRunData = {
			[trigger.name]: [toITaskData([{ data: { name: 'trigger' } }])],
			[loop.name]: [
				// This is handcrafted because `toITaskData` does not allow inserting
				// an empty array like the first element of `main` below. But the
				// execution engine creates ITaskData like this.
				{
					executionStatus: 'success',
					executionTime: 0,
					startTime: 0,
					source: [],
					data: { main: [[], [{ json: { name: 'loop' } }]] },
				},
			],
			[inLoop.name]: [toITaskData([{ data: { name: 'inLoop' } }])],
		};

		// ACT
		const startNodes = findStartNodes({
			graph,
			trigger,
			destination: afterLoop,
			runData,
			pinData: {},
		});

		// ASSERT
		expect(startNodes.size).toBe(1);
		expect(startNodes).toContainEqual(loop);
	});

	// Once the `done` output (index 0) carries data, the loop is finished
	// and execution resumes at the node behind it.
	test('if the loop has data on the done output in the last run it does not become a start node', () => {
		// ARRANGE
		const trigger = createNodeData({ name: 'trigger' });
		const loop = createNodeData({ name: 'loop', type: 'n8n-nodes-base.splitInBatches' });
		const inLoop = createNodeData({ name: 'inLoop' });
		const afterLoop = createNodeData({ name: 'afterLoop' });
		const graph = new DirectedGraph()
			.addNodes(trigger, loop, inLoop, afterLoop)
			.addConnections(
				{ from: trigger, to: loop },
				{ from: loop, outputIndex: 1, to: inLoop },
				{ from: inLoop, to: loop },
				{ from: loop, to: afterLoop },
			);
		const runData: IRunData = {
			[trigger.name]: [toITaskData([{ data: { name: 'trigger' } }])],
			[loop.name]: [
				toITaskData([{ outputIndex: 1, data: { name: 'loop' } }]),
				toITaskData([{ outputIndex: 0, data: { name: 'done' } }]),
			],
			[inLoop.name]: [toITaskData([{ data: { name: 'inLoop' } }])],
		};

		// ACT
		const startNodes = findStartNodes({
			graph,
			trigger,
			destination: afterLoop,
			runData,
			pinData: {},
		});

		// ASSERT
		expect(startNodes.size).toBe(1);
		expect(startNodes).toContainEqual(afterLoop);
	});
});
});

View File

@@ -0,0 +1,262 @@
// NOTE: Diagrams in this file have been created with https://asciiflow.com/#/
// If you update the tests, please update the diagrams as well.
// If you add a test, please create a new diagram.
//
// Map
// 0 means the output has no run data
// 1 means the output has run data
// ►► denotes the node that the user wants to execute to
// XX denotes that the node is disabled
// PD denotes that the node has pinned data
import { NodeConnectionType } from 'n8n-workflow';
import { createNodeData } from './helpers';
import { DirectedGraph } from '../directed-graph';
import { findSubgraph } from '../find-subgraph';
describe('findSubgraph', () => {
// ►►
// ┌───────┐ ┌───────────┐
// │trigger├────►│destination│
// └───────┘ └───────────┘
test('simple', () => {
const trigger = createNodeData({ name: 'trigger' });
const destination = createNodeData({ name: 'destination' });
const graph = new DirectedGraph()
.addNodes(trigger, destination)
.addConnections({ from: trigger, to: destination });
const subgraph = findSubgraph({ graph, destination, trigger });
expect(subgraph).toEqual(graph);
});
//                       ►►
// ┌───────┐          ┌───────────┐
// │       ├─────────►│           │
// │  If   │          │   noOp    │
// │       ├─────────►│           │
// └───────┘          └───────────┘
// NOTE(review): the diagram previously labeled these nodes trigger/destination,
// but the test uses an If node with both outputs wired into noOp.
test('multiple connections', () => {
	const ifNode = createNodeData({ name: 'If' });
	const noOp = createNodeData({ name: 'noOp' });
	const graph = new DirectedGraph()
		.addNodes(ifNode, noOp)
		.addConnections(
			{ from: ifNode, to: noOp, outputIndex: 0 },
			{ from: ifNode, to: noOp, outputIndex: 1 },
		);
	// Both parallel connections must survive subgraph extraction.
	const subgraph = findSubgraph({ graph, destination: noOp, trigger: ifNode });
	expect(subgraph).toEqual(graph);
});
// ►►
// ┌───────┐ ┌───────────┐
// │ ├────────►│ │ ┌────┐
// │trigger│ │destination├─────►│node│
// │ ├────────►│ │ └────┘
// └───────┘ └───────────┘
test('disregard nodes after destination', () => {
const trigger = createNodeData({ name: 'trigger' });
const destination = createNodeData({ name: 'destination' });
const node = createNodeData({ name: 'node' });
const graph = new DirectedGraph()
.addNodes(trigger, destination, node)
.addConnections({ from: trigger, to: destination }, { from: destination, to: node });
const subgraph = findSubgraph({ graph, destination, trigger });
expect(subgraph).toEqual(
new DirectedGraph()
.addNodes(trigger, destination)
.addConnections({ from: trigger, to: destination }),
);
});
//                            ►►
// ┌───────┐     ┌─────┐     ┌─────┐
// │Trigger├──┬─►│Node1├──┬─►│Node2│
// └───────┘  │  └─────┘  │  └─────┘
//            │           │
//            └───────────┘
// Guards against infinite traversal: node1 has a self-loop.
test('terminates when called with graph that contains cycles', () => {
	// ARRANGE
	const trigger = createNodeData({ name: 'trigger' });
	const node1 = createNodeData({ name: 'node1' });
	const node2 = createNodeData({ name: 'node2' });
	const graph = new DirectedGraph()
		.addNodes(trigger, node1, node2)
		.addConnections(
			{ from: trigger, to: node1 },
			{ from: node1, to: node1 },
			{ from: node1, to: node2 },
		);

	// ACT
	const subgraph = findSubgraph({ graph, destination: node2, trigger });

	// ASSERT: the cycle edge stays in the subgraph — nothing is pruned.
	expect(subgraph).toEqual(graph);
});
//               ►►
// ┌───────┐    ┌─────┐
// │Trigger├──┬►│Node1│
// └───────┘  │ └─────┘
//            │
//  ┌─────┐   │
//  │Node2├───┘
//  └─────┘
// NOTE: This test's original title duplicated the cycle test above, but this
// graph contains no cycle. It actually verifies that a second root node
// (node2), which is not reachable from the trigger, is pruned from the
// subgraph along with its connection.
test('removes root nodes that are not reachable from the trigger', () => {
	// ARRANGE
	const trigger = createNodeData({ name: 'trigger' });
	const node1 = createNodeData({ name: 'node1' });
	const node2 = createNodeData({ name: 'node2' });
	const graph = new DirectedGraph()
		.addNodes(trigger, node1, node2)
		.addConnections({ from: trigger, to: node1 }, { from: node2, to: node1 });

	// ACT
	const subgraph = findSubgraph({ graph, destination: node1, trigger });

	// ASSERT: node2 and its edge into node1 are gone.
	expect(subgraph).toEqual(
		new DirectedGraph().addNodes(trigger, node1).addConnections({ from: trigger, to: node1 }),
	);
});
// ►►
// ┌───────┐ ┌───────────┐ ┌───────────┐
// │Trigger├─┬─►│Destination├──►│AnotherNode├───┐
// └───────┘ │ └───────────┘ └───────────┘ │
// │ │
// └──────────────────────────────────┘
test('terminates if the destination node is part of a cycle', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const destination = createNodeData({ name: 'destination' });
const anotherNode = createNodeData({ name: 'anotherNode' });
const graph = new DirectedGraph()
.addNodes(trigger, destination, anotherNode)
.addConnections(
{ from: trigger, to: destination },
{ from: destination, to: anotherNode },
{ from: anotherNode, to: destination },
);
// ACT
const subgraph = findSubgraph({ graph, destination, trigger });
// ASSERT
expect(subgraph).toEqual(
new DirectedGraph()
.addNodes(trigger, destination)
.addConnections({ from: trigger, to: destination }),
);
});
describe('root nodes', () => {
	// "Root" here refers to sub-nodes attached via non-Main connections
	// (e.g. AI language models feeding an agent node).
	//                ►►
	// ┌───────┐     ┌───────────┐
	// │trigger├────►│destination│
	// └───────┘     └──▲────────┘
	//                  │AiLanguageModel
	//                 ┌┴──────┐
	//                 │aiModel│
	//                 └───────┘
	test('always retain connections that have a different type than `NodeConnectionType.Main`', () => {
		// ARRANGE
		const trigger = createNodeData({ name: 'trigger' });
		const destination = createNodeData({ name: 'destination' });
		const aiModel = createNodeData({ name: 'ai_model' });
		const graph = new DirectedGraph()
			.addNodes(trigger, destination, aiModel)
			.addConnections(
				{ from: trigger, to: destination },
				{ from: aiModel, type: NodeConnectionType.AiLanguageModel, to: destination },
			);

		// ACT
		const subgraph = findSubgraph({ graph, destination, trigger });

		// ASSERT: the non-Main connection survives even though it is not on
		// a Main path between trigger and destination.
		expect(subgraph).toEqual(graph);
	});

	// This graph is not possible, it's only here to make sure `findSubgraph`
	// does not follow non-Main connections.
	//
	// ┌────┐    ┌───────────┐
	// │root├───►│destination│
	// └──▲─┘    └───────────┘
	//    │AiLanguageModel
	//   ┌┴──────┐
	//   │aiModel│
	//   └▲──────┘
	//   ┌┴──────┐
	//   │trigger│
	//   └───────┘
	// turns into an empty graph, because there is no `Main` typed connection
	// connecting destination and trigger.
	test('skip non-Main connection types', () => {
		// ARRANGE
		const trigger = createNodeData({ name: 'trigger' });
		const root = createNodeData({ name: 'root' });
		const aiModel = createNodeData({ name: 'aiModel' });
		const destination = createNodeData({ name: 'destination' });
		const graph = new DirectedGraph()
			.addNodes(trigger, root, aiModel, destination)
			.addConnections(
				{ from: trigger, to: aiModel },
				{ from: aiModel, type: NodeConnectionType.AiLanguageModel, to: root },
				{ from: root, to: destination },
			);

		// ACT
		const subgraph = findSubgraph({ graph, destination, trigger });

		// ASSERT: no Main path exists from trigger to destination, so the
		// result is empty.
		expect(subgraph.getConnections()).toHaveLength(0);
		expect(subgraph.getNodes().size).toBe(0);
	});

	// ┌───────┐              ┌───────────┐
	// │trigger├─────────────►│destination│
	// └───────┘              └───────────┘
	//
	// ┌───────┐
	// │aiModel│
	// └───────┘
	// turns into
	// ┌───────┐              ┌───────────┐
	// │trigger├─────────────►│destination│
	// └───────┘              └───────────┘
	test('remove orphaned nodes', () => {
		// ARRANGE: aiModel has no connections at all.
		const trigger = createNodeData({ name: 'trigger' });
		const aiModel = createNodeData({ name: 'ai_model' });
		const destination = createNodeData({ name: 'destination' });
		const graph = new DirectedGraph()
			.addNodes(trigger, aiModel, destination)
			.addConnections({ from: trigger, to: destination });

		// ACT
		const subgraph = findSubgraph({ graph, destination, trigger });

		// ASSERT: the disconnected node is dropped.
		expect(subgraph).toEqual(
			new DirectedGraph()
				.addNodes(trigger, destination)
				.addConnections({ from: trigger, to: destination }),
		);
	});
});
});

View File

@@ -0,0 +1,516 @@
// NOTE: Diagrams in this file have been created with https://asciiflow.com/#/
// If you update the tests, please update the diagrams as well.
// If you add a test, please create a new diagram.
//
// Map
// 0 means the output has no run data
// 1 means the output has run data
// PD denotes that the node has pinned data
import type { IPinData } from 'n8n-workflow';
import { NodeConnectionType, type IRunData } from 'n8n-workflow';
import { createNodeData, toITaskData } from './helpers';
import { DirectedGraph } from '../directed-graph';
import { getSourceDataGroups } from '../get-source-data-groups';
describe('getSourceDataGroups', () => {
//┌───────┐1
//│source1├────┐
//└───────┘    │   ┌────┐
//┌───────┐1   ├──►│    │
//│source2├────┘   │node│
//└───────┘    ┌──►│    │
//┌───────┐1   │   └────┘
//│source3├────┘
//└───────┘
// source1 and source2 both feed input 0, source3 feeds input 1. A "group"
// pairs at most one source per input; with run data on all three sources
// this yields one complete pair (source1+source3) plus a leftover (source2).
it('groups sources into possibly complete sets if all of them have data', () => {
	// ARRANGE
	const source1 = createNodeData({ name: 'source1' });
	const source2 = createNodeData({ name: 'source2' });
	const source3 = createNodeData({ name: 'source3' });
	const node = createNodeData({ name: 'node' });
	const graph = new DirectedGraph()
		.addNodes(source1, source2, source3, node)
		.addConnections(
			{ from: source1, to: node, inputIndex: 0 },
			{ from: source2, to: node, inputIndex: 0 },
			{ from: source3, to: node, inputIndex: 1 },
		);
	const runData: IRunData = {
		[source1.name]: [toITaskData([{ data: { value: 1 } }])],
		[source2.name]: [toITaskData([{ data: { value: 1 } }])],
		[source3.name]: [toITaskData([{ data: { value: 1 } }])],
	};
	const pinnedData: IPinData = {};

	// ACT
	const groups = getSourceDataGroups(graph, node, runData, pinnedData);

	// ASSERT
	expect(groups).toHaveLength(2);

	const group1 = groups[0];
	expect(group1.connections).toHaveLength(2);
	expect(group1.connections[0]).toEqual({
		from: source1,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: 0,
		to: node,
	});
	expect(group1.connections[1]).toEqual({
		from: source3,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: 1,
		to: node,
	});

	const group2 = groups[1];
	expect(group2.connections).toHaveLength(1);
	expect(group2.connections[0]).toEqual({
		from: source2,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: 0,
		to: node,
	});
});
//┌───────┐PD
//│source1├────┐
//└───────┘    │   ┌────┐
//┌───────┐PD  ├──►│    │
//│source2├────┘   │node│
//└───────┘    ┌──►│    │
//┌───────┐PD  │   └────┘
//│source3├────┘
//└───────┘
// NOTE: This test's original title duplicated the previous test. It covers
// the same grouping logic, but driven by pinned data instead of run data.
it('groups sources into possibly complete sets if all of them have pinned data', () => {
	// ARRANGE
	const source1 = createNodeData({ name: 'source1' });
	const source2 = createNodeData({ name: 'source2' });
	const source3 = createNodeData({ name: 'source3' });
	const node = createNodeData({ name: 'node' });
	const graph = new DirectedGraph()
		.addNodes(source1, source2, source3, node)
		.addConnections(
			{ from: source1, to: node, inputIndex: 0 },
			{ from: source2, to: node, inputIndex: 0 },
			{ from: source3, to: node, inputIndex: 1 },
		);
	const runData: IRunData = {};
	const pinnedData: IPinData = {
		[source1.name]: [{ json: { value: 1 } }],
		[source2.name]: [{ json: { value: 2 } }],
		[source3.name]: [{ json: { value: 3 } }],
	};

	// ACT
	const groups = getSourceDataGroups(graph, node, runData, pinnedData);

	// ASSERT: pinned data counts as available data, so grouping matches the
	// run-data variant above.
	expect(groups).toHaveLength(2);

	const group1 = groups[0];
	expect(group1.connections).toHaveLength(2);
	expect(group1.connections[0]).toEqual({
		from: source1,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: 0,
		to: node,
	});
	expect(group1.connections[1]).toEqual({
		from: source3,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: 1,
		to: node,
	});

	const group2 = groups[1];
	expect(group2.connections).toHaveLength(1);
	expect(group2.connections[0]).toEqual({
		from: source2,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: 0,
		to: node,
	});
});
//┌───────┐0
//│source1├────┐
//└───────┘    │   ┌────┐
//┌───────┐1   ├──►│    │
//│source2├────┘   │node│
//└───────┘    ┌──►│    │
//┌───────┐1   │   └────┘
//│source3├────┘
//└───────┘
it('groups sources into one complete set with 2 connections and one incomplete set with 1 connection', () => {
	// ARRANGE: source1 has no data, so it can only form an incomplete group.
	const source1 = createNodeData({ name: 'source1' });
	const source2 = createNodeData({ name: 'source2' });
	const source3 = createNodeData({ name: 'source3' });
	const node = createNodeData({ name: 'node' });
	const graph = new DirectedGraph()
		.addNodes(source1, source2, source3, node)
		.addConnections(
			{ from: source1, to: node, inputIndex: 0 },
			{ from: source2, to: node, inputIndex: 0 },
			{ from: source3, to: node, inputIndex: 1 },
		);
	const runData: IRunData = {
		[source2.name]: [toITaskData([{ data: { value: 1 } }])],
		[source3.name]: [toITaskData([{ data: { value: 1 } }])],
	};
	const pinnedData: IPinData = {};

	// ACT
	const groups = getSourceDataGroups(graph, node, runData, pinnedData);

	// ASSERT
	const completeGroups = groups.filter((g) => g.complete);
	{
		expect(completeGroups).toHaveLength(1);
		const group1 = completeGroups[0];
		expect(group1.connections).toHaveLength(2);
		expect(group1.connections[0]).toEqual({
			from: source2,
			outputIndex: 0,
			type: NodeConnectionType.Main,
			inputIndex: 0,
			to: node,
		});
		expect(group1.connections[1]).toEqual({
			from: source3,
			outputIndex: 0,
			type: NodeConnectionType.Main,
			inputIndex: 1,
			to: node,
		});
	}

	const incompleteGroups = groups.filter((g) => !g.complete);
	{
		expect(incompleteGroups).toHaveLength(1);
		const group1 = incompleteGroups[0];
		expect(group1.connections).toHaveLength(1);
		expect(group1.connections[0]).toEqual({
			from: source1,
			outputIndex: 0,
			type: NodeConnectionType.Main,
			inputIndex: 0,
			to: node,
		});
	}
});

//┌───────┐0
//│source1├───────┐
//└───────┘       │
//                │
//┌───────┐1      │
//│source2├───────┤     ┌────┐
//└───────┘       └────►│    │
//                      │node│
//┌───────┐1      ┌────►│    │
//│source3├───────┤     └────┘
//└───────┘       │
//                │
//┌───────┐0      │
//│source4├───────┘
//└───────┘
// NOTE: The original title read "2 connection" — fixed for grammar.
it('groups sources into one complete set with 2 connections and one incomplete set with 2 connections', () => {
	// ARRANGE: both inputs have one source with data (source2/source3) and
	// one without (source1/source4).
	const source1 = createNodeData({ name: 'source1' });
	const source2 = createNodeData({ name: 'source2' });
	const source3 = createNodeData({ name: 'source3' });
	const source4 = createNodeData({ name: 'source4' });
	const node = createNodeData({ name: 'node' });
	const graph = new DirectedGraph()
		.addNodes(source1, source2, source3, source4, node)
		.addConnections(
			{ from: source1, to: node, inputIndex: 0 },
			{ from: source2, to: node, inputIndex: 0 },
			{ from: source3, to: node, inputIndex: 1 },
			{ from: source4, to: node, inputIndex: 1 },
		);
	const runData: IRunData = {
		[source2.name]: [toITaskData([{ data: { value: 1 } }])],
		[source3.name]: [toITaskData([{ data: { value: 1 } }])],
	};
	const pinnedData: IPinData = {};

	// ACT
	const groups = getSourceDataGroups(graph, node, runData, pinnedData);

	// ASSERT
	const completeGroups = groups.filter((g) => g.complete);
	{
		expect(completeGroups).toHaveLength(1);
		const group1 = completeGroups[0];
		expect(group1.connections).toHaveLength(2);
		expect(group1.connections[0]).toEqual({
			from: source2,
			outputIndex: 0,
			type: NodeConnectionType.Main,
			inputIndex: 0,
			to: node,
		});
		expect(group1.connections[1]).toEqual({
			from: source3,
			outputIndex: 0,
			type: NodeConnectionType.Main,
			inputIndex: 1,
			to: node,
		});
	}

	const incompleteGroups = groups.filter((g) => !g.complete);
	{
		expect(incompleteGroups).toHaveLength(1);
		const group1 = incompleteGroups[0];
		expect(group1.connections).toHaveLength(2);
		expect(group1.connections[0]).toEqual({
			from: source1,
			outputIndex: 0,
			type: NodeConnectionType.Main,
			inputIndex: 0,
			to: node,
		});
		expect(group1.connections[1]).toEqual({
			from: source4,
			outputIndex: 0,
			type: NodeConnectionType.Main,
			inputIndex: 1,
			to: node,
		});
	}
});

// ┌───────┐1
// │source1├───────┐
// └───────┘       │
//                 │
// ┌───────┐0      │
// │source2├───────┤     ┌────┐
// └───────┘       └────►│    │
//                       │node│
// ┌───────┐0      ┌────►│    │
// │source3├───────┘     └────┘
// └───────┘
it('groups sources into two incomplete sets, one with 1 connection without and one with 2 connections one with data and one without', () => {
	// ARRANGE: only source1 has data; no group can be complete.
	const source1 = createNodeData({ name: 'source1' });
	const source2 = createNodeData({ name: 'source2' });
	const source3 = createNodeData({ name: 'source3' });
	const node = createNodeData({ name: 'node' });
	const graph = new DirectedGraph()
		.addNodes(source1, source2, source3, node)
		.addConnections(
			{ from: source1, to: node, inputIndex: 0 },
			{ from: source2, to: node, inputIndex: 0 },
			{ from: source3, to: node, inputIndex: 1 },
		);
	const runData: IRunData = {
		[source1.name]: [toITaskData([{ data: { node: 'source1' } }])],
	};
	const pinnedData: IPinData = {};

	// ACT
	const groups = getSourceDataGroups(graph, node, runData, pinnedData);

	// ASSERT
	const completeGroups = groups.filter((g) => g.complete);
	expect(completeGroups).toHaveLength(0);

	const incompleteGroups = groups.filter((g) => !g.complete);
	expect(incompleteGroups).toHaveLength(2);

	const group1 = incompleteGroups[0];
	expect(group1.connections).toHaveLength(2);
	expect(group1.connections[0]).toEqual({
		from: source1,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: 0,
		to: node,
	});
	expect(group1.connections[1]).toEqual({
		from: source3,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: 1,
		to: node,
	});

	const group2 = incompleteGroups[1];
	expect(group2.connections).toHaveLength(1);
	expect(group2.connections[0]).toEqual({
		from: source2,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: 0,
		to: node,
	});
});
//            ┌─────┐1      ►►
//         ┌─►│Node1┼──┐   ┌─────┐
// ┌───────┐1 └─────┘  └──►│     │
// │Trigger├─┤             │Node3│
// └───────┘ │ ┌─────┐0 ┌─►│     │
//         └─►│Node2├───┘  └─────┘
//            └─────┘
test('return an incomplete group when there is no data on input 2', () => {
	// ARRANGE: node2 (feeding input 2) has not run yet.
	const trigger = createNodeData({ name: 'trigger' });
	const node1 = createNodeData({ name: 'node1' });
	const node2 = createNodeData({ name: 'node2' });
	const node3 = createNodeData({ name: 'node3' });
	const graph = new DirectedGraph()
		.addNodes(trigger, node1, node2, node3)
		.addConnections(
			{ from: trigger, to: node1 },
			{ from: trigger, to: node2 },
			{ from: node1, to: node3, inputIndex: 0 },
			{ from: node2, to: node3, inputIndex: 1 },
		);
	const runData: IRunData = {
		[trigger.name]: [toITaskData([{ data: { nodeName: 'trigger' } }])],
		[node1.name]: [toITaskData([{ data: { nodeName: 'node1' } }])],
	};
	const pinData: IPinData = {};

	// ACT
	const groups = getSourceDataGroups(graph, node3, runData, pinData);

	// ASSERT: one group covering both inputs, flagged incomplete.
	expect(groups).toHaveLength(1);
	const group1 = groups[0];
	expect(group1.connections).toHaveLength(2);
	expect(group1.complete).toEqual(false);
});

//            ┌─────┐0      ►►
//         ┌─►│Node1┼──┐   ┌─────┐
// ┌───────┐1 └─────┘  └──►│     │
// │Trigger├─┤             │Node3│
// └───────┘ │ ┌─────┐1 ┌─►│     │
//         └─►│Node2├───┘  └─────┘
//            └─────┘
test('return an incomplete group when there is no data on input 1', () => {
	// ARRANGE: mirror of the previous test — node1 (input 1) has no data.
	const trigger = createNodeData({ name: 'trigger' });
	const node1 = createNodeData({ name: 'node1' });
	const node2 = createNodeData({ name: 'node2' });
	const node3 = createNodeData({ name: 'node3' });
	const graph = new DirectedGraph()
		.addNodes(trigger, node1, node2, node3)
		.addConnections(
			{ from: trigger, to: node1 },
			{ from: trigger, to: node2 },
			{ from: node1, to: node3, inputIndex: 0 },
			{ from: node2, to: node3, inputIndex: 1 },
		);
	const runData: IRunData = {
		[trigger.name]: [toITaskData([{ data: { nodeName: 'trigger' } }])],
		[node2.name]: [toITaskData([{ data: { nodeName: 'node2' } }])],
	};
	const pinData: IPinData = {};

	// ACT
	const groups = getSourceDataGroups(graph, node3, runData, pinData);

	// ASSERT
	expect(groups).toHaveLength(1);
	const group1 = groups[0];
	expect(group1.connections).toHaveLength(2);
	expect(group1.complete).toEqual(false);
});
it('terminates with negative input indexes', () => {
	// ARRANGE: a (nonsensical) negative input index must neither crash nor
	// hang the grouping; the connection is passed through as-is.
	const source = createNodeData({ name: 'source1' });
	const sink = createNodeData({ name: 'node' });
	const graph = new DirectedGraph()
		.addNodes(source, sink)
		.addConnections({ from: source, to: sink, inputIndex: -1 });
	const runData: IRunData = {
		[source.name]: [toITaskData([{ data: { node: source.name } }])],
	};

	// ACT
	const groups = getSourceDataGroups(graph, sink, runData, {});

	// ASSERT
	expect(groups).toHaveLength(1);
	const [onlyGroup] = groups;
	expect(onlyGroup.connections).toHaveLength(1);
	expect(onlyGroup.connections[0]).toEqual({
		from: source,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: -1,
		to: sink,
	});
});

it('terminates inputs with missing connections', () => {
	// ARRANGE: input 0 of the sink has no connection at all; only input 1
	// is wired up.
	const source = createNodeData({ name: 'source1' });
	const sink = createNodeData({ name: 'node' });
	const graph = new DirectedGraph()
		.addNodes(source, sink)
		.addConnections({ from: source, to: sink, inputIndex: 1 });
	const runData: IRunData = {
		[source.name]: [toITaskData([{ data: { node: source.name } }])],
	};

	// ACT
	const groups = getSourceDataGroups(graph, sink, runData, {});

	// ASSERT
	expect(groups).toHaveLength(1);
	const [onlyGroup] = groups;
	expect(onlyGroup.connections).toHaveLength(1);
	expect(onlyGroup.connections[0]).toEqual({
		from: source,
		outputIndex: 0,
		type: NodeConnectionType.Main,
		inputIndex: 1,
		to: sink,
	});
});

it('terminates if the graph has no connections', () => {
	// ARRANGE: two disconnected nodes.
	const source = createNodeData({ name: 'source1' });
	const sink = createNodeData({ name: 'node' });
	const graph = new DirectedGraph().addNodes(source, sink);
	const runData: IRunData = {
		[source.name]: [toITaskData([{ data: { node: source.name } }])],
	};

	// ACT + ASSERT: nothing feeds the sink, so there are no groups.
	expect(getSourceDataGroups(graph, sink, runData, {})).toHaveLength(0);
});
});

View File

@@ -0,0 +1,116 @@
// NOTE: Diagrams in this file have been created with https://asciiflow.com/#/
// If you update the tests, please update the diagrams as well.
// If you add a test, please create a new diagram.
//
// Map
// 0 means the output has no run data
// 1 means the output has run data
// ►► denotes the node that the user wants to execute to
// XX denotes that the node is disabled
// PD denotes that the node has pinned data
import { createNodeData } from './helpers';
import { DirectedGraph } from '../directed-graph';
import { handleCycles } from '../handle-cycles';
describe('handleCycles', () => {
	// When a chosen start node sits inside a cycle, partial execution must
	// instead restart at the cycle's entry node (the "loop" node), because
	// nodes inside the cycle cannot be resumed in isolation.
	//                  ┌────┐       ┌─────────┐
	//┌───────┐         │    ├──────►│afterLoop│
	//│trigger├────┬───►│loop│       └─────────┘
	//└───────┘    │    │    ├─┐      ►►
	//             │    └────┘ │    ┌──────┐
	//             │           └───►│inLoop├────┐
	//             │                └──────┘    │
	//             │                            │
	//             └────────────────────────────┘
	test('if the start node is within a cycle it returns the start of the cycle as the new start node', () => {
		// ARRANGE
		const trigger = createNodeData({ name: 'trigger' });
		const loop = createNodeData({ name: 'loop' });
		const inLoop = createNodeData({ name: 'inLoop' });
		const afterLoop = createNodeData({ name: 'afterLoop' });
		const graph = new DirectedGraph()
			.addNodes(trigger, loop, inLoop, afterLoop)
			.addConnections(
				{ from: trigger, to: loop },
				{ from: loop, outputIndex: 0, to: afterLoop },
				{ from: loop, outputIndex: 1, to: inLoop },
				{ from: inLoop, to: loop },
			);
		const startNodes = new Set([inLoop]);

		// ACT
		const newStartNodes = handleCycles(graph, startNodes, trigger);

		// ASSERT: inLoop is replaced by the cycle entry, loop.
		expect(newStartNodes.size).toBe(1);
		expect(newStartNodes).toContainEqual(loop);
	});

	//                  ┌────┐       ┌─────────┐
	//┌───────┐         │    ├──────►│afterLoop│
	//│trigger├────┬───►│loop│       └─────────┘
	//└───────┘    │    │    ├─┐      ►►
	//             │    └────┘ │    ┌──────┐
	//             │           └───►│inLoop├────┐
	//             │                └──────┘    │
	//             │                            │
	//             └────────────────────────────┘
	test('does not mutate `startNodes`', () => {
		// ARRANGE
		const trigger = createNodeData({ name: 'trigger' });
		const loop = createNodeData({ name: 'loop' });
		const inLoop = createNodeData({ name: 'inLoop' });
		const afterLoop = createNodeData({ name: 'afterLoop' });
		const graph = new DirectedGraph()
			.addNodes(trigger, loop, inLoop, afterLoop)
			.addConnections(
				{ from: trigger, to: loop },
				{ from: loop, outputIndex: 0, to: afterLoop },
				{ from: loop, outputIndex: 1, to: inLoop },
				{ from: inLoop, to: loop },
			);
		const startNodes = new Set([inLoop]);

		// ACT: return value intentionally ignored.
		handleCycles(graph, startNodes, trigger);

		// ASSERT: the input set is unchanged.
		expect(startNodes.size).toBe(1);
		expect(startNodes).toContainEqual(inLoop);
	});

	//                                ►►
	//                  ┌────┐       ┌─────────┐
	//┌───────┐         │    ├──────►│afterLoop│
	//│trigger├────┬───►│loop│       └─────────┘
	//└───────┘    │    │    ├─┐
	//             │    └────┘ │    ┌──────┐
	//             │           └───►│inLoop├────┐
	//             │                └──────┘    │
	//             │                            │
	//             └────────────────────────────┘
	test('if the start node is not within a cycle it returns the same node as the new start node', () => {
		// ARRANGE
		const trigger = createNodeData({ name: 'trigger' });
		const loop = createNodeData({ name: 'loop' });
		const inLoop = createNodeData({ name: 'inLoop' });
		const afterLoop = createNodeData({ name: 'afterLoop' });
		const graph = new DirectedGraph()
			.addNodes(trigger, loop, inLoop, afterLoop)
			.addConnections(
				{ from: trigger, to: loop },
				{ from: loop, outputIndex: 0, to: afterLoop },
				{ from: loop, outputIndex: 1, to: inLoop },
				{ from: inLoop, to: loop },
			);
		const startNodes = new Set([afterLoop]);

		// ACT
		const newStartNodes = handleCycles(graph, startNodes, trigger);

		// ASSERT: afterLoop is outside the cycle and is kept as-is.
		expect(newStartNodes.size).toBe(1);
		expect(newStartNodes).toContainEqual(afterLoop);
	});
});

View File

@@ -0,0 +1,100 @@
import { NodeConnectionType } from 'n8n-workflow';
import type { INodeParameters, INode, ITaskData, IDataObject, IConnections } from 'n8n-workflow';
interface StubNode {
	name: string;
	parameters?: INodeParameters;
	disabled?: boolean;
	type?: 'n8n-nodes-base.manualTrigger' | 'n8n-nodes-base.splitInBatches' | (string & {});
}

/**
 * Builds a complete INode from a partial stub, so tests only need to
 * specify the fields they actually care about. All remaining fields are
 * filled with fixed defaults (id/position are arbitrary but stable).
 */
export function createNodeData(stubData: StubNode): INode {
	const { name, parameters, disabled, type } = stubData;

	return {
		name,
		parameters: parameters ?? {},
		type: type ?? 'n8n-nodes-base.set',
		typeVersion: 1,
		id: 'uuid-1234',
		position: [100, 100],
		disabled: disabled ?? false,
	};
}
type TaskData = {
data: IDataObject;
outputIndex?: number;
nodeConnectionType?: NodeConnectionType;
};
export function toITaskData(taskData: TaskData[]): ITaskData {
const result: ITaskData = {
executionStatus: 'success',
executionTime: 0,
startTime: 0,
source: [],
data: {},
};
// NOTE: Here to make TS happy.
result.data = result.data ?? {};
for (const taskDatum of taskData) {
const type = taskDatum.nodeConnectionType ?? NodeConnectionType.Main;
const outputIndex = taskDatum.outputIndex ?? 0;
result.data[type] = result.data[type] ?? [];
const dataConnection = result.data[type];
dataConnection[outputIndex] = [{ json: taskDatum.data }];
}
for (const [type, dataConnection] of Object.entries(result.data)) {
for (const [index, maybe] of dataConnection.entries()) {
result.data[type][index] = maybe ?? null;
}
}
return result;
}
// Shared jest-mocked INodeTypes stub for constructing Workflow instances
// in tests. The mocks return undefined unless configured per-test.
export const nodeTypes = {
	getByName: jest.fn(),
	getByNameAndVersion: jest.fn(),
	getKnownTypes: jest.fn(),
};

// Minimal Workflow constructor parameters for tests that don't care about
// activation state or real node types.
export const defaultWorkflowParameter = {
	active: false,
	nodeTypes,
};

// Simplified connection description used by `toIConnections`; type, output
// and input indexes default to Main/0/0 when omitted.
type Connection = {
	from: INode;
	to: INode;
	type?: NodeConnectionType;
	outputIndex?: number;
	inputIndex?: number;
};
/**
 * Converts the simplified test connection format into the IConnections
 * structure consumed by `Workflow`.
 *
 * Fixes a latent bug: the per-type array was only created together with the
 * per-node entry (`?? { [type]: [] }`), so a second connection from the same
 * node with a *different* connection type crashed with a TypeError when
 * indexing the missing type array.
 */
export function toIConnections(connections: Connection[]): IConnections {
	const result: IConnections = {};

	for (const connection of connections) {
		const type = connection.type ?? NodeConnectionType.Main;
		const outputIndex = connection.outputIndex ?? 0;
		const inputIndex = connection.inputIndex ?? 0;

		// Initialize containers lazily. The per-type array must be ensured on
		// every iteration (not only when the node entry is first created),
		// because one node may have connections of several types.
		result[connection.from.name] = result[connection.from.name] ?? {};
		const resultConnection = result[connection.from.name];
		resultConnection[type] = resultConnection[type] ?? [];
		resultConnection[type][outputIndex] = resultConnection[type][outputIndex] ?? [];

		const group = resultConnection[type][outputIndex];
		group.push({
			node: connection.to.name,
			type,
			index: inputIndex,
		});
	}

	return result;
}

View File

@@ -0,0 +1,816 @@
// NOTE: Diagrams in this file have been created with https://asciiflow.com/#/
// If you update the tests, please update the diagrams as well.
// If you add a test, please create a new diagram.
//
// Map
// 0 means the output has no run data
// 1 means the output has run data
// ►► denotes the node that the user wants to execute to
// XX denotes that the node is disabled
// PD denotes that the node has pinned data
import { AssertionError } from 'assert';
import type {
INodeExecutionData,
ISourceData,
IWaitingForExecution,
IWaitingForExecutionSource,
} from 'n8n-workflow';
import { NodeConnectionType, type IPinData, type IRunData } from 'n8n-workflow';
import { createNodeData, toITaskData } from './helpers';
import { DirectedGraph } from '../directed-graph';
import { findSubgraph } from '../find-subgraph';
import {
addWaitingExecution,
addWaitingExecutionSource,
recreateNodeExecutionStack,
} from '../recreate-node-execution-stack';
describe('recreateNodeExecutionStack', () => {
//                     ►►
// ┌───────┐1         ┌────┐
// │Trigger├─────────►│Node│
// └───────┘          └────┘
test('all nodes except destination node have data', () => {
	// ARRANGE: the trigger already ran, so the destination can be placed on
	// the execution stack with the trigger's output as its input data.
	const trigger = createNodeData({ name: 'trigger' });
	const node = createNodeData({ name: 'node' });
	const graph = new DirectedGraph()
		.addNodes(trigger, node)
		.addConnections({ from: trigger, to: node });
	const workflow = findSubgraph({ graph, destination: node, trigger });
	const startNodes = new Set([node]);
	const runData: IRunData = {
		[trigger.name]: [toITaskData([{ data: { value: 1 } }])],
	};
	const pinData = {};

	// ACT
	const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
		recreateNodeExecutionStack(workflow, startNodes, runData, pinData);

	// ASSERT
	expect(nodeExecutionStack).toHaveLength(1);
	expect(nodeExecutionStack).toEqual([
		{
			data: { main: [[{ json: { value: 1 } }]] },
			node,
			source: {
				main: [
					{
						// TODO: not part of ISourceDate, but maybe it should be?
						//currentNodeInput: 0,
						previousNode: 'trigger',
						previousNodeOutput: 0,
						previousNodeRun: 0,
					},
				],
			},
		},
	]);
	// Nothing is waiting on additional inputs in this linear graph.
	expect(waitingExecution).toEqual({});
	expect(waitingExecutionSource).toEqual({});
});
// ►►
// ┌───────┐0 ┌────┐
// │Trigger├──────►│Node│
// └───────┘ └────┘
test('no nodes have data', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node = createNodeData({ name: 'node' });
const workflow = new DirectedGraph()
.addNodes(trigger, node)
.addConnections({ from: trigger, to: node });
const startNodes = new Set([trigger]);
const runData: IRunData = {};
const pinData: IPinData = {};
// ACT
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
recreateNodeExecutionStack(workflow, startNodes, runData, pinData);
// ASSERT
expect(nodeExecutionStack).toHaveLength(1);
expect(nodeExecutionStack).toEqual([
{
data: { main: [[{ json: {} }]] },
node: trigger,
source: null,
},
]);
expect(waitingExecution).toEqual({});
expect(waitingExecutionSource).toEqual({});
});
// PinData ►►
// ┌───────┐1 ┌────┐
// │Trigger├──────►│Node│
// └───────┘ └────┘
test('node before destination node has pinned data', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node = createNodeData({ name: 'node' });
const workflow = new DirectedGraph()
.addNodes(trigger, node)
.addConnections({ from: trigger, to: node });
const startNodes = new Set([node]);
const runData: IRunData = {};
const pinData: IPinData = {
[trigger.name]: [{ json: { value: 1 } }],
};
// ACT
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
recreateNodeExecutionStack(workflow, startNodes, runData, pinData);
// ASSERT
expect(nodeExecutionStack).toHaveLength(1);
expect(nodeExecutionStack).toEqual([
{
data: { main: [[{ json: { value: 1 } }]] },
node,
source: {
main: [
{
// TODO: not part of ISourceDate, but maybe it should be?
//currentNodeInput: 0,
previousNode: trigger.name,
previousNodeRun: 0,
previousNodeOutput: 0,
},
],
},
},
]);
expect(waitingExecution).toEqual({});
expect(waitingExecutionSource).toEqual({});
});
// XX ►►
// ┌───────┐1 ┌─────┐ ┌─────┐
// │Trigger├─────►│Node1├──────►│Node2│
// └───────┘ └─────┘ └─────┘
test('throws if a disabled node is found', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node1 = createNodeData({ name: 'node1', disabled: true });
const node2 = createNodeData({ name: 'node2' });
const graph = new DirectedGraph()
.addNodes(trigger, node1, node2)
.addConnections({ from: trigger, to: node1 }, { from: node1, to: node2 });
const startNodes = new Set([node2]);
const runData: IRunData = {
[trigger.name]: [toITaskData([{ data: { value: 1 } }])],
};
const pinData = {};
// ACT & ASSERT
expect(() => recreateNodeExecutionStack(graph, startNodes, runData, pinData)).toThrowError(
AssertionError,
);
});
// ►►
// ┌───────┐1 ┌─────┐1 ┌─────┐
// │Trigger├──┬──►│Node1├──┬───►│Node3│
// └───────┘ │ └─────┘ │ └─────┘
// │ │
// │ ┌─────┐1 │
// └──►│Node2├──┘
// └─────┘
test('multiple incoming connections', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node1 = createNodeData({ name: 'node1' });
const node2 = createNodeData({ name: 'node2' });
const node3 = createNodeData({ name: 'node3' });
const graph = new DirectedGraph()
.addNodes(trigger, node1, node2, node3)
.addConnections(
{ from: trigger, to: node1 },
{ from: trigger, to: node2 },
{ from: node1, to: node3 },
{ from: node2, to: node3 },
);
const startNodes = new Set([node3]);
const runData: IRunData = {
[trigger.name]: [toITaskData([{ data: { value: 1 } }])],
[node1.name]: [toITaskData([{ data: { value: 1 } }])],
[node2.name]: [toITaskData([{ data: { value: 1 } }])],
};
const pinData = {};
// ACT
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
// ASSERT
expect(nodeExecutionStack).toEqual([
{
data: { main: [[{ json: { value: 1 } }]] },
node: node3,
source: {
main: [
{
// TODO: not part of ISourceDate, but maybe it should be?
//currentNodeInput: 0,
previousNode: 'node1',
previousNodeOutput: 0,
previousNodeRun: 0,
},
],
},
},
{
data: { main: [[{ json: { value: 1 } }]] },
node: node3,
source: {
main: [
{
// TODO: not part of ISourceDate, but maybe it should be?
//currentNodeInput: 0,
previousNode: 'node2',
previousNodeOutput: 0,
previousNodeRun: 0,
},
],
},
},
]);
expect(waitingExecution).toEqual({});
expect(waitingExecutionSource).toEqual({});
});
// ┌─────┐1 ►►
// ┌─►│node1├───┐ ┌─────┐
// ┌───────┐1 │ └─────┘ └──►│ │
// │Trigger├──┤ │node3│
// └───────┘ │ ┌─────┐1 ┌──►│ │
// └─►│node2├───┘ └─────┘
// └─────┘
test('multiple inputs', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node1 = createNodeData({ name: 'node1' });
const node2 = createNodeData({ name: 'node2' });
const node3 = createNodeData({ name: 'node3' });
const graph = new DirectedGraph()
.addNodes(trigger, node1, node2, node3)
.addConnections(
{ from: trigger, to: node1 },
{ from: trigger, to: node2 },
{ from: node1, to: node3, inputIndex: 0 },
{ from: node2, to: node3, inputIndex: 1 },
);
const startNodes = new Set([node3]);
const runData: IRunData = {
[trigger.name]: [toITaskData([{ data: { value: 1 } }])],
[node1.name]: [toITaskData([{ data: { value: 1 } }])],
[node2.name]: [toITaskData([{ data: { value: 1 } }])],
};
const pinData: IPinData = {
[trigger.name]: [{ json: { value: 1 } }],
};
// ACT
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
// ASSERT
expect(nodeExecutionStack).toHaveLength(1);
expect(nodeExecutionStack[0]).toEqual({
data: { main: [[{ json: { value: 1 } }], [{ json: { value: 1 } }]] },
node: node3,
source: {
main: [
{ previousNode: 'node1', previousNodeOutput: 0, previousNodeRun: 0 },
{ previousNode: 'node2', previousNodeOutput: 0, previousNodeRun: 0 },
],
},
});
expect(waitingExecution).toEqual({});
expect(waitingExecutionSource).toEqual({});
});
// ┌─────┐ ┌─────┐
// ┌──►node1┼────┬──────► │
// │ └─────┘ │ │merge│
// │ │ ┌───► │
// ├─────────────┘ │ └─────┘
// │ │
//┌───────┐ │ ┌─────┐ │
//│trigger├───┴────►node2├─────┘
//└───────┘ └─────┘
describe('multiple inputs', () => {
// ARRANGE
const trigger = createNodeData({ name: 'trigger' });
const node1 = createNodeData({ name: 'node1' });
const node2 = createNodeData({ name: 'node2' });
const merge = createNodeData({ name: 'merge' });
const graph = new DirectedGraph()
.addNodes(trigger, node1, node2, merge)
.addConnections(
{ from: trigger, to: node1 },
{ from: trigger, to: node2 },
{ from: trigger, to: merge, inputIndex: 0 },
{ from: node1, to: merge, inputIndex: 0 },
{ from: node2, to: merge, inputIndex: 1 },
);
test('only the trigger has run data', () => {
// ARRANGE
const runData: IRunData = {
[trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])],
};
const pinData: IPinData = {};
const startNodes = new Set([node1, node2, merge]);
// ACT
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
// ASSERT
expect(nodeExecutionStack).toHaveLength(2);
expect(nodeExecutionStack[0]).toEqual({
node: node1,
data: { main: [[{ json: { node: 'trigger' } }]] },
source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] },
});
expect(nodeExecutionStack[1]).toEqual({
node: node2,
data: { main: [[{ json: { node: 'trigger' } }]] },
source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] },
});
expect(waitingExecution).toEqual({
[merge.name]: {
'0': {
main: [[{ json: { node: 'trigger' } }]],
},
},
});
expect(waitingExecutionSource).toEqual({
[merge.name]: {
'0': {
main: [
{
previousNode: 'trigger',
previousNodeOutput: 0,
previousNodeRun: 0,
},
],
},
},
});
});
test('the trigger and node1 have run data', () => {
// ARRANGE
const runData: IRunData = {
[trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])],
[node1.name]: [toITaskData([{ data: { node: 'node1' } }])],
};
const pinData: IPinData = {};
const startNodes = new Set([node2, merge]);
// ACT
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
// ASSERT
expect(nodeExecutionStack).toHaveLength(2);
expect(nodeExecutionStack[0]).toEqual({
node: node2,
data: { main: [[{ json: { node: 'trigger' } }]] },
source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] },
});
expect(nodeExecutionStack[1]).toEqual({
node: merge,
data: { main: [[{ json: { node: 'trigger' } }]] },
source: {
main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }],
},
});
expect(waitingExecution).toEqual({
[merge.name]: {
'0': {
main: [[{ json: { node: 'node1' } }]],
},
},
});
expect(waitingExecutionSource).toEqual({
[merge.name]: {
'0': {
main: [
{
previousNode: 'node1',
previousNodeOutput: 0,
previousNodeRun: 0,
},
],
},
},
});
});
test('the trigger and node2 have run data', () => {
// ARRANGE
const runData: IRunData = {
[trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])],
[node2.name]: [toITaskData([{ data: { node: 'node2' } }])],
};
const pinData: IPinData = {};
const startNodes = new Set([node1, merge]);
// ACT
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
// ASSERT
expect(nodeExecutionStack).toHaveLength(2);
expect(nodeExecutionStack[0]).toEqual({
node: node1,
data: { main: [[{ json: { node: 'trigger' } }]] },
source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] },
});
expect(nodeExecutionStack[1]).toEqual({
node: merge,
data: { main: [[{ json: { node: 'trigger' } }], [{ json: { node: 'node2' } }]] },
source: {
main: [
{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 },
{ previousNode: 'node2', previousNodeOutput: 0, previousNodeRun: 0 },
],
},
});
expect(waitingExecution).toEqual({});
expect(waitingExecutionSource).toEqual({});
});
test('the trigger, node1 and node2 have run data', () => {
// ARRANGE
const runData: IRunData = {
[trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])],
[node1.name]: [toITaskData([{ data: { node: 'node1' } }])],
[node2.name]: [toITaskData([{ data: { node: 'node2' } }])],
};
const pinData: IPinData = {};
const startNodes = new Set([merge]);
// ACT
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
// ASSERT
expect(nodeExecutionStack).toHaveLength(2);
expect(nodeExecutionStack[0]).toEqual({
node: merge,
data: { main: [[{ json: { node: 'node1' } }], [{ json: { node: 'node2' } }]] },
source: {
main: [
{ previousNode: 'node1', previousNodeOutput: 0, previousNodeRun: 0 },
{ previousNode: 'node2', previousNodeOutput: 0, previousNodeRun: 0 },
],
},
});
expect(nodeExecutionStack[1]).toEqual({
node: merge,
data: { main: [[{ json: { node: 'trigger' } }]] },
source: {
main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }],
},
});
expect(waitingExecution).toEqual({});
expect(waitingExecutionSource).toEqual({});
});
});
});
// Tests that `addWaitingExecution` builds the nested waiting-execution map
// (node name -> run index -> connection type -> input index) incrementally,
// leaving `undefined` holes for inputs that have not received data yet.
describe('addWaitingExecution', () => {
	test('allow adding data partially', () => {
		const waitingExecution: IWaitingForExecution = {};
		const nodeName1 = 'node 1';
		const nodeName2 = 'node 2';
		const executionData: INodeExecutionData[] = [{ json: { item: 1 } }, { json: { item: 2 } }];
		// adding the data for the second input index first
		{
			addWaitingExecution(
				waitingExecution,
				nodeName1,
				1, // runIndex
				NodeConnectionType.Main,
				1, // inputIndex
				executionData,
			);
			expect(waitingExecution).toEqual({
				[nodeName1]: {
					// runIndex
					1: {
						// input 0 is still an `undefined` hole
						[NodeConnectionType.Main]: [undefined, executionData],
					},
				},
			});
		}
		// adding the data for the first input
		{
			addWaitingExecution(
				waitingExecution,
				nodeName1,
				1, // runIndex
				NodeConnectionType.Main,
				0, // inputIndex
				executionData,
			);
			expect(waitingExecution).toEqual({
				[nodeName1]: {
					// runIndex
					1: {
						[NodeConnectionType.Main]: [executionData, executionData],
					},
				},
			});
		}
		// adding data for another node connection type
		{
			addWaitingExecution(
				waitingExecution,
				nodeName1,
				1, // runIndex
				NodeConnectionType.AiMemory,
				0, // inputIndex
				executionData,
			);
			expect(waitingExecution).toEqual({
				[nodeName1]: {
					// runIndex
					1: {
						[NodeConnectionType.Main]: [executionData, executionData],
						[NodeConnectionType.AiMemory]: [executionData],
					},
				},
			});
		}
		// adding data for another run
		{
			addWaitingExecution(
				waitingExecution,
				nodeName1,
				0, // runIndex
				NodeConnectionType.AiChain,
				0, // inputIndex
				executionData,
			);
			expect(waitingExecution).toEqual({
				[nodeName1]: {
					// runIndex
					0: {
						[NodeConnectionType.AiChain]: [executionData],
					},
					1: {
						[NodeConnectionType.Main]: [executionData, executionData],
						[NodeConnectionType.AiMemory]: [executionData],
					},
				},
			});
		}
		// adding data for another node
		{
			addWaitingExecution(
				waitingExecution,
				nodeName2,
				0, // runIndex
				NodeConnectionType.Main,
				2, // inputIndex
				executionData,
			);
			expect(waitingExecution).toEqual({
				[nodeName1]: {
					// runIndex
					0: {
						[NodeConnectionType.AiChain]: [executionData],
					},
					1: {
						[NodeConnectionType.Main]: [executionData, executionData],
						[NodeConnectionType.AiMemory]: [executionData],
					},
				},
				[nodeName2]: {
					// runIndex
					0: {
						[NodeConnectionType.Main]: [undefined, undefined, executionData],
					},
				},
			});
		}
		// allow adding null
		{
			addWaitingExecution(
				waitingExecution,
				nodeName2,
				0, // runIndex
				NodeConnectionType.Main,
				0, // inputIndex
				null,
			);
			expect(waitingExecution).toEqual({
				[nodeName2]: {
					// runIndex
					0: {
						// `null` is a valid value and must not be dropped
						[NodeConnectionType.Main]: [null, undefined, executionData],
					},
				},
				[nodeName1]: {
					// runIndex
					0: {
						[NodeConnectionType.AiChain]: [executionData],
					},
					1: {
						[NodeConnectionType.Main]: [executionData, executionData],
						[NodeConnectionType.AiMemory]: [executionData],
					},
				},
			});
		}
	});
});
// Mirror of the `addWaitingExecution` suite for the source-tracking map:
// `addWaitingExecutionSource` stores `ISourceData` instead of execution data
// in the same nested (node -> run -> type -> input) structure.
describe('addWaitingExecutionSource', () => {
	test('allow adding data partially', () => {
		const waitingExecutionSource: IWaitingForExecutionSource = {};
		const nodeName1 = 'node 1';
		const nodeName2 = 'node 2';
		const sourceData: ISourceData = {
			previousNode: 'node 0',
			previousNodeRun: 0,
			previousNodeOutput: 0,
		};
		// adding the data for the second input index first
		{
			addWaitingExecutionSource(
				waitingExecutionSource,
				nodeName1,
				1, // runIndex
				NodeConnectionType.Main,
				1, // inputIndex
				sourceData,
			);
			expect(waitingExecutionSource).toEqual({
				[nodeName1]: {
					// runIndex
					1: {
						// input 0 is still an `undefined` hole
						[NodeConnectionType.Main]: [undefined, sourceData],
					},
				},
			});
		}
		// adding the data for the first input
		{
			addWaitingExecutionSource(
				waitingExecutionSource,
				nodeName1,
				1, // runIndex
				NodeConnectionType.Main,
				0, // inputIndex
				sourceData,
			);
			expect(waitingExecutionSource).toEqual({
				[nodeName1]: {
					// runIndex
					1: {
						[NodeConnectionType.Main]: [sourceData, sourceData],
					},
				},
			});
		}
		// adding data for another node connection type
		{
			addWaitingExecutionSource(
				waitingExecutionSource,
				nodeName1,
				1, // runIndex
				NodeConnectionType.AiMemory,
				0, // inputIndex
				sourceData,
			);
			expect(waitingExecutionSource).toEqual({
				[nodeName1]: {
					// runIndex
					1: {
						[NodeConnectionType.Main]: [sourceData, sourceData],
						[NodeConnectionType.AiMemory]: [sourceData],
					},
				},
			});
		}
		// adding data for another run
		{
			addWaitingExecutionSource(
				waitingExecutionSource,
				nodeName1,
				0, // runIndex
				NodeConnectionType.AiChain,
				0, // inputIndex
				sourceData,
			);
			expect(waitingExecutionSource).toEqual({
				[nodeName1]: {
					// runIndex
					0: {
						[NodeConnectionType.AiChain]: [sourceData],
					},
					1: {
						[NodeConnectionType.Main]: [sourceData, sourceData],
						[NodeConnectionType.AiMemory]: [sourceData],
					},
				},
			});
		}
		// adding data for another node
		{
			addWaitingExecutionSource(
				waitingExecutionSource,
				nodeName2,
				0, // runIndex
				NodeConnectionType.Main,
				2, // inputIndex
				sourceData,
			);
			expect(waitingExecutionSource).toEqual({
				[nodeName1]: {
					// runIndex
					0: {
						[NodeConnectionType.AiChain]: [sourceData],
					},
					1: {
						[NodeConnectionType.Main]: [sourceData, sourceData],
						[NodeConnectionType.AiMemory]: [sourceData],
					},
				},
				[nodeName2]: {
					// runIndex
					0: {
						[NodeConnectionType.Main]: [undefined, undefined, sourceData],
					},
				},
			});
		}
		// allow adding null
		{
			addWaitingExecutionSource(
				waitingExecutionSource,
				nodeName2,
				0, // runIndex
				NodeConnectionType.Main,
				0, // inputIndex
				null,
			);
			expect(waitingExecutionSource).toEqual({
				[nodeName1]: {
					// runIndex
					0: {
						[NodeConnectionType.AiChain]: [sourceData],
					},
					1: {
						[NodeConnectionType.Main]: [sourceData, sourceData],
						[NodeConnectionType.AiMemory]: [sourceData],
					},
				},
				[nodeName2]: {
					// runIndex
					0: {
						// `null` is a valid value and must not be dropped
						[NodeConnectionType.Main]: [null, undefined, sourceData],
					},
				},
			});
		}
	});
});

View File

@@ -0,0 +1,27 @@
import { NodeConnectionType } from 'n8n-workflow';
import { createNodeData, toIConnections } from './helpers';
test('toIConnections', () => {
const node1 = createNodeData({ name: 'Basic Node 1' });
const node2 = createNodeData({ name: 'Basic Node 2' });
expect(
toIConnections([{ from: node1, to: node2, type: NodeConnectionType.Main, outputIndex: 0 }]),
).toEqual({
[node1.name]: {
// output group
main: [
// first output
[
// first connection
{
node: node2.name,
type: NodeConnectionType.Main,
index: 0,
},
],
],
},
});
});

View File

@@ -0,0 +1,65 @@
import { NodeConnectionType } from 'n8n-workflow';
import { toITaskData } from './helpers';
// Tests the `toITaskData` helper, which wraps plain JSON payloads into the
// ITaskData shape (status, timings, source, and per-output data arrays).
test('toITaskData', function () {
	// Single item lands on the first "main" output by default.
	expect(toITaskData([{ data: { value: 1 } }])).toEqual({
		executionStatus: 'success',
		executionTime: 0,
		source: [],
		startTime: 0,
		data: {
			main: [[{ json: { value: 1 } }]],
		},
	});
	// An explicit outputIndex pads earlier outputs with `null`.
	expect(toITaskData([{ data: { value: 1 }, outputIndex: 1 }])).toEqual({
		executionStatus: 'success',
		executionTime: 0,
		source: [],
		startTime: 0,
		data: {
			main: [null, [{ json: { value: 1 } }]],
		},
	});
	// A non-main connection type becomes the key of the data object.
	expect(
		toITaskData([
			{ data: { value: 1 }, outputIndex: 1, nodeConnectionType: NodeConnectionType.AiAgent },
		]),
	).toEqual({
		executionStatus: 'success',
		executionTime: 0,
		source: [],
		startTime: 0,
		data: {
			[NodeConnectionType.AiAgent]: [null, [{ json: { value: 1 } }]],
		},
	});
	// Multiple entries fill their respective output slots.
	expect(
		toITaskData([
			{ data: { value: 1 }, outputIndex: 0 },
			{ data: { value: 2 }, outputIndex: 1 },
		]),
	).toEqual({
		executionStatus: 'success',
		executionTime: 0,
		startTime: 0,
		source: [],
		data: {
			main: [
				[
					{
						json: { value: 1 },
					},
				],
				[
					{
						json: { value: 2 },
					},
				],
			],
		},
	});
});

View File

@@ -0,0 +1,35 @@
import type { INode, IRunData } from 'n8n-workflow';
import type { DirectedGraph } from './directed-graph';
/**
* Returns new run data that does not contain data for any node that is a child
* of any start node.
* This does not mutate the `runData` being passed in.
*/
export function cleanRunData(
	runData: IRunData,
	graph: DirectedGraph,
	startNodes: Set<INode>,
): IRunData {
	// Work on a shallow copy so the caller's run data stays untouched.
	const cleaned: IRunData = { ...runData };

	// Drop the data of every start node and of everything downstream of it.
	for (const start of startNodes) {
		delete cleaned[start.name];

		for (const descendant of graph.getChildren(start)) {
			delete cleaned[descendant.name];
		}
	}

	// Also drop data of nodes that are not part of the (sub)graph at all.
	for (const name of Object.keys(cleaned)) {
		if (!graph.hasNode(name)) {
			delete cleaned[name];
		}
	}

	return cleaned;
}

View File

@@ -0,0 +1,509 @@
import * as a from 'assert';
import type { IConnections, INode, WorkflowParameters } from 'n8n-workflow';
import { NodeConnectionType, Workflow } from 'n8n-workflow';
// A single directed edge of the graph with full port information: which
// output of `from` connects to which input of `to`, and the connection type.
export type GraphConnection = {
	from: INode;
	to: INode;
	type: NodeConnectionType;
	outputIndex: number;
	inputIndex: number;
};
// fromName-outputType-outputIndex-inputIndex-toName
type DirectedGraphKey = `${string}-${NodeConnectionType}-${number}-${number}-${string}`;
// Shared options for the `removeNode` overloads. `skipConnectionFn` lets the
// caller exclude connections (e.g. non-main ones) from the rewiring step.
type RemoveNodeBaseOptions = {
	reconnectConnections: boolean;
	skipConnectionFn?: (connection: GraphConnection) => boolean;
};
/**
* Represents a directed graph as an adjacency list, e.g. one list for the
* vertices and one list for the edges.
* To integrate easier with the n8n codebase vertices are called nodes and
* edges are called connections.
*
 * The reason why this exists next to the Workflow class is that the Workflow
 * class stores the graph in a deeply nested, normalized format. This format
 * does not lend itself to editing the graph or building graphs incrementally.
 * This class closes that gap by having import and export functions:
 * `fromWorkflow`, `toWorkflow`.
 *
 * Thus it allows doing something like this:
* ```ts
* const newWorkflow = DirectedGraph.fromWorkflow(workflow)
* .addNodes(node1, node2)
* .addConnection({ from: node1, to: node2 })
* .toWorkflow(...workflow);
* ```
*/
export class DirectedGraph {
	private nodes: Map<string, INode> = new Map();

	private connections: Map<DirectedGraphKey, GraphConnection> = new Map();

	/** Returns true if a node with this name is part of the graph. */
	hasNode(nodeName: string) {
		return this.nodes.has(nodeName);
	}

	/** Returns a shallow copy of the name→node map (nodes are not cloned). */
	getNodes() {
		return new Map(this.nodes.entries());
	}

	/** Returns all connections, optionally only those targeting `filter.to`. */
	getConnections(filter: { to?: INode } = {}) {
		const filteredCopy: GraphConnection[] = [];
		for (const connection of this.connections.values()) {
			const toMatches = filter.to ? connection.to === filter.to : true;
			if (toMatches) {
				filteredCopy.push(connection);
			}
		}
		return filteredCopy;
	}

	addNode(node: INode) {
		this.nodes.set(node.name, node);
		return this;
	}

	addNodes(...nodes: INode[]) {
		for (const node of nodes) {
			this.addNode(node);
		}
		return this;
	}

	/**
	 * Removes a node from the graph.
	 *
	 * By default it will also remove all connections that use that node and
	 * return nothing.
	 *
	 * If you pass `{ reconnectConnections: true }` it will rewire all
	 * connections making sure all parent nodes are connected to all child nodes
	 * and return the new connections.
	 */
	removeNode(
		node: INode,
		options?: { reconnectConnections: true } & RemoveNodeBaseOptions,
	): GraphConnection[];
	removeNode(
		node: INode,
		options?: { reconnectConnections: false } & RemoveNodeBaseOptions,
	): undefined;
	removeNode(
		node: INode,
		options: RemoveNodeBaseOptions = { reconnectConnections: false },
	): undefined | GraphConnection[] {
		if (options.reconnectConnections) {
			const incomingConnections = this.getDirectParentConnections(node);
			const outgoingConnections = this.getDirectChildConnections(node);
			const newConnections: GraphConnection[] = [];
			for (const incomingConnection of incomingConnections) {
				if (options.skipConnectionFn && options.skipConnectionFn(incomingConnection)) {
					continue;
				}
				for (const outgoingConnection of outgoingConnections) {
					if (options.skipConnectionFn && options.skipConnectionFn(outgoingConnection)) {
						continue;
					}
					// The rewired connection keeps the parent's output port and
					// adopts the child's input port.
					const newConnection = {
						...incomingConnection,
						to: outgoingConnection.to,
						inputIndex: outgoingConnection.inputIndex,
					};
					newConnections.push(newConnection);
				}
			}
			for (const [key, connection] of this.connections.entries()) {
				if (connection.to === node || connection.from === node) {
					this.connections.delete(key);
				}
			}
			for (const newConnection of newConnections) {
				this.connections.set(this.makeKey(newConnection), newConnection);
			}
			this.nodes.delete(node.name);
			return newConnections;
		} else {
			for (const [key, connection] of this.connections.entries()) {
				if (connection.to === node || connection.from === node) {
					this.connections.delete(key);
				}
			}
			this.nodes.delete(node.name);
			return;
		}
	}

	/**
	 * Adds a connection between two nodes that must already have been added to
	 * the graph (asserts on identity, not just on name). Type and ports
	 * default to main/0/0.
	 */
	addConnection(connectionInput: {
		from: INode;
		to: INode;
		type?: NodeConnectionType;
		outputIndex?: number;
		inputIndex?: number;
	}) {
		const { from, to } = connectionInput;
		const fromExists = this.nodes.get(from.name) === from;
		const toExists = this.nodes.get(to.name) === to;
		a.ok(fromExists);
		a.ok(toExists);
		const connection: GraphConnection = {
			...connectionInput,
			type: connectionInput.type ?? NodeConnectionType.Main,
			outputIndex: connectionInput.outputIndex ?? 0,
			inputIndex: connectionInput.inputIndex ?? 0,
		};
		this.connections.set(this.makeKey(connection), connection);
		return this;
	}

	addConnections(
		...connectionInputs: Array<{
			from: INode;
			to: INode;
			type?: NodeConnectionType;
			outputIndex?: number;
			inputIndex?: number;
		}>
	) {
		for (const connectionInput of connectionInputs) {
			this.addConnection(connectionInput);
		}
		return this;
	}

	/** Returns all connections that start at the given node. */
	getDirectChildConnections(node: INode) {
		const nodeExists = this.nodes.get(node.name) === node;
		a.ok(nodeExists);
		const directChildren: GraphConnection[] = [];
		for (const connection of this.connections.values()) {
			if (connection.from !== node) {
				continue;
			}
			directChildren.push(connection);
		}
		return directChildren;
	}

	private getChildrenRecursive(node: INode, children: Set<INode>) {
		const directChildren = this.getDirectChildConnections(node);
		for (const directChild of directChildren) {
			// Break out if we found a cycle.
			if (children.has(directChild.to)) {
				continue;
			}
			children.add(directChild.to);
			this.getChildrenRecursive(directChild.to, children);
		}
		return children;
	}

	/**
	 * Returns all nodes that are children of the node that is passed as an
	 * argument.
	 *
	 * If the node being passed in is a child of itself (e.g. is part of a
	 * cycle), the return set will contain it as well.
	 */
	getChildren(node: INode) {
		return this.getChildrenRecursive(node, new Set());
	}

	/** Returns all connections that end at the given node. */
	getDirectParentConnections(node: INode) {
		const nodeExists = this.nodes.get(node.name) === node;
		a.ok(nodeExists);
		const directParents: GraphConnection[] = [];
		for (const connection of this.connections.values()) {
			if (connection.to !== node) {
				continue;
			}
			directParents.push(connection);
		}
		return directParents;
	}

	private getParentConnectionsRecursive(node: INode, connections: Set<GraphConnection>) {
		const parentConnections = this.getDirectParentConnections(node);
		for (const connection of parentConnections) {
			// break out of cycles
			if (connections.has(connection)) {
				continue;
			}
			connections.add(connection);
			this.getParentConnectionsRecursive(connection.from, connections);
		}
		return connections;
	}

	/** Returns all connections reachable upstream of the given node. */
	getParentConnections(node: INode) {
		return this.getParentConnectionsRecursive(node, new Set());
	}

	/** Looks up a single connection by all of its identifying properties. */
	getConnection(
		from: INode,
		outputIndex: number,
		type: NodeConnectionType,
		inputIndex: number,
		to: INode,
	): GraphConnection | undefined {
		return this.connections.get(
			this.makeKey({
				from,
				outputIndex,
				type,
				inputIndex,
				to,
			}),
		);
	}

	/**
	 * Returns all strongly connected components.
	 *
	 * Strongly connected components are a set of nodes where it's possible to
	 * reach every node from every node.
	 *
	 * Strongly connected components are mutually exclusive in directed graphs,
	 * e.g. they cannot overlap.
	 *
	 * The smallest strongly connected component is a single node, since it can
	 * reach itself from itself by not following any edges.
	 *
	 * The algorithm implemented here is Tarjan's algorithm.
	 *
	 * Example:
	 * ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐
	 * │node1├────►node2◄────┤node3├────►node5│
	 * └─────┘ └──┬──┘ └──▲──┘ └▲───┬┘
	 * │ │ │ │
	 * ┌──▼──┐ │ ┌┴───▼┐
	 * │node4├───────┘ │node6│
	 * └─────┘ └─────┘
	 *
	 * The strongly connected components are
	 * 1. node1
	 * 2. node2, node4, node3
	 * 3. node5, node6
	 *
	 * Further reading:
	 * https://en.wikipedia.org/wiki/Strongly_connected_component
	 * https://www.youtube.com/watch?v=wUgWX0nc4NY
	 */
	getStronglyConnectedComponents(): Array<Set<INode>> {
		let id = 0;
		const visited = new Set<INode>();
		const ids = new Map<INode, number>();
		const lowLinkValues = new Map<INode, number>();
		const stack: INode[] = [];
		const stronglyConnectedComponents: Array<Set<INode>> = [];
		const followNode = (node: INode) => {
			if (visited.has(node)) {
				return;
			}
			visited.add(node);
			lowLinkValues.set(node, id);
			ids.set(node, id);
			id++;
			stack.push(node);
			const directChildren = this.getDirectChildConnections(node).map((c) => c.to);
			for (const child of directChildren) {
				followNode(child);
				// if node is on stack min the low id
				if (stack.includes(child)) {
					const childLowLinkValue = lowLinkValues.get(child);
					const ownLowLinkValue = lowLinkValues.get(node);
					a.ok(childLowLinkValue !== undefined);
					a.ok(ownLowLinkValue !== undefined);
					const lowestLowLinkValue = Math.min(childLowLinkValue, ownLowLinkValue);
					lowLinkValues.set(node, lowestLowLinkValue);
				}
			}
			// after we visited all children, check if the low id is the same as the
			// nodes id, which means we found a strongly connected component
			const ownId = ids.get(node);
			const ownLowLinkValue = lowLinkValues.get(node);
			a.ok(ownId !== undefined);
			a.ok(ownLowLinkValue !== undefined);
			if (ownId === ownLowLinkValue) {
				// pop from the stack until the stack is empty or we find a node that
				// has a different low id
				const scc: Set<INode> = new Set();
				let next = stack.at(-1);
				while (next && lowLinkValues.get(next) === ownId) {
					stack.pop();
					scc.add(next);
					next = stack.at(-1);
				}
				if (scc.size > 0) {
					stronglyConnectedComponents.push(scc);
				}
			}
		};
		for (const node of this.nodes.values()) {
			followNode(node);
		}
		return stronglyConnectedComponents;
	}

	private depthFirstSearchRecursive(
		from: INode,
		fn: (node: INode) => boolean,
		seen: Set<INode>,
	): INode | undefined {
		if (seen.has(from)) {
			return undefined;
		}
		seen.add(from);
		if (fn(from)) {
			return from;
		}
		for (const childConnection of this.getDirectChildConnections(from)) {
			const found = this.depthFirstSearchRecursive(childConnection.to, fn, seen);
			if (found) {
				return found;
			}
		}
		return undefined;
	}

	/**
	 * Like `Array.prototype.find` but for directed graphs.
	 *
	 * Starting from, and including, the `from` node this calls the provided
	 * predicate function with every child node until the predicate function
	 * returns true.
	 *
	 * The search is depth first, meaning every branch is exhausted before the
	 * next branch is tried.
	 *
	 * The first node for which the predicate function returns true is returned.
	 *
	 * If the graph is exhausted and the predicate function never returned true,
	 * undefined is returned instead.
	 */
	depthFirstSearch({ from, fn }: { from: INode; fn: (node: INode) => boolean }): INode | undefined {
		return this.depthFirstSearchRecursive(from, fn, new Set());
	}

	/** Exports the graph into the normalized `Workflow` representation. */
	toWorkflow(parameters: Omit<WorkflowParameters, 'nodes' | 'connections'>): Workflow {
		return new Workflow({
			...parameters,
			nodes: [...this.nodes.values()],
			connections: this.toIConnections(),
		});
	}

	/** Imports a `Workflow` into this incremental graph representation. */
	static fromWorkflow(workflow: Workflow): DirectedGraph {
		const graph = new DirectedGraph();
		graph.addNodes(...Object.values(workflow.nodes));
		for (const [fromNodeName, iConnection] of Object.entries(workflow.connectionsBySourceNode)) {
			const from = workflow.getNode(fromNodeName);
			a.ok(from);
			for (const [outputType, outputs] of Object.entries(iConnection)) {
				for (const [outputIndex, conns] of outputs.entries()) {
					for (const conn of conns ?? []) {
						// TODO: What's with the input type?
						const { node: toNodeName, type: _inputType, index: inputIndex } = conn;
						const to = workflow.getNode(toNodeName);
						a.ok(to);
						graph.addConnection({
							from,
							to,
							// TODO: parse outputType instead of casting it
							type: outputType as NodeConnectionType,
							outputIndex,
							inputIndex,
						});
					}
				}
			}
		}
		return graph;
	}

	/** Returns a new graph with the same nodes and connections. */
	clone() {
		return new DirectedGraph()
			.addNodes(...this.getNodes().values())
			.addConnections(...this.getConnections().values());
	}

	private toIConnections() {
		const result: IConnections = {};
		for (const connection of this.connections.values()) {
			const { from, to, type, outputIndex, inputIndex } = connection;
			result[from.name] = result[from.name] ?? {};
			const resultConnection = result[from.name];
			// Initialize the per-type group lazily. Previously the group was
			// only created for the first connection type seen for a node
			// (`?? { [type]: [] }`), so a node with connections of a second
			// type would crash on the indexed access below.
			resultConnection[type] = resultConnection[type] ?? [];
			resultConnection[type][outputIndex] = resultConnection[type][outputIndex] ?? [];
			const group = resultConnection[type][outputIndex];
			group.push({
				node: to.name,
				type,
				index: inputIndex,
			});
		}
		return result;
	}

	/** Builds the unique map key encoding both endpoints and all ports. */
	private makeKey(connection: GraphConnection): DirectedGraphKey {
		return `${connection.from.name}-${connection.type}-${connection.outputIndex}-${connection.inputIndex}-${connection.to.name}`;
	}
}

View File

@@ -0,0 +1,18 @@
import { NodeConnectionType } from 'n8n-workflow';
import type { DirectedGraph } from './directed-graph';
/**
 * Returns a copy of the graph with every disabled node removed.
 *
 * Main connections through a removed node are rewired so that its parents
 * connect directly to its children; non-main connections are not rewired.
 */
export function filterDisabledNodes(graph: DirectedGraph): DirectedGraph {
	const result = graph.clone();

	const disabledNodes = [...result.getNodes().values()].filter((node) => node.disabled);

	for (const disabled of disabledNodes) {
		result.removeNode(disabled, {
			reconnectConnections: true,
			// Only rewire main connections through the removed node.
			skipConnectionFn: (connection) => connection.type !== NodeConnectionType.Main,
		});
	}

	return result;
}

View File

@@ -0,0 +1,176 @@
import { NodeConnectionType, type INode, type IPinData, type IRunData } from 'n8n-workflow';
import type { DirectedGraph } from './directed-graph';
import { getIncomingData, getIncomingDataFromAnyRun } from './get-incoming-data';
/**
 * A node is dirty if any of the following is true:
 * - its properties or options changed since the last execution (not implemented yet)
 * - one of its parents got disabled (not implemented yet)
 * - it has an error (not implemented yet)
 * - it has neither run data nor pinned data
 */
export function isDirty(node: INode, runData: IRunData = {}, pinData: IPinData = {}): boolean {
	// TODO: implement "properties or options changed since the last execution"
	// TODO: implement "a parent node got disabled"
	// TODO: implement "the node has an error"

	// Pinned data from an earlier execution means the node is clean.
	if (pinData[node.name] !== undefined) {
		return false;
	}

	// Likewise for run data.
	if (runData[node.name]) {
		return false;
	}

	return true;
}
function findStartNodesRecursive(
	graph: DirectedGraph,
	current: INode,
	destination: INode,
	runData: IRunData,
	pinData: IPinData,
	startNodes: Set<INode>,
	seen: Set<INode>,
): Set<INode> {
	// A dirty node is a start node: stop following this branch.
	if (isDirty(current, runData, pinData)) {
		startNodes.add(current);
		return startNodes;
	}

	// Reaching the destination also ends the search on this branch; the
	// destination is always (re-)executed.
	if (current === destination) {
		startNodes.add(current);
		return startNodes;
	}

	// Loop nodes need special handling: if the `done` output of the last run
	// carries no data, the loop never finished and must be restarted from the
	// beginning, making the loop node itself the start node.
	if (current.type === 'n8n-nodes-base.splitInBatches') {
		const doneOutputData = getIncomingData(
			runData,
			current.name,
			// last run
			-1,
			NodeConnectionType.Main,
			0,
		);

		if (doneOutputData === null || doneOutputData.length === 0) {
			startNodes.add(current);
			return startNodes;
		}
	}

	// Cycle detected: there is no start node on this branch.
	if (seen.has(current)) {
		return startNodes;
	}

	// Recurse into every direct child that is part of the sub graph, but only
	// along outputs that actually produced data in some run.
	for (const connection of graph.getDirectChildConnections(current)) {
		const producedData = getIncomingDataFromAnyRun(
			runData,
			connection.from.name,
			connection.type,
			connection.outputIndex,
		);

		if (!producedData || producedData.data.length === 0) {
			continue;
		}

		findStartNodesRecursive(
			graph,
			connection.to,
			destination,
			runData,
			pinData,
			startNodes,
			new Set(seen).add(current),
		);
	}

	return startNodes;
}
/**
 * Computes the start nodes of a partial execution, i.e. the nodes that will
 * be executed or re-executed.
 *
 * The graph is traversed from the trigger towards the destination and the
 * earliest dirty node on every branch becomes a start node:
 *
 * 1. if the current node is not a trigger and has no input data (on all
 *    connections) (not implemented yet, possibly not necessary)
 *    - stop following this branch, there is no start node on this branch
 * 2. if the current node is dirty, or is the destination node
 *    - stop following this branch, we found a start node
 * 3. if we detect a cycle
 *    - stop following the branch, there is no start node on this branch
 * 4. recurse with every direct child that is part of the sub graph
 */
export function findStartNodes(options: {
	graph: DirectedGraph;
	trigger: INode;
	destination: INode;
	pinData: IPinData;
	runData: IRunData;
}): Set<INode> {
	const { graph, trigger, destination, pinData } = options;
	const runData = { ...options.runData };

	return findStartNodesRecursive(
		graph,
		trigger,
		destination,
		runData,
		pinData,
		// start nodes found so far
		new Set(),
		// nodes already seen on the current path
		new Set(),
	);
}

View File

@@ -0,0 +1,114 @@
import { NodeConnectionType, type INode } from 'n8n-workflow';
import type { GraphConnection } from './directed-graph';
import { DirectedGraph } from './directed-graph';
function findSubgraphRecursive(
	graph: DirectedGraph,
	destinationNode: INode,
	current: INode,
	trigger: INode,
	newGraph: DirectedGraph,
	currentBranch: GraphConnection[],
) {
	// Copies the whole branch followed so far into the result graph.
	// TODO: write function that adds nodes when adding connections
	const keepBranch = () => {
		for (const connection of currentBranch) {
			newGraph.addNodes(connection.from, connection.to);
			newGraph.addConnection(connection);
		}
	};

	// Reached the chosen trigger: this branch belongs to the subgraph.
	if (current === trigger) {
		keepBranch();
		return;
	}

	const parentConnections = graph.getDirectParentConnections(current);

	// Dead end: a node without parents can never lead back to the trigger.
	if (parentConnections.length === 0) {
		return;
	}

	// A cycle back into the destination node: discard this branch.
	const isCycleWithDestinationNode =
		current === destinationNode && currentBranch.some((c) => c.to === destinationNode);
	if (isCycleWithDestinationNode) {
		return;
	}

	// Any other cycle: keep the branch as-is.
	const isCycleWithCurrentNode = currentBranch.some((c) => c.to === current);
	if (isCycleWithCurrentNode) {
		keepBranch();
		return;
	}

	// Walk upwards through every parent reachable via a Main connection.
	// Non-main connections only attach utility nodes (e.g. AI models) and can
	// never lead to the trigger.
	for (const parentConnection of parentConnections) {
		if (parentConnection.type !== NodeConnectionType.Main) {
			continue;
		}

		findSubgraphRecursive(graph, destinationNode, parentConnection.from, trigger, newGraph, [
			...currentBranch,
			parentConnection,
		]);
	}
}
/**
 * Finds all nodes that can lead from the trigger to the destination node.
 *
 * The algorithm starts at the destination node and walks upwards:
 *
 * 1. if the current node is the chosen trigger, keep this branch
 * 2. if the current node has no parents, discard this branch
 * 3. if the current node is the destination node again, discard this branch
 * 4. if the current node was already visited, keep this branch
 * 5. recurse on each parent
 * 6. re-add all connections that don't use the `Main` connection type; these
 *    attach utility nodes (e.g. the AI model of a lang chain node) which are
 *    not part of the data flow but are still required for execution
 */
export function findSubgraph(options: {
	graph: DirectedGraph;
	destination: INode;
	trigger: INode;
}): DirectedGraph {
	const { graph, destination, trigger } = options;

	const subgraph = new DirectedGraph();
	findSubgraphRecursive(graph, destination, destination, trigger, subgraph, []);

	// For each node in the subgraph, re-attach its non-`Main` parent
	// connections (and the nodes they come from) from the input graph.
	// Without this, partially executed AI workflows would be missing all
	// their utility nodes.
	for (const node of subgraph.getNodes().values()) {
		for (const connection of graph.getParentConnections(node)) {
			if (connection.type !== NodeConnectionType.Main) {
				subgraph.addNodes(connection.from, connection.to);
				subgraph.addConnection(connection);
			}
		}
	}

	return subgraph;
}

View File

@@ -0,0 +1,61 @@
import * as assert from 'assert/strict';
import type { INode, Workflow } from 'n8n-workflow';
function findAllParentTriggers(workflow: Workflow, destinationNodeName: string) {
const parentNodes = workflow
.getParentNodes(destinationNodeName)
.map((name) => {
const node = workflow.getNode(name);
// We got the node name from `workflow.getParentNodes`. The node must
// exist.
assert.ok(node);
return {
node,
nodeType: workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion),
};
})
.filter((value) => value !== null)
.filter(({ nodeType }) => nodeType.description.group.includes('trigger'))
.map(({ node }) => node);
return parentNodes;
}
// TODO: write unit tests for this
// TODO: rewrite this using DirectedGraph instead of workflow.
export function findTriggerForPartialExecution(
workflow: Workflow,
destinationNodeName: string,
): INode | undefined {
const parentTriggers = findAllParentTriggers(workflow, destinationNodeName).filter(
(trigger) => !trigger.disabled,
);
const pinnedTriggers = parentTriggers
// TODO: add the other filters here from `findAllPinnedActivators`, see
// copy below.
.filter((trigger) => workflow.pinData?.[trigger.name])
// TODO: Make this sorting more predictable
// Put nodes which names end with 'webhook' first, while also reversing the
// order they had in the original array.
.sort((n) => (n.type.endsWith('webhook') ? -1 : 1));
if (pinnedTriggers.length) {
return pinnedTriggers[0];
} else {
return parentTriggers[0];
}
}
//function findAllPinnedActivators(workflow: Workflow, pinData?: IPinData) {
// return Object.values(workflow.nodes)
// .filter(
// (node) =>
// !node.disabled &&
// pinData?.[node.name] &&
// ['trigger', 'webhook'].some((suffix) => node.type.toLowerCase().endsWith(suffix)) &&
// node.type !== 'n8n-nodes-base.respondToWebhook',
// )
// .sort((a) => (a.type.endsWith('webhook') ? -1 : 1));
//}

View File

@@ -0,0 +1,34 @@
import type { INodeExecutionData, IRunData, NodeConnectionType } from 'n8n-workflow';
/**
 * Returns the execution data a node produced on a given run at a specific
 * output, or `null` when the node, run, connection type or output does not
 * exist in the run data. Negative `runIndex` counts from the end (e.g. -1
 * is the last run).
 */
export function getIncomingData(
	runData: IRunData,
	nodeName: string,
	runIndex: number,
	connectionType: NodeConnectionType,
	outputIndex: number,
): INodeExecutionData[] | null {
	// Every step of this lookup may be missing, so chain optionally all the
	// way down. Previously the chain stopped after `data`, which threw when a
	// run existed but had no entry for the requested connection type.
	return runData[nodeName]?.at(runIndex)?.data?.[connectionType]?.at(outputIndex) ?? null;
}
/** Number of runs recorded for the given node; 0 when it never ran. */
function getRunIndexLength(runData: IRunData, nodeName: string) {
	const runs = runData[nodeName];
	return runs ? runs.length : 0;
}
/**
 * Scans all runs of a node, in order, and returns the first run that
 * produced non-empty data on the given connection type and output index,
 * along with its run index. Returns `undefined` when no run produced data.
 */
export function getIncomingDataFromAnyRun(
	runData: IRunData,
	nodeName: string,
	connectionType: NodeConnectionType,
	outputIndex: number,
): { data: INodeExecutionData[]; runIndex: number } | undefined {
	const runCount = getRunIndexLength(runData, nodeName);

	for (let runIndex = 0; runIndex < runCount; runIndex++) {
		const data = getIncomingData(runData, nodeName, runIndex, connectionType, outputIndex);
		if (data?.length) {
			return { data, runIndex };
		}
	}

	return undefined;
}

View File

@@ -0,0 +1,166 @@
import { type INode, type IPinData, type IRunData } from 'n8n-workflow';
import type { GraphConnection, DirectedGraph } from './directed-graph';
/**
 * Comparator that orders connections by input index first and, within the
 * same input, alphabetically by the source node's name.
 */
function sortByInputIndexThenByName(
	connection1: GraphConnection,
	connection2: GraphConnection,
): number {
	return (
		connection1.inputIndex - connection2.inputIndex ||
		connection1.from.name.localeCompare(connection2.from.name)
	);
}
type SourceConnectionGroup = {
	/**
	 * True only when every connection in the group has data; false as soon
	 * as a single connection lacks it.
	 *
	 * The execution engine uses this to decide whether a node goes onto the
	 * execution stack or the waiting stack.
	 */
	complete: boolean;
	connections: GraphConnection[];
};

/** Creates an empty group that is optimistically marked complete. */
function newGroup(): SourceConnectionGroup {
	return { complete: true, connections: [] };
}
/**
* Groups incoming connections to the node. The groups contain one connection
* per input, if possible, with run data or pinned data.
*
* The purpose of this is to get as many complete sets of data for executing
* nodes with multiple inputs.
*
* # Example 1:
* ┌───────┐1
* │source1├────┐
* └───────┘ │ ┌────┐
* ┌───────┐1 ├──►│ │
* │source2├────┘ │node│
* └───────┘ ┌──►│ │
* ┌───────┐1 │ └────┘
* │source3├────┘
* └───────┘
*
* Given this workflow, and assuming all sources have run data or pinned data,
* it's possible to run `node` with the data of `source1` and `source3` and
* then one more time with the data from `source2`.
*
* It would also be possible to run `node` with the data of `source2` and
* `source3` and then one more time with the data from `source1`.
*
* To improve the determinism of this the connections are sorted by input and
* then by from-node name.
*
* So this will return 2 groups:
* 1. source1 and source3
* 2. source2
*
* # Example 2:
* ┌───────┐0
* │source1├────┐
* └───────┘ │ ┌────┐
* ┌───────┐1 ├──►│ │
* │source2├────┘ │node│
* └───────┘ ┌──►│ │
* ┌───────┐1 │ └────┘
* │source3├────┘
* └───────┘
*
* Since `source1` has no run data and no pinned data it's skipped in favor of
* `source2` for the for input.
* It will become it's own group that is marked as `complete: false`
*
* So this will return 2 group:
* 1. source2 and source3, `complete: true`
* 2. source1, `complete: false`
*/
export function getSourceDataGroups(
graph: DirectedGraph,
node: INode,
runData: IRunData,
pinnedData: IPinData,
): SourceConnectionGroup[] {
const connections = graph.getConnections({ to: node });
const sortedConnectionsWithData = [];
const sortedConnectionsWithoutData = [];
for (const connection of connections) {
const hasData = runData[connection.from.name] || pinnedData[connection.from.name];
if (hasData) {
sortedConnectionsWithData.push(connection);
} else {
sortedConnectionsWithoutData.push(connection);
}
}
if (sortedConnectionsWithData.length === 0 && sortedConnectionsWithoutData.length === 0) {
return [];
}
sortedConnectionsWithData.sort(sortByInputIndexThenByName);
sortedConnectionsWithoutData.sort(sortByInputIndexThenByName);
const groups: SourceConnectionGroup[] = [];
let currentGroup = newGroup();
let currentInputIndex =
Math.min(
...sortedConnectionsWithData.map((c) => c.inputIndex),
...sortedConnectionsWithoutData.map((c) => c.inputIndex),
) - 1;
while (sortedConnectionsWithData.length > 0 || sortedConnectionsWithoutData.length > 0) {
currentInputIndex++;
const connectionWithDataIndex = sortedConnectionsWithData.findIndex(
// eslint-disable-next-line @typescript-eslint/no-loop-func
(c) => c.inputIndex === currentInputIndex,
);
if (connectionWithDataIndex >= 0) {
const connection = sortedConnectionsWithData[connectionWithDataIndex];
currentGroup.connections.push(connection);
sortedConnectionsWithData.splice(connectionWithDataIndex, 1);
continue;
}
const connectionWithoutDataIndex = sortedConnectionsWithoutData.findIndex(
// eslint-disable-next-line @typescript-eslint/no-loop-func
(c) => c.inputIndex === currentInputIndex,
);
if (connectionWithoutDataIndex >= 0) {
const connection = sortedConnectionsWithoutData[connectionWithoutDataIndex];
currentGroup.connections.push(connection);
currentGroup.complete = false;
sortedConnectionsWithoutData.splice(connectionWithoutDataIndex, 1);
continue;
}
groups.push(currentGroup);
currentGroup = newGroup();
currentInputIndex =
Math.min(
...sortedConnectionsWithData.map((c) => c.inputIndex),
...sortedConnectionsWithoutData.map((c) => c.inputIndex),
) - 1;
}
groups.push(currentGroup);
return groups;
}

View File

@@ -0,0 +1,56 @@
import type { INode } from 'n8n-workflow';
import * as a from 'node:assert/strict';
import type { DirectedGraph } from './directed-graph';
/**
 * Returns a new set of start nodes.
 *
 * For every start node this checks if it is part of a cycle and, if so,
 * replaces the start node with the first node of that cycle (the one
 * closest to the trigger).
 *
 * This is useful because it prevents executing cycles partially, e.g.
 * figuring out which run of the cycle has to be repeated etc.
 */
export function handleCycles(
	graph: DirectedGraph,
	startNodes: Set<INode>,
	trigger: INode,
): Set<INode> {
	// Strongly connected components can also be nodes that are not part of a
	// cycle; they form a component of one (e.g. the trigger, which has no
	// inputs and thus can never be on a cycle). We're not interested in them,
	// so they are filtered out.
	// NOTE: this used to be `cycle.size >= 1`, which filtered nothing and
	// made every start node trivially match its own singleton component.
	const cycles = graph.getStronglyConnectedComponents().filter((cycle) => cycle.size > 1);

	const newStartNodes: Set<INode> = new Set(startNodes);

	if (cycles.length === 0) {
		return newStartNodes;
	}

	// For each start node that sits inside a cycle, replace it with the first
	// cycle node reachable from the trigger.
	for (const startNode of startNodes) {
		for (const cycle of cycles) {
			if (!cycle.has(startNode)) {
				continue;
			}

			const firstNode = graph.depthFirstSearch({
				from: trigger,
				fn: (node) => cycle.has(node),
			});

			a.ok(
				firstNode,
				"the trigger must be connected to the cycle, otherwise the cycle wouldn't be part of the subgraph",
			);

			newStartNodes.delete(startNode);
			newStartNodes.add(firstNode);
		}
	}

	return newStartNodes;
}

View File

@@ -0,0 +1,8 @@
// Barrel file for the partial-execution utilities: re-exports the directed
// graph plus the helpers that compute what a partial execution must re-run.
export { DirectedGraph } from './directed-graph';
export { findTriggerForPartialExecution } from './find-trigger-for-partial-execution';
export { findStartNodes } from './find-start-nodes';
export { findSubgraph } from './find-subgraph';
export { recreateNodeExecutionStack } from './recreate-node-execution-stack';
export { cleanRunData } from './clean-run-data';
export { handleCycles } from './handle-cycles';
export { filterDisabledNodes } from './filter-disabled-nodes';

View File

@@ -0,0 +1,219 @@
import * as a from 'assert/strict';
import {
NodeConnectionType,
type IExecuteData,
type INode,
type INodeExecutionData,
type IPinData,
type IRunData,
type ISourceData,
type ITaskDataConnectionsSource,
type IWaitingForExecution,
type IWaitingForExecutionSource,
} from 'n8n-workflow';
import type { DirectedGraph } from './directed-graph';
import { getIncomingDataFromAnyRun } from './get-incoming-data';
import { getSourceDataGroups } from './get-source-data-groups';
/**
 * Inserts `executionData` into the nested waiting-execution structure at
 * `[nodeName][runIndex][inputType][inputIndex]`, creating every missing
 * intermediate level along the way.
 */
export function addWaitingExecution(
	waitingExecution: IWaitingForExecution,
	nodeName: string,
	runIndex: number,
	inputType: NodeConnectionType,
	inputIndex: number,
	executionData: INodeExecutionData[] | null,
) {
	const byRunIndex = (waitingExecution[nodeName] ??= {});
	const byInputType = (byRunIndex[runIndex] ??= {});
	const byInputIndex = (byInputType[inputType] ??= []);
	byInputIndex[inputIndex] = executionData;
}
/**
 * Inserts `sourceData` into the nested waiting-execution-source structure
 * at `[nodeName][runIndex][inputType][inputIndex]`, creating every missing
 * intermediate level along the way.
 */
export function addWaitingExecutionSource(
	waitingExecutionSource: IWaitingForExecutionSource,
	nodeName: string,
	runIndex: number,
	inputType: NodeConnectionType,
	inputIndex: number,
	sourceData: ISourceData | null,
) {
	const byRunIndex = (waitingExecutionSource[nodeName] ??= {});
	const byInputType = (byRunIndex[runIndex] ??= {});
	const byInputIndex = (byInputType[inputType] ??= []);
	byInputIndex[inputIndex] = sourceData;
}
/**
 * Recreates the node execution stack, waiting executions and waiting
 * execution sources from a directed graph, start nodes, the destination node,
 * run and pinned data.
 *
 * This function aims to be able to recreate the internal state of the
 * WorkflowExecute class at any point of time during an execution based on the
 * data that is already available. Specifically it will recreate the
 * `WorkflowExecute.runExecutionData.executionData` properties.
 *
 * This allows "restarting" an execution and having it only execute what's
 * necessary to be able to execute the destination node accurately, e.g. as
 * close as possible to what would happen in a production execution.
 *
 * @param graph the subgraph to execute; must not contain disabled nodes
 * @param startNodes the nodes to (re-)execute, e.g. from `findStartNodes`
 * @param runData data produced by earlier runs, keyed by node name
 * @param pinData pinned output data, keyed by node name
 * @returns the three `executionData` sub-structures used by WorkflowExecute
 */
export function recreateNodeExecutionStack(
	graph: DirectedGraph,
	startNodes: Set<INode>,
	runData: IRunData,
	pinData: IPinData,
): {
	nodeExecutionStack: IExecuteData[];
	waitingExecution: IWaitingForExecution;
	waitingExecutionSource: IWaitingForExecutionSource;
} {
	// Validate invariants.

	// The graph needs to be free of disabled nodes. If it's not it hasn't been
	// passed through findSubgraph.
	for (const node of graph.getNodes().values()) {
		a.notEqual(
			node.disabled,
			true,
			`Graph contains disabled nodes. This is not supported. Make sure to pass the graph through "findSubgraph" before calling "recreateNodeExecutionStack". The node in question is "${node.name}"`,
		);
	}

	// Initialize the nodeExecutionStack and waitingExecution with
	// the data from runData
	const nodeExecutionStack: IExecuteData[] = [];
	const waitingExecution: IWaitingForExecution = {};
	const waitingExecutionSource: IWaitingForExecutionSource = {};

	for (const startNode of startNodes) {
		// Only Main connections feed items into a node; other connection types
		// attach utility nodes (e.g. AI models) and are ignored here.
		const incomingStartNodeConnections = graph
			.getDirectParentConnections(startNode)
			.filter((c) => c.type === NodeConnectionType.Main);

		let incomingData: INodeExecutionData[][] = [];
		let incomingSourceData: ITaskDataConnectionsSource | null = null;

		if (incomingStartNodeConnections.length === 0) {
			// A start node without incoming main connections (e.g. a trigger) is
			// put on the stack with a single empty item, like a fresh execution.
			incomingData.push([{ json: {} }]);
			const executeData: IExecuteData = {
				node: startNode,
				data: { main: incomingData },
				source: incomingSourceData,
			};
			nodeExecutionStack.push(executeData);
		} else {
			// Group the incoming connections into sets with at most one
			// connection per input. Complete sets (every connection has data) go
			// on the execution stack; incomplete ones into the waiting state.
			const sourceDataSets = getSourceDataGroups(graph, startNode, runData, pinData);

			for (const sourceData of sourceDataSets) {
				if (sourceData.complete) {
					// All incoming connections have data, so let's put the node on the
					// stack!
					incomingData = [];
					incomingSourceData = { main: [] };

					for (const incomingConnection of sourceData.connections) {
						let runIndex = 0;
						const sourceNode = incomingConnection.from;

						// Pinned data takes precedence over run data.
						if (pinData[sourceNode.name]) {
							incomingData.push(pinData[sourceNode.name]);
						} else {
							a.ok(
								runData[sourceNode.name],
								`Start node(${incomingConnection.to.name}) has an incoming connection with no run or pinned data. This is not supported. The connection in question is "${sourceNode.name}->${startNode.name}". Are you sure the start nodes come from the "findStartNodes" function?`,
							);

							// Use the first run of the source node that produced data on
							// this output.
							const nodeIncomingData = getIncomingDataFromAnyRun(
								runData,
								sourceNode.name,
								incomingConnection.type,
								incomingConnection.outputIndex,
							);

							if (nodeIncomingData) {
								runIndex = nodeIncomingData.runIndex;
								incomingData.push(nodeIncomingData.data);
							}
						}

						incomingSourceData.main.push({
							previousNode: incomingConnection.from.name,
							previousNodeOutput: incomingConnection.outputIndex,
							previousNodeRun: runIndex,
						});
					}

					const executeData: IExecuteData = {
						node: startNode,
						data: { main: incomingData },
						source: incomingSourceData,
					};

					nodeExecutionStack.push(executeData);
				} else {
					const nodeName = startNode.name;
					// Each incomplete group becomes its own waiting "run"; the next
					// run index is the number of runs already waiting for this node.
					const nextRunIndex = waitingExecution[nodeName]
						? Object.keys(waitingExecution[nodeName]).length
						: 0;

					for (const incomingConnection of sourceData.connections) {
						const sourceNode = incomingConnection.from;
						const maybeNodeIncomingData = getIncomingDataFromAnyRun(
							runData,
							sourceNode.name,
							incomingConnection.type,
							incomingConnection.outputIndex,
						);
						const nodeIncomingData = maybeNodeIncomingData?.data ?? null;

						// Only connections that actually have data are registered;
						// inputs without data stay missing and keep the node waiting.
						if (nodeIncomingData) {
							addWaitingExecution(
								waitingExecution,
								nodeName,
								nextRunIndex,
								incomingConnection.type,
								incomingConnection.inputIndex,
								nodeIncomingData,
							);
							addWaitingExecutionSource(
								waitingExecutionSource,
								nodeName,
								nextRunIndex,
								incomingConnection.type,
								incomingConnection.inputIndex,
								nodeIncomingData
									? {
											previousNode: incomingConnection.from.name,
											previousNodeRun: nextRunIndex,
											previousNodeOutput: incomingConnection.outputIndex,
										}
									: null,
							);
						}
					}
				}
			}
		}
	}

	return {
		nodeExecutionStack,
		waitingExecution,
		waitingExecutionSource,
	};
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,44 @@
import { Service } from '@n8n/di';
import { CronJob } from 'cron';
import type { CronExpression, Workflow } from 'n8n-workflow';
import { InstanceSettings } from '@/instance-settings';
@Service()
export class ScheduledTaskManager {
	constructor(private readonly instanceSettings: InstanceSettings) {}

	/** Active cron jobs, keyed by the id of the workflow that registered them. */
	readonly cronJobs = new Map<string, CronJob[]>();

	/**
	 * Registers a cron job for the given workflow. The `onTick` callback only
	 * fires on the leader instance, so a scheduled workflow does not run on
	 * every instance in a multi-main setup.
	 */
	registerCron(workflow: Workflow, cronExpression: CronExpression, onTick: () => void) {
		const cronJob = new CronJob(
			cronExpression,
			() => {
				if (this.instanceSettings.isLeader) onTick();
			},
			undefined,
			true, // start the job immediately
			workflow.timezone,
		);
		const cronJobsForWorkflow = this.cronJobs.get(workflow.id);
		if (cronJobsForWorkflow) {
			cronJobsForWorkflow.push(cronJob);
		} else {
			this.cronJobs.set(workflow.id, [cronJob]);
		}
	}

	/** Stops all cron jobs registered for the given workflow. */
	deregisterCrons(workflowId: string) {
		const cronJobs = this.cronJobs.get(workflowId) ?? [];
		while (cronJobs.length) {
			const cronJob = cronJobs.pop();
			if (cronJob) cronJob.stop();
		}
	}

	/** Stops every registered cron job across all workflows. */
	deregisterAllCrons() {
		// `cronJobs` is a Map, so `Object.keys(this.cronJobs)` (as previously
		// used) always returned an empty array and nothing was ever
		// deregistered. Iterate the Map's own keys instead.
		for (const workflowId of this.cronJobs.keys()) {
			this.deregisterCrons(workflowId);
		}
	}
}

View File

@@ -0,0 +1,76 @@
import { Service } from '@n8n/di';
import type { SSHCredentials } from 'n8n-workflow';
import { createHash } from 'node:crypto';
import { Client, type ConnectConfig } from 'ssh2';
@Service()
export class SSHClientsManager {
	// Cached SSH clients keyed by a hash of their connect config, together
	// with the time they were last handed out.
	readonly clients = new Map<string, { client: Client; lastUsed: Date }>();

	constructor() {
		// Close all SSH connections when the process exits
		process.on('exit', () => this.onShutdown());

		if (process.env.NODE_ENV === 'test') return;

		// Regularly close stale SSH connections
		// NOTE(review): this interval keeps the event loop alive; consider
		// `.unref()` if the process should be able to exit while idle — confirm.
		setInterval(() => this.cleanupStaleConnections(), 60 * 1000);
	}

	/**
	 * Returns a connected SSH client for the given credentials, reusing a
	 * cached connection when one already exists for the same config.
	 */
	async getClient(credentials: SSHCredentials): Promise<Client> {
		const { sshAuthenticateWith, sshHost, sshPort, sshUser } = credentials;
		const sshConfig: ConnectConfig = {
			host: sshHost,
			port: sshPort,
			username: sshUser,
			...(sshAuthenticateWith === 'password'
				? { password: credentials.sshPassword }
				: {
						privateKey: credentials.privateKey,
						passphrase: credentials.passphrase ?? undefined,
					}),
		};

		// Identical configs hash to the same key, so their connection is shared.
		const clientHash = createHash('sha1').update(JSON.stringify(sshConfig)).digest('base64');

		const existing = this.clients.get(clientHash);
		if (existing) {
			existing.lastUsed = new Date();
			return existing.client;
		}

		return await new Promise((resolve, reject) => {
			const sshClient = new Client();
			// Reject the promise on errors until the connection is ready ...
			sshClient.once('error', reject);
			sshClient.once('ready', () => {
				// ... then detach the rejection handler and evict the cache entry
				// once the connection closes.
				sshClient.off('error', reject);
				sshClient.once('close', () => this.clients.delete(clientHash));
				this.clients.set(clientHash, {
					client: sshClient,
					lastUsed: new Date(),
				});
				resolve(sshClient);
			});
			sshClient.connect(sshConfig);
		});
	}

	// Ends every cached SSH connection; invoked from the process 'exit' hook.
	onShutdown() {
		for (const { client } of this.clients.values()) {
			client.end();
		}
	}

	// Ends and evicts connections that were unused for more than five minutes.
	cleanupStaleConnections() {
		const { clients } = this;
		if (clients.size === 0) return;

		const now = Date.now();
		for (const [hash, { client, lastUsed }] of clients.entries()) {
			if (now - lastUsed.getTime() > 5 * 60 * 1000) {
				client.end();
				clients.delete(hash);
			}
		}
	}
}

View File

@@ -0,0 +1,116 @@
import { Service } from '@n8n/di';
import { ApplicationError } from 'n8n-workflow';
import type {
Workflow,
INode,
INodeExecutionData,
IPollFunctions,
IGetExecuteTriggerFunctions,
IWorkflowExecuteAdditionalData,
WorkflowExecuteMode,
WorkflowActivateMode,
ITriggerResponse,
IDeferredPromise,
IExecuteResponsePromiseData,
IRun,
} from 'n8n-workflow';
@Service()
export class TriggersAndPollers {
	/**
	 * Runs the given trigger node so that it can trigger the workflow when the
	 * node has data.
	 *
	 * In manual mode the trigger's `emit`/`emitError` functions are wrapped so
	 * that the first emitted data (or error) also resolves (or rejects) the
	 * `manualTriggerResponse` promise attached to the returned response.
	 *
	 * @throws ApplicationError when the node type has no trigger function
	 */
	async runTrigger(
		workflow: Workflow,
		node: INode,
		getTriggerFunctions: IGetExecuteTriggerFunctions,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		activation: WorkflowActivateMode,
	): Promise<ITriggerResponse | undefined> {
		const triggerFunctions = getTriggerFunctions(workflow, node, additionalData, mode, activation);

		const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion);

		if (!nodeType.trigger) {
			throw new ApplicationError('Node type does not have a trigger function defined', {
				extra: { nodeName: node.name },
				tags: { nodeType: node.type },
			});
		}

		if (mode === 'manual') {
			// In manual mode we do not just start the trigger function we also
			// want to be able to get informed as soon as the first data got emitted
			const triggerResponse = await nodeType.trigger.call(triggerFunctions);

			// Add the manual trigger response which resolves when the first time data got emitted
			triggerResponse!.manualTriggerResponse = new Promise((resolve, reject) => {
				// Wrap `emit` so the first emission resolves the promise above while
				// still forwarding the emitted data.
				triggerFunctions.emit = (
					(resolveEmit) =>
					(
						data: INodeExecutionData[][],
						responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>,
						donePromise?: IDeferredPromise<IRun>,
					) => {
						// Forward the HTTP response (if any) through the hooks.
						additionalData.hooks!.hookFunctions.sendResponse = [
							async (response: IExecuteResponsePromiseData): Promise<void> => {
								if (responsePromise) {
									responsePromise.resolve(response);
								}
							},
						];

						if (donePromise) {
							// Resolve the done-promise with the run data once the
							// workflow finished executing.
							additionalData.hooks!.hookFunctions.workflowExecuteAfter?.unshift(
								async (runData: IRun): Promise<void> => {
									return donePromise.resolve(runData);
								},
							);
						}

						resolveEmit(data);
					}
				)(resolve);

				// Wrap `emitError` analogously so the first error rejects the promise.
				triggerFunctions.emitError = (
					(rejectEmit) =>
					(error: Error, responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>) => {
						additionalData.hooks!.hookFunctions.sendResponse = [
							async (): Promise<void> => {
								if (responsePromise) {
									responsePromise.reject(error);
								}
							},
						];

						rejectEmit(error);
					}
				)(reject);
			});

			return triggerResponse;
		}

		// In all other modes simply start the trigger
		return await nodeType.trigger.call(triggerFunctions);
	}

	/**
	 * Runs the given poller node so that it can trigger the workflow when the
	 * node has data.
	 *
	 * @throws ApplicationError when the node type has no poll function
	 */
	async runPoll(
		workflow: Workflow,
		node: INode,
		pollFunctions: IPollFunctions,
	): Promise<INodeExecutionData[][] | null> {
		const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion);

		if (!nodeType.poll) {
			throw new ApplicationError('Node type does not have a poll function defined', {
				extra: { nodeName: node.name },
				tags: { nodeType: node.type },
			});
		}

		return await nodeType.poll.call(pollFunctions);
	}
}

File diff suppressed because it is too large Load Diff