refactor(core): Reorganize n8n-core and enforce file-name casing (no-changelog) (#12667)

parent e7f00bcb7f
commit 05858c2153
@@ -1,8 +0,0 @@
export { DirectedGraph } from './DirectedGraph';
export { findTriggerForPartialExecution } from './findTriggerForPartialExecution';
export { findStartNodes } from './findStartNodes';
export { findSubgraph } from './findSubgraph';
export { recreateNodeExecutionStack } from './recreateNodeExecutionStack';
export { cleanRunData } from './cleanRunData';
export { handleCycles } from './handleCycles';
export { filterDisabledNodes } from './filterDisabledNodes';
packages/core/src/__tests__/credentials.test.ts (Normal file, 60 lines)
@@ -0,0 +1,60 @@
import { Container } from '@n8n/di';
import { mock } from 'jest-mock-extended';
import type { CredentialInformation } from 'n8n-workflow';

import { Cipher } from '@/encryption/cipher';
import type { InstanceSettings } from '@/instance-settings';

import { Credentials } from '../credentials';

describe('Credentials', () => {
  const cipher = new Cipher(mock<InstanceSettings>({ encryptionKey: 'password' }));
  Container.set(Cipher, cipher);

  const setDataKey = (credentials: Credentials, key: string, data: CredentialInformation) => {
    let fullData;
    try {
      fullData = credentials.getData();
    } catch (e) {
      fullData = {};
    }
    fullData[key] = data;
    return credentials.setData(fullData);
  };

  describe('without nodeType set', () => {
    test('should be able to set and read key data without initial data set', () => {
      const credentials = new Credentials({ id: null, name: 'testName' }, 'testType');

      const key = 'key1';
      const newData = 1234;

      setDataKey(credentials, key, newData);

      expect(credentials.getData()[key]).toEqual(newData);
    });

    test('should be able to set and read key data with initial data set', () => {
      const key = 'key2';

      // Saved under "key1"
      const initialData = 4321;
      const initialDataEncoded = 'U2FsdGVkX1+0baznXt+Ag/ub8A2kHLyoLxn/rR9h4XQ=';

      const credentials = new Credentials(
        { id: null, name: 'testName' },
        'testType',
        initialDataEncoded,
      );

      const newData = 1234;

      // Set and read new data
      setDataKey(credentials, key, newData);
      expect(credentials.getData()[key]).toEqual(newData);

      // Read the data which got provided encrypted on init
      expect(credentials.getData().key1).toEqual(initialData);
    });
  });
});
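The test above hinges on one invariant worth seeing in isolation: whatever setData() serializes and encrypts through the Cipher registered in the DI container, getData() must decrypt back. A minimal TypeScript sketch of that round-trip, reusing only names that appear in the diff (the example values are hypothetical, not part of this commit):

import { Container } from '@n8n/di';
import { mock } from 'jest-mock-extended';

import { Cipher } from '@/encryption/cipher';
import type { InstanceSettings } from '@/instance-settings';
import { Credentials } from '../credentials';

// Same setup as the test: a Cipher keyed with 'password', registered in DI.
Container.set(Cipher, new Cipher(mock<InstanceSettings>({ encryptionKey: 'password' })));

const credentials = new Credentials({ id: null, name: 'example' }, 'testType');
credentials.setData({ token: 'secret' }); // stored encrypted internally
console.log(credentials.getData().token); // 'secret' again, decrypted on read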
packages/core/src/__tests__/node-execute-functions.test.ts (Normal file, 932 lines)
@@ -0,0 +1,932 @@
import { Container } from '@n8n/di';
import FormData from 'form-data';
import { mkdtempSync, readFileSync } from 'fs';
import { IncomingMessage } from 'http';
import type { Agent } from 'https';
import { mock } from 'jest-mock-extended';
import type {
  IBinaryData,
  IHttpRequestMethods,
  IHttpRequestOptions,
  INode,
  IRequestOptions,
  ITaskDataConnections,
  IWorkflowExecuteAdditionalData,
  Workflow,
  WorkflowHooks,
} from 'n8n-workflow';
import nock from 'nock';
import { tmpdir } from 'os';
import { join } from 'path';
import { Readable } from 'stream';
import type { SecureContextOptions } from 'tls';

import { BinaryDataService } from '@/binary-data/binary-data.service';
import { InstanceSettings } from '@/instance-settings';
import {
  binaryToString,
  copyInputItems,
  getBinaryDataBuffer,
  invokeAxios,
  isFilePathBlocked,
  parseContentDisposition,
  parseContentType,
  parseIncomingMessage,
  parseRequestObject,
  proxyRequestToAxios,
  removeEmptyBody,
  setBinaryDataBuffer,
} from '@/node-execute-functions';

const temporaryDir = mkdtempSync(join(tmpdir(), 'n8n'));

describe('NodeExecuteFunctions', () => {
  describe('test binary data helper methods', () => {
    test("test getBinaryDataBuffer(...) & setBinaryDataBuffer(...) methods in 'default' mode", async () => {
      // Setup a 'default' binary data manager instance
      Container.set(BinaryDataService, new BinaryDataService());

      await Container.get(BinaryDataService).init({
        mode: 'default',
        availableModes: ['default'],
        localStoragePath: temporaryDir,
      });

      // Set our binary data buffer
      const inputData: Buffer = Buffer.from('This is some binary data', 'utf8');
      const setBinaryDataBufferResponse: IBinaryData = await setBinaryDataBuffer(
        {
          mimeType: 'txt',
          data: 'This should be overwritten by the actual payload in the response',
        },
        inputData,
        'workflowId',
        'executionId',
      );

      // Expect our return object to contain the base64 encoding of the input data, as it should be stored in memory.
      expect(setBinaryDataBufferResponse.data).toEqual(inputData.toString('base64'));

      // Now, re-fetch our data.
      // An ITaskDataConnections object is used to share data between nodes. The top level property, 'main', represents the successful output object from a previous node.
      const taskDataConnectionsInput: ITaskDataConnections = {
        main: [],
      };

      // We add an input set, with one item at index 0, to this input. It contains an empty json payload and our binary data.
      taskDataConnectionsInput.main.push([
        {
          json: {},
          binary: {
            data: setBinaryDataBufferResponse,
          },
        },
      ]);

      // Now, lets fetch our data! The item will be item index 0.
      const getBinaryDataBufferResponse: Buffer = await getBinaryDataBuffer(
        taskDataConnectionsInput,
        0,
        'data',
        0,
      );

      expect(getBinaryDataBufferResponse).toEqual(inputData);
    });

    test("test getBinaryDataBuffer(...) & setBinaryDataBuffer(...) methods in 'filesystem' mode", async () => {
      Container.set(BinaryDataService, new BinaryDataService());

      // Setup a 'filesystem' binary data manager instance
      await Container.get(BinaryDataService).init({
        mode: 'filesystem',
        availableModes: ['filesystem'],
        localStoragePath: temporaryDir,
      });

      // Set our binary data buffer
      const inputData: Buffer = Buffer.from('This is some binary data', 'utf8');
      const setBinaryDataBufferResponse: IBinaryData = await setBinaryDataBuffer(
        {
          mimeType: 'txt',
          data: 'This should be overwritten with the name of the configured data manager',
        },
        inputData,
        'workflowId',
        'executionId',
      );

      // Expect our return object to contain the name of the configured data manager.
      expect(setBinaryDataBufferResponse.data).toEqual('filesystem-v2');

      // Ensure that the input data was successfully persisted to disk.
      expect(
        readFileSync(
          `${temporaryDir}/${setBinaryDataBufferResponse.id?.replace('filesystem-v2:', '')}`,
        ),
      ).toEqual(inputData);

      // Now, re-fetch our data.
      // An ITaskDataConnections object is used to share data between nodes. The top level property, 'main', represents the successful output object from a previous node.
      const taskDataConnectionsInput: ITaskDataConnections = {
        main: [],
      };

      // We add an input set, with one item at index 0, to this input. It contains an empty json payload and our binary data.
      taskDataConnectionsInput.main.push([
        {
          json: {},
          binary: {
            data: setBinaryDataBufferResponse,
          },
        },
      ]);

      // Now, lets fetch our data! The item will be item index 0.
      const getBinaryDataBufferResponse: Buffer = await getBinaryDataBuffer(
        taskDataConnectionsInput,
        0,
        'data',
        0,
      );

      expect(getBinaryDataBufferResponse).toEqual(inputData);
    });
  });

  describe('parseContentType', () => {
    const testCases = [
      {
        input: 'text/plain',
        expected: {
          type: 'text/plain',
          parameters: {
            charset: 'utf-8',
          },
        },
        description: 'should parse basic content type',
      },
      {
        input: 'TEXT/PLAIN',
        expected: {
          type: 'text/plain',
          parameters: {
            charset: 'utf-8',
          },
        },
        description: 'should convert type to lowercase',
      },
      {
        input: 'text/html; charset=iso-8859-1',
        expected: {
          type: 'text/html',
          parameters: {
            charset: 'iso-8859-1',
          },
        },
        description: 'should parse content type with charset',
      },
      {
        input: 'application/json; charset=utf-8; boundary=---123',
        expected: {
          type: 'application/json',
          parameters: {
            charset: 'utf-8',
            boundary: '---123',
          },
        },
        description: 'should parse content type with multiple parameters',
      },
      {
        input: 'text/plain; charset="utf-8"; filename="test.txt"',
        expected: {
          type: 'text/plain',
          parameters: {
            charset: 'utf-8',
            filename: 'test.txt',
          },
        },
        description: 'should handle quoted parameter values',
      },
      {
        input: 'text/plain; filename=%22test%20file.txt%22',
        expected: {
          type: 'text/plain',
          parameters: {
            charset: 'utf-8',
            filename: 'test file.txt',
          },
        },
        description: 'should handle encoded parameter values',
      },
      {
        input: undefined,
        expected: null,
        description: 'should return null for undefined input',
      },
      {
        input: '',
        expected: null,
        description: 'should return null for empty string',
      },
    ];

    test.each(testCases)('$description', ({ input, expected }) => {
      expect(parseContentType(input)).toEqual(expected);
    });
  });

  describe('parseContentDisposition', () => {
    const testCases = [
      {
        input: 'attachment; filename="file.txt"',
        expected: { type: 'attachment', filename: 'file.txt' },
        description: 'should parse basic content disposition',
      },
      {
        input: 'attachment; filename=file.txt',
        expected: { type: 'attachment', filename: 'file.txt' },
        description: 'should parse filename without quotes',
      },
      {
        input: 'inline; filename="image.jpg"',
        expected: { type: 'inline', filename: 'image.jpg' },
        description: 'should parse inline disposition',
      },
      {
        input: 'attachment; filename="my file.pdf"',
        expected: { type: 'attachment', filename: 'my file.pdf' },
        description: 'should parse filename with spaces',
      },
      {
        input: "attachment; filename*=UTF-8''my%20file.txt",
        expected: { type: 'attachment', filename: 'my file.txt' },
        description: 'should parse filename* parameter (RFC 5987)',
      },
      {
        input: 'filename="test.txt"',
        expected: { type: 'attachment', filename: 'test.txt' },
        description: 'should handle invalid syntax but with filename',
      },
      {
        input: 'filename=test.txt',
        expected: { type: 'attachment', filename: 'test.txt' },
        description: 'should handle invalid syntax with only filename parameter',
      },
      {
        input: undefined,
        expected: null,
        description: 'should return null for undefined input',
      },
      {
        input: '',
        expected: null,
        description: 'should return null for empty string',
      },
      {
        input: 'attachment; filename="%F0%9F%98%80.txt"',
        expected: { type: 'attachment', filename: '😀.txt' },
        description: 'should handle encoded filenames',
      },
      {
        input: 'attachment; size=123; filename="test.txt"; creation-date="Thu, 1 Jan 2020"',
        expected: { type: 'attachment', filename: 'test.txt' },
        description: 'should handle multiple parameters',
      },
    ];

    test.each(testCases)('$description', ({ input, expected }) => {
      expect(parseContentDisposition(input)).toEqual(expected);
    });
  });

  describe('parseIncomingMessage', () => {
    it('parses valid content-type header', () => {
      const message = mock<IncomingMessage>({
        headers: { 'content-type': 'application/json', 'content-disposition': undefined },
      });
      parseIncomingMessage(message);

      expect(message.contentType).toEqual('application/json');
    });

    it('parses valid content-type header with parameters', () => {
      const message = mock<IncomingMessage>({
        headers: {
          'content-type': 'application/json; charset=utf-8',
          'content-disposition': undefined,
        },
      });
      parseIncomingMessage(message);

      expect(message.contentType).toEqual('application/json');
      expect(message.encoding).toEqual('utf-8');
    });

    it('parses valid content-type header with encoding wrapped in quotes', () => {
      const message = mock<IncomingMessage>({
        headers: {
          'content-type': 'application/json; charset="utf-8"',
          'content-disposition': undefined,
        },
      });
      parseIncomingMessage(message);

      expect(message.contentType).toEqual('application/json');
      expect(message.encoding).toEqual('utf-8');
    });

    it('parses valid content-disposition header with filename*', () => {
      const message = mock<IncomingMessage>({
        headers: {
          'content-type': undefined,
          'content-disposition':
            'attachment; filename="screenshot%20(1).png"; filename*=UTF-8\'\'screenshot%20(1).png',
        },
      });
      parseIncomingMessage(message);

      expect(message.contentDisposition).toEqual({
        filename: 'screenshot (1).png',
        type: 'attachment',
      });
    });

    it('parses valid content-disposition header with filename* (quoted)', () => {
      const message = mock<IncomingMessage>({
        headers: {
          'content-type': undefined,
          'content-disposition': ' attachment;filename*="utf-8\' \'test-unsplash.jpg"',
        },
      });
      parseIncomingMessage(message);

      expect(message.contentDisposition).toEqual({
        filename: 'test-unsplash.jpg',
        type: 'attachment',
      });
    });

    it('parses valid content-disposition header with filename and trailing ";"', () => {
      const message = mock<IncomingMessage>({
        headers: {
          'content-type': undefined,
          'content-disposition': 'inline; filename="screenshot%20(1).png";',
        },
      });
      parseIncomingMessage(message);

      expect(message.contentDisposition).toEqual({
        filename: 'screenshot (1).png',
        type: 'inline',
      });
    });

    it('parses non standard content-disposition with missing type', () => {
      const message = mock<IncomingMessage>({
        headers: {
          'content-type': undefined,
          'content-disposition': 'filename="screenshot%20(1).png";',
        },
      });
      parseIncomingMessage(message);

      expect(message.contentDisposition).toEqual({
        filename: 'screenshot (1).png',
        type: 'attachment',
      });
    });
  });

  describe('proxyRequestToAxios', () => {
    const baseUrl = 'http://example.de';
    const workflow = mock<Workflow>();
    const hooks = mock<WorkflowHooks>();
    const additionalData = mock<IWorkflowExecuteAdditionalData>({ hooks });
    const node = mock<INode>();

    beforeEach(() => {
      hooks.executeHookFunctions.mockClear();
    });

    test('should rethrow an error with `status` property', async () => {
      nock(baseUrl).get('/test').reply(400);

      try {
        await proxyRequestToAxios(workflow, additionalData, node, `${baseUrl}/test`);
      } catch (error) {
        expect(error.status).toEqual(400);
      }
    });

    test('should not throw if the response status is 200', async () => {
      nock(baseUrl).get('/test').reply(200);
      await proxyRequestToAxios(workflow, additionalData, node, `${baseUrl}/test`);
      expect(hooks.executeHookFunctions).toHaveBeenCalledWith('nodeFetchedData', [
        workflow.id,
        node,
      ]);
    });

    test('should throw if the response status is 403', async () => {
      const headers = { 'content-type': 'text/plain' };
      nock(baseUrl).get('/test').reply(403, 'Forbidden', headers);
      try {
        await proxyRequestToAxios(workflow, additionalData, node, `${baseUrl}/test`);
      } catch (error) {
        expect(error.statusCode).toEqual(403);
        expect(error.request).toBeUndefined();
        expect(error.response).toMatchObject({ headers, status: 403 });
        expect(error.options).toMatchObject({
          headers: { Accept: '*/*' },
          method: 'get',
          url: 'http://example.de/test',
        });
        expect(error.config).toBeUndefined();
        expect(error.message).toEqual('403 - "Forbidden"');
      }
      expect(hooks.executeHookFunctions).not.toHaveBeenCalled();
    });

    test('should not throw if the response status is 404, but `simple` option is set to `false`', async () => {
      nock(baseUrl).get('/test').reply(404, 'Not Found');
      const response = await proxyRequestToAxios(workflow, additionalData, node, {
        url: `${baseUrl}/test`,
        simple: false,
      });

      expect(response).toEqual('Not Found');
      expect(hooks.executeHookFunctions).toHaveBeenCalledWith('nodeFetchedData', [
        workflow.id,
        node,
      ]);
    });

    test('should return full response when `resolveWithFullResponse` is set to true', async () => {
      nock(baseUrl).get('/test').reply(404, 'Not Found');
      const response = await proxyRequestToAxios(workflow, additionalData, node, {
        url: `${baseUrl}/test`,
        resolveWithFullResponse: true,
        simple: false,
      });

      expect(response).toMatchObject({
        body: 'Not Found',
        headers: {},
        statusCode: 404,
        statusMessage: null,
      });
      expect(hooks.executeHookFunctions).toHaveBeenCalledWith('nodeFetchedData', [
        workflow.id,
        node,
      ]);
    });

    describe('redirects', () => {
      test('should forward authorization header', async () => {
        nock(baseUrl).get('/redirect').reply(301, '', { Location: 'https://otherdomain.com/test' });
        nock('https://otherdomain.com')
          .get('/test')
          .reply(200, function () {
            return this.req.headers;
          });

        const response = await proxyRequestToAxios(workflow, additionalData, node, {
          url: `${baseUrl}/redirect`,
          auth: {
            username: 'testuser',
            password: 'testpassword',
          },
          headers: {
            'X-Other-Header': 'otherHeaderContent',
          },
          resolveWithFullResponse: true,
        });

        expect(response.statusCode).toBe(200);
        const forwardedHeaders = JSON.parse(response.body);
        expect(forwardedHeaders.authorization).toBe('Basic dGVzdHVzZXI6dGVzdHBhc3N3b3Jk');
        expect(forwardedHeaders['x-other-header']).toBe('otherHeaderContent');
      });

      test('should follow redirects by default', async () => {
        nock(baseUrl)
          .get('/redirect')
          .reply(301, '', { Location: `${baseUrl}/test` });
        nock(baseUrl).get('/test').reply(200, 'Redirected');

        const response = await proxyRequestToAxios(workflow, additionalData, node, {
          url: `${baseUrl}/redirect`,
          resolveWithFullResponse: true,
        });

        expect(response).toMatchObject({
          body: 'Redirected',
          headers: {},
          statusCode: 200,
        });
      });

      test('should not follow redirects when configured', async () => {
        nock(baseUrl)
          .get('/redirect')
          .reply(301, '', { Location: `${baseUrl}/test` });
        nock(baseUrl).get('/test').reply(200, 'Redirected');

        await expect(
          proxyRequestToAxios(workflow, additionalData, node, {
            url: `${baseUrl}/redirect`,
            resolveWithFullResponse: true,
            followRedirect: false,
          }),
        ).rejects.toThrowError(expect.objectContaining({ statusCode: 301 }));
      });
    });
  });

  describe('parseRequestObject', () => {
    test('should handle basic request options', async () => {
      const axiosOptions = await parseRequestObject({
        url: 'https://example.com',
        method: 'POST',
        headers: { 'content-type': 'application/json' },
        body: { key: 'value' },
      });

      expect(axiosOptions).toEqual(
        expect.objectContaining({
          url: 'https://example.com',
          method: 'POST',
          headers: { accept: '*/*', 'content-type': 'application/json' },
          data: { key: 'value' },
          maxRedirects: 0,
        }),
      );
    });

    test('should set correct headers for FormData', async () => {
      const formData = new FormData();
      formData.append('key', 'value');

      const axiosOptions = await parseRequestObject({
        url: 'https://example.com',
        formData,
        headers: {
          'content-type': 'multipart/form-data',
        },
      });

      expect(axiosOptions.headers).toMatchObject({
        accept: '*/*',
        'content-length': 163,
        'content-type': expect.stringMatching(/^multipart\/form-data; boundary=/),
      });

      expect(axiosOptions.data).toBeInstanceOf(FormData);
    });

    test('should not use Host header for SNI', async () => {
      const axiosOptions = await parseRequestObject({
        url: 'https://example.de/foo/bar',
        headers: { Host: 'other.host.com' },
      });
      expect((axiosOptions.httpsAgent as Agent).options.servername).toEqual('example.de');
    });

    describe('should set SSL certificates', () => {
      const agentOptions: SecureContextOptions = {
        ca: '-----BEGIN CERTIFICATE-----\nTEST\n-----END CERTIFICATE-----',
      };
      const requestObject: IRequestOptions = {
        method: 'GET',
        uri: 'https://example.de',
        agentOptions,
      };

      test('on regular requests', async () => {
        const axiosOptions = await parseRequestObject(requestObject);
        expect((axiosOptions.httpsAgent as Agent).options).toEqual({
          servername: 'example.de',
          ...agentOptions,
          noDelay: true,
          path: null,
        });
      });

      test('on redirected requests', async () => {
        const axiosOptions = await parseRequestObject(requestObject);
        expect(axiosOptions.beforeRedirect).toBeDefined;
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const redirectOptions: Record<string, any> = { agents: {}, hostname: 'example.de' };
        axiosOptions.beforeRedirect!(redirectOptions, mock());
        expect(redirectOptions.agent).toEqual(redirectOptions.agents.https);
        expect((redirectOptions.agent as Agent).options).toEqual({
          servername: 'example.de',
          ...agentOptions,
          noDelay: true,
          path: null,
        });
      });
    });

    describe('when followRedirect is true', () => {
      test.each(['GET', 'HEAD'] as IHttpRequestMethods[])(
        'should set maxRedirects on %s ',
        async (method) => {
          const axiosOptions = await parseRequestObject({
            method,
            followRedirect: true,
            maxRedirects: 1234,
          });
          expect(axiosOptions.maxRedirects).toEqual(1234);
        },
      );

      test.each(['POST', 'PUT', 'PATCH', 'DELETE'] as IHttpRequestMethods[])(
        'should not set maxRedirects on %s ',
        async (method) => {
          const axiosOptions = await parseRequestObject({
            method,
            followRedirect: true,
            maxRedirects: 1234,
          });
          expect(axiosOptions.maxRedirects).toEqual(0);
        },
      );
    });

    describe('when followAllRedirects is true', () => {
      test.each(['GET', 'HEAD', 'POST', 'PUT', 'PATCH', 'DELETE'] as IHttpRequestMethods[])(
        'should set maxRedirects on %s ',
        async (method) => {
          const axiosOptions = await parseRequestObject({
            method,
            followAllRedirects: true,
            maxRedirects: 1234,
          });
          expect(axiosOptions.maxRedirects).toEqual(1234);
        },
      );
    });
  });

  describe('invokeAxios', () => {
    const baseUrl = 'http://example.de';

    beforeEach(() => {
      nock.cleanAll();
      jest.clearAllMocks();
    });

    it('should throw error for non-401 status codes', async () => {
      nock(baseUrl).get('/test').reply(500, {});

      await expect(invokeAxios({ url: `${baseUrl}/test` })).rejects.toThrow(
        'Request failed with status code 500',
      );
    });

    it('should throw error on 401 without digest auth challenge', async () => {
      nock(baseUrl).get('/test').reply(401, {});

      await expect(
        invokeAxios(
          {
            url: `${baseUrl}/test`,
          },
          { sendImmediately: false },
        ),
      ).rejects.toThrow('Request failed with status code 401');
    });

    it('should make successful requests', async () => {
      nock(baseUrl).get('/test').reply(200, { success: true });

      const response = await invokeAxios({
        url: `${baseUrl}/test`,
      });

      expect(response.status).toBe(200);
      expect(response.data).toEqual({ success: true });
    });

    it('should handle digest auth when receiving 401 with nonce', async () => {
      nock(baseUrl)
        .get('/test')
        .matchHeader('authorization', 'Basic dXNlcjpwYXNz')
        .once()
        .reply(401, {}, { 'www-authenticate': 'Digest realm="test", nonce="abc123", qop="auth"' });

      nock(baseUrl)
        .get('/test')
        .matchHeader(
          'authorization',
          /^Digest username="user",realm="test",nonce="abc123",uri="\/test",qop="auth",algorithm="MD5",response="[0-9a-f]{32}"/,
        )
        .reply(200, { success: true });

      const response = await invokeAxios(
        {
          url: `${baseUrl}/test`,
          auth: {
            username: 'user',
            password: 'pass',
          },
        },
        { sendImmediately: false },
      );

      expect(response.status).toBe(200);
      expect(response.data).toEqual({ success: true });
    });
  });

  describe('copyInputItems', () => {
    it('should pick only selected properties', () => {
      const output = copyInputItems(
        [
          {
            json: {
              a: 1,
              b: true,
              c: {},
            },
          },
        ],
        ['a'],
      );
      expect(output).toEqual([{ a: 1 }]);
    });

    it('should convert undefined to null', () => {
      const output = copyInputItems(
        [
          {
            json: {
              a: undefined,
            },
          },
        ],
        ['a'],
      );
      expect(output).toEqual([{ a: null }]);
    });

    it('should clone objects', () => {
      const input = {
        a: { b: 5 },
      };
      const output = copyInputItems(
        [
          {
            json: input,
          },
        ],
        ['a'],
      );
      expect(output[0].a).toEqual(input.a);
      expect(output[0].a === input.a).toEqual(false);
    });
  });

  describe('removeEmptyBody', () => {
    test.each(['GET', 'HEAD', 'OPTIONS'] as IHttpRequestMethods[])(
      'Should remove empty body for %s',
      async (method) => {
        const requestOptions = {
          method,
          body: {},
        } as IHttpRequestOptions | IRequestOptions;
        removeEmptyBody(requestOptions);
        expect(requestOptions.body).toEqual(undefined);
      },
    );

    test.each(['GET', 'HEAD', 'OPTIONS'] as IHttpRequestMethods[])(
      'Should not remove non-empty body for %s',
      async (method) => {
        const requestOptions = {
          method,
          body: { test: true },
        } as IHttpRequestOptions | IRequestOptions;
        removeEmptyBody(requestOptions);
        expect(requestOptions.body).toEqual({ test: true });
      },
    );

    test.each(['POST', 'PUT', 'PATCH', 'DELETE'] as IHttpRequestMethods[])(
      'Should not remove empty body for %s',
      async (method) => {
        const requestOptions = {
          method,
          body: {},
        } as IHttpRequestOptions | IRequestOptions;
        removeEmptyBody(requestOptions);
        expect(requestOptions.body).toEqual({});
      },
    );
  });

  describe('binaryToString', () => {
    const ENCODING_SAMPLES = {
      utf8: {
        text: 'Hello, 世界! τεστ мир ⚡️ é à ü ñ',
        buffer: Buffer.from([
          0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0xe4, 0xb8, 0x96, 0xe7, 0x95, 0x8c, 0x21, 0x20,
          0xcf, 0x84, 0xce, 0xb5, 0xcf, 0x83, 0xcf, 0x84, 0x20, 0xd0, 0xbc, 0xd0, 0xb8, 0xd1, 0x80,
          0x20, 0xe2, 0x9a, 0xa1, 0xef, 0xb8, 0x8f, 0x20, 0xc3, 0xa9, 0x20, 0xc3, 0xa0, 0x20, 0xc3,
          0xbc, 0x20, 0xc3, 0xb1,
        ]),
      },

      'iso-8859-15': {
        text: 'Café € personnalité',
        buffer: Buffer.from([
          0x43, 0x61, 0x66, 0xe9, 0x20, 0xa4, 0x20, 0x70, 0x65, 0x72, 0x73, 0x6f, 0x6e, 0x6e, 0x61,
          0x6c, 0x69, 0x74, 0xe9,
        ]),
      },

      latin1: {
        text: 'señor année déjà',
        buffer: Buffer.from([
          0x73, 0x65, 0xf1, 0x6f, 0x72, 0x20, 0x61, 0x6e, 0x6e, 0xe9, 0x65, 0x20, 0x64, 0xe9, 0x6a,
          0xe0,
        ]),
      },

      ascii: {
        text: 'Hello, World! 123',
        buffer: Buffer.from([
          0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0x57, 0x6f, 0x72, 0x6c, 0x64, 0x21, 0x20, 0x31,
          0x32, 0x33,
        ]),
      },

      'windows-1252': {
        text: '€ Smart "quotes" • bullet',
        buffer: Buffer.from([
          0x80, 0x20, 0x53, 0x6d, 0x61, 0x72, 0x74, 0x20, 0x22, 0x71, 0x75, 0x6f, 0x74, 0x65, 0x73,
          0x22, 0x20, 0x95, 0x20, 0x62, 0x75, 0x6c, 0x6c, 0x65, 0x74,
        ]),
      },

      'shift-jis': {
        text: 'こんにちは世界',
        buffer: Buffer.from([
          0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd, 0x90, 0xa2, 0x8a, 0x45,
        ]),
      },

      big5: {
        text: '哈囉世界',
        buffer: Buffer.from([0xab, 0xa2, 0xc5, 0x6f, 0xa5, 0x40, 0xac, 0xc9]),
      },

      'koi8-r': {
        text: 'Привет мир',
        buffer: Buffer.from([0xf0, 0xd2, 0xc9, 0xd7, 0xc5, 0xd4, 0x20, 0xcd, 0xc9, 0xd2]),
      },
    };

    describe('should handle Buffer', () => {
      for (const [encoding, { text, buffer }] of Object.entries(ENCODING_SAMPLES)) {
        test(`with ${encoding}`, async () => {
          const data = await binaryToString(buffer, encoding);
          expect(data).toBe(text);
        });
      }
    });

    describe('should handle streams', () => {
      for (const [encoding, { text, buffer }] of Object.entries(ENCODING_SAMPLES)) {
        test(`with ${encoding}`, async () => {
          const stream = Readable.from(buffer);
          const data = await binaryToString(stream, encoding);
          expect(data).toBe(text);
        });
      }
    });

    describe('should handle IncomingMessage', () => {
      for (const [encoding, { text, buffer }] of Object.entries(ENCODING_SAMPLES)) {
        test(`with ${encoding}`, async () => {
          const response = Readable.from(buffer) as IncomingMessage;
          response.headers = { 'content-type': `application/json;charset=${encoding}` };
          // @ts-expect-error need this hack to fake `instanceof IncomingMessage` checks
          response.__proto__ = IncomingMessage.prototype;
          const data = await binaryToString(response);
          expect(data).toBe(text);
        });
      }
    });
  });
});

describe('isFilePathBlocked', () => {
  test('should return true for static cache dir', () => {
    const filePath = Container.get(InstanceSettings).staticCacheDir;

    expect(isFilePathBlocked(filePath)).toBe(true);
  });
});
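One literal in the redirect test above ('should forward authorization header') is easy to verify by hand: 'Basic dGVzdHVzZXI6dGVzdHBhc3N3b3Jk' is standard HTTP Basic authentication, i.e. base64 over 'user:password'. A one-line check in plain Node (not code from this commit):

// HTTP Basic auth: "Basic " + base64("testuser:testpassword")
const header = 'Basic ' + Buffer.from('testuser:testpassword').toString('base64');
console.log(header); // Basic dGVzdHVzZXI6dGVzdHBhc3N3b3Jk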
@@ -0,0 +1,208 @@
import fs from 'node:fs';
import fsp from 'node:fs/promises';
import { tmpdir } from 'node:os';
import path from 'node:path';

import { FileSystemManager } from '@/binary-data/file-system.manager';
import { isStream } from '@/binary-data/object-store/utils';
import { toFileId, toStream } from '@test/utils';

jest.mock('fs');
jest.mock('fs/promises');

const storagePath = tmpdir();

const fsManager = new FileSystemManager(storagePath);

const toFullFilePath = (fileId: string) => path.join(storagePath, fileId);

const workflowId = 'ObogjVbqpNOQpiyV';
const executionId = '999';
const fileUuid = '71f6209b-5d48-41a2-a224-80d529d8bb32';
const fileId = toFileId(workflowId, executionId, fileUuid);

const otherWorkflowId = 'FHio8ftV6SrCAfPJ';
const otherExecutionId = '888';
const otherFileUuid = '71f6209b-5d48-41a2-a224-80d529d8bb33';
const otherFileId = toFileId(otherWorkflowId, otherExecutionId, otherFileUuid);

const mockBuffer = Buffer.from('Test data');
const mockStream = toStream(mockBuffer);

afterAll(() => {
  jest.restoreAllMocks();
});

describe('store()', () => {
  it('should store a buffer', async () => {
    const metadata = { mimeType: 'text/plain' };

    const result = await fsManager.store(workflowId, executionId, mockBuffer, metadata);

    expect(result.fileSize).toBe(mockBuffer.length);
  });
});

describe('getPath()', () => {
  it('should return a path', async () => {
    const filePath = fsManager.getPath(fileId);

    expect(filePath).toBe(toFullFilePath(fileId));
  });
});

describe('getAsBuffer()', () => {
  it('should return a buffer', async () => {
    fsp.readFile = jest.fn().mockResolvedValue(mockBuffer);
    fsp.access = jest.fn().mockImplementation(async () => {});

    const result = await fsManager.getAsBuffer(fileId);

    expect(Buffer.isBuffer(result)).toBe(true);
    expect(fsp.readFile).toHaveBeenCalledWith(toFullFilePath(fileId));
  });
});

describe('getAsStream()', () => {
  it('should return a stream', async () => {
    fs.createReadStream = jest.fn().mockReturnValue(mockStream);
    fsp.access = jest.fn().mockImplementation(async () => {});

    const stream = await fsManager.getAsStream(fileId);

    expect(isStream(stream)).toBe(true);
    expect(fs.createReadStream).toHaveBeenCalledWith(toFullFilePath(fileId), {
      highWaterMark: undefined,
    });
  });
});

describe('getMetadata()', () => {
  it('should return metadata', async () => {
    const mimeType = 'text/plain';
    const fileName = 'file.txt';

    fsp.readFile = jest.fn().mockResolvedValue(
      JSON.stringify({
        fileSize: 1,
        mimeType,
        fileName,
      }),
    );

    const metadata = await fsManager.getMetadata(fileId);

    expect(metadata).toEqual(expect.objectContaining({ fileSize: 1, mimeType, fileName }));
  });
});

describe('copyByFileId()', () => {
  it('should copy by file ID and return the file ID', async () => {
    fsp.copyFile = jest.fn().mockResolvedValue(undefined);

    // @ts-expect-error - private method
    jest.spyOn(fsManager, 'toFileId').mockReturnValue(otherFileId);

    const targetFileId = await fsManager.copyByFileId(workflowId, executionId, fileId);

    const sourcePath = toFullFilePath(fileId);
    const targetPath = toFullFilePath(targetFileId);

    expect(fsp.copyFile).toHaveBeenCalledWith(sourcePath, targetPath);
  });
});

describe('copyByFilePath()', () => {
  test('should copy by file path and return the file ID and size', async () => {
    const sourceFilePath = tmpdir();
    const metadata = { mimeType: 'text/plain' };

    // @ts-expect-error - private method
    jest.spyOn(fsManager, 'toFileId').mockReturnValue(otherFileId);

    // @ts-expect-error - private method
    jest.spyOn(fsManager, 'getSize').mockReturnValue(mockBuffer.length);

    const targetPath = toFullFilePath(otherFileId);

    fsp.cp = jest.fn().mockResolvedValue(undefined);
    fsp.writeFile = jest.fn().mockResolvedValue(undefined);

    const result = await fsManager.copyByFilePath(
      workflowId,
      executionId,
      sourceFilePath,
      metadata,
    );

    expect(fsp.cp).toHaveBeenCalledWith(sourceFilePath, targetPath);
    expect(fsp.writeFile).toHaveBeenCalledWith(
      `${toFullFilePath(otherFileId)}.metadata`,
      JSON.stringify({ ...metadata, fileSize: mockBuffer.length }),
      { encoding: 'utf-8' },
    );
    expect(result.fileSize).toBe(mockBuffer.length);
  });
});

describe('deleteMany()', () => {
  const rmOptions = {
    force: true,
    recursive: true,
  };

  it('should delete many files by workflow ID and execution ID', async () => {
    const ids = [
      { workflowId, executionId },
      { workflowId: otherWorkflowId, executionId: otherExecutionId },
    ];

    fsp.rm = jest.fn().mockResolvedValue(undefined);

    const promise = fsManager.deleteMany(ids);

    await expect(promise).resolves.not.toThrow();

    expect(fsp.rm).toHaveBeenCalledTimes(2);
    expect(fsp.rm).toHaveBeenNthCalledWith(
      1,
      `${storagePath}/workflows/${workflowId}/executions/${executionId}`,
      rmOptions,
    );
    expect(fsp.rm).toHaveBeenNthCalledWith(
      2,
      `${storagePath}/workflows/${otherWorkflowId}/executions/${otherExecutionId}`,
      rmOptions,
    );
  });

  it('should suppress error on non-existing filepath', async () => {
    const ids = [{ workflowId: 'does-not-exist', executionId: 'does-not-exist' }];

    fsp.rm = jest.fn().mockResolvedValue(undefined);

    const promise = fsManager.deleteMany(ids);

    await expect(promise).resolves.not.toThrow();

    expect(fsp.rm).toHaveBeenCalledTimes(1);
  });
});

describe('rename()', () => {
  it('should rename a file', async () => {
    fsp.rename = jest.fn().mockResolvedValue(undefined);
    fsp.rm = jest.fn().mockResolvedValue(undefined);

    const promise = fsManager.rename(fileId, otherFileId);

    const oldPath = toFullFilePath(fileId);
    const newPath = toFullFilePath(otherFileId);

    await expect(promise).resolves.not.toThrow();

    expect(fsp.rename).toHaveBeenCalledTimes(2);
    expect(fsp.rename).toHaveBeenCalledWith(oldPath, newPath);
    expect(fsp.rename).toHaveBeenCalledWith(`${oldPath}.metadata`, `${newPath}.metadata`);
  });
});
@@ -0,0 +1,134 @@
import { mock } from 'jest-mock-extended';
import fs from 'node:fs/promises';

import { ObjectStoreService } from '@/binary-data/object-store/object-store.service.ee';
import type { MetadataResponseHeaders } from '@/binary-data/object-store/types';
import { isStream } from '@/binary-data/object-store/utils';
import { ObjectStoreManager } from '@/binary-data/object-store.manager';
import { mockInstance, toFileId, toStream } from '@test/utils';

jest.mock('fs/promises');

const objectStoreService = mockInstance(ObjectStoreService);
const objectStoreManager = new ObjectStoreManager(objectStoreService);

const workflowId = 'ObogjVbqpNOQpiyV';
const executionId = '999';
const fileUuid = '71f6209b-5d48-41a2-a224-80d529d8bb32';
const fileId = toFileId(workflowId, executionId, fileUuid);
const prefix = `workflows/${workflowId}/executions/${executionId}/binary_data/`;

const otherWorkflowId = 'FHio8ftV6SrCAfPJ';
const otherExecutionId = '888';
const otherFileUuid = '71f6209b-5d48-41a2-a224-80d529d8bb33';
const otherFileId = toFileId(otherWorkflowId, otherExecutionId, otherFileUuid);

const mockBuffer = Buffer.from('Test data');
const mockStream = toStream(mockBuffer);

beforeAll(() => {
  jest.restoreAllMocks();
});

describe('store()', () => {
  it('should store a buffer', async () => {
    const metadata = { mimeType: 'text/plain' };

    const result = await objectStoreManager.store(workflowId, executionId, mockBuffer, metadata);

    expect(result.fileId.startsWith(prefix)).toBe(true);
    expect(result.fileSize).toBe(mockBuffer.length);
  });
});

describe('getPath()', () => {
  it('should return a path', async () => {
    const path = objectStoreManager.getPath(fileId);

    expect(path).toBe(fileId);
  });
});

describe('getAsBuffer()', () => {
  it('should return a buffer', async () => {
    // @ts-expect-error Overload signature seemingly causing the return type to be misinferred
    objectStoreService.get.mockResolvedValue(mockBuffer);

    const result = await objectStoreManager.getAsBuffer(fileId);

    expect(Buffer.isBuffer(result)).toBe(true);
    expect(objectStoreService.get).toHaveBeenCalledWith(fileId, { mode: 'buffer' });
  });
});

describe('getAsStream()', () => {
  it('should return a stream', async () => {
    objectStoreService.get.mockResolvedValue(mockStream);

    const stream = await objectStoreManager.getAsStream(fileId);

    expect(isStream(stream)).toBe(true);
    expect(objectStoreService.get).toHaveBeenCalledWith(fileId, { mode: 'stream' });
  });
});

describe('getMetadata()', () => {
  it('should return metadata', async () => {
    const mimeType = 'text/plain';
    const fileName = 'file.txt';

    objectStoreService.getMetadata.mockResolvedValue(
      mock<MetadataResponseHeaders>({
        'content-length': '1',
        'content-type': mimeType,
        'x-amz-meta-filename': fileName,
      }),
    );

    const metadata = await objectStoreManager.getMetadata(fileId);

    expect(metadata).toEqual(expect.objectContaining({ fileSize: 1, mimeType, fileName }));
    expect(objectStoreService.getMetadata).toHaveBeenCalledWith(fileId);
  });
});

describe('copyByFileId()', () => {
  it('should copy by file ID and return the file ID', async () => {
    const targetFileId = await objectStoreManager.copyByFileId(workflowId, executionId, fileId);

    expect(targetFileId.startsWith(prefix)).toBe(true);
    expect(objectStoreService.get).toHaveBeenCalledWith(fileId, { mode: 'buffer' });
  });
});

describe('copyByFilePath()', () => {
  test('should copy by file path and return the file ID and size', async () => {
    const sourceFilePath = 'path/to/file/in/filesystem';
    const metadata = { mimeType: 'text/plain' };

    fs.readFile = jest.fn().mockResolvedValue(mockBuffer);

    const result = await objectStoreManager.copyByFilePath(
      workflowId,
      executionId,
      sourceFilePath,
      metadata,
    );

    expect(result.fileId.startsWith(prefix)).toBe(true);
    expect(fs.readFile).toHaveBeenCalledWith(sourceFilePath);
    expect(result.fileSize).toBe(mockBuffer.length);
  });
});

describe('rename()', () => {
  it('should rename a file', async () => {
    const promise = objectStoreManager.rename(fileId, otherFileId);

    await expect(promise).resolves.not.toThrow();

    expect(objectStoreService.get).toHaveBeenCalledWith(fileId, { mode: 'buffer' });
    expect(objectStoreService.getMetadata).toHaveBeenCalledWith(fileId);
    expect(objectStoreService.deleteOne).toHaveBeenCalledWith(fileId);
  });
});
packages/core/src/binary-data/__tests__/utils.test.ts (Normal file, 34 lines)
@@ -0,0 +1,34 @@
import { Readable } from 'node:stream';
import { createGunzip } from 'node:zlib';

import { binaryToBuffer } from '@/binary-data/utils';

describe('BinaryData/utils', () => {
  describe('binaryToBuffer', () => {
    it('should handle buffer objects', async () => {
      const body = Buffer.from('test');
      expect((await binaryToBuffer(body)).toString()).toEqual('test');
    });

    it('should handle valid uncompressed Readable streams', async () => {
      const body = Readable.from(Buffer.from('test'));
      expect((await binaryToBuffer(body)).toString()).toEqual('test');
    });

    it('should handle valid compressed Readable streams', async () => {
      const gunzip = createGunzip();
      const body = Readable.from(
        Buffer.from('1f8b08000000000000032b492d2e01000c7e7fd804000000', 'hex'),
      ).pipe(gunzip);
      expect((await binaryToBuffer(body)).toString()).toEqual('test');
    });

    it('should throw on invalid compressed Readable streams', async () => {
      const gunzip = createGunzip();
      const body = Readable.from(Buffer.from('0001f8b080000000000000000', 'hex')).pipe(gunzip);
      await expect(binaryToBuffer(body)).rejects.toThrow(
        new Error('Failed to decompress response'),
      );
    });
  });
});
@@ -22,7 +22,7 @@ export class BinaryDataService {
     this.mode = config.mode === 'filesystem' ? 'filesystem-v2' : config.mode;
 
     if (config.availableModes.includes('filesystem')) {
-      const { FileSystemManager } = await import('./FileSystem.manager');
+      const { FileSystemManager } = await import('./file-system.manager');
 
       this.managers.filesystem = new FileSystemManager(config.localStoragePath);
       this.managers['filesystem-v2'] = this.managers.filesystem;
@@ -31,8 +31,8 @@ export class BinaryDataService {
     }
 
     if (config.availableModes.includes('s3')) {
-      const { ObjectStoreManager } = await import('./ObjectStore.manager');
-      const { ObjectStoreService } = await import('../ObjectStore/ObjectStore.service.ee');
+      const { ObjectStoreManager } = await import('./object-store.manager');
+      const { ObjectStoreService } = await import('./object-store/object-store.service.ee');
 
       this.managers.s3 = new ObjectStoreManager(Container.get(ObjectStoreService));
 
packages/core/src/binary-data/index.ts (Normal file, 4 lines)
@@ -0,0 +1,4 @@
export * from './binary-data.service';
export * from './types';
export { ObjectStoreService } from './object-store/object-store.service.ee';
export { isStoredMode as isValidNonDefaultMode } from './utils';
@@ -3,9 +3,9 @@ import fs from 'node:fs/promises';
 import type { Readable } from 'node:stream';
 import { v4 as uuid } from 'uuid';
 
+import { ObjectStoreService } from './object-store/object-store.service.ee';
 import type { BinaryData } from './types';
 import { binaryToBuffer } from './utils';
-import { ObjectStoreService } from '../ObjectStore/ObjectStore.service.ee';
 
 @Service()
 export class ObjectStoreManager implements BinaryData.Manager {
@@ -0,0 +1,318 @@
|
||||
import axios from 'axios';
|
||||
import { mock } from 'jest-mock-extended';
|
||||
import { Readable } from 'stream';
|
||||
|
||||
import { ObjectStoreService } from '@/binary-data/object-store/object-store.service.ee';
|
||||
import { writeBlockedMessage } from '@/binary-data/object-store/utils';
|
||||
|
||||
jest.mock('axios');
|
||||
|
||||
const mockAxios = axios as jest.Mocked<typeof axios>;
|
||||
|
||||
const mockBucket = { region: 'us-east-1', name: 'test-bucket' };
|
||||
const mockHost = `s3.${mockBucket.region}.amazonaws.com`;
|
||||
const mockCredentials = { accessKey: 'mock-access-key', accessSecret: 'mock-secret-key' };
|
||||
const mockUrl = `https://${mockHost}/${mockBucket.name}`;
|
||||
const FAILED_REQUEST_ERROR_MESSAGE = 'Request to S3 failed';
|
||||
const mockError = new Error('Something went wrong!');
|
||||
const fileId =
|
||||
'workflows/ObogjVbqpNOQpiyV/executions/999/binary_data/71f6209b-5d48-41a2-a224-80d529d8bb32';
|
||||
const mockBuffer = Buffer.from('Test data');
|
||||
|
||||
const toDeletionXml = (filename: string) => `<Delete>
|
||||
<Object><Key>${filename}</Key></Object>
|
||||
</Delete>`;
|
||||
|
||||
let objectStoreService: ObjectStoreService;
|
||||
|
||||
beforeEach(async () => {
|
||||
objectStoreService = new ObjectStoreService(mock());
|
||||
mockAxios.request.mockResolvedValueOnce({ status: 200 }); // for checkConnection
|
||||
await objectStoreService.init(mockHost, mockBucket, mockCredentials);
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('checkConnection()', () => {
|
||||
it('should send a HEAD request to the correct host', async () => {
|
||||
mockAxios.request.mockResolvedValue({ status: 200 });
|
||||
|
||||
objectStoreService.setReady(false);
|
||||
|
||||
await objectStoreService.checkConnection();
|
||||
|
||||
expect(mockAxios.request).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
method: 'HEAD',
|
||||
url: `https://${mockHost}/${mockBucket.name}`,
|
||||
headers: expect.objectContaining({
|
||||
'X-Amz-Content-Sha256': expect.any(String),
|
||||
'X-Amz-Date': expect.any(String),
|
||||
Authorization: expect.any(String),
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw an error on request failure', async () => {
|
||||
objectStoreService.setReady(false);
|
||||
|
||||
mockAxios.request.mockRejectedValue(mockError);
|
||||
|
||||
const promise = objectStoreService.checkConnection();
|
||||
|
||||
await expect(promise).rejects.toThrowError(FAILED_REQUEST_ERROR_MESSAGE);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMetadata()', () => {
|
||||
it('should send a HEAD request to the correct host and path', async () => {
|
||||
mockAxios.request.mockResolvedValue({ status: 200 });
|
||||
|
||||
await objectStoreService.getMetadata(fileId);
|
||||
|
||||
expect(mockAxios.request).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
method: 'HEAD',
|
||||
url: `${mockUrl}/${fileId}`,
|
||||
headers: expect.objectContaining({
|
||||
Host: mockHost,
|
||||
'X-Amz-Content-Sha256': expect.any(String),
|
||||
'X-Amz-Date': expect.any(String),
|
||||
Authorization: expect.any(String),
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw an error on request failure', async () => {
|
||||
mockAxios.request.mockRejectedValue(mockError);
|
||||
|
||||
const promise = objectStoreService.getMetadata(fileId);
|
||||
|
||||
await expect(promise).rejects.toThrowError(FAILED_REQUEST_ERROR_MESSAGE);
|
||||
});
|
||||
});
|
||||
|
||||
describe('put()', () => {
|
||||
it('should send a PUT request to upload an object', async () => {
|
||||
const metadata = { fileName: 'file.txt', mimeType: 'text/plain' };
|
||||
|
||||
mockAxios.request.mockResolvedValue({ status: 200 });
|
||||
|
||||
await objectStoreService.put(fileId, mockBuffer, metadata);
|
||||
|
||||
expect(mockAxios.request).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
method: 'PUT',
|
||||
url: `${mockUrl}/${fileId}`,
|
||||
headers: expect.objectContaining({
|
||||
'Content-Length': mockBuffer.length,
|
||||
						'Content-MD5': expect.any(String),
						'x-amz-meta-filename': metadata.fileName,
						'Content-Type': metadata.mimeType,
					}),
					data: mockBuffer,
				}),
			);
		});

		it('should block if read-only', async () => {
			objectStoreService.setReadonly(true);

			const metadata = { fileName: 'file.txt', mimeType: 'text/plain' };

			const promise = objectStoreService.put(fileId, mockBuffer, metadata);

			await expect(promise).resolves.not.toThrow();

			const result = await promise;

			expect(result.status).toBe(403);
			expect(result.statusText).toBe('Forbidden');

			expect(result.data).toBe(writeBlockedMessage(fileId));
		});

		it('should throw an error on request failure', async () => {
			const metadata = { fileName: 'file.txt', mimeType: 'text/plain' };

			mockAxios.request.mockRejectedValue(mockError);

			const promise = objectStoreService.put(fileId, mockBuffer, metadata);

			await expect(promise).rejects.toThrowError(FAILED_REQUEST_ERROR_MESSAGE);
		});
	});

	describe('get()', () => {
		it('should send a GET request to download an object as a buffer', async () => {
			const fileId = 'file.txt';

			mockAxios.request.mockResolvedValue({ status: 200, data: Buffer.from('Test content') });

			const result = await objectStoreService.get(fileId, { mode: 'buffer' });

			expect(mockAxios.request).toHaveBeenCalledWith(
				expect.objectContaining({
					method: 'GET',
					url: `${mockUrl}/${fileId}`,
					responseType: 'arraybuffer',
				}),
			);

			expect(Buffer.isBuffer(result)).toBe(true);
		});

		it('should send a GET request to download an object as a stream', async () => {
			mockAxios.request.mockResolvedValue({ status: 200, data: new Readable() });

			const result = await objectStoreService.get(fileId, { mode: 'stream' });

			expect(mockAxios.request).toHaveBeenCalledWith(
				expect.objectContaining({
					method: 'GET',
					url: `${mockUrl}/${fileId}`,
					responseType: 'stream',
				}),
			);

			expect(result instanceof Readable).toBe(true);
		});

		it('should throw an error on request failure', async () => {
			mockAxios.request.mockRejectedValue(mockError);

			const promise = objectStoreService.get(fileId, { mode: 'buffer' });

			await expect(promise).rejects.toThrowError(FAILED_REQUEST_ERROR_MESSAGE);
		});
	});

	describe('deleteOne()', () => {
		it('should send a DELETE request to delete a single object', async () => {
			mockAxios.request.mockResolvedValue({ status: 204 });

			await objectStoreService.deleteOne(fileId);

			expect(mockAxios.request).toHaveBeenCalledWith(
				expect.objectContaining({
					method: 'DELETE',
					url: `${mockUrl}/${fileId}`,
				}),
			);
		});

		it('should throw an error on request failure', async () => {
			mockAxios.request.mockRejectedValue(mockError);

			const promise = objectStoreService.deleteOne(fileId);

			await expect(promise).rejects.toThrowError(FAILED_REQUEST_ERROR_MESSAGE);
		});
	});

	describe('deleteMany()', () => {
		it('should send a POST request to delete multiple objects', async () => {
			const prefix = 'test-dir/';
			const fileName = 'file.txt';

			const mockList = [
				{
					key: fileName,
					lastModified: '2023-09-24T12:34:56Z',
					eTag: 'abc123def456',
					size: 456789,
					storageClass: 'STANDARD',
				},
			];

			objectStoreService.list = jest.fn().mockResolvedValue(mockList);

			mockAxios.request.mockResolvedValue({ status: 204 });

			await objectStoreService.deleteMany(prefix);

			expect(objectStoreService.list).toHaveBeenCalledWith(prefix);
			expect(mockAxios.request).toHaveBeenCalledWith(
				expect.objectContaining({
					method: 'POST',
					url: `${mockUrl}/?delete`,
					headers: expect.objectContaining({
						'Content-Type': 'application/xml',
						'Content-Length': expect.any(Number),
						'Content-MD5': expect.any(String),
					}),
					data: toDeletionXml(fileName),
				}),
			);
		});

		it('should not send a deletion request if no prefix match', async () => {
			objectStoreService.list = jest.fn().mockResolvedValue([]);

			const result = await objectStoreService.deleteMany('non-matching-prefix');

			expect(result).toBeUndefined();
		});

		it('should throw an error on request failure', async () => {
			mockAxios.request.mockRejectedValue(mockError);

			const promise = objectStoreService.deleteMany('test-dir/');

			await expect(promise).rejects.toThrowError(FAILED_REQUEST_ERROR_MESSAGE);
		});
	});

	describe('list()', () => {
		it('should list objects with a common prefix', async () => {
			const prefix = 'test-dir/';

			const mockListPage = {
				contents: [{ key: `${prefix}file1.txt` }, { key: `${prefix}file2.txt` }],
				isTruncated: false,
			};

			objectStoreService.getListPage = jest.fn().mockResolvedValue(mockListPage);

			mockAxios.request.mockResolvedValue({ status: 200 });

			const result = await objectStoreService.list(prefix);

			expect(result).toEqual(mockListPage.contents);
		});

		it('should consolidate pages', async () => {
			const prefix = 'test-dir/';

			const mockFirstListPage = {
				contents: [{ key: `${prefix}file1.txt` }],
				isTruncated: true,
				nextContinuationToken: 'token1',
			};

			const mockSecondListPage = {
				contents: [{ key: `${prefix}file2.txt` }],
				isTruncated: false,
			};

			objectStoreService.getListPage = jest
				.fn()
				.mockResolvedValueOnce(mockFirstListPage)
				.mockResolvedValueOnce(mockSecondListPage);

			mockAxios.request.mockResolvedValue({ status: 200 });

			const result = await objectStoreService.list(prefix);

			expect(result).toEqual([...mockFirstListPage.contents, ...mockSecondListPage.contents]);
		});

		it('should throw an error on request failure', async () => {
			mockAxios.request.mockRejectedValue(mockError);

			const promise = objectStoreService.list('test-dir/');

			await expect(promise).rejects.toThrowError(FAILED_REQUEST_ERROR_MESSAGE);
		});
	});
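The 'should consolidate pages' test above pins down the pagination contract of list(): keep fetching pages while `isTruncated` is set, thread the continuation token through, and concatenate every page's `contents`. A minimal sketch of that loop, assuming a `getListPage(prefix, token)` signature (the real service's signature is not shown in this diff):

// Sketch only — not the actual ObjectStoreService code.
type ListPage<T> = { contents: T[]; isTruncated: boolean; nextContinuationToken?: string };

async function listAll<T>(
	getListPage: (prefix: string, token?: string) => Promise<ListPage<T>>,
	prefix: string,
): Promise<T[]> {
	const items: T[] = [];
	let token: string | undefined;
	do {
		const page = await getListPage(prefix, token);
		items.push(...page.contents); // consolidate every page into one array
		token = page.nextContinuationToken;
		if (!page.isTruncated) break; // last page reached
	} while (token);
	return items;
}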
@@ -18,7 +18,7 @@ import type {
	RequestOptions,
} from './types';
import { isStream, parseXml, writeBlockedMessage } from './utils';
-import type { BinaryData } from '../BinaryData/types';
+import type { BinaryData } from '../types';

@Service()
export class ObjectStoreService {
@@ -1,6 +1,6 @@
import type { AxiosResponseHeaders, ResponseType } from 'axios';

-import type { BinaryData } from '../BinaryData/types';
+import type { BinaryData } from '../types';

export type RawListPage = {
	listBucketResult: {
@@ -1,6 +1,3 @@
-import type { INodeProperties } from 'n8n-workflow';
-import { cronNodeOptions } from 'n8n-workflow';
-
const { NODE_ENV } = process.env;
export const inProduction = NODE_ENV === 'production';
export const inDevelopment = !NODE_ENV || NODE_ENV === 'development';
@@ -11,38 +8,9 @@ export const PLACEHOLDER_EMPTY_WORKFLOW_ID = '__EMPTY__';
export const HTTP_REQUEST_NODE_TYPE = 'n8n-nodes-base.httpRequest';
export const HTTP_REQUEST_TOOL_NODE_TYPE = '@n8n/n8n-nodes-langchain.toolHttpRequest';

export const CUSTOM_NODES_CATEGORY = 'Custom Nodes';

export const RESTRICT_FILE_ACCESS_TO = 'N8N_RESTRICT_FILE_ACCESS_TO';
export const BLOCK_FILE_ACCESS_TO_N8N_FILES = 'N8N_BLOCK_FILE_ACCESS_TO_N8N_FILES';
export const CONFIG_FILES = 'N8N_CONFIG_FILES';
export const BINARY_DATA_STORAGE_PATH = 'N8N_BINARY_DATA_STORAGE_PATH';
-export const UM_EMAIL_TEMPLATES_INVITE = 'N8N_UM_EMAIL_TEMPLATES_INVITE';
-export const UM_EMAIL_TEMPLATES_PWRESET = 'N8N_UM_EMAIL_TEMPLATES_PWRESET';
-
-export const commonPollingParameters: INodeProperties[] = [
-	{
-		displayName: 'Poll Times',
-		name: 'pollTimes',
-		type: 'fixedCollection',
-		typeOptions: {
-			multipleValues: true,
-			multipleValueButtonText: 'Add Poll Time',
-		},
-		default: { item: [{ mode: 'everyMinute' }] },
-		description: 'Time at which polling should occur',
-		placeholder: 'Add Poll Time',
-		options: cronNodeOptions,
-	},
-];
-
-export const commonCORSParameters: INodeProperties[] = [
-	{
-		displayName: 'Allowed Origins (CORS)',
-		name: 'allowedOrigins',
-		type: 'string',
-		default: '*',
-		description:
-			'Comma-separated list of URLs allowed for cross-origin non-preflight requests. Use * (default) to allow all origins.',
-	},
-];
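For context on why `commonPollingParameters` exists at all: polling nodes spread the shared definition into their own `properties`, so every polling trigger shows the same "Poll Times" fixedCollection UI. A hypothetical node description illustrating that pattern — the hunk above only shows the constant leaving constants.ts, not where it landed, so the symbol is declared rather than imported:

import type { INodeProperties, INodeTypeDescription } from 'n8n-workflow';

declare const commonPollingParameters: INodeProperties[]; // relocated elsewhere in this commit

const description: Partial<INodeTypeDescription> = {
	displayName: 'Example Poll Trigger', // hypothetical node, for illustration only
	name: 'examplePollTrigger',
	polling: true, // tells n8n to call the node's poll() method on a schedule
	properties: [...commonPollingParameters],
};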
@@ -2,7 +2,7 @@ import { Container } from '@n8n/di';
import type { ICredentialDataDecryptedObject, ICredentialsEncrypted } from 'n8n-workflow';
import { ApplicationError, ICredentials, jsonParse } from 'n8n-workflow';

-import { Cipher } from './Cipher';
+import { Cipher } from '@/encryption/cipher';

export class Credentials<
	T extends object = ICredentialDataDecryptedObject,
37 packages/core/src/encryption/__tests__/cipher.test.ts Normal file
@@ -0,0 +1,37 @@
import { Container } from '@n8n/di';

import { InstanceSettings } from '@/instance-settings';
import { mockInstance } from '@test/utils';

import { Cipher } from '../cipher';

describe('Cipher', () => {
	mockInstance(InstanceSettings, { encryptionKey: 'test_key' });
	const cipher = Container.get(Cipher);

	describe('encrypt', () => {
		it('should encrypt strings', () => {
			const encrypted = cipher.encrypt('random-string');
			const decrypted = cipher.decrypt(encrypted);
			expect(decrypted).toEqual('random-string');
		});

		it('should encrypt objects', () => {
			const encrypted = cipher.encrypt({ key: 'value' });
			const decrypted = cipher.decrypt(encrypted);
			expect(decrypted).toEqual('{"key":"value"}');
		});
	});

	describe('decrypt', () => {
		it('should decrypt string', () => {
			const decrypted = cipher.decrypt('U2FsdGVkX194VEoX27o3+y5jUd1JTTmVwkOKjVhB6Jg=');
			expect(decrypted).toEqual('random-string');
		});

		it('should not try to decrypt if the input is shorter than 16 bytes', () => {
			const decrypted = cipher.decrypt('U2FsdGVkX194VEo');
			expect(decrypted).toEqual('');
		});
	});
});
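The 16-byte guard in the last test follows from the CryptoJS-compatible payload layout: the base64 input decodes to "Salted__" (8 bytes, the RANDOM_BYTES constant in the Cipher source below) plus an 8-byte salt before any ciphertext, so anything shorter than 16 bytes cannot contain a payload. A sketch of that format — an illustration written against the OpenSSL/CryptoJS convention, not a copy of the Cipher class, and the aes-256-cbc choice is an assumption:

import { createDecipheriv, createHash } from 'node:crypto';

// EVP_BytesToKey-style derivation (MD5 rounds), as used by CryptoJS/OpenSSL enc
function deriveKeyAndIv(password: string, salt: Buffer): { key: Buffer; iv: Buffer } {
	let block = Buffer.alloc(0);
	const chunks: Buffer[] = [];
	let total = 0;
	while (total < 48) {
		block = createHash('md5').update(block).update(password).update(salt).digest();
		chunks.push(block);
		total += block.length;
	}
	const keyIv = Buffer.concat(chunks);
	return { key: keyIv.subarray(0, 32), iv: keyIv.subarray(32, 48) };
}

function decryptSalted(encoded: string, password: string): string {
	const raw = Buffer.from(encoded, 'base64');
	if (raw.length < 16) return ''; // too short to hold "Salted__" + salt
	const salt = raw.subarray(8, 16);
	const { key, iv } = deriveKeyAndIv(password, salt);
	const decipher = createDecipheriv('aes-256-cbc', key, iv);
	return decipher.update(raw.subarray(16)).toString('utf8') + decipher.final('utf8');
}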
@@ -1,7 +1,7 @@
import { Service } from '@n8n/di';
import { createHash, createCipheriv, createDecipheriv, randomBytes } from 'crypto';

-import { InstanceSettings } from './InstanceSettings';
+import { InstanceSettings } from '@/instance-settings';

// Data encrypted by CryptoJS always starts with these bytes
const RANDOM_BYTES = Buffer.from('53616c7465645f5f', 'hex');
1 packages/core/src/encryption/index.ts Normal file
@@ -0,0 +1 @@
export { Cipher } from './cipher';
162 packages/core/src/errors/__tests__/error-reporter.test.ts Normal file
@@ -0,0 +1,162 @@
import { QueryFailedError } from '@n8n/typeorm';
import type { ErrorEvent } from '@sentry/types';
import { AxiosError } from 'axios';
import { mock } from 'jest-mock-extended';
import { ApplicationError } from 'n8n-workflow';

import type { Logger } from '@/logging/logger';

import { ErrorReporter } from '../error-reporter';

jest.mock('@sentry/node', () => ({
	init: jest.fn(),
	setTag: jest.fn(),
	captureException: jest.fn(),
	Integrations: {},
}));

jest.spyOn(process, 'on');

describe('ErrorReporter', () => {
	const errorReporter = new ErrorReporter(mock());
	const event = {} as ErrorEvent;

	describe('beforeSend', () => {
		it('should ignore errors with level warning', async () => {
			const originalException = new ApplicationError('test');
			originalException.level = 'warning';

			expect(await errorReporter.beforeSend(event, { originalException })).toEqual(null);
		});

		it('should keep events with a cause with error level', async () => {
			const cause = new Error('cause-error');
			const originalException = new ApplicationError('test', cause);

			expect(await errorReporter.beforeSend(event, { originalException })).toEqual(event);
		});

		it('should ignore events with error cause with warning level', async () => {
			const cause: Error & { level?: 'warning' } = new Error('cause-error');
			cause.level = 'warning';
			const originalException = new ApplicationError('test', cause);

			expect(await errorReporter.beforeSend(event, { originalException })).toEqual(null);
		});

		it('should set level, extra, and tags from ApplicationError', async () => {
			const originalException = new ApplicationError('Test error', {
				level: 'error',
				extra: { foo: 'bar' },
				tags: { tag1: 'value1' },
			});

			const testEvent = {} as ErrorEvent;

			const result = await errorReporter.beforeSend(testEvent, { originalException });

			expect(result).toEqual({
				level: 'error',
				extra: { foo: 'bar' },
				tags: { tag1: 'value1' },
			});
		});

		it('should deduplicate errors with same stack trace', async () => {
			const originalException = new Error();

			const firstResult = await errorReporter.beforeSend(event, { originalException });
			expect(firstResult).toEqual(event);

			const secondResult = await errorReporter.beforeSend(event, { originalException });
			expect(secondResult).toBeNull();
		});

		it('should handle Promise rejections', async () => {
			const originalException = Promise.reject(new Error());

			const result = await errorReporter.beforeSend(event, { originalException });

			expect(result).toEqual(event);
		});

		test.each([
			['undefined', undefined],
			['null', null],
			['an AxiosError', new AxiosError()],
			['a rejected Promise with AxiosError', Promise.reject(new AxiosError())],
			[
				'a QueryFailedError with SQLITE_FULL',
				new QueryFailedError('', [], new Error('SQLITE_FULL')),
			],
			[
				'a QueryFailedError with SQLITE_IOERR',
				new QueryFailedError('', [], new Error('SQLITE_IOERR')),
			],
			['an ApplicationError with "warning" level', new ApplicationError('', { level: 'warning' })],
			[
				'an Error with ApplicationError as cause with "warning" level',
				new Error('', { cause: new ApplicationError('', { level: 'warning' }) }),
			],
		])('should ignore if originalException is %s', async (_, originalException) => {
			const result = await errorReporter.beforeSend(event, { originalException });
			expect(result).toBeNull();
		});

		describe('beforeSendFilter', () => {
			const newErrorReportedWithBeforeSendFilter = (beforeSendFilter: jest.Mock) => {
				const errorReporter = new ErrorReporter(mock());
				// @ts-expect-error - beforeSendFilter is private
				errorReporter.beforeSendFilter = beforeSendFilter;
				return errorReporter;
			};

			it('should filter out based on the beforeSendFilter', async () => {
				const beforeSendFilter = jest.fn().mockReturnValue(true);
				const errorReporter = newErrorReportedWithBeforeSendFilter(beforeSendFilter);
				const hint = { originalException: new Error() };

				const result = await errorReporter.beforeSend(event, hint);

				expect(result).toBeNull();
				expect(beforeSendFilter).toHaveBeenCalledWith(event, hint);
			});

			it('should not filter out when beforeSendFilter returns false', async () => {
				const beforeSendFilter = jest.fn().mockReturnValue(false);
				const errorReporter = newErrorReportedWithBeforeSendFilter(beforeSendFilter);
				const hint = { originalException: new Error() };

				const result = await errorReporter.beforeSend(event, hint);

				expect(result).toEqual(event);
				expect(beforeSendFilter).toHaveBeenCalledWith(event, hint);
			});
		});
	});

	describe('error', () => {
		let error: ApplicationError;
		let logger: Logger;
		let errorReporter: ErrorReporter;
		const metadata = undefined;

		beforeEach(() => {
			error = new ApplicationError('Test error');
			logger = mock<Logger>();
			errorReporter = new ErrorReporter(logger);
		});

		it('should include stack trace for error-level `ApplicationError`', () => {
			error.level = 'error';
			errorReporter.error(error);
			expect(logger.error).toHaveBeenCalledWith(`Test error\n${error.stack}\n`, metadata);
		});

		it('should exclude stack trace for warning-level `ApplicationError`', () => {
			error.level = 'warning';
			errorReporter.error(error);
			expect(logger.error).toHaveBeenCalledWith('Test error', metadata);
		});
	});
});
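The deduplication test above (same exception accepted once, then dropped) implies fingerprinting by stack trace. A rough sketch of that bookkeeping — the real ErrorReporter's storage may differ (e.g. a bounded cache); the Set here is an assumption:

import { createHash } from 'node:crypto';

const seenErrors = new Set<string>();

function shouldReport(error: Error): boolean {
	const fingerprint = createHash('sha1')
		.update(error.stack ?? error.message)
		.digest('hex');
	if (seenErrors.has(fingerprint)) return false; // same stack seen before: drop
	seenErrors.add(fingerprint);
	return true;
}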
@@ -6,8 +6,8 @@ import { AxiosError } from 'axios';
import { ApplicationError, ExecutionCancelledError, type ReportingOptions } from 'n8n-workflow';
import { createHash } from 'node:crypto';

-import type { InstanceType } from './InstanceSettings';
-import { Logger } from './logging/logger';
+import type { InstanceType } from '@/instance-settings';
+import { Logger } from '@/logging/logger';

type ErrorReporterInitOptions = {
	serverType: InstanceType | 'task_runner';
@@ -5,3 +5,5 @@ export { InvalidManagerError } from './invalid-manager.error';
export { InvalidExecutionMetadataError } from './invalid-execution-metadata.error';
export { UnrecognizedCredentialTypeError } from './unrecognized-credential-type.error';
export { UnrecognizedNodeTypeError } from './unrecognized-node-type.error';
+
+export { ErrorReporter } from './error-reporter';
@@ -1,6 +1,6 @@
import { ApplicationError } from 'n8n-workflow';

-import { CONFIG_MODES } from '../BinaryData/utils';
+import { CONFIG_MODES } from '../binary-data/utils';

export class InvalidModeError extends ApplicationError {
	constructor() {
@@ -12,11 +12,12 @@ import type {
} from 'n8n-workflow';
import { LoggerProxy, TriggerCloseError, WorkflowActivationError } from 'n8n-workflow';

-import { ActiveWorkflows } from '@/ActiveWorkflows';
-import type { ErrorReporter } from '@/error-reporter';
-import type { PollContext } from '@/node-execution-context';
-import type { ScheduledTaskManager } from '@/ScheduledTaskManager';
-import type { TriggersAndPollers } from '@/TriggersAndPollers';
+import type { ErrorReporter } from '@/errors/error-reporter';
+
+import { ActiveWorkflows } from '../active-workflows';
+import type { PollContext } from '../node-execution-context';
+import type { ScheduledTaskManager } from '../scheduled-task-manager';
+import type { TriggersAndPollers } from '../triggers-and-pollers';

describe('ActiveWorkflows', () => {
	const workflowId = 'test-workflow-id';
2149 packages/core/src/execution-engine/__tests__/routing-node.test.ts Normal file
File diff suppressed because it is too large
@@ -0,0 +1,71 @@
import { mock } from 'jest-mock-extended';
import type { Workflow } from 'n8n-workflow';

import type { InstanceSettings } from '@/instance-settings';

import { ScheduledTaskManager } from '../scheduled-task-manager';

describe('ScheduledTaskManager', () => {
	const instanceSettings = mock<InstanceSettings>({ isLeader: true });
	const workflow = mock<Workflow>({ timezone: 'GMT' });
	const everyMinute = '0 * * * * *';
	const onTick = jest.fn();

	let scheduledTaskManager: ScheduledTaskManager;

	beforeEach(() => {
		jest.clearAllMocks();
		jest.useFakeTimers();
		scheduledTaskManager = new ScheduledTaskManager(instanceSettings);
	});

	it('should throw when workflow timezone is invalid', () => {
		expect(() =>
			scheduledTaskManager.registerCron(
				mock<Workflow>({ timezone: 'somewhere' }),
				everyMinute,
				onTick,
			),
		).toThrow('Invalid timezone.');
	});

	it('should throw when cron expression is invalid', () => {
		expect(() =>
			//@ts-expect-error invalid cron expression is a type-error
			scheduledTaskManager.registerCron(workflow, 'invalid-cron-expression', onTick),
		).toThrow();
	});

	it('should register valid CronJobs', async () => {
		scheduledTaskManager.registerCron(workflow, everyMinute, onTick);

		expect(onTick).not.toHaveBeenCalled();
		jest.advanceTimersByTime(10 * 60 * 1000); // 10 minutes
		expect(onTick).toHaveBeenCalledTimes(10);
	});

	it('should not invoke on follower instances', async () => {
		scheduledTaskManager = new ScheduledTaskManager(mock<InstanceSettings>({ isLeader: false }));
		scheduledTaskManager.registerCron(workflow, everyMinute, onTick);

		expect(onTick).not.toHaveBeenCalled();
		jest.advanceTimersByTime(10 * 60 * 1000); // 10 minutes
		expect(onTick).not.toHaveBeenCalled();
	});

	it('should deregister CronJobs for a workflow', async () => {
		scheduledTaskManager.registerCron(workflow, everyMinute, onTick);
		scheduledTaskManager.registerCron(workflow, everyMinute, onTick);
		scheduledTaskManager.registerCron(workflow, everyMinute, onTick);

		expect(scheduledTaskManager.cronJobs.get(workflow.id)?.length).toBe(3);

		scheduledTaskManager.deregisterCrons(workflow.id);

		expect(scheduledTaskManager.cronJobs.get(workflow.id)?.length).toBe(0);

		expect(onTick).not.toHaveBeenCalled();
		jest.advanceTimersByTime(10 * 60 * 1000); // 10 minutes
		expect(onTick).not.toHaveBeenCalled();
	});
});
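The follower test above shows the interesting design point: crons are registered on every instance, but ticks only fire on the leader, so a promoted follower picks up the schedule without re-registration. A condensed sketch of that gate, with field and method names assumed rather than taken from the real class:

import { CronJob } from 'cron';

class LeaderGatedCrons {
	readonly cronJobs = new Map<string, CronJob[]>();

	constructor(private readonly instanceSettings: { isLeader: boolean }) {}

	registerCron(workflowId: string, cronExpression: string, onTick: () => void) {
		const job = new CronJob(cronExpression, () => {
			// followers keep the schedule but stay silent until promoted
			if (this.instanceSettings.isLeader) onTick();
		});
		job.start();
		const jobs = this.cronJobs.get(workflowId) ?? [];
		jobs.push(job);
		this.cronJobs.set(workflowId, jobs);
	}
}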
@@ -0,0 +1,68 @@
import type { SSHCredentials } from 'n8n-workflow';
import { Client } from 'ssh2';

import { SSHClientsManager } from '../ssh-clients-manager';

describe('SSHClientsManager', () => {
	const credentials: SSHCredentials = {
		sshAuthenticateWith: 'password',
		sshHost: 'example.com',
		sshPort: 22,
		sshUser: 'username',
		sshPassword: 'password',
	};

	let sshClientsManager: SSHClientsManager;
	const connectSpy = jest.spyOn(Client.prototype, 'connect');
	const endSpy = jest.spyOn(Client.prototype, 'end');

	beforeEach(() => {
		jest.clearAllMocks();
		jest.useFakeTimers();

		sshClientsManager = new SSHClientsManager();
		connectSpy.mockImplementation(function (this: Client) {
			this.emit('ready');
			return this;
		});
	});

	it('should create a new SSH client', async () => {
		const client = await sshClientsManager.getClient(credentials);

		expect(client).toBeInstanceOf(Client);
	});

	it('should not create a new SSH client when connect fails', async () => {
		connectSpy.mockImplementation(function (this: Client) {
			throw new Error('Failed to connect');
		});
		await expect(sshClientsManager.getClient(credentials)).rejects.toThrow('Failed to connect');
	});

	it('should reuse an existing SSH client', async () => {
		const client1 = await sshClientsManager.getClient(credentials);
		const client2 = await sshClientsManager.getClient(credentials);

		expect(client1).toBe(client2);
	});

	it('should close all SSH connections on process exit', async () => {
		await sshClientsManager.getClient(credentials);
		sshClientsManager.onShutdown();

		expect(endSpy).toHaveBeenCalledTimes(1);
	});

	it('should cleanup stale SSH connections', async () => {
		await sshClientsManager.getClient({ ...credentials, sshHost: 'host1' });
		await sshClientsManager.getClient({ ...credentials, sshHost: 'host2' });
		await sshClientsManager.getClient({ ...credentials, sshHost: 'host3' });

		jest.advanceTimersByTime(6 * 60 * 1000);
		sshClientsManager.cleanupStaleConnections();

		expect(endSpy).toHaveBeenCalledTimes(3);
		expect(sshClientsManager.clients.size).toBe(0);
	});
});
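These tests outline a credentials-keyed connection pool: identical credentials yield the same client, and idle clients are evicted. A simplified sketch of that shape — the 5-minute stale window is inferred from the 6-minute timer advance in the test, so treat it (and the key derivation) as assumptions:

type PooledClient<C> = { client: C; lastUsed: number };

class ClientPool<C> {
	readonly clients = new Map<string, PooledClient<C>>();

	constructor(
		private readonly create: () => C,
		private readonly destroy: (client: C) => void,
		private readonly staleAfterMs = 5 * 60 * 1000,
	) {}

	getClient(credentials: object): C {
		const key = JSON.stringify(credentials); // same credentials -> same client
		const existing = this.clients.get(key);
		if (existing) {
			existing.lastUsed = Date.now();
			return existing.client;
		}
		const client = this.create();
		this.clients.set(key, { client, lastUsed: Date.now() });
		return client;
	}

	cleanupStaleConnections() {
		const cutoff = Date.now() - this.staleAfterMs;
		for (const [key, { client, lastUsed }] of this.clients) {
			if (lastUsed < cutoff) {
				this.destroy(client); // e.g. ssh2's client.end()
				this.clients.delete(key);
			}
		}
	}
}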
@@ -0,0 +1,163 @@
import { mock } from 'jest-mock-extended';
import { ApplicationError } from 'n8n-workflow';
import type {
	Workflow,
	INode,
	INodeExecutionData,
	IPollFunctions,
	IWorkflowExecuteAdditionalData,
	INodeType,
	INodeTypes,
	ITriggerFunctions,
	WorkflowHooks,
	IRun,
} from 'n8n-workflow';

import { TriggersAndPollers } from '../triggers-and-pollers';

describe('TriggersAndPollers', () => {
	const node = mock<INode>();
	const nodeType = mock<INodeType>({
		trigger: undefined,
		poll: undefined,
	});
	const nodeTypes = mock<INodeTypes>();
	const workflow = mock<Workflow>({ nodeTypes });
	const hookFunctions = mock<WorkflowHooks['hookFunctions']>({
		sendResponse: [],
		workflowExecuteAfter: [],
	});
	const additionalData = mock<IWorkflowExecuteAdditionalData>({
		hooks: {
			hookFunctions,
		},
	});
	const triggersAndPollers = new TriggersAndPollers();

	beforeEach(() => {
		jest.clearAllMocks();
		nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
	});

	describe('runTrigger()', () => {
		const triggerFunctions = mock<ITriggerFunctions>();
		const getTriggerFunctions = jest.fn().mockReturnValue(triggerFunctions);
		const triggerFn = jest.fn();
		const mockEmitData: INodeExecutionData[][] = [[{ json: { data: 'test' } }]];

		const runTriggerHelper = async (mode: 'manual' | 'trigger' = 'trigger') =>
			await triggersAndPollers.runTrigger(
				workflow,
				node,
				getTriggerFunctions,
				additionalData,
				mode,
				'init',
			);

		it('should throw error if node type does not have trigger function', async () => {
			await expect(runTriggerHelper()).rejects.toThrow(ApplicationError);
		});

		it('should call trigger function in regular mode', async () => {
			nodeType.trigger = triggerFn;
			triggerFn.mockResolvedValue({ test: true });

			const result = await runTriggerHelper();

			expect(triggerFn).toHaveBeenCalled();
			expect(result).toEqual({ test: true });
		});

		describe('manual mode', () => {
			const getMockTriggerFunctions = () => getTriggerFunctions.mock.results[0]?.value;

			beforeEach(() => {
				nodeType.trigger = triggerFn;
				triggerFn.mockResolvedValue({ workflowId: '123' });
			});

			it('should handle promise resolution', async () => {
				const result = await runTriggerHelper('manual');

				expect(result?.manualTriggerResponse).toBeInstanceOf(Promise);
				getMockTriggerFunctions()?.emit?.(mockEmitData);
			});

			it('should handle error emission', async () => {
				const testError = new Error('Test error');
				const result = await runTriggerHelper('manual');

				getMockTriggerFunctions()?.emitError?.(testError);
				await expect(result?.manualTriggerResponse).rejects.toThrow(testError);
			});

			it('should handle response promise', async () => {
				const responsePromise = { resolve: jest.fn(), reject: jest.fn() };
				await runTriggerHelper('manual');

				getMockTriggerFunctions()?.emit?.(mockEmitData, responsePromise);

				expect(hookFunctions.sendResponse?.length).toBe(1);
				await hookFunctions.sendResponse![0]?.({ testResponse: true });
				expect(responsePromise.resolve).toHaveBeenCalledWith({ testResponse: true });
			});

			it('should handle both response and done promises', async () => {
				const responsePromise = { resolve: jest.fn(), reject: jest.fn() };
				const donePromise = { resolve: jest.fn(), reject: jest.fn() };
				const mockRunData = mock<IRun>({ data: { resultData: { runData: {} } } });

				await runTriggerHelper('manual');
				getMockTriggerFunctions()?.emit?.(mockEmitData, responsePromise, donePromise);

				await hookFunctions.sendResponse![0]?.({ testResponse: true });
				expect(responsePromise.resolve).toHaveBeenCalledWith({ testResponse: true });

				await hookFunctions.workflowExecuteAfter?.[0]?.(mockRunData, {});
				expect(donePromise.resolve).toHaveBeenCalledWith(mockRunData);
			});
		});
	});

	describe('runPoll()', () => {
		const pollFunctions = mock<IPollFunctions>();
		const pollFn = jest.fn();

		const runPollHelper = async () =>
			await triggersAndPollers.runPoll(workflow, node, pollFunctions);

		it('should throw error if node type does not have poll function', async () => {
			await expect(runPollHelper()).rejects.toThrow(ApplicationError);
		});

		it('should call poll function and return result', async () => {
			const mockPollResult: INodeExecutionData[][] = [[{ json: { data: 'test' } }]];
			nodeType.poll = pollFn;
			pollFn.mockResolvedValue(mockPollResult);

			const result = await runPollHelper();

			expect(pollFn).toHaveBeenCalled();
			expect(result).toBe(mockPollResult);
		});

		it('should return null if poll function returns no data', async () => {
			nodeType.poll = pollFn;
			pollFn.mockResolvedValue(null);

			const result = await runPollHelper();

			expect(pollFn).toHaveBeenCalled();
			expect(result).toBeNull();
		});

		it('should propagate errors from poll function', async () => {
			nodeType.poll = pollFn;
			pollFn.mockRejectedValue(new Error('Poll function failed'));

			await expect(runPollHelper()).rejects.toThrow('Poll function failed');
			expect(pollFn).toHaveBeenCalled();
		});
	});
});
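The manual-mode tests above probe one contract: runTrigger hands back a `manualTriggerResponse` promise that settles when the trigger's emit()/emitError() fires. A stripped-down sketch of that wiring, mirroring only the observable behavior the tests assert (the real TriggersAndPollers implementation differs):

function makeManualTriggerResponse<T>(): {
	manualTriggerResponse: Promise<T>;
	emit: (data: T) => void;
	emitError: (error: Error) => void;
} {
	let resolve!: (value: T) => void;
	let reject!: (error: Error) => void;
	const manualTriggerResponse = new Promise<T>((res, rej) => {
		resolve = res;
		reject = rej;
	});
	return {
		manualTriggerResponse,
		emit: (data) => resolve(data), // first emission settles the promise
		emitError: (error) => reject(error), // error emission rejects it
	};
}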
File diff suppressed because it is too large
@@ -0,0 +1,669 @@
{
  "name": "My workflow 105",
  "nodes": [
    {
      "parameters": {},
      "id": "a94bc1fb-1f39-404b-b149-a76c4fbaed25",
      "name": "When clicking \"Execute Workflow\"",
      "type": "n8n-nodes-base.manualTrigger",
      "typeVersion": 1,
      "position": [-60, 780]
    },
    {
      "parameters": {
        "fields": {
          "values": [{ "name": "originalName", "stringValue": "={{ $('Mock Data').item.json.name }}" }]
        },
        "options": {}
      },
      "id": "6ba26bdf-91e2-4f18-8f4c-09e98aa4a9df",
      "name": "Success",
      "type": "n8n-nodes-base.set",
      "typeVersion": 3.2,
      "position": [820, 1180]
    },
    {
      "parameters": {
        "fields": {
          "values": [{ "name": "originalName", "stringValue": "={{ $('Mock Data').item.json.name }}" }]
        },
        "options": {}
      },
      "id": "e3d1eadf-0994-4806-97ce-c5c5f673c624",
      "name": "Error",
      "type": "n8n-nodes-base.set",
      "typeVersion": 3.2,
      "position": [820, 1360]
    },
    {
      "parameters": {
        "jsCode": "return [\n {\n \"id\": \"23423532\",\n \"name\": \"Jay Gatsby\",\n \"email\": \"gatsby@west-egg.com\",\n \"notes\": \"Keeps asking about a green light??\",\n \"country\": \"US\",\n \"created\": \"1925-04-10\"\n },\n {\n \"id\": \"23423533\",\n \"name\": \"José Arcadio Buendía\",\n \"email\": \"jab@macondo.co\",\n \"notes\": \"Lots of people named after him. Very confusing\",\n \"country\": \"CO\",\n \"created\": \"1967-05-05\"\n },\n {\n \"id\": \"23423534\",\n \"name\": \"Max Sendak\",\n \"email\": \"info@in-and-out-of-weeks.org\",\n \"notes\": \"Keeps rolling his terrible eyes\",\n \"country\": \"US\",\n \"created\": \"1963-04-09\"\n },\n {\n \"id\": \"23423535\",\n \"name\": \"Zaphod Beeblebrox\",\n \"email\": \"captain@heartofgold.com\",\n \"notes\": \"Felt like I was talking to more than one person\",\n \"country\": null,\n \"created\": \"1979-10-12\"\n },\n {\n \"id\": \"23423536\",\n \"name\": \"Edmund Pevensie\",\n \"email\": \"edmund@narnia.gov\",\n \"notes\": \"Passionate sailor\",\n \"country\": \"UK\",\n \"created\": \"1950-10-16\"\n }\n]"
      },
      "id": "01adfc2d-141d-4843-b2d6-04115a476bc1",
      "name": "Mock Data",
      "type": "n8n-nodes-base.code",
      "typeVersion": 2,
      "position": [160, 780]
    },
    {
      "parameters": {
        "content": "## On Error: Continue (using error output)",
        "height": 414,
        "width": 564
      },
      "id": "8ca689eb-7910-43ad-bd10-fae35a8fc203",
      "name": "Sticky Note",
      "type": "n8n-nodes-base.stickyNote",
      "typeVersion": 1,
      "position": [460, 1100]
    },
    {
      "parameters": {
        "content": "## Continue On Fail (deprecated)",
        "height": 279,
        "width": 564
      },
      "id": "a17460d6-b0c0-432d-ac6f-8ff684357c8d",
      "name": "Sticky Note1",
      "type": "n8n-nodes-base.stickyNote",
      "typeVersion": 1,
      "position": [460, 460]
    },
    {
      "parameters": {
        "fields": {
          "values": [{ "name": "originalName", "stringValue": "={{ $('Mock Data').item.json.name }}" }]
        },
        "options": {}
      },
      "id": "46df5463-4289-4e61-9f80-87e035931bda",
      "name": "Combined",
      "type": "n8n-nodes-base.set",
      "typeVersion": 3.2,
      "position": [800, 560]
    },
    {
      "parameters": {
        "mode": "runOnceForEachItem",
        "jsCode": "// Add a new field called 'myNewField' to the JSON of the item\n$input.item.json.myNewField = 1;\n\nif ($input.item.json.country === 'US') {\n throw new Error('This is an error');\n}\n\nreturn $input.item;"
      },
      "id": "a4708520-aaca-4618-b7a2-94da268fba37",
      "name": "Throw Error",
      "type": "n8n-nodes-base.code",
      "typeVersion": 2,
      "position": [480, 1280],
      "errorOutput": true,
      "onError": "continueErrorOutput"
    },
    {
      "parameters": {
        "content": "## On Error: Continue",
        "height": 279,
        "width": 564
      },
      "id": "f0a450cd-4124-490d-964f-a71b645f770c",
      "name": "Sticky Note2",
      "type": "n8n-nodes-base.stickyNote",
      "typeVersion": 1,
      "position": [460, 780]
    },
    {
      "parameters": {
        "mode": "runOnceForEachItem",
        "jsCode": "// Add a new field called 'myNewField' to the JSON of the item\n$input.item.json.myNewField = 1;\n\nif ($input.item.json.country === 'US') {\n throw new Error('This is an error');\n}\n\nreturn $input.item;"
      },
      "id": "823f12e6-cbfc-4545-8505-fab158d1effe",
      "name": "Throw Error2",
      "type": "n8n-nodes-base.code",
      "typeVersion": 2,
      "position": [500, 880],
      "onError": "continueRegularOutput"
    },
    {
      "parameters": {
        "fields": {
          "values": [{ "name": "originalName", "stringValue": "={{ $('Mock Data').item.json.name }}" }]
        },
        "options": {}
      },
      "id": "8f88d130-9a13-4236-81c0-157f8a8990c0",
      "name": "Combined1",
      "type": "n8n-nodes-base.set",
      "typeVersion": 3.2,
      "position": [800, 880]
    },
    {
      "parameters": {
        "mode": "runOnceForEachItem",
        "jsCode": "// Add a new field called 'myNewField' to the JSON of the item\n$input.item.json.myNewField = 1;\n\nif ($input.item.json.country === 'US') {\n throw new Error('This is an error');\n}\n\nreturn $input.item;"
      },
      "id": "1a3f4beb-0d1e-44fe-a411-5bd1096ffd74",
      "name": "Throw Error1",
      "type": "n8n-nodes-base.code",
      "typeVersion": 2,
      "position": [500, 560],
      "continueOnFail": true
    },
    {
      "parameters": {
        "fields": {
          "values": [{ "name": "originalName", "stringValue": "={{ $('Mock Data').item.json.name }}" }]
        },
        "options": {}
      },
      "id": "c617a3d7-15e3-49b4-a7dd-d45c5e059a22",
      "name": "Success1",
      "type": "n8n-nodes-base.set",
      "typeVersion": 3.2,
      "position": [820, 1640]
    },
    {
      "parameters": {
        "content": "## On Error: Continue (using error output) + Make sure error data gets removed",
        "height": 509.71047006830065,
        "width": 1183.725293692246
      },
      "id": "046de2cf-970a-4925-b87d-16e8cca511fd",
      "name": "Sticky Note3",
      "type": "n8n-nodes-base.stickyNote",
      "typeVersion": 1,
      "position": [460, 1560]
    },
    {
      "parameters": {
        "mode": "runOnceForEachItem",
        "jsCode": "// Add a new field called 'myNewField' to the JSON of the item\n$input.item.json.myNewField = 1;\n\nif ($input.item.json.country === 'US') {\n throw new Error('This is an error');\n}\n\nreturn $input.item;"
      },
      "id": "9ec21de1-dfca-4fff-b5a7-a56364239d7b",
      "name": "Throw Error3",
      "type": "n8n-nodes-base.code",
      "typeVersion": 2,
      "position": [480, 1740],
      "errorOutput": true,
      "onError": "continueErrorOutput"
    },
    {
      "parameters": {
        "options": {}
      },
      "id": "e3605953-75cf-4036-99f7-05e3971a6a75",
      "name": "Edit Fields",
      "type": "n8n-nodes-base.set",
      "typeVersion": 3.2,
      "position": [1040, 1820],
      "onError": "continueErrorOutput"
    },
    {
      "parameters": {
        "fields": {
          "values": [{ "name": "originalName", "stringValue": "={{ $('Mock Data').item.json.name }}" }]
        },
        "options": {}
      },
      "id": "a71cfb77-adfd-4c77-9a8e-7e58cbd0931b",
      "name": "Success2",
      "type": "n8n-nodes-base.set",
      "typeVersion": 3.2,
      "position": [1320, 1680]
    },
    {
      "parameters": {
        "fields": {
          "values": [{ "name": "originalName", "stringValue": "={{ $('Mock Data').item.json.name }}" }]
        },
        "options": {}
      },
      "id": "ea9d02e9-1716-4f69-a14a-9133f5184886",
      "name": "Error2",
      "type": "n8n-nodes-base.set",
      "typeVersion": 3.2,
      "position": [1320, 1900]
    },
    {
      "parameters": {
        "fields": {
          "values": [{ "name": "originalName", "stringValue": "={{ $('Mock Data').item.json.name }}" }]
        },
        "options": {}
      },
      "id": "17780679-f7a3-4b1b-b6ee-f3f61e0843ad",
      "name": "Error1",
      "type": "n8n-nodes-base.set",
      "typeVersion": 3.2,
      "position": [820, 1820]
    }
  ],
  "pinData": {
    "Combined": [
      { "json": { "error": "This is an error [line 5, for item 0]", "originalName": "Jay Gatsby" } },
      { "json": { "id": "23423533", "name": "José Arcadio Buendía", "email": "jab@macondo.co", "notes": "Lots of people named after him. Very confusing", "country": "CO", "created": "1967-05-05", "myNewField": 1, "originalName": "José Arcadio Buendía" } },
      { "json": { "error": "This is an error [line 5, for item 2]", "originalName": "Max Sendak" } },
      { "json": { "id": "23423535", "name": "Zaphod Beeblebrox", "email": "captain@heartofgold.com", "notes": "Felt like I was talking to more than one person", "country": null, "created": "1979-10-12", "myNewField": 1, "originalName": "Zaphod Beeblebrox" } },
      { "json": { "id": "23423536", "name": "Edmund Pevensie", "email": "edmund@narnia.gov", "notes": "Passionate sailor", "country": "UK", "created": "1950-10-16", "myNewField": 1, "originalName": "Edmund Pevensie" } }
    ],
    "Combined1": [
      { "json": { "error": "This is an error [line 5, for item 0]", "originalName": "Jay Gatsby" } },
      { "json": { "id": "23423533", "name": "José Arcadio Buendía", "email": "jab@macondo.co", "notes": "Lots of people named after him. Very confusing", "country": "CO", "created": "1967-05-05", "myNewField": 1, "originalName": "José Arcadio Buendía" } },
      { "json": { "error": "This is an error [line 5, for item 2]", "originalName": "Max Sendak" } },
      { "json": { "id": "23423535", "name": "Zaphod Beeblebrox", "email": "captain@heartofgold.com", "notes": "Felt like I was talking to more than one person", "country": null, "created": "1979-10-12", "myNewField": 1, "originalName": "Zaphod Beeblebrox" } },
      { "json": { "id": "23423536", "name": "Edmund Pevensie", "email": "edmund@narnia.gov", "notes": "Passionate sailor", "country": "UK", "created": "1950-10-16", "myNewField": 1, "originalName": "Edmund Pevensie" } }
    ],
    "Success1": [
      { "json": { "id": "23423533", "name": "José Arcadio Buendía", "email": "jab@macondo.co", "notes": "Lots of people named after him. Very confusing", "country": "CO", "created": "1967-05-05", "myNewField": 1, "originalName": "José Arcadio Buendía" } },
      { "json": { "id": "23423535", "name": "Zaphod Beeblebrox", "email": "captain@heartofgold.com", "notes": "Felt like I was talking to more than one person", "country": null, "created": "1979-10-12", "myNewField": 1, "originalName": "Zaphod Beeblebrox" } },
      { "json": { "id": "23423536", "name": "Edmund Pevensie", "email": "edmund@narnia.gov", "notes": "Passionate sailor", "country": "UK", "created": "1950-10-16", "myNewField": 1, "originalName": "Edmund Pevensie" } }
    ],
    "Error1": [
      { "json": { "id": "23423532", "name": "Jay Gatsby", "email": "gatsby@west-egg.com", "notes": "Keeps asking about a green light??", "country": "US", "created": "1925-04-10", "error": "This is an error [line 5, for item 0]", "originalName": "Jay Gatsby" } },
      { "json": { "id": "23423534", "name": "Max Sendak", "email": "info@in-and-out-of-weeks.org", "notes": "Keeps rolling his terrible eyes", "country": "US", "created": "1963-04-09", "error": "This is an error [line 5, for item 2]", "originalName": "Max Sendak" } }
    ],
    "Success2": [
      { "json": { "id": "23423532", "name": "Jay Gatsby", "email": "gatsby@west-egg.com", "notes": "Keeps asking about a green light??", "country": "US", "created": "1925-04-10", "error": "This is an error [line 5, for item 0]", "originalName": "Jay Gatsby" } },
      { "json": { "id": "23423534", "name": "Max Sendak", "email": "info@in-and-out-of-weeks.org", "notes": "Keeps rolling his terrible eyes", "country": "US", "created": "1963-04-09", "error": "This is an error [line 5, for item 2]", "originalName": "Max Sendak" } }
    ],
    "Error": [
      { "json": { "id": "23423532", "name": "Jay Gatsby", "email": "gatsby@west-egg.com", "notes": "Keeps asking about a green light??", "country": "US", "created": "1925-04-10", "error": "This is an error [line 5, for item 0]", "originalName": "Jay Gatsby" } },
      { "json": { "id": "23423534", "name": "Max Sendak", "email": "info@in-and-out-of-weeks.org", "notes": "Keeps rolling his terrible eyes", "country": "US", "created": "1963-04-09", "error": "This is an error [line 5, for item 2]", "originalName": "Max Sendak" } }
    ],
    "Success": [
      { "json": { "id": "23423533", "name": "José Arcadio Buendía", "email": "jab@macondo.co", "notes": "Lots of people named after him. Very confusing", "country": "CO", "created": "1967-05-05", "myNewField": 1, "originalName": "José Arcadio Buendía" } },
      { "json": { "id": "23423535", "name": "Zaphod Beeblebrox", "email": "captain@heartofgold.com", "notes": "Felt like I was talking to more than one person", "country": null, "created": "1979-10-12", "myNewField": 1, "originalName": "Zaphod Beeblebrox" } },
      { "json": { "id": "23423536", "name": "Edmund Pevensie", "email": "edmund@narnia.gov", "notes": "Passionate sailor", "country": "UK", "created": "1950-10-16", "myNewField": 1, "originalName": "Edmund Pevensie" } }
    ]
  },
  "connections": {
    "When clicking \"Execute Workflow\"": {
      "main": [[{ "node": "Mock Data", "type": "main", "index": 0 }]]
    },
    "Mock Data": {
      "main": [
        [
          { "node": "Throw Error", "type": "main", "index": 0 },
          { "node": "Throw Error2", "type": "main", "index": 0 },
          { "node": "Throw Error1", "type": "main", "index": 0 },
          { "node": "Throw Error3", "type": "main", "index": 0 }
        ]
      ]
    },
    "Throw Error": {
      "main": [
        [{ "node": "Success", "type": "main", "index": 0 }],
        [{ "node": "Error", "type": "main", "index": 0 }]
      ]
    },
    "Throw Error2": { "main": [[{ "node": "Combined1", "type": "main", "index": 0 }]] },
    "Throw Error1": { "main": [[{ "node": "Combined", "type": "main", "index": 0 }]] },
    "Throw Error3": {
      "main": [
        [{ "node": "Success1", "type": "main", "index": 0 }],
        [{ "node": "Error1", "type": "main", "index": 0 }]
      ]
    },
    "Edit Fields": {
      "main": [
        [{ "node": "Success2", "type": "main", "index": 0 }],
        [{ "node": "Error2", "type": "main", "index": 0 }]
      ]
    },
    "Error1": { "main": [[{ "node": "Edit Fields", "type": "main", "index": 0 }]] }
  },
  "active": false,
  "settings": {
    "executionOrder": "v1"
  },
  "versionId": "94aaa2ce-558a-4fed-948a-09860174272a",
  "meta": {
    "templateCredsSetupCompleted": true,
    "instanceId": "27cc9b56542ad45b38725555722c50a1c3fee1670bbb67980558314ee08517c4"
  },
  "id": "FJvJXVvjM5rw3sUM",
  "tags": []
}
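The fixture above pins the observable behavior of the three error-handling modes (onError "continueRegularOutput" / "continueErrorOutput" and the deprecated continueOnFail). A schematic of the per-item routing decision, written from the fixture's pinned data rather than from n8n's executor source:

type Item = { json: Record<string, unknown> };

function routeItem(
	run: () => Item,
	item: Item,
	onError: 'stopWorkflow' | 'continueRegularOutput' | 'continueErrorOutput',
): { output: number; item: Item } {
	try {
		return { output: 0, item: run() };
	} catch (error) {
		const message = (error as Error).message;
		if (onError === 'continueRegularOutput') {
			// failed item stays on the main output, annotated with an `error` field
			return { output: 0, item: { json: { ...item.json, error: message } } };
		}
		if (onError === 'continueErrorOutput') {
			// failed item is diverted to the dedicated error output (index 1)
			return { output: 1, item: { json: { ...item.json, error: message } } };
		}
		throw error; // stopWorkflow: propagate and abort the execution
	}
}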
@@ -0,0 +1,565 @@
{
  "name": "paired items fix",
  "nodes": [
    {
      "parameters": {
        "values": {
          "string": [{ "name": "setting", "value": "hello" }]
        },
        "options": {}
      },
      "name": "Set",
      "type": "n8n-nodes-base.set",
      "typeVersion": 1,
      "position": [500, 680],
      "id": "18333790-db22-4235-92e6-b7dec8c20b77"
    },
    {
      "parameters": {
        "conditions": {
          "boolean": [{ "value1": true }]
        }
      },
      "id": "4d4af5e5-860d-416f-b2d7-f0f87f380355",
      "name": "IF",
      "type": "n8n-nodes-base.if",
      "typeVersion": 1,
      "position": [1080, 500],
      "alwaysOutputData": true
    },
    {
      "parameters": {
        "values": {
          "string": [{ "value": "={{ $('Set').item.json }}" }]
        },
        "options": {}
      },
      "id": "26569caf-084d-4d5b-a575-c8e439358d10",
      "name": "Set1",
      "type": "n8n-nodes-base.set",
      "typeVersion": 2,
      "position": [1340, 480]
    },
    {
      "parameters": {},
      "id": "f4f91c8c-e695-422b-97ad-802b10c7d868",
      "name": "When clicking \"Execute Workflow\"",
      "type": "n8n-nodes-base.manualTrigger",
      "typeVersion": 1,
      "position": [200, 980]
    },
    {
      "parameters": {
        "jsCode": "return [\n {\n 'thing': 1,\n 'letter': 'a'\n },\n {\n 'thing': 2,\n 'letter': 'b'\n },\n {\n 'thing': 3,\n 'letter': 'c'\n }\n]"
      },
      "id": "5eb81a1f-b845-408a-9fcc-e75e607212fa",
      "name": "Code",
      "type": "n8n-nodes-base.code",
      "typeVersion": 1,
      "position": [840, 500]
    },
    {
      "parameters": {
        "functionCode": "return [\n {\n 'number': 1,\n 'letter': 'a'\n },\n {\n 'number': 2,\n 'letter': 'b'\n },\n {\n 'number': 3,\n 'letter': 'c'\n }\n]"
      },
      "name": "Generate new items",
      "type": "n8n-nodes-base.function",
      "typeVersion": 1,
      "position": [840, 860],
      "id": "dd5d92f2-5893-4591-9f22-051f50e1b348"
    },
    {
      "parameters": {
        "values": {
          "number": [{ "name": "numberOriginal", "value": "={{ $('Generate new items').item.json.number }}" }],
          "string": [{ "name": "letterOriginal", "value": "={{ $('Generate new items').item.json.letter }}" }]
        },
        "options": {}
      },
      "id": "ebb23410-831b-4f8f-834a-0ca22eb7c050",
      "name": "Set3",
      "type": "n8n-nodes-base.set",
      "typeVersion": 2,
      "position": [1320, 860]
    },
    {
      "parameters": {
        "functionCode": "return [\n {\n 'json': {\n 'originalItem': 'third'\n },\n 'pairedItem': 2\n },\n {\n 'json': {\n 'originalItem': 'first'\n },\n 'pairedItem': 0\n },\n {\n 'json': {\n 'originalItem': 'second'\n },\n 'pairedItem': 1\n }\n]"
      },
      "name": "Mix up pairing",
      "type": "n8n-nodes-base.function",
      "typeVersion": 1,
      "position": [1080, 860],
      "id": "33ee2a0e-edc9-4197-94a2-4f77735240ff"
    },
    {
      "parameters": {
        "content": "### Always output data & multiple possible output resolve which are identical",
        "height": 258,
        "width": 855
      },
      "id": "3dc9ccfa-ef78-4022-8bbc-45ef8eb3a207",
      "name": "Sticky Note1",
      "type": "n8n-nodes-base.stickyNote",
      "typeVersion": 1,
      "position": [780, 400]
    },
    {
      "parameters": {
        "operation": "getAllPeople"
      },
      "id": "5ba8d43b-9fa3-4ba0-9c08-3199a9d2d602",
      "name": "cuctomers",
      "type": "n8n-nodes-base.n8nTrainingCustomerDatastore",
      "typeVersion": 1,
      "position": [640, 1340]
    },
    {
      "parameters": {
        "options": {}
      },
      "id": "00114764-691d-40b4-ae11-c5206a9448e3",
      "name": "result",
      "type": "n8n-nodes-base.set",
      "typeVersion": 2,
      "position": [1380, 1180],
      "alwaysOutputData": true
    },
    {
      "parameters": {
        "jsCode": "// Loop over input items and add a new field called 'myNewField' to the JSON of each one\nconst data = [];\nfor (const [index, entry] of $input.all().entries()) {\n entry.json.myNewField = index;\n entry.pairedItem = 0;\n data.push(entry);\n}\n\nreturn data;"
      },
      "id": "d3aa3bc3-3e5a-42d2-a26d-ea0c273ea3e8",
      "name": "changePairedindex",
      "type": "n8n-nodes-base.code",
      "typeVersion": 1,
      "position": [920, 1180]
    },
    {
      "parameters": {
        "keepOnlySet": true,
        "values": {
          "string": [
            { "name": "=nameOriginalItem", "value": "={{ $('cuctomers').item.json.name }}" },
            { "name": "name", "value": "={{ $json.name }}" }
          ],
          "boolean": [{ "name": "test", "value": "={{ $('cuctomers').item.json.id === $json.id }}" }]
        },
        "options": {}
      },
      "id": "af18482d-4a88-4ffb-b6e3-67be45cdfad1",
      "name": "checkWithOriginal",
      "type": "n8n-nodes-base.set",
      "typeVersion": 2,
      "position": [1140, 1180]
    },
    {
      "parameters": {
        "options": {}
      },
      "id": "3bad8f81-2fac-4e6e-bb7c-3a4921674005",
      "name": "loop",
      "type": "n8n-nodes-base.splitInBatches",
      "typeVersion": 2,
      "position": [920, 1420]
    },
    {
      "parameters": {
        "keepOnlySet": true,
        "values": {
          "string": [
            { "name": "=nameOriginalItem", "value": "={{ $('cuctomers').item.json.name }}" },
            { "name": "name", "value": "={{ $json.name }}" }
          ],
          "boolean": [{ "name": "test", "value": "={{ $('cuctomers').item.json.id === $json.id }}" }]
        },
        "options": {}
      },
      "id": "865691b7-e4b8-487e-a5ec-80387118ea61",
      "name": "testAfterLoop",
      "type": "n8n-nodes-base.set",
      "typeVersion": 2,
      "position": [1180, 1620]
    },
    {
      "parameters": {
        "options": {}
      },
      "id": "8c5d3a1c-e34b-4937-bc22-88b418391002",
      "name": "result1",
      "type": "n8n-nodes-base.set",
      "typeVersion": 2,
      "position": [1380, 1620],
      "alwaysOutputData": true
    },
    {
      "parameters": {
        "jsCode": "// Loop over input items and add a new field called 'myNewField' to the JSON of each one\nconst data = [];\nfor (const [index, entry] of $input.all().entries()) {\n entry.json.myNewField = index;\n entry.pairedItem = 0;\n data.push(entry);\n}\n\nreturn data;"
      },
      "id": "ad476a3a-d491-406f-903d-022cb0f0ef3c",
      "name": "changePairedindex1",
      "type": "n8n-nodes-base.code",
      "typeVersion": 1,
      "position": [1380, 1400]
    }
  ],
  "pinData": {
    "Set3": [
      { "json": { "originalItem": "third", "numberOriginal": 3, "letterOriginal": "c" }, "pairedItem": { "item": 0 } },
      { "json": { "originalItem": "first", "numberOriginal": 1, "letterOriginal": "a" }, "pairedItem": { "item": 1 } },
      { "json": { "originalItem": "second", "numberOriginal": 2, "letterOriginal": "b" }, "pairedItem": { "item": 2 } }
    ],
    "Set1": [
      { "json": { "propertyName": { "setting": "hello" } }, "pairedItem": { "item": 0 } }
    ],
    "result": [
      { "json": { "test": true, "nameOriginalItem": "Jay Gatsby", "name": "Jay Gatsby" }, "pairedItem": { "item": 0 } },
      { "json": { "test": false, "nameOriginalItem": "Jay Gatsby", "name": "José Arcadio Buendía" }, "pairedItem": { "item": 1 } },
      { "json": { "test": false, "nameOriginalItem": "Jay Gatsby", "name": "Max Sendak" }, "pairedItem": { "item": 2 } },
      { "json": { "test": false, "nameOriginalItem": "Jay Gatsby", "name": "Zaphod Beeblebrox" }, "pairedItem": { "item": 3 } },
      { "json": { "test": false, "nameOriginalItem": "Jay Gatsby", "name": "Edmund Pevensie" }, "pairedItem": { "item": 4 } }
    ],
    "result1": [
      { "json": { "test": true, "nameOriginalItem": "Jay Gatsby", "name": "Jay Gatsby" }, "pairedItem": { "item": 0 } },
      { "json": { "test": false, "nameOriginalItem": "Jay Gatsby", "name": "José Arcadio Buendía" }, "pairedItem": { "item": 1 } },
      { "json": { "test": false, "nameOriginalItem": "Jay Gatsby", "name": "Max Sendak" }, "pairedItem": { "item": 2 } },
      { "json": { "test": false, "nameOriginalItem": "Jay Gatsby", "name": "Zaphod Beeblebrox" }, "pairedItem": { "item": 3 } },
      { "json": { "test": false, "nameOriginalItem": "Jay Gatsby", "name": "Edmund Pevensie" }, "pairedItem": { "item": 4 } }
    ]
  },
  "connections": {
    "Set": {
      "main": [
        [
          { "node": "Code", "type": "main", "index": 0 },
          { "node": "Generate new items", "type": "main", "index": 0 }
        ]
      ]
    },
    "IF": { "main": [[{ "node": "Set1", "type": "main", "index": 0 }]] },
    "When clicking \"Execute Workflow\"": {
      "main": [
        [
          { "node": "Set", "type": "main", "index": 0 },
          { "node": "cuctomers", "type": "main", "index": 0 }
        ]
      ]
    },
    "Code": { "main": [[{ "node": "IF", "type": "main", "index": 0 }]] },
    "Generate new items": { "main": [[{ "node": "Mix up pairing", "type": "main", "index": 0 }]] },
    "Mix up pairing": { "main": [[{ "node": "Set3", "type": "main", "index": 0 }]] },
    "cuctomers": {
      "main": [
        [
          { "node": "changePairedindex", "type": "main", "index": 0 },
          { "node": "loop", "type": "main", "index": 0 }
        ]
      ]
    },
    "changePairedindex": { "main": [[{ "node": "checkWithOriginal", "type": "main", "index": 0 }]] },
    "checkWithOriginal": { "main": [[{ "node": "result", "type": "main", "index": 0 }]] },
    "loop": {
      "main": [
        [{ "node": "changePairedindex1", "type": "main", "index": 0 }],
        [{ "node": "testAfterLoop", "type": "main", "index": 0 }]
      ]
    },
    "testAfterLoop": { "main": [[{ "node": "result1", "type": "main", "index": 0 }]] },
    "changePairedindex1": { "main": [[{ "node": "loop", "type": "main", "index": 0 }]] }
  },
  "active": false,
  "settings": {},
  "versionId": "6f6ee01c-8c99-493f-a30c-6a5ed2b71750",
  "id": "169",
  "meta": {
    "instanceId": "36203ea1ce3cef713fa25999bd9874ae26b9e4c2c3a90a365f2882a154d031d0"
  },
  "tags": []
}
@@ -18,11 +18,12 @@ import {
  WorkflowDeactivationError,
} from 'n8n-workflow';

import { ErrorReporter } from './error-reporter';
import type { IWorkflowData } from './Interfaces';
import { Logger } from './logging/logger';
import { ScheduledTaskManager } from './ScheduledTaskManager';
import { TriggersAndPollers } from './TriggersAndPollers';
import { ErrorReporter } from '@/errors/error-reporter';
import type { IWorkflowData } from '@/interfaces';
import { Logger } from '@/logging/logger';

import { ScheduledTaskManager } from './scheduled-task-manager';
import { TriggersAndPollers } from './triggers-and-pollers';

@Service()
export class ActiveWorkflows {
6
packages/core/src/execution-engine/index.ts
Normal file
@@ -0,0 +1,6 @@
export * from './active-workflows';
export * from './routing-node';
export * from './node-execution-context';
export * from './partial-execution-utils';
export * from './node-execution-context/utils/execution-metadata';
export * from './workflow-execute';
@@ -12,7 +12,7 @@ import type {
} from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';

import { InstanceSettings } from '@/InstanceSettings';
import { InstanceSettings } from '@/instance-settings';

import { NodeExecutionContext } from '../node-execution-context';

@@ -17,7 +17,7 @@ import type {
} from 'n8n-workflow';
import { ApplicationError, NodeHelpers, WAIT_INDEFINITELY } from 'n8n-workflow';

import { BinaryDataService } from '@/BinaryData/BinaryData.service';
import { BinaryDataService } from '@/binary-data/binary-data.service';

import type { BaseExecuteContext } from '../base-execute-context';

@@ -29,7 +29,7 @@ import {
  WorkflowDataProxy,
} from 'n8n-workflow';

import { BinaryDataService } from '@/BinaryData/BinaryData.service';
import { BinaryDataService } from '@/binary-data/binary-data.service';

import { NodeExecutionContext } from './node-execution-context';

@@ -37,10 +37,10 @@ import {
  getFileSystemHelperFunctions,
  getCheckProcessedHelperFunctions,
  detectBinaryEncoding,
} from '@/NodeExecuteFunctions';
} from '@/node-execute-functions';

import { BaseExecuteContext } from './base-execute-context';
import { getInputConnectionData } from './utils/getInputConnectionData';
import { getInputConnectionData } from './utils/get-input-connection-data';

export class ExecuteContext extends BaseExecuteContext implements IExecuteFunctions {
  readonly helpers: IExecuteFunctions['helpers'];
@@ -21,7 +21,7 @@ import {
  getBinaryHelperFunctions,
  getRequestHelperFunctions,
  returnJsonArray,
} from '@/NodeExecuteFunctions';
} from '@/node-execute-functions';

import { BaseExecuteContext } from './base-execute-context';

@@ -16,7 +16,7 @@ import {
  getNodeWebhookUrl,
  getRequestHelperFunctions,
  getWebhookDescription,
} from '@/NodeExecuteFunctions';
} from '@/node-execute-functions';

import { NodeExecutionContext } from './node-execution-context';

@@ -10,4 +10,4 @@ export { SupplyDataContext } from './supply-data-context';
export { TriggerContext } from './trigger-context';
export { WebhookContext } from './webhook-context';

export { getAdditionalKeys } from './utils/getAdditionalKeys';
export { getAdditionalKeys } from './utils/get-additional-keys';
@@ -9,11 +9,11 @@ import type {
  Workflow,
} from 'n8n-workflow';

import { extractValue } from '@/ExtractValue';
// eslint-disable-next-line import/no-cycle
import { getRequestHelperFunctions, getSSHTunnelFunctions } from '@/NodeExecuteFunctions';
import { getRequestHelperFunctions, getSSHTunnelFunctions } from '@/node-execute-functions';

import { NodeExecutionContext } from './node-execution-context';
import { extractValue } from './utils/extract-value';

export class LoadOptionsContext extends NodeExecutionContext implements ILoadOptionsFunctions {
  readonly helpers: ILoadOptionsFunctions['helpers'];
@@ -28,16 +28,16 @@ import {
  NodeOperationError,
} from 'n8n-workflow';

import { HTTP_REQUEST_NODE_TYPE, HTTP_REQUEST_TOOL_NODE_TYPE } from '@/Constants';
import { HTTP_REQUEST_NODE_TYPE, HTTP_REQUEST_TOOL_NODE_TYPE } from '@/constants';
import { Memoized } from '@/decorators';
import { extractValue } from '@/ExtractValue';
import { InstanceSettings } from '@/InstanceSettings';
import { InstanceSettings } from '@/instance-settings';
import { Logger } from '@/logging/logger';

import { cleanupParameterData } from './utils/cleanupParameterData';
import { ensureType } from './utils/ensureType';
import { getAdditionalKeys } from './utils/getAdditionalKeys';
import { validateValueAgainstSchema } from './utils/validateValueAgainstSchema';
import { cleanupParameterData } from './utils/cleanup-parameter-data';
import { ensureType } from './utils/ensure-type';
import { extractValue } from './utils/extract-value';
import { getAdditionalKeys } from './utils/get-additional-keys';
import { validateValueAgainstSchema } from './utils/validate-value-against-schema';

export abstract class NodeExecutionContext implements Omit<FunctionsBase, 'getCredentials'> {
  protected readonly instanceSettings = Container.get(InstanceSettings);
@@ -15,7 +15,7 @@ import {
  getRequestHelperFunctions,
  getSchedulingFunctions,
  returnJsonArray,
} from '@/NodeExecuteFunctions';
} from '@/node-execute-functions';

import { NodeExecutionContext } from './node-execution-context';

@@ -33,10 +33,10 @@ import {
  getSSHTunnelFunctions,
  normalizeItems,
  returnJsonArray,
} from '@/NodeExecuteFunctions';
} from '@/node-execute-functions';

import { BaseExecuteContext } from './base-execute-context';
import { getInputConnectionData } from './utils/getInputConnectionData';
import { getInputConnectionData } from './utils/get-input-connection-data';

export class SupplyDataContext extends BaseExecuteContext implements ISupplyDataFunctions {
  readonly helpers: ISupplyDataFunctions['helpers'];
@@ -16,7 +16,7 @@ import {
  getSchedulingFunctions,
  getSSHTunnelFunctions,
  returnJsonArray,
} from '@/NodeExecuteFunctions';
} from '@/node-execute-functions';

import { NodeExecutionContext } from './node-execution-context';

@@ -2,7 +2,7 @@ import toPlainObject from 'lodash/toPlainObject';
import { DateTime } from 'luxon';
import type { NodeParameterValue } from 'n8n-workflow';

import { cleanupParameterData } from '../cleanupParameterData';
import { cleanupParameterData } from '../cleanup-parameter-data';

describe('cleanupParameterData', () => {
  it('should stringify Luxon dates in-place', () => {
@@ -0,0 +1,481 @@
import { mock } from 'jest-mock-extended';
import type { INodeType, ISupplyDataFunctions, INode } from 'n8n-workflow';
import { z } from 'zod';

import { createNodeAsTool } from '../create-node-as-tool';

jest.mock('@langchain/core/tools', () => ({
  DynamicStructuredTool: jest.fn().mockImplementation((config) => ({
    name: config.name,
    description: config.description,
    schema: config.schema,
    func: config.func,
  })),
}));

describe('createNodeAsTool', () => {
  const context = mock<ISupplyDataFunctions>({
    getNodeParameter: jest.fn(),
    addInputData: jest.fn(),
    addOutputData: jest.fn(),
    getNode: jest.fn(),
  });
  const handleToolInvocation = jest.fn();
  const nodeType = mock<INodeType>({
    description: {
      name: 'TestNode',
      description: 'Test node description',
    },
  });
  const node = mock<INode>({ name: 'Test_Node' });
  const options = { node, nodeType, handleToolInvocation };

  beforeEach(() => {
    jest.clearAllMocks();
    (context.addInputData as jest.Mock).mockReturnValue({ index: 0 });
    (context.getNode as jest.Mock).mockReturnValue(node);
    (nodeType.execute as jest.Mock).mockResolvedValue([[{ json: { result: 'test' } }]]);

    node.parameters = {
      param1: "={{$fromAI('param1', 'Test parameter', 'string') }}",
      param2: 'static value',
      nestedParam: {
        subParam: "={{ $fromAI('subparam', 'Nested parameter', 'string') }}",
      },
      descriptionType: 'auto',
      resource: 'testResource',
      operation: 'testOperation',
    };
  });

  describe('Tool Creation and Basic Properties', () => {
    it('should create a DynamicStructuredTool with correct properties', () => {
      const tool = createNodeAsTool(options).response;

      expect(tool).toBeDefined();
      expect(tool.name).toBe('Test_Node');
      expect(tool.description).toBe(
        'Test node description\n Resource: testResource\n Operation: testOperation',
      );
      expect(tool.schema).toBeDefined();
    });

    it('should use toolDescription if provided', () => {
      node.parameters.descriptionType = 'manual';
      node.parameters.toolDescription = 'Custom tool description';

      const tool = createNodeAsTool(options).response;

      expect(tool.description).toBe('Custom tool description');
    });
  });

  describe('Schema Creation and Parameter Handling', () => {
    it('should create a schema based on fromAI arguments in nodeParameters', () => {
      const tool = createNodeAsTool(options).response;

      expect(tool.schema).toBeDefined();
      expect(tool.schema.shape).toHaveProperty('param1');
      expect(tool.schema.shape).toHaveProperty('subparam');
      expect(tool.schema.shape).not.toHaveProperty('param2');
    });

    it('should handle fromAI arguments correctly', () => {
      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.subparam).toBeInstanceOf(z.ZodString);
    });

    it('should handle default values correctly', () => {
      node.parameters = {
        paramWithDefault:
          "={{ $fromAI('paramWithDefault', 'Parameter with default', 'string', 'default value') }}",
        numberWithDefault:
          "={{ $fromAI('numberWithDefault', 'Number with default', 'number', 42) }}",
        booleanWithDefault:
          "={{ $fromAI('booleanWithDefault', 'Boolean with default', 'boolean', true) }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.paramWithDefault.description).toBe('Parameter with default');
      expect(tool.schema.shape.numberWithDefault.description).toBe('Number with default');
      expect(tool.schema.shape.booleanWithDefault.description).toBe('Boolean with default');
    });

    it('should handle nested parameters correctly', () => {
      node.parameters = {
        topLevel: "={{ $fromAI('topLevel', 'Top level parameter', 'string') }}",
        nested: {
          level1: "={{ $fromAI('level1', 'Nested level 1', 'string') }}",
          deeperNested: {
            level2: "={{ $fromAI('level2', 'Nested level 2', 'number') }}",
          },
        },
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.topLevel).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.level1).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.level2).toBeInstanceOf(z.ZodNumber);
    });

    it('should handle array parameters correctly', () => {
      node.parameters = {
        arrayParam: [
          "={{ $fromAI('item1', 'First item', 'string') }}",
          "={{ $fromAI('item2', 'Second item', 'number') }}",
        ],
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.item1).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.item2).toBeInstanceOf(z.ZodNumber);
    });
  });

  describe('Error Handling and Edge Cases', () => {
    it('should handle error during node execution', async () => {
      nodeType.execute = jest.fn().mockRejectedValue(new Error('Execution failed'));
      const tool = createNodeAsTool(options).response;
      handleToolInvocation.mockReturnValue('Error during node execution: some random issue.');

      const result = await tool.func({ param1: 'test value' });

      expect(result).toContain('Error during node execution:');
    });

    it('should throw an error for invalid parameter names', () => {
      node.parameters.invalidParam = "$fromAI('invalid param', 'Invalid parameter', 'string')";

      expect(() => createNodeAsTool(options)).toThrow('Parameter key `invalid param` is invalid');
    });

    it('should throw an error for $fromAI calls with unsupported types', () => {
      node.parameters = {
        invalidTypeParam:
          "={{ $fromAI('invalidType', 'Param with unsupported type', 'unsupportedType') }}",
      };

      expect(() => createNodeAsTool(options)).toThrow('Invalid type: unsupportedType');
    });

    it('should handle empty parameters and parameters with no fromAI calls', () => {
      node.parameters = {
        param1: 'static value 1',
        param2: 'static value 2',
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape).toEqual({});
    });
  });

  describe('Parameter Name and Description Handling', () => {
    it('should accept parameter names with underscores and hyphens', () => {
      node.parameters = {
        validName1:
          "={{ $fromAI('param_name-1', 'Valid name with underscore and hyphen', 'string') }}",
        validName2: "={{ $fromAI('param_name_2', 'Another valid name', 'number') }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape['param_name-1']).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape['param_name-1'].description).toBe(
        'Valid name with underscore and hyphen',
      );

      expect(tool.schema.shape.param_name_2).toBeInstanceOf(z.ZodNumber);
      expect(tool.schema.shape.param_name_2.description).toBe('Another valid name');
    });

    it('should throw an error for parameter names with invalid special characters', () => {
      node.parameters = {
        invalidNameParam:
          "={{ $fromAI('param@name!', 'Invalid name with special characters', 'string') }}",
      };

      expect(() => createNodeAsTool(options)).toThrow('Parameter key `param@name!` is invalid');
    });

    it('should throw an error for empty parameter name', () => {
      node.parameters = {
        invalidNameParam: "={{ $fromAI('', 'Invalid name with special characters', 'string') }}",
      };

      expect(() => createNodeAsTool(options)).toThrow(
        'You must specify a key when using $fromAI()',
      );
    });

    it('should handle parameter names with exact and exceeding character limits', () => {
      const longName = 'a'.repeat(64);
      const tooLongName = 'a'.repeat(65);
      node.parameters = {
        longNameParam: `={{ $fromAI('${longName}', 'Param with 64 character name', 'string') }}`,
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape[longName]).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape[longName].description).toBe('Param with 64 character name');

      node.parameters = {
        tooLongNameParam: `={{ $fromAI('${tooLongName}', 'Param with 65 character name', 'string') }}`,
      };
      expect(() => createNodeAsTool(options)).toThrow(
        `Parameter key \`${tooLongName}\` is invalid`,
      );
    });

    it('should handle $fromAI calls with empty description', () => {
      node.parameters = {
        emptyDescriptionParam: "={{ $fromAI('emptyDescription', '', 'number') }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.emptyDescription).toBeInstanceOf(z.ZodNumber);
      expect(tool.schema.shape.emptyDescription.description).toBeUndefined();
    });

    it('should throw an error for calls with the same parameter but different descriptions', () => {
      node.parameters = {
        duplicateParam1: "={{ $fromAI('duplicate', 'First duplicate', 'string') }}",
        duplicateParam2: "={{ $fromAI('duplicate', 'Second duplicate', 'number') }}",
      };

      expect(() => createNodeAsTool(options)).toThrow(
        "Duplicate key 'duplicate' found with different description or type",
      );
    });
    it('should throw an error for calls with the same parameter but different types', () => {
      node.parameters = {
        duplicateParam1: "={{ $fromAI('duplicate', 'First duplicate', 'string') }}",
        duplicateParam2: "={{ $fromAI('duplicate', 'First duplicate', 'number') }}",
      };

      expect(() => createNodeAsTool(options)).toThrow(
        "Duplicate key 'duplicate' found with different description or type",
      );
    });
  });

  describe('Complex Parsing Scenarios', () => {
    it('should correctly parse $fromAI calls with varying spaces, capitalization, and within template literals', () => {
      node.parameters = {
        varyingSpacing1: "={{$fromAI('param1','Description1','string')}}",
        varyingSpacing2: "={{ $fromAI ( 'param2' , 'Description2' , 'number' ) }}",
        varyingSpacing3: "={{ $FROMai('param3', 'Description3', 'boolean') }}",
        wrongCapitalization: "={{$fromai('param4','Description4','number')}}",
        templateLiteralParam:
          // eslint-disable-next-line n8n-local-rules/no-interpolation-in-regular-string
          "={{ `Value is: ${$fromAI('templatedParam', 'Templated param description', 'string')}` }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.param1.description).toBe('Description1');

      expect(tool.schema.shape.param2).toBeInstanceOf(z.ZodNumber);
      expect(tool.schema.shape.param2.description).toBe('Description2');

      expect(tool.schema.shape.param3).toBeInstanceOf(z.ZodBoolean);
      expect(tool.schema.shape.param3.description).toBe('Description3');

      expect(tool.schema.shape.param4).toBeInstanceOf(z.ZodNumber);
      expect(tool.schema.shape.param4.description).toBe('Description4');

      expect(tool.schema.shape.templatedParam).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.templatedParam.description).toBe('Templated param description');
    });

    it('should correctly parse multiple $fromAI calls interleaved with regular text', () => {
      node.parameters = {
        interleavedParams:
          "={{ 'Start ' + $fromAI('param1', 'First param', 'string') + ' Middle ' + $fromAI('param2', 'Second param', 'number') + ' End' }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.param1.description).toBe('First param');

      expect(tool.schema.shape.param2).toBeInstanceOf(z.ZodNumber);
      expect(tool.schema.shape.param2.description).toBe('Second param');
    });

    it('should correctly parse $fromAI calls with complex JSON default values', () => {
      node.parameters = {
        complexJsonDefault:
          '={{ $fromAI(\'complexJson\', \'Param with complex JSON default\', \'json\', \'{"nested": {"key": "value"}, "array": [1, 2, 3]}\') }}',
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.complexJson._def.innerType).toBeInstanceOf(z.ZodRecord);
      expect(tool.schema.shape.complexJson.description).toBe('Param with complex JSON default');
      expect(tool.schema.shape.complexJson._def.defaultValue()).toEqual({
        nested: { key: 'value' },
        array: [1, 2, 3],
      });
    });

    it('should ignore $fromAI calls embedded in non-string node parameters', () => {
      node.parameters = {
        numberParam: 42,
        booleanParam: false,
        objectParam: {
          innerString: "={{ $fromAI('innerParam', 'Inner param', 'string') }}",
          innerNumber: 100,
          innerObject: {
            deepParam: "={{ $fromAI('deepParam', 'Deep param', 'number') }}",
          },
        },
        arrayParam: [
          "={{ $fromAI('arrayParam1', 'First array param', 'string') }}",
          200,
          "={{ $fromAI('nestedArrayParam', 'Nested array param', 'boolean') }}",
        ],
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.innerParam).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.innerParam.description).toBe('Inner param');

      expect(tool.schema.shape.deepParam).toBeInstanceOf(z.ZodNumber);
      expect(tool.schema.shape.deepParam.description).toBe('Deep param');

      expect(tool.schema.shape.arrayParam1).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.arrayParam1.description).toBe('First array param');

      expect(tool.schema.shape.nestedArrayParam).toBeInstanceOf(z.ZodBoolean);
      expect(tool.schema.shape.nestedArrayParam.description).toBe('Nested array param');
    });
  });

  describe('Escaping and Special Characters', () => {
    it('should handle escaped single quotes in parameter names and descriptions', () => {
      node.parameters = {
        escapedQuotesParam:
          "={{ $fromAI('paramName', 'Description with \\'escaped\\' quotes', 'string') }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.paramName.description).toBe("Description with 'escaped' quotes");
    });

    it('should handle escaped double quotes in parameter names and descriptions', () => {
      node.parameters = {
        escapedQuotesParam:
          '={{ $fromAI("paramName", "Description with \\"escaped\\" quotes", "string") }}',
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.paramName.description).toBe('Description with "escaped" quotes');
    });

    it('should handle escaped backslashes in parameter names and descriptions', () => {
      node.parameters = {
        escapedBackslashesParam:
          "={{ $fromAI('paramName', 'Description with \\\\ backslashes', 'string') }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.paramName.description).toBe('Description with \\ backslashes');
    });

    it('should handle mixed escaped characters in parameter names and descriptions', () => {
      node.parameters = {
        mixedEscapesParam:
          '={{ $fromAI(`paramName`, \'Description with \\\'mixed" characters\', "number") }}',
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodNumber);
      expect(tool.schema.shape.paramName.description).toBe('Description with \'mixed" characters');
    });
  });

  describe('Edge Cases and Limitations', () => {
    it('should ignore excess arguments in $fromAI calls beyond the fourth argument', () => {
      node.parameters = {
        excessArgsParam:
          "={{ $fromAI('excessArgs', 'Param with excess arguments', 'string', 'default', 'extraArg1', 'extraArg2') }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.excessArgs._def.innerType).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.excessArgs.description).toBe('Param with excess arguments');
      expect(tool.schema.shape.excessArgs._def.defaultValue()).toBe('default');
    });

    it('should correctly parse $fromAI calls with nested parentheses', () => {
      node.parameters = {
        nestedParenthesesParam:
          "={{ $fromAI('paramWithNested', 'Description with ((nested)) parentheses', 'string') }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.paramWithNested).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.paramWithNested.description).toBe(
        'Description with ((nested)) parentheses',
      );
    });

    it('should handle $fromAI calls with very long descriptions', () => {
      const longDescription = 'A'.repeat(1000);
      node.parameters = {
        longParam: `={{ $fromAI('longParam', '${longDescription}', 'string') }}`,
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.longParam).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.longParam.description).toBe(longDescription);
    });

    it('should handle $fromAI calls with only some parameters', () => {
      node.parameters = {
        partialParam1: "={{ $fromAI('partial1') }}",
        partialParam2: "={{ $fromAI('partial2', 'Description only') }}",
        partialParam3: "={{ $fromAI('partial3', '', 'number') }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.partial1).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.partial2).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.partial3).toBeInstanceOf(z.ZodNumber);
    });
  });

  describe('Unicode and Internationalization', () => {
    it('should handle $fromAI calls with unicode characters', () => {
      node.parameters = {
        unicodeParam: "={{ $fromAI('unicodeParam', '🌈 Unicode parameter 你好', 'string') }}",
      };

      const tool = createNodeAsTool(options).response;

      expect(tool.schema.shape.unicodeParam).toBeInstanceOf(z.ZodString);
      expect(tool.schema.shape.unicodeParam.description).toBe('🌈 Unicode parameter 你好');
    });
  });
});
@@ -1,6 +1,6 @@
import { ExpressionError } from 'n8n-workflow';

import { ensureType } from '../ensureType';
import { ensureType } from '../ensure-type';

describe('ensureType', () => {
  it('throws error for null value', () => {
@@ -0,0 +1,219 @@
import type { IRunExecutionData } from 'n8n-workflow';

import { InvalidExecutionMetadataError } from '@/errors/invalid-execution-metadata.error';

import {
  setWorkflowExecutionMetadata,
  setAllWorkflowExecutionMetadata,
  KV_LIMIT,
  getWorkflowExecutionMetadata,
  getAllWorkflowExecutionMetadata,
} from '../execution-metadata';

describe('Execution Metadata functions', () => {
  test('setWorkflowExecutionMetadata will set a value', () => {
    const metadata = {};
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    setWorkflowExecutionMetadata(executionData, 'test1', 'value1');

    expect(metadata).toEqual({
      test1: 'value1',
    });
  });

  test('setAllWorkflowExecutionMetadata will set multiple values', () => {
    const metadata = {};
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    setAllWorkflowExecutionMetadata(executionData, {
      test1: 'value1',
      test2: 'value2',
    });

    expect(metadata).toEqual({
      test1: 'value1',
      test2: 'value2',
    });
  });

  test('setWorkflowExecutionMetadata should only convert numbers to strings', () => {
    const metadata = {};
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    expect(() => setWorkflowExecutionMetadata(executionData, 'test1', 1234)).not.toThrow(
      InvalidExecutionMetadataError,
    );

    expect(metadata).toEqual({
      test1: '1234',
    });

    expect(() => setWorkflowExecutionMetadata(executionData, 'test2', {})).toThrow(
      InvalidExecutionMetadataError,
    );

    expect(metadata).not.toEqual({
      test1: '1234',
      test2: {},
    });
  });

  test('setAllWorkflowExecutionMetadata should not convert values to strings and should set other values correctly', () => {
    const metadata = {};
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    expect(() =>
      setAllWorkflowExecutionMetadata(executionData, {
        test1: {} as unknown as string,
        test2: [] as unknown as string,
        test3: 'value3',
        test4: 'value4',
      }),
    ).toThrow(InvalidExecutionMetadataError);

    expect(metadata).toEqual({
      test3: 'value3',
      test4: 'value4',
    });
  });

  test('setWorkflowExecutionMetadata should validate key characters', () => {
    const metadata = {};
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    expect(() => setWorkflowExecutionMetadata(executionData, 'te$t1$', 1234)).toThrow(
      InvalidExecutionMetadataError,
    );

    expect(metadata).not.toEqual({
      test1: '1234',
    });
  });

  test('setWorkflowExecutionMetadata should limit the number of metadata entries', () => {
    const metadata = {};
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    const expected: Record<string, string> = {};
    for (let i = 0; i < KV_LIMIT; i++) {
      expected[`test${i + 1}`] = `value${i + 1}`;
    }

    for (let i = 0; i < KV_LIMIT + 10; i++) {
      setWorkflowExecutionMetadata(executionData, `test${i + 1}`, `value${i + 1}`);
    }

    expect(metadata).toEqual(expected);
  });

  test('getWorkflowExecutionMetadata should return a single value for an existing key', () => {
    const metadata: Record<string, string> = { test1: 'value1' };
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    expect(getWorkflowExecutionMetadata(executionData, 'test1')).toBe('value1');
  });

  test('getWorkflowExecutionMetadata should return undefined for an unset key', () => {
    const metadata: Record<string, string> = { test1: 'value1' };
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    expect(getWorkflowExecutionMetadata(executionData, 'test2')).toBeUndefined();
  });

  test('getAllWorkflowExecutionMetadata should return all metadata', () => {
    const metadata: Record<string, string> = { test1: 'value1', test2: 'value2' };
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    expect(getAllWorkflowExecutionMetadata(executionData)).toEqual(metadata);
  });

  test('getAllWorkflowExecutionMetadata should not return an object that modifies internal state', () => {
    const metadata: Record<string, string> = { test1: 'value1', test2: 'value2' };
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    getAllWorkflowExecutionMetadata(executionData).test1 = 'changed';

    expect(metadata.test1).not.toBe('changed');
    expect(metadata.test1).toBe('value1');
  });

  test('setWorkflowExecutionMetadata should truncate long keys', () => {
    const metadata = {};
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    setWorkflowExecutionMetadata(
      executionData,
      'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab',
      'value1',
    );

    expect(metadata).toEqual({
      aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa: 'value1',
    });
  });

  test('setWorkflowExecutionMetadata should truncate long values', () => {
    const metadata = {};
    const executionData = {
      resultData: {
        metadata,
      },
    } as IRunExecutionData;

    setWorkflowExecutionMetadata(
      executionData,
      'test1',
      'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab',
    );

    expect(metadata).toEqual({
      test1:
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
    });
  });
});
@@ -7,9 +7,9 @@ import type {
  SecretsHelpersBase,
} from 'n8n-workflow';

import { PLACEHOLDER_EMPTY_EXECUTION_ID } from '@/Constants';
import { PLACEHOLDER_EMPTY_EXECUTION_ID } from '@/constants';

import { getAdditionalKeys } from '../getAdditionalKeys';
import { getAdditionalKeys } from '../get-additional-keys';

describe('getAdditionalKeys', () => {
  const secretsHelpers = mock<SecretsHelpersBase>();
@@ -1,6 +1,6 @@
import type { IDataObject, INode, INodeType } from 'n8n-workflow';

import { validateValueAgainstSchema } from '../validateValueAgainstSchema';
import { validateValueAgainstSchema } from '../validate-value-against-schema';

describe('validateValueAgainstSchema', () => {
  test('should validate fixedCollection values parameter', () => {
@@ -1,7 +1,7 @@
import type { IRunExecutionData } from 'n8n-workflow';
import { LoggerProxy as Logger } from 'n8n-workflow';

import { InvalidExecutionMetadataError } from './errors/invalid-execution-metadata.error';
import { InvalidExecutionMetadataError } from '@/errors/invalid-execution-metadata.error';

export const KV_LIMIT = 10;

@@ -6,14 +6,15 @@ import type {
} from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';

import { PLACEHOLDER_EMPTY_EXECUTION_ID } from '@/Constants';
import { PLACEHOLDER_EMPTY_EXECUTION_ID } from '@/constants';

import {
  setWorkflowExecutionMetadata,
  setAllWorkflowExecutionMetadata,
  getWorkflowExecutionMetadata,
  getAllWorkflowExecutionMetadata,
} from '@/ExecutionMetadata';
import { getSecretsProxy } from '@/Secrets';
} from './execution-metadata';
import { getSecretsProxy } from './get-secrets-proxy';

/** Returns the additional keys for Expressions and Function-Nodes */
export function getAdditionalKeys(
@@ -19,10 +19,10 @@ import {
  ApplicationError,
} from 'n8n-workflow';

import { createNodeAsTool } from '@/CreateNodeAsTool';
import { createNodeAsTool } from './create-node-as-tool';
// eslint-disable-next-line import/no-cycle
import { SupplyDataContext } from '@/node-execution-context';
import type { ExecuteContext, WebhookContext } from '@/node-execution-context';
import { SupplyDataContext } from '../../node-execution-context';
import type { ExecuteContext, WebhookContext } from '../../node-execution-context';

export async function getInputConnectionData(
  this: ExecuteContext | WebhookContext | SupplyDataContext,
@@ -14,7 +14,7 @@ import {
  validateFieldType,
} from 'n8n-workflow';

import type { ExtendedValidationResult } from '@/Interfaces';
import type { ExtendedValidationResult } from '@/interfaces';

const validateResourceMapperValue = (
  parameterName: string,
@@ -25,10 +25,10 @@ import {
  getNodeWebhookUrl,
  getRequestHelperFunctions,
  returnJsonArray,
} from '@/NodeExecuteFunctions';
} from '@/node-execute-functions';

import { NodeExecutionContext } from './node-execution-context';
import { getInputConnectionData } from './utils/getInputConnectionData';
import { getInputConnectionData } from './utils/get-input-connection-data';

export class WebhookContext extends NodeExecutionContext implements IWebhookFunctions {
  readonly helpers: IWebhookFunctions['helpers'];
@@ -1,8 +1,8 @@
import type { IRunData } from 'n8n-workflow';

import { createNodeData, toITaskData } from './helpers';
import { cleanRunData } from '../cleanRunData';
import { DirectedGraph } from '../DirectedGraph';
import { cleanRunData } from '../clean-run-data';
import { DirectedGraph } from '../directed-graph';

describe('cleanRunData', () => {
  // ┌─────┐ ┌─────┐ ┌─────┐
@@ -13,7 +13,7 @@ import type { INode } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';

import { createNodeData, defaultWorkflowParameter } from './helpers';
import { DirectedGraph } from '../DirectedGraph';
import { DirectedGraph } from '../directed-graph';

describe('DirectedGraph', () => {
  // ┌─────┐ ┌─────┐ ┌─────┐
@@ -12,8 +12,8 @@
import { NodeConnectionType } from 'n8n-workflow';

import { createNodeData } from './helpers';
import { DirectedGraph } from '../DirectedGraph';
import { filterDisabledNodes } from '../filterDisabledNodes';
import { DirectedGraph } from '../directed-graph';
import { filterDisabledNodes } from '../filter-disabled-nodes';

describe('filterDisabledNodes', () => {
  // XX
@@ -12,8 +12,8 @@
import { type IPinData, type IRunData } from 'n8n-workflow';

import { createNodeData, toITaskData } from './helpers';
import { DirectedGraph } from '../DirectedGraph';
import { findStartNodes, isDirty } from '../findStartNodes';
import { DirectedGraph } from '../directed-graph';
import { findStartNodes, isDirty } from '../find-start-nodes';

describe('isDirty', () => {
  test("if the node has pinned data it's not dirty", () => {
@@ -12,8 +12,8 @@
import { NodeConnectionType } from 'n8n-workflow';

import { createNodeData } from './helpers';
import { DirectedGraph } from '../DirectedGraph';
import { findSubgraph } from '../findSubgraph';
import { DirectedGraph } from '../directed-graph';
import { findSubgraph } from '../find-subgraph';

describe('findSubgraph', () => {
  // ►►
@@ -11,8 +11,8 @@ import type { IPinData } from 'n8n-workflow';
import { NodeConnectionType, type IRunData } from 'n8n-workflow';

import { createNodeData, toITaskData } from './helpers';
import { DirectedGraph } from '../DirectedGraph';
import { getSourceDataGroups } from '../getSourceDataGroups';
import { DirectedGraph } from '../directed-graph';
import { getSourceDataGroups } from '../get-source-data-groups';

describe('getSourceDataGroups', () => {
  //┌───────┐1
@@ -10,8 +10,8 @@
// PD denotes that the node has pinned data

import { createNodeData } from './helpers';
import { DirectedGraph } from '../DirectedGraph';
import { handleCycles } from '../handleCycles';
import { DirectedGraph } from '../directed-graph';
import { handleCycles } from '../handle-cycles';

describe('handleCycles', () => {
  // ┌────┐ ┌─────────┐
@@ -18,15 +18,14 @@ import type {
} from 'n8n-workflow';
import { NodeConnectionType, type IPinData, type IRunData } from 'n8n-workflow';

import { createNodeData, toITaskData } from './helpers';
import { DirectedGraph } from '../directed-graph';
import { findSubgraph } from '../find-subgraph';
import {
  addWaitingExecution,
  addWaitingExecutionSource,
  recreateNodeExecutionStack,
} from '@/PartialExecutionUtils/recreateNodeExecutionStack';

import { createNodeData, toITaskData } from './helpers';
import { DirectedGraph } from '../DirectedGraph';
import { findSubgraph } from '../findSubgraph';
} from '../recreate-node-execution-stack';

describe('recreateNodeExecutionStack', () => {
  // ►►
@@ -1,6 +1,6 @@
import type { INode, IRunData } from 'n8n-workflow';

import type { DirectedGraph } from './DirectedGraph';
import type { DirectedGraph } from './directed-graph';

/**
 * Returns new run data that does not contain data for any node that is a child
@@ -1,6 +1,6 @@
import { NodeConnectionType } from 'n8n-workflow';

import type { DirectedGraph } from './DirectedGraph';
import type { DirectedGraph } from './directed-graph';

export function filterDisabledNodes(graph: DirectedGraph): DirectedGraph {
  const filteredGraph = graph.clone();
@@ -1,7 +1,7 @@
import { NodeConnectionType, type INode, type IPinData, type IRunData } from 'n8n-workflow';

import type { DirectedGraph } from './DirectedGraph';
import { getIncomingData, getIncomingDataFromAnyRun } from './getIncomingData';
import type { DirectedGraph } from './directed-graph';
import { getIncomingData, getIncomingDataFromAnyRun } from './get-incoming-data';

/**
 * A node is dirty if either of the following is true:
@@ -1,7 +1,7 @@
import { NodeConnectionType, type INode } from 'n8n-workflow';

import type { GraphConnection } from './DirectedGraph';
import { DirectedGraph } from './DirectedGraph';
import type { GraphConnection } from './directed-graph';
import { DirectedGraph } from './directed-graph';

function findSubgraphRecursive(
  graph: DirectedGraph,
@@ -1,6 +1,6 @@
import { type INode, type IPinData, type IRunData } from 'n8n-workflow';

import type { GraphConnection, DirectedGraph } from './DirectedGraph';
import type { GraphConnection, DirectedGraph } from './directed-graph';

function sortByInputIndexThenByName(
  connection1: GraphConnection,
@@ -1,7 +1,7 @@
import type { INode } from 'n8n-workflow';
import * as a from 'node:assert/strict';

import type { DirectedGraph } from './DirectedGraph';
import type { DirectedGraph } from './directed-graph';

/**
 * Returns a new set of start nodes.
@@ -0,0 +1,8 @@
export { DirectedGraph } from './directed-graph';
export { findTriggerForPartialExecution } from './find-trigger-for-partial-execution';
export { findStartNodes } from './find-start-nodes';
export { findSubgraph } from './find-subgraph';
export { recreateNodeExecutionStack } from './recreate-node-execution-stack';
export { cleanRunData } from './clean-run-data';
export { handleCycles } from './handle-cycles';
export { filterDisabledNodes } from './filter-disabled-nodes';
@@ -12,9 +12,9 @@ import {
  type IWaitingForExecutionSource,
} from 'n8n-workflow';

import type { DirectedGraph } from './DirectedGraph';
import { getIncomingDataFromAnyRun } from './getIncomingData';
import { getSourceDataGroups } from './getSourceDataGroups';
import type { DirectedGraph } from './directed-graph';
import { getIncomingDataFromAnyRun } from './get-incoming-data';
import { getSourceDataGroups } from './get-source-data-groups';

export function addWaitingExecution(
  waitingExecution: IWaitingForExecution,
@@ -2,7 +2,7 @@ import { Service } from '@n8n/di';
import { CronJob } from 'cron';
import type { CronExpression, Workflow } from 'n8n-workflow';

import { InstanceSettings } from './InstanceSettings';
import { InstanceSettings } from '@/instance-settings';

@Service()
export class ScheduledTaskManager {
Some files were not shown because too many files have changed in this diff.