refactor: Overhaul nodes-testing setup - Part 3 (no-changelog) (#14967)

This commit is contained in:
कारतोफ्फेलस्क्रिप्ट™
2025-04-29 17:42:21 +02:00
committed by GitHub
parent 3e43f9f8bc
commit 979f9e6327
241 changed files with 1868 additions and 2013 deletions

View File

@@ -0,0 +1,79 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { pick } from 'lodash';
import type { WorkflowTestData } from 'n8n-workflow';
import path from 'node:path';
describe('OpenAI Workflow', () => {
const baseUrl = 'https://api.openai.com/v1';
const credentials = {
openAiApi: { url: baseUrl },
};
const testHarness = new NodeTestHarness({
additionalPackagePaths: [path.dirname(require.resolve('n8n-nodes-base'))],
});
const assistants = [
{
id: 'asst_abc123',
object: 'assistant',
created_at: 1698982736,
name: 'Coding Tutor',
description: null,
model: 'gpt-4o',
tools: [],
tool_resources: {},
metadata: {},
top_p: 1.0,
temperature: 1.0,
response_format: 'auto',
},
{
id: 'asst_abc456',
object: 'assistant',
created_at: 1698982718,
name: 'My Assistant',
description: null,
model: 'gpt-4o',
tools: [],
tool_resources: {},
metadata: {},
top_p: 1.0,
temperature: 1.0,
response_format: 'auto',
},
];
const testData: WorkflowTestData = {
description: 'List Assistants',
input: {
workflowData: testHarness.readWorkflowJSON('list-assistants.workflow.json'),
},
output: {
nodeExecutionOrder: ['When clicking Test workflow', 'OpenAI'],
nodeData: {
OpenAI: [
assistants.map((assistant) => ({
json: pick(assistant, ['id', 'model', 'name']),
})),
],
},
},
nock: {
baseUrl,
mocks: [
{
method: 'get',
path: '/assistants?limit=100',
statusCode: 200,
responseBody: {
object: 'list',
data: assistants,
},
},
],
},
};
testHarness.setupTest(testData, { credentials });
});

View File

@@ -0,0 +1,43 @@
{
"nodes": [
{
"parameters": {},
"type": "n8n-nodes-base.manualTrigger",
"typeVersion": 1,
"position": [0, 0],
"id": "ce6133c3-2eb6-4262-8e0c-54015ed0f795",
"name": "When clicking Test workflow"
},
{
"parameters": {
"resource": "assistant",
"operation": "list"
},
"type": "@n8n/n8n-nodes-langchain.openAi",
"typeVersion": 1.8,
"position": [220, 0],
"id": "070d2fcc-032c-4c3f-ae33-80a5352785f8",
"name": "OpenAI",
"credentials": {
"openAiApi": {
"id": "123",
"name": "OpenAi account"
}
}
}
],
"connections": {
"When clicking Test workflow": {
"main": [
[
{
"node": "OpenAI",
"type": "main",
"index": 0
}
]
]
}
},
"pinData": {}
}

View File

@@ -6,9 +6,12 @@
"compilerOptions": {
"baseUrl": ".",
"paths": {
"@utils/*": ["./utils/*"]
"@utils/*": ["./utils/*"],
"@nodes-testing/*": ["../../core/nodes-testing/*"]
},
"tsBuildInfoFile": "dist/typecheck.tsbuildinfo",
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
// TODO: remove all options below this line
"useUnknownInCatchVariables": false
},

View File

@@ -12,7 +12,7 @@ module.exports = {
project: './tsconfig.json',
},
ignorePatterns: ['bin/*.js', 'nodes-testing/*.ts'],
rules: {
complexity: 'error',

View File

@@ -1,5 +1,4 @@
import { Service } from '@n8n/di';
import { ICredentialsHelper } from 'n8n-workflow';
import type {
ICredentialDataDecryptedObject,
@@ -10,12 +9,17 @@ import type {
IWorkflowExecuteAdditionalData,
} from 'n8n-workflow';
import { Credentials } from '../dist/credentials';
import { CredentialTypes } from './credential-types';
@Service()
export class CredentialsHelper extends ICredentialsHelper {
private credentialsMap: Record<string, ICredentialDataDecryptedObject> = {};
constructor(private readonly credentialTypes: CredentialTypes) {
super();
}
setCredentials(credentialsMap: Record<string, ICredentialDataDecryptedObject>) {
this.credentialsMap = credentialsMap;
}
@@ -29,7 +33,7 @@ export class CredentialsHelper extends ICredentialsHelper {
typeName: string,
requestParams: IHttpRequestOptions,
): Promise<IHttpRequestOptions> {
const credentialType = this.credentialTypes.getByName(typeName);
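// If the credential type implements a custom authenticate() hook, let it sign/decorate the outgoing request.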
if (typeof credentialType.authenticate === 'function') {
return await credentialType.authenticate(credentials, requestParams);
}

View File

@@ -0,0 +1,97 @@
import { Service } from '@n8n/di';
import type {
ICredentialType,
INodeType,
IVersionedNodeType,
KnownNodesAndCredentials,
LoadedClass,
LoadedNodesAndCredentials,
LoadingDetails,
} from 'n8n-workflow';
import path from 'node:path';
import { UnrecognizedCredentialTypeError, UnrecognizedNodeTypeError } from '../dist/errors';
import { LazyPackageDirectoryLoader } from '../dist/nodes-loader/lazy-package-directory-loader';
/** Rewrites a node's or credential's source path so tests load the TypeScript source instead of the compiled JavaScript */
const fixSourcePath = (loadInfo: LoadingDetails) => {
if (!loadInfo) return;
loadInfo.sourcePath = loadInfo.sourcePath.replace(/^dist\//, './').replace(/\.js$/, '.ts');
};
@Service()
export class LoadNodesAndCredentials {
private loaders: Record<string, LazyPackageDirectoryLoader> = {};
readonly known: KnownNodesAndCredentials = { nodes: {}, credentials: {} };
readonly loaded: LoadedNodesAndCredentials = { nodes: {}, credentials: {} };
constructor(packagePaths: string[]) {
for (const packagePath of packagePaths) {
const loader = new LazyPackageDirectoryLoader(packagePath);
this.loaders[loader.packageName] = loader;
}
}
async init() {
for (const [packageName, loader] of Object.entries(this.loaders)) {
await loader.loadAll();
const { known, directory } = loader;
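// Node types are keyed by their package-qualified name, e.g. 'n8n-nodes-base.airtable'.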
for (const type in known.nodes) {
const { className, sourcePath } = known.nodes[type];
this.known.nodes[`${packageName}.${type}`] = {
className,
sourcePath: path.join(directory, sourcePath),
};
}
for (const type in known.credentials) {
const {
className,
sourcePath,
supportedNodes,
extends: extendsArr,
} = known.credentials[type];
this.known.credentials[type] = {
className,
sourcePath: path.join(directory, sourcePath),
supportedNodes: supportedNodes?.map((nodeName) => `${loader.packageName}.${nodeName}`),
extends: extendsArr,
};
}
}
}
recognizesCredential(credentialType: string): boolean {
return credentialType in this.known.credentials;
}
getCredential(credentialType: string): LoadedClass<ICredentialType> {
for (const loader of Object.values(this.loaders)) {
if (credentialType in loader.known.credentials) {
const loaded = loader.getCredential(credentialType);
this.loaded.credentials[credentialType] = loaded;
fixSourcePath(loader.known.credentials[credentialType]);
}
}
if (credentialType in this.loaded.credentials) {
return this.loaded.credentials[credentialType];
}
throw new UnrecognizedCredentialTypeError(credentialType);
}
getNode(fullNodeType: string): LoadedClass<INodeType | IVersionedNodeType> {
const [packageName, nodeType] = fullNodeType.split('.');
const { loaders } = this;
const loader = loaders[packageName];
if (!loader) {
throw new UnrecognizedNodeTypeError(packageName, nodeType);
}
fixSourcePath(loader.known.nodes[nodeType]);
return loader.getNode(nodeType);
}
}

View File

@@ -0,0 +1,341 @@
import { Memoized } from '@n8n/decorators';
import { Container } from '@n8n/di';
import callsites from 'callsites';
import glob from 'fast-glob';
import { mock } from 'jest-mock-extended';
import { isEmpty } from 'lodash';
import type {
ICredentialDataDecryptedObject,
IRun,
IRunExecutionData,
IWorkflowBase,
IWorkflowExecuteAdditionalData,
WorkflowTestData,
} from 'n8n-workflow';
import { createDeferredPromise, UnexpectedError, Workflow } from 'n8n-workflow';
import nock from 'nock';
import { readFileSync, mkdtempSync, existsSync, rmSync } from 'node:fs';
import { tmpdir } from 'node:os';
import path from 'node:path';
import { ExecutionLifecycleHooks } from '../dist/execution-engine/execution-lifecycle-hooks';
import { WorkflowExecute } from '../dist/execution-engine/workflow-execute';
import { CredentialsHelper } from './credentials-helper';
import { LoadNodesAndCredentials } from './load-nodes-and-credentials';
import { NodeTypes } from './node-types';
type NodeOutputs = WorkflowTestData['output']['nodeData'];
type TestHarnessOptions = {
additionalPackagePaths?: string[];
};
type TestOptions = {
credentials?: Record<string, ICredentialDataDecryptedObject>;
assertBinaryData?: boolean;
workflowFiles?: string[];
nock?: WorkflowTestData['nock'];
customAssertions?: () => void;
};
export class NodeTestHarness {
private readonly testDir: string;
private readonly packagePaths: string[];
constructor({ additionalPackagePaths }: TestHarnessOptions = {}) {
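// callsites()[1] is the caller's stack frame, i.e. the test file that created this harness; its directory anchors all relative fixture paths.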
this.testDir = path.dirname(callsites()[1].getFileName()!);
this.packagePaths = additionalPackagePaths ?? [];
this.packagePaths.unshift(this.packageDir);
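// Block all real network access before each test; only requests intercepted by nock can proceed.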
beforeEach(() => nock.disableNetConnect());
}
readWorkflowJSON(filePath: string) {
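// Accepts either a bare filename (resolved against the test directory) or a path already relative to the package root.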
if (!filePath.startsWith(this.relativePath)) {
filePath = path.join(this.testDir, filePath);
}
return JSON.parse(readFileSync(filePath, 'utf-8')) as IWorkflowBase &
Pick<WorkflowTestData, 'trigger'>;
}
setupTests(options: TestOptions = {}) {
const workflowFilenames =
options.workflowFiles?.map((fileName) => path.join(this.relativePath, fileName)) ??
this.workflowFilenames;
const tests = this.workflowToTests(workflowFilenames, options);
for (const testData of tests) {
this.setupTest(testData, options);
}
}
setupTest(testData: WorkflowTestData, options: TestOptions = {}) {
if (options.assertBinaryData) testData.output.assertBinaryData = true;
if (options.credentials) testData.credentials = options.credentials;
if (options.nock) testData.nock = options.nock;
test(testData.description, async () => {
if (testData.nock) this.setupNetworkMocks(testData.nock);
const { result, nodeExecutionOrder } = await this.executeWorkflow(testData);
this.assertOutput(testData, result, nodeExecutionOrder);
if (options.customAssertions) options.customAssertions();
});
}
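// Created lazily on first access (once per suite, thanks to @Memoized) and removed after all tests finish.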
@Memoized
get temporaryDir() {
const dir = mkdtempSync(path.join(tmpdir(), 'n8n-'));
afterAll(() => rmSync(dir, { recursive: true }));
return dir;
}
private workflowToTests(workflowFiles: string[], options: TestOptions = {}) {
const testCases: WorkflowTestData[] = [];
for (const filePath of workflowFiles) {
const description = filePath.replace('.json', '');
const workflowData = this.readWorkflowJSON(filePath);
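// Legacy workflow fixtures use a 'C:\Test\' placeholder for file paths; rewrite it to the actual test directory.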
workflowData.nodes.forEach((node) => {
if (node.parameters) {
node.parameters = JSON.parse(
JSON.stringify(node.parameters).replace(/"C:\\\\Test\\\\(.*)"/, `"${this.testDir}/$1"`),
);
}
});
const { pinData } = workflowData;
if (pinData === undefined) {
throw new UnexpectedError('Workflow data does not contain pinData');
}
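// Each node's pinned data doubles as its expected output (a single run per node).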
const nodeData = Object.keys(pinData).reduce((acc, key) => {
const items = pinData[key];
acc[key] = [items];
return acc;
}, {} as NodeOutputs);
delete workflowData.pinData;
const { trigger } = workflowData;
delete workflowData.trigger;
testCases.push({
description,
input: { workflowData },
output: { nodeData },
trigger,
credentials: options.credentials,
});
}
return testCases;
}
@Memoized
private get packageDir() {
let packageDir = this.testDir;
while (packageDir !== '/') {
if (existsSync(path.join(packageDir, 'package.json'))) break;
packageDir = path.dirname(packageDir);
}
if (packageDir === '/') {
throw new UnexpectedError('Invalid package');
}
return packageDir;
}
@Memoized
private get relativePath() {
return path.relative(this.packageDir, this.testDir);
}
@Memoized
private get workflowFilenames() {
return glob.sync(`${this.relativePath}/**/*.json`, { cwd: this.packageDir });
}
private setupNetworkMocks({ baseUrl, mocks }: NonNullable<WorkflowTestData['nock']>) {
const agent = nock(baseUrl);
mocks.forEach(
({
method,
path,
statusCode,
requestBody,
requestHeaders,
responseBody,
responseHeaders,
}) => {
let mock = agent[method](path, requestBody);
// nock interceptor reqheaders option is ignored, so we chain matchHeader()
// agent[method](path, requestBody, { reqheaders: requestHeaders }).reply(statusCode, responseBody, responseHeaders)
// https://github.com/nock/nock/issues/2545
if (requestHeaders && Object.keys(requestHeaders).length > 0) {
Object.entries(requestHeaders).forEach(([key, value]) => {
mock = mock.matchHeader(key, value);
});
}
mock.reply(statusCode, responseBody, responseHeaders);
},
);
}
private async executeWorkflow(testData: WorkflowTestData) {
const loadNodesAndCredentials = new LoadNodesAndCredentials(this.packagePaths);
Container.set(LoadNodesAndCredentials, loadNodesAndCredentials);
await loadNodesAndCredentials.init();
const nodeTypes = Container.get(NodeTypes);
const credentialsHelper = Container.get(CredentialsHelper);
credentialsHelper.setCredentials(testData.credentials ?? {});
const executionMode = testData.trigger?.mode ?? 'manual';
const { connections, nodes, settings } = testData.input.workflowData;
const workflowInstance = new Workflow({
id: 'test',
nodes,
connections,
nodeTypes,
settings,
active: false,
});
const hooks = new ExecutionLifecycleHooks('trigger', '1', mock());
const nodeExecutionOrder: string[] = [];
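// Track the order in which nodes finish, so tests can assert against output.nodeExecutionOrder.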
hooks.addHandler('nodeExecuteAfter', (nodeName) => {
nodeExecutionOrder.push(nodeName);
});
const waitPromise = createDeferredPromise<IRun>();
hooks.addHandler('workflowExecuteAfter', (fullRunData) => waitPromise.resolve(fullRunData));
const additionalData = mock<IWorkflowExecuteAdditionalData>({
hooks,
// Get from node.parameters
currentNodeParameters: undefined,
});
additionalData.credentialsHelper = credentialsHelper;
let executionData: IRun;
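// Seed the execution stack manually with the workflow's start node, feeding it the trigger input or a single empty item.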
const runExecutionData: IRunExecutionData = {
resultData: {
runData: {},
},
executionData: {
metadata: {},
contextData: {},
waitingExecution: {},
waitingExecutionSource: null,
nodeExecutionStack: [
{
node: workflowInstance.getStartNode()!,
data: {
main: [[testData.trigger?.input ?? { json: {} }]],
},
source: null,
},
],
},
};
const workflowExecute = new WorkflowExecute(additionalData, executionMode, runExecutionData);
executionData = await workflowExecute.processRunExecutionData(workflowInstance);
const result = await waitPromise.promise;
return { executionData, result, nodeExecutionOrder };
}
private getResultNodeData(result: IRun, testData: WorkflowTestData) {
const { runData } = result.data.resultData;
return Object.keys(testData.output.nodeData).map((nodeName) => {
if (runData[nodeName] === undefined) {
// log errors from other nodes
Object.keys(runData).forEach((key) => {
const error = runData[key][0]?.error;
if (error) {
console.log(`Node ${key}\n`, error);
}
});
throw new UnexpectedError(`Data for node "${nodeName}" is missing!`);
}
const resultData = runData[nodeName].map((nodeData) => {
if (nodeData.data === undefined) {
return null;
}
// TODO: iterate all runIndexes
return nodeData.data.main[0]!.map((entry) => {
if (entry.binary && isEmpty(entry.binary)) delete entry.binary;
delete entry.pairedItem;
return entry;
});
});
return {
nodeName,
resultData,
};
});
}
private assertOutput(testData: WorkflowTestData, result: IRun, nodeExecutionOrder: string[]) {
const { output } = testData;
// Check that the nodes executed in the correct order (if the test defines this)
if (output.nodeExecutionOrder?.length) {
expect(nodeExecutionOrder).toEqual(output.nodeExecutionOrder);
}
const {
finished,
status,
data: { executionData, resultData },
} = result;
if (output.nodeExecutionStack) {
expect(executionData?.nodeExecutionStack).toEqual(output.nodeExecutionStack);
}
if (output.error) {
const { error } = resultData;
const errorMessage = (error?.cause ? error.cause : error)?.message;
expect(errorMessage).toBeDefined();
expect(output.error).toBe(errorMessage);
expect(finished).toBeUndefined();
return;
}
// check if result node data matches expected test data
const resultNodeData = this.getResultNodeData(result, testData);
resultNodeData.forEach(({ nodeName, resultData }) => {
resultData.forEach((items) => {
items?.forEach((item) => {
const { binary, json } = item;
if (binary) {
if (!output.assertBinaryData) {
delete item.binary;
} else {
for (const key in binary) {
delete binary[key].directory;
}
}
}
// Convert errors to JSON so tests can compare
if (json?.error instanceof Error) {
json.error = JSON.parse(
JSON.stringify(json.error, ['message', 'name', 'description', 'context']),
);
}
});
});
const msg = `Equality failed for "${testData.description}" at node "${nodeName}"`;
expect(resultData, msg).toEqual(output.nodeData[nodeName]);
});
if (finished) {
expect(status).toEqual('success');
} else {
expect(status).toEqual('waiting');
}
}
}

View File

@@ -1,44 +1,34 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import type { WorkflowTestData } from 'n8n-workflow';
import nock from 'nock';
import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import * as Helpers from '@test/nodes/Helpers';
const record = {
id: 'rec2BWBoyS5QsS7pT',
fields: {
name: 'Tim',
email: 'tim@email.com',
},
createdTime: '2022-08-25T08:22:34.000Z',
};
describe('Execute Airtable Node', () => {
const testHarness = new NodeTestHarness();
beforeEach(() => {
nock('https://api.airtable.com/v0')
.get('/appIaXXdDqS5ORr4V/tbljyBEdYzCPF0NDh/rec2BWBoyS5QsS7pT')
.reply(200, record);
});
const testData: WorkflowTestData = {
description: 'List Airtable Records',
input: {
workflowData: testHarness.readWorkflowJSON('workflow.json'),
},
output: {
nodeData: {
Airtable: [[{ json: record }]],
},
},
};
testHarness.setupTest(testData, { credentials: { airtableTokenApi: {} } });
});

View File

@@ -13,16 +13,19 @@
},
{
"parameters": {
"operation": "list",
"base": "appIaXXdDqS5ORr4V",
"resource": "record",
"operation": "get",
"id": "rec2BWBoyS5QsS7pT",
"application": {
"__rl": true,
"value": "https://airtable.com/appIaXXdDqS5ORr4V/tbljyBEdYzCPF0NDh/viwInsMdsxffad0aU",
"value": "https://airtable.com/appIaXXdDqS5ORr4V/tbljyBEdYzCPF0NDh/rec2BWBoyS5QsS7pT",
"mode": "url",
"__regex": "https://airtable.com/([a-zA-Z0-9]{2,})"
},
"table": {
"__rl": true,
"value": "https://airtable.com/appIaXXdDqS5ORr4V/tbljyBEdYzCPF0NDh/viwInsMdsxffad0aU",
"value": "https://airtable.com/appIaXXdDqS5ORr4V/tbljyBEdYzCPF0NDh/rec2BWBoyS5QsS7pT",
"mode": "url",
"__regex": "https://airtable.com/[a-zA-Z0-9]{2,}/([a-zA-Z0-9]{2,})"
},
@@ -31,10 +34,10 @@
"id": "5654d3b3-fe83-4988-889b-94f107d41807",
"name": "Airtable",
"type": "n8n-nodes-base.airtable",
"typeVersion": 1,
"typeVersion": 2,
"position": [1020, 380],
"credentials": {
"airtableApi": {
"airtableTokenApi": {
"id": "20",
"name": "Airtable account"
}

View File

@@ -1,11 +1,8 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { credentials } from '../../__tests__/credentials';
describe('Test AWS Comprehend Node', () => {
describe('Detect Language', () => {
let mock: nock.Scope;
@@ -35,6 +32,6 @@ describe('Test AWS Comprehend Node', () => {
mock.post('/').reply(200, response);
});
new NodeTestHarness().setupTests({ credentials });
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { credentials } from '../../__tests__/credentials';
const responseLabels = [
@@ -288,7 +287,6 @@ const responseLabels = [
describe('Test AWS Rekognition Node', () => {
describe('Image Labels Recognition', () => {
const baseUrl = 'https://rekognition.eu-central-1.amazonaws.com';
let mock: nock.Scope;
@@ -300,6 +298,6 @@ describe('Test AWS Rekognition Node', () => {
mock.post('/').reply(200, responseLabels);
});
new NodeTestHarness().setupTests({ credentials });
});
});

View File

@@ -1,11 +1,8 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { credentials } from '../../../__tests__/credentials';
describe('Test S3 V1 Node', () => {
describe('File Upload', () => {
let mock: nock.Scope;
@@ -39,6 +36,6 @@ describe('Test S3 V1 Node', () => {
.reply(200, { success: true });
});
new NodeTestHarness().setupTests({ credentials });
});
});

View File

@@ -1,11 +1,8 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { credentials } from '../../../__tests__/credentials';
describe('Test S3 V2 Node', () => {
describe('File Upload', () => {
let mock: nock.Scope;
@@ -39,6 +36,6 @@ describe('Test S3 V2 Node', () => {
.reply(200, { success: true });
});
new NodeTestHarness().setupTests({ credentials });
});
});

View File

@@ -1,13 +1,12 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { NodeConnectionTypes, type WorkflowTestData } from 'n8n-workflow';
import assert from 'node:assert';
import qs from 'node:querystring';
import { credentials } from '../../__tests__/credentials';
describe('AwsSes Node', () => {
const testHarness = new NodeTestHarness();
const email = 'test+user@example.com';
const templateData = {
Name: 'Special. Characters @#$%^&*()_-',
@@ -65,7 +64,6 @@ describe('AwsSes Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'AWS SES': [[{ json: { success: 'true' } }]],
},
@@ -153,7 +151,6 @@ describe('AwsSes Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: { 'AWS SES': [[{ json: { success: 'true' } }]] },
},
nock: {
@@ -171,13 +168,7 @@ describe('AwsSes Node', () => {
},
];
for (const testData of tests) {
testHarness.setupTest(testData, { credentials });
}
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import {
createResponse,
fieldsResponse,
@@ -55,7 +54,6 @@ describe('Baserow > Workflows', () => {
mock.delete('/api/database/rows/table/482710/3/').reply(200, {});
});
new NodeTestHarness().setupTests({ credentials });
});
});

View File

@@ -1,18 +1,15 @@
import { NodeVM } from '@n8n/vm2';
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { anyNumber, mock } from 'jest-mock-extended';
import { normalizeItems } from 'n8n-core';
import type { IExecuteFunctions, IWorkflowDataProxyData } from 'n8n-workflow';
import { ApplicationError } from 'n8n-workflow';
import { Code } from '../Code.node';
import { ValidationError } from '../ValidationError';
describe('Test Code Node', () => {
new NodeTestHarness().setupTests();
});
describe('Code Node unit test', () => {

View File

@@ -1,5 +1,5 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
describe('Test Compare Datasets Node', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -1,16 +1,14 @@
/* eslint-disable @typescript-eslint/no-loop-func */
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import type { WorkflowTestData } from 'n8n-workflow';
import os from 'node:os';
import path from 'path';
if (os.platform() !== 'win32') {
describe('Execute Compression Node', () => {
const testHarness = new NodeTestHarness();
const workflowData = testHarness.readWorkflowJSON('workflow.compression.json');
const node = workflowData.nodes.find((n) => n.name === 'Read Binary File')!;
node.parameters.filePath = path.join(__dirname, 'lorem.txt');
const tests: WorkflowTestData[] = [
@@ -20,6 +18,7 @@ if (os.platform() !== 'win32') {
workflowData,
},
output: {
assertBinaryData: true,
nodeData: {
Compression1: [
[
@@ -44,16 +43,7 @@ if (os.platform() !== 'win32') {
];
for (const testData of tests) {
testHarness.setupTest(testData);
}
});
} else {

View File

@@ -1,11 +1,8 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import fs from 'fs';
import fsPromises from 'fs/promises';
import { Readable } from 'stream';
describe('Test Crypto Node', () => {
jest.mock('fast-glob', () => async () => ['/test/binary.data']);
jest.mock('fs/promises');
@@ -13,5 +10,5 @@ describe('Test Crypto Node', () => {
jest.mock('fs');
fs.createReadStream = () => Readable.from(Buffer.from('test')) as fs.ReadStream;
new NodeTestHarness().setupTests();
});

View File

@@ -1,16 +1,13 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import moment from 'moment-timezone';
describe('Test DateTime Node', () => {
// ! When making changes to the Workflow test files make sure to export env TZ=UTC as GitHub Actions runs in UTC timezone
if (new Date().getTimezoneOffset() === 0 || moment().utcOffset() === 0) {
new NodeTestHarness().setupTests();
} else {
it('Skipped because timezone is not UTC', () => {
expect(true).toBe(true);
});
}
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, channel => create', () => {
nock('https://discord.com/api/v10')
.post('/guilds/1168516062791340136/channels', { name: 'third', type: '0' })
@@ -20,6 +19,7 @@ describe('Test DiscordV2, channel => create', () => {
nsfw: false,
});
new NodeTestHarness().setupTests({
workflowFiles: ['create.workflow.json'],
});
});

View File

@@ -1,12 +1,12 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, channel => deleteChannel', () => {
nock('https://discord.com/api/v10')
.delete('/channels/1168528323006181417')
.reply(200, { success: true });
new NodeTestHarness().setupTests({
workflowFiles: ['deleteChannel.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, channel => get', () => {
nock('https://discord.com/api/v10')
.persist()
@@ -23,6 +22,7 @@ describe('Test DiscordV2, channel => get', () => {
nsfw: false,
});
new NodeTestHarness().setupTests({
workflowFiles: ['get.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, channel => getAll', () => {
nock('https://discord.com/api/v10')
.get('/guilds/1168516062791340136/channels')
@@ -96,6 +95,7 @@ describe('Test DiscordV2, channel => getAll', () => {
},
]);
new NodeTestHarness().setupTests({
workflowFiles: ['getAll.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, channel => update', () => {
nock('https://discord.com/api/v10').patch('/channels/1168516240332034067').reply(200, {
id: '1168516240332034067',
@@ -18,6 +17,7 @@ describe('Test DiscordV2, channel => update', () => {
nsfw: true,
});
new NodeTestHarness().setupTests({
workflowFiles: ['update.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, member => getAll', () => {
nock('https://discord.com/api/v10')
.get('/guilds/1168516062791340136/members?limit=2')
@@ -43,6 +42,7 @@ describe('Test DiscordV2, member => getAll', () => {
},
]);
new NodeTestHarness().setupTests({
workflowFiles: ['getAll.workflow.json'],
});
});

View File

@@ -1,12 +1,12 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, member => roleAdd', () => {
nock('https://discord.com/api/v10')
.put('/guilds/1168516062791340136/members/470936827994570762/roles/1168772374540320890')
.reply(200, { success: true });
new NodeTestHarness().setupTests({
workflowFiles: ['roleAdd.workflow.json'],
});
});

View File

@@ -1,13 +1,13 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, member => roleRemove', () => {
nock('https://discord.com/api/v10')
.persist()
.delete(/\/guilds\/1168516062791340136\/members\/470936827994570762\/roles\/\d+/)
.reply(200, { success: true });
new NodeTestHarness().setupTests({
workflowFiles: ['roleRemove.workflow.json'],
});
});

View File

@@ -1,12 +1,12 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, message => deleteMessage', () => {
nock('https://discord.com/api/v10')
.delete('/channels/1168516240332034067/messages/1168776343194972210')
.reply(200, { success: true });
new NodeTestHarness().setupTests({
workflowFiles: ['deleteMessage.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, message => get', () => {
nock('https://discord.com/api/v10')
.get('/channels/1168516240332034067/messages/1168777380144369718')
@@ -28,6 +27,7 @@ describe('Test DiscordV2, message => get', () => {
type: 0,
});
new NodeTestHarness().setupTests({
workflowFiles: ['get.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, message => getAll', () => {
nock('https://discord.com/api/v10')
.get('/channels/1168516240332034067/messages?limit=1')
@@ -51,6 +50,7 @@ describe('Test DiscordV2, message => getAll', () => {
},
]);
new NodeTestHarness().setupTests({
workflowFiles: ['getAll.workflow.json'],
});
});

View File

@@ -1,12 +1,12 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, message => react', () => {
nock('https://discord.com/api/v10')
.put('/channels/1168516240332034067/messages/1168777380144369718/reactions/%F0%9F%98%80/@me')
.reply(200, { success: true });
new NodeTestHarness().setupTests({
workflowFiles: ['react.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, message => send', () => {
nock('https://discord.com/api/v10')
.post('/channels/1168516240332034067/messages', {
@@ -61,6 +60,7 @@ describe('Test DiscordV2, message => send', () => {
referenced_message: null,
});
new NodeTestHarness().setupTests({
workflowFiles: ['send.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test DiscordV2, webhook => sendLegacy', () => {
const credentials = {
discordWebhookApi: {
@@ -50,6 +49,8 @@ describe('Test DiscordV2, webhook => sendLegacy', () => {
webhook_id: '1153265494955135077',
});
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['sendLegacy.workflow.json'],
});
});

View File

@@ -2,7 +2,7 @@ import { mock } from 'jest-mock-extended';
import type { INodeTypes, IExecuteFunctions, AssignmentCollectionValue } from 'n8n-workflow';
import { NodeOperationError } from 'n8n-workflow';
import { EvaluationMetrics } from '../EvaluationMetrics.node';
describe('EvaluationMetrics Node', () => {
const nodeTypes = mock<INodeTypes>();

View File

@@ -1,4 +1,5 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
describe('Execute Execute Command Node', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -1,8 +1,7 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import { ExecutionData } from '../ExecutionData.node';
describe('ExecutionData Node', () => {
@@ -20,5 +19,6 @@ describe('ExecutionData Node', () => {
});
});
describe('ExecutionData -> Should run the workflow', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -1,5 +1,5 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
describe('Test ConvertToFile Node', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -1,6 +1,5 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
describe('ExtractFromFile', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -124,7 +124,6 @@ export async function execute(this: IExecuteFunctions, items: INodeExecutionData
mimeType: binaryData.mimeType,
fileType: binaryData.fileType,
fileName: binaryData.fileName,
directory: binaryData.directory,
fileExtension: binaryData.fileExtension,
fileSize: binaryData.fileSize,
},

View File

@@ -1,36 +1,31 @@
/* eslint-disable @typescript-eslint/no-loop-func */
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import type { WorkflowTestData } from 'n8n-workflow';
describe('Test ReadWriteFile Node', () => {
const directory = __dirname.replace(/\\/gi, '/');
const testHarness = new NodeTestHarness();
const workflowData = testHarness.readWorkflowJSON('ReadWriteFile.workflow.json');
const readFileNode = workflowData.nodes.find((n) => n.name === 'Read from Disk')!;
readFileNode.parameters.fileSelector = `${directory}/image.jpg`;
const writeFileNode = workflowData.nodes.find((n) => n.name === 'Write to Disk')!;
writeFileNode.parameters.fileName = `${testHarness.temporaryDir}/image-written.jpg`;
const tests: WorkflowTestData[] = [
{
description: 'nodes/Files/ReadWriteFile/test/ReadWriteFile.workflow.json',
input: {
workflowData,
},
output: {
assertBinaryData: true,
nodeData: {
'Read from Disk': [
[
{
json: {
directory,
fileExtension: 'jpg',
fileName: 'image.jpg',
fileSize: '1.04 kB',
@@ -43,7 +38,6 @@ describe('Test ReadWriteFile Node', () => {
fileType: 'image',
fileExtension: 'jpg',
data: '/9j/4AAQSkZJRgABAQEASABIAAD/4QBmRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUAAAABAAAARgEoAAMAAAABAAIAAAExAAIAAAAQAAAATgAAAAAAARlJAAAD6AABGUkAAAPocGFpbnQubmV0IDUuMC4xAP/bAEMAIBYYHBgUIBwaHCQiICYwUDQwLCwwYkZKOlB0Znp4cmZwboCQuJyAiK6KbnCg2qKuvsTO0M58muLy4MjwuMrOxv/bAEMBIiQkMCowXjQ0XsaEcITGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxv/AABEIAB8AOwMBEgACEQEDEQH/xAAfAAABBQEBAQEBAQAAAAAAAAAAAQIDBAUGBwgJCgv/xAC1EAACAQMDAgQDBQUEBAAAAX0BAgMABBEFEiExQQYTUWEHInEUMoGRoQgjQrHBFVLR8CQzYnKCCQoWFxgZGiUmJygpKjQ1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4eLj5OXm5+jp6vHy8/T19vf4+fr/xAAfAQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgv/xAC1EQACAQIEBAMEBwUEBAABAncAAQIDEQQFITEGEkFRB2FxEyIygQgUQpGhscEJIzNS8BVictEKFiQ04SXxFxgZGiYnKCkqNTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqCg4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2dri4+Tl5ufo6ery8/T19vf4+fr/2gAMAwEAAhEDEQA/AOgqgrXF2zNHJ5aKcD3oNPZ23di/VKG82bkuTh1OMgdaAdOSLtZ6G5ut0iSeWoOAKAdO27NCqUN8oQrcHDqccDrQDpyRNPdRwEKcsx7CobIebPLORwThc0inGMF724jagNpxG4OOM1dIDAgjIPBpkqUOxnR2pmh85pW3nJB9KkNi4yqTssZ6rSNXNX0ehHFfusYDLuI7+tXY4I40ChQcdzQRKcL7Fb7PcQO32cqUY5we1XqZPtH11KsFoFDGYK7sckkZxVqgTnJlEQXMBZYGUoTkZ7VeoH7RvcqwWaIh80K7k5JIq1QJzkyhbMtvdSxMdqnlc1amgjmx5i5I70inNSVpFdrmaWRltkBVerHvUW57B2AUNGxyOaC+VW9xXLVrcGbcjrtkXqKZZxvveeTAL9APSgiooq1ty3RTMj//2Q==',
directory,
fileName: 'image.jpg',
fileSize: '1.04 kB',
},
@@ -55,7 +49,6 @@ describe('Test ReadWriteFile Node', () => {
[
{
json: {
directory,
fileExtension: 'jpg',
fileName: writeFileNode.parameters.fileName,
fileSize: '1.04 kB',
@@ -68,7 +61,6 @@ describe('Test ReadWriteFile Node', () => {
fileType: 'image',
fileExtension: 'jpg',
data: '/9j/4AAQSkZJRgABAQEASABIAAD/4QBmRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUAAAABAAAARgEoAAMAAAABAAIAAAExAAIAAAAQAAAATgAAAAAAARlJAAAD6AABGUkAAAPocGFpbnQubmV0IDUuMC4xAP/bAEMAIBYYHBgUIBwaHCQiICYwUDQwLCwwYkZKOlB0Znp4cmZwboCQuJyAiK6KbnCg2qKuvsTO0M58muLy4MjwuMrOxv/bAEMBIiQkMCowXjQ0XsaEcITGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxv/AABEIAB8AOwMBEgACEQEDEQH/xAAfAAABBQEBAQEBAQAAAAAAAAAAAQIDBAUGBwgJCgv/xAC1EAACAQMDAgQDBQUEBAAAAX0BAgMABBEFEiExQQYTUWEHInEUMoGRoQgjQrHBFVLR8CQzYnKCCQoWFxgZGiUmJygpKjQ1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4eLj5OXm5+jp6vHy8/T19vf4+fr/xAAfAQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgv/xAC1EQACAQIEBAMEBwUEBAABAncAAQIDEQQFITEGEkFRB2FxEyIygQgUQpGhscEJIzNS8BVictEKFiQ04SXxFxgZGiYnKCkqNTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqCg4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2dri4+Tl5ufo6ery8/T19vf4+fr/2gAMAwEAAhEDEQA/AOgqgrXF2zNHJ5aKcD3oNPZ23di/VKG82bkuTh1OMgdaAdOSLtZ6G5ut0iSeWoOAKAdO27NCqUN8oQrcHDqccDrQDpyRNPdRwEKcsx7CobIebPLORwThc0inGMF724jagNpxG4OOM1dIDAgjIPBpkqUOxnR2pmh85pW3nJB9KkNi4yqTssZ6rSNXNX0ehHFfusYDLuI7+tXY4I40ChQcdzQRKcL7Fb7PcQO32cqUY5we1XqZPtH11KsFoFDGYK7sckkZxVqgTnJlEQXMBZYGUoTkZ7VeoH7RvcqwWaIh80K7k5JIq1QJzkyhbMtvdSxMdqnlc1amgjmx5i5I70inNSVpFdrmaWRltkBVerHvUW57B2AUNGxyOaC+VW9xXLVrcGbcjrtkXqKZZxvveeTAL9APSgiooq1ty3RTMj//2Q==',
directory,
fileName: 'image.jpg',
fileSize: '1.04 kB',
},
@@ -82,15 +74,6 @@ describe('Test ReadWriteFile Node', () => {
];
for (const testData of tests) {
testHarness.setupTest(testData);
}
});

View File

@@ -1,5 +1,5 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
describe('Test Convert to File Node', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -1,5 +1,5 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
describe('Test Filter Node', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -83,7 +83,7 @@ export const prepareFormFields = (context: IWebhookFunctions, fields: FormFields
html = html.replace(resolvable, context.evaluateExpression(resolvable) as string);
}
field.html = sanitizeHtml(html);
}
if (field.fieldType === 'hiddenField') {

View File

@@ -1,11 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
describe('Test Github Node - Dispatch and Wait', () => {
describe('Workflow Dispatch and Wait', () => {
const now = 1683028800000;
@@ -77,7 +72,7 @@ describe('Test Github Node - Dispatch and Wait', () => {
.post(
`/repos/${owner}/${repository}/actions/workflows/${workflowId}/dispatches`,
(body) => {
return body.ref === ref && body.inputs?.resumeUrl;
},
)
.reply(200, {});
@@ -87,6 +82,8 @@ describe('Test Github Node - Dispatch and Wait', () => {
nock.cleanAll();
});
new NodeTestHarness().setupTests({
workflowFiles: ['GithubDispatchAndWaitWorkflow.json'],
});
});
});

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { NodeApiError, NodeOperationError } from 'n8n-workflow';
import nock from 'nock';
import { Github } from '../../Github.node';
describe('Test Github Node', () => {
describe('Workflow Dispatch', () => {
const now = 1683028800000;
@@ -87,7 +82,9 @@ describe('Test Github Node', () => {
.reply(200, {});
});
new NodeTestHarness().setupTests({
workflowFiles: ['GithubTestWorkflow.json'],
});
});
describe('Error Handling', () => {

View File

@@ -1,12 +1,11 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import type { WorkflowTestData } from 'n8n-workflow';
import { NodeConnectionTypes } from 'n8n-workflow';
import { gongApiResponse, gongNodeResponse } from './mocks';
describe('Gong Node', () => {
const testHarness = new NodeTestHarness();
const baseUrl = 'https://api.gong.io';
const credentials = {
gongApi: { baseUrl },
@@ -103,7 +102,6 @@ describe('Gong Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Gong gongApi': [[{ json: { metaData: gongNodeResponse.getCall[0].json.metaData } }]],
'Gong gongOAuth2Api': [
@@ -141,15 +139,9 @@ describe('Gong Node', () => {
},
];
for (const testData of tests) {
testHarness.setupTest(testData, { credentials });
}
});
describe('Call description', () => {
@@ -207,7 +199,6 @@ describe('Gong Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
Gong: [[{ json: { metaData: gongNodeResponse.getCall[0].json.metaData } }]],
},
@@ -298,7 +289,6 @@ describe('Gong Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
Gong: [gongNodeResponse.getCall],
},
@@ -415,7 +405,6 @@ describe('Gong Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
Gong: [gongNodeResponse.getAllCall],
},
@@ -552,7 +541,6 @@ describe('Gong Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
Gong: [
Array.from({ length: 50 }, () => ({ ...gongNodeResponse.getAllCallNoOptions[0] })),
@@ -633,7 +621,6 @@ describe('Gong Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
Gong: [[{ json: {} }]],
},
@@ -709,10 +696,10 @@ describe('Gong Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
Gong: [],
},
error: 'The resource you are requesting could not be found',
},
nock: {
baseUrl,
@@ -736,24 +723,9 @@ describe('Gong Node', () => {
},
];
for (const testData of tests) {
testHarness.setupTest(testData, { credentials });
}
});
describe('User description', () => {
@@ -810,7 +782,6 @@ describe('Gong Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
Gong: [gongNodeResponse.getUser],
},
@@ -885,7 +856,6 @@ describe('Gong Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
Gong: [gongNodeResponse.getAllUser],
},
@@ -979,10 +949,10 @@ describe('Gong Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
Gong: [],
},
error: "The Users IDs don't match any existing user",
},
nock: {
baseUrl,
@@ -1006,22 +976,8 @@ describe('Gong Node', () => {
},
];
for (const testData of tests) {
testHarness.setupTest(testData, { credentials });
}
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
jest.mock('jsonwebtoken', () => ({
sign: jest.fn().mockReturnValue('signature'),
}));
@@ -37,6 +36,7 @@ describe('Test Google BigQuery V2, executeQuery', () => {
.get('/v2/projects/test-project/queries/job_123?maxResults=1000&timeoutMs=10000')
.reply(200, { rows: [], schema: {} });
new NodeTestHarness().setupTests({
workflowFiles: ['executeQuery.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
jest.mock('jsonwebtoken', () => ({
sign: jest.fn().mockReturnValue('signature'),
}));
@@ -42,6 +41,7 @@ describe('Test Google BigQuery V2, insert auto map', () => {
{ kind: 'bigquery#tableDataInsertAllResponse' },
]);
new NodeTestHarness().setupTests({
workflowFiles: ['insert.autoMapMode.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
jest.mock('jsonwebtoken', () => ({
sign: jest.fn().mockReturnValue('signature'),
}));
@@ -35,6 +34,7 @@ describe('Test Google BigQuery V2, insert define manually', () => {
)
.reply(200, [{ kind: 'bigquery#tableDataInsertAllResponse' }]);
new NodeTestHarness().setupTests({
workflowFiles: ['insert.manualMode.workflow.json'],
});
});

View File

@@ -1,9 +1,8 @@
/* eslint-disable n8n-nodes-base/node-param-display-name-miscased */
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { jsonParse } from 'n8n-workflow';
import nock from 'nock';
import labels from '../fixtures/labels.json';
import messages from '../fixtures/messages.json';
@@ -71,7 +70,9 @@ describe('Test Gmail Node v1', () => {
afterAll(() => gmailNock.done());
new NodeTestHarness().setupTests({
workflowFiles: ['messages.workflow.json'],
});
});
describe('Labels', () => {
@@ -94,7 +95,9 @@ describe('Test Gmail Node v1', () => {
afterAll(() => gmailNock.done());
new NodeTestHarness().setupTests({
workflowFiles: ['labels.workflow.json'],
});
});
describe('Message Labels', () => {
@@ -111,7 +114,9 @@ describe('Test Gmail Node v1', () => {
afterAll(() => gmailNock.done());
new NodeTestHarness().setupTests({
workflowFiles: ['message-labels.workflow.json'],
});
});
describe('Drafts', () => {
@@ -189,6 +194,8 @@ describe('Test Gmail Node v1', () => {
afterAll(() => gmailNock.done());
new NodeTestHarness().setupTests({
workflowFiles: ['drafts.workflow.json'],
});
});
});

View File

@@ -1,10 +1,9 @@
/* eslint-disable n8n-nodes-base/node-param-display-name-miscased */
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { mock, mockDeep } from 'jest-mock-extended';
import { jsonParse, type ILoadOptionsFunctions, type INode } from 'n8n-workflow';
import nock from 'nock';
import { getGmailAliases, getLabels, getThreadMessages } from '../../v2/loadOptions';
import labels from '../fixtures/labels.json';
import messages from '../fixtures/messages.json';
@@ -131,7 +130,9 @@ describe('Test Gmail Node v2', () => {
afterAll(() => gmailNock.done());
new NodeTestHarness().setupTests({
workflowFiles: ['messages.workflow.json'],
});
});
describe('Labels', () => {
@@ -154,7 +155,9 @@ describe('Test Gmail Node v2', () => {
afterAll(() => gmailNock.done());
new NodeTestHarness().setupTests({
workflowFiles: ['labels.workflow.json'],
});
});
describe('Drafts', () => {
@@ -242,7 +245,9 @@ describe('Test Gmail Node v2', () => {
afterAll(() => gmailNock.done());
new NodeTestHarness().setupTests({
workflowFiles: ['drafts.workflow.json'],
});
});
describe('Threads', () => {
@@ -303,7 +308,9 @@ describe('Test Gmail Node v2', () => {
afterAll(() => gmailNock.done());
new NodeTestHarness().setupTests({
workflowFiles: ['threads.workflow.json'],
});
});
describe('loadOptions', () => {

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import categories from './fixtures/categories.json';
import channels from './fixtures/channels.json';
import playlistItems from './fixtures/playlistItems.json';
@@ -62,9 +61,10 @@ describe('Test YouTube Node', () => {
});
});
afterAll(() => youtubeNock.done());
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['channels.workflow.json'],
});
});
describe('Playlist', () => {
@@ -113,9 +113,10 @@ describe('Test YouTube Node', () => {
youtubeNock.delete('/v3/playlists', { id: 'playlist_id_1' }).reply(200, { success: true });
});
afterAll(() => youtubeNock.done());
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['playlists.workflow.json'],
});
});
describe('Video Categories', () => {
@@ -131,10 +132,10 @@ describe('Test YouTube Node', () => {
afterAll(() => youtubeNock.done());
testWorkflows(
['nodes/Google/YouTube/__test__/node/videoCategories.workflow.json'],
new NodeTestHarness().setupTests({
credentials,
);
workflowFiles: ['videoCategories.workflow.json'],
});
});
describe('Playlist Item', () => {
@@ -173,6 +174,9 @@ describe('Test YouTube Node', () => {
afterAll(() => youtubeNock.done());
testWorkflows(['nodes/Google/YouTube/__test__/node/playlistItems.workflow.json'], credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['playlistItems.workflow.json'],
});
});
});
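
setupTests() also accepts the credentials map that testWorkflows() used to take as a second argument, as the YouTube suites above show. A sketch under that assumption; the credential shape here is hypothetical, since the real object is imported from a shared fixture:

import { NodeTestHarness } from '@nodes-testing/node-test-harness';

// Hypothetical payload; in the tests above, `credentials` comes from
// a shared fixture module.
const credentials = {
  youTubeOAuth2Api: { accessToken: 'test-token' },
};

describe('Channel', () => {
  new NodeTestHarness().setupTests({
    credentials,
    workflowFiles: ['channels.workflow.json'],
  });
});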

View File

@@ -1,8 +1,7 @@
/* eslint-disable n8n-nodes-base/node-filename-against-convention */
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
describe('GraphQL Node', () => {
const baseUrl = 'https://api.n8n.io/';
@@ -58,6 +57,5 @@ describe('GraphQL Node', () => {
});
});
const workflows = getWorkflowFilenames(__dirname);
testWorkflows(workflows);
new NodeTestHarness().setupTests();
});

View File

@@ -1,5 +1,5 @@
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
const workflows = getWorkflowFilenames(__dirname);
describe('Test Html Node > extractHtmlContent', () => testWorkflows(workflows));
describe('Test Html Node > extractHtmlContent', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -1,5 +1,5 @@
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
const workflows = getWorkflowFilenames(__dirname);
describe('Test HTML Extract Node', () => testWorkflows(workflows));
describe('Test HTML Extract Node', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
describe('Test Binary Data Download', () => {
const baseUrl = 'https://dummy.domain';
@@ -21,6 +20,5 @@ describe('Test Binary Data Download', () => {
});
});
const workflows = getWorkflowFilenames(__dirname);
testWorkflows(workflows);
new NodeTestHarness().setupTests({ assertBinaryData: true });
});
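
The assertBinaryData flag connects to the pinData changes in the workflow fixture that follows: each pinned binary item gains its base64 `data` field, which the harness presumably only compares when the flag is set. A sketch, assuming that option as used here:

import { NodeTestHarness } from '@nodes-testing/node-test-harness';

describe('Test Binary Data Download', () => {
  // Compares full binary payloads, including the base64 `data` values
  // pinned in the fixture below.
  new NodeTestHarness().setupTests({ assertBinaryData: true });
});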

View File

@@ -6,10 +6,7 @@
"type": "n8n-nodes-base.manualTrigger",
"typeVersion": 1,
"parameters": {},
"position": [
580,
300
]
"position": [580, 300]
},
{
"name": "HTTP Request (v1)",
@@ -19,10 +16,7 @@
"url": "https://dummy.domain/path/to/image.png",
"responseFormat": "file"
},
"position": [
1020,
-100
]
"position": [1020, -100]
},
{
"name": "HTTP Request (v2)",
@@ -33,10 +27,7 @@
"responseFormat": "file",
"options": {}
},
"position": [
1020,
80
]
"position": [1020, 80]
},
{
"name": "HTTP Request (v3)",
@@ -52,10 +43,7 @@
}
}
},
"position": [
1020,
240
]
"position": [1020, 240]
},
{
"name": "HTTP Request (v4)",
@@ -71,10 +59,7 @@
}
}
},
"position": [
1020,
400
]
"position": [1020, 400]
},
{
"name": "Follow Redirect",
@@ -90,10 +75,7 @@
}
}
},
"position": [
1020,
560
]
"position": [1020, 560]
},
{
"name": "Content Disposition",
@@ -109,10 +91,7 @@
}
}
},
"position": [
1020,
720
]
"position": [1020, 720]
}
],
"pinData": {
@@ -120,6 +99,7 @@
{
"binary": {
"data": {
"data": "dGVzdA==",
"mimeType": "image/png",
"fileType": "image",
"fileExtension": "png",
@@ -134,6 +114,7 @@
{
"binary": {
"data": {
"data": "dGVzdA==",
"mimeType": "image/png",
"fileType": "image",
"fileExtension": "png",
@@ -148,6 +129,7 @@
{
"binary": {
"data": {
"data": "dGVzdA==",
"mimeType": "image/png",
"fileType": "image",
"fileExtension": "png",
@@ -162,6 +144,7 @@
{
"binary": {
"data": {
"data": "dGVzdA==",
"mimeType": "image/png",
"fileType": "image",
"fileExtension": "png",
@@ -176,6 +159,7 @@
{
"binary": {
"data": {
"data": "dGVzdA==",
"mimeType": "image/png",
"fileType": "image",
"fileExtension": "png",
@@ -190,6 +174,7 @@
{
"binary": {
"data": {
"data": "dGVzdGluZw==",
"mimeType": "image/jpeg",
"fileType": "image",
"fileExtension": "jpg",

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
describe('Test Response Encoding', () => {
const baseUrl = 'https://dummy.domain';
const payload = Buffer.from(
@@ -16,6 +15,5 @@ describe('Test Response Encoding', () => {
.reply(200, payload, { 'content-type': 'text/plain; charset=latin1' });
});
const workflows = getWorkflowFilenames(__dirname);
testWorkflows(workflows);
new NodeTestHarness().setupTests();
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
describe('Test Quoted Response Encoding', () => {
const baseUrl = 'https://dummy.domain';
const payload = Buffer.from(
@@ -16,6 +15,5 @@ describe('Test Quoted Response Encoding', () => {
.reply(200, payload, { 'content-type': 'text/plain; charset="latin1"' });
});
const workflows = getWorkflowFilenames(__dirname);
testWorkflows(workflows);
new NodeTestHarness().setupTests();
});

View File

@@ -1,8 +1,7 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { parse as parseUrl } from 'url';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
describe('Test HTTP Request Node', () => {
const baseUrl = 'https://dummyjson.com';
@@ -181,6 +180,5 @@ describe('Test HTTP Request Node', () => {
});
});
const workflows = getWorkflowFilenames(__dirname);
testWorkflows(workflows);
new NodeTestHarness().setupTests();
});

View File

@@ -1,8 +1,7 @@
/* eslint-disable n8n-nodes-base/node-param-display-name-miscased */
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';
import companies from './fixtures/companies.json';
import companiesSearchResult from './fixtures/companies_search_result.json';
import contacts from './fixtures/contacts.json';
@@ -114,7 +113,9 @@ describe('Hubspot Node', () => {
afterAll(() => hubspotNock.done());
testWorkflows(['nodes/Hubspot/__test__/companies.workflow.json']);
new NodeTestHarness().setupTests({
workflowFiles: ['companies.workflow.json'],
});
});
describe('contacts', () => {
@@ -210,7 +211,9 @@ describe('Hubspot Node', () => {
afterAll(() => hubspotNock.done());
testWorkflows(['nodes/Hubspot/__test__/contacts.workflow.json']);
new NodeTestHarness().setupTests({
workflowFiles: ['contacts.workflow.json'],
});
});
describe('deals', () => {
@@ -255,6 +258,8 @@ describe('Hubspot Node', () => {
afterAll(() => hubspotNock.done());
testWorkflows(['nodes/Hubspot/__test__/deals.workflow.json']);
new NodeTestHarness().setupTests({
workflowFiles: ['deals.workflow.json'],
});
});
});

View File

@@ -1,19 +1,28 @@
/* eslint-disable @typescript-eslint/no-loop-func */
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import type { WorkflowTestData } from 'n8n-workflow';
import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import { getResultNodeData, readJsonFileSync } from '@test/nodes/Helpers';
jest.mock('ics', () => {
const ics = jest.requireActual('ics');
return {
...ics,
createEvent(attributes: any, cb: () => void) {
attributes.uid = 'test-uid';
attributes.timestamp = '20250424T135100Z';
return ics.createEvent(attributes, cb);
},
};
});
describe('iCalendar Node', () => {
const workflowData = readJsonFileSync('nodes/ICalendar/test/node/workflow.iCalendar.json');
const testHarness = new NodeTestHarness();
const tests: WorkflowTestData[] = [
{
description: 'nodes/ICalendar/test/node/workflow.iCalendar.json',
input: {
workflowData,
workflowData: testHarness.readWorkflowJSON('workflow.iCalendar.json'),
},
output: {
assertBinaryData: true,
nodeData: {
iCalendar: [
[
@@ -24,9 +33,9 @@ describe('iCalendar Node', () => {
mimeType: 'text/calendar',
fileType: 'text',
fileExtension: 'ics',
data: 'QkVHSU46VkNBTEVOREFSDQpWRVJTSU9OOjIuMA0KQ0FMU0NBTEU6R1JFR09SSUFODQpQUk9ESUQ6YWRhbWdpYmJvbnMvaWNzDQpNRVRIT0Q6UFVCTElTSA0KWC1XUi1DQUxOQU1FOmRlZmF1bHQNClgtUFVCTElTSEVELVRUTDpQVDFIDQpCRUdJTjpWRVZFTlQNClVJRDpMWC1zckVYdkI1MXA1ZUxNS1gwTnkNClNVTU1BUlk6bmV3IGV2ZW50DQpEVFNUQU1QOjIwMjMwMjEwVDA5MzYwMFoNCkRUU1RBUlQ7VkFMVUU9REFURToyMDIzMDIyOA0KRFRFTkQ7VkFMVUU9REFURToyMDIzMDMwMQ0KQVRURU5ERUU7UlNWUD1GQUxTRTtDTj1QZXJzb246bWFpbHRvOnBlcnNvbjFAZW1haWwuY29tDQpFTkQ6VkVWRU5UDQpFTkQ6VkNBTEVOREFSDQo=',
data: 'QkVHSU46VkNBTEVOREFSDQpWRVJTSU9OOjIuMA0KQ0FMU0NBTEU6R1JFR09SSUFODQpQUk9ESUQ6YWRhbWdpYmJvbnMvaWNzDQpNRVRIT0Q6UFVCTElTSA0KWC1XUi1DQUxOQU1FOmRlZmF1bHQNClgtUFVCTElTSEVELVRUTDpQVDFIDQpCRUdJTjpWRVZFTlQNClVJRDp0ZXN0LXVpZA0KU1VNTUFSWTpuZXcgZXZlbnQNCkRUU1RBTVA6MjAyNTA0MjRUMTM1MTAwWg0KRFRTVEFSVDtWQUxVRT1EQVRFOjIwMjMwMjI3DQpEVEVORDtWQUxVRT1EQVRFOjIwMjMwMjI4DQpBVFRFTkRFRTtSU1ZQPUZBTFNFO0NOPVBlcnNvbjptYWlsdG86cGVyc29uMUBlbWFpbC5jb20NCkVORDpWRVZFTlQNCkVORDpWQ0FMRU5EQVINCg==',
fileName: 'event.ics',
fileSize: '359 B',
fileSize: '346 B',
},
},
},
@@ -38,25 +47,6 @@ describe('iCalendar Node', () => {
];
for (const testData of tests) {
test(testData.description, async () => {
const { result } = await executeWorkflow(testData);
const resultNodeData = getResultNodeData(result, testData);
resultNodeData.forEach(({ nodeName, resultData }) => {
//@ts-ignore
expect(resultData[0][0].binary.data.data.length).toEqual(
testData.output.nodeData[nodeName][0][0].binary.data.data.length,
);
//uid every time would be different, so we need to delete it in order to compare objects
//@ts-ignore
delete resultData[0][0].binary.data.data;
delete testData.output.nodeData[nodeName][0][0].binary.data.data;
expect(resultData).toEqual(testData.output.nodeData[nodeName]);
});
expect(result.finished).toEqual(true);
});
testHarness.setupTest(testData);
}
});
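
Pinning uid and timestamp in the ics mock is what lets the removed compare-length-then-delete-uid dance go away: once the output is deterministic, the full base64 payload can be asserted directly. The per-test API used above, sketched with the same calls:

import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import type { WorkflowTestData } from 'n8n-workflow';

const testHarness = new NodeTestHarness();

const testData: WorkflowTestData = {
  description: 'renders a deterministic .ics attachment',
  input: {
    // Resolved relative to the test file, like setupTests().
    workflowData: testHarness.readWorkflowJSON('workflow.iCalendar.json'),
  },
  output: {
    assertBinaryData: true,
    nodeData: { iCalendar: [[/* expected items, as in the test above */]] },
  },
};

// Registers a single Jest case for one WorkflowTestData entry.
testHarness.setupTest(testData);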

View File

@@ -1,5 +1,5 @@
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
const workflows = getWorkflowFilenames(__dirname);
describe('Test IF Node', () => testWorkflows(workflows));
describe('Test IF Node', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -1,3 +1,4 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { mock } from 'jest-mock-extended';
import { get } from 'lodash';
import {
@@ -8,8 +9,6 @@ import {
type IGetNodeParameterOptions,
} from 'n8n-workflow';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import * as IfV2 from '../../V2/IfV2.node';
jest.mock('lodash/set', () => jest.fn());
@@ -17,7 +16,9 @@ jest.mock('lodash/set', () => jest.fn());
describe('Test IF v2 Node Tests', () => {
afterEach(() => jest.resetAllMocks());
describe('Test IF v2 Node Workflow Tests', () => testWorkflows(getWorkflowFilenames(__dirname)));
describe('Test IF v2 Node Workflow Tests', () => {
new NodeTestHarness().setupTests();
});
describe('Test IF V2 Node Unit Tests', () => {
const node = new IfV2.IfV2(mock<INodeTypeDescription>());

View File

@@ -1,5 +1,5 @@
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
const workflows = getWorkflowFilenames(__dirname);
describe('Test ItemLists Node', () => testWorkflows(workflows));
describe('Test ItemLists Node', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -8,7 +8,7 @@ import type {
import { testWebhookTriggerNode } from '@test/nodes/TriggerHelpers';
import { JiraTrigger } from './JiraTrigger.node';
import { JiraTrigger } from '../JiraTrigger.node';
describe('JiraTrigger', () => {
describe('Webhook lifecycle', () => {

View File

@@ -1,4 +1,4 @@
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
const credentials = {
jwtAuth: {
@@ -8,6 +8,6 @@ const credentials = {
},
};
const workflows = getWorkflowFilenames(__dirname);
describe('Test Jwt Node', () => testWorkflows(workflows, credentials));
describe('Test Jwt Node', () => {
new NodeTestHarness().setupTests({ credentials });
});

View File

@@ -1,10 +1,8 @@
import { SchemaRegistry } from '@kafkajs/confluent-schema-registry';
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { mock } from 'jest-mock-extended';
import type { Producer } from 'kafkajs';
import { Kafka as apacheKafka } from 'kafkajs';
import path from 'path';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
jest.mock('kafkajs');
jest.mock('@kafkajs/confluent-schema-registry');
@@ -43,8 +41,7 @@ describe('Kafka Node', () => {
(SchemaRegistry as jest.Mock).mockReturnValue(mockRegistry);
});
const workflows = getWorkflowFilenames(path.join(__dirname, 'test'));
testWorkflows(workflows);
new NodeTestHarness().setupTests();
test('should publish the correct kafka messages', async () => {
expect(mockProducerSend).toHaveBeenCalledTimes(2);

View File

@@ -14,7 +14,7 @@ import { NodeOperationError } from 'n8n-workflow';
import { testTriggerNode } from '@test/nodes/TriggerHelpers';
import { KafkaTrigger } from './KafkaTrigger.node';
import { KafkaTrigger } from '../KafkaTrigger.node';
jest.mock('kafkajs');
jest.mock('@kafkajs/confluent-schema-registry');

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
import {
getCreateResponseClassic,
getSubscriberResponseClassic,
@@ -20,7 +19,6 @@ describe('MailerLite', () => {
mock.put('/subscribers/demo@mailerlite.com').reply(200, getUpdateSubscriberResponseClassic);
});
const workflows = getWorkflowFilenames(__dirname);
testWorkflows(workflows);
new NodeTestHarness().setupTests();
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
import {
getCreateResponseV2,
getSubscriberResponseV2,
@@ -23,7 +22,6 @@ describe('MailerLite', () => {
mock.put('/subscribers/user@n8n.io').reply(200, getUpdateSubscriberResponseV2);
});
const workflows = getWorkflowFilenames(__dirname);
testWorkflows(workflows);
new NodeTestHarness().setupTests();
});
});

View File

@@ -1,5 +1,5 @@
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
const workflows = getWorkflowFilenames(__dirname);
describe('Test Markdown Node', () => testWorkflows(workflows));
describe('Test Markdown Node', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -1,5 +1,5 @@
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
const workflows = getWorkflowFilenames(__dirname);
describe('Test Merge Node', () => testWorkflows(workflows));
describe('Test Merge Node', () => {
new NodeTestHarness().setupTests();
});

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
import { credentials } from '../credentials';
describe('Azure Cosmos DB - Create Container', () => {
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
filename.includes('create.workflow.json'),
);
beforeEach(() => {
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
@@ -75,5 +70,8 @@ describe('Azure Cosmos DB - Create Container', () => {
});
});
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['create.workflow.json'],
});
});
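
Each Azure Cosmos DB suite in the series that follows makes the same move: instead of listing the directory and filtering by filename, the one relevant workflow file is named directly. Sketched once here rather than per file:

import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { credentials } from '../credentials';

// Before: getWorkflowFilenames(__dirname).filter((f) => f.includes('create.workflow.json'))
// fed into testWorkflows(workflows, credentials). After:
new NodeTestHarness().setupTests({
  credentials,
  workflowFiles: ['create.workflow.json'],
});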

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { credentials } from '../credentials';
describe('Azure Cosmos DB - Delete Container', () => {
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
filename.includes('delete.workflow.json'),
);
beforeEach(() => {
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
@@ -19,5 +14,8 @@ describe('Azure Cosmos DB - Delete Container', () => {
.reply(204, {});
});
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['delete.workflow.json'],
});
});

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { credentials } from '../credentials';
describe('Azure Cosmos DB - Get Container', () => {
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
filename.includes('get.workflow.json'),
);
beforeEach(() => {
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
@@ -57,5 +52,8 @@ describe('Azure Cosmos DB - Get Container', () => {
});
});
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['get.workflow.json'],
});
});

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { credentials } from '../credentials';
describe('Azure Cosmos DB - Get All Containers', () => {
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
filename.includes('getAll.workflow.json'),
);
beforeEach(() => {
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
@@ -159,5 +154,8 @@ describe('Azure Cosmos DB - Get All Containers', () => {
});
});
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['getAll.workflow.json'],
});
});

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { credentials } from '../credentials';
describe('Azure Cosmos DB - Create Item', () => {
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
filename.includes('create.workflow.json'),
);
beforeEach(() => {
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
@@ -57,5 +52,8 @@ describe('Azure Cosmos DB - Create Item', () => {
});
});
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['create.workflow.json'],
});
});

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { credentials } from '../credentials';
describe('Azure Cosmos DB - Delete Item', () => {
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
filename.includes('delete.workflow.json'),
);
beforeEach(() => {
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
@@ -50,5 +45,8 @@ describe('Azure Cosmos DB - Delete Item', () => {
.reply(204, '');
});
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['delete.workflow.json'],
});
});

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { credentials } from '../credentials';
describe('Azure Cosmos DB - Get Item', () => {
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
filename.includes('get.workflow.json'),
);
beforeEach(() => {
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
@@ -58,5 +53,8 @@ describe('Azure Cosmos DB - Get Item', () => {
});
});
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['get.workflow.json'],
});
});

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { credentials } from '../credentials';
describe('Azure Cosmos DB - Get All Items', () => {
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
filename.includes('getAll.workflow.json'),
);
beforeEach(() => {
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
@@ -69,5 +64,8 @@ describe('Azure Cosmos DB - Get All Items', () => {
});
});
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['getAll.workflow.json'],
});
});

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { credentials } from '../credentials';
describe('Azure Cosmos DB - Query Items', () => {
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
filename.includes('query.workflow.json'),
);
beforeEach(() => {
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
@@ -34,5 +29,8 @@ describe('Azure Cosmos DB - Query Items', () => {
});
});
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['query.workflow.json'],
});
});

View File

@@ -1,14 +1,9 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
import { credentials } from '../credentials';
describe('Azure Cosmos DB - Update Item', () => {
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
filename.includes('update.workflow.json'),
);
beforeEach(() => {
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
@@ -62,5 +57,8 @@ describe('Azure Cosmos DB - Update Item', () => {
});
});
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['update.workflow.json'],
});
});

View File

@@ -1,12 +1,11 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { NodeConnectionTypes, type WorkflowTestData } from 'n8n-workflow';
import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import * as Helpers from '@test/nodes/Helpers';
import { microsoftEntraApiResponse, microsoftEntraNodeResponse } from './mocks';
describe('Microsoft Entra Node', () => {
const baseUrl = 'https://graph.microsoft.com/v1.0';
const testHarness = new NodeTestHarness();
describe('Group description', () => {
const tests: WorkflowTestData[] = [
@@ -73,7 +72,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.createGroup],
},
@@ -165,7 +163,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.deleteGroup],
},
@@ -236,7 +233,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.getGroup],
},
@@ -353,7 +349,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.getGroupWithProperties],
},
@@ -421,7 +416,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [new Array(102).fill(microsoftEntraNodeResponse.getGroup[0])],
},
@@ -503,7 +497,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [new Array(10).fill(microsoftEntraNodeResponse.getGroup[0])],
},
@@ -612,7 +605,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [
new Array(102).fill(microsoftEntraNodeResponse.getGroupWithProperties[0]),
@@ -711,7 +703,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.updateGroup],
},
@@ -751,14 +742,8 @@ describe('Microsoft Entra Node', () => {
},
];
test.each(tests)('$description', async (testData) => {
const { result } = await executeWorkflow(testData);
const resultNodeData = Helpers.getResultNodeData(result, testData);
resultNodeData.forEach(({ nodeName, resultData }) =>
expect(resultData).toEqual(testData.output.nodeData[nodeName]),
);
expect(result.status).toEqual('success');
});
for (const testData of tests) {
testHarness.setupTest(testData);
}
});
});

View File

@@ -1,13 +1,12 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import type { ILoadOptionsFunctions, WorkflowTestData } from 'n8n-workflow';
import { NodeConnectionTypes } from 'n8n-workflow';
import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import * as Helpers from '@test/nodes/Helpers';
import { microsoftEntraApiResponse, microsoftEntraNodeResponse } from './mocks';
import { MicrosoftEntra } from '../MicrosoftEntra.node';
describe('Microsoft Entra Node', () => {
const testHarness = new NodeTestHarness();
const baseUrl = 'https://graph.microsoft.com/v1.0';
describe('Credentials', () => {
@@ -76,7 +75,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.getGroup],
},
@@ -94,18 +92,12 @@ describe('Microsoft Entra Node', () => {
},
],
},
credentials,
},
];
test.each(tests)('$description', async (testData) => {
const { result } = await executeWorkflow(testData);
const resultNodeData = Helpers.getResultNodeData(result, testData);
resultNodeData.forEach(({ nodeName, resultData }) =>
expect(resultData).toEqual(testData.output.nodeData[nodeName]),
);
expect(result.status).toEqual('success');
});
for (const testData of tests) {
testHarness.setupTest(testData, { credentials });
}
});
describe('Load options', () => {

View File

@@ -1,11 +1,10 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import { NodeConnectionTypes, type WorkflowTestData } from 'n8n-workflow';
import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import * as Helpers from '@test/nodes/Helpers';
import { microsoftEntraApiResponse, microsoftEntraNodeResponse } from './mocks';
describe('Microsoft Entra Node', () => {
const testHarness = new NodeTestHarness();
const baseUrl = 'https://graph.microsoft.com/v1.0';
describe('User description', () => {
@@ -68,7 +67,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.addUserToGroup],
},
@@ -183,7 +181,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.createUser],
},
@@ -320,7 +317,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.deleteUser],
},
@@ -391,7 +387,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.getUser],
},
@@ -539,7 +534,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [
[
@@ -620,7 +614,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [new Array(102).fill(microsoftEntraNodeResponse.getUser[0])],
},
@@ -702,7 +695,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [new Array(10).fill(microsoftEntraNodeResponse.getUser[0])],
},
@@ -842,7 +834,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [
new Array(102).fill({
@@ -944,7 +935,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.removeUserFromGroup],
},
@@ -1062,7 +1052,6 @@ describe('Microsoft Entra Node', () => {
},
},
output: {
nodeExecutionOrder: ['Start'],
nodeData: {
'Micosoft Entra ID': [microsoftEntraNodeResponse.updateUser],
},
@@ -1137,14 +1126,8 @@ describe('Microsoft Entra Node', () => {
},
];
test.each(tests)('$description', async (testData) => {
const { result } = await executeWorkflow(testData);
const resultNodeData = Helpers.getResultNodeData(result, testData);
resultNodeData.forEach(({ nodeName, resultData }) =>
expect(resultData).toEqual(testData.output.nodeData[nodeName]),
);
expect(result.status).toEqual('success');
});
for (const testData of tests) {
testHarness.setupTest(testData);
}
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';
import { credentials } from '../../../credentials';
describe('Test MicrosoftExcelV2, table => addTable', () => {
@@ -24,6 +23,8 @@ describe('Test MicrosoftExcelV2, table => addTable', () => {
showTotals: false,
});
const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/addTable.workflow.json'];
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['addTable.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';
import { credentials } from '../../../credentials';
describe('Test MicrosoftExcelV2, table => append', () => {
@@ -25,6 +24,8 @@ describe('Test MicrosoftExcelV2, table => append', () => {
.post('/drive/items/01FUWX3BQ4ATCOZNR265GLA6IJEZDQUE4I/workbook/closeSession')
.reply(200);
const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/append.workflow.json'];
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['append.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';
import { credentials } from '../../../credentials';
describe('Test MicrosoftExcelV2, table => convertToRange', () => {
@@ -20,6 +19,8 @@ describe('Test MicrosoftExcelV2, table => convertToRange', () => {
],
});
const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/convertToRange.workflow.json'];
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['convertToRange.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';
import { credentials } from '../../../credentials';
describe('Test MicrosoftExcelV2, table => deleteTable', () => {
@@ -11,6 +10,8 @@ describe('Test MicrosoftExcelV2, table => deleteTable', () => {
)
.reply(200);
const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/deleteTable.workflow.json'];
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['deleteTable.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';
import { credentials } from '../../../credentials';
describe('Test MicrosoftExcelV2, table => getColumns', () => {
@@ -22,6 +21,8 @@ describe('Test MicrosoftExcelV2, table => getColumns', () => {
)
.reply(200, { value: [] });
const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/getColumns.workflow.json'];
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['getColumns.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';
import { credentials } from '../../../credentials';
describe('Test MicrosoftExcelV2, table => getRows', () => {
@@ -31,6 +30,8 @@ describe('Test MicrosoftExcelV2, table => getRows', () => {
)
.reply(200, { value: [] });
const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/getRows.workflow.json'];
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['getRows.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';
import { credentials } from '../../../credentials';
describe('Test MicrosoftExcelV2, table => lookup', () => {
@@ -39,6 +38,8 @@ describe('Test MicrosoftExcelV2, table => lookup', () => {
)
.reply(200, { value: [] });
const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/lookup.workflow.json'];
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['lookup.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';
import { credentials } from '../../../credentials';
describe('Test MicrosoftExcelV2, workbook => addWorksheet', () => {
@@ -23,6 +22,8 @@ describe('Test MicrosoftExcelV2, workbook => addWorksheet', () => {
.post('/drive/items/01FUWX3BQ4ATCOZNR265GLA6IJEZDQUE4I/workbook/closeSession')
.reply(200);
const workflows = ['nodes/Microsoft/Excel/test/v2/node/workbook/addWorksheet.workflow.json'];
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['addWorksheet.workflow.json'],
});
});

View File

@@ -1,7 +1,6 @@
import { NodeTestHarness } from '@nodes-testing/node-test-harness';
import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';
import { credentials } from '../../../credentials';
describe('Test MicrosoftExcelV2, workbook => deleteWorkbook', () => {
@@ -9,6 +8,8 @@ describe('Test MicrosoftExcelV2, workbook => deleteWorkbook', () => {
.delete('/drive/items/01FUWX3BXJLISGF2CFWBGYPHXFCXPXOJUK')
.reply(200);
const workflows = ['nodes/Microsoft/Excel/test/v2/node/workbook/deleteWorkbook.workflow.json'];
testWorkflows(workflows, credentials);
new NodeTestHarness().setupTests({
credentials,
workflowFiles: ['deleteWorkbook.workflow.json'],
});
});

Some files were not shown because too many files have changed in this diff.