mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-16 17:46:45 +00:00)

refactor: Overhaul nodes-testing setup - Part 2 (no-changelog) (#14873)
commit 91069f057e, parent 897338bd24, committed via GitHub
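The pattern this commit applies across the node specs, shown here as a condensed sketch rather than as part of the diff: each spec imports a local credentials fixture and hands it to testWorkflows(workflows, credentials), replacing the per-spec initBinaryDataService() calls, the CredentialsHelper.authenticate spies and the FAKE_CREDENTIALS_DATA lookups that the old setup used. The spec below is hypothetical; the helper names (getWorkflowFilenames, testWorkflows) and the fixture import path are taken from the hunks that follow.

// Hypothetical spec illustrating the refactored setup — not a file in this commit.
import nock from 'nock';

import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../__tests__/credentials';

const workflows = getWorkflowFilenames(__dirname);

describe('Example Node', () => {
	beforeAll(() => {
		// Register the HTTP mocks the workflows under test will hit.
		nock('https://example.invalid').post('/').reply(200, {});
	});

	// Credentials are injected per spec instead of being read from a shared fake-credentials map,
	// and initBinaryDataService() is no longer called in individual test files.
	testWorkflows(workflows, credentials);
});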
@@ -1,6 +1,8 @@
import nock from 'nock';

import { getWorkflowFilenames, initBinaryDataService, testWorkflows } from '@test/nodes/Helpers';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../__tests__/credentials';

const workflows = getWorkflowFilenames(__dirname);

@@ -20,11 +22,10 @@ describe('Test AWS Comprehend Node', () => {
},
],
};

beforeAll(async () => {
jest.useFakeTimers({ doNotFake: ['nextTick'], now });

await initBinaryDataService();

const baseUrl = 'https://comprehend.eu-central-1.amazonaws.com';

mock = nock(baseUrl);
@@ -34,6 +35,6 @@ describe('Test AWS Comprehend Node', () => {
mock.post('/').reply(200, response);
});

testWorkflows(workflows);
testWorkflows(workflows, credentials);
});
});
@@ -2,6 +2,8 @@ import nock from 'nock';

import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../__tests__/credentials';

const responseLabels = [
{
LabelModelVersion: '3.0',
@@ -298,6 +300,6 @@ describe('Test AWS Rekogntion Node', () => {
mock.post('/').reply(200, responseLabels);
});

testWorkflows(workflows);
testWorkflows(workflows, credentials);
});
});
@@ -1,6 +1,8 @@
import nock from 'nock';

import { getWorkflowFilenames, initBinaryDataService, testWorkflows } from '@test/nodes/Helpers';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../__tests__/credentials';

const workflows = getWorkflowFilenames(__dirname);

@@ -12,8 +14,6 @@ describe('Test S3 V1 Node', () => {
beforeAll(async () => {
jest.useFakeTimers({ doNotFake: ['nextTick'], now });

await initBinaryDataService();

mock = nock('https://bucket.s3.eu-central-1.amazonaws.com');
});

@@ -39,6 +39,6 @@ describe('Test S3 V1 Node', () => {
.reply(200, { success: true });
});

testWorkflows(workflows);
testWorkflows(workflows, credentials);
});
});
@@ -1,6 +1,8 @@
import nock from 'nock';

import { getWorkflowFilenames, initBinaryDataService, testWorkflows } from '@test/nodes/Helpers';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../__tests__/credentials';

const workflows = getWorkflowFilenames(__dirname);

@@ -12,8 +14,6 @@ describe('Test S3 V2 Node', () => {
beforeAll(async () => {
jest.useFakeTimers({ doNotFake: ['nextTick'], now });

await initBinaryDataService();

mock = nock('https://s3.eu-central-1.amazonaws.com/buc.ket');
});

@@ -39,6 +39,6 @@ describe('Test S3 V2 Node', () => {
.reply(200, { success: true });
});

testWorkflows(workflows);
testWorkflows(workflows, credentials);
});
});
@@ -5,6 +5,8 @@ import qs from 'node:querystring';
import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import * as Helpers from '@test/nodes/Helpers';

import { credentials } from '../../__tests__/credentials';

describe('AwsSes Node', () => {
const email = 'test+user@example.com';
const templateData = {
@@ -170,6 +172,7 @@ describe('AwsSes Node', () => {
];

test.each(tests)('$description', async (testData) => {
testData.credentials = credentials;
const { result } = await executeWorkflow(testData);
const resultNodeData = Helpers.getResultNodeData(result, testData);
resultNodeData.forEach(({ nodeName, resultData }) =>
packages/nodes-base/nodes/Aws/__tests__/credentials.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
export const credentials = {
aws: {
region: 'eu-central-1',
accessKeyId: 'key',
secretAccessKey: 'secret',
},
};
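Keeping the fake AWS values in a single __tests__/credentials.ts fixture makes each spec explicit about which credential types it exercises: the Comprehend, Rekognition, S3 and SES specs above import it with `import { credentials } from '../../__tests__/credentials'` and pass it straight through, either as `testWorkflows(workflows, credentials)` or as `testData.credentials = credentials`.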
@@ -11,6 +11,14 @@ import {
} from './apiResponses';

describe('Baserow > Workflows', () => {
const credentials = {
baserowApi: {
host: 'https://api.baserow.io',
username: 'nathan@n8n.io',
password: 'fake-password',
},
};

describe('Run workflow', () => {
beforeAll(() => {
const mock = nock('https://api.baserow.io');
@@ -48,6 +56,6 @@ describe('Baserow > Workflows', () => {
});

const workflows = getWorkflowFilenames(__dirname);
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});
});
@@ -4,7 +4,7 @@ import { normalizeItems } from 'n8n-core';
import type { IExecuteFunctions, IWorkflowDataProxyData } from 'n8n-workflow';
import { ApplicationError } from 'n8n-workflow';

import { testWorkflows, getWorkflowFilenames, initBinaryDataService } from '@test/nodes/Helpers';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';

import { Code } from '../Code.node';
import { ValidationError } from '../ValidationError';
@@ -12,10 +12,6 @@ import { ValidationError } from '../ValidationError';
describe('Test Code Node', () => {
const workflows = getWorkflowFilenames(__dirname);

beforeAll(async () => {
await initBinaryDataService();
});

testWorkflows(workflows);
});
@@ -4,14 +4,10 @@ import os from 'node:os';
import path from 'path';

import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import { getResultNodeData, initBinaryDataService, readJsonFileSync } from '@test/nodes/Helpers';
import { getResultNodeData, readJsonFileSync } from '@test/nodes/Helpers';

if (os.platform() !== 'win32') {
describe('Execute Compression Node', () => {
beforeEach(async () => {
await initBinaryDataService();
});

const workflowData = readJsonFileSync('nodes/Compression/test/node/workflow.compression.json');

const node = workflowData.nodes.find((n: IDataObject) => n.name === 'Read Binary File');
@@ -2,7 +2,7 @@ import fs from 'fs';
import fsPromises from 'fs/promises';
import { Readable } from 'stream';

import { testWorkflows, getWorkflowFilenames, initBinaryDataService } from '@test/nodes/Helpers';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';

const workflows = getWorkflowFilenames(__dirname);

@@ -13,9 +13,5 @@ describe('Test Crypto Node', () => {
jest.mock('fs');
fs.createReadStream = () => Readable.from(Buffer.from('test')) as fs.ReadStream;

beforeEach(async () => {
await initBinaryDataService();
});

testWorkflows(workflows);
});
@@ -3,8 +3,14 @@ import nock from 'nock';
import { testWorkflows } from '@test/nodes/Helpers';

describe('Test DiscordV2, webhook => sendLegacy', () => {
nock('https://discord.com')
.post('/webhook?wait=true')
const credentials = {
discordWebhookApi: {
webhookUri: 'https://discord.com/webhook',
},
};

nock(credentials.discordWebhookApi.webhookUri)
.post('?wait=true')
.reply(200, {
id: '1168768986385747999',
type: 0,
@@ -45,5 +51,5 @@ describe('Test DiscordV2, webhook => sendLegacy', () => {
});

const workflows = ['nodes/Discord/test/v2/node/webhook/sendLegacy.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});
@@ -2,7 +2,8 @@
|
||||
import { mock } from 'jest-mock-extended';
|
||||
import { type INodeTypeBaseDescription, type ITriggerFunctions } from 'n8n-workflow';
|
||||
|
||||
import { type ICredentialsDataImap } from '../../../../credentials/Imap.credentials';
|
||||
import { type ICredentialsDataImap } from '@credentials/Imap.credentials';
|
||||
|
||||
import { EmailReadImapV2 } from '../../v2/EmailReadImapV2.node';
|
||||
|
||||
jest.mock('@n8n/imap', () => {
|
||||
|
||||
@@ -17,9 +17,10 @@ import type {
|
||||
import { NodeConnectionTypes, NodeOperationError, TriggerCloseError } from 'n8n-workflow';
|
||||
import rfc2047 from 'rfc2047';
|
||||
|
||||
import type { ICredentialsDataImap } from '@credentials/Imap.credentials';
|
||||
import { isCredentialsDataImap } from '@credentials/Imap.credentials';
|
||||
|
||||
import { getNewEmails } from './utils';
|
||||
import type { ICredentialsDataImap } from '../../../credentials/Imap.credentials';
|
||||
import { isCredentialsDataImap } from '../../../credentials/Imap.credentials';
|
||||
|
||||
const versionDescription: INodeTypeDescription = {
|
||||
displayName: 'Email Trigger (IMAP)',
|
||||
|
||||
@@ -5,10 +5,6 @@ import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
|
||||
import * as Helpers from '@test/nodes/Helpers';
|
||||
|
||||
describe('Test ReadWriteFile Node', () => {
|
||||
beforeEach(async () => {
|
||||
await Helpers.initBinaryDataService();
|
||||
});
|
||||
|
||||
const temporaryDir = Helpers.createTemporaryDir();
|
||||
const directory = __dirname.replace(/\\/gi, '/');
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import { getWorkflowFilenames, initBinaryDataService, testWorkflows } from '@test/nodes/Helpers';
|
||||
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
|
||||
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('GithubDispatchAndWaitWorkflow.json'),
|
||||
@@ -56,7 +56,6 @@ describe('Test Github Node - Dispatch and Wait', () => {
|
||||
|
||||
beforeAll(async () => {
|
||||
jest.useFakeTimers({ doNotFake: ['nextTick'], now });
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { NodeApiError, NodeOperationError } from 'n8n-workflow';
|
||||
import nock from 'nock';
|
||||
|
||||
import { getWorkflowFilenames, initBinaryDataService, testWorkflows } from '@test/nodes/Helpers';
|
||||
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
|
||||
|
||||
import { Github } from '../../Github.node';
|
||||
|
||||
@@ -65,7 +65,6 @@ describe('Test Github Node', () => {
|
||||
|
||||
beforeAll(async () => {
|
||||
jest.useFakeTimers({ doNotFake: ['nextTick'], now });
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
|
||||
@@ -1,12 +1,6 @@
|
||||
import type {
|
||||
ICredentialDataDecryptedObject,
|
||||
IDataObject,
|
||||
IHttpRequestOptions,
|
||||
WorkflowTestData,
|
||||
} from 'n8n-workflow';
|
||||
import type { WorkflowTestData } from 'n8n-workflow';
|
||||
import { NodeConnectionTypes } from 'n8n-workflow';
|
||||
|
||||
import { CredentialsHelper } from '@test/nodes/credentials-helper';
|
||||
import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
|
||||
import * as Helpers from '@test/nodes/Helpers';
|
||||
|
||||
@@ -14,6 +8,10 @@ import { gongApiResponse, gongNodeResponse } from './mocks';
|
||||
|
||||
describe('Gong Node', () => {
|
||||
const baseUrl = 'https://api.gong.io';
|
||||
const credentials = {
|
||||
gongApi: { baseUrl },
|
||||
gongOAuth2Api: { baseUrl },
|
||||
};
|
||||
|
||||
describe('Credentials', () => {
|
||||
const tests: WorkflowTestData[] = [
|
||||
@@ -143,42 +141,8 @@ describe('Gong Node', () => {
|
||||
},
|
||||
];
|
||||
|
||||
beforeAll(() => {
|
||||
jest
|
||||
.spyOn(CredentialsHelper.prototype, 'authenticate')
|
||||
.mockImplementation(
|
||||
async (
|
||||
credentials: ICredentialDataDecryptedObject,
|
||||
typeName: string,
|
||||
requestParams: IHttpRequestOptions,
|
||||
): Promise<IHttpRequestOptions> => {
|
||||
if (typeName === 'gongApi') {
|
||||
return {
|
||||
...requestParams,
|
||||
headers: {
|
||||
authorization:
|
||||
'basic ' +
|
||||
Buffer.from(`${credentials.accessKey}:${credentials.accessKeySecret}`).toString(
|
||||
'base64',
|
||||
),
|
||||
},
|
||||
};
|
||||
} else if (typeName === 'gongOAuth2Api') {
|
||||
return {
|
||||
...requestParams,
|
||||
headers: {
|
||||
authorization:
|
||||
'bearer ' + (credentials.oauthTokenData as IDataObject).access_token,
|
||||
},
|
||||
};
|
||||
} else {
|
||||
return requestParams;
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test.each(tests)('$description', async (testData) => {
|
||||
testData.credentials = credentials;
|
||||
const { result } = await executeWorkflow(testData);
|
||||
const resultNodeData = Helpers.getResultNodeData(result, testData);
|
||||
resultNodeData.forEach(({ nodeName, resultData }) =>
|
||||
@@ -773,6 +737,7 @@ describe('Gong Node', () => {
|
||||
];
|
||||
|
||||
test.each(tests)('$description', async (testData) => {
|
||||
testData.credentials = credentials;
|
||||
const { result } = await executeWorkflow(testData);
|
||||
|
||||
if (testData.description === 'should handle error response') {
|
||||
@@ -1042,6 +1007,7 @@ describe('Gong Node', () => {
|
||||
];
|
||||
|
||||
test.each(tests)('$description', async (testData) => {
|
||||
testData.credentials = credentials;
|
||||
const { result } = await executeWorkflow(testData);
|
||||
|
||||
if (testData.description === 'should handle error response') {
|
||||
|
||||
@@ -69,11 +69,9 @@ describe('Test Gmail Node v1', () => {
|
||||
.reply(200, messages[0]);
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/Gmail/test/v1/messages.workflow.json']);
|
||||
afterAll(() => gmailNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
gmailNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Google/Gmail/test/v1/messages.workflow.json']);
|
||||
});
|
||||
|
||||
describe('Labels', () => {
|
||||
@@ -94,11 +92,9 @@ describe('Test Gmail Node v1', () => {
|
||||
});
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/Gmail/test/v1/labels.workflow.json']);
|
||||
afterAll(() => gmailNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
gmailNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Google/Gmail/test/v1/labels.workflow.json']);
|
||||
});
|
||||
|
||||
describe('Message Labels', () => {
|
||||
@@ -113,11 +109,9 @@ describe('Test Gmail Node v1', () => {
|
||||
.reply(200, messages[0]);
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/Gmail/test/v1/message-labels.workflow.json']);
|
||||
afterAll(() => gmailNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
gmailNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Google/Gmail/test/v1/message-labels.workflow.json']);
|
||||
});
|
||||
|
||||
describe('Drafts', () => {
|
||||
@@ -193,10 +187,8 @@ describe('Test Gmail Node v1', () => {
|
||||
});
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/Gmail/test/v1/drafts.workflow.json']);
|
||||
afterAll(() => gmailNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
gmailNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Google/Gmail/test/v1/drafts.workflow.json']);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -129,11 +129,9 @@ describe('Test Gmail Node v2', () => {
|
||||
.reply(200, messages[0]);
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/Gmail/test/v2/messages.workflow.json']);
|
||||
afterAll(() => gmailNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
gmailNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Google/Gmail/test/v2/messages.workflow.json']);
|
||||
});
|
||||
|
||||
describe('Labels', () => {
|
||||
@@ -154,11 +152,9 @@ describe('Test Gmail Node v2', () => {
|
||||
});
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/Gmail/test/v2/labels.workflow.json']);
|
||||
afterAll(() => gmailNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
gmailNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Google/Gmail/test/v2/labels.workflow.json']);
|
||||
});
|
||||
|
||||
describe('Drafts', () => {
|
||||
@@ -244,11 +240,9 @@ describe('Test Gmail Node v2', () => {
|
||||
});
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/Gmail/test/v2/drafts.workflow.json']);
|
||||
afterAll(() => gmailNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
gmailNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Google/Gmail/test/v2/drafts.workflow.json']);
|
||||
});
|
||||
|
||||
describe('Threads', () => {
|
||||
@@ -307,11 +301,9 @@ describe('Test Gmail Node v2', () => {
|
||||
.reply(200, messages[0]);
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/Gmail/test/v2/threads.workflow.json']);
|
||||
afterAll(() => gmailNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
gmailNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Google/Gmail/test/v2/threads.workflow.json']);
|
||||
});
|
||||
|
||||
describe('loadOptions', () => {
|
||||
|
||||
@@ -8,6 +8,15 @@ import playlistItems from './fixtures/playlistItems.json';
|
||||
import playlists from './fixtures/playlists.json';
|
||||
|
||||
describe('Test YouTube Node', () => {
|
||||
const credentials = {
|
||||
youTubeOAuth2Api: {
|
||||
scope: '',
|
||||
oauthTokenData: {
|
||||
access_token: 'ACCESSTOKEN',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const youtubeNock = nock('https://www.googleapis.com/youtube');
|
||||
beforeAll(() => {
|
||||
jest
|
||||
@@ -51,16 +60,11 @@ describe('Test YouTube Node', () => {
|
||||
image: {},
|
||||
},
|
||||
});
|
||||
nock.emitter.on('no match', (req) => {
|
||||
console.error('Unmatched request:', req);
|
||||
});
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/YouTube/__test__/node/channels.workflow.json']);
|
||||
afterAll(() => youtubeNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
youtubeNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Google/YouTube/__test__/node/channels.workflow.json'], credentials);
|
||||
});
|
||||
|
||||
describe('Playlist', () => {
|
||||
@@ -107,16 +111,11 @@ describe('Test YouTube Node', () => {
|
||||
})
|
||||
.reply(200, { items: playlists });
|
||||
youtubeNock.delete('/v3/playlists', { id: 'playlist_id_1' }).reply(200, { success: true });
|
||||
nock.emitter.on('no match', (req) => {
|
||||
console.error('Unmatched request:', req);
|
||||
});
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/YouTube/__test__/node/playlists.workflow.json']);
|
||||
afterAll(() => youtubeNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
youtubeNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Google/YouTube/__test__/node/playlists.workflow.json'], credentials);
|
||||
});
|
||||
|
||||
describe('Video Categories', () => {
|
||||
@@ -128,17 +127,16 @@ describe('Test YouTube Node', () => {
|
||||
regionCode: 'GB',
|
||||
})
|
||||
.reply(200, { items: categories });
|
||||
nock.emitter.on('no match', (req) => {
|
||||
console.error('Unmatched request:', req);
|
||||
});
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/YouTube/__test__/node/videoCategories.workflow.json']);
|
||||
afterAll(() => youtubeNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
youtubeNock.done();
|
||||
});
|
||||
testWorkflows(
|
||||
['nodes/Google/YouTube/__test__/node/videoCategories.workflow.json'],
|
||||
credentials,
|
||||
);
|
||||
});
|
||||
|
||||
describe('Playlist Item', () => {
|
||||
beforeAll(() => {
|
||||
youtubeNock
|
||||
@@ -171,15 +169,10 @@ describe('Test YouTube Node', () => {
|
||||
return body.id === 'UExWUDRtV2RxbGFhNWlwZEJRWXZVaFgyNk9RTENJRlV2cS41NkI0NEY2RDEwNTU3Q0M2';
|
||||
})
|
||||
.reply(200, {});
|
||||
nock.emitter.on('no match', (req) => {
|
||||
console.error('Unmatched request:', req);
|
||||
});
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Google/YouTube/__test__/node/playlistItems.workflow.json']);
|
||||
afterAll(() => youtubeNock.done());
|
||||
|
||||
// it('should make the correct network calls', () => {
|
||||
// youtubeNock.done();
|
||||
// });
|
||||
testWorkflows(['nodes/Google/YouTube/__test__/node/playlistItems.workflow.json'], credentials);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
/* eslint-disable n8n-nodes-base/node-filename-against-convention */
|
||||
import nock from 'nock';
|
||||
|
||||
import { getWorkflowFilenames, initBinaryDataService, testWorkflows } from '@test/nodes/Helpers';
|
||||
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
|
||||
|
||||
describe('GraphQL Node', () => {
|
||||
const baseUrl = 'https://api.n8n.io/';
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
|
||||
nock(baseUrl)
|
||||
.matchHeader('accept', 'application/json')
|
||||
.matchHeader('content-type', 'application/json')
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import { getWorkflowFilenames, initBinaryDataService, testWorkflows } from '@test/nodes/Helpers';
|
||||
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
|
||||
|
||||
describe('Test Binary Data Download', () => {
|
||||
const baseUrl = 'https://dummy.domain';
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.get('/path/to/image.png')
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import { getWorkflowFilenames, initBinaryDataService, testWorkflows } from '@test/nodes/Helpers';
|
||||
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
|
||||
|
||||
describe('Test Response Encoding', () => {
|
||||
const baseUrl = 'https://dummy.domain';
|
||||
@@ -10,8 +10,6 @@ describe('Test Response Encoding', () => {
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.get('/index.html')
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import { getWorkflowFilenames, initBinaryDataService, testWorkflows } from '@test/nodes/Helpers';
|
||||
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
|
||||
|
||||
describe('Test Quoted Response Encoding', () => {
|
||||
const baseUrl = 'https://dummy.domain';
|
||||
@@ -10,8 +10,6 @@ describe('Test Quoted Response Encoding', () => {
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.get('/index.html')
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
import nock from 'nock';
|
||||
import { parse as parseUrl } from 'url';
|
||||
|
||||
import { initBinaryDataService, getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
|
||||
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
|
||||
|
||||
describe('Test HTTP Request Node', () => {
|
||||
const baseUrl = 'https://dummyjson.com';
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
|
||||
function getPaginationReturnData(this: nock.ReplyFnContext, limit = 10, skip = 0) {
|
||||
const nextUrl = `${baseUrl}/users?skip=${skip + limit}&limit=${limit}`;
|
||||
|
||||
|
||||
@@ -112,11 +112,9 @@ describe('Hubspot Node', () => {
|
||||
.reply(200, companies.companies[0]);
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Hubspot/__test__/companies.workflow.json']);
|
||||
afterAll(() => hubspotNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
hubspotNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Hubspot/__test__/companies.workflow.json']);
|
||||
});
|
||||
|
||||
describe('contacts', () => {
|
||||
@@ -210,11 +208,9 @@ describe('Hubspot Node', () => {
|
||||
.reply(200, contacts.contacts[0]);
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Hubspot/__test__/contacts.workflow.json']);
|
||||
afterAll(() => hubspotNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
hubspotNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Hubspot/__test__/contacts.workflow.json']);
|
||||
});
|
||||
|
||||
describe('deals', () => {
|
||||
@@ -257,10 +253,8 @@ describe('Hubspot Node', () => {
|
||||
.reply(200, dealsSearchResult);
|
||||
});
|
||||
|
||||
testWorkflows(['nodes/Hubspot/__test__/deals.workflow.json']);
|
||||
afterAll(() => hubspotNock.done());
|
||||
|
||||
it('should make the correct network calls', () => {
|
||||
hubspotNock.done();
|
||||
});
|
||||
testWorkflows(['nodes/Hubspot/__test__/deals.workflow.json']);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,12 +2,9 @@
|
||||
import type { WorkflowTestData } from 'n8n-workflow';
|
||||
|
||||
import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
|
||||
import { getResultNodeData, readJsonFileSync, initBinaryDataService } from '@test/nodes/Helpers';
|
||||
import { getResultNodeData, readJsonFileSync } from '@test/nodes/Helpers';
|
||||
|
||||
describe('Execute iCalendar Node', () => {
|
||||
beforeEach(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
describe('iCalendar Node', () => {
|
||||
const workflowData = readJsonFileSync('nodes/ICalendar/test/node/workflow.iCalendar.json');
|
||||
|
||||
const tests: WorkflowTestData[] = [
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
const credentials = {
|
||||
jwtAuth: {
|
||||
keyType: 'passphrase',
|
||||
secret: 'baz',
|
||||
algorithm: 'HS256',
|
||||
},
|
||||
};
|
||||
|
||||
const workflows = getWorkflowFilenames(__dirname);
|
||||
|
||||
describe('Test Jwt Node', () => testWorkflows(workflows));
|
||||
describe('Test Jwt Node', () => testWorkflows(workflows, credentials));
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import {
|
||||
getWorkflowFilenames,
|
||||
initBinaryDataService,
|
||||
testWorkflows,
|
||||
} from '../../../../../test/nodes/Helpers';
|
||||
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB - Create Container', () => {
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('create.workflow.json'),
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
if (!nock.isActive()) {
|
||||
nock.activate();
|
||||
}
|
||||
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
const baseUrl = 'https://n8n-us-east-account.documents.azure.com/dbs/database_1';
|
||||
|
||||
nock.cleanAll();
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.defaultReplyHeaders({ 'Content-Type': 'application/json' })
|
||||
@@ -86,9 +75,5 @@ describe('Azure Cosmos DB - Create Container', () => {
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
testWorkflows(workflows);
|
||||
testWorkflows(workflows, credentials);
|
||||
});
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import {
|
||||
initBinaryDataService,
|
||||
testWorkflows,
|
||||
getWorkflowFilenames,
|
||||
} from '../../../../../test/nodes/Helpers';
|
||||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB - Delete Container', () => {
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('delete.workflow.json'),
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
if (!nock.isActive()) {
|
||||
nock.activate();
|
||||
}
|
||||
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
const baseUrl = 'https://n8n-us-east-account.documents.azure.com/dbs/database_1';
|
||||
|
||||
nock.cleanAll();
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.defaultReplyHeaders({ 'Content-Type': 'application/json' })
|
||||
@@ -30,9 +19,5 @@ describe('Azure Cosmos DB - Delete Container', () => {
|
||||
.reply(204, {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
testWorkflows(workflows);
|
||||
testWorkflows(workflows, credentials);
|
||||
});
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import {
|
||||
initBinaryDataService,
|
||||
testWorkflows,
|
||||
getWorkflowFilenames,
|
||||
} from '../../../../../test/nodes/Helpers';
|
||||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB - Get Container', () => {
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('get.workflow.json'),
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
if (!nock.isActive()) {
|
||||
nock.activate();
|
||||
}
|
||||
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
const baseUrl = 'https://n8n-us-east-account.documents.azure.com/dbs/database_1';
|
||||
|
||||
nock.cleanAll();
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.defaultReplyHeaders({ 'Content-Type': 'application/json' })
|
||||
@@ -68,9 +57,5 @@ describe('Azure Cosmos DB - Get Container', () => {
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
testWorkflows(workflows);
|
||||
testWorkflows(workflows, credentials);
|
||||
});
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import {
|
||||
initBinaryDataService,
|
||||
testWorkflows,
|
||||
getWorkflowFilenames,
|
||||
} from '../../../../../test/nodes/Helpers';
|
||||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB - Get All Containers', () => {
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('getAll.workflow.json'),
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
if (!nock.isActive()) {
|
||||
nock.activate();
|
||||
}
|
||||
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
const baseUrl = 'https://n8n-us-east-account.documents.azure.com/dbs/database_1';
|
||||
|
||||
nock.cleanAll();
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.defaultReplyHeaders({ 'Content-Type': 'application/json' })
|
||||
@@ -170,9 +159,5 @@ describe('Azure Cosmos DB - Get All Containers', () => {
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
testWorkflows(workflows);
|
||||
testWorkflows(workflows, credentials);
|
||||
});
|
||||
|
||||
@@ -0,0 +1,8 @@
export const credentials = {
microsoftAzureCosmosDbSharedKeyApi: {
account: 'n8n-us-east-account',
key: 'I3rwpzP0XoFpNzJ7hRIUXjwgpD1qaVKi71NZBbk7oOHUXrbd80WAoIAAoRaT47W9hHO3b6us1yABACDbVdilag==',
database: 'database_1',
baseUrl: 'https://n8n-us-east-account.documents.azure.com/dbs/database_1',
},
};
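A condensed view (not part of the diff) of how the Cosmos DB specs above consume this fixture: they derive their nock scope from its baseUrl instead of repeating the account URL in every file. The .get() interceptor below is a placeholder; the real paths are registered in each spec.

// Sketch only — scope setup mirrors the Cosmos DB spec hunks above; the interceptor path is hypothetical.
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;

nock(baseUrl)
	.persist()
	.defaultReplyHeaders({ 'Content-Type': 'application/json' })
	.get('/colls/container1')
	.reply(200, {});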
@@ -1,12 +1,9 @@
|
||||
import { OperationalError } from 'n8n-workflow';
|
||||
import type {
|
||||
ICredentialDataDecryptedObject,
|
||||
IHttpRequestOptions,
|
||||
IRequestOptions,
|
||||
} from 'n8n-workflow';
|
||||
import type { IHttpRequestOptions, IRequestOptions } from 'n8n-workflow';
|
||||
|
||||
import { MicrosoftAzureCosmosDbSharedKeyApi } from '../../../../../credentials/MicrosoftAzureCosmosDbSharedKeyApi.credentials';
|
||||
import { FAKE_CREDENTIALS_DATA } from '../../../../../test/nodes/FakeCredentialsMap';
|
||||
import { MicrosoftAzureCosmosDbSharedKeyApi } from '@credentials/MicrosoftAzureCosmosDbSharedKeyApi.credentials';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
jest.mock('crypto', () => ({
|
||||
createHmac: jest.fn(() => ({
|
||||
@@ -17,20 +14,18 @@ jest.mock('crypto', () => ({
|
||||
}));
|
||||
|
||||
describe('Azure Cosmos DB', () => {
|
||||
const { account, key, baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
describe('authenticate', () => {
|
||||
const azureCosmosDbSharedKeyApi = new MicrosoftAzureCosmosDbSharedKeyApi();
|
||||
|
||||
it('should generate a valid authorization header', async () => {
|
||||
jest.useFakeTimers().setSystemTime(new Date('2025-01-01T00:00:00Z'));
|
||||
const credentials: ICredentialDataDecryptedObject = {
|
||||
account: FAKE_CREDENTIALS_DATA.microsoftAzureCosmosDbSharedKeyApi.account,
|
||||
key: FAKE_CREDENTIALS_DATA.microsoftAzureCosmosDbSharedKeyApi.key,
|
||||
};
|
||||
const requestOptions: IHttpRequestOptions = {
|
||||
url: `${FAKE_CREDENTIALS_DATA.microsoftAzureCosmosDbSharedKeyApi.baseUrl}/colls/container1/docs/item1`,
|
||||
url: `${baseUrl}/colls/container1/docs/item1`,
|
||||
method: 'GET',
|
||||
};
|
||||
const result = await azureCosmosDbSharedKeyApi.authenticate(credentials, requestOptions);
|
||||
const result = await azureCosmosDbSharedKeyApi.authenticate({ account, key }, requestOptions);
|
||||
|
||||
expect(result.headers?.authorization).toBe(
|
||||
'type%3Dmaster%26ver%3D1.0%26sig%3Dfake-signature',
|
||||
@@ -114,15 +109,11 @@ describe('Azure Cosmos DB', () => {
|
||||
|
||||
it('should properly construct the resourceId and payload', async () => {
|
||||
jest.useFakeTimers().setSystemTime(new Date('2025-01-01T00:00:00Z'));
|
||||
const credentials: ICredentialDataDecryptedObject = {
|
||||
account: FAKE_CREDENTIALS_DATA.microsoftAzureCosmosDbSharedKeyApi.account,
|
||||
key: FAKE_CREDENTIALS_DATA.microsoftAzureCosmosDbSharedKeyApi.key,
|
||||
};
|
||||
const requestOptions: IHttpRequestOptions = {
|
||||
url: 'https://example.com/dbs/mydb/colls/mycoll/docs/mydoc',
|
||||
method: 'GET',
|
||||
};
|
||||
const result = await azureCosmosDbSharedKeyApi.authenticate(credentials, requestOptions);
|
||||
const result = await azureCosmosDbSharedKeyApi.authenticate({ account, key }, requestOptions);
|
||||
|
||||
expect(result.headers?.authorization).toBe(
|
||||
'type%3Dmaster%26ver%3D1.0%26sig%3Dfake-signature',
|
||||
|
||||
@@ -1,6 +1,16 @@
|
||||
import type { IDataObject, IHttpRequestOptions, INodeExecutionData } from 'n8n-workflow';
|
||||
import { mock } from 'jest-mock-extended';
|
||||
import type {
|
||||
IDataObject,
|
||||
IExecuteSingleFunctions,
|
||||
IHttpRequestOptions,
|
||||
INode,
|
||||
INodeExecutionData,
|
||||
} from 'n8n-workflow';
|
||||
import { NodeApiError, NodeOperationError, OperationalError } from 'n8n-workflow';
|
||||
|
||||
const azureCosmosDbApiRequest = jest.fn();
|
||||
jest.mock('../../transport', () => ({ azureCosmosDbApiRequest }));
|
||||
|
||||
import { ErrorMap } from '../../helpers/errorHandler';
|
||||
import {
|
||||
getPartitionKey,
|
||||
@@ -10,31 +20,19 @@ import {
|
||||
validatePartitionKey,
|
||||
validateCustomProperties,
|
||||
} from '../../helpers/utils';
|
||||
import { azureCosmosDbApiRequest } from '../../transport';
|
||||
|
||||
interface RequestBodyWithParameters extends IDataObject {
|
||||
parameters: Array<{ name: string; value: string }>;
|
||||
}
|
||||
|
||||
jest.mock('n8n-workflow', () => ({
|
||||
...jest.requireActual('n8n-workflow'),
|
||||
azureCosmosDbApiRequest: jest.fn(),
|
||||
}));
|
||||
const mockExecuteSingleFunctions = mock<IExecuteSingleFunctions>();
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
|
||||
jest.mock('../../transport', () => ({
|
||||
azureCosmosDbApiRequest: jest.fn(),
|
||||
}));
|
||||
mockExecuteSingleFunctions.getNode.mockReturnValue({ name: 'MockNode' } as INode);
|
||||
});
|
||||
|
||||
describe('getPartitionKey', () => {
|
||||
let mockExecuteSingleFunctions: any;
|
||||
|
||||
beforeEach(() => {
|
||||
mockExecuteSingleFunctions = {
|
||||
getNodeParameter: jest.fn(),
|
||||
getNode: jest.fn(() => ({ name: 'MockNode' })),
|
||||
};
|
||||
});
|
||||
|
||||
test('should return partition key when found', async () => {
|
||||
mockExecuteSingleFunctions.getNodeParameter.mockReturnValue('containerName');
|
||||
const mockApiResponse = {
|
||||
@@ -42,7 +40,7 @@ describe('getPartitionKey', () => {
|
||||
paths: ['/partitionKeyPath'],
|
||||
},
|
||||
};
|
||||
(azureCosmosDbApiRequest as jest.Mock).mockResolvedValue(mockApiResponse);
|
||||
azureCosmosDbApiRequest.mockResolvedValue(mockApiResponse);
|
||||
|
||||
const result = await getPartitionKey.call(mockExecuteSingleFunctions);
|
||||
|
||||
@@ -52,7 +50,7 @@ describe('getPartitionKey', () => {
|
||||
test('should throw NodeOperationError if partition key is not found', async () => {
|
||||
mockExecuteSingleFunctions.getNodeParameter.mockReturnValue('containerName');
|
||||
const mockApiResponse = {};
|
||||
(azureCosmosDbApiRequest as jest.Mock).mockResolvedValue(mockApiResponse);
|
||||
azureCosmosDbApiRequest.mockResolvedValue(mockApiResponse);
|
||||
|
||||
await expect(getPartitionKey.call(mockExecuteSingleFunctions)).rejects.toThrowError(
|
||||
new NodeOperationError(mockExecuteSingleFunctions.getNode(), 'Partition key not found', {
|
||||
@@ -77,7 +75,7 @@ describe('getPartitionKey', () => {
|
||||
},
|
||||
);
|
||||
|
||||
(azureCosmosDbApiRequest as jest.Mock).mockRejectedValue(mockError);
|
||||
azureCosmosDbApiRequest.mockRejectedValue(mockError);
|
||||
|
||||
await expect(getPartitionKey.call(mockExecuteSingleFunctions)).rejects.toThrowError(
|
||||
new NodeApiError(
|
||||
@@ -93,17 +91,12 @@ describe('getPartitionKey', () => {
|
||||
});
|
||||
|
||||
describe('validatePartitionKey', () => {
|
||||
let mockExecuteSingleFunctions: any;
|
||||
let requestOptions: any;
|
||||
|
||||
beforeEach(() => {
|
||||
mockExecuteSingleFunctions = {
|
||||
getNodeParameter: jest.fn(),
|
||||
getNode: jest.fn(() => ({ name: 'MockNode' })),
|
||||
};
|
||||
requestOptions = { body: {}, headers: {} };
|
||||
|
||||
(azureCosmosDbApiRequest as jest.Mock).mockClear();
|
||||
azureCosmosDbApiRequest.mockClear();
|
||||
});
|
||||
|
||||
test('should throw NodeOperationError when partition key is missing for "create" operation', async () => {
|
||||
@@ -115,7 +108,7 @@ describe('validatePartitionKey', () => {
|
||||
paths: ['/partitionKeyPath'],
|
||||
},
|
||||
};
|
||||
(azureCosmosDbApiRequest as jest.Mock).mockResolvedValue(mockApiResponse);
|
||||
azureCosmosDbApiRequest.mockResolvedValue(mockApiResponse);
|
||||
|
||||
await expect(
|
||||
validatePartitionKey.call(mockExecuteSingleFunctions, requestOptions),
|
||||
@@ -140,7 +133,7 @@ describe('validatePartitionKey', () => {
|
||||
paths: ['/partitionKeyPath'],
|
||||
},
|
||||
};
|
||||
(azureCosmosDbApiRequest as jest.Mock).mockResolvedValue(mockApiResponse);
|
||||
azureCosmosDbApiRequest.mockResolvedValue(mockApiResponse);
|
||||
|
||||
await expect(
|
||||
validatePartitionKey.call(mockExecuteSingleFunctions, requestOptions),
|
||||
@@ -164,7 +157,7 @@ describe('validatePartitionKey', () => {
|
||||
paths: ['/partitionKeyPath'],
|
||||
},
|
||||
};
|
||||
(azureCosmosDbApiRequest as jest.Mock).mockResolvedValue(mockApiResponse);
|
||||
azureCosmosDbApiRequest.mockResolvedValue(mockApiResponse);
|
||||
|
||||
await expect(
|
||||
validatePartitionKey.call(mockExecuteSingleFunctions, requestOptions),
|
||||
@@ -188,7 +181,7 @@ describe('validatePartitionKey', () => {
|
||||
paths: ['/partitionKeyPath'],
|
||||
},
|
||||
};
|
||||
(azureCosmosDbApiRequest as jest.Mock).mockResolvedValue(mockApiResponse);
|
||||
azureCosmosDbApiRequest.mockResolvedValue(mockApiResponse);
|
||||
|
||||
await expect(
|
||||
validatePartitionKey.call(mockExecuteSingleFunctions, requestOptions),
|
||||
@@ -205,15 +198,6 @@ describe('validatePartitionKey', () => {
|
||||
});
|
||||
|
||||
describe('simplifyData', () => {
|
||||
let mockExecuteSingleFunctions: any;
|
||||
|
||||
beforeEach(() => {
|
||||
mockExecuteSingleFunctions = {
|
||||
getNodeParameter: jest.fn(),
|
||||
getNode: jest.fn(() => ({ name: 'MockNode' })),
|
||||
};
|
||||
});
|
||||
|
||||
test('should return the same data when "simple" parameter is false', async () => {
|
||||
mockExecuteSingleFunctions.getNodeParameter.mockReturnValue(false);
|
||||
const items = [{ json: { foo: 'bar' } }] as INodeExecutionData[];
|
||||
@@ -234,14 +218,9 @@ describe('simplifyData', () => {
|
||||
});
|
||||
|
||||
describe('validateQueryParameters', () => {
|
||||
let mockExecuteSingleFunctions: any;
|
||||
let requestOptions: IHttpRequestOptions;
|
||||
|
||||
beforeEach(() => {
|
||||
mockExecuteSingleFunctions = {
|
||||
getNodeParameter: jest.fn(),
|
||||
getNode: jest.fn(() => ({ name: 'MockNode' })),
|
||||
};
|
||||
requestOptions = { body: {}, headers: {} } as IHttpRequestOptions;
|
||||
});
|
||||
|
||||
@@ -340,21 +319,12 @@ describe('processJsonInput', () => {
|
||||
});
|
||||
|
||||
describe('validateCustomProperties', () => {
|
||||
let mockExecuteSingleFunctions: any;
|
||||
let requestOptions: any;
|
||||
|
||||
beforeEach(() => {
|
||||
mockExecuteSingleFunctions = {
|
||||
getNodeParameter: jest.fn(),
|
||||
getNode: jest.fn(() => ({ name: 'MockNode' })),
|
||||
};
|
||||
requestOptions = { body: {}, headers: {}, url: 'http://mock.url' };
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
test('should merge custom properties into requestOptions.body for valid input', async () => {
|
||||
const validCustomProperties = { property1: 'value1', property2: 'value2' };
|
||||
mockExecuteSingleFunctions.getNodeParameter.mockReturnValue(validCustomProperties);
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import {
|
||||
initBinaryDataService,
|
||||
testWorkflows,
|
||||
getWorkflowFilenames,
|
||||
} from '../../../../../test/nodes/Helpers';
|
||||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB - Create Item', () => {
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('create.workflow.json'),
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
if (!nock.isActive()) {
|
||||
nock.activate();
|
||||
}
|
||||
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
const baseUrl = 'https://n8n-us-east-account.documents.azure.com/dbs/database_1';
|
||||
|
||||
nock.cleanAll();
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.defaultReplyHeaders({ 'Content-Type': 'application/json' })
|
||||
@@ -68,9 +57,5 @@ describe('Azure Cosmos DB - Create Item', () => {
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
testWorkflows(workflows);
|
||||
testWorkflows(workflows, credentials);
|
||||
});
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import {
|
||||
initBinaryDataService,
|
||||
testWorkflows,
|
||||
getWorkflowFilenames,
|
||||
} from '../../../../../test/nodes/Helpers';
|
||||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB - Delete Item', () => {
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('delete.workflow.json'),
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
if (!nock.isActive()) {
|
||||
nock.activate();
|
||||
}
|
||||
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
const baseUrl = 'https://n8n-us-east-account.documents.azure.com/dbs/database_1';
|
||||
|
||||
nock.cleanAll();
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.defaultReplyHeaders({ 'Content-Type': 'application/json' })
|
||||
@@ -61,9 +50,5 @@ describe('Azure Cosmos DB - Delete Item', () => {
|
||||
.reply(204, '');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
testWorkflows(workflows);
|
||||
testWorkflows(workflows, credentials);
|
||||
});
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import {
|
||||
initBinaryDataService,
|
||||
testWorkflows,
|
||||
getWorkflowFilenames,
|
||||
} from '../../../../../test/nodes/Helpers';
|
||||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB - Get Item', () => {
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('get.workflow.json'),
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
if (!nock.isActive()) {
|
||||
nock.activate();
|
||||
}
|
||||
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
const baseUrl = 'https://n8n-us-east-account.documents.azure.com/dbs/database_1';
|
||||
|
||||
nock.cleanAll();
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.defaultReplyHeaders({ 'Content-Type': 'application/json' })
|
||||
@@ -69,9 +58,5 @@ describe('Azure Cosmos DB - Get Item', () => {
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
testWorkflows(workflows);
|
||||
testWorkflows(workflows, credentials);
|
||||
});
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import {
|
||||
initBinaryDataService,
|
||||
testWorkflows,
|
||||
getWorkflowFilenames,
|
||||
} from '../../../../../test/nodes/Helpers';
|
||||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB - Get All Items', () => {
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('getAll.workflow.json'),
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
if (!nock.isActive()) {
|
||||
nock.activate();
|
||||
}
|
||||
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
const baseUrl = 'https://n8n-us-east-account.documents.azure.com/dbs/database_1';
|
||||
|
||||
nock.cleanAll();
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.defaultReplyHeaders({ 'Content-Type': 'application/json' })
|
||||
@@ -80,9 +69,5 @@ describe('Azure Cosmos DB - Get All Items', () => {
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
testWorkflows(workflows);
|
||||
testWorkflows(workflows, credentials);
|
||||
});
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import {
|
||||
initBinaryDataService,
|
||||
testWorkflows,
|
||||
getWorkflowFilenames,
|
||||
} from '../../../../../test/nodes/Helpers';
|
||||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB - Query Items', () => {
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('query.workflow.json'),
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
if (!nock.isActive()) {
|
||||
nock.activate();
|
||||
}
|
||||
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
const baseUrl = 'https://n8n-us-east-account.documents.azure.com/dbs/database_1';
|
||||
|
||||
nock.cleanAll();
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.defaultReplyHeaders({ 'Content-Type': 'application/json' })
|
||||
@@ -45,9 +34,5 @@ describe('Azure Cosmos DB - Query Items', () => {
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
testWorkflows(workflows);
|
||||
testWorkflows(workflows, credentials);
|
||||
});
|
||||
|
||||
@@ -1,28 +1,17 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import {
|
||||
initBinaryDataService,
|
||||
testWorkflows,
|
||||
getWorkflowFilenames,
|
||||
} from '../../../../../test/nodes/Helpers';
|
||||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB - Update Item', () => {
|
||||
const workflows = getWorkflowFilenames(__dirname).filter((filename) =>
|
||||
filename.includes('update.workflow.json'),
|
||||
);
|
||||
|
||||
beforeAll(async () => {
|
||||
await initBinaryDataService();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
if (!nock.isActive()) {
|
||||
nock.activate();
|
||||
}
|
||||
const { baseUrl } = credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
|
||||
const baseUrl = 'https://n8n-us-east-account.documents.azure.com/dbs/database_1';
|
||||
|
||||
nock.cleanAll();
|
||||
nock(baseUrl)
|
||||
.persist()
|
||||
.defaultReplyHeaders({ 'Content-Type': 'application/json' })
|
||||
@@ -73,9 +62,5 @@ describe('Azure Cosmos DB - Update Item', () => {
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
testWorkflows(workflows);
|
||||
testWorkflows(workflows, credentials);
|
||||
});
|
||||
|
||||
@@ -4,9 +4,9 @@ import {
|
||||
type ILoadOptionsFunctions,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { FAKE_CREDENTIALS_DATA } from '../../../../../test/nodes/FakeCredentialsMap';
|
||||
import { AzureCosmosDb } from '../../AzureCosmosDb.node';
|
||||
import { HeaderConstants } from '../../helpers/constants';
|
||||
import { credentials } from '../credentials';
|
||||
|
||||
describe('Azure Cosmos DB', () => {
|
||||
describe('List search', () => {
|
||||
@@ -31,7 +31,7 @@ describe('Azure Cosmos DB', () => {
|
||||
|
||||
const mockGetCredentials = jest.fn(async (type: string, _itemIndex?: number) => {
|
||||
if (type === 'microsoftAzureCosmosDbSharedKeyApi') {
|
||||
return FAKE_CREDENTIALS_DATA.microsoftAzureCosmosDbSharedKeyApi;
|
||||
return credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
}
|
||||
throw new OperationalError('Unknown credentials');
|
||||
});
|
||||
@@ -100,7 +100,7 @@ describe('Azure Cosmos DB', () => {
|
||||
|
||||
const mockGetCredentials = jest.fn(async (type: string, _itemIndex?: number) => {
|
||||
if (type === 'microsoftAzureCosmosDbSharedKeyApi') {
|
||||
return FAKE_CREDENTIALS_DATA.microsoftAzureCosmosDbSharedKeyApi;
|
||||
return credentials.microsoftAzureCosmosDbSharedKeyApi;
|
||||
}
|
||||
throw new OperationalError('Unknown credentials');
|
||||
});
|
||||
|
||||
@@ -1,13 +1,6 @@
|
||||
import type {
|
||||
ICredentialDataDecryptedObject,
|
||||
IDataObject,
|
||||
IHttpRequestOptions,
|
||||
ILoadOptionsFunctions,
|
||||
WorkflowTestData,
|
||||
} from 'n8n-workflow';
|
||||
import type { ILoadOptionsFunctions, WorkflowTestData } from 'n8n-workflow';
|
||||
import { NodeConnectionTypes } from 'n8n-workflow';
|
||||
|
||||
import { CredentialsHelper } from '@test/nodes/credentials-helper';
|
||||
import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
|
||||
import * as Helpers from '@test/nodes/Helpers';
|
||||
|
||||
@@ -18,6 +11,15 @@ describe('Microsoft Entra Node', () => {
|
||||
const baseUrl = 'https://graph.microsoft.com/v1.0';
|
||||
|
||||
describe('Credentials', () => {
|
||||
const credentials = {
|
||||
microsoftEntraOAuth2Api: {
|
||||
scope: '',
|
||||
oauthTokenData: {
|
||||
access_token: 'ACCESSTOKEN',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const tests: WorkflowTestData[] = [
|
||||
{
|
||||
description: 'should use correct credentials',
|
||||
@@ -92,33 +94,10 @@ describe('Microsoft Entra Node', () => {
|
||||
},
|
||||
],
|
||||
},
|
||||
credentials,
|
||||
},
|
||||
];
|
||||
|
||||
beforeAll(() => {
|
||||
jest
|
||||
.spyOn(CredentialsHelper.prototype, 'authenticate')
|
||||
.mockImplementation(
|
||||
async (
|
||||
credentials: ICredentialDataDecryptedObject,
|
||||
typeName: string,
|
||||
requestParams: IHttpRequestOptions,
|
||||
): Promise<IHttpRequestOptions> => {
|
||||
if (typeName === 'microsoftEntraOAuth2Api') {
|
||||
return {
|
||||
...requestParams,
|
||||
headers: {
|
||||
authorization:
|
||||
'bearer ' + (credentials.oauthTokenData as IDataObject).access_token,
|
||||
},
|
||||
};
|
||||
} else {
|
||||
return requestParams;
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test.each(tests)('$description', async (testData) => {
|
||||
const { result } = await executeWorkflow(testData);
|
||||
const resultNodeData = Helpers.getResultNodeData(result, testData);
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
export const credentials = {
|
||||
microsoftExcelOAuth2Api: {
|
||||
scope: 'openid',
|
||||
oauthTokenData: {
|
||||
access_token: 'token',
|
||||
},
|
||||
},
|
||||
};

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, table => addTable', () => {
nock('https://graph.microsoft.com/v1.0/me')
.post(
@@ -23,5 +25,5 @@ describe('Test MicrosoftExcelV2, table => addTable', () => {
});

const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/addTable.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, table => append', () => {
nock('https://graph.microsoft.com/v1.0/me')
.get(
@@ -24,5 +26,5 @@ describe('Test MicrosoftExcelV2, table => append', () => {
.reply(200);

const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/append.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, table => convertToRange', () => {
nock('https://graph.microsoft.com/v1.0/me')
.post(
@@ -19,5 +21,5 @@ describe('Test MicrosoftExcelV2, table => convertToRange', () => {
});

const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/convertToRange.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, table => deleteTable', () => {
nock('https://graph.microsoft.com/v1.0/me')
.delete(
@@ -10,5 +12,5 @@ describe('Test MicrosoftExcelV2, table => deleteTable', () => {
.reply(200);

const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/deleteTable.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, table => getColumns', () => {
nock('https://graph.microsoft.com/v1.0/me')
.get(
@@ -21,5 +23,5 @@ describe('Test MicrosoftExcelV2, table => getColumns', () => {
.reply(200, { value: [] });

const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/getColumns.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, table => getRows', () => {
nock('https://graph.microsoft.com/v1.0/me')
.get(
@@ -30,5 +32,5 @@ describe('Test MicrosoftExcelV2, table => getRows', () => {
.reply(200, { value: [] });

const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/getRows.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, table => lookup', () => {
nock('https://graph.microsoft.com/v1.0/me')
.get(
@@ -38,5 +40,5 @@ describe('Test MicrosoftExcelV2, table => lookup', () => {
.reply(200, { value: [] });

const workflows = ['nodes/Microsoft/Excel/test/v2/node/table/lookup.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, workbook => addWorksheet', () => {
nock('https://graph.microsoft.com/v1.0/me')
.post('/drive/items/01FUWX3BQ4ATCOZNR265GLA6IJEZDQUE4I/workbook/createSession', {
@@ -22,5 +24,5 @@ describe('Test MicrosoftExcelV2, workbook => addWorksheet', () => {
.reply(200);

const workflows = ['nodes/Microsoft/Excel/test/v2/node/workbook/addWorksheet.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,11 +2,13 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, workbook => deleteWorkbook', () => {
nock('https://graph.microsoft.com/v1.0/me')
.delete('/drive/items/01FUWX3BXJLISGF2CFWBGYPHXFCXPXOJUK')
.reply(200);

const workflows = ['nodes/Microsoft/Excel/test/v2/node/workbook/deleteWorkbook.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, workbook => getAll', () => {
nock('https://graph.microsoft.com/v1.0/me')
.get("/drive/root/search(q='.xlsx')?%24select=name&%24top=2")
@@ -19,5 +21,5 @@ describe('Test MicrosoftExcelV2, workbook => getAll', () => {
});

const workflows = ['nodes/Microsoft/Excel/test/v2/node/workbook/getAll.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, worksheet => append', () => {
nock('https://graph.microsoft.com/v1.0/me')
.get(
@@ -38,5 +40,5 @@ describe('Test MicrosoftExcelV2, worksheet => append', () => {
.reply(200, { values: [[4, 'Don', 37, 'data 44']] });

const workflows = ['nodes/Microsoft/Excel/test/v2/node/worksheet/append.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, worksheet => clear', () => {
nock('https://graph.microsoft.com/v1.0/me')
.post(
@@ -13,5 +15,5 @@ describe('Test MicrosoftExcelV2, worksheet => clear', () => {
});

const workflows = ['nodes/Microsoft/Excel/test/v2/node/worksheet/clear.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, worksheet => deleteWorksheet', () => {
nock('https://graph.microsoft.com/v1.0/me')
.delete(
@@ -12,5 +14,5 @@ describe('Test MicrosoftExcelV2, worksheet => deleteWorksheet', () => {
});

const workflows = ['nodes/Microsoft/Excel/test/v2/node/worksheet/deleteWorksheet.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, worksheet => getAll', () => {
nock('https://graph.microsoft.com/v1.0/me')
.get(
@@ -25,5 +27,5 @@ describe('Test MicrosoftExcelV2, worksheet => getAll', () => {
});

const workflows = ['nodes/Microsoft/Excel/test/v2/node/worksheet/getAll.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, worksheet => readRows', () => {
nock('https://graph.microsoft.com/v1.0/me')
.get(
@@ -27,5 +29,5 @@ describe('Test MicrosoftExcelV2, worksheet => readRows', () => {
});

const workflows = ['nodes/Microsoft/Excel/test/v2/node/worksheet/readRows.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, worksheet => update', () => {
nock('https://graph.microsoft.com/v1.0/me')
.get(
@@ -52,5 +54,5 @@ describe('Test MicrosoftExcelV2, worksheet => update', () => {
});

const workflows = ['nodes/Microsoft/Excel/test/v2/node/worksheet/update.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftExcelV2, worksheet => upsert', () => {
nock('https://graph.microsoft.com/v1.0/me')
.get(
@@ -54,5 +56,5 @@ describe('Test MicrosoftExcelV2, worksheet => upsert', () => {
});

const workflows = ['nodes/Microsoft/Excel/test/v2/node/worksheet/upsert.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -1,5 +1,7 @@
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = ['nodes/Microsoft/Storage/test/workflows/blob_create.workflow.json'];
const workflowTests = workflowToTests(workflows);
@@ -55,6 +57,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});

@@ -1,5 +1,7 @@
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = ['nodes/Microsoft/Storage/test/workflows/blob_delete.workflow.json'];
const workflowTests = workflowToTests(workflows);
@@ -24,6 +26,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});

@@ -1,5 +1,7 @@
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = ['nodes/Microsoft/Storage/test/workflows/blob_get.workflow.json'];
const workflowTests = workflowToTests(workflows);
@@ -76,6 +78,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});

@@ -1,5 +1,7 @@
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = ['nodes/Microsoft/Storage/test/workflows/blob_getAll.workflow.json'];
const workflowTests = workflowToTests(workflows);
@@ -25,6 +27,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});

@@ -1,5 +1,7 @@
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = [
'nodes/Microsoft/Storage/test/workflows/blob_getAllLimitOptions.workflow.json',
@@ -20,6 +22,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});

@@ -1,5 +1,7 @@
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = ['nodes/Microsoft/Storage/test/workflows/container_create.workflow.json'];
const workflowTests = workflowToTests(workflows);
@@ -27,6 +29,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});

@@ -1,5 +1,7 @@
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = ['nodes/Microsoft/Storage/test/workflows/container_delete.workflow.json'];
const workflowTests = workflowToTests(workflows);
@@ -24,6 +26,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});

@@ -1,5 +1,7 @@
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = ['nodes/Microsoft/Storage/test/workflows/container_get.workflow.json'];
const workflowTests = workflowToTests(workflows);
@@ -33,6 +35,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});

@@ -1,5 +1,7 @@
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = ['nodes/Microsoft/Storage/test/workflows/container_getAll.workflow.json'];
const workflowTests = workflowToTests(workflows);
@@ -25,6 +27,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});

@@ -1,5 +1,7 @@
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = [
'nodes/Microsoft/Storage/test/workflows/container_getAllLimitOptions.workflow.json',
@@ -20,6 +22,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});

@@ -0,0 +1,29 @@
export const credentials = {
azureStorageOAuth2Api: {
grantType: 'authorizationCode',
authUrl: 'https://login.microsoftonline.com/common/oauth2/v2.0/authorize',
accessTokenUrl: 'https://login.microsoftonline.com/common/oauth2/v2.0/token',
clientId: 'CLIENTID',
clientSecret: 'CLIENTSECRET',
scope: 'https://storage.azure.com/user_impersonation',
authQueryParameters: 'response_mode=query',
authentication: 'body',
oauthTokenData: {
token_type: 'Bearer',
scope: 'https://storage.azure.com/user_impersonation',
expires_in: 4730,
ext_expires_in: 4730,
access_token: 'ACCESSTOKEN',
callbackQueryString: {
session_state: 'SESSIONSTATE',
},
},
account: 'myaccount',
baseUrl: 'https://myaccount.blob.core.windows.net',
},
azureStorageSharedKeyApi: {
account: 'devstoreaccount1',
key: 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==',
baseUrl: 'https://myaccount.blob.core.windows.net',
},
};
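
A condensed sketch, not part of this diff, of how the Azure Storage specs in this commit consume the fixture above: each workflow test gets the fixture attached via workflow.credentials before equalityTest runs, replacing the removed FAKE_CREDENTIALS_DATA lookup. The workflow path below is just one example taken from this commit.

import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node (illustrative only)', () => {
	const workflowTests = workflowToTests([
		'nodes/Microsoft/Storage/test/workflows/blob_create.workflow.json',
	]);

	for (const workflow of workflowTests) {
		// Both azureStorageOAuth2Api and azureStorageSharedKeyApi resolve from the fixture above.
		workflow.credentials = credentials;
		test(workflow.description, async () => await equalityTest(workflow));
	}
});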

@@ -1,43 +1,13 @@
import type {
ICredentialDataDecryptedObject,
IDataObject,
IHttpRequestOptions,
} from 'n8n-workflow';

import { CredentialsHelper } from '@test/nodes/credentials-helper';
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const workflows = ['nodes/Microsoft/Storage/test/workflows/credentials_oauth2.workflow.json'];
const workflowTests = workflowToTests(workflows);

describe('should use correct oauth2 credentials', () => {
beforeAll(() => {
jest
.spyOn(CredentialsHelper.prototype, 'authenticate')
.mockImplementation(
async (
credentials: ICredentialDataDecryptedObject,
typeName: string,
requestParams: IHttpRequestOptions,
): Promise<IHttpRequestOptions> => {
if (typeName === 'azureStorageOAuth2Api') {
return {
...requestParams,
headers: {
authorization: `bearer ${(credentials.oauthTokenData as IDataObject).access_token as string}`,
},
};
} else {
return requestParams;
}
},
);
});

afterAll(() => {
jest.restoreAllMocks();
});
beforeEach(() => jest.restoreAllMocks());

for (const workflow of workflowTests) {
workflow.nock = {
@@ -67,6 +37,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});
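
The CredentialsHelper.authenticate mock above is repeated almost verbatim for the Entra ID, OAuth2, and shared-key specs in this commit; a hypothetical helper like the sketch below could centralise it (mockBearerAuthentication is an assumed name, not an existing utility). It returns the jest spy so a caller can restore it in afterAll.

import type { ICredentialDataDecryptedObject, IDataObject, IHttpRequestOptions } from 'n8n-workflow';

import { CredentialsHelper } from '@test/nodes/credentials-helper';

export function mockBearerAuthentication(credentialType: string) {
	return jest.spyOn(CredentialsHelper.prototype, 'authenticate').mockImplementation(
		async (
			credentials: ICredentialDataDecryptedObject,
			typeName: string,
			requestParams: IHttpRequestOptions,
		): Promise<IHttpRequestOptions> => {
			// Only the credential type under test gets a bearer header; everything else passes through.
			if (typeName !== credentialType) return requestParams;
			return {
				...requestParams,
				headers: {
					authorization: `bearer ${(credentials.oauthTokenData as IDataObject).access_token as string}`,
				},
			};
		},
	);
}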

@@ -1,44 +1,17 @@
import type { ICredentialDataDecryptedObject, IHttpRequestOptions } from 'n8n-workflow';

import { CredentialsHelper } from '@test/nodes/credentials-helper';
import { FAKE_CREDENTIALS_DATA } from '@test/nodes/FakeCredentialsMap';
import { AzureStorageSharedKeyApi } from '@credentials/AzureStorageSharedKeyApi.credentials';
import { equalityTest, workflowToTests } from '@test/nodes/Helpers';

import { AzureStorageSharedKeyApi } from '../../../../../credentials/AzureStorageSharedKeyApi.credentials';
import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
const { account, baseUrl, key } = FAKE_CREDENTIALS_DATA.azureStorageSharedKeyApi;
const { account, baseUrl, key } = credentials.azureStorageSharedKeyApi;
const workflows = ['nodes/Microsoft/Storage/test/workflows/credentials_sharedKey.workflow.json'];
const workflowTests = workflowToTests(workflows);

describe('should use correct shared key credentials', () => {
beforeAll(() => {
jest
.spyOn(CredentialsHelper.prototype, 'authenticate')
.mockImplementation(
async (
_credentials: ICredentialDataDecryptedObject,
typeName: string,
requestParams: IHttpRequestOptions,
): Promise<IHttpRequestOptions> => {
if (typeName === 'azureStorageSharedKeyApi') {
return {
...requestParams,
headers: {
authorization:
'SharedKey Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==',
},
};
} else {
return requestParams;
}
},
);
});

afterAll(() => {
jest.restoreAllMocks();
});
beforeEach(() => jest.restoreAllMocks());

for (const workflow of workflowTests) {
workflow.nock = {
@@ -68,6 +41,7 @@ describe('Azure Storage Node', () => {
},
],
};
workflow.credentials = credentials;
test(workflow.description, async () => await equalityTest(workflow));
}
});
@@ -76,7 +50,7 @@ describe('Azure Storage Node', () => {
const azureStorageSharedKeyApi = new AzureStorageSharedKeyApi();

it('should remove undefined query parameters and headers', async () => {
const credentials: ICredentialDataDecryptedObject = {
const authCredentials: ICredentialDataDecryptedObject = {
account,
key,
};
@@ -89,14 +63,14 @@ describe('Azure Storage Node', () => {
method: 'GET',
};

const result = await azureStorageSharedKeyApi.authenticate(credentials, requestOptions);
const result = await azureStorageSharedKeyApi.authenticate(authCredentials, requestOptions);

expect(result.qs).toEqual({ restype: 'container' });
expect(result.headers).not.toHaveProperty('Content-Length');
});

it('should default method to GET if not provided', async () => {
const credentials: ICredentialDataDecryptedObject = {
const authCredentials: ICredentialDataDecryptedObject = {
account,
key,
};
@@ -108,12 +82,12 @@ describe('Azure Storage Node', () => {
},
};

const result = await azureStorageSharedKeyApi.authenticate(credentials, requestOptions);
const result = await azureStorageSharedKeyApi.authenticate(authCredentials, requestOptions);
expect(result.method).toBe('GET');
});

it('should generate a valid authorization header', async () => {
const credentials: ICredentialDataDecryptedObject = {
const authCredentials: ICredentialDataDecryptedObject = {
account,
key,
};
@@ -133,7 +107,7 @@ describe('Azure Storage Node', () => {
},
method: 'PUT',
};
const result = await azureStorageSharedKeyApi.authenticate(credentials, requestOptions);
const result = await azureStorageSharedKeyApi.authenticate(authCredentials, requestOptions);

expect(result.headers?.authorization).toBe(
'SharedKey devstoreaccount1:6sSQ3N4yNFQynBs/iLptIRPS5DQeaFBocW+dyYbAdOI=',

@@ -1,9 +1,8 @@
import type { ILoadOptionsFunctions, INodeParameterResourceLocator } from 'n8n-workflow';

import { FAKE_CREDENTIALS_DATA } from '@test/nodes/FakeCredentialsMap';

import { AzureStorage } from '../../AzureStorage.node';
import { XMsVersion } from '../../GenericFunctions';
import { credentials } from '../credentials';

describe('Azure Storage Node', () => {
describe('List search', () => {
@@ -25,7 +24,7 @@ describe('Azure Storage Node', () => {
});
const mockGetCredentials = jest.fn(async (type: string, _itemIndex?: number) => {
if (type === 'azureStorageSharedKeyApi') {
return FAKE_CREDENTIALS_DATA.azureStorageSharedKeyApi;
return credentials.azureStorageSharedKeyApi;
}
// eslint-disable-next-line n8n-local-rules/no-plain-errors
throw new Error('Unknown credentials');
@@ -76,7 +75,7 @@ describe('Azure Storage Node', () => {
});
const mockGetCredentials = jest.fn(async (type: string, _itemIndex?: number) => {
if (type === 'azureStorageSharedKeyApi') {
return FAKE_CREDENTIALS_DATA.azureStorageSharedKeyApi;
return credentials.azureStorageSharedKeyApi;
}
// eslint-disable-next-line n8n-local-rules/no-plain-errors
throw new Error('Unknown credentials');

@@ -0,0 +1,8 @@
export const credentials = {
microsoftTeamsOAuth2Api: {
scope: 'openid',
oauthTokenData: {
access_token: 'token',
},
},
};

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, channel => create', () => {
nock('https://graph.microsoft.com')
.post('/v1.0/teams/1644e7fe-547e-4223-a24f-922395865343/channels')
@@ -20,5 +22,5 @@ describe('Test MicrosoftTeamsV2, channel => create', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/channel/create.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, channel => deleteChannel', () => {
nock('https://graph.microsoft.com')
.delete(
@@ -10,5 +12,5 @@ describe('Test MicrosoftTeamsV2, channel => deleteChannel', () => {
.reply(200, {});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/channel/deleteChannel.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, channel => get', () => {
nock('https://graph.microsoft.com')
.get(
@@ -23,5 +25,5 @@ describe('Test MicrosoftTeamsV2, channel => get', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/channel/get.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, channel => getAll', () => {
nock('https://graph.microsoft.com')
.get('/v1.0/teams/1111-2222-3333/channels')
@@ -47,5 +49,5 @@ describe('Test MicrosoftTeamsV2, channel => getAll', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/channel/getAll.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, channel => update', () => {
nock('https://graph.microsoft.com')
.patch(
@@ -11,5 +13,5 @@ describe('Test MicrosoftTeamsV2, channel => update', () => {
.reply(200, {});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/channel/update.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, channelMessage => create', () => {
nock('https://graph.microsoft.com')
.post('/beta/teams/1111-2222-3333/channels/42:aaabbbccc.tacv2/messages', {
@@ -52,5 +54,5 @@ describe('Test MicrosoftTeamsV2, channelMessage => create', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/channelMessage/create.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, channelMessage => getAll', () => {
nock('https://graph.microsoft.com')
.get('/beta/teams/1111-2222-3333/channels/42:aaabbbccc.tacv2/messages')
@@ -64,5 +66,5 @@ describe('Test MicrosoftTeamsV2, channelMessage => getAll', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/channelMessage/getAll.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, chatMessage => create', () => {
nock('https://graph.microsoft.com')
.post('/v1.0/chats/19:ebed9ad42c904d6c83adf0db360053ec@thread.v2/messages')
@@ -46,5 +48,5 @@ describe('Test MicrosoftTeamsV2, chatMessage => create', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/chatMessage/create.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, chatMessage => get', () => {
nock('https://graph.microsoft.com')
.get('/v1.0/chats/19:ebed9ad42c904d6c83adf0db360053ec@thread.v2/messages/1698378560692')
@@ -47,5 +49,5 @@ describe('Test MicrosoftTeamsV2, chatMessage => get', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/chatMessage/get.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, chatMessage => getAll', () => {
nock('https://graph.microsoft.com')
.get('/v1.0/chats/19:ebed9ad42c904d6c83adf0db360053ec@thread.v2/messages')
@@ -87,5 +89,5 @@ describe('Test MicrosoftTeamsV2, chatMessage => getAll', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/chatMessage/getAll.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, task => create', () => {
nock('https://graph.microsoft.com')
.post('/v1.0/planner/tasks', {
@@ -70,5 +72,5 @@ describe('Test MicrosoftTeamsV2, task => create', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/task/create.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, task => deleteTask', () => {
nock('https://graph.microsoft.com')
.get('/v1.0/planner/tasks/lDrRJ7N_-06p_26iKBtJ6ZgAKffD')
@@ -11,5 +13,5 @@ describe('Test MicrosoftTeamsV2, task => deleteTask', () => {
.reply(200, {});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/task/deleteTask.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, task => get', () => {
nock('https://graph.microsoft.com')
.get('/v1.0/planner/tasks/lDrRJ7N_-06p_26iKBtJ6ZgAKffD')
@@ -58,5 +60,5 @@ describe('Test MicrosoftTeamsV2, task => get', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/task/get.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, task => getAll', () => {
nock('https://graph.microsoft.com')
.get('/v1.0/me')
@@ -147,5 +149,5 @@ describe('Test MicrosoftTeamsV2, task => getAll', () => {
});

const workflows = ['nodes/Microsoft/Teams/test/v2/node/task/getAll.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -2,6 +2,8 @@ import nock from 'nock';

import { testWorkflows } from '@test/nodes/Helpers';

import { credentials } from '../../../credentials';

describe('Test MicrosoftTeamsV2, task => update', () => {
nock('https://graph.microsoft.com')
.get('/v1.0/planner/tasks/lDrRJ7N_-06p_26iKBtJ6ZgAKffD')
@@ -15,5 +17,5 @@ describe('Test MicrosoftTeamsV2, task => update', () => {
.reply(200);

const workflows = ['nodes/Microsoft/Teams/test/v2/node/task/update.workflow.json'];
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});

@@ -6,10 +6,6 @@ import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import * as Helpers from '@test/nodes/Helpers';

describe('Test Move Binary Data Node', () => {
beforeEach(async () => {
await Helpers.initBinaryDataService();
});

const workflow = Helpers.readJsonFileSync(
'nodes/MoveBinaryData/test/MoveBinaryData.workflow.json',
);

@@ -5,12 +5,19 @@ import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import { workflowToTests, getWorkflowFilenames } from '@test/nodes/Helpers';

describe('Test N8n Node, expect base_url to be received from credentials', () => {
const credentials = {
n8nApi: {
apiKey: 'key123',
baseUrl: 'https://test.app.n8n.cloud/api/v1',
},
};

const workflows = getWorkflowFilenames(__dirname);
const tests = workflowToTests(workflows);
const tests = workflowToTests(workflows, credentials);

beforeAll(() => {
//base url is set in fake credentials map packages/nodes-base/test/nodes/FakeCredentialsMap.ts
const baseUrl = 'https://test.app.n8n.cloud/api/v1';
const { baseUrl } = credentials.n8nApi;
nock(baseUrl).get('/workflows?tags=n8n-test').reply(200, {});
});

@@ -1,11 +1,17 @@
import nock from 'nock';

import { FAKE_CREDENTIALS_DATA } from '@test/nodes/FakeCredentialsMap';
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';

describe('Test npm Node', () => {
const credentials = {
npmApi: {
accessToken: 'fake-npm-access-token',
registryUrl: 'https://fake.npm.registry',
},
};

beforeAll(() => {
const { registryUrl } = FAKE_CREDENTIALS_DATA.npmApi;
const { registryUrl } = credentials.npmApi;
const mock = nock(registryUrl); //.matchHeader('Authorization', `Bearer ${accessToken}`);

mock.get('/-/package/n8n/dist-tags').reply(200, {
@@ -29,5 +35,5 @@ describe('Test npm Node', () => {
});

const workflows = getWorkflowFilenames(__dirname);
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});
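
The matchHeader call above is left commented out in the diff; with the credentials now inlined it could be enabled roughly as sketched below (not part of this commit; whether the node sends exactly this Authorization header, and the reply body shown, are assumptions).

import nock from 'nock';

const credentials = {
	npmApi: {
		accessToken: 'fake-npm-access-token',
		registryUrl: 'https://fake.npm.registry',
	},
};

const { accessToken, registryUrl } = credentials.npmApi;
// Scope only matches requests that carry the fixture token.
const mock = nock(registryUrl).matchHeader('Authorization', `Bearer ${accessToken}`);
mock.get('/-/package/n8n/dist-tags').reply(200, { latest: '1.0.0' });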

@@ -10,6 +10,25 @@ import {
} from './apiResponses';

describe('PhilipsHue', () => {
const credentials = {
philipsHueOAuth2Api: {
grantType: 'authorizationCode',
appId: 'APPID',
authUrl: 'https://api.meethue.com/v2/oauth2/authorize',
accessTokenUrl: 'https://api.meethue.com/v2/oauth2/token',
authQueryParameters: 'appid=APPID',
scope: '',
authentication: 'header',
oauthTokenData: {
access_token: 'ACCESSTOKEN',
refresh_token: 'REFRESHTOKEN',
scope: '',
token_type: 'bearer',
expires_in: 86400,
},
},
};

describe('Run workflow', () => {
beforeAll(() => {
const mock = nock('https://api.meethue.com/route');
@@ -20,6 +39,6 @@ describe('PhilipsHue', () => {
});

const workflows = getWorkflowFilenames(__dirname);
testWorkflows(workflows);
testWorkflows(workflows, credentials);
});
});

@@ -7,7 +7,6 @@ import * as Helpers from '@test/nodes/Helpers';

describe('Test QuickChart Node', () => {
beforeEach(async () => {
await Helpers.initBinaryDataService();
nock('https://quickchart.io')
.persist()
.get(/chart.*/)

@@ -6,10 +6,6 @@ import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import * as Helpers from '@test/nodes/Helpers';

describe('Test Read Binary File Node', () => {
beforeEach(async () => {
await Helpers.initBinaryDataService();
});

const workflow = Helpers.readJsonFileSync(
'nodes/ReadBinaryFile/test/ReadBinaryFile.workflow.json',
);

@@ -6,10 +6,6 @@ import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
import * as Helpers from '@test/nodes/Helpers';

describe('Test Read Binary Files Node', () => {
beforeEach(async () => {
await Helpers.initBinaryDataService();
});

const workflow = Helpers.readJsonFileSync(
'nodes/ReadBinaryFiles/test/ReadBinaryFiles.workflow.json',
);

@@ -1,11 +1,7 @@
import { getWorkflowFilenames, initBinaryDataService, testWorkflows } from '@test/nodes/Helpers';
import { getWorkflowFilenames, testWorkflows } from '@test/nodes/Helpers';

describe('Test Read PDF Node', () => {
const workflows = getWorkflowFilenames(__dirname);

beforeAll(async () => {
await initBinaryDataService();
});

testWorkflows(workflows);
});

@@ -10,13 +10,19 @@ import opportunityDetails from './fixtures/opportunity-details.json';
import taskDetails from './fixtures/task-details.json';
import taskSummary from './fixtures/task-summary.json';
import tasks from './fixtures/tasks.json';
import userDeltails from './fixtures/user-details.json';
import userDetails from './fixtures/user-details.json';
import users from './fixtures/users.json';

describe('Salesforce Node', () => {
nock.emitter.on('no match', (req) => {
console.error('Unmatched request: ', req);
});
const credentials = {
salesforceOAuth2Api: {
scope: 'full refresh_token',
oauthTokenData: {
access_token: 'ACCESSTOKEN',
instance_url: 'https://salesforce.instance',
},
},
};

const salesforceNock = nock('https://salesforce.instance/services/data/v59.0');

@@ -29,14 +35,12 @@ describe('Salesforce Node', () => {
})
.reply(200, { records: users })
.get('/sobjects/user/id1')
.reply(200, userDeltails);
.reply(200, userDetails);
});

testWorkflows(['nodes/Salesforce/__test__/node/users.workflow.json']);
afterAll(() => salesforceNock.done());

it('should make the correct network calls', () => {
salesforceNock.done();
});
testWorkflows(['nodes/Salesforce/__test__/node/users.workflow.json'], credentials);
});

describe('tasks', () => {
@@ -63,11 +67,9 @@ describe('Salesforce Node', () => {
.reply(200, { success: true, errors: [] });
});

testWorkflows(['nodes/Salesforce/__test__/node/tasks.workflow.json']);
afterAll(() => salesforceNock.done());

it('should make the correct network calls', () => {
salesforceNock.done();
});
testWorkflows(['nodes/Salesforce/__test__/node/tasks.workflow.json'], credentials);
});

describe('accounts', () => {
@@ -96,11 +98,9 @@ describe('Salesforce Node', () => {
.reply(200, { success: true, errors: [] });
});

testWorkflows(['nodes/Salesforce/__test__/node/accounts.workflow.json']);
afterAll(() => salesforceNock.done());

it('should make the correct network calls', () => {
salesforceNock.done();
});
testWorkflows(['nodes/Salesforce/__test__/node/accounts.workflow.json'], credentials);
});

describe('search', () => {
@@ -113,11 +113,9 @@ describe('Salesforce Node', () => {
.reply(200, { records: accounts });
});

testWorkflows(['nodes/Salesforce/__test__/node/search.workflow.json']);
afterAll(() => salesforceNock.done());

it('should make the correct network calls', () => {
salesforceNock.done();
});
testWorkflows(['nodes/Salesforce/__test__/node/search.workflow.json'], credentials);
});

describe('opportunities', () => {
@@ -156,10 +154,8 @@ describe('Salesforce Node', () => {
.reply(200, opportunitiesSummary);
});

testWorkflows(['nodes/Salesforce/__test__/node/opportunities.workflow.json']);
afterAll(() => salesforceNock.done());

it('should make the correct network calls', () => {
salesforceNock.done();
});
testWorkflows(['nodes/Salesforce/__test__/node/opportunities.workflow.json'], credentials);
});
});
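
The Salesforce specs above replace the afterAll(() => salesforceNock.done()) calls with an explicit it block per resource, so an unconsumed mock surfaces as a named test failure rather than a teardown error. An equivalent assertion, not part of this diff, that also reports which interceptors were left pending:

it('should make the correct network calls', () => {
	// pendingMocks() lists every interceptor that was never hit, giving a more descriptive failure.
	expect(salesforceNock.pendingMocks()).toEqual([]);
});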
Some files were not shown because too many files have changed in this diff.