mirror of
https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git
synced 2025-12-17 10:02:05 +00:00
* feat: Design system color improvements and button component redesign.
* feat: Added button focus state and unit tests.
* refactor: Aligned n8n-button usage inside of editor-ui.
* test: Updated snapshots.
* refactor: Extracted focus outline width into scss variable.
* fix: Fixed select input border-radius.
* refactor: Removed element-ui references in button.
* fix: Fixed scss variable imports.
* feat: Added color-neutral variable story.
* fix: Fixed color-secondary variable definition.
* feat: Added color-white story.
* test: Updated button snapshot.
* feat: Replaced zoom buttons with new n8n-icon-button.
* feat: Added stories for float utilities.
* chore: Updated color shades generation code for later use.
* chore: Removed color-white code.
* chore: Updated story properties for button components.
* fix: Added el-button fallback for places where el-button is not replaceable (messagebox).
* feat: Reverted to css modules. Replaced el-button with n8n-button at application level.
* test: Updated button snapshot.
* fix: Fixed element-ui locally referenced buttons (via components: {}).
* fix: Updated colors. Removed irrelevant validation. Added ElButton override component.
* test: Updated button override snapshot.
* fix: Various button adjustments and fixes.
* fix: Updated button disabled state.
* test: Updated snapshots.
* fix: Consolidated css variables changes.
* Data pinning (#3512)
* refactor: Aligned n8n-button usage inside of editor-ui.
* feat: Added edit data button on json hover.
* feat: Extracted code editor into separate form component.
* feat: Added edit data button on json hover.
* feat: Added pinData and edit mode methods.
* 🔥 Remove conflict markers
* ✏️ Update i18n keys
* ⚡ Add JSON validation
* 🗃️ Add `pinData` column to `workflow_entity`
* 📘 Tighten type
* ⚡ Make `pinData` column nullable
* ⚡ Adjust workflow endpoints for pin data
* 📘 Improve types
* ✏️ Improve wording
* Inject pindata into items flow (#3420)
* ⚡ Inject pin data - Second approach
* 🔥 Remove unneeded lint exception
* feat: Added edit data button on json hover.
* feat: Extracted code editor into separate form component.
* feat: Added edit data button on json hover.
* fix: Fixed rebase conflicts.
* ⏪ Undo button change
* 🐛 Fix runNode call
Adjust per update in bdb84130d6
* 🧪 Fix workflow tests
* 🐛 More merge conflict fixes
* feat: Added pin/unpin button and store mutations.
* feat: Size check. Various design and ux improvements.
* ⚡ Add transformer
* ⚡ Hoist pin data
* ⚡ Adjust endpoints for hoisted pin data
* 📘 Expand interface
* 🐛 Fix stray array
* 👕 Fix build
* 👕 Add lint exception
* 👕 Fix header
* 🎨 Add color secondary tints
* ✨ Create `HeaderMessage` component
* ⚡ Adjust `InfoTip` component
* ✨ Add `HeaderMessage` to `RunData`
* 🐛 Fix console error
* 👕 Fix lint
* ⚡ Consolidate `HeaderMessage` and `Callout`
* ⏪ Undo `InfoTip` changes
* 🔥 Remove duplicate icons
* ⚡ Simplify template
* 🎨 Change cursor for action text
* 👕 Fix lint
* ⚡ Add URL
* 🐛 Fix handler name
* ⚡ Use constant
* ♻️ Refactor per feedback
* fix: Various fixes after data pinning relocation.
* fix: Added store mutation for setting pinned data.
* feat: Added pinned state for workflow canvas node.
* fix: Fixed workflow saving.
* fix: Removed pinData hoisting (no longer necessary).
* feat: Added canPinData flag to hide for input pane and binary data. Fixed unpin and execute flow.
* ⚡ Fixes for canvas pin data (#3587)
* ⚡ Fixes for canvas pin data
* 📘 Rename type
* 🧪 Fix unrelated Public API test
* 🔥 Remove logging
* feat: Updated pinData mixin to no longer include extra fields.
* ⚡ Output same pindata for every run
* 🎨 Fix cropping
* 🔥 Remove unrelated logging
* feat: Moved edit button next to pin button.
* feat: Changed data to be inserted for empty state.
* chore: Changed invalid editor output translation.
* feat: Added error line reporting on JSON Validation.
* feat: Migrated pinData edit mode to store.
* chore: Merged duplicate node border color condition.
* feat: Moved pin data validation to mixin. Added check before closing ndv modal.
* fix: Changed pinned data size calculation to discard active node pin data.
* feat: Added support for rename and delete node with pin data.
* feat: Simplified editing state. Fixed edit mode in input panel after store migration.
* feat: Various data pinning improvements.
* fix: Fixed callout link underline.
* refactor: Added support for both string and objects for data size check.
* feat: Added disabled node check for input panel. Fixed monaco editor resizing.
* fix: Fixed edit mode footer size.
* ⚡ Fix pindata items per run
* 👕 Remove unneeded exception
* refactor: Added isValidPinData() helper method.
* refactor: Changed how string size in bytes is calculated.
* refactor: Updated pinData mixin interface.
* refactor: Merged filter and reduce in pinDataSize calculation.
* fix: Changed code-editor to correct type.
* fix: Added insert test data message to trigger nodes.
* feat: Disabled data pinning for multiple output nodes.
* refactor: Updated ndv.input.disabled translation to include node name.
* refactor: Aligned n8n-button usage inside of editor-ui.
* feat: Added edit data button on json hover.
* feat: Extracted code editor into separate form component.
* feat: Added edit data button on json hover.
* feat: Added pinData and edit mode methods.
* 🔥 Remove conflict markers
* ✏️ Update i18n keys
* ⚡ Add JSON validation
* 🗃️ Add `pinData` column to `workflow_entity`
* 📘 Tighten type
* ⚡ Make `pinData` column nullable
* ⚡ Adjust workflow endpoints for pin data
* 📘 Improve types
* ✏️ Improve wording
* Inject pindata into items flow (#3420)
* ⚡ Inject pin data - Second approach
* 🔥 Remove unneeded lint exception
* feat: Added edit data button on json hover.
* feat: Extracted code editor into separate form component.
* feat: Added edit data button on json hover.
* fix: Fixed rebase conflicts.
* ⏪ Undo button change
* 🐛 Fix runNode call
Adjust per update in bdb84130d6
* 🧪 Fix workflow tests
* 🐛 More merge conflict fixes
* feat: Added pin/unpin button and store mutations.
* feat: Size check. Various design and ux improvements.
* ⚡ Add transformer
* ⚡ Hoist pin data
* ⚡ Adjust endpoints for hoisted pin data
* 📘 Expand interface
* 🐛 Fix stray array
* 👕 Fix build
* 🎨 Add color secondary tints
* ✨ Create `HeaderMessage` component
* ⚡ Adjust `InfoTip` component
* ✨ Add `HeaderMessage` to `RunData`
* 🐛 Fix console error
* 👕 Fix lint
* ⚡ Consolidate `HeaderMessage` and `Callout`
* ⏪ Undo `InfoTip` changes
* 🔥 Remove duplicate icons
* ⚡ Simplify template
* 🎨 Change cursor for action text
* 👕 Fix lint
* ⚡ Add URL
* 🐛 Fix handler name
* ⚡ Use constant
* ♻️ Refactor per feedback
* fix: Various fixes after data pinning relocation.
* fix: Added store mutation for setting pinned data.
* feat: Added pinned state for workflow canvas node.
* ⚡ Fixes for canvas pin data (#3587)
* ⚡ Fixes for canvas pin data
* 📘 Rename type
* 🧪 Fix unrelated Public API test
* 🔥 Remove logging
* feat: Updated pinData mixin to no longer include extra fields.
* fix: Removed pinData hoisting (no longer necessary).
* chore: Merged duplicate node border color condition.
* ⚡ Output same pindata for every run
* 🎨 Fix cropping
* 🐛 Fix excess closing template tag
* fix: Removed rogue template tag after merge.
* fix: Fixed code-editor resizing when moving ndv panel.
* feat: Added pin data support for node duplication.
* ⚡ Implement telemetry
* ♻️ Add clarifications from call
* fix: Fixed run data header height.
* feat: Removed border from pin data callout.
* feat: Added line-break before 'or insert pin data'.
* feat: Changed enterEditMode to always insert test data if there's no execution data.
* feat: Removed copy output tooltip.
* feat: Removed unpin tooltip.
* fix: Removed thumbtack icon rotation.
* fix: Removed run info from Edit Output title.
* feat: Hid edit and pin buttons when editing.
* feat: Updated monaco code-editor padding and borders.
* feat: Progress on pinData error message format
* feat: Updated copy feature to work without any selected value.
* feat: Moved save and cancel buttons. Cleared notifications on save.
* feat: Changed pin data beforeClosing confirm text.
* feat: Closing ndv when discarding or saving pindata on close.
* feat: Added split in batches node to pin data denylist.
* fix: Added missing margin-bottom to webhook node.
* feat: Moved thumbtack icon to the right, replacing the checkmark.
* fix: Hid pagination while editing.
* feat: Added pin data discovery flow.
* feat: Changed pin data discovery flow to avoid tooltip glitching.
* fix: Changed copy selection to copy all input data.
* feat: Updated pin data validation error message for unexpected single quotes.
* fix: Replaced :manual='true' prop with manual shorthand.
* fix: Removed unused variable.
* chore: Renamed translation key to node.discovery.pinData.
* refactor: Extracted isPinDataNodeType to pinData mixin.
* fix: Updated watch condition to improve performance.
* refactor: Renamed some pin data variables and methods as per review.
* fix: Added partial translation for JSON.parse pin data error messages.
* chore: Temporarily disabled failing unit test.
* 🧪 Fix data pinning workflow retrieval test
* 🔥 Remove unused imports
* 🔥 Remove leftover line
* ⚡ Skip pindata node issues on BE
* ⚡ Skip pindata node issues on FE
* ⚡ Hide `RunInfo` for pindata node
* ⚡ Hide purple banner in edit output mode
* feat: Updated data pinning discoverability flow.
* fix: Fixed paginated data pinning.
* fix: Disabled pin data in read only mode.
* 🐛 Fix runtime error with non-array
* fix: Fixed loading of pin data when opening an execution.
* ⚡ Adjust stale data warning for pinned data
* ⚡ Skip auth in endpoint
* ⚡ Mark start node for pinned trigger
* ✏️ Comment on passthrough
* 🔥 Remove comment
* Final pindata metrics changes (#3673)
* 🐛 Fix `pinData` tracked as `0`
* ⚡ Add `is_pinned` to `nodesGraph`
* 📘 Extend `IWorkflowBase`
* ⚡ Handle `pinData` being `undefined`
* ⚡ Add `data_pinning_tooltip_presented`
* ♻️ Refactor to remove circular dependency
* fix: Added pin data handling when importing workflow. (#3698)
* 🔥 Remove helper from WorkflowExecute
* ⚡ Add logic for single pinned trigger
* 👕 Remove lint exception
* fix: Added pin data handling in importWorkflowExact.
* N8N-4077 data pinning discoverability part 2 (#3701)
* fix: Fixed pin data discovery tooltip position when moving canvas.
* feat: Updated data pinning discovery tooltip copy.
* Fix data pinning build (#3702)
* ⚡ Disable edit button for disabled node
* ⚡ Ensure disabled pinned nodes are passthrough
* 🐛 Fix JSON key unfurling in edit mode
* ⚡ Improve implementation
* 🐛 Fix console error
* fix: Fixed copying pinned output data. (#3715)
* Fix pinning for webhook responding with output from last node (#3719)
* fix: Fixed entering edit mode after refresh.
* fix: Fixed type error during build.
* fix: RunData import formatting.
* chore: Updated pin data types.
* fix: Added missing type to stringSizeInBytes.
Co-authored-by: Iván Ovejero <ivov.src@gmail.com>
* fix: Showing pin data without executing the node only in the output pane.
* fix: Updated no data message when previous node not executed.
* feat: Added expression input and evaluation for pin data nodes without execution.
* chore: Fixed linting issues and removed remnant console.log().
* chore: Reverted package-lock changes.
* fix: Removed pin data store changes.
* fix: Created a new object using vuex runExecutionData.
* fix: Fixed bug appearing when adding a new node after executing.
* fix: Fix editor-ui build
* feat: Added green node connectors when having pin data output.
* chore: Fixed linting errors.
* fix: Added pin data eventBus unsubscribe.
* fix: Added pin data color check after adding a connection.
* 🎨 Add pindata styles
Co-authored-by: Iván Ovejero <ivov.src@gmail.com>
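
The commits above track the data pinning feature end to end: a nullable `pinData` column on `workflow_entity`, JSON validation in the editor, injection of pinned data into the items flow, canvas and NDV UI work, and telemetry. As a rough sketch only (the exact types live in the n8n source, not in this log), pinned data can be pictured as JSON keyed by node name, where each entry stands in for that node's execution output:

// Illustrative sketch, not code from this repository.
// Field names other than `pinData` are assumptions.
interface WorkflowRecord {
	id: string;
	name: string;
	// nullable column per the migration commits above
	pinData?: { [nodeName: string]: Array<Record<string, unknown>> } | null;
}

const workflow: WorkflowRecord = {
	id: '1',
	name: 'test workflow',
	pinData: {
		Webhook: [{ orderId: 42, status: 'open' }], // served instead of executing the node
	},
};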
718 lines · 20 KiB · TypeScript
import { exec as callbackExec } from 'child_process';
import { promisify } from 'util';

import { createConnection, getConnection, ConnectionOptions, Connection } from 'typeorm';
import { UserSettings } from 'n8n-core';

import config from '../../../config';
import {
	BOOTSTRAP_MYSQL_CONNECTION_NAME,
	BOOTSTRAP_POSTGRES_CONNECTION_NAME,
	DB_INITIALIZATION_TIMEOUT,
	MAPPING_TABLES,
	MAPPING_TABLES_TO_CLEAR,
} from './constants';
import { DatabaseType, Db, ICredentialsDb } from '../../../src';
import { randomApiKey, randomEmail, randomName, randomString, randomValidPassword } from './random';
import { CredentialsEntity } from '../../../src/databases/entities/CredentialsEntity';
import { hashPassword } from '../../../src/UserManagement/UserManagementHelper';
import { entities } from '../../../src/databases/entities';
import { mysqlMigrations } from '../../../src/databases/migrations/mysqldb';
import { postgresMigrations } from '../../../src/databases/migrations/postgresdb';
import { sqliteMigrations } from '../../../src/databases/migrations/sqlite';
import { categorize, getPostgresSchemaSection } from './utils';
import { createCredentiasFromCredentialsEntity } from '../../../src/CredentialsHelper';

import type { Role } from '../../../src/databases/entities/Role';
import type { CollectionName, CredentialPayload, InstalledNodePayload, InstalledPackagePayload, MappingName } from './types';
import { InstalledPackages } from '../../../src/databases/entities/InstalledPackages';
import { InstalledNodes } from '../../../src/databases/entities/InstalledNodes';
import { User } from '../../../src/databases/entities/User';
import { WorkflowEntity } from '../../../src/databases/entities/WorkflowEntity';
import { ExecutionEntity } from '../../../src/databases/entities/ExecutionEntity';
import { TagEntity } from '../../../src/databases/entities/TagEntity';

const exec = promisify(callbackExec);

/**
 * Initialize one test DB per suite run, with bootstrap connection if needed.
 */
export async function init() {
	const dbType = config.getEnv('database.type');

	if (dbType === 'sqlite') {
		jest.setTimeout(DB_INITIALIZATION_TIMEOUT);

		// no bootstrap connection required
		const testDbName = `n8n_test_sqlite_${randomString(6, 10)}_${Date.now()}`;
		await Db.init(getSqliteOptions({ name: testDbName }));
		await getConnection(testDbName).runMigrations({ transaction: 'none' });

		return { testDbName };
	}

	if (dbType === 'postgresdb') {
		jest.setTimeout(DB_INITIALIZATION_TIMEOUT);

		let bootstrapPostgres;
		const pgOptions = getBootstrapPostgresOptions();

		try {
			bootstrapPostgres = await createConnection(pgOptions);
		} catch (error) {
			const pgConfig = getPostgresSchemaSection();

			if (!pgConfig) throw new Error("Failed to find config schema section for 'postgresdb'");

			const message = [
				"ERROR: Failed to connect to Postgres default DB 'postgres'",
				'Please review your Postgres connection options:',
				`host: ${pgOptions.host} | port: ${pgOptions.port} | schema: ${pgOptions.schema} | username: ${pgOptions.username} | password: ${pgOptions.password}`,
				'Fix by setting correct values via environment variables:',
				`${pgConfig.host.env} | ${pgConfig.port.env} | ${pgConfig.schema.env} | ${pgConfig.user.env} | ${pgConfig.password.env}`,
				'Otherwise, make sure your Postgres server is running.',
			].join('\n');

			console.error(message);

			process.exit(1);
		}

		const testDbName = `pg_${randomString(6, 10)}_${Date.now()}_n8n_test`;
		await bootstrapPostgres.query(`CREATE DATABASE ${testDbName};`);

		try {
			const schema = config.getEnv('database.postgresdb.schema');
			await exec(`psql -d ${testDbName} -c "CREATE SCHEMA IF NOT EXISTS ${schema}";`);
		} catch (error) {
			if (error instanceof Error && error.message.includes('command not found')) {
				console.error(
					'psql command not found. Make sure psql is installed and added to your PATH.',
				);
			}
			process.exit(1);
		}

		await Db.init(getPostgresOptions({ name: testDbName }));

		return { testDbName };
	}

	if (dbType === 'mysqldb') {
		// initialization timeout in test/setup.ts

		const bootstrapMysql = await createConnection(getBootstrapMySqlOptions());

		const testDbName = `mysql_${randomString(6, 10)}_${Date.now()}_n8n_test`;
		await bootstrapMysql.query(`CREATE DATABASE ${testDbName};`);

		await Db.init(getMySqlOptions({ name: testDbName }));

		return { testDbName };
	}

	throw new Error(`Unrecognized DB type: ${dbType}`);
}

/**
 * Drop test DB, closing bootstrap connection if existing.
 */
export async function terminate(testDbName: string) {
	const dbType = config.getEnv('database.type');

	if (dbType === 'sqlite') {
		await getConnection(testDbName).close();
	}

	if (dbType === 'postgresdb') {
		await getConnection(testDbName).close();

		const bootstrapPostgres = getConnection(BOOTSTRAP_POSTGRES_CONNECTION_NAME);
		await bootstrapPostgres.query(`DROP DATABASE ${testDbName}`);
		await bootstrapPostgres.close();
	}

	if (dbType === 'mysqldb') {
		await getConnection(testDbName).close();

		const bootstrapMySql = getConnection(BOOTSTRAP_MYSQL_CONNECTION_NAME);
		await bootstrapMySql.query(`DROP DATABASE ${testDbName}`);
		await bootstrapMySql.close();
	}
}

async function truncateMappingTables(
	dbType: DatabaseType,
	collections: Array<CollectionName>,
	testDb: Connection,
) {
	const mappingTables = collections.reduce<string[]>((acc, collection) => {
		const found = MAPPING_TABLES_TO_CLEAR[collection];

		if (found) acc.push(...found);

		return acc;
	}, []);

	if (dbType === 'sqlite') {
		const promises = mappingTables.map((tableName) =>
			testDb.query(
				`DELETE FROM ${tableName}; DELETE FROM sqlite_sequence WHERE name=${tableName};`,
			),
		);

		return Promise.all(promises);
	}

	if (dbType === 'postgresdb') {
		const schema = config.getEnv('database.postgresdb.schema');

		// `TRUNCATE` in postgres cannot be parallelized
		for (const tableName of mappingTables) {
			const fullTableName = `${schema}.${tableName}`;
			await testDb.query(`TRUNCATE TABLE ${fullTableName} RESTART IDENTITY CASCADE;`);
		}

		return Promise.resolve([]);
	}

	// mysqldb, mariadb

	const promises = mappingTables.flatMap((tableName) => [
		testDb.query(`DELETE FROM ${tableName};`),
		testDb.query(`ALTER TABLE ${tableName} AUTO_INCREMENT = 1;`),
	]);

	return Promise.all(promises);
}

/**
 * Truncate specific DB tables in a test DB.
 *
 * @param collections Array of entity names whose tables to truncate.
 * @param testDbName Name of the test DB to truncate tables in.
 */
export async function truncate(collections: Array<CollectionName>, testDbName: string) {
	const dbType = config.getEnv('database.type');
	const testDb = getConnection(testDbName);

	if (dbType === 'sqlite') {
		await testDb.query('PRAGMA foreign_keys=OFF');

		const truncationPromises = collections.map((collection) => {
			const tableName = toTableName(collection);
			Db.collections[collection].clear();
			return testDb.query(
				`DELETE FROM ${tableName}; DELETE FROM sqlite_sequence WHERE name=${tableName};`,
			);
		});

		truncationPromises.push(truncateMappingTables(dbType, collections, testDb));

		await Promise.all(truncationPromises);

		return testDb.query('PRAGMA foreign_keys=ON');
	}

	if (dbType === 'postgresdb') {
		const schema = config.getEnv('database.postgresdb.schema');

		// `TRUNCATE` in postgres cannot be parallelized
		for (const collection of collections) {
			const fullTableName = `${schema}.${toTableName(collection)}`;
			await testDb.query(`TRUNCATE TABLE ${fullTableName} RESTART IDENTITY CASCADE;`);
		}

		return await truncateMappingTables(dbType, collections, testDb);
	}

	/**
	 * MySQL `TRUNCATE` requires enabling and disabling the global variable `foreign_key_checks`,
	 * which cannot be safely manipulated by parallel tests, so use `DELETE` and `AUTO_INCREMENT`.
	 * Clear shared tables first to avoid deadlock: https://stackoverflow.com/a/41174997
	 */
	if (dbType === 'mysqldb') {
		const { pass: isShared, fail: isNotShared } = categorize(
			collections,
			(collectionName: CollectionName) => collectionName.toLowerCase().startsWith('shared'),
		);

		await truncateMySql(testDb, isShared);
		await truncateMappingTables(dbType, collections, testDb);
		await truncateMySql(testDb, isNotShared);
	}
}
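
/*
 * Usage sketch (not part of the original file): a typical integration test wires
 * `init`, `truncate`, and `terminate` into the Jest lifecycle. The import path and
 * collection names below are assumptions for illustration.
 *
 *   import * as testDb from './shared/testDb';
 *
 *   let testDbName = '';
 *
 *   beforeAll(async () => {
 *     ({ testDbName } = await testDb.init());
 *   });
 *
 *   beforeEach(async () => {
 *     await testDb.truncate(['Workflow', 'SharedWorkflow'], testDbName);
 *   });
 *
 *   afterAll(async () => {
 *     await testDb.terminate(testDbName);
 *   });
 */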

const isMapping = (collection: string): collection is MappingName =>
	Object.keys(MAPPING_TABLES).includes(collection);

function toTableName(sourceName: CollectionName | MappingName) {
	if (isMapping(sourceName)) return MAPPING_TABLES[sourceName];

	return {
		Credentials: 'credentials_entity',
		Workflow: 'workflow_entity',
		Execution: 'execution_entity',
		Tag: 'tag_entity',
		Webhook: 'webhook_entity',
		Role: 'role',
		User: 'user',
		SharedCredentials: 'shared_credentials',
		SharedWorkflow: 'shared_workflow',
		Settings: 'settings',
		InstalledPackages: 'installed_packages',
		InstalledNodes: 'installed_nodes',
	}[sourceName];
}

function truncateMySql(connection: Connection, collections: CollectionName[]) {
	return Promise.all(
		collections.map(async (collection) => {
			const tableName = toTableName(collection);
			await connection.query(`DELETE FROM ${tableName};`);
			await connection.query(`ALTER TABLE ${tableName} AUTO_INCREMENT = 1;`);
		}),
	);
}

// ----------------------------------
// credential creation
// ----------------------------------

/**
 * Save a credential to the test DB, sharing it with a user.
 */
export async function saveCredential(
	credentialPayload: CredentialPayload,
	{ user, role }: { user: User; role: Role },
) {
	const newCredential = new CredentialsEntity();

	Object.assign(newCredential, credentialPayload);

	const encryptedData = await encryptCredentialData(newCredential);

	Object.assign(newCredential, encryptedData);

	const savedCredential = await Db.collections.Credentials.save(newCredential);

	savedCredential.data = newCredential.data;

	await Db.collections.SharedCredentials.save({
		user,
		credentials: savedCredential,
		role,
	});

	return savedCredential;
}
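
/*
 * Usage sketch (illustration only): create an owner and share a credential with them.
 * The exact `CredentialPayload` shape comes from ./types and is not shown in this file,
 * so the payload fields below are assumptions.
 *
 *   const owner = await createUser({ globalRole: await getGlobalOwnerRole() });
 *
 *   const credential = await saveCredential(
 *     { name: 'Test credential', type: 'githubApi', nodesAccess: [], data: { accessToken: 'abc' } },
 *     { user: owner, role: await getCredentialOwnerRole() },
 *   );
 */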

// ----------------------------------
// user creation
// ----------------------------------

/**
 * Store a user in the DB, defaulting to a `member`.
 */
export async function createUser(attributes: Partial<User> = {}): Promise<User> {
	const { email, password, firstName, lastName, globalRole, ...rest } = attributes;
	const user = {
		email: email ?? randomEmail(),
		password: await hashPassword(password ?? randomValidPassword()),
		firstName: firstName ?? randomName(),
		lastName: lastName ?? randomName(),
		globalRole: globalRole ?? (await getGlobalMemberRole()),
		...rest,
	};

	return Db.collections.User.save(user);
}

export function createUserShell(globalRole: Role): Promise<User> {
	if (globalRole.scope !== 'global') {
		throw new Error(`Invalid role received: ${JSON.stringify(globalRole)}`);
	}

	const shell: Partial<User> = { globalRole };

	if (globalRole.name !== 'owner') {
		shell.email = randomEmail();
	}

	return Db.collections.User.save(shell);
}

// --------------------------------------
// Installed nodes and packages creation
// --------------------------------------

export async function saveInstalledPackage(installedPackagePayload: InstalledPackagePayload): Promise<InstalledPackages> {
	const newInstalledPackage = new InstalledPackages();

	Object.assign(newInstalledPackage, installedPackagePayload);

	const savedInstalledPackage = await Db.collections.InstalledPackages.save(newInstalledPackage);
	return savedInstalledPackage;
}

export async function saveInstalledNode(installedNodePayload: InstalledNodePayload): Promise<InstalledNodes> {
	const newInstalledNode = new InstalledNodes();

	Object.assign(newInstalledNode, installedNodePayload);

	const savedInstalledNode = await Db.collections.InstalledNodes.save(newInstalledNode);
	return savedInstalledNode;
}

export function addApiKey(user: User): Promise<User> {
	user.apiKey = randomApiKey();
	return Db.collections.User.save(user);
}

// ----------------------------------
// role fetchers
// ----------------------------------

export function getGlobalOwnerRole() {
	return Db.collections.Role.findOneOrFail({
		name: 'owner',
		scope: 'global',
	});
}

export function getGlobalMemberRole() {
	return Db.collections.Role.findOneOrFail({
		name: 'member',
		scope: 'global',
	});
}

export function getWorkflowOwnerRole() {
	return Db.collections.Role.findOneOrFail({
		name: 'owner',
		scope: 'workflow',
	});
}

export function getCredentialOwnerRole() {
	return Db.collections.Role.findOneOrFail({
		name: 'owner',
		scope: 'credential',
	});
}

export function getAllRoles() {
	return Promise.all([
		getGlobalOwnerRole(),
		getGlobalMemberRole(),
		getWorkflowOwnerRole(),
		getCredentialOwnerRole(),
	]);
}

// ----------------------------------
// Execution helpers
// ----------------------------------

export async function createManyExecutions(
	amount: number,
	workflow: WorkflowEntity,
	callback: (workflow: WorkflowEntity) => Promise<ExecutionEntity>,
) {
	const executionsRequests = [...Array(amount)].map((_) => callback(workflow));
	return Promise.all(executionsRequests);
}

/**
 * Store an execution in the DB and assign it to a workflow.
 */
export async function createExecution(
	attributes: Partial<ExecutionEntity> = {},
	workflow: WorkflowEntity,
) {
	const { data, finished, mode, startedAt, stoppedAt, waitTill } = attributes;

	const execution = await Db.collections.Execution.save({
		data: data ?? '[]',
		finished: finished ?? true,
		mode: mode ?? 'manual',
		startedAt: startedAt ?? new Date(),
		...(workflow !== undefined && { workflowData: workflow, workflowId: workflow.id.toString() }),
		stoppedAt: stoppedAt ?? new Date(),
		waitTill: waitTill ?? null,
	});

	return execution;
}

/**
 * Store a successful execution in the DB and assign it to a workflow.
 */
export async function createSuccessfulExecution(workflow: WorkflowEntity) {
	return await createExecution(
		{
			finished: true,
		},
		workflow,
	);
}

/**
 * Store an error execution in the DB and assign it to a workflow.
 */
export async function createErrorExecution(workflow: WorkflowEntity) {
	return await createExecution(
		{
			finished: false,
			stoppedAt: new Date(),
		},
		workflow,
	);
}

/**
 * Store a waiting execution in the DB and assign it to a workflow.
 */
export async function createWaitingExecution(workflow: WorkflowEntity) {
	return await createExecution(
		{
			finished: false,
			waitTill: new Date(),
		},
		workflow,
	);
}

// ----------------------------------
// Tags
// ----------------------------------

export async function createTag(attributes: Partial<TagEntity> = {}) {
	const { name } = attributes;

	return await Db.collections.Tag.save({
		name: name ?? randomName(),
		...attributes,
	});
}

// ----------------------------------
// Workflow helpers
// ----------------------------------

export async function createManyWorkflows(
	amount: number,
	attributes: Partial<WorkflowEntity> = {},
	user?: User,
) {
	const workflowRequests = [...Array(amount)].map((_) => createWorkflow(attributes, user));
	return Promise.all(workflowRequests);
}

/**
 * Store a workflow in the DB (without a trigger) and optionally assign it to a user.
 * @param user user to assign the workflow to
 */
export async function createWorkflow(attributes: Partial<WorkflowEntity> = {}, user?: User) {
	const { active, name, nodes, connections } = attributes;

	const workflow = await Db.collections.Workflow.save({
		active: active ?? false,
		name: name ?? 'test workflow',
		nodes: nodes ?? [
			{
				name: 'Start',
				parameters: {},
				position: [-20, 260],
				type: 'n8n-nodes-base.start',
				typeVersion: 1,
			},
		],
		connections: connections ?? {},
		...attributes,
	});

	if (user) {
		await Db.collections.SharedWorkflow.save({
			user,
			workflow,
			role: await getWorkflowOwnerRole(),
		});
	}
	return workflow;
}

/**
 * Store a workflow in the DB (with a trigger) and optionally assign it to a user.
 * @param user user to assign the workflow to
 */
export async function createWorkflowWithTrigger(
	attributes: Partial<WorkflowEntity> = {},
	user?: User,
) {
	const workflow = await createWorkflow(
		{
			nodes: [
				{
					parameters: {},
					name: 'Start',
					type: 'n8n-nodes-base.start',
					typeVersion: 1,
					position: [240, 300],
				},
				{
					parameters: { triggerTimes: { item: [{ mode: 'everyMinute' }] } },
					name: 'Cron',
					type: 'n8n-nodes-base.cron',
					typeVersion: 1,
					position: [500, 300],
				},
				{
					parameters: { options: {} },
					name: 'Set',
					type: 'n8n-nodes-base.set',
					typeVersion: 1,
					position: [780, 300],
				},
			],
			connections: { Cron: { main: [[{ node: 'Set', type: 'main', index: 0 }]] } },
			...attributes,
		},
		user,
	);

	return workflow;
}
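
/*
 * Usage sketch (illustration only): override the defaults above and assign ownership.
 *
 *   const member = await createUser(); // defaults to the global `member` role
 *   const workflow = await createWorkflow({ name: 'My workflow', active: true }, member);
 *   const triggered = await createWorkflowWithTrigger({}, member);
 */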

// ----------------------------------
// connection options
// ----------------------------------

/**
 * Generate options for an in-memory sqlite database connection,
 * one per test suite run.
 */
export const getSqliteOptions = ({ name }: { name: string }): ConnectionOptions => {
	return {
		name,
		type: 'sqlite',
		database: ':memory:',
		entityPrefix: '',
		dropSchema: true,
		migrations: sqliteMigrations,
		migrationsTableName: 'migrations',
		migrationsRun: false,
	};
};

/**
 * Generate options for a bootstrap Postgres connection,
 * to create and drop test Postgres databases.
 */
export const getBootstrapPostgresOptions = () => {
	const username = config.getEnv('database.postgresdb.user');
	const password = config.getEnv('database.postgresdb.password');
	const host = config.getEnv('database.postgresdb.host');
	const port = config.getEnv('database.postgresdb.port');
	const schema = config.getEnv('database.postgresdb.schema');

	return {
		name: BOOTSTRAP_POSTGRES_CONNECTION_NAME,
		type: 'postgres',
		database: 'postgres', // pre-existing default database
		host,
		port,
		username,
		password,
		schema,
	} as const;
};

export const getPostgresOptions = ({ name }: { name: string }): ConnectionOptions => {
	const username = config.getEnv('database.postgresdb.user');
	const password = config.getEnv('database.postgresdb.password');
	const host = config.getEnv('database.postgresdb.host');
	const port = config.getEnv('database.postgresdb.port');
	const schema = config.getEnv('database.postgresdb.schema');

	return {
		name,
		type: 'postgres',
		database: name,
		host,
		port,
		username,
		password,
		entityPrefix: '',
		schema,
		dropSchema: true,
		migrations: postgresMigrations,
		migrationsRun: true,
		migrationsTableName: 'migrations',
		entities: Object.values(entities),
		synchronize: false,
		logging: false,
	};
};

/**
 * Generate options for a bootstrap MySQL connection,
 * to create and drop test MySQL databases.
 */
export const getBootstrapMySqlOptions = (): ConnectionOptions => {
	const username = config.getEnv('database.mysqldb.user');
	const password = config.getEnv('database.mysqldb.password');
	const host = config.getEnv('database.mysqldb.host');
	const port = config.getEnv('database.mysqldb.port');

	return {
		name: BOOTSTRAP_MYSQL_CONNECTION_NAME,
		database: BOOTSTRAP_MYSQL_CONNECTION_NAME,
		type: 'mysql',
		host,
		port,
		username,
		password,
	};
};

/**
 * Generate options for a MySQL database connection,
 * one per test suite run.
 */
export const getMySqlOptions = ({ name }: { name: string }): ConnectionOptions => {
	const username = config.getEnv('database.mysqldb.user');
	const password = config.getEnv('database.mysqldb.password');
	const host = config.getEnv('database.mysqldb.host');
	const port = config.getEnv('database.mysqldb.port');

	return {
		name,
		database: name,
		type: 'mysql',
		host,
		port,
		username,
		password,
		migrations: mysqlMigrations,
		migrationsTableName: 'migrations',
		migrationsRun: true,
	};
};
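
/*
 * Note (assumption, inferred from the config keys above): when running the suites against
 * Postgres or MySQL, these `database.*` options are typically supplied via environment
 * variables such as DB_TYPE, DB_POSTGRESDB_HOST, DB_POSTGRESDB_PORT, DB_POSTGRESDB_USER,
 * DB_POSTGRESDB_PASSWORD, and DB_POSTGRESDB_SCHEMA (and their DB_MYSQLDB_* counterparts).
 */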

// ----------------------------------
// encryption
// ----------------------------------

async function encryptCredentialData(credential: CredentialsEntity) {
	const encryptionKey = await UserSettings.getEncryptionKey();

	const coreCredential = createCredentiasFromCredentialsEntity(credential, true);

	// @ts-ignore
	coreCredential.setData(credential.data, encryptionKey);

	return coreCredential.getDataToSave() as ICredentialsDb;
}