refactor(core): Improve DB directory setup (#3502)
@@ -0,0 +1,46 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class InitialMigration1588102412422 implements MigrationInterface {
	name = 'InitialMigration1588102412422';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "${tablePrefix}credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(128) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`,
			undefined,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefix}07fde106c0b471d8cc80a64fc8" ON "${tablePrefix}credentials_entity" ("type") `,
			undefined,
		);
		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "${tablePrefix}execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime NOT NULL, "workflowData" text NOT NULL, "workflowId" varchar)`,
			undefined,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefix}c4d999a5e90784e8caccf5589d" ON "${tablePrefix}execution_entity" ("workflowId") `,
			undefined,
		);
		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "${tablePrefix}workflow_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text NOT NULL, "connections" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL, "settings" text, "staticData" text)`,
			undefined,
		);

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_entity"`, undefined);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}c4d999a5e90784e8caccf5589d"`, undefined);
		await queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity"`, undefined);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}07fde106c0b471d8cc80a64fc8"`, undefined);
		await queryRunner.query(`DROP TABLE "${tablePrefix}credentials_entity"`, undefined);
	}
}
@@ -0,0 +1,24 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class WebhookModel1592445003908 implements MigrationInterface {
	name = 'WebhookModel1592445003908';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`,
		);

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');
		await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`);
	}
}
@@ -0,0 +1,25 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class CreateIndexStoppedAt1594825041918 implements MigrationInterface {
	name = 'CreateIndexStoppedAt1594825041918';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "${tablePrefix}execution_entity" ("stoppedAt") `,
		);

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1"`);
	}
}
@@ -0,0 +1,30 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class MakeStoppedAtNullable1607431743769 implements MigrationInterface {
	name = 'MakeStoppedAtNullable1607431743769';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');
		// SQLite does not allow us to simply "alter column",
		// so we rewrite the table definition stored in sqlite_master directly.
		// Relaxing "stoppedAt" from NOT NULL to nullable is one of the few cases
		// where this is safe, as it has no side effects on existing rows.
		await queryRunner.query(`PRAGMA writable_schema = 1; `, undefined);
		await queryRunner.query(
			`UPDATE SQLITE_MASTER SET SQL = 'CREATE TABLE IF NOT EXISTS "${tablePrefix}execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime, "workflowData" text NOT NULL, "workflowId" varchar)' WHERE NAME = "${tablePrefix}execution_entity";`,
			undefined,
		);
		await queryRunner.query(`PRAGMA writable_schema = 0;`, undefined);

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		// This cannot be undone as the table might already contain null values
	}
}
@@ -0,0 +1,45 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class AddWebhookId1611071044839 implements MigrationInterface {
	name = 'AddWebhookId1611071044839';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE TABLE "temporary_webhook_entity" ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, "webhookId" varchar, "pathLength" integer, PRIMARY KEY ("webhookPath", "method"))`,
		);
		await queryRunner.query(
			`INSERT INTO "temporary_webhook_entity"("workflowId", "webhookPath", "method", "node") SELECT "workflowId", "webhookPath", "method", "node" FROM "${tablePrefix}webhook_entity"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}webhook_entity"`);
		await queryRunner.query(
			`ALTER TABLE "temporary_webhook_entity" RENAME TO "${tablePrefix}webhook_entity"`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}742496f199721a057051acf4c2" ON "${tablePrefix}webhook_entity" ("webhookId", "method", "pathLength") `,
		);

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}742496f199721a057051acf4c2"`);
		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}webhook_entity" RENAME TO "temporary_webhook_entity"`,
		);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}webhook_entity" ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}webhook_entity"("workflowId", "webhookPath", "method", "node") SELECT "workflowId", "webhookPath", "method", "node" FROM "temporary_webhook_entity"`,
		);
		await queryRunner.query(`DROP TABLE "temporary_webhook_entity"`);
	}
}
@@ -0,0 +1,127 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class CreateTagEntity1617213344594 implements MigrationInterface {
	name = 'CreateTagEntity1617213344594';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		// create tags table + relationship with workflow entity

		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}tag_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(24) NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`,
		);
		await queryRunner.query(
			`CREATE UNIQUE INDEX "IDX_${tablePrefix}8f949d7a3a984759044054e89b" ON "${tablePrefix}tag_entity" ("name") `,
		);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}workflows_tags" ("workflowId" integer NOT NULL, "tagId" integer NOT NULL, CONSTRAINT "FK_54b2f0343d6a2078fa137443869" FOREIGN KEY ("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, CONSTRAINT "FK_77505b341625b0b4768082e2171" FOREIGN KEY ("tagId") REFERENCES "${tablePrefix}tag_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("workflowId", "tagId"))`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}54b2f0343d6a2078fa13744386" ON "${tablePrefix}workflows_tags" ("workflowId") `,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}77505b341625b0b4768082e217" ON "${tablePrefix}workflows_tags" ("tagId") `,
		);

		// set default dates for `createdAt` and `updatedAt`

		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}07fde106c0b471d8cc80a64fc8"`);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}temporary_credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(32) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')))`,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}temporary_credentials_entity"("id", "name", "data", "type", "nodesAccess", "createdAt", "updatedAt") SELECT "id", "name", "data", "type", "nodesAccess", "createdAt", "updatedAt" FROM "${tablePrefix}credentials_entity"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}credentials_entity"`);
		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}temporary_credentials_entity" RENAME TO "${tablePrefix}credentials_entity"`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}07fde106c0b471d8cc80a64fc8" ON "${tablePrefix}credentials_entity" ("type") `,
		);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}8f949d7a3a984759044054e89b"`);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}temporary_tag_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(24) NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')))`,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}temporary_tag_entity"("id", "name", "createdAt", "updatedAt") SELECT "id", "name", "createdAt", "updatedAt" FROM "${tablePrefix}tag_entity"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}tag_entity"`);
		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}temporary_tag_entity" RENAME TO "${tablePrefix}tag_entity"`,
		);
		await queryRunner.query(
			`CREATE UNIQUE INDEX "IDX_${tablePrefix}8f949d7a3a984759044054e89b" ON "${tablePrefix}tag_entity" ("name") `,
		);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}temporary_workflow_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text, "connections" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "settings" text, "staticData" text)`,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}temporary_workflow_entity"("id", "name", "active", "nodes", "connections", "createdAt", "updatedAt", "settings", "staticData") SELECT "id", "name", "active", "nodes", "connections", "createdAt", "updatedAt", "settings", "staticData" FROM "${tablePrefix}workflow_entity"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_entity"`);
		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}temporary_workflow_entity" RENAME TO "${tablePrefix}workflow_entity"`,
		);

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');

		// `createdAt` and `updatedAt`

		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}workflow_entity" RENAME TO "${tablePrefix}temporary_workflow_entity"`,
		);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}workflow_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text NOT NULL, "connections" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL, "settings" text, "staticData" text)`,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}workflow_entity"("id", "name", "active", "nodes", "connections", "createdAt", "updatedAt", "settings", "staticData") SELECT "id", "name", "active", "nodes", "connections", "createdAt", "updatedAt", "settings", "staticData" FROM "${tablePrefix}temporary_workflow_entity"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}temporary_workflow_entity"`);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}8f949d7a3a984759044054e89b"`);
		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}tag_entity" RENAME TO "${tablePrefix}temporary_tag_entity"`,
		);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}tag_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(24) NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}tag_entity"("id", "name", "createdAt", "updatedAt") SELECT "id", "name", "createdAt", "updatedAt" FROM "${tablePrefix}temporary_tag_entity"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}temporary_tag_entity"`);
		await queryRunner.query(
			`CREATE UNIQUE INDEX "IDX_${tablePrefix}8f949d7a3a984759044054e89b" ON "${tablePrefix}tag_entity" ("name") `,
		);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}07fde106c0b471d8cc80a64fc8"`);
		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}credentials_entity" RENAME TO "temporary_credentials_entity"`,
		);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(32) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}credentials_entity"("id", "name", "data", "type", "nodesAccess", "createdAt", "updatedAt") SELECT "id", "name", "data", "type", "nodesAccess", "createdAt", "updatedAt" FROM "${tablePrefix}temporary_credentials_entity"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}temporary_credentials_entity"`);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}07fde106c0b471d8cc80a64fc8" ON "credentials_entity" ("type") `,
		);

		// tags

		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}77505b341625b0b4768082e217"`);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}54b2f0343d6a2078fa13744386"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}workflows_tags"`);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}8f949d7a3a984759044054e89b"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}tag_entity"`);
	}
}
@@ -0,0 +1,64 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class UniqueWorkflowNames1620821879465 implements MigrationInterface {
	name = 'UniqueWorkflowNames1620821879465';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		const workflowNames = await queryRunner.query(`
			SELECT name
			FROM "${tablePrefix}workflow_entity"
		`);

		for (const { name } of workflowNames) {
			const [duplicatesQuery, parameters] = queryRunner.connection.driver.escapeQueryWithParameters(
				`
					SELECT id, name
					FROM "${tablePrefix}workflow_entity"
					WHERE name = :name
					ORDER BY createdAt ASC
				`,
				{ name },
				{},
			);

			const duplicates = await queryRunner.query(duplicatesQuery, parameters);

			if (duplicates.length > 1) {
				await Promise.all(
					duplicates.map(({ id, name }: { id: number; name: string }, index: number) => {
						if (index === 0) return Promise.resolve();
						const [updateQuery, updateParams] =
							queryRunner.connection.driver.escapeQueryWithParameters(
								`
									UPDATE "${tablePrefix}workflow_entity"
									SET name = :name
									WHERE id = '${id}'
								`,
								{ name: `${name} ${index + 1}` },
								{},
							);

						return queryRunner.query(updateQuery, updateParams);
					}),
				);
			}
		}

		await queryRunner.query(
			`CREATE UNIQUE INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9" ON "${tablePrefix}workflow_entity" ("name") `,
		);

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9"`);
	}
}
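A brief illustration of the renaming scheme above (not part of the migration itself): duplicates are visited in creation order, the oldest workflow keeps its name, and each later one receives a numeric suffix derived from its position.

// Illustration only, assuming three workflows all named 'My workflow'.
// After UniqueWorkflowNames runs they become
// 'My workflow', 'My workflow 2', 'My workflow 3'.
const renamed = ['My workflow', 'My workflow', 'My workflow'].map((name, index) =>
	index === 0 ? name : `${name} ${index + 1}`,
);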
@@ -0,0 +1,55 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class AddWaitColumn1621707690587 implements MigrationInterface {
	name = 'AddWaitColumn1621707690587';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP TABLE IF EXISTS "${tablePrefix}temporary_execution_entity"`);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}temporary_execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime, "workflowData" text NOT NULL, "workflowId" varchar, "waitTill" DATETIME)`,
			undefined,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}temporary_execution_entity"("id", "data", "finished", "mode", "retryOf", "retrySuccessId", "startedAt", "stoppedAt", "workflowData", "workflowId") SELECT "id", "data", "finished", "mode", "retryOf", "retrySuccessId", "startedAt", "stoppedAt", "workflowData", "workflowId" FROM "${tablePrefix}execution_entity"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity"`);
		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}temporary_execution_entity" RENAME TO "${tablePrefix}execution_entity"`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "${tablePrefix}execution_entity" ("stoppedAt")`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2" ON "${tablePrefix}execution_entity" ("waitTill")`,
		);
		await queryRunner.query(`VACUUM;`);

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "${tablePrefix}temporary_execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime, "workflowData" text NOT NULL, "workflowId" varchar)`,
			undefined,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}temporary_execution_entity"("id", "data", "finished", "mode", "retryOf", "retrySuccessId", "startedAt", "stoppedAt", "workflowData", "workflowId") SELECT "id", "data", "finished", "mode", "retryOf", "retrySuccessId", "startedAt", "stoppedAt", "workflowData", "workflowId" FROM "${tablePrefix}execution_entity"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity"`);
		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}temporary_execution_entity" RENAME TO "${tablePrefix}execution_entity"`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "${tablePrefix}execution_entity" ("stoppedAt")`,
		);
		await queryRunner.query(`VACUUM;`);
	}
}
@@ -0,0 +1,308 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';
import { runChunked } from '../../utils/migrationHelpers';

// replacing the credentials in workflows and execution
// `nodeType: name` changes to `nodeType: { id, name }`

export class UpdateWorkflowCredentials1630330987096 implements MigrationInterface {
	name = 'UpdateWorkflowCredentials1630330987096';

	public async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		const credentialsEntities = await queryRunner.query(`
			SELECT id, name, type
			FROM "${tablePrefix}credentials_entity"
		`);

		const workflowsQuery = `
			SELECT id, nodes
			FROM "${tablePrefix}workflow_entity"
		`;

		// @ts-ignore
		await runChunked(queryRunner, workflowsQuery, (workflows) => {
			workflows.forEach(async (workflow) => {
				const nodes = JSON.parse(workflow.nodes);
				let credentialsUpdated = false;
				// @ts-ignore
				nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, name] of allNodeCredentials) {
							if (typeof name === 'string') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.name === name && credentials.type === type,
								);
								node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name };
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
								UPDATE "${tablePrefix}workflow_entity"
								SET nodes = :nodes
								WHERE id = '${workflow.id}'
							`,
							{ nodes: JSON.stringify(nodes) },
							{},
						);

					queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const waitingExecutionsQuery = `
			SELECT id, "workflowData"
			FROM "${tablePrefix}execution_entity"
			WHERE "waitTill" IS NOT NULL AND finished = 0
		`;
		// @ts-ignore
		await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
			waitingExecutions.forEach(async (execution) => {
				const data = JSON.parse(execution.workflowData);
				let credentialsUpdated = false;
				// @ts-ignore
				data.nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, name] of allNodeCredentials) {
							if (typeof name === 'string') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.name === name && credentials.type === type,
								);
								node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name };
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
								UPDATE "${tablePrefix}execution_entity"
								SET "workflowData" = :data
								WHERE id = '${execution.id}'
							`,
							{ data: JSON.stringify(data) },
							{},
						);

					queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const retryableExecutions = await queryRunner.query(`
			SELECT id, "workflowData"
			FROM "${tablePrefix}execution_entity"
			WHERE "waitTill" IS NULL AND finished = 0 AND mode != 'retry'
			ORDER BY "startedAt" DESC
			LIMIT 200
		`);
		// @ts-ignore
		retryableExecutions.forEach(async (execution) => {
			const data = JSON.parse(execution.workflowData);
			let credentialsUpdated = false;
			// @ts-ignore
			data.nodes.forEach((node) => {
				if (node.credentials) {
					const allNodeCredentials = Object.entries(node.credentials);
					for (const [type, name] of allNodeCredentials) {
						if (typeof name === 'string') {
							const matchingCredentials = credentialsEntities.find(
								// @ts-ignore
								(credentials) => credentials.name === name && credentials.type === type,
							);
							node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name };
							credentialsUpdated = true;
						}
					}
				}
			});
			if (credentialsUpdated) {
				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
						UPDATE "${tablePrefix}execution_entity"
						SET "workflowData" = :data
						WHERE id = '${execution.id}'
					`,
					{ data: JSON.stringify(data) },
					{},
				);

				queryRunner.query(updateQuery, updateParams);
			}
		});

		logMigrationEnd(this.name);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');

		const credentialsEntities = await queryRunner.query(`
			SELECT id, name, type
			FROM "${tablePrefix}credentials_entity"
		`);

		const workflowsQuery = `
			SELECT id, nodes
			FROM "${tablePrefix}workflow_entity"
		`;

		// @ts-ignore
		await runChunked(queryRunner, workflowsQuery, (workflows) => {
			// @ts-ignore
			workflows.forEach(async (workflow) => {
				const nodes = JSON.parse(workflow.nodes);
				let credentialsUpdated = false;
				// @ts-ignore
				nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, creds] of allNodeCredentials) {
							if (typeof creds === 'object') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore double-equals because creds.id can be string or number
									(credentials) => credentials.id == creds.id && credentials.type === type,
								);
								if (matchingCredentials) {
									node.credentials[type] = matchingCredentials.name;
								} else {
									// @ts-ignore
									node.credentials[type] = creds.name;
								}
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
								UPDATE "${tablePrefix}workflow_entity"
								SET nodes = :nodes
								WHERE id = '${workflow.id}'
							`,
							{ nodes: JSON.stringify(nodes) },
							{},
						);

					queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const waitingExecutionsQuery = `
			SELECT id, "workflowData"
			FROM "${tablePrefix}execution_entity"
			WHERE "waitTill" IS NOT NULL AND finished = 0
		`;

		// @ts-ignore
		await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
			// @ts-ignore
			waitingExecutions.forEach(async (execution) => {
				const data = JSON.parse(execution.workflowData);
				let credentialsUpdated = false;
				// @ts-ignore
				data.nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, creds] of allNodeCredentials) {
							if (typeof creds === 'object') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore double-equals because creds.id can be string or number
									(credentials) => credentials.id == creds.id && credentials.type === type,
								);
								if (matchingCredentials) {
									node.credentials[type] = matchingCredentials.name;
								} else {
									// @ts-ignore
									node.credentials[type] = creds.name;
								}
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
								UPDATE "${tablePrefix}execution_entity"
								SET "workflowData" = :data
								WHERE id = '${execution.id}'
							`,
							{ data: JSON.stringify(data) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const retryableExecutions = await queryRunner.query(`
			SELECT id, "workflowData"
			FROM "${tablePrefix}execution_entity"
			WHERE "waitTill" IS NULL AND finished = 0 AND mode != 'retry'
			ORDER BY "startedAt" DESC
			LIMIT 200
		`);

		// @ts-ignore
		retryableExecutions.forEach(async (execution) => {
			const data = JSON.parse(execution.workflowData);
			let credentialsUpdated = false;
			// @ts-ignore
			data.nodes.forEach((node) => {
				if (node.credentials) {
					const allNodeCredentials = Object.entries(node.credentials);
					for (const [type, creds] of allNodeCredentials) {
						if (typeof creds === 'object') {
							const matchingCredentials = credentialsEntities.find(
								// @ts-ignore double-equals because creds.id can be string or number
								(credentials) => credentials.id == creds.id && credentials.type === type,
							);
							if (matchingCredentials) {
								node.credentials[type] = matchingCredentials.name;
							} else {
								// @ts-ignore
								node.credentials[type] = creds.name;
							}
							credentialsUpdated = true;
						}
					}
				}
			});
			if (credentialsUpdated) {
				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
						UPDATE "${tablePrefix}execution_entity"
						SET "workflowData" = :data
						WHERE id = '${execution.id}'
					`,
					{ data: JSON.stringify(data) },
					{},
				);

				queryRunner.query(updateQuery, updateParams);
			}
		});
	}
}
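For reference, a sketch of the data shape this migration rewrites, using a hypothetical node that is not taken from the diff: the `credentials` map on each workflow node goes from a plain name string per credential type to an `{ id, name }` object, with `id` set to `null` when no matching credentials entity is found.

// Hypothetical node before UpdateWorkflowCredentials runs:
const before = {
	name: 'Send email',
	type: 'n8n-nodes-base.emailSend',
	credentials: { smtp: 'My SMTP account' },
};

// The same node after the migration, assuming the matching credentials row has id 12:
const after = {
	name: 'Send email',
	type: 'n8n-nodes-base.emailSend',
	credentials: { smtp: { id: '12', name: 'My SMTP account' } },
};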
@@ -0,0 +1,49 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class AddExecutionEntityIndexes1644421939510 implements MigrationInterface {
	name = 'AddExecutionEntityIndexes1644421939510';

	public async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP INDEX IF EXISTS 'IDX_${tablePrefix}c4d999a5e90784e8caccf5589d'`);

		await queryRunner.query(`DROP INDEX IF EXISTS 'IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2'`);

		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}06da892aaf92a48e7d3e400003' ON '${tablePrefix}execution_entity' ('workflowId', 'waitTill', 'id') `,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}78d62b89dc1433192b86dce18a' ON '${tablePrefix}execution_entity' ('workflowId', 'finished', 'id') `,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}1688846335d274033e15c846a4' ON '${tablePrefix}execution_entity' ('finished', 'id') `,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9' ON '${tablePrefix}execution_entity' ('waitTill', 'id') `,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}81fc04c8a17de15835713505e4' ON '${tablePrefix}execution_entity' ('workflowId', 'id') `,
		);
		logMigrationEnd(this.name);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}81fc04c8a17de15835713505e4'`);
		await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9'`);
		await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}1688846335d274033e15c846a4'`);
		await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}78d62b89dc1433192b86dce18a'`);
		await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}06da892aaf92a48e7d3e400003'`);
		await queryRunner.query(
			`CREATE INDEX 'IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2' ON '${tablePrefix}execution_entity' ('waitTill') `,
		);
		await queryRunner.query(
			`CREATE INDEX 'IDX_${tablePrefix}c4d999a5e90784e8caccf5589d' ON '${tablePrefix}execution_entity' ('workflowId') `,
		);
	}
}
@@ -0,0 +1,118 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import { v4 as uuid } from 'uuid';
import * as config from '../../../../config';
import {
	loadSurveyFromDisk,
	logMigrationEnd,
	logMigrationStart,
} from '../../utils/migrationHelpers';

export class CreateUserManagement1646992772331 implements MigrationInterface {
	name = 'CreateUserManagement1646992772331';

	public async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}role" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(32) NOT NULL, "scope" varchar NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), CONSTRAINT "UQ_${tablePrefix}5b49d0f504f7ef31045a1fb2eb8" UNIQUE ("scope", "name"))`,
		);

		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}user" ("id" varchar PRIMARY KEY NOT NULL, "email" varchar(255), "firstName" varchar(32), "lastName" varchar(32), "password" varchar, "resetPasswordToken" varchar, "resetPasswordTokenExpiration" integer DEFAULT NULL, "personalizationAnswers" text, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "globalRoleId" integer NOT NULL, CONSTRAINT "FK_${tablePrefix}f0609be844f9200ff4365b1bb3d" FOREIGN KEY ("globalRoleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`,
		);
		await queryRunner.query(
			`CREATE UNIQUE INDEX "UQ_${tablePrefix}e12875dfb3b1d92d7d7c5377e2" ON "${tablePrefix}user" ("email")`,
		);

		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}shared_workflow" ("createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "roleId" integer NOT NULL, "userId" varchar NOT NULL, "workflowId" integer NOT NULL, CONSTRAINT "FK_${tablePrefix}3540da03964527aa24ae014b780" FOREIGN KEY ("roleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION, CONSTRAINT "FK_${tablePrefix}82b2fd9ec4e3e24209af8160282" FOREIGN KEY ("userId") REFERENCES "${tablePrefix}user" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, CONSTRAINT "FK_${tablePrefix}b83f8d2530884b66a9c848c8b88" FOREIGN KEY ("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("userId", "workflowId"))`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}65a0933c0f19d278881653bf81d35064" ON "${tablePrefix}shared_workflow" ("workflowId")`,
		);

		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}shared_credentials" ("createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "roleId" integer NOT NULL, "userId" varchar NOT NULL, "credentialsId" integer NOT NULL, CONSTRAINT "FK_${tablePrefix}c68e056637562000b68f480815a" FOREIGN KEY ("roleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION, CONSTRAINT "FK_${tablePrefix}484f0327e778648dd04f1d70493" FOREIGN KEY ("userId") REFERENCES "${tablePrefix}user" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, CONSTRAINT "FK_${tablePrefix}68661def1d4bcf2451ac8dbd949" FOREIGN KEY ("credentialsId") REFERENCES "${tablePrefix}credentials_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("userId", "credentialsId"))`,
		);

		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}829d16efa0e265cb076d50eca8d21733" ON "${tablePrefix}shared_credentials" ("credentialsId")`,
		);

		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}settings" ("key" TEXT NOT NULL,"value" TEXT NOT NULL DEFAULT \'\',"loadOnStartup" boolean NOT NULL default false,PRIMARY KEY("key"))`,
		);

		await queryRunner.query(`DROP INDEX IF EXISTS "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9"`);

		// Insert initial roles
		await queryRunner.query(`
			INSERT INTO "${tablePrefix}role" (name, scope)
			VALUES ("owner", "global");
		`);

		const instanceOwnerRole = await queryRunner.query('SELECT last_insert_rowid() as insertId');

		await queryRunner.query(`
			INSERT INTO "${tablePrefix}role" (name, scope)
			VALUES ("member", "global");
		`);

		await queryRunner.query(`
			INSERT INTO "${tablePrefix}role" (name, scope)
			VALUES ("owner", "workflow");
		`);

		const workflowOwnerRole = await queryRunner.query('SELECT last_insert_rowid() as insertId');

		await queryRunner.query(`
			INSERT INTO "${tablePrefix}role" (name, scope)
			VALUES ("owner", "credential");
		`);

		const credentialOwnerRole = await queryRunner.query('SELECT last_insert_rowid() as insertId');

		const survey = loadSurveyFromDisk();

		const ownerUserId = uuid();
		await queryRunner.query(
			`
			INSERT INTO "${tablePrefix}user" (id, globalRoleId, personalizationAnswers) values
			(?, ?, ?)
		`,
			[ownerUserId, instanceOwnerRole[0].insertId, survey],
		);

		await queryRunner.query(`
			INSERT INTO "${tablePrefix}shared_workflow" (createdAt, updatedAt, roleId, userId, workflowId)
			select DATETIME('now'), DATETIME('now'), '${workflowOwnerRole[0].insertId}', '${ownerUserId}', id from "${tablePrefix}workflow_entity"
		`);

		await queryRunner.query(`
			INSERT INTO "${tablePrefix}shared_credentials" (createdAt, updatedAt, roleId, userId, credentialsId)
			select DATETIME('now'), DATETIME('now'), '${credentialOwnerRole[0].insertId}', '${ownerUserId}', id from "${tablePrefix}credentials_entity"
		`);

		await queryRunner.query(`
			INSERT INTO "${tablePrefix}settings" (key, value, loadOnStartup) values
			('userManagement.isInstanceOwnerSetUp', 'false', true), ('userManagement.skipInstanceOwnerSetup', 'false', true)
		`);

		logMigrationEnd(this.name);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');
		await queryRunner.query(
			`CREATE UNIQUE INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9" ON "${tablePrefix}workflow_entity" ("name") `,
		);

		await queryRunner.query(`DROP TABLE "${tablePrefix}shared_credentials"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}shared_workflow"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}user"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}role"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}settings"`);
	}
}
@@ -0,0 +1,22 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import config = require('../../../../config');
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class LowerCaseUserEmail1648740597343 implements MigrationInterface {
	name = 'LowerCaseUserEmail1648740597343';

	public async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');

		await queryRunner.query(`
			UPDATE "${tablePrefix}user"
			SET email = LOWER(email);
		`);

		logMigrationEnd(this.name);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {}
}
@@ -0,0 +1,52 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class AddUserSettings1652367743993 implements MigrationInterface {
	name = 'AddUserSettings1652367743993';

	public async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query('PRAGMA foreign_keys=OFF');

		await queryRunner.query(
			`CREATE TABLE "temporary_user" ("id" varchar PRIMARY KEY NOT NULL, "email" varchar(255), "firstName" varchar(32), "lastName" varchar(32), "password" varchar, "resetPasswordToken" varchar, "resetPasswordTokenExpiration" integer DEFAULT NULL, "personalizationAnswers" text, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "globalRoleId" integer NOT NULL, "settings" text, CONSTRAINT "FK_${tablePrefix}f0609be844f9200ff4365b1bb3d" FOREIGN KEY ("globalRoleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`,
		);
		await queryRunner.query(
			`INSERT INTO "temporary_user"("id", "email", "firstName", "lastName", "password", "resetPasswordToken", "resetPasswordTokenExpiration", "personalizationAnswers", "createdAt", "updatedAt", "globalRoleId") SELECT "id", "email", "firstName", "lastName", "password", "resetPasswordToken", "resetPasswordTokenExpiration", "personalizationAnswers", "createdAt", "updatedAt", "globalRoleId" FROM "${tablePrefix}user"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}user"`);
		await queryRunner.query(`ALTER TABLE "temporary_user" RENAME TO "${tablePrefix}user"`);

		await queryRunner.query(
			`CREATE UNIQUE INDEX "UQ_${tablePrefix}e12875dfb3b1d92d7d7c5377e2" ON "${tablePrefix}user" ("email")`,
		);

		await queryRunner.query('PRAGMA foreign_keys=ON');

		logMigrationEnd(this.name);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query('PRAGMA foreign_keys=OFF');

		await queryRunner.query(`ALTER TABLE "${tablePrefix}user" RENAME TO "temporary_user"`);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}user" ("id" varchar PRIMARY KEY NOT NULL, "email" varchar(255), "firstName" varchar(32), "lastName" varchar(32), "password" varchar, "resetPasswordToken" varchar, "resetPasswordTokenExpiration" integer DEFAULT NULL, "personalizationAnswers" text, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "globalRoleId" integer NOT NULL, CONSTRAINT "FK_${tablePrefix}f0609be844f9200ff4365b1bb3d" FOREIGN KEY ("globalRoleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}user"("id", "email", "firstName", "lastName", "password", "resetPasswordToken", "resetPasswordTokenExpiration", "personalizationAnswers", "createdAt", "updatedAt", "globalRoleId") SELECT "id", "email", "firstName", "lastName", "password", "resetPasswordToken", "resetPasswordTokenExpiration", "personalizationAnswers", "createdAt", "updatedAt", "globalRoleId" FROM "temporary_user"`,
		);
		await queryRunner.query(`DROP TABLE "temporary_user"`);
		await queryRunner.query(
			`CREATE UNIQUE INDEX "UQ_${tablePrefix}e12875dfb3b1d92d7d7c5377e2" ON "${tablePrefix}user" ("email")`,
		);

		await queryRunner.query('PRAGMA foreign_keys=ON');
	}
}
@@ -0,0 +1,54 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class AddAPIKeyColumn1652905585850 implements MigrationInterface {
	name = 'AddAPIKeyColumn1652905585850';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query('PRAGMA foreign_keys=OFF');

		await queryRunner.query(
			`CREATE TABLE "temporary_user" ("id" varchar PRIMARY KEY NOT NULL, "email" varchar(255), "firstName" varchar(32), "lastName" varchar(32), "password" varchar, "resetPasswordToken" varchar, "resetPasswordTokenExpiration" integer DEFAULT NULL, "personalizationAnswers" text, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "globalRoleId" integer NOT NULL, "settings" text, "apiKey" varchar, CONSTRAINT "FK_${tablePrefix}f0609be844f9200ff4365b1bb3d" FOREIGN KEY ("globalRoleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`,
		);
		await queryRunner.query(
			`INSERT INTO "temporary_user"("id", "email", "firstName", "lastName", "password", "resetPasswordToken", "resetPasswordTokenExpiration", "personalizationAnswers", "createdAt", "updatedAt", "globalRoleId", "settings") SELECT "id", "email", "firstName", "lastName", "password", "resetPasswordToken", "resetPasswordTokenExpiration", "personalizationAnswers", "createdAt", "updatedAt", "globalRoleId", "settings" FROM "${tablePrefix}user"`,
		);
		await queryRunner.query(`DROP TABLE "${tablePrefix}user"`);
		await queryRunner.query(`ALTER TABLE "temporary_user" RENAME TO "${tablePrefix}user"`);

		await queryRunner.query(
			`CREATE UNIQUE INDEX "UQ_${tablePrefix}e12875dfb3b1d92d7d7c5377e2" ON "${tablePrefix}user" ("email")`,
		);
		await queryRunner.query(
			`CREATE UNIQUE INDEX "UQ_${tablePrefix}ie0zomxves9w3p774drfrkxtj5" ON "${tablePrefix}user" ("apiKey")`,
		);

		await queryRunner.query('PRAGMA foreign_keys=ON');

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query('PRAGMA foreign_keys=OFF');

		await queryRunner.query(`ALTER TABLE "${tablePrefix}user" RENAME TO "temporary_user"`);
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}user" ("id" varchar PRIMARY KEY NOT NULL, "email" varchar(255), "firstName" varchar(32), "lastName" varchar(32), "password" varchar, "resetPasswordToken" varchar, "resetPasswordTokenExpiration" integer DEFAULT NULL, "personalizationAnswers" text, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "globalRoleId" integer NOT NULL, "settings" text, CONSTRAINT "FK_${tablePrefix}f0609be844f9200ff4365b1bb3d" FOREIGN KEY ("globalRoleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`,
		);
		await queryRunner.query(
			`INSERT INTO "${tablePrefix}user"("id", "email", "firstName", "lastName", "password", "resetPasswordToken", "resetPasswordTokenExpiration", "personalizationAnswers", "createdAt", "updatedAt", "globalRoleId", "settings") SELECT "id", "email", "firstName", "lastName", "password", "resetPasswordToken", "resetPasswordTokenExpiration", "personalizationAnswers", "createdAt", "updatedAt", "globalRoleId", "settings" FROM "temporary_user"`,
		);
		await queryRunner.query(`DROP TABLE "temporary_user"`);
		await queryRunner.query(
			`CREATE UNIQUE INDEX "UQ_${tablePrefix}e12875dfb3b1d92d7d7c5377e2" ON "${tablePrefix}user" ("email")`,
		);

		await queryRunner.query('PRAGMA foreign_keys=ON');
	}
}
packages/cli/src/databases/migrations/sqlite/index.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import { InitialMigration1588102412422 } from './1588102412422-InitialMigration';
import { WebhookModel1592445003908 } from './1592445003908-WebhookModel';
import { CreateIndexStoppedAt1594825041918 } from './1594825041918-CreateIndexStoppedAt';
import { AddWebhookId1611071044839 } from './1611071044839-AddWebhookId';
import { MakeStoppedAtNullable1607431743769 } from './1607431743769-MakeStoppedAtNullable';
import { CreateTagEntity1617213344594 } from './1617213344594-CreateTagEntity';
import { UniqueWorkflowNames1620821879465 } from './1620821879465-UniqueWorkflowNames';
import { AddWaitColumn1621707690587 } from './1621707690587-AddWaitColumn';
import { UpdateWorkflowCredentials1630330987096 } from './1630330987096-UpdateWorkflowCredentials';
import { AddExecutionEntityIndexes1644421939510 } from './1644421939510-AddExecutionEntityIndexes';
import { CreateUserManagement1646992772331 } from './1646992772331-CreateUserManagement';
import { LowerCaseUserEmail1648740597343 } from './1648740597343-LowerCaseUserEmail';
import { AddUserSettings1652367743993 } from './1652367743993-AddUserSettings';
import { AddAPIKeyColumn1652905585850 } from './1652905585850-AddAPIKeyColumn';

const sqliteMigrations = [
	InitialMigration1588102412422,
	WebhookModel1592445003908,
	CreateIndexStoppedAt1594825041918,
	AddWebhookId1611071044839,
	MakeStoppedAtNullable1607431743769,
	CreateTagEntity1617213344594,
	UniqueWorkflowNames1620821879465,
	AddWaitColumn1621707690587,
	UpdateWorkflowCredentials1630330987096,
	AddExecutionEntityIndexes1644421939510,
	CreateUserManagement1646992772331,
	LowerCaseUserEmail1648740597343,
	AddUserSettings1652367743993,
	AddAPIKeyColumn1652905585850,
];

export { sqliteMigrations };
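A minimal sketch of how such an aggregated array is typically consumed; the actual wiring lives elsewhere in the package and is not part of this diff, so the file path and options below are illustrative assumptions only.

import { ConnectionOptions } from 'typeorm';
import { sqliteMigrations } from './databases/migrations/sqlite';

// Sketch only: pass the aggregated migrations to the SQLite connection options
// so TypeORM runs them in timestamp order on startup.
const connectionOptions: ConnectionOptions = {
	type: 'sqlite',
	database: 'database.sqlite',
	migrations: sqliteMigrations,
	migrationsRun: true,
};

export { connectionOptions };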