refactor(core): Improve DB directory setup (#3502)
@@ -0,0 +1,43 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

import * as config from '../../../../config';

export class InitialMigration1587669153312 implements MigrationInterface {
	name = 'InitialMigration1587669153312';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixIndex = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}credentials_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "data" text NOT NULL, "type" character varying(32) NOT NULL, "nodesAccess" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, CONSTRAINT PK_${tablePrefixIndex}814c3d3c36e8a27fa8edb761b0e PRIMARY KEY ("id"))`, undefined);
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixIndex}07fde106c0b471d8cc80a64fc8 ON ${tablePrefix}credentials_entity (type) `, undefined);
		await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}execution_entity ("id" SERIAL NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" character varying NOT NULL, "retryOf" character varying, "retrySuccessId" character varying, "startedAt" TIMESTAMP NOT NULL, "stoppedAt" TIMESTAMP NOT NULL, "workflowData" json NOT NULL, "workflowId" character varying, CONSTRAINT PK_${tablePrefixIndex}e3e63bbf986767844bbe1166d4e PRIMARY KEY ("id"))`, undefined);
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixIndex}c4d999a5e90784e8caccf5589d ON ${tablePrefix}execution_entity ("workflowId") `, undefined);
		await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}workflow_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "active" boolean NOT NULL, "nodes" json NOT NULL, "connections" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, "settings" json, "staticData" json, CONSTRAINT PK_${tablePrefixIndex}eded7d72664448da7745d551207 PRIMARY KEY ("id"))`, undefined);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixIndex = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`DROP TABLE ${tablePrefix}workflow_entity`, undefined);
		await queryRunner.query(`DROP INDEX IDX_${tablePrefixIndex}c4d999a5e90784e8caccf5589d`, undefined);
		await queryRunner.query(`DROP TABLE ${tablePrefix}execution_entity`, undefined);
		await queryRunner.query(`DROP INDEX IDX_${tablePrefixIndex}07fde106c0b471d8cc80a64fc8`, undefined);
		await queryRunner.query(`DROP TABLE ${tablePrefix}credentials_entity`, undefined);
	}
}
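Every migration in this commit repeats the same preamble: read `database.tablePrefix`, keep an unqualified copy (named `tablePrefixIndex` above, `tablePrefixPure` in later files) for index and constraint names, and schema-qualify the prefix for table references. A minimal sketch of that convention, with an illustrative helper that is not part of the codebase:

import * as config from '../../../../config';

// Illustrative only -- the migrations in this commit inline this logic.
function getPrefixes(): { tablePrefix: string; tablePrefixPure: string } {
	// Unqualified prefix, e.g. 'n8n_': used inside index/constraint names,
	// which must not carry a schema qualifier.
	const tablePrefixPure: string = config.getEnv('database.tablePrefix');
	const schema: string = config.getEnv('database.postgresdb.schema');
	// Qualified prefix, e.g. 'public.n8n_': used wherever a table is referenced.
	const tablePrefix = schema ? `${schema}.${tablePrefixPure}` : tablePrefixPure;
	return { tablePrefix, tablePrefixPure };
}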
@@ -0,0 +1,34 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

import * as config from '../../../../config';

export class WebhookModel1589476000887 implements MigrationInterface {
	name = 'WebhookModel1589476000887';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixIndex = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" character varying NOT NULL, "method" character varying NOT NULL, "node" character varying NOT NULL, CONSTRAINT "PK_${tablePrefixIndex}b21ace2e13596ccd87dc9bf4ea6" PRIMARY KEY ("webhookPath", "method"))`, undefined);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}
		await queryRunner.query(`SET search_path TO ${schema};`);
		await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`, undefined);
	}
}
@@ -0,0 +1,33 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

import * as config from '../../../../config';

export class CreateIndexStoppedAt1594828256133 implements MigrationInterface {
	name = 'CreateIndexStoppedAt1594828256133';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}33228da131bb1112247cf52a42 ON ${tablePrefix}execution_entity ("stoppedAt") `);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}
		await queryRunner.query(`SET search_path TO ${schema};`);
		await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}33228da131bb1112247cf52a42`);
	}
}
@@ -0,0 +1,24 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

import * as config from '../../../../config';

export class MakeStoppedAtNullable1607431743768 implements MigrationInterface {
	name = 'MakeStoppedAtNullable1607431743768';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query('ALTER TABLE ' + tablePrefix + 'execution_entity ALTER COLUMN "stoppedAt" DROP NOT NULL', undefined);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		// Cannot be undone, as the column might already contain null values
	}
}
@@ -0,0 +1,36 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';

export class AddWebhookId1611144599516 implements MigrationInterface {
	name = 'AddWebhookId1611144599516';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity ADD "webhookId" character varying`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity ADD "pathLength" integer`);
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}16f4436789e804e3e1c9eeb240 ON ${tablePrefix}webhook_entity ("webhookId", "method", "pathLength") `);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}
		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}16f4436789e804e3e1c9eeb240`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "pathLength"`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "webhookId"`);
	}
}
@@ -0,0 +1,80 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';

export class CreateTagEntity1617270242566 implements MigrationInterface {
	name = 'CreateTagEntity1617270242566';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		// create tags table + relationship with workflow entity

		await queryRunner.query(`CREATE TABLE ${tablePrefix}tag_entity ("id" SERIAL NOT NULL, "name" character varying(24) NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, CONSTRAINT "PK_${tablePrefixPure}7a50a9b74ae6855c0dcaee25052" PRIMARY KEY ("id"))`);
		await queryRunner.query(`CREATE UNIQUE INDEX IDX_${tablePrefixPure}812eb05f7451ca757fb98444ce ON ${tablePrefix}tag_entity ("name") `);

		await queryRunner.query(`CREATE TABLE ${tablePrefix}workflows_tags ("workflowId" integer NOT NULL, "tagId" integer NOT NULL, CONSTRAINT "PK_${tablePrefixPure}a60448a90e51a114e95e2a125b3" PRIMARY KEY ("workflowId", "tagId"))`);
		await queryRunner.query(`CREATE INDEX IDX_${tablePrefixPure}31140eb41f019805b40d008744 ON ${tablePrefix}workflows_tags ("workflowId") `);
		await queryRunner.query(`CREATE INDEX IDX_${tablePrefixPure}5e29bfe9e22c5d6567f509d4a4 ON ${tablePrefix}workflows_tags ("tagId") `);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT "FK_${tablePrefixPure}31140eb41f019805b40d0087449" FOREIGN KEY ("workflowId") REFERENCES ${tablePrefix}workflow_entity ("id") ON DELETE CASCADE ON UPDATE NO ACTION`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT "FK_${tablePrefixPure}5e29bfe9e22c5d6567f509d4a46" FOREIGN KEY ("tagId") REFERENCES ${tablePrefix}tag_entity ("id") ON DELETE CASCADE ON UPDATE NO ACTION`);

		// set default dates for `createdAt` and `updatedAt`

		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "createdAt" TYPE TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "createdAt" SET DEFAULT CURRENT_TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "updatedAt" TYPE TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "updatedAt" SET DEFAULT CURRENT_TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN "createdAt" TYPE TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN "createdAt" SET DEFAULT CURRENT_TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN "updatedAt" TYPE TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN "updatedAt" SET DEFAULT CURRENT_TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN "createdAt" TYPE TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN "createdAt" SET DEFAULT CURRENT_TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN "updatedAt" TYPE TIMESTAMP(3)`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN "updatedAt" SET DEFAULT CURRENT_TIMESTAMP(3)`);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		// `createdAt` and `updatedAt`

		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN "updatedAt" DROP DEFAULT`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN "updatedAt" TYPE TIMESTAMP`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN "createdAt" DROP DEFAULT`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN "createdAt" TYPE TIMESTAMP`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN "updatedAt" DROP DEFAULT`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN "updatedAt" TYPE TIMESTAMP`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN "createdAt" DROP DEFAULT`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN "createdAt" TYPE TIMESTAMP`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "updatedAt" DROP DEFAULT`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "updatedAt" TYPE TIMESTAMP`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "createdAt" DROP DEFAULT`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "createdAt" TYPE TIMESTAMP`);

		// tags

		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT "FK_${tablePrefixPure}5e29bfe9e22c5d6567f509d4a46"`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT "FK_${tablePrefixPure}31140eb41f019805b40d0087449"`);
		await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}5e29bfe9e22c5d6567f509d4a4`);
		await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}31140eb41f019805b40d008744`);
		await queryRunner.query(`DROP TABLE ${tablePrefix}workflows_tags`);
		await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}812eb05f7451ca757fb98444ce`);
		await queryRunner.query(`DROP TABLE ${tablePrefix}tag_entity`);
	}
}
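After this migration, `createdAt` and `updatedAt` on the altered tables carry a `CURRENT_TIMESTAMP(3)` default, so inserts may omit them. A hypothetical usage sketch (the helper and tag name are illustrative, not part of the codebase):

import { QueryRunner } from 'typeorm';

// Both timestamp columns fall back to CURRENT_TIMESTAMP(3) via the new defaults.
async function createTag(queryRunner: QueryRunner, tablePrefix: string, name: string): Promise<void> {
	await queryRunner.query(`INSERT INTO ${tablePrefix}tag_entity (name) VALUES ($1)`, [name]);
}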
@@ -0,0 +1,74 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';

export class UniqueWorkflowNames1620824779533 implements MigrationInterface {
	name = 'UniqueWorkflowNames1620824779533';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		const workflowNames = await queryRunner.query(`
			SELECT name
			FROM ${tablePrefix}workflow_entity
		`);

		for (const { name } of workflowNames) {
			const [duplicatesQuery, parameters] = queryRunner.connection.driver.escapeQueryWithParameters(
				`
					SELECT id, name
					FROM ${tablePrefix}workflow_entity
					WHERE name = :name
					ORDER BY "createdAt" ASC
				`,
				{ name },
				{},
			);

			const duplicates = await queryRunner.query(duplicatesQuery, parameters);

			if (duplicates.length > 1) {
				await Promise.all(
					duplicates.map(({ id, name }: { id: number; name: string }, index: number) => {
						if (index === 0) return Promise.resolve();
						const [updateQuery, updateParams] =
							queryRunner.connection.driver.escapeQueryWithParameters(
								`
									UPDATE ${tablePrefix}workflow_entity
									SET name = :name
									WHERE id = '${id}'
								`,
								{ name: `${name} ${index + 1}` },
								{},
							);

						return queryRunner.query(updateQuery, updateParams);
					}),
				);
			}
		}

		await queryRunner.query(
			`CREATE UNIQUE INDEX "IDX_${tablePrefixPure}a252c527c4c89237221fe2c0ab" ON ${tablePrefix}workflow_entity ("name") `,
		);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`DROP INDEX "IDX_${tablePrefixPure}a252c527c4c89237221fe2c0ab"`);
	}
}
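The renaming rule in `up` is easiest to see on plain data: within each group of duplicates, ordered by `createdAt`, the oldest row keeps its name and the rest get a numeric suffix. A standalone sketch of that rule (the function name and types are illustrative):

// Rows are assumed pre-sorted by "createdAt" ASC, as in the migration's query.
function dedupeNames(rows: Array<{ id: number; name: string }>): Array<{ id: number; name: string }> {
	// Index 0 keeps its name; later duplicates become "name 2", "name 3", ...
	return rows.map((row, index) => (index === 0 ? row : { ...row, name: `${row.name} ${index + 1}` }));
}

// Example: three workflows named 'My workflow' end up as
// 'My workflow', 'My workflow 2', 'My workflow 3'.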
@@ -0,0 +1,35 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';

export class AddwaitTill1626176912946 implements MigrationInterface {
	name = 'AddwaitTill1626176912946';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`ALTER TABLE ${tablePrefix}execution_entity ADD "waitTill" TIMESTAMP`);
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2 ON ${tablePrefix}execution_entity ("waitTill")`);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2`);
		// "waitTill" lives on execution_entity (added in up), so it is dropped from that table.
		await queryRunner.query(`ALTER TABLE ${tablePrefix}execution_entity DROP COLUMN "waitTill"`);
	}
}
@@ -0,0 +1,315 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
import { runChunked } from '../../utils/migrationHelpers';

// replacing the credentials in workflows and executions:
// `nodeType: name` changes to `nodeType: { id, name }`

export class UpdateWorkflowCredentials1630419189837 implements MigrationInterface {
	name = 'UpdateWorkflowCredentials1630419189837';

	public async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		const credentialsEntities = await queryRunner.query(`
			SELECT id, name, type
			FROM ${tablePrefix}credentials_entity
		`);

		const workflowsQuery = `
			SELECT id, nodes
			FROM ${tablePrefix}workflow_entity
		`;

		// @ts-ignore
		await runChunked(queryRunner, workflowsQuery, (workflows) => {
			workflows.forEach(async (workflow) => {
				const nodes = workflow.nodes;
				let credentialsUpdated = false;
				// @ts-ignore
				nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, name] of allNodeCredentials) {
							if (typeof name === 'string') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.name === name && credentials.type === type,
								);
								node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name };
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
								UPDATE ${tablePrefix}workflow_entity
								SET nodes = :nodes
								WHERE id = '${workflow.id}'
							`,
							{ nodes: JSON.stringify(nodes) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const waitingExecutionsQuery = `
			SELECT id, "workflowData"
			FROM ${tablePrefix}execution_entity
			WHERE "waitTill" IS NOT NULL AND finished = FALSE
		`;
		// @ts-ignore
		await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
			waitingExecutions.forEach(async (execution) => {
				const data = execution.workflowData;
				let credentialsUpdated = false;
				// @ts-ignore
				data.nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, name] of allNodeCredentials) {
							if (typeof name === 'string') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.name === name && credentials.type === type,
								);
								node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name };
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
								UPDATE ${tablePrefix}execution_entity
								SET "workflowData" = :data
								WHERE id = '${execution.id}'
							`,
							{ data: JSON.stringify(data) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const retryableExecutions = await queryRunner.query(`
			SELECT id, "workflowData"
			FROM ${tablePrefix}execution_entity
			WHERE "waitTill" IS NULL AND finished = FALSE AND mode != 'retry'
			ORDER BY "startedAt" DESC
			LIMIT 200
		`);

		// @ts-ignore
		retryableExecutions.forEach(async (execution) => {
			const data = execution.workflowData;
			let credentialsUpdated = false;
			// @ts-ignore
			data.nodes.forEach((node) => {
				if (node.credentials) {
					const allNodeCredentials = Object.entries(node.credentials);
					for (const [type, name] of allNodeCredentials) {
						if (typeof name === 'string') {
							// @ts-ignore
							const matchingCredentials = credentialsEntities.find(
								// @ts-ignore
								(credentials) => credentials.name === name && credentials.type === type,
							);
							node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name };
							credentialsUpdated = true;
						}
					}
				}
			});
			if (credentialsUpdated) {
				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
						UPDATE ${tablePrefix}execution_entity
						SET "workflowData" = :data
						WHERE id = '${execution.id}'
					`,
					{ data: JSON.stringify(data) },
					{},
				);

				await queryRunner.query(updateQuery, updateParams);
			}
		});
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}
		await queryRunner.query(`SET search_path TO ${schema};`);

		const credentialsEntities = await queryRunner.query(`
			SELECT id, name, type
			FROM ${tablePrefix}credentials_entity
		`);

		const workflowsQuery = `
			SELECT id, nodes
			FROM ${tablePrefix}workflow_entity
		`;
		// @ts-ignore
		await runChunked(queryRunner, workflowsQuery, (workflows) => {
			workflows.forEach(async (workflow) => {
				const nodes = workflow.nodes;
				let credentialsUpdated = false;
				// @ts-ignore
				nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, creds] of allNodeCredentials) {
							if (typeof creds === 'object') {
								// @ts-ignore
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.id === creds.id && credentials.type === type,
								);
								if (matchingCredentials) {
									node.credentials[type] = matchingCredentials.name;
								} else {
									// @ts-ignore
									node.credentials[type] = creds.name;
								}
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
								UPDATE ${tablePrefix}workflow_entity
								SET nodes = :nodes
								WHERE id = '${workflow.id}'
							`,
							{ nodes: JSON.stringify(nodes) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const waitingExecutionsQuery = `
			SELECT id, "workflowData"
			FROM ${tablePrefix}execution_entity
			WHERE "waitTill" IS NOT NULL AND finished = FALSE
		`;
		// @ts-ignore
		await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
			waitingExecutions.forEach(async (execution) => {
				const data = execution.workflowData;
				let credentialsUpdated = false;
				// @ts-ignore
				data.nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, creds] of allNodeCredentials) {
							if (typeof creds === 'object') {
								// @ts-ignore
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.id === creds.id && credentials.type === type,
								);
								if (matchingCredentials) {
									node.credentials[type] = matchingCredentials.name;
								} else {
									// @ts-ignore
									node.credentials[type] = creds.name;
								}
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
								UPDATE ${tablePrefix}execution_entity
								SET "workflowData" = :data
								WHERE id = '${execution.id}'
							`,
							{ data: JSON.stringify(data) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const retryableExecutions = await queryRunner.query(`
			SELECT id, "workflowData"
			FROM ${tablePrefix}execution_entity
			WHERE "waitTill" IS NULL AND finished = FALSE AND mode != 'retry'
			ORDER BY "startedAt" DESC
			LIMIT 200
		`);
		// @ts-ignore
		retryableExecutions.forEach(async (execution) => {
			const data = execution.workflowData;
			let credentialsUpdated = false;
			// @ts-ignore
			data.nodes.forEach((node) => {
				if (node.credentials) {
					const allNodeCredentials = Object.entries(node.credentials);
					for (const [type, creds] of allNodeCredentials) {
						if (typeof creds === 'object') {
							// @ts-ignore
							const matchingCredentials = credentialsEntities.find(
								// @ts-ignore
								(credentials) => credentials.id === creds.id && credentials.type === type,
							);
							if (matchingCredentials) {
								node.credentials[type] = matchingCredentials.name;
							} else {
								// @ts-ignore
								node.credentials[type] = creds.name;
							}
							credentialsUpdated = true;
						}
					}
				}
			});
			if (credentialsUpdated) {
				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
						UPDATE ${tablePrefix}execution_entity
						SET "workflowData" = :data
						WHERE id = '${execution.id}'
					`,
					{ data: JSON.stringify(data) },
					{},
				);

				await queryRunner.query(updateQuery, updateParams);
			}
		});
	}
}
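As the comment at the top of the file says, each node's `credentials` entry changes shape from a bare name string to an `{ id, name }` object, with `id` looked up by name and type in `credentials_entity` (or `null` when nothing matches). A data-only sketch of the forward transformation (the type and function names are illustrative):

type OldNodeCredentials = Record<string, string>;
type NewNodeCredentials = Record<string, { id: string | null; name: string }>;

// Before: { httpBasicAuth: 'My basic auth' }
// After:  { httpBasicAuth: { id: '12', name: 'My basic auth' } }
function upgradeCredentials(
	creds: OldNodeCredentials,
	lookup: Array<{ id: number; name: string; type: string }>,
): NewNodeCredentials {
	const result: NewNodeCredentials = {};
	for (const [type, name] of Object.entries(creds)) {
		const match = lookup.find((c) => c.name === name && c.type === type);
		result[type] = { id: match ? match.id.toString() : null, name };
	}
	return result;
}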
@@ -0,0 +1,78 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';

export class AddExecutionEntityIndexes1644422880309 implements MigrationInterface {
	name = 'AddExecutionEntityIndexes1644422880309';

	public async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');

		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(
			`DROP INDEX IF EXISTS "${schema}".IDX_${tablePrefixPure}c4d999a5e90784e8caccf5589d`,
		);
		await queryRunner.query(
			`DROP INDEX IF EXISTS "${schema}".IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2`,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}33228da131bb1112247cf52a42" ON ${tablePrefix}execution_entity ("stoppedAt") `,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}58154df94c686818c99fb754ce" ON ${tablePrefix}execution_entity ("workflowId", "waitTill", "id") `,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}4f474ac92be81610439aaad61e" ON ${tablePrefix}execution_entity ("workflowId", "finished", "id") `,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}72ffaaab9f04c2c1f1ea86e662" ON ${tablePrefix}execution_entity ("finished", "id") `,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}85b981df7b444f905f8bf50747" ON ${tablePrefix}execution_entity ("waitTill", "id") `,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}d160d4771aba5a0d78943edbe3" ON ${tablePrefix}execution_entity ("workflowId", "id") `,
		);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');

		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`DROP INDEX "IDX_${tablePrefixPure}d160d4771aba5a0d78943edbe3"`);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefixPure}85b981df7b444f905f8bf50747"`);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefixPure}72ffaaab9f04c2c1f1ea86e662"`);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefixPure}4f474ac92be81610439aaad61e"`);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefixPure}58154df94c686818c99fb754ce"`);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefixPure}33228da131bb1112247cf52a42"`);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2" ON ${tablePrefix}execution_entity ("waitTill") `,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefixPure}c4d999a5e90784e8caccf5589d" ON ${tablePrefix}execution_entity ("workflowId") `,
		);
	}
}
@@ -0,0 +1,24 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

import * as config from '../../../../config';

export class IncreaseTypeVarcharLimit1646834195327 implements MigrationInterface {
	name = 'IncreaseTypeVarcharLimit1646834195327';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "type" TYPE VARCHAR(128)`);
	}

	async down(queryRunner: QueryRunner): Promise<void> {}
}
@@ -0,0 +1,163 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import { v4 as uuid } from 'uuid';
import config from '../../../../config';
import { loadSurveyFromDisk } from '../../utils/migrationHelpers';

export class CreateUserManagement1646992772331 implements MigrationInterface {
	name = 'CreateUserManagement1646992772331';

	public async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(
			`CREATE TABLE ${tablePrefix}role (
				"id" serial NOT NULL,
				"name" VARCHAR(32) NOT NULL,
				"scope" VARCHAR(255) NOT NULL,
				"createdAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"updatedAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				CONSTRAINT "PK_${tablePrefixPure}e853ce24e8200abe5721d2c6ac552b73" PRIMARY KEY ("id"),
				CONSTRAINT "UQ_${tablePrefixPure}5b49d0f504f7ef31045a1fb2eb8" UNIQUE ("scope", "name")
			);`,
		);

		await queryRunner.query(
			`CREATE TABLE ${tablePrefix}user (
				"id" UUID NOT NULL DEFAULT uuid_in(overlay(overlay(md5(random()::text || ':' || clock_timestamp()::text) placing '4' from 13) placing to_hex(floor(random()*(11-8+1) + 8)::int)::text from 17)::cstring),
				"email" VARCHAR(255),
				"firstName" VARCHAR(32),
				"lastName" VARCHAR(32),
				"password" VARCHAR(255),
				"resetPasswordToken" VARCHAR(255),
				"resetPasswordTokenExpiration" int,
				"personalizationAnswers" text,
				"createdAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"updatedAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"globalRoleId" int NOT NULL,
				CONSTRAINT "PK_${tablePrefixPure}ea8f538c94b6e352418254ed6474a81f" PRIMARY KEY ("id"),
				CONSTRAINT "UQ_${tablePrefixPure}e12875dfb3b1d92d7d7c5377e2" UNIQUE (email),
				CONSTRAINT "FK_${tablePrefixPure}f0609be844f9200ff4365b1bb3d" FOREIGN KEY ("globalRoleId") REFERENCES ${tablePrefix}role (id)
			);`,
		);

		await queryRunner.query(
			`CREATE TABLE ${tablePrefix}shared_workflow (
				"createdAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"updatedAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"roleId" INT NOT NULL,
				"userId" UUID NOT NULL,
				"workflowId" INT NOT NULL,
				CONSTRAINT "PK_${tablePrefixPure}cc5d5a71c7b2591f5154ffb0c785e85e" PRIMARY KEY ("userId", "workflowId"),
				CONSTRAINT "FK_${tablePrefixPure}3540da03964527aa24ae014b780" FOREIGN KEY ("roleId") REFERENCES ${tablePrefix}role ("id") ON DELETE NO ACTION ON UPDATE NO ACTION,
				CONSTRAINT "FK_${tablePrefixPure}82b2fd9ec4e3e24209af8160282" FOREIGN KEY ("userId") REFERENCES ${tablePrefix}user ("id") ON DELETE CASCADE ON UPDATE NO ACTION,
				CONSTRAINT "FK_${tablePrefixPure}b83f8d2530884b66a9c848c8b88" FOREIGN KEY ("workflowId") REFERENCES ${tablePrefix}workflow_entity ("id") ON DELETE CASCADE ON UPDATE NO ACTION
			);`,
		);

		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefixPure}65a0933c0f19d278881653bf81d35064" ON ${tablePrefix}shared_workflow ("workflowId");`,
		);

		await queryRunner.query(
			`CREATE TABLE ${tablePrefix}shared_credentials (
				"createdAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"updatedAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"roleId" INT NOT NULL,
				"userId" UUID NOT NULL,
				"credentialsId" INT NOT NULL,
				CONSTRAINT "PK_${tablePrefixPure}10dd1527ffb639609be7aadd98f628c6" PRIMARY KEY ("userId", "credentialsId"),
				CONSTRAINT "FK_${tablePrefixPure}c68e056637562000b68f480815a" FOREIGN KEY ("roleId") REFERENCES ${tablePrefix}role ("id") ON DELETE NO ACTION ON UPDATE NO ACTION,
				CONSTRAINT "FK_${tablePrefixPure}484f0327e778648dd04f1d70493" FOREIGN KEY ("userId") REFERENCES ${tablePrefix}user ("id") ON DELETE CASCADE ON UPDATE NO ACTION,
				CONSTRAINT "FK_${tablePrefixPure}68661def1d4bcf2451ac8dbd949" FOREIGN KEY ("credentialsId") REFERENCES ${tablePrefix}credentials_entity ("id") ON DELETE CASCADE ON UPDATE NO ACTION
			);`,
		);

		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefixPure}829d16efa0e265cb076d50eca8d21733" ON ${tablePrefix}shared_credentials ("credentialsId");`,
		);

		await queryRunner.query(
			`CREATE TABLE ${tablePrefix}settings (
				"key" VARCHAR(255) NOT NULL,
				"value" TEXT NOT NULL,
				"loadOnStartup" boolean NOT NULL DEFAULT false,
				CONSTRAINT "PK_${tablePrefixPure}dc0fe14e6d9943f268e7b119f69ab8bd" PRIMARY KEY ("key")
			);`,
		);

		await queryRunner.query(`DROP INDEX "IDX_${tablePrefixPure}a252c527c4c89237221fe2c0ab"`);

		// Insert initial roles
		await queryRunner.query(
			`INSERT INTO ${tablePrefix}role (name, scope) VALUES ('owner', 'global');`,
		);

		const instanceOwnerRole = await queryRunner.query('SELECT lastval() as "insertId"');

		await queryRunner.query(
			`INSERT INTO ${tablePrefix}role (name, scope) VALUES ('member', 'global');`,
		);

		await queryRunner.query(
			`INSERT INTO ${tablePrefix}role (name, scope) VALUES ('owner', 'workflow');`,
		);

		const workflowOwnerRole = await queryRunner.query('SELECT lastval() as "insertId"');

		await queryRunner.query(
			`INSERT INTO ${tablePrefix}role (name, scope) VALUES ('owner', 'credential');`,
		);

		const credentialOwnerRole = await queryRunner.query('SELECT lastval() as "insertId"');

		const survey = loadSurveyFromDisk();

		const ownerUserId = uuid();

		await queryRunner.query(
			`INSERT INTO ${tablePrefix}user ("id", "globalRoleId", "personalizationAnswers") values ($1, $2, $3)`,
			[ownerUserId, instanceOwnerRole[0].insertId, survey],
		);

		await queryRunner.query(
			`INSERT INTO ${tablePrefix}shared_workflow ("createdAt", "updatedAt", "roleId", "userId", "workflowId") SELECT NOW(), NOW(), '${workflowOwnerRole[0].insertId}', '${ownerUserId}', "id" FROM ${tablePrefix}workflow_entity`,
		);

		await queryRunner.query(
			`INSERT INTO ${tablePrefix}shared_credentials ("createdAt", "updatedAt", "roleId", "userId", "credentialsId") SELECT NOW(), NOW(), '${credentialOwnerRole[0].insertId}', '${ownerUserId}', "id" FROM ${tablePrefix}credentials_entity`,
		);

		await queryRunner.query(
			`INSERT INTO ${tablePrefix}settings ("key", "value", "loadOnStartup") VALUES ('userManagement.isInstanceOwnerSetUp', 'false', true), ('userManagement.skipInstanceOwnerSetup', 'false', true)`,
		);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}
		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(
			`CREATE UNIQUE INDEX "IDX_${tablePrefixPure}a252c527c4c89237221fe2c0ab" ON ${tablePrefix}workflow_entity ("name")`,
		);

		await queryRunner.query(`DROP TABLE ${tablePrefix}shared_credentials`);
		await queryRunner.query(`DROP TABLE ${tablePrefix}shared_workflow`);
		await queryRunner.query(`DROP TABLE ${tablePrefix}user`);
		await queryRunner.query(`DROP TABLE ${tablePrefix}role`);
		await queryRunner.query(`DROP TABLE ${tablePrefix}settings`);
	}
}
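The role IDs above are captured with `SELECT lastval()` right after each `INSERT`, which returns the most recently generated sequence value on this connection; that is safe here because the statements run sequentially on one query runner. For comparison, a hypothetical variant using Postgres's `RETURNING`, which ties each id to its insert atomically (illustrative only, not what the migration does):

import { QueryRunner } from 'typeorm';

// Hypothetical helper: fetch the generated id in the same statement as the insert.
async function insertRole(
	queryRunner: QueryRunner,
	tablePrefix: string,
	name: string,
	scope: string,
): Promise<number> {
	const rows = await queryRunner.query(
		`INSERT INTO ${tablePrefix}role (name, scope) VALUES ($1, $2) RETURNING id`,
		[name, scope],
	);
	return rows[0].id;
}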
@@ -0,0 +1,23 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import config = require('../../../../config');

export class LowerCaseUserEmail1648740597343 implements MigrationInterface {
	name = 'LowerCaseUserEmail1648740597343';

	public async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		const schema = config.get('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`
			UPDATE ${tablePrefix}user
			SET email = LOWER(email);
		`);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {}
}
@@ -0,0 +1,33 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';

export class AddUserSettings1652367743993 implements MigrationInterface {
	name = 'AddUserSettings1652367743993';

	public async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`ALTER TABLE ${tablePrefix}user ADD COLUMN settings json`);

		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}user ALTER COLUMN "personalizationAnswers" TYPE json USING to_jsonb("personalizationAnswers")::json;`,
		);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}
		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`ALTER TABLE ${tablePrefix}user DROP COLUMN settings`);
	}
}
@@ -0,0 +1,28 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import config from '../../../../config';

export class AddAPIKeyColumn1652905585850 implements MigrationInterface {
	name = 'AddAPIKeyColumn1652905585850';

	public async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}
		await queryRunner.query(`ALTER TABLE ${tablePrefix}user ADD COLUMN "apiKey" VARCHAR(255)`);
		await queryRunner.query(
			`CREATE UNIQUE INDEX "UQ_${tablePrefix}ie0zomxves9w3p774drfrkxtj5" ON ${tablePrefix}user ("apiKey")`,
		);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}
		await queryRunner.query(`DROP INDEX "UQ_${tablePrefix}ie0zomxves9w3p774drfrkxtj5"`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}user DROP COLUMN "apiKey"`);
	}
}
packages/cli/src/databases/migrations/postgresdb/index.ts
@@ -0,0 +1,33 @@
import { InitialMigration1587669153312 } from './1587669153312-InitialMigration';
import { WebhookModel1589476000887 } from './1589476000887-WebhookModel';
import { CreateIndexStoppedAt1594828256133 } from './1594828256133-CreateIndexStoppedAt';
import { AddWebhookId1611144599516 } from './1611144599516-AddWebhookId';
import { MakeStoppedAtNullable1607431743768 } from './1607431743768-MakeStoppedAtNullable';
import { CreateTagEntity1617270242566 } from './1617270242566-CreateTagEntity';
import { UniqueWorkflowNames1620824779533 } from './1620824779533-UniqueWorkflowNames';
import { AddwaitTill1626176912946 } from './1626176912946-AddwaitTill';
import { UpdateWorkflowCredentials1630419189837 } from './1630419189837-UpdateWorkflowCredentials';
import { AddExecutionEntityIndexes1644422880309 } from './1644422880309-AddExecutionEntityIndexes';
import { IncreaseTypeVarcharLimit1646834195327 } from './1646834195327-IncreaseTypeVarcharLimit';
import { CreateUserManagement1646992772331 } from './1646992772331-CreateUserManagement';
import { LowerCaseUserEmail1648740597343 } from './1648740597343-LowerCaseUserEmail';
import { AddUserSettings1652367743993 } from './1652367743993-AddUserSettings';
import { AddAPIKeyColumn1652905585850 } from './1652905585850-AddAPIKeyColumn';

export const postgresMigrations = [
	InitialMigration1587669153312,
	WebhookModel1589476000887,
	CreateIndexStoppedAt1594828256133,
	AddWebhookId1611144599516,
	MakeStoppedAtNullable1607431743768,
	CreateTagEntity1617270242566,
	UniqueWorkflowNames1620824779533,
	AddwaitTill1626176912946,
	UpdateWorkflowCredentials1630419189837,
	AddExecutionEntityIndexes1644422880309,
	IncreaseTypeVarcharLimit1646834195327,
	CreateUserManagement1646992772331,
	LowerCaseUserEmail1648740597343,
	AddUserSettings1652367743993,
	AddAPIKeyColumn1652905585850,
];
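`postgresMigrations` is the array that gets handed to TypeORM when the Postgres connection is configured. A minimal sketch of how such an array is typically consumed, using TypeORM's DataSource API with placeholder connection values (n8n's actual wiring lives elsewhere in packages/cli):

import { DataSource } from 'typeorm';
import { postgresMigrations } from './databases/migrations/postgresdb';

// Placeholder connection values -- illustrative only.
const dataSource = new DataSource({
	type: 'postgres',
	host: 'localhost',
	port: 5432,
	database: 'n8n',
	migrations: postgresMigrations,
	migrationsRun: true, // run pending migrations when the connection initializes
});

dataSource.initialize().catch(console.error);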