feat: Migrate integer primary keys to nanoids (#6345)

* first commit for postgres migration

* (not working)

* sqlite migration

* quicksave

* fix tests

* fix pg test

* fix postgres

* fix variables import

* fix execution saving

* add user settings fix

* change migration to single lines

* patch preferences endpoint

* cleanup

* improve variable import

* cleanup unused code

* Update packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts

Co-authored-by: Omar Ajoue <krynble@gmail.com>

* address review notes

* fix var update/import

* refactor: Separate execution data into its own table (#6323)

* wip: Temporary migration process

* refactor: Create boilerplate repository methods for executions

* fix: Lint issues

* refactor: Added search endpoint to repository

* refactor: Make the execution list work again

* wip: Updating how we create and update executions everywhere

* fix: Lint issues and remove most of the direct access to execution model

* refactor: Remove includeWorkflowData flag and fix more tests

* fix: Lint issues

* fix: Fixed ordering of executions for FE, removed transaction when saving execution and removed unnecessary update

* refactor: Add comment about missing feature

* refactor: Refactor counting executions

* refactor: Add migration for other dbms and fix issues found

* refactor: Fix lint issues

* refactor: Remove unnecessary comment and auto inject repo to internal hooks

* refactor: remove type assertion

* fix: Fix broken tests

* fix: Remove unnecessary import

* Remove unnecessary toString() call

Co-authored-by: Iván Ovejero <ivov.src@gmail.com>

* fix: Address comments after review

* refactor: Remove unused import

* fix: Lint issues

* fix: Add correct migration files

---------

Co-authored-by: Iván Ovejero <ivov.src@gmail.com>

* remove null values from credential export

* fix: Fix an issue with queue mode where all running executions would be returned

* fix: Update n8n node to allow for workflow ids with letters

* set upstream on set branch

* remove typo

* add nodeAccess to credentials

* fix unsaved run check for undefined id

* fix(core): Rename version control feature to source control (#6480)

* rename versionControl to sourceControl

* fix source control tooltip wording

---------

Co-authored-by: Romain Minaud <romain.minaud@gmail.com>

* fix(editor): PAY-548 hide the set up version control button (#6485)

* feat(DebugHelper Node): Fix and include in main app (#6406)

* improve node a bit

* fixing continueOnFail() to contain error in json

* improve pairedItem

* fix random data returning object results

* fix nanoId length typo

* update pnpm-lock file

---------

Co-authored-by: Marcus <marcus@n8n.io>

* fix(editor): Remove setup source control CTA button

* fix(editor): Remove setup source control CTA button

---------

Co-authored-by: Michael Auerswald <michael.auerswald@gmail.com>
Co-authored-by: Marcus <marcus@n8n.io>

* fix(editor): Update source control docs links (#6488)

* feat(DebugHelper Node): Fix and include in main app (#6406)

* improve node a bit

* fixing continueOnFail() to contain error in json

* improve pairedItem

* fix random data returning object results

* fix nanoId length typo

* update pnpm-lock file

---------

Co-authored-by: Marcus <marcus@n8n.io>

* feat(editor): Replace root events with event bus events (no-changelog) (#6454)

* feat: replace root events with event bus events

* fix: prevent cypress from replacing global with globalThis in import path

* feat: remove emitter mixin

* fix: replace component events with event bus

* fix: fix linting issue

* fix: fix breaking expression switch

* chore: prettify ndv e2e suite code

* fix(editor): Update source control docs links

---------

Co-authored-by: Michael Auerswald <michael.auerswald@gmail.com>
Co-authored-by: Marcus <marcus@n8n.io>
Co-authored-by: Alex Grozav <alex@grozav.com>

* fix tag endpoint regex

---------

Co-authored-by: Omar Ajoue <krynble@gmail.com>
Co-authored-by: Iván Ovejero <ivov.src@gmail.com>
Co-authored-by: Romain Minaud <romain.minaud@gmail.com>
Co-authored-by: Csaba Tuncsik <csaba@n8n.io>
Co-authored-by: Marcus <marcus@n8n.io>
Co-authored-by: Alex Grozav <alex@grozav.com>
Author: Michael Auerswald
Date: 2023-06-20 19:13:18 +02:00
Committed by: GitHub
Parent: da330f0648
Commit: c3ba0123ad
156 changed files with 3499 additions and 2594 deletions

View File

@@ -102,8 +102,8 @@
"tsconfig-paths": "^4.1.2"
},
"dependencies": {
"@n8n_io/license-sdk": "~2.4.0",
"@n8n/client-oauth2": "workspace:*",
"@n8n_io/license-sdk": "~2.4.0",
"@oclif/command": "^1.8.16",
"@oclif/core": "^1.16.4",
"@oclif/errors": "^1.3.6",
@@ -152,6 +152,7 @@
"n8n-editor-ui": "workspace:*",
"n8n-nodes-base": "workspace:*",
"n8n-workflow": "workspace:*",
"nanoid": "^3.3.6",
"nodemailer": "^6.7.1",
"oauth-1.0a": "^2.2.6",
"open": "^7.0.0",

View File

@@ -11,19 +11,16 @@ import type {
import { createDeferredPromise, LoggerProxy } from 'n8n-workflow';
import type { ChildProcess } from 'child_process';
import { stringify } from 'flatted';
import type PCancelable from 'p-cancelable';
import * as Db from '@/Db';
import type {
IExecutingWorkflowData,
IExecutionDb,
IExecutionFlattedDb,
IExecutionsCurrentSummary,
IWorkflowExecutionDataProcess,
} from '@/Interfaces';
import * as ResponseHelper from '@/ResponseHelper';
import { isWorkflowIdValid } from '@/utils';
import { Service } from 'typedi';
import Container, { Service } from 'typedi';
import { ExecutionRepository } from './databases/repositories';
@Service()
export class ActiveExecutions {
@@ -61,15 +58,10 @@ export class ActiveExecutions {
fullExecutionData.workflowId = workflowId;
}
const execution = ResponseHelper.flattenExecutionData(fullExecutionData);
const executionResult = await Db.collections.Execution.save(execution as IExecutionFlattedDb);
// TODO: what is going on here?
executionId =
typeof executionResult.id === 'object'
? // @ts-ignore
executionResult.id!.toString()
: executionResult.id + '';
const executionResult = await Container.get(ExecutionRepository).createNewExecution(
fullExecutionData,
);
executionId = executionResult.id;
if (executionId === undefined) {
throw new Error('There was an issue assigning an execution id to the execution');
}
@@ -77,14 +69,14 @@ export class ActiveExecutions {
} else {
// Is an existing execution we want to finish so update in DB
const execution: Pick<IExecutionFlattedDb, 'id' | 'data' | 'waitTill' | 'status'> = {
const execution: Pick<IExecutionDb, 'id' | 'data' | 'waitTill' | 'status'> = {
id: executionId,
data: stringify(executionData.executionData!),
data: executionData.executionData!,
waitTill: null,
status: executionStatus,
};
await Db.collections.Execution.update(executionId, execution);
await Container.get(ExecutionRepository).updateExistingExecution(executionId, execution);
}
this.activeExecutions[executionId] = {
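
The call above replaces the flatten-then-save dance with a single repository method. A minimal sketch of what createNewExecution might do under the two-table split introduced in this PR (ExecutionEntity for metadata, ExecutionData for the serialized payload); the repository wiring and exact field handling are assumptions:

import { stringify } from 'flatted';
import type { Repository } from 'typeorm';

// Sketch only: persist the metadata row first, then the flatted run data
// in the new execution_data table, keyed by executionId.
async function createNewExecution(
  execution: IExecutionDb,
  executions: Repository<ExecutionEntity>,
  executionData: Repository<ExecutionData>,
): Promise<ExecutionEntity> {
  const { data, workflowData, ...metadata } = execution;
  const saved = await executions.save(metadata as Partial<ExecutionEntity>);
  await executionData.save({
    executionId: saved.id,
    workflowData,
    data: stringify(data),
  });
  return saved;
}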

View File

@@ -29,6 +29,7 @@ import {
AuthProviderSyncHistoryRepository,
CredentialsRepository,
EventDestinationsRepository,
ExecutionDataRepository,
ExecutionMetadataRepository,
ExecutionRepository,
InstalledNodesRepository,
@@ -175,6 +176,7 @@ export async function init(testConnectionOptions?: ConnectionOptions): Promise<v
collections.Credentials = Container.get(CredentialsRepository);
collections.EventDestinations = Container.get(EventDestinationsRepository);
collections.Execution = Container.get(ExecutionRepository);
collections.ExecutionData = Container.get(ExecutionDataRepository);
collections.ExecutionMetadata = Container.get(ExecutionMetadataRepository);
collections.InstalledNodes = Container.get(InstalledNodesRepository);
collections.InstalledPackages = Container.get(InstalledPackagesRepository);
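
Registering ExecutionData as a collection implies a matching repository class alongside ExecutionRepository. A minimal sketch following the DI pattern visible here (a typedi service wrapping a TypeORM repository); the constructor shape is an assumption:

import { Service } from 'typedi';
import { DataSource, Repository } from 'typeorm';
import { ExecutionData } from '../entities/ExecutionData';

// Sketch: a DI-managed repository for the new execution_data table.
@Service()
export class ExecutionDataRepository extends Repository<ExecutionData> {
  constructor(dataSource: DataSource) {
    super(ExecutionData, dataSource.manager);
  }
}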

View File

@@ -16,13 +16,15 @@ import { validate } from 'class-validator';
import { Like } from 'typeorm';
import config from '@/config';
import * as Db from '@/Db';
import type { ICredentialsDb, IExecutionDb, IExecutionFlattedDb, IWorkflowDb } from '@/Interfaces';
import type { ICredentialsDb, IExecutionDb, IWorkflowDb } from '@/Interfaces';
import * as ResponseHelper from '@/ResponseHelper';
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import type { CredentialsEntity } from '@db/entities/CredentialsEntity';
import type { TagEntity } from '@db/entities/TagEntity';
import type { User } from '@db/entities/User';
import type { UserUpdatePayload } from '@/requests';
import Container from 'typedi';
import { ExecutionRepository } from './databases/repositories';
/**
* Returns the base URL n8n is reachable from
@@ -194,9 +196,7 @@ export async function createErrorExecution(
status: 'error',
};
const execution = ResponseHelper.flattenExecutionData(fullExecutionData);
await Db.collections.Execution.save(execution as IExecutionFlattedDb);
await Container.get(ExecutionRepository).createNewExecution(fullExecutionData);
}
export const DEFAULT_EXECUTIONS_GET_ALL_LIMIT = 20;

View File

@@ -44,6 +44,7 @@ import type {
AuthProviderSyncHistoryRepository,
CredentialsRepository,
EventDestinationsRepository,
ExecutionDataRepository,
ExecutionMetadataRepository,
ExecutionRepository,
InstalledNodesRepository,
@@ -90,6 +91,7 @@ export interface IDatabaseCollections extends Record<string, Repository<any>> {
Credentials: CredentialsRepository;
EventDestinations: EventDestinationsRepository;
Execution: ExecutionRepository;
ExecutionData: ExecutionDataRepository;
ExecutionMetadata: ExecutionMetadataRepository;
InstalledNodes: InstalledNodesRepository;
InstalledPackages: InstalledPackagesRepository;
@@ -217,19 +219,6 @@ export interface IExecutionFlattedResponse extends IExecutionFlatted {
retryOf?: string;
}
export interface IExecutionResponseApi {
id: string;
mode: WorkflowExecuteMode;
startedAt: Date;
stoppedAt?: Date;
workflowId?: string;
finished: boolean;
retryOf?: string;
retrySuccessId?: string;
data?: object;
waitTill?: Date | null;
workflowData: IWorkflowBase;
}
export interface IExecutionsListResponse {
count: number;
// results: IExecutionShortResponse[];

View File

@@ -29,9 +29,9 @@ import { RoleService } from './role/role.service';
import { eventBus } from './eventbus';
import type { User } from '@db/entities/User';
import { N8N_VERSION } from '@/constants';
import * as Db from '@/Db';
import { NodeTypes } from './NodeTypes';
import type { ExecutionMetadata } from './databases/entities/ExecutionMetadata';
import { ExecutionRepository } from './databases/repositories';
function userToPayload(user: User): {
userId: string;
@@ -57,6 +57,7 @@ export class InternalHooks implements IInternalHooksClass {
private telemetry: Telemetry,
private nodeTypes: NodeTypes,
private roleService: RoleService,
private executionRepository: ExecutionRepository,
) {}
async init(instanceId: string) {
@@ -236,7 +237,9 @@ export class InternalHooks implements IInternalHooksClass {
data: IWorkflowExecutionDataProcess,
): Promise<void> {
void Promise.all([
Db.collections.Execution.update(executionId, { status: 'running' }),
this.executionRepository.updateExistingExecution(executionId, {
status: 'running',
}),
eventBus.sendWorkflowEvent({
eventName: 'n8n.workflow.started',
payload: {
@@ -425,12 +428,6 @@ export class InternalHooks implements IInternalHooksClass {
}
}
promises.push(
Db.collections.Execution.update(executionId, {
status: executionStatus,
}) as unknown as Promise<void>,
);
promises.push(
properties.success
? eventBus.sendWorkflowEvent({

View File

@@ -129,8 +129,8 @@ export class License {
return this.isFeatureEnabled(LICENSE_FEATURES.VARIABLES);
}
isVersionControlLicensed() {
return this.isFeatureEnabled(LICENSE_FEATURES.VERSION_CONTROL);
isSourceControlLicensed() {
return this.isFeatureEnabled(LICENSE_FEATURES.SOURCE_CONTROL);
}
isAPIDisabled() {

View File

@@ -163,14 +163,14 @@ export interface IJsonSchema {
required: string[];
}
export class VersionControlPull {
export class SourceControlPull {
force?: boolean;
variables?: { [key: string]: string };
}
export declare namespace PublicVersionControlRequest {
type Pull = AuthenticatedRequest<{}, {}, VersionControlPull, {}>;
export declare namespace PublicSourceControlRequest {
type Pull = AuthenticatedRequest<{}, {}, SourceControlPull, {}>;
}
// ----------------------------------

View File

@@ -12,15 +12,15 @@ delete:
description: The credential ID that needs to be deleted
required: true
schema:
type: number
type: string
responses:
'200':
"200":
description: Operation successful.
content:
application/json:
schema:
$ref: '../schemas/credential.yml'
'401':
$ref: '../../../../shared/spec/responses/unauthorized.yml'
'404':
$ref: '../../../../shared/spec/responses/notFound.yml'
$ref: "../schemas/credential.yml"
"401":
$ref: "../../../../shared/spec/responses/unauthorized.yml"
"404":
$ref: "../../../../shared/spec/responses/notFound.yml"

View File

@@ -5,9 +5,9 @@ required:
type: object
properties:
id:
type: number
type: string
readOnly: true
example: 42
example: R2DjclaysHbqn778
name:
type: string
example: Joe's Github Credentials
@@ -17,14 +17,14 @@ properties:
data:
type: object
writeOnly: true
example: { token: 'ada612vad6fa5df4adf5a5dsf4389adsf76da7s' }
example: { token: "ada612vad6fa5df4adf5a5dsf4389adsf76da7s" }
createdAt:
type: string
format: date-time
readOnly: true
example: '2022-04-29T11:02:29.842Z'
example: "2022-04-29T11:02:29.842Z"
updatedAt:
type: string
format: date-time
readOnly: true
example: '2022-04-29T11:02:29.842Z'
example: "2022-04-29T11:02:29.842Z"

View File

@@ -37,7 +37,7 @@ export = {
return res.status(404).json({ message: 'Not Found' });
}
await BinaryDataManager.getInstance().deleteBinaryDataByExecutionId(execution.id);
await BinaryDataManager.getInstance().deleteBinaryDataByExecutionId(execution.id!);
await deleteExecution(execution);
@@ -111,7 +111,7 @@ export = {
const executions = await getExecutions(filters);
const newLastId = !executions.length ? '0' : executions.slice(-1)[0].id;
const newLastId = !executions.length ? '0' : (executions.slice(-1)[0].id as string);
filters.lastId = newLastId;

View File

@@ -1,61 +1,26 @@
import { parse } from 'flatted';
import type { FindOptionsWhere } from 'typeorm';
import { In, Not, Raw, LessThan, IsNull } from 'typeorm';
import type { DeleteResult, FindOptionsWhere } from 'typeorm';
import { In, Not, Raw, LessThan } from 'typeorm';
import * as Db from '@/Db';
import type { IExecutionFlattedDb, IExecutionResponseApi } from '@/Interfaces';
import type { IExecutionBase, IExecutionFlattedDb } from '@/Interfaces';
import type { ExecutionStatus } from 'n8n-workflow';
function prepareExecutionData(
execution: IExecutionFlattedDb | null,
): IExecutionResponseApi | undefined {
if (!execution) return undefined;
// @ts-ignore
if (!execution.data) return execution;
return {
...execution,
data: parse(execution.data) as object,
};
}
import Container from 'typedi';
import { ExecutionRepository } from '@/databases/repositories';
function getStatusCondition(status: ExecutionStatus) {
const condition: Pick<
FindOptionsWhere<IExecutionFlattedDb>,
'finished' | 'waitTill' | 'stoppedAt'
> = {};
const condition: Pick<FindOptionsWhere<IExecutionFlattedDb>, 'status'> = {};
if (status === 'success') {
condition.finished = true;
condition.status = 'success';
} else if (status === 'waiting') {
condition.waitTill = Not(IsNull());
condition.status = 'waiting';
} else if (status === 'error') {
condition.stoppedAt = Not(IsNull());
condition.finished = false;
condition.status = In(['error', 'crashed', 'failed']);
}
return condition;
}
function getExecutionSelectableProperties(includeData?: boolean): Array<keyof IExecutionFlattedDb> {
const selectFields: Array<keyof IExecutionFlattedDb> = [
'id',
'mode',
'retryOf',
'retrySuccessId',
'startedAt',
'stoppedAt',
'workflowId',
'waitTill',
'finished',
];
if (includeData) selectFields.push('data');
return selectFields;
}
export async function getExecutions(params: {
limit: number;
includeData?: boolean;
@@ -63,7 +28,7 @@ export async function getExecutions(params: {
workflowIds?: string[];
status?: ExecutionStatus;
excludedExecutionsIds?: string[];
}): Promise<IExecutionResponseApi[]> {
}): Promise<IExecutionBase[]> {
let where: FindOptionsWhere<IExecutionFlattedDb> = {};
if (params.lastId && params.excludedExecutionsIds?.length) {
@@ -85,14 +50,29 @@ export async function getExecutions(params: {
where = { ...where, workflowId: In(params.workflowIds) };
}
const executions = await Db.collections.Execution.find({
select: getExecutionSelectableProperties(params.includeData),
where,
order: { id: 'DESC' },
take: params.limit,
});
return executions.map(prepareExecutionData) as IExecutionResponseApi[];
return Container.get(ExecutionRepository).findMultipleExecutions(
{
select: [
'id',
'mode',
'retryOf',
'retrySuccessId',
'startedAt',
'stoppedAt',
'workflowId',
'waitTill',
'finished',
],
where,
order: { id: 'DESC' },
take: params.limit,
relations: ['executionData'],
},
{
includeData: params.includeData,
unflattenData: true,
},
);
}
export async function getExecutionsCount(data: {
@@ -102,6 +82,7 @@ export async function getExecutionsCount(data: {
status?: ExecutionStatus;
excludedWorkflowIds?: string[];
}): Promise<number> {
// TODO: Consider moving this to the repository as well
const executions = await Db.collections.Execution.count({
where: {
...(data.lastId && { id: LessThan(data.lastId) }),
@@ -119,21 +100,16 @@ export async function getExecutionInWorkflows(
id: string,
workflowIds: string[],
includeData?: boolean,
): Promise<IExecutionResponseApi | undefined> {
const execution = await Db.collections.Execution.findOne({
select: getExecutionSelectableProperties(includeData),
): Promise<IExecutionBase | undefined> {
return Container.get(ExecutionRepository).findSingleExecution(id, {
where: {
id,
workflowId: In(workflowIds),
},
includeData,
unflattenData: true,
});
return prepareExecutionData(execution);
}
export async function deleteExecution(
execution: IExecutionResponseApi | undefined,
): Promise<IExecutionFlattedDb> {
// @ts-ignore
return Db.collections.Execution.remove(execution);
export async function deleteExecution(execution: IExecutionBase): Promise<DeleteResult> {
return Container.get(ExecutionRepository).deleteExecution(execution.id as string);
}
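
The includeData/unflattenData options used throughout this service decide whether the executionData relation is joined and whether the stored flatted payload is parsed back into objects. A minimal sketch of findSingleExecution consistent with these call sites; the actual implementation is an assumption:

import { parse } from 'flatted';
import type { FindOptionsWhere, Repository } from 'typeorm';

// Sketch of the repository method; option names are taken from the call sites above.
async function findSingleExecution(
  executions: Repository<ExecutionEntity>,
  id: string,
  options?: {
    where?: FindOptionsWhere<ExecutionEntity>;
    includeData?: boolean;
    unflattenData?: boolean;
  },
) {
  const execution = await executions.findOne({
    where: { id, ...options?.where },
    // Join the new execution_data table only when the caller asks for the payload.
    relations: options?.includeData ? ['executionData'] : undefined,
  });
  if (!execution) return undefined;
  if (options?.includeData && options.unflattenData) {
    // The payload is stored flatted-serialized; parse it back into objects.
    return { ...execution, data: parse(execution.executionData.data) };
  }
  return execution;
}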

View File

@@ -0,0 +1,47 @@
import type express from 'express';
import type { StatusResult } from 'simple-git';
import type { PublicSourceControlRequest } from '../../../types';
import { authorize } from '../../shared/middlewares/global.middleware';
import type { ImportResult } from '@/environments/sourceControl/types/importResult';
import Container from 'typedi';
import { SourceControlService } from '@/environments/sourceControl/sourceControl.service.ee';
import { SourceControlPreferencesService } from '@/environments/sourceControl/sourceControlPreferences.service.ee';
import { isSourceControlLicensed } from '@/environments/sourceControl/sourceControlHelper.ee';
export = {
pull: [
authorize(['owner', 'member']),
async (
req: PublicSourceControlRequest.Pull,
res: express.Response,
): Promise<ImportResult | StatusResult | Promise<express.Response>> => {
const sourceControlPreferencesService = Container.get(SourceControlPreferencesService);
if (!isSourceControlLicensed()) {
return res
.status(401)
.json({ status: 'Error', message: 'Source Control feature is not licensed' });
}
if (!sourceControlPreferencesService.isSourceControlConnected()) {
return res
.status(400)
.json({ status: 'Error', message: 'Source Control is not connected to a repository' });
}
try {
const sourceControlService = Container.get(SourceControlService);
const result = await sourceControlService.pullWorkfolder({
force: req.body.force,
variables: req.body.variables,
userId: req.user.id,
importAfterPull: true,
});
if ((result as ImportResult)?.workflows) {
return res.status(200).send(result as ImportResult);
} else {
return res.status(409).send(result);
}
} catch (error) {
return res.status(400).send((error as { message: string }).message);
}
},
],
};
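
For reference, a hypothetical client call against the renamed endpoint; the base URL and API key are placeholders, and the X-N8N-API-KEY header is the public API's usual auth mechanism:

// Hypothetical request to the renamed public endpoint.
const res = await fetch('https://n8n.example.com/api/v1/source-control/pull', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'X-N8N-API-KEY': '<api-key>',
  },
  body: JSON.stringify({ force: true, variables: { MY_VAR: 'value' } }),
});
// Per the handler above: 200 returns an ImportResult, 409 a git status when
// nothing was imported, and 401/400 when unlicensed or not connected.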

View File

@@ -1,10 +1,10 @@
post:
x-eov-operation-id: pull
x-eov-operation-handler: v1/handlers/versionControl/versionControl.handler
x-eov-operation-handler: v1/handlers/sourceControl/sourceControl.handler
tags:
- VersionControl
- SourceControl
summary: Pull changes from the remote repository
description: Requires the Version Control feature to be licensed and connected to a repository.
description: Requires the Source Control feature to be licensed and connected to a repository.
requestBody:
description: Pull options
required: true

View File

@@ -1,47 +0,0 @@
import type express from 'express';
import type { StatusResult } from 'simple-git';
import type { PublicVersionControlRequest } from '../../../types';
import { authorize } from '../../shared/middlewares/global.middleware';
import type { ImportResult } from '@/environments/versionControl/types/importResult';
import Container from 'typedi';
import { VersionControlService } from '@/environments/versionControl/versionControl.service.ee';
import { VersionControlPreferencesService } from '@/environments/versionControl/versionControlPreferences.service.ee';
import { isVersionControlLicensed } from '@/environments/versionControl/versionControlHelper.ee';
export = {
pull: [
authorize(['owner', 'member']),
async (
req: PublicVersionControlRequest.Pull,
res: express.Response,
): Promise<ImportResult | StatusResult | Promise<express.Response>> => {
const versionControlPreferencesService = Container.get(VersionControlPreferencesService);
if (!isVersionControlLicensed()) {
return res
.status(401)
.json({ status: 'Error', message: 'Version Control feature is not licensed' });
}
if (!versionControlPreferencesService.isVersionControlConnected()) {
return res
.status(400)
.json({ status: 'Error', message: 'Version Control is not connected to a repository' });
}
try {
const versionControlService = Container.get(VersionControlService);
const result = await versionControlService.pullWorkfolder({
force: req.body.force,
variables: req.body.variables,
userId: req.user.id,
importAfterPull: true,
});
if ((result as ImportResult)?.workflows) {
return res.status(200).send(result as ImportResult);
} else {
return res.status(409).send(result);
}
} catch (error) {
return res.status(400).send((error as { message: string }).message);
}
},
],
};

View File

@@ -3,4 +3,4 @@ in: path
description: The ID of the workflow.
required: true
schema:
type: number
type: string

View File

@@ -7,9 +7,9 @@ required:
- settings
properties:
id:
type: number
type: string
readOnly: true
example: 1
example: 2tUt1wbLX592XDdX
name:
type: string
example: Workflow 1
@@ -27,23 +27,23 @@ properties:
nodes:
type: array
items:
$ref: './node.yml'
$ref: "./node.yml"
connections:
type: object
example: { main: [{ node: 'Jira', type: 'main', index: 0 }] }
example: { main: [{ node: "Jira", type: "main", index: 0 }] }
settings:
$ref: './workflowSettings.yml'
$ref: "./workflowSettings.yml"
staticData:
example: { lastId: 1 }
nullable: true
anyOf:
- type: string
format: 'jsonString'
format: "jsonString"
nullable: true
- type: object
nullable: true
tags:
type: array
items:
$ref: './tag.yml'
$ref: "./tag.yml"
readOnly: true

View File

@@ -154,6 +154,7 @@ export = {
const { id } = req.params;
const updateData = new WorkflowEntity();
Object.assign(updateData, req.body);
updateData.id = id;
const sharedWorkflow = await getSharedWorkflow(req.user, id);

View File

@@ -1,7 +1,7 @@
---
openapi: 3.0.0
info:
title: n8n Public API11
title: n8n Public API
description: n8n Public API
termsOfService: https://n8n.io/legal/terms
contact:
@@ -24,8 +24,8 @@ tags:
description: Operations about workflows
- name: Credential
description: Operations about credentials
- name: VersionControl
description: Operations about version control
- name: SourceControl
description: Operations about source control
paths:
/audit:
@@ -48,8 +48,8 @@ paths:
$ref: "./handlers/workflows/spec/paths/workflows.id.activate.yml"
/workflows/{id}/deactivate:
$ref: "./handlers/workflows/spec/paths/workflows.id.deactivate.yml"
/version-control/pull:
$ref: "./handlers/versionControl/spec/paths/versionControl.yml"
/source-control/pull:
$ref: "./handlers/sourceControl/spec/paths/sourceControl.yml"
components:
schemas:
$ref: "./shared/spec/schemas/_index.yml"

View File

@@ -21,6 +21,6 @@ CredentialType:
Audit:
$ref: "./../../../handlers/audit/spec/schemas/audit.yml"
Pull:
$ref: "./../../../handlers/versionControl/spec/schemas/pull.yml"
$ref: "./../../../handlers/sourceControl/spec/schemas/pull.yml"
ImportResult:
$ref: "./../../../handlers/versionControl/spec/schemas/importResult.yml"
$ref: "./../../../handlers/sourceControl/spec/schemas/importResult.yml"

View File

@@ -171,7 +171,7 @@ export function sendErrorResponse(res: Response, error: Error) {
res.status(httpStatusCode).json(response);
}
const isUniqueConstraintError = (error: Error) =>
export const isUniqueConstraintError = (error: Error) =>
['unique', 'duplicate'].some((s) => error.message.toLowerCase().includes(s));
/**
@@ -215,6 +215,7 @@ export function send<T, R extends Request, S extends Response>(
*
* @param {IExecutionDb} fullExecutionData The data to flatten
*/
// TODO: Remove this functions since it's purpose should be fulfilled by the execution repository
export function flattenExecutionData(fullExecutionData: IExecutionDb): IExecutionFlatted {
// Flatten the data
const returnData: IExecutionFlatted = {
@@ -251,6 +252,7 @@ export function flattenExecutionData(fullExecutionData: IExecutionDb): IExecutio
*
* @param {IExecutionFlattedDb} fullExecutionData The data to unflatten
*/
// TODO: Remove this functions since it's purpose should be fulfilled by the execution repository
export function unflattenExecutionData(fullExecutionData: IExecutionFlattedDb): IExecutionResponse {
const returnData: IExecutionResponse = {
id: fullExecutionData.id,
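
Both helpers are now marked for removal, but the flatted round-trip they perform is exactly what the new execution_data.data column stores: a serialization that, unlike JSON.stringify, survives the circular references common in run data. A small self-contained example:

import { parse, stringify } from 'flatted';

// Run data routinely contains cycles; JSON.stringify would throw here.
const runData: any = { resultData: { runData: {} } };
runData.self = runData;

const flat = stringify(runData); // cycle-safe string for the text column
const restored = parse(flat); // cycles are reconstructed on the way back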

View File

@@ -21,7 +21,7 @@ import cookieParser from 'cookie-parser';
import express from 'express';
import { engine as expressHandlebars } from 'express-handlebars';
import type { ServeStaticOptions } from 'serve-static';
import type { FindManyOptions } from 'typeorm';
import type { FindManyOptions, FindOptionsWhere } from 'typeorm';
import { Not, In } from 'typeorm';
import type { AxiosRequestConfig } from 'axios';
import axios from 'axios';
@@ -114,7 +114,6 @@ import type {
ICredentialsDb,
ICredentialsOverwrite,
IDiagnosticInfo,
IExecutionFlattedDb,
IExecutionsStopData,
} from '@/Interfaces';
import { ActiveExecutions } from '@/ActiveExecutions';
@@ -167,10 +166,12 @@ import {
isLdapCurrentAuthenticationMethod,
isSamlCurrentAuthenticationMethod,
} from './sso/ssoHelpers';
import { isVersionControlLicensed } from '@/environments/versionControl/versionControlHelper.ee';
import { VersionControlService } from '@/environments/versionControl/versionControl.service.ee';
import { VersionControlController } from '@/environments/versionControl/versionControl.controller.ee';
import { VersionControlPreferencesService } from './environments/versionControl/versionControlPreferences.service.ee';
import { isSourceControlLicensed } from '@/environments/sourceControl/sourceControlHelper.ee';
import { SourceControlService } from '@/environments/sourceControl/sourceControl.service.ee';
import { SourceControlController } from '@/environments/sourceControl/sourceControl.controller.ee';
import { SourceControlPreferencesService } from './environments/sourceControl/sourceControlPreferences.service.ee';
import { ExecutionRepository } from './databases/repositories';
import type { ExecutionEntity } from './databases/entities/ExecutionEntity';
const exec = promisify(callbackExec);
@@ -312,7 +313,7 @@ export class Server extends AbstractServer {
logStreaming: false,
advancedExecutionFilters: false,
variables: false,
versionControl: false,
sourceControl: false,
auditLogs: false,
},
hideUsagePage: config.getEnv('hideUsagePage'),
@@ -430,7 +431,7 @@ export class Server extends AbstractServer {
saml: isSamlLicensed(),
advancedExecutionFilters: isAdvancedExecutionFiltersEnabled(),
variables: isVariablesEnabled(),
versionControl: isVersionControlLicensed(),
sourceControl: isSourceControlLicensed(),
});
if (isLdapEnabled()) {
@@ -467,8 +468,8 @@ export class Server extends AbstractServer {
const mailer = Container.get(UserManagementMailer);
const postHog = this.postHog;
const samlService = Container.get(SamlService);
const versionControlService = Container.get(VersionControlService);
const versionControlPreferencesService = Container.get(VersionControlPreferencesService);
const sourceControlService = Container.get(SourceControlService);
const sourceControlPreferencesService = Container.get(SourceControlPreferencesService);
const controllers: object[] = [
new EventBusController(),
@@ -497,7 +498,7 @@ export class Server extends AbstractServer {
postHog,
}),
new SamlController(samlService),
new VersionControlController(versionControlService, versionControlPreferencesService),
new SourceControlController(sourceControlService, sourceControlPreferencesService),
];
if (isLdapEnabled()) {
@@ -637,15 +638,12 @@ export class Server extends AbstractServer {
this.app.use(`/${this.restEndpoint}/variables`, variablesController);
// ----------------------------------------
// Version Control
// Source Control
// ----------------------------------------
// initialize SamlService if it is licensed, even if not enabled, to
// set up the initial environment
try {
await Container.get(VersionControlService).init();
await Container.get(SourceControlService).init();
} catch (error) {
LoggerProxy.warn(`Version Control initialization failed: ${error.message}`);
LoggerProxy.warn(`Source Control initialization failed: ${error.message}`);
}
// ----------------------------------------
@@ -1154,7 +1152,9 @@ export class Server extends AbstractServer {
if (!currentlyRunningExecutionIds.length) return [];
const findOptions: FindManyOptions<IExecutionFlattedDb> = {
const findOptions: FindManyOptions<ExecutionEntity> & {
where: FindOptionsWhere<ExecutionEntity>;
} = {
select: ['id', 'workflowId', 'mode', 'retryOf', 'startedAt', 'stoppedAt', 'status'],
order: { id: 'DESC' },
where: {
@@ -1170,19 +1170,23 @@ export class Server extends AbstractServer {
if (req.query.filter) {
const { workflowId, status, finished } = jsonParse<any>(req.query.filter);
if (workflowId && sharedWorkflowIds.includes(workflowId)) {
Object.assign(findOptions.where!, { workflowId });
Object.assign(findOptions.where, { workflowId });
} else {
Object.assign(findOptions.where, { workflowId: In(sharedWorkflowIds) });
}
if (status) {
Object.assign(findOptions.where!, { status: In(status) });
Object.assign(findOptions.where, { status: In(status) });
}
if (finished) {
Object.assign(findOptions.where!, { finished });
Object.assign(findOptions.where, { finished });
}
} else {
Object.assign(findOptions.where!, { workflowId: In(sharedWorkflowIds) });
Object.assign(findOptions.where, { workflowId: In(sharedWorkflowIds) });
}
const executions = await Db.collections.Execution.find(findOptions);
const executions = await Container.get(ExecutionRepository).findMultipleExecutions(
findOptions,
);
if (!executions.length) return [];
@@ -1247,14 +1251,16 @@ export class Server extends AbstractServer {
throw new ResponseHelper.NotFoundError('Execution not found');
}
const execution = await Db.collections.Execution.exist({
where: {
id: executionId,
workflowId: In(sharedWorkflowIds),
const fullExecutionData = await Container.get(ExecutionRepository).findSingleExecution(
executionId,
{
where: {
workflowId: In(sharedWorkflowIds),
},
},
});
);
if (!execution) {
if (!fullExecutionData) {
throw new ResponseHelper.NotFoundError('Execution not found');
}
@@ -1292,11 +1298,6 @@ export class Server extends AbstractServer {
await queue.stopJob(job);
}
const executionDb = (await Db.collections.Execution.findOneBy({
id: req.params.id,
})) as IExecutionFlattedDb;
const fullExecutionData = ResponseHelper.unflattenExecutionData(executionDb);
const returnData: IExecutionsStopData = {
mode: fullExecutionData.mode,
startedAt: new Date(fullExecutionData.startedAt),

View File

@@ -8,16 +8,14 @@ import {
LoggerProxy as Logger,
WorkflowOperationError,
} from 'n8n-workflow';
import { Service } from 'typedi';
import Container, { Service } from 'typedi';
import type { FindManyOptions, ObjectLiteral } from 'typeorm';
import { Not, LessThanOrEqual } from 'typeorm';
import { DateUtils } from 'typeorm/util/DateUtils';
import config from '@/config';
import * as Db from '@/Db';
import * as ResponseHelper from '@/ResponseHelper';
import type {
IExecutionFlattedDb,
IExecutionResponse,
IExecutionsStopData,
IWorkflowExecutionDataProcess,
@@ -25,6 +23,8 @@ import type {
import { WorkflowRunner } from '@/WorkflowRunner';
import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper';
import { recoverExecutionDataFromEventLogMessages } from './eventbus/MessageEventBus/recoverEvents';
import { ExecutionRepository } from './databases/repositories';
import type { ExecutionEntity } from './databases/entities/ExecutionEntity';
@Service()
export class WaitTracker {
@@ -37,7 +37,7 @@ export class WaitTracker {
mainTimer: NodeJS.Timeout;
constructor() {
constructor(private executionRepository: ExecutionRepository) {
// Poll every 60 seconds a list of upcoming executions
this.mainTimer = setInterval(() => {
void this.getWaitingExecutions();
@@ -50,7 +50,7 @@ export class WaitTracker {
async getWaitingExecutions() {
Logger.debug('Wait tracker querying database for waiting executions');
// Find all the executions which should be triggered in the next 70 seconds
const findQuery: FindManyOptions<IExecutionFlattedDb> = {
const findQuery: FindManyOptions<ExecutionEntity> = {
select: ['id', 'waitTill'],
where: {
waitTill: LessThanOrEqual(new Date(Date.now() + 70000)),
@@ -70,7 +70,7 @@ export class WaitTracker {
);
}
const executions = await Db.collections.Execution.find(findQuery);
const executions = await this.executionRepository.findMultipleExecutions(findQuery);
if (executions.length === 0) {
return;
@@ -106,9 +106,11 @@ export class WaitTracker {
}
// Also check in database
const execution = await Db.collections.Execution.findOneBy({ id: executionId });
const execution = await this.executionRepository.findSingleExecution(executionId, {
includeData: true,
});
if (execution === null) {
if (!execution) {
throw new Error(`The execution ID "${executionId}" could not be found.`);
}
@@ -124,12 +126,17 @@ export class WaitTracker {
// if the execution ended in an unforseen, non-cancelable state, try to recover it
await recoverExecutionDataFromEventLogMessages(executionId, [], true);
// find recovered data
const recoveredExecution = await Db.collections.Execution.findOneBy({ id: executionId });
if (recoveredExecution) {
fullExecutionData = ResponseHelper.unflattenExecutionData(recoveredExecution);
} else {
const restoredExecution = await Container.get(ExecutionRepository).findSingleExecution(
executionId,
{
includeData: true,
unflattenData: true,
},
);
if (!restoredExecution) {
throw new Error(`Execution ${executionId} could not be recovered or canceled.`);
}
fullExecutionData = restoredExecution;
}
// Set in execution in DB as failed and remove waitTill time
const error = new WorkflowOperationError('Workflow-Execution has been canceled!');
@@ -144,11 +151,9 @@ export class WaitTracker {
fullExecutionData.waitTill = null;
fullExecutionData.status = 'canceled';
await Db.collections.Execution.update(
await Container.get(ExecutionRepository).updateExistingExecution(
executionId,
ResponseHelper.flattenExecutionData({
...fullExecutionData,
}) as IExecutionFlattedDb,
fullExecutionData,
);
return {
@@ -166,16 +171,14 @@ export class WaitTracker {
(async () => {
// Get the data to execute
const fullExecutionDataFlatted = await Db.collections.Execution.findOneBy({
id: executionId,
const fullExecutionData = await this.executionRepository.findSingleExecution(executionId, {
includeData: true,
unflattenData: true,
});
if (fullExecutionDataFlatted === null) {
if (!fullExecutionData) {
throw new Error(`The execution with the id "${executionId}" does not exist.`);
}
const fullExecutionData = ResponseHelper.unflattenExecutionData(fullExecutionDataFlatted);
if (fullExecutionData.finished) {
throw new Error('The execution did succeed and can so not be started again.');
}

View File

@@ -6,17 +6,17 @@ import { NodeHelpers, Workflow, LoggerProxy as Logger } from 'n8n-workflow';
import { Service } from 'typedi';
import type express from 'express';
import * as Db from '@/Db';
import * as ResponseHelper from '@/ResponseHelper';
import * as WebhookHelpers from '@/WebhookHelpers';
import { NodeTypes } from '@/NodeTypes';
import type { IExecutionResponse, IResponseCallbackData, IWorkflowDb } from '@/Interfaces';
import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData';
import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper';
import { ExecutionRepository } from './databases/repositories';
@Service()
export class WaitingWebhooks {
constructor(private nodeTypes: NodeTypes) {}
constructor(private nodeTypes: NodeTypes, private executionRepository: ExecutionRepository) {}
async executeWebhook(
httpMethod: WebhookHttpMethod,
@@ -39,19 +39,20 @@ export class WaitingWebhooks {
const executionId = pathParts.shift();
const path = pathParts.join('/');
const execution = await Db.collections.Execution.findOneBy({ id: executionId });
const execution = await this.executionRepository.findSingleExecution(executionId as string, {
includeData: true,
unflattenData: true,
});
if (execution === null) {
if (!execution) {
throw new ResponseHelper.NotFoundError(`The execution "${executionId} does not exist.`);
}
const fullExecutionData = ResponseHelper.unflattenExecutionData(execution);
if (fullExecutionData.finished || fullExecutionData.data.resultData.error) {
if (execution.finished || execution.data.resultData.error) {
throw new ResponseHelper.ConflictError(`The execution "${executionId} has finished already.`);
}
return this.startExecution(httpMethod, path, fullExecutionData, req, res);
return this.startExecution(httpMethod, path, execution, req, res);
}
async startExecution(

View File

@@ -54,7 +54,6 @@ import { ExternalHooks } from '@/ExternalHooks';
import type {
IExecutionDb,
IExecutionFlattedDb,
IExecutionResponse,
IPushDataExecutionFinished,
IWorkflowExecuteProcess,
IWorkflowExecutionDataProcess,
@@ -62,7 +61,6 @@ import type {
} from '@/Interfaces';
import { NodeTypes } from '@/NodeTypes';
import { Push } from '@/push';
import * as ResponseHelper from '@/ResponseHelper';
import * as WebhookHelpers from '@/WebhookHelpers';
import * as WorkflowHelpers from '@/WorkflowHelpers';
import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper';
@@ -72,6 +70,7 @@ import { WorkflowsService } from './workflows/workflows.services';
import { Container } from 'typedi';
import { InternalHooks } from '@/InternalHooks';
import type { ExecutionMetadata } from '@db/entities/ExecutionMetadata';
import { ExecutionRepository } from './databases/repositories';
const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');
@@ -185,7 +184,7 @@ export function executeErrorWorkflow(
/**
* Prunes Saved Execution which are older than configured.
* Throttled to be executed just once in configured timeframe.
*
* TODO: Consider moving this whole function to the repository or at least the queries
*/
let throttling = false;
async function pruneExecutionData(this: WorkflowHooks): Promise<void> {
@@ -220,7 +219,6 @@ async function pruneExecutionData(this: WorkflowHooks): Promise<void> {
}
}
const isBinaryModeDefaultMode = config.getEnv('binaryDataManager.mode') === 'default';
try {
setTimeout(() => {
throttling = false;
@@ -236,8 +234,7 @@ async function pruneExecutionData(this: WorkflowHooks): Promise<void> {
).map(({ id }) => id);
await Db.collections.Execution.delete({ id: In(executionIds) });
// Mark binary data for deletion for all executions
if (!isBinaryModeDefaultMode)
await BinaryDataManager.getInstance().markDataForDeletionByExecutionIds(executionIds);
await BinaryDataManager.getInstance().markDataForDeletionByExecutionIds(executionIds);
} while (executionIds.length > 0);
} catch (error) {
ErrorReporter.error(error);
@@ -435,15 +432,19 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
{ executionId: this.executionId, nodeName },
);
const execution = await Db.collections.Execution.findOneBy({ id: this.executionId });
const fullExecutionData = await Container.get(ExecutionRepository).findSingleExecution(
this.executionId,
{
includeData: true,
unflattenData: true,
},
);
if (execution === null) {
if (!fullExecutionData) {
// Something went badly wrong if this happens.
// This check is here mostly to make typescript happy.
return;
}
const fullExecutionData: IExecutionResponse =
ResponseHelper.unflattenExecutionData(execution);
if (fullExecutionData.finished) {
// We already received ´workflowExecuteAfter´ webhook, so this is just an async call
@@ -482,10 +483,9 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
fullExecutionData.status = 'running';
const flattenedExecutionData = ResponseHelper.flattenExecutionData(fullExecutionData);
await Db.collections.Execution.update(
await Container.get(ExecutionRepository).updateExistingExecution(
this.executionId,
flattenedExecutionData as IExecutionFlattedDb,
fullExecutionData,
);
} catch (err) {
ErrorReporter.error(err);
@@ -578,10 +578,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
if (isManualMode && !saveManualExecutions && !fullRunData.waitTill) {
// Data is always saved, so we remove from database
await Db.collections.Execution.delete(this.executionId);
await BinaryDataManager.getInstance().markDataForDeletionByExecutionId(
this.executionId,
);
await Container.get(ExecutionRepository).deleteExecution(this.executionId);
return;
}
@@ -605,6 +602,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
let workflowStatusFinal: ExecutionStatus = workflowDidSucceed ? 'success' : 'failed';
if (workflowHasCrashed) workflowStatusFinal = 'crashed';
if (workflowWasCanceled) workflowStatusFinal = 'canceled';
if (fullRunData.waitTill) workflowStatusFinal = 'waiting';
if (
(workflowDidSucceed && saveDataSuccessExecution === 'none') ||
@@ -619,10 +617,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
this.retryOf,
);
// Data is always saved, so we remove from database
await Db.collections.Execution.delete(this.executionId);
await BinaryDataManager.getInstance().markDataForDeletionByExecutionId(
this.executionId,
);
await Container.get(ExecutionRepository).deleteExecution(this.executionId);
return;
}
@@ -671,12 +666,9 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
stoppedAt: fullExecutionData.stoppedAt,
});
const executionData = ResponseHelper.flattenExecutionData(fullExecutionData);
// Save the Execution in DB
await Db.collections.Execution.update(
await Container.get(ExecutionRepository).updateExistingExecution(
this.executionId,
executionData as IExecutionFlattedDb,
fullExecutionData,
);
try {
@@ -688,9 +680,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
}
if (fullRunData.finished === true && this.retryOf !== undefined) {
// If the retry was successful save the reference it on the original execution
// await Db.collections.Execution.save(executionData as IExecutionFlattedDb);
await Db.collections.Execution.update(this.retryOf, {
await Container.get(ExecutionRepository).updateExistingExecution(this.retryOf, {
retrySuccessId: this.executionId,
});
}
@@ -778,6 +768,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
let workflowStatusFinal: ExecutionStatus = workflowDidSucceed ? 'success' : 'failed';
if (workflowHasCrashed) workflowStatusFinal = 'crashed';
if (workflowWasCanceled) workflowStatusFinal = 'canceled';
if (fullRunData.waitTill) workflowStatusFinal = 'waiting';
if (!workflowDidSucceed) {
executeErrorWorkflow(
@@ -809,17 +800,15 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
fullExecutionData.workflowId = workflowId;
}
const executionData = ResponseHelper.flattenExecutionData(fullExecutionData);
// Save the Execution in DB
await Db.collections.Execution.update(
await Container.get(ExecutionRepository).updateExistingExecution(
this.executionId,
executionData as IExecutionFlattedDb,
fullExecutionData,
);
// For reasons(tm) the execution status is not updated correctly in the first update, so has to be written again (tbd)
await Db.collections.Execution.update(this.executionId, {
status: executionData.status,
await Container.get(ExecutionRepository).updateExistingExecution(this.executionId, {
status: fullExecutionData.status,
});
try {
@@ -832,7 +821,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
if (fullRunData.finished === true && this.retryOf !== undefined) {
// If the retry was successful save the reference it on the original execution
await Db.collections.Execution.update(this.retryOf, {
await Container.get(ExecutionRepository).updateExistingExecution(this.retryOf, {
retrySuccessId: this.executionId,
});
}
@@ -1090,9 +1079,10 @@ async function executeWorkflow(
// remove execution from active executions
Container.get(ActiveExecutions).remove(executionId, fullRunData);
const executionData = ResponseHelper.flattenExecutionData(fullExecutionData);
await Db.collections.Execution.update(executionId, executionData as IExecutionFlattedDb);
await Container.get(ExecutionRepository).updateExistingExecution(
executionId,
fullExecutionData,
);
throw {
...error,
stack: error.stack,
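
The repeated pattern in this file — one deleteExecution call replacing an explicit row delete plus binary-data cleanup — suggests the repository now owns both steps, with the execution_data row removed automatically via the onDelete: 'CASCADE' relation on ExecutionData. A minimal sketch, where executions stands for the ExecutionEntity repository; whether the repository really handles binary data internally is an assumption:

import type { DeleteResult } from 'typeorm';

// Sketch: the CASCADE relation removes the execution_data row along with the execution.
async function deleteExecution(executionId: string): Promise<DeleteResult> {
  await BinaryDataManager.getInstance().markDataForDeletionByExecutionId(executionId);
  return executions.delete({ id: executionId });
}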

View File

@@ -11,7 +11,7 @@
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
/* eslint-disable @typescript-eslint/no-unused-vars */
import type { IProcessMessage } from 'n8n-core';
import { BinaryDataManager, WorkflowExecute } from 'n8n-core';
import { WorkflowExecute } from 'n8n-core';
import type {
ExecutionError,
@@ -34,10 +34,8 @@ import { fork } from 'child_process';
import { ActiveExecutions } from '@/ActiveExecutions';
import config from '@/config';
import * as Db from '@/Db';
import { ExternalHooks } from '@/ExternalHooks';
import type {
IExecutionFlattedDb,
IProcessMessageDataHook,
IWorkflowExecutionDataProcess,
IWorkflowExecutionDataProcessWithExecution,
@@ -45,7 +43,6 @@ import type {
import { NodeTypes } from '@/NodeTypes';
import type { Job, JobData, JobQueue, JobResponse } from '@/Queue';
import { Queue } from '@/Queue';
import * as ResponseHelper from '@/ResponseHelper';
import * as WebhookHelpers from '@/WebhookHelpers';
import * as WorkflowHelpers from '@/WorkflowHelpers';
import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData';
@@ -57,6 +54,7 @@ import { eventBus } from './eventbus';
import { recoverExecutionDataFromEventLogMessages } from './eventbus/MessageEventBus/recoverEvents';
import { Container } from 'typedi';
import { InternalHooks } from './InternalHooks';
import { ExecutionRepository } from './databases/repositories';
export class WorkflowRunner {
activeExecutions: ActiveExecutions;
@@ -127,14 +125,22 @@ export class WorkflowRunner {
}
}
const executionFlattedData = await Db.collections.Execution.findOneBy({ id: executionId });
void Container.get(InternalHooks).onWorkflowCrashed(
const executionFlattedData = await Container.get(ExecutionRepository).findSingleExecution(
executionId,
executionMode,
executionFlattedData?.workflowData,
executionFlattedData?.metadata,
{
includeData: true,
},
);
if (executionFlattedData) {
void Container.get(InternalHooks).onWorkflowCrashed(
executionId,
executionMode,
executionFlattedData?.workflowData,
// TODO: get metadata to be sent here
// executionFlattedData?.metadata,
);
}
} catch {
// Ignore errors
}
@@ -566,10 +572,16 @@ export class WorkflowRunner {
reject(error);
}
const executionDb = (await Db.collections.Execution.findOneBy({
id: executionId,
})) as IExecutionFlattedDb;
const fullExecutionData = ResponseHelper.unflattenExecutionData(executionDb);
const fullExecutionData = await Container.get(ExecutionRepository).findSingleExecution(
executionId,
{
includeData: true,
unflattenData: true,
},
);
if (!fullExecutionData) {
return reject(new Error(`Could not find execution with id "${executionId}"`));
}
const runData = {
data: fullExecutionData.data,
finished: fullExecutionData.finished,
@@ -597,8 +609,7 @@ export class WorkflowRunner {
(workflowDidSucceed && saveDataSuccessExecution === 'none') ||
(!workflowDidSucceed && saveDataErrorExecution === 'none')
) {
await Db.collections.Execution.delete(executionId);
await BinaryDataManager.getInstance().markDataForDeletionByExecutionId(executionId);
await Container.get(ExecutionRepository).deleteExecution(executionId);
}
// eslint-disable-next-line id-denylist
} catch (err) {

View File

@@ -29,7 +29,7 @@ const enabledFeatures = {
[LICENSE_FEATURES.SAML]: false,
[LICENSE_FEATURES.LOG_STREAMING]: false,
[LICENSE_FEATURES.ADVANCED_EXECUTION_FILTERS]: false,
[LICENSE_FEATURES.VERSION_CONTROL]: false,
[LICENSE_FEATURES.SOURCE_CONTROL]: false,
};
type Feature = keyof typeof enabledFeatures;

View File

@@ -6,6 +6,8 @@ import config from '@/config';
import { CREDENTIALS_REPORT } from '@/audit/constants';
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import type { Risk } from '@/audit/types';
import Container from 'typedi';
import { ExecutionRepository } from '@/databases/repositories';
async function getAllCredsInUse(workflows: WorkflowEntity[]) {
const credsInAnyUse = new Set<string>();
@@ -44,12 +46,14 @@ async function getExecutionsInPastDays(days: number) {
const utcDate = DateUtils.mixedDateToUtcDatetimeString(date) as string;
return Db.collections.Execution.find({
select: ['workflowData'],
where: {
startedAt: MoreThanOrEqual(utcDate) as unknown as FindOperator<Date>,
return Container.get(ExecutionRepository).findMultipleExecutions(
{
where: {
startedAt: MoreThanOrEqual(utcDate) as unknown as FindOperator<Date>,
},
},
});
{ includeData: true },
);
}
/**

View File

@@ -6,7 +6,6 @@ import glob from 'fast-glob';
import { Container } from 'typedi';
import type { EntityManager } from 'typeorm';
import { v4 as uuid } from 'uuid';
import config from '@/config';
import * as Db from '@/Db';
import { SharedWorkflow } from '@db/entities/SharedWorkflow';
import { WorkflowEntity } from '@db/entities/WorkflowEntity';
@@ -18,6 +17,7 @@ import { disableAutoGeneratedIds } from '@db/utils/commandHelpers';
import type { ICredentialsDb, IWorkflowToImport } from '@/Interfaces';
import { replaceInvalidCredentials } from '@/WorkflowHelpers';
import { BaseCommand, UM_FIX_INSTRUCTION } from '../BaseCommand';
import { generateNanoId } from '@/databases/utils/generators';
function assertHasWorkflowsToImport(workflows: unknown): asserts workflows is IWorkflowToImport[] {
if (!Array.isArray(workflows)) {
@@ -117,6 +117,9 @@ export class ImportWorkflowsCommand extends BaseCommand {
const workflow = jsonParse<IWorkflowToImport>(
fs.readFileSync(file, { encoding: 'utf8' }),
);
if (!workflow.id) {
workflow.id = generateNanoId();
}
if (credentials.length > 0) {
workflow.nodes.forEach((node: INode) => {
@@ -227,12 +230,6 @@ export class ImportWorkflowsCommand extends BaseCommand {
},
['workflowId', 'userId'],
);
if (config.getEnv('database.type') === 'postgresdb') {
const tablePrefix = config.getEnv('database.tablePrefix');
await this.transactionManager.query(
`SELECT setval('${tablePrefix}workflow_entity_id_seq', (SELECT MAX(id) from "${tablePrefix}workflow_entity"))`,
);
}
}
private async getOwner() {

View File

@@ -22,6 +22,7 @@ import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper';
import { generateFailedExecutionFromError } from '@/WorkflowHelpers';
import { N8N_VERSION } from '@/constants';
import { BaseCommand } from './BaseCommand';
import { ExecutionRepository } from '@/databases/repositories';
export class Worker extends BaseCommand {
static description = '\nStarts a n8n worker';
@@ -89,9 +90,15 @@ export class Worker extends BaseCommand {
async runJob(job: Job, nodeTypes: INodeTypes): Promise<JobResponse> {
const { executionId, loadStaticData } = job.data;
const executionDb = await Db.collections.Execution.findOneBy({ id: executionId });
const fullExecutionData = await Container.get(ExecutionRepository).findSingleExecution(
executionId,
{
includeData: true,
unflattenData: true,
},
);
if (!executionDb) {
if (!fullExecutionData) {
LoggerProxy.error(
`Worker failed to find data of execution "${executionId}" in database. Cannot continue.`,
{ executionId },
@@ -100,15 +107,14 @@ export class Worker extends BaseCommand {
`Unable to find data of execution "${executionId}" in database. Aborting execution.`,
);
}
const currentExecutionDb = ResponseHelper.unflattenExecutionData(executionDb);
const workflowId = currentExecutionDb.workflowData.id!;
const workflowId = fullExecutionData.workflowData.id!;
LoggerProxy.info(
`Start job: ${job.id} (Workflow ID: ${workflowId} | Execution: ${executionId})`,
);
const workflowOwner = await getWorkflowOwner(workflowId);
let { staticData } = currentExecutionDb.workflowData;
let { staticData } = fullExecutionData.workflowData;
if (loadStaticData) {
const workflowData = await Db.collections.Workflow.findOne({
select: ['id', 'staticData'],
@@ -126,7 +132,7 @@ export class Worker extends BaseCommand {
staticData = workflowData.staticData;
}
const workflowSettings = currentExecutionDb.workflowData.settings ?? {};
const workflowSettings = fullExecutionData.workflowData.settings ?? {};
let workflowTimeout = workflowSettings.executionTimeout ?? config.getEnv('executions.timeout'); // initialize with default
@@ -138,13 +144,13 @@ export class Worker extends BaseCommand {
const workflow = new Workflow({
id: workflowId,
name: currentExecutionDb.workflowData.name,
nodes: currentExecutionDb.workflowData.nodes,
connections: currentExecutionDb.workflowData.connections,
active: currentExecutionDb.workflowData.active,
name: fullExecutionData.workflowData.name,
nodes: fullExecutionData.workflowData.nodes,
connections: fullExecutionData.workflowData.connections,
active: fullExecutionData.workflowData.active,
nodeTypes,
staticData,
settings: currentExecutionDb.workflowData.settings,
settings: fullExecutionData.workflowData.settings,
});
const additionalData = await WorkflowExecuteAdditionalData.getBase(
@@ -153,10 +159,10 @@ export class Worker extends BaseCommand {
executionTimeoutTimestamp,
);
additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerExecuter(
currentExecutionDb.mode,
fullExecutionData.mode,
job.data.executionId,
currentExecutionDb.workflowData,
{ retryOf: currentExecutionDb.retryOf as string },
fullExecutionData.workflowData,
{ retryOf: fullExecutionData.retryOf as string },
);
try {
@@ -164,7 +170,7 @@ export class Worker extends BaseCommand {
} catch (error) {
if (error instanceof NodeOperationError) {
const failedExecution = generateFailedExecutionFromError(
currentExecutionDb.mode,
fullExecutionData.mode,
error,
error.node,
);
@@ -192,17 +198,17 @@ export class Worker extends BaseCommand {
let workflowExecute: WorkflowExecute;
let workflowRun: PCancelable<IRun>;
if (currentExecutionDb.data !== undefined) {
if (fullExecutionData.data !== undefined) {
workflowExecute = new WorkflowExecute(
additionalData,
currentExecutionDb.mode,
currentExecutionDb.data,
fullExecutionData.mode,
fullExecutionData.data,
);
workflowRun = workflowExecute.processRunExecutionData(workflow);
} else {
// Execute all nodes
// Can execute without webhook so go on
workflowExecute = new WorkflowExecute(additionalData, currentExecutionDb.mode);
workflowExecute = new WorkflowExecute(additionalData, fullExecutionData.mode);
workflowRun = workflowExecute.run(workflow);
}

View File

@@ -77,7 +77,7 @@ export const enum LICENSE_FEATURES {
LOG_STREAMING = 'feat:logStreaming',
ADVANCED_EXECUTION_FILTERS = 'feat:advancedExecutionFilters',
VARIABLES = 'feat:variables',
VERSION_CONTROL = 'feat:versionControl',
SOURCE_CONTROL = 'feat:sourceControl',
API_DISABLED = 'feat:apiDisabled',
}

View File

@@ -75,7 +75,7 @@ export class TagsController {
}
// Updates a tag
@Patch('/:id(\\d+)')
@Patch('/:id(\\w+)')
async updateTag(req: TagsRequest.Update): Promise<TagEntity> {
const { name } = req.body;
const { id } = req.params;
@@ -93,7 +93,7 @@ export class TagsController {
}
@Authorized(['global', 'owner'])
@Delete('/:id(\\d+)')
@Delete('/:id(\\w+)')
async deleteTag(req: TagsRequest.Delete) {
const { id } = req.params;
await this.externalHooks.run('tag.beforeDelete', [id]);
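
Express accepts an inline regular expression on a route parameter; \\d+ matched only the old numeric IDs, so these routes would 404 for nanoids, while \\w+ matches both. A small illustration with a hypothetical router:

import { Router } from 'express';

const router = Router();

// '/:id(\\d+)' matched '/42' but rejected '/R2DjclaysHbqn778';
// '/:id(\\w+)' matches numeric legacy IDs and alphanumeric nanoids alike.
router.patch('/:id(\\w+)', (req, res) => {
  res.send(`updating tag ${req.params.id}`);
});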

View File

@@ -51,7 +51,7 @@ EECredentialsController.get(
* GET /credentials/:id
*/
EECredentialsController.get(
'/:id(\\d+)',
'/:id(\\w+)',
(req, res, next) => (req.params.id === 'new' ? next('router') : next()), // skip ee router and use free one for naming
ResponseHelper.send(async (req: CredentialRequest.Get) => {
const { id: credentialId } = req.params;

View File

@@ -65,7 +65,7 @@ credentialsController.get(
* GET /credentials/:id
*/
credentialsController.get(
-'/:id(\\d+)',
+'/:id(\\w+)',
ResponseHelper.send(async (req: CredentialRequest.Get) => {
const { id: credentialId } = req.params;
const includeDecryptedData = req.query.includeData === 'true';
@@ -147,7 +147,7 @@ credentialsController.post(
* PATCH /credentials/:id
*/
credentialsController.patch(
-'/:id(\\d+)',
+'/:id(\\w+)',
ResponseHelper.send(async (req: CredentialRequest.Update): Promise<ICredentialsDb> => {
const { id: credentialId } = req.params;
@@ -198,7 +198,7 @@ credentialsController.patch(
* DELETE /credentials/:id
*/
credentialsController.delete(
-'/:id(\\d+)',
+'/:id(\\w+)',
ResponseHelper.send(async (req: CredentialRequest.Delete) => {
const { id: credentialId } = req.params;

View File
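The same `:id(\\d+)` → `:id(\\w+)` relaxation appears in the tag routes above and in all four credential routes: `\d+` only matches numeric ids, while nanoids are alphanumeric. A minimal standalone sketch of the behaviour change (plain Express, not n8n code; the sample id is invented):

import express from 'express';

const app = express();

// Before: only numeric ids matched, e.g. GET /credentials/42.
app.get('/old/credentials/:id(\\d+)', (req, res) => res.send(`numeric id ${req.params.id}`));

// After: alphanumeric nanoid-style ids match too, e.g. GET /credentials/tz4a98xxat96iws9.
app.get('/credentials/:id(\\w+)', (req, res) => res.send(`id ${req.params.id}`));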

@@ -1,15 +1,28 @@
import type { ICredentialNodeAccess } from 'n8n-workflow';
-import { Column, Entity, Generated, Index, OneToMany, PrimaryColumn } from 'typeorm';
+import { BeforeInsert, Column, Entity, Index, OneToMany, PrimaryColumn } from 'typeorm';
import { IsArray, IsObject, IsString, Length } from 'class-validator';
import type { SharedCredentials } from './SharedCredentials';
import { AbstractEntity, jsonColumnType } from './AbstractEntity';
import type { ICredentialsDb } from '@/Interfaces';
-import { idStringifier } from '../utils/transformers';
+import { generateNanoId } from '../utils/generators';
@Entity()
export class CredentialsEntity extends AbstractEntity implements ICredentialsDb {
-@Generated()
-@PrimaryColumn({ transformer: idStringifier })
+constructor(data?: Partial<CredentialsEntity>) {
+super();
+Object.assign(this, data);
+if (!this.id) {
+this.id = generateNanoId();
+}
+}
+@BeforeInsert()
+nanoId(): void {
+if (!this.id) {
+this.id = generateNanoId();
+}
+}
+@PrimaryColumn('varchar')
id: string;
@Column({ length: 128 })

View File
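`generateNanoId` comes from `../utils/generators`, which is not part of this diff. A plausible sketch of that helper, assuming the `customAlphabet` API from the nanoid package; the exact alphabet and length are assumptions (the migrations below only require the result to fit in varchar(36)):

import { customAlphabet } from 'nanoid';

// Assumed: a 62-symbol alphanumeric alphabet and 16-character ids (~95 bits of entropy),
// well within the varchar(36) columns the migrations create.
const ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';

export const generateNanoId = customAlphabet(ALPHABET, 16);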

@@ -0,0 +1,27 @@
import { Column, Entity, ManyToOne, PrimaryColumn } from 'typeorm';
import { idStringifier } from '../utils/transformers';
import { ExecutionEntity } from './ExecutionEntity';
import { jsonColumnType } from './AbstractEntity';
import { IWorkflowBase } from 'n8n-workflow';
@Entity()
export class ExecutionData {
@Column('text')
data: string;
// WARNING: the workflowData column has been changed from IWorkflowDb to IWorkflowBase
// when ExecutionData was introduced as a separate entity.
// This is because manual executions of unsaved workflows have no workflow id
// and IWorkflowDb has it as a mandatory field. IWorkflowBase reflects the correct
// data structure for this entity.
@Column(jsonColumnType)
workflowData: IWorkflowBase;
@PrimaryColumn({ transformer: idStringifier })
executionId: string;
@ManyToOne('ExecutionEntity', 'data', {
onDelete: 'CASCADE',
})
execution: ExecutionEntity;
}

View File

@@ -1,10 +1,20 @@
import { ExecutionStatus, WorkflowExecuteMode } from 'n8n-workflow';
-import { Column, Entity, Generated, Index, OneToMany, PrimaryColumn } from 'typeorm';
-import { datetimeColumnType, jsonColumnType } from './AbstractEntity';
-import { IWorkflowDb } from '@/Interfaces';
-import type { IExecutionFlattedDb } from '@/Interfaces';
+import {
+Column,
+Entity,
+Generated,
+Index,
+ManyToOne,
+OneToMany,
+OneToOne,
+PrimaryColumn,
+Relation,
+} from 'typeorm';
+import { datetimeColumnType } from './AbstractEntity';
import { idStringifier } from '../utils/transformers';
+import type { ExecutionData } from './ExecutionData';
import type { ExecutionMetadata } from './ExecutionMetadata';
+import { WorkflowEntity } from './WorkflowEntity';
@Entity()
@Index(['workflowId', 'id'])
@@ -12,14 +22,11 @@ import type { ExecutionMetadata } from './ExecutionMetadata';
@Index(['finished', 'id'])
@Index(['workflowId', 'finished', 'id'])
@Index(['workflowId', 'waitTill', 'id'])
-export class ExecutionEntity implements IExecutionFlattedDb {
+export class ExecutionEntity {
@Generated()
@PrimaryColumn({ transformer: idStringifier })
id: string;
-@Column('text')
-data: string;
@Column()
finished: boolean;
@@ -42,10 +49,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
@Column({ type: datetimeColumnType, nullable: true })
stoppedAt: Date;
-@Column(jsonColumnType)
-workflowData: IWorkflowDb;
-@Column({ nullable: true, transformer: idStringifier })
+@Column({ nullable: true })
workflowId: string;
@Column({ type: datetimeColumnType, nullable: true })
@@ -53,4 +57,10 @@ export class ExecutionEntity implements IExecutionFlattedDb {
@OneToMany('ExecutionMetadata', 'execution')
metadata: ExecutionMetadata[];
+@OneToOne('ExecutionData', 'execution')
+executionData: Relation<ExecutionData>;
+@ManyToOne('WorkflowEntity')
+workflow: WorkflowEntity;
}

View File
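With `data` and `workflowData` gone from ExecutionEntity, persisting an execution is now a two-step write against both tables — which is what `createNewExecution` in the repository at the end of this diff does. Reduced to a sketch (`dataSource`, `workflowId`, `workflowData` and `runExecutionData` are assumed to be in scope):

import { stringify } from 'flatted';

const execution = await dataSource.getRepository(ExecutionEntity).save({
finished: false,
mode: 'manual',
startedAt: new Date(),
workflowId,
});
await dataSource.getRepository(ExecutionData).save({
execution,
workflowData,
data: stringify(runExecutionData), // flatted keeps circular run-data serializable
});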

@@ -3,7 +3,6 @@ import { CredentialsEntity } from './CredentialsEntity';
import { User } from './User';
import { Role } from './Role';
import { AbstractEntity } from './AbstractEntity';
-import { idStringifier } from '../utils/transformers';
@Entity()
export class SharedCredentials extends AbstractEntity {
@@ -22,6 +21,6 @@ export class SharedCredentials extends AbstractEntity {
@ManyToOne('CredentialsEntity', 'shared')
credentials: CredentialsEntity;
-@PrimaryColumn({ transformer: idStringifier })
+@PrimaryColumn()
credentialsId: string;
}

View File

@@ -3,7 +3,6 @@ import { WorkflowEntity } from './WorkflowEntity';
import { User } from './User';
import { Role } from './Role';
import { AbstractEntity } from './AbstractEntity';
-import { idStringifier } from '../utils/transformers';
@Entity()
export class SharedWorkflow extends AbstractEntity {
@@ -22,6 +21,6 @@ export class SharedWorkflow extends AbstractEntity {
@ManyToOne('WorkflowEntity', 'shared')
workflow: WorkflowEntity;
-@PrimaryColumn({ transformer: idStringifier })
+@PrimaryColumn()
workflowId: string;
}

View File

@@ -1,15 +1,28 @@
-import { Column, Entity, Generated, Index, ManyToMany, OneToMany, PrimaryColumn } from 'typeorm';
+import { BeforeInsert, Column, Entity, Index, ManyToMany, OneToMany, PrimaryColumn } from 'typeorm';
import { IsString, Length } from 'class-validator';
-import { idStringifier } from '../utils/transformers';
import type { WorkflowEntity } from './WorkflowEntity';
import type { WorkflowTagMapping } from './WorkflowTagMapping';
import { AbstractEntity } from './AbstractEntity';
+import { generateNanoId } from '../utils/generators';
@Entity()
export class TagEntity extends AbstractEntity {
-@Generated()
-@PrimaryColumn({ transformer: idStringifier })
+constructor(data?: Partial<TagEntity>) {
+super();
+Object.assign(this, data);
+if (!this.id) {
+this.id = generateNanoId();
+}
+}
+@BeforeInsert()
+nanoId() {
+if (!this.id) {
+this.id = generateNanoId();
+}
+}
+@PrimaryColumn('varchar')
id: string;
@Column({ length: 24 })

View File

@@ -1,9 +1,24 @@
-import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm';
+import { BeforeInsert, Column, Entity, PrimaryColumn } from 'typeorm';
+import { generateNanoId } from '../utils/generators';
@Entity()
export class Variables {
-@PrimaryGeneratedColumn()
-id: number;
+constructor(data?: Partial<Variables>) {
+Object.assign(this, data);
+if (!this.id) {
+this.id = generateNanoId();
+}
+}
+@BeforeInsert()
+nanoId() {
+if (!this.id) {
+this.id = generateNanoId();
+}
+}
+@PrimaryColumn('varchar')
+id: string;
@Column('text')
key: string;

View File
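As with the other migrated entities, the id is generated twice over: eagerly in the constructor, and again in a `@BeforeInsert` hook as a safety net for instances whose id was never set (or was cleared) by the time TypeORM runs the INSERT. A short usage sketch (`dataSource` is assumed setup):

const variable = new Variables({ key: 'API_BASE_URL', value: 'https://example.com' });
console.log(typeof variable.id); // 'string' — the id exists before any DB call

// If some code path leaves the id unset, the @BeforeInsert hook fills it in during save():
await dataSource.getRepository(Variables).save(variable);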

@@ -1,11 +1,9 @@
import { Column, Entity, Index, PrimaryColumn } from 'typeorm';
-import { idStringifier } from '../utils/transformers';
@Entity()
@Index(['webhookId', 'method', 'pathLength'])
export class WebhookEntity {
-@Column({ transformer: idStringifier })
+@Column()
workflowId: string;
@PrimaryColumn()

View File

@@ -4,9 +4,9 @@ import { IConnections, IDataObject, IWorkflowSettings } from 'n8n-workflow';
import type { IBinaryKeyData, INode, IPairedItemData } from 'n8n-workflow';
import {
+BeforeInsert,
Column,
Entity,
-Generated,
Index,
JoinColumn,
JoinTable,
@@ -20,14 +20,29 @@ import type { TagEntity } from './TagEntity';
import type { SharedWorkflow } from './SharedWorkflow';
import type { WorkflowStatistics } from './WorkflowStatistics';
import type { WorkflowTagMapping } from './WorkflowTagMapping';
-import { idStringifier, objectRetriever, sqlite } from '../utils/transformers';
+import { objectRetriever, sqlite } from '../utils/transformers';
import { AbstractEntity, jsonColumnType } from './AbstractEntity';
import type { IWorkflowDb } from '@/Interfaces';
+import { generateNanoId } from '../utils/generators';
@Entity()
export class WorkflowEntity extends AbstractEntity implements IWorkflowDb {
-@Generated()
-@PrimaryColumn({ transformer: idStringifier })
+constructor(data?: Partial<WorkflowEntity>) {
+super();
+Object.assign(this, data);
+if (!this.id) {
+this.id = generateNanoId();
+}
+}
+@BeforeInsert()
+nanoId() {
+if (!this.id) {
+this.id = generateNanoId();
+}
+}
+@PrimaryColumn('varchar')
id: string;
// TODO: Add XSS check

View File

@@ -1,5 +1,4 @@
import { Column, Entity, ManyToOne, PrimaryColumn } from 'typeorm';
-import { idStringifier } from '../utils/transformers';
import { datetimeColumnType } from './AbstractEntity';
import { WorkflowEntity } from './WorkflowEntity';
@@ -25,6 +24,6 @@ export class WorkflowStatistics {
@ManyToOne('WorkflowEntity', 'shared')
workflow: WorkflowEntity;
-@PrimaryColumn({ transformer: idStringifier })
+@PrimaryColumn()
workflowId: string;
}

View File

@@ -1,11 +1,10 @@
import { Entity, JoinColumn, ManyToOne, PrimaryColumn } from 'typeorm';
-import { idStringifier } from '../utils/transformers';
import type { TagEntity } from './TagEntity';
import type { WorkflowEntity } from './WorkflowEntity';
@Entity({ name: 'workflows_tags' })
export class WorkflowTagMapping {
-@PrimaryColumn({ transformer: idStringifier })
+@PrimaryColumn()
workflowId: string;
@ManyToOne('WorkflowEntity', 'tagMappings')

View File

@@ -18,6 +18,7 @@ import { WorkflowEntity } from './WorkflowEntity';
import { WorkflowTagMapping } from './WorkflowTagMapping';
import { WorkflowStatistics } from './WorkflowStatistics';
import { ExecutionMetadata } from './ExecutionMetadata';
+import { ExecutionData } from './ExecutionData';
export const entities = {
AuthIdentity,
@@ -39,4 +40,5 @@ export const entities = {
WorkflowTagMapping,
WorkflowStatistics,
ExecutionMetadata,
+ExecutionData,
};

View File

@@ -0,0 +1,252 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
export class MigrateIntegerKeysToString1690000000001 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity RENAME COLUMN id to tmp_id;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity ADD COLUMN id varchar(36) NOT NULL;`,
);
await queryRunner.query(`UPDATE ${tablePrefix}workflow_entity SET id = CONVERT(tmp_id, CHAR);`);
await queryRunner.query(
`CREATE INDEX \`TMP_idx_workflow_entity_id\` ON ${tablePrefix}workflow_entity (\`id\`);`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity RENAME COLUMN id to tmp_id;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}tag_entity ADD COLUMN id varchar(36) NOT NULL;`,
);
await queryRunner.query(`UPDATE ${tablePrefix}tag_entity SET id = CONVERT(tmp_id, CHAR);`);
await queryRunner.query(
`CREATE INDEX \`TMP_idx_tag_entity_id\` ON ${tablePrefix}tag_entity (\`id\`);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags RENAME COLUMN \`workflowId\` to \`tmp_workflowId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ADD COLUMN \`workflowId\` varchar(36) NOT NULL;`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}workflows_tags SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags RENAME COLUMN \`tagId\` to \`tmp_tagId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ADD COLUMN \`tagId\` varchar(36) NOT NULL;`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}workflows_tags SET \`tagId\` = CONVERT(\`tmp_tagId\`, CHAR);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`workflowId\`, \`tagId\`);`,
);
await queryRunner.query(
`CREATE INDEX \`idx_workflows_tags_workflowid\` ON ${tablePrefix}workflows_tags (\`workflowId\`);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags DROP FOREIGN KEY \`FK_54b2f0343d6a2078fa137443869\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT \`fk_workflows_tags_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags DROP FOREIGN KEY \`FK_77505b341625b0b4768082e2171\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT \`fk_workflows_tags_tag_id\` FOREIGN KEY (\`tagId\`) REFERENCES tag_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags DROP COLUMN \`tmp_workflowId\`;`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags DROP COLUMN \`tmp_tagId\`;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow RENAME COLUMN \`workflowId\` to \`tmp_workflowId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow ADD COLUMN \`workflowId\` varchar(36) NOT NULL;`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}shared_workflow SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`userId\`, \`workflowId\`);`,
);
await queryRunner.query(
`CREATE INDEX \`idx_shared_workflow_workflow_id\` ON ${tablePrefix}shared_workflow (\`workflowId\`);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow DROP FOREIGN KEY \`FK_b83f8d2530884b66a9c848c8b88\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow ADD CONSTRAINT \`fk_shared_workflow_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow DROP COLUMN \`tmp_workflowId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics RENAME COLUMN \`workflowId\` to \`tmp_workflowId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics ADD COLUMN \`workflowId\` varchar(36) NOT NULL;`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}workflow_statistics SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`,
);
await queryRunner.query(
`CREATE INDEX \`idx_workflow_statistics_workflow_id\` ON ${tablePrefix}workflow_statistics (\`workflowId\`);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics DROP FOREIGN KEY \`workflow_statistics_ibfk_1\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics ADD CONSTRAINT \`fk_workflow_statistics_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`workflowId\`, \`name\`);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics DROP COLUMN \`tmp_workflowId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}webhook_entity RENAME COLUMN \`workflowId\` to \`tmp_workflowId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}webhook_entity ADD COLUMN \`workflowId\` varchar(36) NOT NULL;`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}webhook_entity SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN \`tmp_workflowId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}webhook_entity ADD CONSTRAINT \`fk_webhook_entity_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity RENAME COLUMN \`workflowId\` to \`tmp_workflowId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity ADD COLUMN \`workflowId\` varchar(36);`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}execution_entity SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`,
);
await queryRunner.query(
`CREATE INDEX \`idx_execution_entity_workflow_id_id\` ON ${tablePrefix}execution_entity (\`workflowId\`,\`id\`);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity DROP FOREIGN KEY \`FK_execution_entity_workflowId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity ADD CONSTRAINT \`fk_execution_entity_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`DROP INDEX \`IDX_81fc04c8a17de15835713505e4\` ON ${tablePrefix}execution_entity;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity DROP COLUMN \`tmp_workflowId\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity MODIFY COLUMN tmp_id INT NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`id\`);`,
);
await queryRunner.query(
`DROP INDEX \`TMP_idx_workflow_entity_id\` ON ${tablePrefix}workflow_entity;`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity DROP COLUMN tmp_id;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}tag_entity MODIFY COLUMN tmp_id INT NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}tag_entity DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`id\`);`,
);
await queryRunner.query(`DROP INDEX \`TMP_idx_tag_entity_id\` ON ${tablePrefix}tag_entity;`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity DROP COLUMN tmp_id;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}credentials_entity RENAME COLUMN id to tmp_id;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}credentials_entity ADD COLUMN id varchar(36) NOT NULL;`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}credentials_entity SET id = CONVERT(tmp_id, CHAR);`,
);
await queryRunner.query(
`CREATE INDEX \`TMP_idx_credentials_entity_id\` ON ${tablePrefix}credentials_entity (\`id\`);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials RENAME COLUMN credentialsId to tmp_credentialsId;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials ADD COLUMN credentialsId varchar(36) NOT NULL;`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}shared_credentials SET credentialsId = CONVERT(tmp_credentialsId, CHAR);`,
);
await queryRunner.query(
`CREATE INDEX \`idx_shared_credentials_id\` ON ${tablePrefix}shared_credentials (\`credentialsId\`);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials DROP FOREIGN KEY \`FK_68661def1d4bcf2451ac8dbd949\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials ADD CONSTRAINT \`fk_shared_credentials_credentials_id\` FOREIGN KEY (\`credentialsId\`) REFERENCES credentials_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials MODIFY COLUMN tmp_credentialsId INT NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`userId\`,\`credentialsId\`);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials DROP COLUMN tmp_credentialsId;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}credentials_entity MODIFY COLUMN tmp_id INT NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}credentials_entity DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`id\`);`,
);
await queryRunner.query(
`DROP INDEX \`TMP_idx_credentials_entity_id\` ON ${tablePrefix}credentials_entity;`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity DROP COLUMN tmp_id;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}variables RENAME COLUMN \`id\` to \`tmp_id\`;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}variables ADD COLUMN \`id\` varchar(36) NOT NULL;`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}variables SET \`id\` = CONVERT(\`tmp_id\`, CHAR);`,
);
await queryRunner.query(
`CREATE INDEX \`TMP_idx_variables_id\` ON ${tablePrefix}variables (\`id\`);`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}variables CHANGE \`tmp_id\` \`tmp_id\` int NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}variables DROP PRIMARY KEY, ADD PRIMARY KEY (\`id\`);`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}variables DROP COLUMN \`tmp_id\`;`);
}
// eslint-disable-next-line @typescript-eslint/no-empty-function, @typescript-eslint/no-unused-vars
async down({ queryRunner, tablePrefix }: MigrationContext) {}
}

View File
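The migration above applies one column-swap recipe per table; stripped of the index, primary-key and foreign-key bookkeeping, the MySQL recipe is (sketch; `some_table` is a placeholder name):

// 1. Park the integer column under a temporary name.
await queryRunner.query(`ALTER TABLE ${tablePrefix}some_table RENAME COLUMN id to tmp_id;`);
// 2. Add the string column and copy the old values over as text.
await queryRunner.query(`ALTER TABLE ${tablePrefix}some_table ADD COLUMN id varchar(36) NOT NULL;`);
await queryRunner.query(`UPDATE ${tablePrefix}some_table SET id = CONVERT(tmp_id, CHAR);`);
// 3. Re-point indexes, the primary key and any foreign keys at the new column.
// 4. Drop the parked integer column.
await queryRunner.query(`ALTER TABLE ${tablePrefix}some_table DROP COLUMN tmp_id;`);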

@@ -0,0 +1,43 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
export class SeparateExecutionData1690000000030 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
await queryRunner.query(
`CREATE TABLE ${tablePrefix}execution_data (
executionId int(11) NOT NULL primary key,
workflowData json NOT NULL,
data TEXT NOT NULL,
CONSTRAINT \`${tablePrefix}execution_data_FK\` FOREIGN KEY (\`executionId\`) REFERENCES \`${tablePrefix}execution_entity\` (\`id\`) ON DELETE CASCADE
)
ENGINE=InnoDB`,
);
await queryRunner.query(
`INSERT INTO ${tablePrefix}execution_data (
executionId,
workflowData,
data)
SELECT id, workflowData, data FROM ${tablePrefix}execution_entity
`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity DROP COLUMN workflowData, DROP COLUMN data`,
);
}
async down({ queryRunner, tablePrefix }: MigrationContext) {
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity
ADD workflowData json NULL,
ADD data text NULL`,
);
// MySQL uses multi-table UPDATE ... JOIN syntax; UPDATE ... FROM is Postgres-only.
await queryRunner.query(
`UPDATE ${tablePrefix}execution_entity
INNER JOIN ${tablePrefix}execution_data ON ${tablePrefix}execution_data.executionId = ${tablePrefix}execution_entity.id
SET ${tablePrefix}execution_entity.workflowData = ${tablePrefix}execution_data.workflowData,
${tablePrefix}execution_entity.data = ${tablePrefix}execution_data.data`,
);
await queryRunner.query(`DROP TABLE ${tablePrefix}execution_data`);
}
}

View File

@@ -38,6 +38,8 @@ import { UpdateRunningExecutionStatus1677236788851 } from './1677236788851-Updat
import { CreateExecutionMetadataTable1679416281779 } from './1679416281779-CreateExecutionMetadataTable';
import { CreateVariables1677501636753 } from './1677501636753-CreateVariables';
import { AddUserActivatedProperty1681134145996 } from './1681134145996-AddUserActivatedProperty';
+import { MigrateIntegerKeysToString1690000000001 } from './1690000000001-MigrateIntegerKeysToString';
+import { SeparateExecutionData1690000000030 } from './1690000000030-SeparateExecutionData';
export const mysqlMigrations: Migration[] = [
InitialMigration1588157391238,
@@ -79,4 +81,6 @@ export const mysqlMigrations: Migration[] = [
CreateExecutionMetadataTable1679416281779,
CreateVariables1677501636753,
AddUserActivatedProperty1681134145996,
+MigrateIntegerKeysToString1690000000001,
+SeparateExecutionData1690000000030,
];

View File

@@ -0,0 +1,262 @@
/* eslint-disable n8n-local-rules/no-unneeded-backticks */
import type { MigrationContext, ReversibleMigration } from '@db/types';
export class MigrateIntegerKeysToString1690000000000 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity RENAME COLUMN id to tmp_id;`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ADD COLUMN id varchar(36);`);
await queryRunner.query(`UPDATE ${tablePrefix}workflow_entity SET id = tmp_id::text;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN id SET NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN tmp_id DROP DEFAULT;`,
);
await queryRunner.query(`DROP SEQUENCE IF EXISTS ${tablePrefix}workflow_entity_id_seq;`);
await queryRunner.query(
`CREATE UNIQUE INDEX "pk_workflow_entity_id" ON ${tablePrefix}workflow_entity ("id");`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity RENAME COLUMN id to tmp_id;`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ADD COLUMN id varchar(36);`);
await queryRunner.query(`UPDATE ${tablePrefix}tag_entity SET id = tmp_id::text;`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN id SET NOT NULL;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN tmp_id DROP DEFAULT;`,
);
await queryRunner.query(`DROP SEQUENCE IF EXISTS ${tablePrefix}tag_entity_id_seq;`);
await queryRunner.query(
`CREATE UNIQUE INDEX "pk_tag_entity_id" ON ${tablePrefix}tag_entity ("id");`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags RENAME COLUMN "workflowId" to "tmp_workflowId";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ADD COLUMN "workflowId" varchar(36);`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}workflows_tags SET "workflowId" = "tmp_workflowId"::text;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ALTER COLUMN "workflowId" SET NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags RENAME COLUMN "tagId" to "tmp_tagId";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ADD COLUMN "tagId" varchar(36);`,
);
await queryRunner.query(`UPDATE ${tablePrefix}workflows_tags SET "tagId" = "tmp_tagId"::text;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ALTER COLUMN "tagId" SET NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT IF EXISTS "FK_31140eb41f019805b40d0087449";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT IF EXISTS "FK_5e29bfe9e22c5d6567f509d4a46";`,
);
await queryRunner.query(
`CREATE UNIQUE INDEX "pk_workflows_tags" ON ${tablePrefix}workflows_tags ("workflowId","tagId");`,
);
await queryRunner.query(`DROP INDEX IF EXISTS "idx_31140eb41f019805b40d008744";`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT "PK_a60448a90e51a114e95e2a125b3",
ADD CONSTRAINT "pk_workflows_tags" PRIMARY KEY USING INDEX "pk_workflows_tags";`);
await queryRunner.query(
`CREATE INDEX "idx_workflows_tags_workflow_id" ON ${tablePrefix}workflows_tags ("workflowId");`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT "fk_workflows_tags_workflow_id" FOREIGN KEY ("workflowId") REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT "fk_workflows_tags_tag_id" FOREIGN KEY ("tagId") REFERENCES tag_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags DROP COLUMN "tmp_workflowId";`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags DROP COLUMN "tmp_tagId";`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow RENAME COLUMN "workflowId" to "tmp_workflowId";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow ADD COLUMN "workflowId" varchar(36);`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}shared_workflow SET "workflowId" = "tmp_workflowId"::text;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow ALTER COLUMN "workflowId" SET NOT NULL;`,
);
await queryRunner.query(
`CREATE UNIQUE INDEX "pk_shared_workflow_id" ON ${tablePrefix}shared_workflow ("userId","workflowId");`,
);
await queryRunner.query(`DROP INDEX IF EXISTS "IDX_65a0933c0f19d278881653bf81d35064";`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}shared_workflow DROP CONSTRAINT "PK_cc5d5a71c7b2591f5154ffb0c785e85e",
ADD CONSTRAINT "pk_shared_workflow_id" PRIMARY KEY USING INDEX "pk_shared_workflow_id";`);
await queryRunner.query(
`CREATE INDEX "idx_shared_workflow_workflow_id" ON ${tablePrefix}shared_workflow ("workflowId");`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow ADD CONSTRAINT "fk_shared_workflow_workflow_id" FOREIGN KEY ("workflowId") REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_workflow DROP COLUMN "tmp_workflowId";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics RENAME COLUMN "workflowId" to "tmp_workflowId";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics ADD COLUMN "workflowId" varchar(36);`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}workflow_statistics SET "workflowId" = "tmp_workflowId"::text;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics ALTER COLUMN "workflowId" SET NOT NULL;`,
);
await queryRunner.query(
`CREATE UNIQUE INDEX "pk_workflow_statistics" ON ${tablePrefix}workflow_statistics ("workflowId","name");`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_statistics DROP CONSTRAINT IF EXISTS "workflow_statistics_pkey",
ADD CONSTRAINT "pk_workflow_statistics" PRIMARY KEY USING INDEX "pk_workflow_statistics";`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics DROP COLUMN "tmp_workflowId";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_statistics ADD CONSTRAINT "fk_workflow_statistics_workflow_id" FOREIGN KEY ("workflowId") REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}webhook_entity RENAME COLUMN "workflowId" to "tmp_workflowId";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}webhook_entity ADD COLUMN "workflowId" varchar(36);`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}webhook_entity SET "workflowId" = "tmp_workflowId"::text;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}webhook_entity ALTER COLUMN "workflowId" SET NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "tmp_workflowId";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}webhook_entity ADD CONSTRAINT "fk_webhook_entity_workflow_id" FOREIGN KEY ("workflowId") REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity RENAME COLUMN "workflowId" to "tmp_workflowId";`,
);
// -- Intentionally NOT setting the column to NOT NULL
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity ADD COLUMN "workflowId" varchar(36);`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}execution_entity SET "workflowId" = "tmp_workflowId"::text;`,
);
await queryRunner.query(`DROP INDEX IF EXISTS "IDX_d160d4771aba5a0d78943edbe3";`);
await queryRunner.query(`DROP INDEX IF EXISTS "IDX_4f474ac92be81610439aaad61e";`);
await queryRunner.query(`DROP INDEX IF EXISTS "IDX_58154df94c686818c99fb754ce";`);
// -- index idx_33228da131bb1112247cf52a42 is a duplicate of IDX_33228da131bb1112247cf52a42
await queryRunner.query(`DROP INDEX IF EXISTS "idx_33228da131bb1112247cf52a42";`);
await queryRunner.query(
`CREATE INDEX "idx_execution_entity_workflow_id_id" ON ${tablePrefix}execution_entity ("workflowId","id");`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity DROP COLUMN "tmp_workflowId";`,
);
// -- FK was missing in prev schema - should it be added?
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity ADD CONSTRAINT "fk_execution_entity_workflow_id" FOREIGN KEY ("workflowId") REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity DROP CONSTRAINT IF EXISTS "pk_eded7d72664448da7745d551207";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}tag_entity DROP CONSTRAINT IF EXISTS "PK_7a50a9b74ae6855c0dcaee25052";`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity DROP COLUMN tmp_id;`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity DROP COLUMN tmp_id;`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ADD PRIMARY KEY (id);`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ADD PRIMARY KEY (id);`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}credentials_entity RENAME COLUMN id to tmp_id;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}credentials_entity ADD COLUMN id varchar(36);`,
);
await queryRunner.query(`UPDATE ${tablePrefix}credentials_entity SET id = tmp_id::text;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN id SET NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN tmp_id DROP DEFAULT;`,
);
await queryRunner.query(`DROP SEQUENCE IF EXISTS ${tablePrefix}credentials_entity_id_seq;`);
await queryRunner.query(
`CREATE UNIQUE INDEX "pk_credentials_entity_id" ON ${tablePrefix}credentials_entity ("id");`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials RENAME COLUMN "credentialsId" to "tmp_credentialsId";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials ADD COLUMN "credentialsId" varchar(36);`,
);
await queryRunner.query(
`UPDATE ${tablePrefix}shared_credentials SET "credentialsId" = "tmp_credentialsId"::text;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials ALTER COLUMN "credentialsId" SET NOT NULL;`,
);
await queryRunner.query(
`CREATE UNIQUE INDEX "pk_shared_credentials_id" ON ${tablePrefix}shared_credentials ("userId","credentialsId");`,
);
await queryRunner.query(`DROP INDEX IF EXISTS "IDX_829d16efa0e265cb076d50eca8d21733";`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}shared_credentials DROP CONSTRAINT "PK_10dd1527ffb639609be7aadd98f628c6",
ADD CONSTRAINT "pk_shared_credentials_id" PRIMARY KEY USING INDEX "pk_shared_credentials_id";`);
await queryRunner.query(
`CREATE INDEX "idx_shared_credentials_credentials_id" ON ${tablePrefix}shared_credentials ("credentialsId");`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials ADD CONSTRAINT "fk_shared_credentials_credentials_id" FOREIGN KEY ("credentialsId") REFERENCES credentials_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}shared_credentials DROP COLUMN "tmp_credentialsId";`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}credentials_entity DROP CONSTRAINT IF EXISTS "pk_814c3d3c36e8a27fa8edb761b0e";`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity DROP COLUMN tmp_id;`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ADD PRIMARY KEY (id);`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}variables RENAME COLUMN id to tmp_id;`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}variables ADD COLUMN id varchar(36);`);
await queryRunner.query(`UPDATE ${tablePrefix}variables SET id = tmp_id::text;`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}variables ALTER COLUMN id SET NOT NULL;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}variables ALTER COLUMN tmp_id DROP DEFAULT;`,
);
await queryRunner.query(`DROP SEQUENCE IF EXISTS ${tablePrefix}variables_id_seq;`);
await queryRunner.query(
`CREATE UNIQUE INDEX "pk_variables_id" ON ${tablePrefix}variables ("id");`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}variables DROP CONSTRAINT IF EXISTS "variables_pkey";`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}variables DROP COLUMN tmp_id;`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}variables ADD PRIMARY KEY (id);`);
}
// eslint-disable-next-line @typescript-eslint/no-empty-function, @typescript-eslint/no-unused-vars
async down({ queryRunner, tablePrefix }: MigrationContext) {}
}

View File
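One Postgres-specific idiom above is worth calling out: composite primary keys are rebuilt by first creating a unique index while the old key is still in place, then promoting it in a single ALTER via PRIMARY KEY USING INDEX, so the table never lacks a primary key. Isolated (sketch; placeholder names):

await queryRunner.query(
`CREATE UNIQUE INDEX "pk_new" ON ${tablePrefix}some_table ("colA","colB");`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}some_table DROP CONSTRAINT "pk_old",
ADD CONSTRAINT "pk_new" PRIMARY KEY USING INDEX "pk_new";`);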

@@ -0,0 +1,42 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
export class SeparateExecutionData1690000000020 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
await queryRunner.query(
`CREATE TABLE "${tablePrefix}execution_data" (
"executionId" integer NOT NULL,
"workflowData" json NOT NULL,
"data" text NOT NULL,
CONSTRAINT "${tablePrefix}execution_data_fk" FOREIGN KEY ("executionId") REFERENCES ${tablePrefix}execution_entity(id) ON DELETE CASCADE
)`,
);
await queryRunner.query(
`INSERT INTO "${tablePrefix}execution_data" (
"executionId",
"workflowData",
"data")
SELECT "id", "workflowData", "data" FROM "${tablePrefix}execution_entity"
`,
);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}execution_entity" DROP COLUMN "workflowData", DROP COLUMN "data"`,
);
}
async down({ queryRunner, tablePrefix }: MigrationContext) {
await queryRunner.query(
`ALTER TABLE "${tablePrefix}execution_entity"
ADD "workflowData" json NULL,
ADD "data" text NULL`,
);
await queryRunner.query(
`UPDATE "${tablePrefix}execution_entity" SET "workflowData" = "execution_data"."workflowData", "data" = "execution_data"."data"
FROM "${tablePrefix}execution_data" WHERE "${tablePrefix}execution_data"."executionId" = "${tablePrefix}execution_entity"."id"`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}execution_data"`);
}
}

View File

@@ -36,6 +36,8 @@ import { UpdateRunningExecutionStatus1677236854063 } from './1677236854063-Updat
import { CreateExecutionMetadataTable1679416281778 } from './1679416281778-CreateExecutionMetadataTable';
import { CreateVariables1677501636754 } from './1677501636754-CreateVariables';
import { AddUserActivatedProperty1681134145996 } from './1681134145996-AddUserActivatedProperty';
+import { MigrateIntegerKeysToString1690000000000 } from './1690000000000-MigrateIntegerKeysToString';
+import { SeparateExecutionData1690000000020 } from './1690000000020-SeparateExecutionData';
export const postgresMigrations: Migration[] = [
InitialMigration1587669153312,
@@ -75,4 +77,6 @@ export const postgresMigrations: Migration[] = [
CreateExecutionMetadataTable1679416281778,
CreateVariables1677501636754,
AddUserActivatedProperty1681134145996,
+MigrateIntegerKeysToString1690000000000,
+SeparateExecutionData1690000000020,
];

View File

@@ -0,0 +1,185 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
export class MigrateIntegerKeysToString1690000000002 implements ReversibleMigration {
transaction = false as const;
async up({ queryRunner, tablePrefix }: MigrationContext) {
await queryRunner.query('PRAGMA foreign_keys=OFF');
await queryRunner.startTransaction();
await queryRunner.query(`
CREATE TABLE "${tablePrefix}TMP_workflow_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text, "connections" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "settings" text, "staticData" text, "pinData" text, "versionId" varchar(36), "triggerCount" integer NOT NULL DEFAULT 0);`);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_workflow_entity" SELECT * FROM "${tablePrefix}workflow_entity";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_entity";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_workflow_entity" RENAME TO "${tablePrefix}workflow_entity";`,
);
await queryRunner.query(`
CREATE TABLE "${tablePrefix}TMP_tag_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(24) NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')));`);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_tag_entity" SELECT * FROM "${tablePrefix}tag_entity";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}tag_entity";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_tag_entity" RENAME TO "${tablePrefix}tag_entity";`,
);
await queryRunner.query(`
CREATE TABLE "${tablePrefix}TMP_workflows_tags" ("workflowId" varchar(36) NOT NULL, "tagId" integer NOT NULL, CONSTRAINT "FK_workflows_tags_workflow_entity" FOREIGN KEY ("workflowId") REFERENCES "workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, CONSTRAINT "FK_workflows_tags_tag_entity" FOREIGN KEY ("tagId") REFERENCES "${tablePrefix}tag_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("workflowId", "tagId"));`);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_workflows_tags" SELECT * FROM "${tablePrefix}workflows_tags";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}workflows_tags";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_workflows_tags" RENAME TO "${tablePrefix}workflows_tags";`,
);
await queryRunner.query(
`CREATE INDEX "idx_workflows_tags_tag_id" ON "${tablePrefix}workflows_tags" ("tagId");`,
);
await queryRunner.query(
`CREATE INDEX "idx_workflows_tags_workflow_id" ON "${tablePrefix}workflows_tags" ("workflowId");`,
);
await queryRunner.query(`CREATE TABLE "${tablePrefix}TMP_workflow_statistics" (
"count" INTEGER DEFAULT 0,
"latestEvent" DATETIME,
"name" VARCHAR(128) NOT NULL,
"workflowId" VARCHAR(36),
PRIMARY KEY("workflowId", "name"),
FOREIGN KEY("workflowId") REFERENCES "${tablePrefix}workflow_entity"("id") ON DELETE CASCADE
);`);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_workflow_statistics" SELECT * FROM "${tablePrefix}workflow_statistics";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_statistics";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_workflow_statistics" RENAME TO "${tablePrefix}workflow_statistics";`,
);
await queryRunner.query(
`CREATE TABLE "${tablePrefix}TMP_shared_workflow" (
"createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
"updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
"roleId" integer NOT NULL, "userId" varchar NOT NULL,
"workflowId" VARCHAR(36) NOT NULL,
CONSTRAINT "FK_shared_workflow_role" FOREIGN KEY ("roleId") REFERENCES "role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION,
CONSTRAINT "FK_shared_workflow_user" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE CASCADE ON UPDATE NO ACTION,
CONSTRAINT "FK_shared_workflow_workflow_entity" FOREIGN KEY ("workflowId") REFERENCES "workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION,
PRIMARY KEY ("userId", "workflowId"));`,
);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_shared_workflow" SELECT * FROM "${tablePrefix}shared_workflow";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}shared_workflow";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_shared_workflow" RENAME TO "${tablePrefix}shared_workflow";`,
);
await queryRunner.query(
`CREATE INDEX "idx_shared_workflow_workflow_id" ON "${tablePrefix}shared_workflow" ("workflowId");`,
);
await queryRunner.query(
`CREATE TABLE "${tablePrefix}TMP_webhook_entity" ("workflowId" varchar(36) NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, "webhookId" varchar, "pathLength" integer, PRIMARY KEY ("webhookPath", "method"));`,
);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_webhook_entity" SELECT * FROM "${tablePrefix}webhook_entity";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}webhook_entity";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_webhook_entity" RENAME TO "${tablePrefix}webhook_entity";`,
);
await queryRunner.query(
`CREATE INDEX "idx_webhook_entity_webhook_path_method" ON "${tablePrefix}webhook_entity" ("webhookId","method","pathLength");`,
);
await queryRunner.query(`CREATE TABLE "${tablePrefix}TMP_execution_entity" (
"id" integer PRIMARY KEY AUTOINCREMENT NOT NULL,
"workflowId" varchar(36),
"finished" boolean NOT NULL,
"mode" varchar NOT NULL,
"retryOf" varchar,
"retrySuccessId" varchar,
"startedAt" datetime NOT NULL,
"stoppedAt" datetime,
"waitTill" datetime,
"workflowData" text NOT NULL,
"data" text NOT NULL, "status" varchar,
FOREIGN KEY("workflowId") REFERENCES "workflow_entity" ("id") ON DELETE CASCADE
);`);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_execution_entity" SELECT * FROM "${tablePrefix}execution_entity";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_execution_entity" RENAME TO "${tablePrefix}execution_entity";`,
);
await queryRunner.query(
`CREATE INDEX "idx_execution_entity_stopped_at" ON "${tablePrefix}execution_entity" ("stoppedAt");`,
);
await queryRunner.query(
`CREATE INDEX "idx_execution_entity_wait_till" ON "${tablePrefix}execution_entity" ("waitTill");`,
);
await queryRunner.query(
`CREATE TABLE "${tablePrefix}TMP_credentials_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(32) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')));`,
);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_credentials_entity" SELECT * FROM "${tablePrefix}credentials_entity";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}credentials_entity";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_credentials_entity" RENAME TO "${tablePrefix}credentials_entity";`,
);
await queryRunner.query(
`CREATE INDEX "idx_credentials_entity_type" ON "${tablePrefix}credentials_entity" ("type");`,
);
await queryRunner.query(
`CREATE TABLE "${tablePrefix}TMP_shared_credentials" ("createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
"updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
"roleId" integer NOT NULL,
"userId" varchar NOT NULL, "credentialsId" varchar(36) NOT NULL,
CONSTRAINT "FK_shared_credentials_role" FOREIGN KEY ("roleId") REFERENCES "role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION,
CONSTRAINT "FK_shared_credentials_user" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE CASCADE ON UPDATE NO ACTION,
CONSTRAINT "FK_shared_credentials_credentials" FOREIGN KEY ("credentialsId") REFERENCES "${tablePrefix}credentials_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("userId", "credentialsId"));`,
);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_shared_credentials" SELECT * FROM "${tablePrefix}shared_credentials";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}shared_credentials";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_shared_credentials" RENAME TO "${tablePrefix}shared_credentials";`,
);
await queryRunner.query(
`CREATE INDEX "idx_shared_credentials_credentials" ON "${tablePrefix}shared_credentials" ("credentialsId");`,
);
await queryRunner.query(
`CREATE UNIQUE INDEX "idx_shared_credentials_user_credentials" ON "${tablePrefix}shared_credentials" ("userId","credentialsId");`,
);
await queryRunner.query(`CREATE TABLE "${tablePrefix}TMP_variables" (
id varchar(36) PRIMARY KEY NOT NULL,
"key" TEXT NOT NULL,
"type" TEXT NOT NULL DEFAULT ('string'),
value TEXT,
UNIQUE("key")
);`);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_variables" SELECT * FROM "${tablePrefix}variables";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}variables";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_variables" RENAME TO "${tablePrefix}variables";`,
);
await queryRunner.query(
`CREATE UNIQUE INDEX "idx_variables_key" ON "${tablePrefix}variables" ("key");`,
);
await queryRunner.commitTransaction();
await queryRunner.query('PRAGMA foreign_keys=ON');
}
// eslint-disable-next-line @typescript-eslint/no-empty-function, @typescript-eslint/no-unused-vars
async down({ queryRunner, tablePrefix }: MigrationContext) {}
}

View File
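SQLite cannot change a column's type in place, so the migration above rebuilds every table: create a TMP_ copy with the new schema, bulk-copy the rows, drop the original, rename. It also explains `transaction = false` on the class: PRAGMA foreign_keys is a no-op inside a transaction, so the pragma is toggled outside while the migration manages its own transaction. The skeleton (sketch; `some_table` is a placeholder name):

await queryRunner.query('PRAGMA foreign_keys=OFF'); // must run outside any transaction
await queryRunner.startTransaction();
await queryRunner.query(`CREATE TABLE "${tablePrefix}TMP_some_table" (/* new schema */);`);
await queryRunner.query(`INSERT INTO "${tablePrefix}TMP_some_table" SELECT * FROM "${tablePrefix}some_table";`);
await queryRunner.query(`DROP TABLE "${tablePrefix}some_table";`);
await queryRunner.query(`ALTER TABLE "${tablePrefix}TMP_some_table" RENAME TO "${tablePrefix}some_table";`);
await queryRunner.commitTransaction();
await queryRunner.query('PRAGMA foreign_keys=ON');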

@@ -0,0 +1,46 @@
import type { MigrationContext, ReversibleMigration } from '@/databases/types';
export class SeparateExecutionData1690000000010 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext): Promise<void> {
await queryRunner.query(
`CREATE TABLE "${tablePrefix}execution_data" (
"executionId" int PRIMARY KEY NOT NULL,
"workflowData" text NOT NULL,
"data" text NOT NULL,
FOREIGN KEY("executionId") REFERENCES "${tablePrefix}execution_entity" ("id") ON DELETE CASCADE
)`,
);
await queryRunner.query(
`INSERT INTO "${tablePrefix}execution_data" (
"executionId",
"workflowData",
"data")
SELECT "id", "workflowData", "data" FROM "${tablePrefix}execution_entity"
`,
);
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}execution_entity\` DROP COLUMN "workflowData"`,
);
await queryRunner.query(`ALTER TABLE \`${tablePrefix}execution_entity\` DROP COLUMN "data"`);
}
async down({ queryRunner, tablePrefix }: MigrationContext): Promise<void> {
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}execution_entity\` ADD COLUMN "workflowData" text NULL`,
);
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}execution_entity\` ADD COLUMN "data" text NULL`,
);
await queryRunner.query(
`UPDATE "${tablePrefix}execution_entity" SET "workflowData" = (SELECT "workflowData" FROM "${tablePrefix}execution_data" WHERE "${tablePrefix}execution_data"."executionId" = "${tablePrefix}execution_entity"."id")`,
);
await queryRunner.query(
`UPDATE "${tablePrefix}execution_entity" SET "data" = (SELECT "data" FROM "${tablePrefix}execution_data" WHERE "${tablePrefix}execution_data"."executionId" = "${tablePrefix}execution_entity"."id")`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}execution_data"`);
}
}

View File

@@ -35,6 +35,8 @@ import { UpdateRunningExecutionStatus1677237073720 } from './1677237073720-Updat
import { CreateExecutionMetadataTable1679416281777 } from './1679416281777-CreateExecutionMetadataTable';
import { CreateVariables1677501636752 } from './1677501636752-CreateVariables';
import { AddUserActivatedProperty1681134145996 } from './1681134145996-AddUserActivatedProperty';
+import { MigrateIntegerKeysToString1690000000002 } from './1690000000002-MigrateIntegerKeysToString';
+import { SeparateExecutionData1690000000010 } from './1690000000010-SeparateExecutionData';
const sqliteMigrations: Migration[] = [
InitialMigration1588102412422,
@@ -73,6 +75,8 @@ const sqliteMigrations: Migration[] = [
CreateVariables1677501636752,
CreateExecutionMetadataTable1679416281777,
AddUserActivatedProperty1681134145996,
+MigrateIntegerKeysToString1690000000002,
+SeparateExecutionData1690000000010,
];
export { sqliteMigrations };

View File

@@ -1,10 +1,408 @@
import { Service } from 'typedi';
-import { DataSource, Repository } from 'typeorm';
+import { DataSource, In, LessThanOrEqual, MoreThanOrEqual, Repository } from 'typeorm';
import type {
FindManyOptions,
FindOneOptions,
FindOptionsWhere,
SelectQueryBuilder,
} from 'typeorm';
import { ExecutionEntity } from '../entities/ExecutionEntity';
import { parse, stringify } from 'flatted';
import type {
IExecutionBase,
IExecutionDb,
IExecutionFlattedDb,
IExecutionResponse,
} from '@/Interfaces';
import { LoggerProxy } from 'n8n-workflow';
import type { IExecutionsSummary, IRunExecutionData } from 'n8n-workflow';
import { ExecutionDataRepository } from './executionData.repository';
import type { ExecutionData } from '../entities/ExecutionData';
import type { IGetExecutionsQueryFilter } from '@/executions/executions.service';
import { isAdvancedExecutionFiltersEnabled } from '@/executions/executionHelpers';
import { ExecutionMetadata } from '../entities/ExecutionMetadata';
import { DateUtils } from 'typeorm/util/DateUtils';
import { BinaryDataManager } from 'n8n-core';
import config from '@/config';
function parseFiltersToQueryBuilder(
qb: SelectQueryBuilder<ExecutionEntity>,
filters?: IGetExecutionsQueryFilter,
) {
if (filters?.status) {
qb.andWhere('execution.status IN (:...workflowStatus)', {
workflowStatus: filters.status,
});
}
if (filters?.finished) {
qb.andWhere({ finished: filters.finished });
}
if (filters?.metadata && isAdvancedExecutionFiltersEnabled()) {
qb.leftJoin(ExecutionMetadata, 'md', 'md.executionId = execution.id');
for (const md of filters.metadata) {
qb.andWhere('md.key = :key AND md.value = :value', md);
}
}
if (filters?.startedAfter) {
qb.andWhere({
startedAt: MoreThanOrEqual(
DateUtils.mixedDateToUtcDatetimeString(new Date(filters.startedAfter)),
),
});
}
if (filters?.startedBefore) {
qb.andWhere({
startedAt: LessThanOrEqual(
DateUtils.mixedDateToUtcDatetimeString(new Date(filters.startedBefore)),
),
});
}
if (filters?.workflowId) {
qb.andWhere({
workflowId: filters.workflowId,
});
}
}
@Service()
export class ExecutionRepository extends Repository<ExecutionEntity> {
-constructor(dataSource: DataSource) {
+private executionDataRepository: ExecutionDataRepository;
+constructor(dataSource: DataSource, executionDataRepository: ExecutionDataRepository) {
super(ExecutionEntity, dataSource.manager);
+this.executionDataRepository = executionDataRepository;
}
async findMultipleExecutions(
queryParams: FindManyOptions<ExecutionEntity>,
options?: {
unflattenData: true;
includeData?: true;
},
): Promise<IExecutionResponse[]>;
async findMultipleExecutions(
queryParams: FindManyOptions<ExecutionEntity>,
options?: {
unflattenData?: false | undefined;
includeData?: true;
},
): Promise<IExecutionFlattedDb[]>;
async findMultipleExecutions(
queryParams: FindManyOptions<ExecutionEntity>,
options?: {
unflattenData?: boolean;
includeData?: boolean;
},
): Promise<IExecutionBase[]>;
async findMultipleExecutions(
queryParams: FindManyOptions<ExecutionEntity>,
options?: {
unflattenData?: boolean;
includeData?: boolean;
},
): Promise<IExecutionFlattedDb[] | IExecutionResponse[] | IExecutionBase[]> {
if (options?.includeData) {
if (!queryParams.relations) {
queryParams.relations = [];
}
(queryParams.relations as string[]).push('executionData');
}
const executions = await this.find(queryParams);
if (options?.includeData && options?.unflattenData) {
return executions.map((execution) => {
const { executionData, ...rest } = execution;
return {
...rest,
data: parse(executionData.data) as IRunExecutionData,
workflowData: executionData.workflowData,
} as IExecutionResponse;
});
} else if (options?.includeData) {
return executions.map((execution) => {
const { executionData, ...rest } = execution;
return {
...rest,
data: execution.executionData.data,
workflowData: execution.executionData.workflowData,
} as IExecutionFlattedDb;
});
}
return executions.map((execution) => {
const { executionData, ...rest } = execution;
return rest;
});
}
async findSingleExecution(
id: string,
options?: {
includeData: true;
unflattenData: true;
where?: FindOptionsWhere<ExecutionEntity>;
},
): Promise<IExecutionResponse | undefined>;
async findSingleExecution(
id: string,
options?: {
includeData: true;
unflattenData?: false | undefined;
where?: FindOptionsWhere<ExecutionEntity>;
},
): Promise<IExecutionFlattedDb | undefined>;
async findSingleExecution(
id: string,
options?: {
includeData?: boolean;
unflattenData?: boolean;
where?: FindOptionsWhere<ExecutionEntity>;
},
): Promise<IExecutionBase | undefined>;
async findSingleExecution(
id: string,
options?: {
includeData?: boolean;
unflattenData?: boolean;
where?: FindOptionsWhere<ExecutionEntity>;
},
): Promise<IExecutionFlattedDb | IExecutionResponse | IExecutionBase | undefined> {
const whereClause: FindOneOptions<ExecutionEntity> = {
where: {
id,
...options?.where,
},
};
if (options?.includeData) {
whereClause.relations = ['executionData'];
}
const execution = await this.findOne(whereClause);
if (!execution) {
return undefined;
}
const { executionData, ...rest } = execution;
if (options?.includeData && options?.unflattenData) {
return {
...rest,
data: parse(execution.executionData.data) as IRunExecutionData,
workflowData: execution.executionData.workflowData,
} as IExecutionResponse;
} else if (options?.includeData) {
return {
...rest,
data: execution.executionData.data,
workflowData: execution.executionData.workflowData,
} as IExecutionFlattedDb;
}
return rest;
}
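// Usage note (sketch, not part of this file): the overloads above let call sites
// pick the return shape without casting, e.g.
//   const flat = await repo.findSingleExecution(id, { includeData: true });
//   // -> IExecutionFlattedDb | undefined, data still a flatted string
//   const full = await repo.findSingleExecution(id, { includeData: true, unflattenData: true });
//   // -> IExecutionResponse | undefined, data parsed into IRunExecutionData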
async createNewExecution(execution: IExecutionDb) {
const { data, workflowData, ...rest } = execution;
const newExecution = await this.save(rest);
await this.executionDataRepository.save({
execution: newExecution,
workflowData,
data: stringify(data),
});
return newExecution;
}
async updateExistingExecution(executionId: string, execution: Partial<IExecutionResponse>) {
// We isolate startedAt because it must be set when the execution starts and should never change.
// So we prevent it from being updated if it is sent (it usually is, and overwriting it breaks
// executions that resume after waiting for some time, because a new startedAt would be set).
const { id, data, workflowData, startedAt, ...executionInformation } = execution;
if (Object.keys(executionInformation).length > 0) {
await this.update({ id: executionId }, executionInformation);
}
if (data || workflowData) {
const executionData: Partial<ExecutionData> = {};
if (workflowData) {
executionData.workflowData = workflowData;
}
if (data) {
executionData.data = stringify(data);
}
// @ts-ignore
await this.executionDataRepository.update({ executionId }, executionData);
}
}
async deleteExecution(executionId: string) {
// TODO: Should this be awaited? Should we add a catch in case it fails?
await BinaryDataManager.getInstance().deleteBinaryDataByExecutionId(executionId);
return this.delete({ id: executionId });
}
async countExecutions(
filters: IGetExecutionsQueryFilter | undefined,
accessibleWorkflowIds: string[],
currentlyRunningExecutions: string[],
isOwner: boolean,
): Promise<{ count: number; estimated: boolean }> {
const dbType = config.getEnv('database.type');
if (dbType !== 'postgresdb' || (filters && Object.keys(filters).length > 0) || !isOwner) {
const query = this.createQueryBuilder('execution').andWhere(
'execution.workflowId IN (:...accessibleWorkflowIds)',
{ accessibleWorkflowIds },
);
if (currentlyRunningExecutions.length > 0) {
query.andWhere('execution.id NOT IN (:...currentlyRunningExecutions)', {
currentlyRunningExecutions,
});
}
parseFiltersToQueryBuilder(query, filters);
const count = await query.getCount();
return { count, estimated: false };
}
try {
// Get an estimate of the row count.
const estimateRowsNumberSql =
"SELECT n_live_tup FROM pg_stat_all_tables WHERE relname = 'execution_entity';";
const rows = (await this.query(estimateRowsNumberSql)) as Array<{ n_live_tup: string }>;
const estimate = parseInt(rows[0].n_live_tup, 10);
// If over 100k rows, return just the estimate.
if (estimate > 100_000) {
return { count: estimate, estimated: true };
}
// Below 100k we fall through to the exact count, since even a full
// table scan should not take long at that size.
} catch (error) {
if (error instanceof Error) {
LoggerProxy.warn(`Failed to get executions count from Postgres: ${error.message}`, {
error,
});
}
}
const count = await this.count({
where: {
workflowId: In(accessibleWorkflowIds),
},
});
return { count, estimated: false };
}
async searchExecutions(
filters: IGetExecutionsQueryFilter | undefined,
limit: number,
excludedExecutionIds: string[],
accessibleWorkflowIds: string[],
additionalFilters?: { lastId?: string; firstId?: string },
): Promise<IExecutionsSummary[]> {
if (accessibleWorkflowIds.length === 0) {
return [];
}
const query = this.createQueryBuilder('execution')
.select([
'execution.id',
'execution.finished',
'execution.mode',
'execution.retryOf',
'execution.retrySuccessId',
'execution.status',
'execution.startedAt',
'execution.stoppedAt',
'execution.workflowId',
'execution.waitTill',
'workflow.name',
])
.innerJoin('execution.workflow', 'workflow')
.limit(limit)
// eslint-disable-next-line @typescript-eslint/naming-convention
.orderBy({ 'execution.id': 'DESC' })
.andWhere('execution.workflowId IN (:...accessibleWorkflowIds)', { accessibleWorkflowIds });
if (excludedExecutionIds.length > 0) {
query.andWhere('execution.id NOT IN (:...excludedExecutionIds)', { excludedExecutionIds });
}
if (additionalFilters?.lastId) {
query.andWhere('execution.id < :lastId', { lastId: additionalFilters.lastId });
}
if (additionalFilters?.firstId) {
query.andWhere('execution.id > :firstId', { firstId: additionalFilters.firstId });
}
parseFiltersToQueryBuilder(query, filters);
const executions = await query.getMany();
return executions.map((execution) => {
const { workflow, waitTill, ...rest } = execution;
return {
...rest,
waitTill: waitTill ?? undefined,
workflowName: workflow.name,
};
});
}
async deleteExecutions(
filters: IGetExecutionsQueryFilter | undefined,
accessibleWorkflowIds: string[],
deleteConditions: {
deleteBefore?: Date;
ids?: string[];
},
) {
if (!deleteConditions?.deleteBefore && !deleteConditions?.ids) {
throw new Error('Either "deleteBefore" or "ids" must be present in the request body');
}
const query = this.createQueryBuilder('execution')
.select(['execution.id'])
.andWhere('execution.workflowId IN (:...accessibleWorkflowIds)', { accessibleWorkflowIds });
if (deleteConditions.deleteBefore) {
// delete executions by date, if user may access the underlying workflows
query.andWhere('execution.startedAt <= :deleteBefore', {
deleteBefore: deleteConditions.deleteBefore,
});
// Filters are only used when filtering by date
parseFiltersToQueryBuilder(query, filters);
} else if (deleteConditions.ids) {
// delete executions by IDs, if user may access the underlying workflows
query.andWhere('execution.id IN (:...executionIds)', { executionIds: deleteConditions.ids });
}
const executions = await query.getMany();
if (!executions.length) {
if (deleteConditions.ids) {
LoggerProxy.error('Failed to delete an execution due to insufficient permissions', {
executionIds: deleteConditions.ids,
});
}
return;
}
const idsToDelete = executions.map(({ id }) => id);
const binaryDataManager = BinaryDataManager.getInstance();
await Promise.all(
idsToDelete.map(async (id) => binaryDataManager.deleteBinaryDataByExecutionId(id)),
);
do {
// Delete in batches to avoid "SQLITE_ERROR: Expression tree is too large (maximum depth 1000)" error
const batch = idsToDelete.splice(0, 500);
await this.delete(batch);
} while (idsToDelete.length > 0);
}
}
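A usage sketch of the overload chain above, assuming an injected repository instance (`executionRepository`) and made-up ids — the shape of the options literal picks which return type the caller gets:

// Hedged sketch only; names and ids below are illustrative.
const flattened = await executionRepository.findSingleExecution('123', {
  includeData: true, // joins the executionData relation; data stays stringified
});
// -> IExecutionFlattedDb | undefined

const unflattened = await executionRepository.findSingleExecution('123', {
  includeData: true,
  unflattenData: true, // parses executionData.data back into IRunExecutionData
});
// -> IExecutionResponse | undefined

const bare = await executionRepository.findSingleExecution('123', {
  where: { workflowId: 'abc' }, // no includeData: no join, no data on the result
});
// -> IExecutionBase | undefined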

View File

@@ -0,0 +1,10 @@
import { Service } from 'typedi';
import { DataSource, Repository } from 'typeorm';
import { ExecutionData } from '../entities/ExecutionData';
@Service()
export class ExecutionDataRepository extends Repository<ExecutionData> {
constructor(dataSource: DataSource) {
super(ExecutionData, dataSource.manager);
}
}

View File

@@ -2,6 +2,7 @@ export { AuthIdentityRepository } from './authIdentity.repository';
export { AuthProviderSyncHistoryRepository } from './authProviderSyncHistory.repository';
export { CredentialsRepository } from './credentials.repository';
export { EventDestinationsRepository } from './eventDestinations.repository';
export { ExecutionDataRepository } from './executionData.repository';
export { ExecutionMetadataRepository } from './executionMetadata.repository';
export { ExecutionRepository } from './execution.repository';
export { InstalledNodesRepository } from './installedNodes.repository';

View File

@@ -0,0 +1,6 @@
import { customAlphabet } from 'nanoid';
const nanoid = customAlphabet('0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', 16);
export function generateNanoId() {
return nanoid();
}
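A quick usage sketch — the 62-character alphanumeric alphabet keeps ids URL- and filename-safe, and 16 characters give roughly 62^16 ≈ 4.8 × 10^28 possible ids; the import path below is an assumption:

import { generateNanoId } from './utils/generators'; // hypothetical path

const workflowId = generateNanoId(); // e.g. 'aB3xZ9Qk7LmNp2Rt' — 16 chars of [0-9A-Za-z]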

View File

@@ -0,0 +1,15 @@
export const SOURCE_CONTROL_PREFERENCES_DB_KEY = 'features.sourceControl';
export const SOURCE_CONTROL_GIT_FOLDER = 'git';
export const SOURCE_CONTROL_GIT_KEY_COMMENT = 'n8n deploy key';
export const SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER = 'workflows';
export const SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER = 'credentials';
export const SOURCE_CONTROL_VARIABLES_EXPORT_FILE = 'variables.json';
export const SOURCE_CONTROL_TAGS_EXPORT_FILE = 'tags.json';
export const SOURCE_CONTROL_SSH_FOLDER = 'ssh';
export const SOURCE_CONTROL_SSH_KEY_NAME = 'key';
export const SOURCE_CONTROL_DEFAULT_BRANCH = 'main';
export const SOURCE_CONTROL_ORIGIN = 'origin';
export const SOURCE_CONTROL_API_ROOT = 'source-control';
export const SOURCE_CONTROL_README = `
# n8n Source Control
`;

View File

@@ -0,0 +1,21 @@
import type { RequestHandler } from 'express';
import { isSourceControlLicensed } from '../sourceControlHelper.ee';
import Container from 'typedi';
import { SourceControlPreferencesService } from '../sourceControlPreferences.service.ee';
export const sourceControlLicensedAndEnabledMiddleware: RequestHandler = (req, res, next) => {
const sourceControlPreferencesService = Container.get(SourceControlPreferencesService);
if (sourceControlPreferencesService.isSourceControlLicensedAndEnabled()) {
next();
} else {
res.status(401).json({ status: 'error', message: 'Unauthorized' });
}
};
export const sourceControlLicensedMiddleware: RequestHandler = (req, res, next) => {
if (isSourceControlLicensed()) {
next();
} else {
res.status(401).json({ status: 'error', message: 'Unauthorized' });
}
};
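Both guards are plain Express RequestHandlers, so they also work outside the decorator wiring; a minimal sketch, assuming a standalone Express app:

import express from 'express';
import { sourceControlLicensedMiddleware } from './middleware/sourceControlEnabledMiddleware.ee';

const app = express();
// The handler below only runs when the license check passes; otherwise the
// middleware has already replied with 401 Unauthorized.
app.get('/source-control/preferences', sourceControlLicensedMiddleware, (req, res) => {
  res.json({ status: 'ok' });
});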

View File

@@ -0,0 +1,235 @@
import { Authorized, Get, Post, Patch, RestController } from '@/decorators';
import {
sourceControlLicensedMiddleware,
sourceControlLicensedAndEnabledMiddleware,
} from './middleware/sourceControlEnabledMiddleware.ee';
import { SourceControlService } from './sourceControl.service.ee';
import { SourceControlRequest } from './types/requests';
import type { SourceControlPreferences } from './types/sourceControlPreferences';
import { BadRequestError } from '@/ResponseHelper';
import type { PullResult, PushResult, StatusResult } from 'simple-git';
import express from 'express';
import type { ImportResult } from './types/importResult';
import { SourceControlPreferencesService } from './sourceControlPreferences.service.ee';
import type { SourceControlledFile } from './types/sourceControlledFile';
import { SOURCE_CONTROL_API_ROOT, SOURCE_CONTROL_DEFAULT_BRANCH } from './constants';
@RestController(`/${SOURCE_CONTROL_API_ROOT}`)
export class SourceControlController {
constructor(
private sourceControlService: SourceControlService,
private sourceControlPreferencesService: SourceControlPreferencesService,
) {}
@Authorized('any')
@Get('/preferences', { middlewares: [sourceControlLicensedMiddleware] })
async getPreferences(): Promise<SourceControlPreferences> {
// returns the settings with the privateKey property redacted
return this.sourceControlPreferencesService.getPreferences();
}
@Authorized(['global', 'owner'])
@Post('/preferences', { middlewares: [sourceControlLicensedMiddleware] })
async setPreferences(req: SourceControlRequest.UpdatePreferences) {
if (
req.body.branchReadOnly === undefined &&
this.sourceControlPreferencesService.isSourceControlConnected()
) {
throw new BadRequestError(
'Cannot change preferences while connected to a source control provider. Please disconnect first.',
);
}
try {
const sanitizedPreferences: Partial<SourceControlPreferences> = {
...req.body,
initRepo: req.body.initRepo ?? true, // default to true if not specified
connected: undefined,
publicKey: undefined,
};
await this.sourceControlPreferencesService.validateSourceControlPreferences(
sanitizedPreferences,
);
const updatedPreferences = await this.sourceControlPreferencesService.setPreferences(
sanitizedPreferences,
);
if (sanitizedPreferences.initRepo === true) {
try {
await this.sourceControlService.initializeRepository({
...updatedPreferences,
branchName:
updatedPreferences.branchName === ''
? SOURCE_CONTROL_DEFAULT_BRANCH
: updatedPreferences.branchName,
initRepo: true,
});
if (this.sourceControlPreferencesService.getPreferences().branchName !== '') {
await this.sourceControlPreferencesService.setPreferences({
connected: true,
});
}
} catch (error) {
// if initialization fails, run cleanup to remove any intermediate state and throw the error
await this.sourceControlService.disconnect({ keepKeyPair: true });
throw error;
}
}
await this.sourceControlService.init();
return this.sourceControlPreferencesService.getPreferences();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Patch('/preferences', { middlewares: [sourceControlLicensedMiddleware] })
async updatePreferences(req: SourceControlRequest.UpdatePreferences) {
try {
const sanitizedPreferences: Partial<SourceControlPreferences> = {
...req.body,
initRepo: false,
connected: undefined,
publicKey: undefined,
repositoryUrl: undefined,
authorName: undefined,
authorEmail: undefined,
};
const currentPreferences = this.sourceControlPreferencesService.getPreferences();
await this.sourceControlPreferencesService.validateSourceControlPreferences(
sanitizedPreferences,
);
if (
sanitizedPreferences.branchName &&
sanitizedPreferences.branchName !== currentPreferences.branchName
) {
await this.sourceControlService.setBranch(sanitizedPreferences.branchName);
}
if (sanitizedPreferences.branchColor || sanitizedPreferences.branchReadOnly !== undefined) {
await this.sourceControlPreferencesService.setPreferences(
{
branchColor: sanitizedPreferences.branchColor,
branchReadOnly: sanitizedPreferences.branchReadOnly,
},
true,
);
}
await this.sourceControlService.init();
return this.sourceControlPreferencesService.getPreferences();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/disconnect', { middlewares: [sourceControlLicensedMiddleware] })
async disconnect(req: SourceControlRequest.Disconnect) {
try {
return await this.sourceControlService.disconnect(req.body);
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized('any')
@Get('/get-branches', { middlewares: [sourceControlLicensedMiddleware] })
async getBranches() {
try {
return await this.sourceControlService.getBranches();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/push-workfolder', { middlewares: [sourceControlLicensedAndEnabledMiddleware] })
async pushWorkfolder(
req: SourceControlRequest.PushWorkFolder,
res: express.Response,
): Promise<PushResult | SourceControlledFile[]> {
if (this.sourceControlPreferencesService.isBranchReadOnly()) {
throw new BadRequestError('Cannot push onto read-only branch.');
}
try {
const result = await this.sourceControlService.pushWorkfolder(req.body);
if ((result as PushResult).pushed) {
res.statusCode = 200;
} else {
res.statusCode = 409;
}
return result;
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/pull-workfolder', { middlewares: [sourceControlLicensedAndEnabledMiddleware] })
async pullWorkfolder(
req: SourceControlRequest.PullWorkFolder,
res: express.Response,
): Promise<SourceControlledFile[] | ImportResult | PullResult | StatusResult | undefined> {
try {
const result = await this.sourceControlService.pullWorkfolder({
force: req.body.force,
variables: req.body.variables,
userId: req.user.id,
importAfterPull: req.body.importAfterPull ?? true,
});
if ((result as ImportResult)?.workflows) {
res.statusCode = 200;
} else {
res.statusCode = 409;
}
return result;
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Get('/reset-workfolder', { middlewares: [sourceControlLicensedAndEnabledMiddleware] })
async resetWorkfolder(
req: SourceControlRequest.PullWorkFolder,
): Promise<ImportResult | undefined> {
try {
return await this.sourceControlService.resetWorkfolder({
force: req.body.force,
variables: req.body.variables,
userId: req.user.id,
importAfterPull: req.body.importAfterPull ?? true,
});
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized('any')
@Get('/get-status', { middlewares: [sourceControlLicensedAndEnabledMiddleware] })
async getStatus() {
try {
return await this.sourceControlService.getStatus();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized('any')
@Get('/status', { middlewares: [sourceControlLicensedMiddleware] })
async status(): Promise<StatusResult> {
try {
return await this.sourceControlService.status();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/generate-key-pair', { middlewares: [sourceControlLicensedMiddleware] })
async generateKeyPair(): Promise<SourceControlPreferences> {
try {
const result = await this.sourceControlPreferencesService.generateAndSaveKeyPair();
return result;
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
}
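A hedged client-side sketch of the connect flow against this controller — the `/rest` prefix and the exact payload are assumptions, and validation happens server-side in validateSourceControlPreferences:

const res = await fetch('/rest/source-control/preferences', { // '/rest' prefix is an assumption
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({
    repositoryUrl: 'git@github.com:org/n8n-config.git', // made-up repository
    branchName: 'main',
    initRepo: true, // server defaults this to true when omitted
  }),
});
const preferences = await res.json(); // preferences with privateKey redacted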

View File

@@ -1,45 +1,39 @@
import { Service } from 'typedi';
import path from 'path';
import * as Db from '@/Db';
import { versionControlFoldersExistCheck } from './versionControlHelper.ee';
import type { VersionControlPreferences } from './types/versionControlPreferences';
import { sourceControlFoldersExistCheck } from './sourceControlHelper.ee';
import type { SourceControlPreferences } from './types/sourceControlPreferences';
import {
VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER,
VERSION_CONTROL_GIT_FOLDER,
VERSION_CONTROL_README,
VERSION_CONTROL_SSH_FOLDER,
VERSION_CONTROL_SSH_KEY_NAME,
VERSION_CONTROL_TAGS_EXPORT_FILE,
VERSION_CONTROL_VARIABLES_EXPORT_FILE,
VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER,
SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
SOURCE_CONTROL_GIT_FOLDER,
SOURCE_CONTROL_README,
SOURCE_CONTROL_SSH_FOLDER,
SOURCE_CONTROL_SSH_KEY_NAME,
SOURCE_CONTROL_TAGS_EXPORT_FILE,
SOURCE_CONTROL_VARIABLES_EXPORT_FILE,
SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER,
} from './constants';
import { LoggerProxy } from 'n8n-workflow';
import { VersionControlGitService } from './versionControlGit.service.ee';
import { SourceControlGitService } from './sourceControlGit.service.ee';
import { UserSettings } from 'n8n-core';
import type {
CommitResult,
DiffResult,
FetchResult,
PullResult,
PushResult,
StatusResult,
} from 'simple-git';
import type { PushResult, StatusResult } from 'simple-git';
import type { ExportResult } from './types/exportResult';
import { VersionControlExportService } from './versionControlExport.service.ee';
import { SourceControlExportService } from './sourceControlExport.service.ee';
import { BadRequestError } from '../../ResponseHelper';
import type { ImportResult } from './types/importResult';
import type { VersionControlPushWorkFolder } from './types/versionControlPushWorkFolder';
import type { VersionControllPullOptions } from './types/versionControlPullWorkFolder';
import type { SourceControlPushWorkFolder } from './types/sourceControlPushWorkFolder';
import type { SourceControllPullOptions } from './types/sourceControlPullWorkFolder';
import type {
VersionControlledFileLocation,
VersionControlledFile,
VersionControlledFileStatus,
VersionControlledFileType,
} from './types/versionControlledFile';
import { VersionControlPreferencesService } from './versionControlPreferences.service.ee';
SourceControlledFileLocation,
SourceControlledFile,
SourceControlledFileStatus,
SourceControlledFileType,
} from './types/sourceControlledFile';
import { SourceControlPreferencesService } from './sourceControlPreferences.service.ee';
import { writeFileSync } from 'fs';
import { SourceControlImportService } from './sourceControlImport.service.ee';
@Service()
export class VersionControlService {
export class SourceControlService {
private sshKeyName: string;
private sshFolder: string;
@@ -47,22 +41,23 @@ export class VersionControlService {
private gitFolder: string;
constructor(
private gitService: VersionControlGitService,
private versionControlPreferencesService: VersionControlPreferencesService,
private versionControlExportService: VersionControlExportService,
private gitService: SourceControlGitService,
private sourceControlPreferencesService: SourceControlPreferencesService,
private sourceControlExportService: SourceControlExportService,
private sourceControlImportService: SourceControlImportService,
) {
const userFolder = UserSettings.getUserN8nFolderPath();
this.sshFolder = path.join(userFolder, VERSION_CONTROL_SSH_FOLDER);
this.gitFolder = path.join(userFolder, VERSION_CONTROL_GIT_FOLDER);
this.sshKeyName = path.join(this.sshFolder, VERSION_CONTROL_SSH_KEY_NAME);
this.sshFolder = path.join(userFolder, SOURCE_CONTROL_SSH_FOLDER);
this.gitFolder = path.join(userFolder, SOURCE_CONTROL_GIT_FOLDER);
this.sshKeyName = path.join(this.sshFolder, SOURCE_CONTROL_SSH_KEY_NAME);
}
async init(): Promise<void> {
this.gitService.resetService();
versionControlFoldersExistCheck([this.gitFolder, this.sshFolder]);
await this.versionControlPreferencesService.loadFromDbAndApplyVersionControlPreferences();
sourceControlFoldersExistCheck([this.gitFolder, this.sshFolder]);
await this.sourceControlPreferencesService.loadFromDbAndApplySourceControlPreferences();
await this.gitService.initService({
versionControlPreferences: this.versionControlPreferencesService.getPreferences(),
sourceControlPreferences: this.sourceControlPreferencesService.getPreferences(),
gitFolder: this.gitFolder,
sshKeyName: this.sshKeyName,
sshFolder: this.sshFolder,
@@ -71,22 +66,22 @@ export class VersionControlService {
async disconnect(options: { keepKeyPair?: boolean } = {}) {
try {
await this.versionControlPreferencesService.setPreferences({
await this.sourceControlPreferencesService.setPreferences({
connected: false,
branchName: '',
});
await this.versionControlExportService.deleteRepositoryFolder();
await this.sourceControlExportService.deleteRepositoryFolder();
if (!options.keepKeyPair) {
await this.versionControlPreferencesService.deleteKeyPairFiles();
await this.sourceControlPreferencesService.deleteKeyPairFiles();
}
this.gitService.resetService();
return this.versionControlPreferencesService.versionControlPreferences;
return this.sourceControlPreferencesService.sourceControlPreferences;
} catch (error) {
throw Error(`Failed to disconnect from version control: ${(error as Error).message}`);
throw Error(`Failed to disconnect from source control: ${(error as Error).message}`);
}
}
async initializeRepository(preferences: VersionControlPreferences) {
async initializeRepository(preferences: SourceControlPreferences) {
if (!this.gitService.git) {
await this.init();
}
@@ -108,7 +103,7 @@ export class VersionControlService {
} else {
if (getBranchesResult.branches?.length === 0) {
try {
writeFileSync(path.join(this.gitFolder, '/README.md'), VERSION_CONTROL_README);
writeFileSync(path.join(this.gitFolder, '/README.md'), SOURCE_CONTROL_README);
await this.gitService.stage(new Set<string>(['README.md']));
await this.gitService.commit('Initial commit');
@@ -121,7 +116,7 @@ export class VersionControlService {
LoggerProxy.error(`Failed to create initial commit: ${(fileError as Error).message}`);
}
} else {
await this.versionControlPreferencesService.setPreferences({
await this.sourceControlPreferencesService.setPreferences({
branchName: '',
connected: true,
});
@@ -144,20 +139,20 @@ export class VersionControlService {
};
try {
// comment next line if needed
await this.versionControlExportService.cleanWorkFolder();
result.tags = await this.versionControlExportService.exportTagsToWorkFolder();
result.variables = await this.versionControlExportService.exportVariablesToWorkFolder();
result.workflows = await this.versionControlExportService.exportWorkflowsToWorkFolder();
result.credentials = await this.versionControlExportService.exportCredentialsToWorkFolder();
await this.sourceControlExportService.cleanWorkFolder();
result.tags = await this.sourceControlExportService.exportTagsToWorkFolder();
result.variables = await this.sourceControlExportService.exportVariablesToWorkFolder();
result.workflows = await this.sourceControlExportService.exportWorkflowsToWorkFolder();
result.credentials = await this.sourceControlExportService.exportCredentialsToWorkFolder();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
return result;
}
async import(options: VersionControllPullOptions): Promise<ImportResult | undefined> {
async import(options: SourceControllPullOptions): Promise<ImportResult | undefined> {
try {
return await this.versionControlExportService.importFromWorkFolder(options);
return await this.sourceControlImportService.importFromWorkFolder(options);
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
@@ -170,7 +165,7 @@ export class VersionControlService {
}
async setBranch(branch: string): Promise<{ branches: string[]; currentBranch: string }> {
await this.versionControlPreferencesService.setPreferences({
await this.sourceControlPreferencesService.setPreferences({
branchName: branch,
connected: branch?.length > 0,
});
@@ -179,9 +174,9 @@ export class VersionControlService {
// will reset the branch to the remote branch and pull
// this will discard all local changes
async resetWorkfolder(options: VersionControllPullOptions): Promise<ImportResult | undefined> {
async resetWorkfolder(options: SourceControllPullOptions): Promise<ImportResult | undefined> {
const currentBranch = await this.gitService.getCurrentBranch();
await this.versionControlExportService.cleanWorkFolder();
await this.sourceControlExportService.cleanWorkFolder();
await this.gitService.resetBranch({
hard: true,
target: currentBranch.remote,
@@ -194,9 +189,9 @@ export class VersionControlService {
}
async pushWorkfolder(
options: VersionControlPushWorkFolder,
): Promise<PushResult | VersionControlledFile[]> {
if (this.versionControlPreferencesService.isBranchReadOnly()) {
options: SourceControlPushWorkFolder,
): Promise<PushResult | SourceControlledFile[]> {
if (this.sourceControlPreferencesService.isBranchReadOnly()) {
throw new BadRequestError('Cannot push onto read-only branch.');
}
if (!options.skipDiff) {
@@ -211,13 +206,13 @@ export class VersionControlService {
await this.stage(options);
await this.gitService.commit(options.message ?? 'Updated Workfolder');
return this.gitService.push({
branch: this.versionControlPreferencesService.getBranchName(),
branch: this.sourceControlPreferencesService.getBranchName(),
force: options.force ?? false,
});
}
async pullWorkfolder(
options: VersionControllPullOptions,
options: SourceControllPullOptions,
): Promise<ImportResult | StatusResult | undefined> {
await this.resetWorkfolder({
importAfterPull: false,
@@ -238,17 +233,17 @@ export class VersionControlService {
}
async stage(
options: Pick<VersionControlPushWorkFolder, 'fileNames' | 'credentialIds' | 'workflowIds'>,
options: Pick<SourceControlPushWorkFolder, 'fileNames' | 'credentialIds' | 'workflowIds'>,
): Promise<{ staged: string[] } | string> {
const { fileNames, credentialIds, workflowIds } = options;
const status = await this.gitService.status();
let mergedFileNames = new Set<string>();
fileNames?.forEach((e) => mergedFileNames.add(e));
credentialIds?.forEach((e) =>
mergedFileNames.add(this.versionControlExportService.getCredentialsPath(e)),
mergedFileNames.add(this.sourceControlExportService.getCredentialsPath(e)),
);
workflowIds?.forEach((e) =>
mergedFileNames.add(this.versionControlExportService.getWorkflowPath(e)),
mergedFileNames.add(this.sourceControlExportService.getWorkflowPath(e)),
);
if (mergedFileNames.size === 0) {
mergedFileNames = new Set<string>([
@@ -280,16 +275,16 @@ export class VersionControlService {
return this.gitService.status();
}
private async fileNameToVersionControlledFile(
private async fileNameToSourceControlledFile(
fileName: string,
location: VersionControlledFileLocation,
location: SourceControlledFileLocation,
statusResult: StatusResult,
): Promise<VersionControlledFile | undefined> {
): Promise<SourceControlledFile | undefined> {
let id: string | undefined = undefined;
let name = '';
let conflict = false;
let status: VersionControlledFileStatus = 'unknown';
let type: VersionControlledFileType = 'file';
let status: SourceControlledFileStatus = 'unknown';
let type: SourceControlledFileType = 'file';
// initialize status from git status result
if (statusResult.not_added.find((e) => e === fileName)) status = 'new';
@@ -300,11 +295,11 @@ export class VersionControlService {
else if (statusResult.deleted.find((e) => e === fileName)) status = 'deleted';
else if (statusResult.modified.find((e) => e === fileName)) status = 'modified';
if (fileName.startsWith(VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER)) {
if (fileName.startsWith(SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER)) {
type = 'workflow';
if (status === 'deleted') {
id = fileName
.replace(VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER, '')
.replace(SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER, '')
.replace(/[\/,\\]/, '')
.replace('.json', '');
if (location === 'remote') {
@@ -318,13 +313,13 @@ export class VersionControlService {
name = '(deleted)';
}
} else {
const workflow = await this.versionControlExportService.getWorkflowFromFile(fileName);
const workflow = await this.sourceControlExportService.getWorkflowFromFile(fileName);
if (!workflow?.id) {
if (location === 'local') {
return;
}
id = fileName
.replace(VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER + '/', '')
.replace(SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER + '/', '')
.replace('.json', '');
status = 'created';
} else {
@@ -333,11 +328,11 @@ export class VersionControlService {
}
}
}
if (fileName.startsWith(VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER)) {
if (fileName.startsWith(SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER)) {
type = 'credential';
if (status === 'deleted') {
id = fileName
.replace(VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER, '')
.replace(SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, '')
.replace(/[\/,\\]/, '')
.replace('.json', '');
if (location === 'remote') {
@@ -351,13 +346,13 @@ export class VersionControlService {
name = '(deleted)';
}
} else {
const credential = await this.versionControlExportService.getCredentialFromFile(fileName);
const credential = await this.sourceControlExportService.getCredentialFromFile(fileName);
if (!credential?.id) {
if (location === 'local') {
return;
}
id = fileName
.replace(VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER + '/', '')
.replace(SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER + '/', '')
.replace('.json', '');
status = 'created';
} else {
@@ -367,13 +362,13 @@ export class VersionControlService {
}
}
if (fileName.startsWith(VERSION_CONTROL_VARIABLES_EXPORT_FILE)) {
if (fileName.startsWith(SOURCE_CONTROL_VARIABLES_EXPORT_FILE)) {
id = 'variables';
name = 'variables';
type = 'variables';
}
if (fileName.startsWith(VERSION_CONTROL_TAGS_EXPORT_FILE)) {
if (fileName.startsWith(SOURCE_CONTROL_TAGS_EXPORT_FILE)) {
id = 'tags';
name = 'tags';
type = 'tags';
@@ -392,29 +387,29 @@ export class VersionControlService {
};
}
async getStatus(): Promise<VersionControlledFile[]> {
async getStatus(): Promise<SourceControlledFile[]> {
await this.export();
await this.stage({});
await this.gitService.fetch();
const versionControlledFiles: VersionControlledFile[] = [];
const sourceControlledFiles: SourceControlledFile[] = [];
const diffRemote = await this.gitService.diffRemote();
const diffLocal = await this.gitService.diffLocal();
const status = await this.gitService.status();
await Promise.all([
...(diffRemote?.files.map(async (e) => {
const resolvedFile = await this.fileNameToVersionControlledFile(e.file, 'remote', status);
const resolvedFile = await this.fileNameToSourceControlledFile(e.file, 'remote', status);
if (resolvedFile) {
versionControlledFiles.push(resolvedFile);
sourceControlledFiles.push(resolvedFile);
}
}) ?? []),
...(diffLocal?.files.map(async (e) => {
const resolvedFile = await this.fileNameToVersionControlledFile(e.file, 'local', status);
const resolvedFile = await this.fileNameToSourceControlledFile(e.file, 'local', status);
if (resolvedFile) {
versionControlledFiles.push(resolvedFile);
sourceControlledFiles.push(resolvedFile);
}
}) ?? []),
]);
versionControlledFiles.forEach((e, index, array) => {
sourceControlledFiles.forEach((e, index, array) => {
const similarItems = array.filter(
(f) => f.type === e.type && (f.file === e.file || f.id === e.id),
);
@@ -424,34 +419,6 @@ export class VersionControlService {
});
}
});
return versionControlledFiles;
return sourceControlledFiles;
}
// #region Version Control Test Functions
//TODO: SEPARATE FUNCTIONS FOR DEVELOPMENT ONLY
//TODO: REMOVE THESE FUNCTIONS AFTER TESTING
async commit(message?: string): Promise<CommitResult> {
return this.gitService.commit(message ?? 'Updated Workfolder');
}
async fetch(): Promise<FetchResult> {
return this.gitService.fetch();
}
async diff(): Promise<DiffResult> {
return this.gitService.diff();
}
async pull(): Promise<PullResult> {
return this.gitService.pull();
}
async push(force = false): Promise<PushResult> {
return this.gitService.push({
branch: this.versionControlPreferencesService.getBranchName(),
force,
});
}
// #endregion
}
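For reference, the id derivation used above for deleted files reduces to a small string pipeline; a distilled sketch with a made-up filename:

// 'workflows/4X9hZ2kQ7LmNp2Rt.json' -> '4X9hZ2kQ7LmNp2Rt'
const fileName = 'workflows/4X9hZ2kQ7LmNp2Rt.json';
const id = fileName
  .replace(SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER, '') // strip the folder prefix
  .replace(/[\/,\\]/, '') // drop the single leading path separator
  .replace('.json', ''); // drop the extension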

View File

@@ -0,0 +1,336 @@
import { Service } from 'typedi';
import path from 'path';
import {
SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
SOURCE_CONTROL_GIT_FOLDER,
SOURCE_CONTROL_TAGS_EXPORT_FILE,
SOURCE_CONTROL_VARIABLES_EXPORT_FILE,
SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER,
} from './constants';
import * as Db from '@/Db';
import glob from 'fast-glob';
import type { ICredentialDataDecryptedObject } from 'n8n-workflow';
import { LoggerProxy, jsonParse } from 'n8n-workflow';
import { writeFile as fsWriteFile, readFile as fsReadFile, rm as fsRm } from 'fs/promises';
import { Credentials, UserSettings } from 'n8n-core';
import type { IWorkflowToImport } from '@/Interfaces';
import type { ExportableWorkflow } from './types/exportableWorkflow';
import type { ExportableCredential } from './types/exportableCredential';
import type { ExportResult } from './types/exportResult';
import type { SharedWorkflow } from '@/databases/entities/SharedWorkflow';
import { sourceControlFoldersExistCheck } from './sourceControlHelper.ee';
@Service()
export class SourceControlExportService {
private gitFolder: string;
private workflowExportFolder: string;
private credentialExportFolder: string;
constructor() {
const userFolder = UserSettings.getUserN8nFolderPath();
this.gitFolder = path.join(userFolder, SOURCE_CONTROL_GIT_FOLDER);
this.workflowExportFolder = path.join(this.gitFolder, SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER);
this.credentialExportFolder = path.join(
this.gitFolder,
SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
);
}
getWorkflowPath(workflowId: string): string {
return path.join(this.workflowExportFolder, `${workflowId}.json`);
}
getCredentialsPath(credentialsId: string): string {
return path.join(this.credentialExportFolder, `${credentialsId}.json`);
}
getTagsPath(): string {
return path.join(this.gitFolder, SOURCE_CONTROL_TAGS_EXPORT_FILE);
}
getVariablesPath(): string {
return path.join(this.gitFolder, SOURCE_CONTROL_VARIABLES_EXPORT_FILE);
}
async getWorkflowFromFile(
filePath: string,
root = this.gitFolder,
): Promise<IWorkflowToImport | undefined> {
try {
const importedWorkflow = jsonParse<IWorkflowToImport>(
await fsReadFile(path.join(root, filePath), { encoding: 'utf8' }),
);
return importedWorkflow;
} catch (error) {
return undefined;
}
}
async getCredentialFromFile(
filePath: string,
root = this.gitFolder,
): Promise<ExportableCredential | undefined> {
try {
const credential = jsonParse<ExportableCredential>(
await fsReadFile(path.join(root, filePath), { encoding: 'utf8' }),
);
return credential;
} catch (error) {
return undefined;
}
}
async cleanWorkFolder() {
try {
const workflowFiles = await glob('*.json', {
cwd: this.workflowExportFolder,
absolute: true,
});
const credentialFiles = await glob('*.json', {
cwd: this.credentialExportFolder,
absolute: true,
});
const variablesFile = await glob(SOURCE_CONTROL_VARIABLES_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
});
const tagsFile = await glob(SOURCE_CONTROL_TAGS_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
});
await Promise.all(tagsFile.map(async (e) => fsRm(e)));
await Promise.all(variablesFile.map(async (e) => fsRm(e)));
await Promise.all(workflowFiles.map(async (e) => fsRm(e)));
await Promise.all(credentialFiles.map(async (e) => fsRm(e)));
LoggerProxy.debug('Cleaned work folder.');
} catch (error) {
LoggerProxy.error(`Failed to clean work folder: ${(error as Error).message}`);
}
}
async deleteRepositoryFolder() {
try {
await fsRm(this.gitFolder, { recursive: true });
} catch (error) {
LoggerProxy.error(`Failed to delete work folder: ${(error as Error).message}`);
}
}
private async rmDeletedWorkflowsFromExportFolder(
workflowsToBeExported: SharedWorkflow[],
): Promise<Set<string>> {
const sharedWorkflowsFileNames = new Set<string>(
workflowsToBeExported.map((e) => this.getWorkflowPath(e?.workflow?.id)),
);
const existingWorkflowsInFolder = new Set<string>(
await glob('*.json', {
cwd: this.workflowExportFolder,
absolute: true,
}),
);
const deletedWorkflows = new Set(existingWorkflowsInFolder);
for (const elem of sharedWorkflowsFileNames) {
deletedWorkflows.delete(elem);
}
try {
await Promise.all([...deletedWorkflows].map(async (e) => fsRm(e)));
} catch (error) {
LoggerProxy.error(`Failed to delete workflows from work folder: ${(error as Error).message}`);
}
return deletedWorkflows;
}
private async writeExportableWorkflowsToExportFolder(workflowsToBeExported: SharedWorkflow[]) {
await Promise.all(
workflowsToBeExported.map(async (e) => {
if (!e.workflow) {
LoggerProxy.debug(
`Found no corresponding workflow ${e.workflowId ?? 'unknown'}, skipping export`,
);
return;
}
const fileName = this.getWorkflowPath(e.workflow?.id);
const sanitizedWorkflow: ExportableWorkflow = {
active: e.workflow?.active,
id: e.workflow?.id,
name: e.workflow?.name,
nodes: e.workflow?.nodes,
connections: e.workflow?.connections,
settings: e.workflow?.settings,
triggerCount: e.workflow?.triggerCount,
owner: e.user.email,
versionId: e.workflow?.versionId,
};
LoggerProxy.debug(`Writing workflow ${e.workflowId} to ${fileName}`);
return fsWriteFile(fileName, JSON.stringify(sanitizedWorkflow, null, 2));
}),
);
}
async exportWorkflowsToWorkFolder(): Promise<ExportResult> {
try {
sourceControlFoldersExistCheck([this.workflowExportFolder]);
const sharedWorkflows = await Db.collections.SharedWorkflow.find({
relations: ['workflow', 'role', 'user'],
where: {
role: {
name: 'owner',
scope: 'workflow',
},
},
});
// before exporting, figure out which workflows have been deleted and remove them from the export folder
const removedFiles = await this.rmDeletedWorkflowsFromExportFolder(sharedWorkflows);
// write the workflows to the export folder as json files
await this.writeExportableWorkflowsToExportFolder(sharedWorkflows);
return {
count: sharedWorkflows.length,
folder: this.workflowExportFolder,
files: sharedWorkflows.map((e) => ({
id: e?.workflow?.id,
name: this.getWorkflowPath(e?.workflow?.id),
})),
removedFiles: [...removedFiles],
};
} catch (error) {
throw Error(`Failed to export workflows to work folder: ${(error as Error).message}`);
}
}
async exportVariablesToWorkFolder(): Promise<ExportResult> {
try {
sourceControlFoldersExistCheck([this.gitFolder]);
const variables = await Db.collections.Variables.find();
// if there are no variables, do not write an export file
if (variables.length === 0) {
return {
count: 0,
folder: this.gitFolder,
files: [],
};
}
const fileName = this.getVariablesPath();
const sanitizedVariables = variables.map((e) => ({ ...e, value: '' }));
await fsWriteFile(fileName, JSON.stringify(sanitizedVariables, null, 2));
return {
count: sanitizedVariables.length,
folder: this.gitFolder,
files: [
{
id: '',
name: fileName,
},
],
};
} catch (error) {
throw Error(`Failed to export variables to work folder: ${(error as Error).message}`);
}
}
async exportTagsToWorkFolder(): Promise<ExportResult> {
try {
sourceControlFoldersExistCheck([this.gitFolder]);
const tags = await Db.collections.Tag.find();
// if there are no tags, do not write an export file
if (tags.length === 0) {
return {
count: 0,
folder: this.gitFolder,
files: [],
};
}
const mappings = await Db.collections.WorkflowTagMapping.find();
const fileName = this.getTagsPath();
await fsWriteFile(
fileName,
JSON.stringify(
{
tags: tags.map((tag) => ({ id: tag.id, name: tag.name })),
mappings,
},
null,
2,
),
);
return {
count: tags.length,
folder: this.gitFolder,
files: [
{
id: '',
name: fileName,
},
],
};
} catch (error) {
throw Error(`Failed to export tags to work folder: ${(error as Error).message}`);
}
}
private replaceCredentialData = (
data: ICredentialDataDecryptedObject,
): ICredentialDataDecryptedObject => {
for (const [key] of Object.entries(data)) {
try {
if (data[key] === null) {
delete data[key]; // remove invalid null values
} else if (typeof data[key] === 'object') {
data[key] = this.replaceCredentialData(data[key] as ICredentialDataDecryptedObject);
} else if (typeof data[key] === 'string') {
data[key] = (data[key] as string)?.startsWith('={{') ? data[key] : '';
} else if (typeof data[key] === 'number') {
// TODO: leaving numbers in for now, but maybe we should remove them
continue;
}
} catch (error) {
LoggerProxy.error(`Failed to sanitize credential data: ${(error as Error).message}`);
throw error;
}
}
return data;
};
async exportCredentialsToWorkFolder(): Promise<ExportResult> {
try {
sourceControlFoldersExistCheck([this.credentialExportFolder]);
const sharedCredentials = await Db.collections.SharedCredentials.find({
relations: ['credentials', 'role', 'user'],
});
const encryptionKey = await UserSettings.getEncryptionKey();
await Promise.all(
sharedCredentials.map(async (sharedCredential) => {
const { name, type, nodesAccess, data, id } = sharedCredential.credentials;
const credentialObject = new Credentials({ id, name }, type, nodesAccess, data);
const plainData = credentialObject.getData(encryptionKey);
const sanitizedData = this.replaceCredentialData(plainData);
const fileName = path.join(
this.credentialExportFolder,
`${sharedCredential.credentials.id}.json`,
);
const sanitizedCredential: ExportableCredential = {
id: sharedCredential.credentials.id,
name: sharedCredential.credentials.name,
type: sharedCredential.credentials.type,
data: sanitizedData,
nodesAccess: sharedCredential.credentials.nodesAccess,
};
LoggerProxy.debug(`Writing credential ${sharedCredential.credentials.id} to ${fileName}`);
return fsWriteFile(fileName, JSON.stringify(sanitizedCredential, null, 2));
}),
);
return {
count: sharedCredentials.length,
folder: this.credentialExportFolder,
files: sharedCredentials.map((e) => ({
id: e.credentials.id,
name: path.join(this.credentialExportFolder, `${e.credentials.id}.json`),
})),
};
} catch (error) {
throw Error(`Failed to export credentials to work folder: ${(error as Error).message}`);
}
}
}
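To make the sanitizer's behavior concrete, here is an illustrative input/output pair for replaceCredentialData (all values are made up):

const input = {
  apiKey: 'super-secret', // plain string -> blanked
  baseUrl: '={{ $env.BASE_URL }}', // expression string -> kept as-is
  port: 443, // numbers are currently left in place
  oauth: { accessToken: 'abc123' }, // objects are sanitized recursively
  legacyField: null, // null values are deleted outright
};
// After replaceCredentialData(input):
// { apiKey: '', baseUrl: '={{ $env.BASE_URL }}', port: 443, oauth: { accessToken: '' } }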

View File

@@ -13,12 +13,12 @@ import type {
StatusResult,
} from 'simple-git';
import { simpleGit } from 'simple-git';
import type { VersionControlPreferences } from './types/versionControlPreferences';
import { VERSION_CONTROL_DEFAULT_BRANCH, VERSION_CONTROL_ORIGIN } from './constants';
import { versionControlFoldersExistCheck } from './versionControlHelper.ee';
import type { SourceControlPreferences } from './types/sourceControlPreferences';
import { SOURCE_CONTROL_DEFAULT_BRANCH, SOURCE_CONTROL_ORIGIN } from './constants';
import { sourceControlFoldersExistCheck } from './sourceControlHelper.ee';
@Service()
export class VersionControlGitService {
export class SourceControlGitService {
git: SimpleGit | null = null;
private gitOptions: Partial<SimpleGitOptions> = {};
@@ -49,12 +49,17 @@ export class VersionControlGitService {
}
async initService(options: {
versionControlPreferences: VersionControlPreferences;
sourceControlPreferences: SourceControlPreferences;
gitFolder: string;
sshFolder: string;
sshKeyName: string;
}): Promise<void> {
const { versionControlPreferences, gitFolder, sshKeyName, sshFolder } = options;
const { sourceControlPreferences, gitFolder, sshKeyName, sshFolder } = options;
LoggerProxy.debug('GitService.init');
if (this.git !== null) {
return;
@@ -63,7 +68,7 @@ export class VersionControlGitService {
this.preInitCheck();
LoggerProxy.debug('Git pre-check passed');
versionControlFoldersExistCheck([gitFolder, sshFolder]);
sourceControlFoldersExistCheck([gitFolder, sshFolder]);
const sshKnownHosts = path.join(sshFolder, 'known_hosts');
const sshCommand = `ssh -o UserKnownHostsFile=${sshKnownHosts} -o StrictHostKeyChecking=no -i ${sshKeyName}`;
@@ -85,9 +90,9 @@ export class VersionControlGitService {
if (!(await this.checkRepositorySetup())) {
await this.git.init();
}
if (!(await this.hasRemote(versionControlPreferences.repositoryUrl))) {
if (versionControlPreferences.connected && versionControlPreferences.repositoryUrl) {
await this.initRepository(versionControlPreferences);
if (!(await this.hasRemote(sourceControlPreferences.repositoryUrl))) {
if (sourceControlPreferences.connected && sourceControlPreferences.repositoryUrl) {
await this.initRepository(sourceControlPreferences);
}
}
}
@@ -96,11 +101,6 @@ export class VersionControlGitService {
this.git = null;
}
resetLocalRepository() {
// TODO: Implement
this.git = null;
}
async checkRepositorySetup(): Promise<boolean> {
if (!this.git) {
throw new Error('Git is not initialized');
@@ -123,7 +123,7 @@ export class VersionControlGitService {
try {
const remotes = await this.git.getRemotes(true);
const foundRemote = remotes.find(
(e) => e.name === VERSION_CONTROL_ORIGIN && e.refs.push === remote,
(e) => e.name === SOURCE_CONTROL_ORIGIN && e.refs.push === remote,
);
if (foundRemote) {
LoggerProxy.debug(`Git remote found: ${foundRemote.name}: ${foundRemote.refs.push}`);
@@ -137,15 +137,15 @@ export class VersionControlGitService {
}
async initRepository(
versionControlPreferences: Pick<
VersionControlPreferences,
sourceControlPreferences: Pick<
SourceControlPreferences,
'repositoryUrl' | 'authorEmail' | 'authorName' | 'branchName' | 'initRepo'
>,
): Promise<void> {
if (!this.git) {
throw new Error('Git is not initialized');
}
if (versionControlPreferences.initRepo) {
if (sourceControlPreferences.initRepo) {
try {
await this.git.init();
} catch (error) {
@@ -153,7 +153,7 @@ export class VersionControlGitService {
}
}
try {
await this.git.addRemote(VERSION_CONTROL_ORIGIN, versionControlPreferences.repositoryUrl);
await this.git.addRemote(SOURCE_CONTROL_ORIGIN, sourceControlPreferences.repositoryUrl);
} catch (error) {
if ((error as Error).message.includes('remote origin already exists')) {
LoggerProxy.debug(`Git remote already exists: ${(error as Error).message}`);
@@ -161,13 +161,13 @@ export class VersionControlGitService {
throw error;
}
}
await this.git.addConfig('user.email', versionControlPreferences.authorEmail);
await this.git.addConfig('user.name', versionControlPreferences.authorName);
if (versionControlPreferences.initRepo) {
await this.git.addConfig('user.email', sourceControlPreferences.authorEmail);
await this.git.addConfig('user.name', sourceControlPreferences.authorName);
if (sourceControlPreferences.initRepo) {
try {
const branches = await this.getBranches();
if (branches.branches?.length === 0) {
await this.git.raw(['branch', '-M', versionControlPreferences.branchName]);
await this.git.raw(['branch', '-M', sourceControlPreferences.branchName]);
}
} catch (error) {
LoggerProxy.debug(`Git init: ${(error as Error).message}`);
@@ -203,6 +203,7 @@ export class VersionControlGitService {
throw new Error('Git is not initialized');
}
await this.git.checkout(branch);
await this.git.branch([`--set-upstream-to=${SOURCE_CONTROL_ORIGIN}/${branch}`, branch]);
return this.getBranches();
}
@@ -272,7 +273,7 @@ export class VersionControlGitService {
async push(
options: { force: boolean; branch: string } = {
force: false,
branch: VERSION_CONTROL_DEFAULT_BRANCH,
branch: SOURCE_CONTROL_DEFAULT_BRANCH,
},
): Promise<PushResult> {
const { force, branch } = options;
@@ -280,9 +281,9 @@ export class VersionControlGitService {
throw new Error('Git is not initialized');
}
if (force) {
return this.git.push(VERSION_CONTROL_ORIGIN, branch, ['-f']);
return this.git.push(SOURCE_CONTROL_ORIGIN, branch, ['-f']);
}
return this.git.push(VERSION_CONTROL_ORIGIN, branch);
return this.git.push(SOURCE_CONTROL_ORIGIN, branch);
}
async stage(files: Set<string>, deletedFiles?: Set<string>): Promise<string> {
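The hunk above builds sshCommand, but the line that binds it to git is not shown; presumably it is injected via the GIT_SSH_COMMAND environment variable, which simple-git supports — a sketch under that assumption:

// Assumed wiring (not visible in this hunk): simple-git forwards env vars to git.
const git = simpleGit(gitOptions).env('GIT_SSH_COMMAND', sshCommand);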

View File

@@ -5,9 +5,9 @@ import sshpk from 'sshpk';
import type { KeyPair } from './types/keyPair';
import { constants as fsConstants, mkdirSync, accessSync } from 'fs';
import { LoggerProxy } from 'n8n-workflow';
import { VERSION_CONTROL_GIT_KEY_COMMENT } from './constants';
import { SOURCE_CONTROL_GIT_KEY_COMMENT } from './constants';
export function versionControlFoldersExistCheck(folders: string[]) {
export function sourceControlFoldersExistCheck(folders: string[]) {
// run these file-access functions synchronously to avoid race conditions
folders.forEach((folder) => {
try {
@@ -22,9 +22,9 @@ export function versionControlFoldersExistCheck(folders: string[]) {
});
}
export function isVersionControlLicensed() {
export function isSourceControlLicensed() {
const license = Container.get(License);
return license.isVersionControlLicensed();
return license.isSourceControlLicensed();
}
export function generateSshKeyPair(keyType: 'ed25519' | 'rsa' = 'ed25519') {
@@ -55,10 +55,10 @@ export function generateSshKeyPair(keyType: 'ed25519' | 'rsa' = 'ed25519') {
break;
}
const keyPublic = sshpk.parseKey(generatedKeyPair.publicKey, 'pem');
keyPublic.comment = VERSION_CONTROL_GIT_KEY_COMMENT;
keyPublic.comment = SOURCE_CONTROL_GIT_KEY_COMMENT;
keyPair.publicKey = keyPublic.toString('ssh');
const keyPrivate = sshpk.parsePrivateKey(generatedKeyPair.privateKey, 'pem');
keyPrivate.comment = VERSION_CONTROL_GIT_KEY_COMMENT;
keyPrivate.comment = SOURCE_CONTROL_GIT_KEY_COMMENT;
keyPair.privateKey = keyPrivate.toString('ssh-private');
return {
privateKey: keyPair.privateKey,
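A short usage sketch for the key generation above — the return shape beyond privateKey is cut off here, so the publicKey field is an assumption consistent with the parsing code:

const { privateKey, publicKey } = generateSshKeyPair('ed25519'); // publicKey assumed in the return
// publicKey is OpenSSH-formatted and carries the 'n8n deploy key' comment,
// ready to be registered as a deploy key on the git host.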

View File

@@ -0,0 +1,363 @@
import Container, { Service } from 'typedi';
import path from 'path';
import {
SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
SOURCE_CONTROL_GIT_FOLDER,
SOURCE_CONTROL_TAGS_EXPORT_FILE,
SOURCE_CONTROL_VARIABLES_EXPORT_FILE,
SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER,
} from './constants';
import * as Db from '@/Db';
import glob from 'fast-glob';
import { LoggerProxy, jsonParse } from 'n8n-workflow';
import { readFile as fsReadFile } from 'fs/promises';
import { Credentials, UserSettings } from 'n8n-core';
import type { IWorkflowToImport } from '@/Interfaces';
import type { ExportableCredential } from './types/exportableCredential';
import { SharedWorkflow } from '@/databases/entities/SharedWorkflow';
import { CredentialsEntity } from '@/databases/entities/CredentialsEntity';
import { Variables } from '@/databases/entities/Variables';
import type { ImportResult } from './types/importResult';
import { UM_FIX_INSTRUCTION } from '@/commands/BaseCommand';
import { SharedCredentials } from '@/databases/entities/SharedCredentials';
import { WorkflowEntity } from '@/databases/entities/WorkflowEntity';
import { WorkflowTagMapping } from '@/databases/entities/WorkflowTagMapping';
import { TagEntity } from '@/databases/entities/TagEntity';
import { ActiveWorkflowRunner } from '../../ActiveWorkflowRunner';
import type { SourceControllPullOptions } from './types/sourceControlPullWorkFolder';
import { In } from 'typeorm';
import { isUniqueConstraintError } from '../../ResponseHelper';
@Service()
export class SourceControlImportService {
private gitFolder: string;
private workflowExportFolder: string;
private credentialExportFolder: string;
constructor() {
const userFolder = UserSettings.getUserN8nFolderPath();
this.gitFolder = path.join(userFolder, SOURCE_CONTROL_GIT_FOLDER);
this.workflowExportFolder = path.join(this.gitFolder, SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER);
this.credentialExportFolder = path.join(
this.gitFolder,
SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
);
}
private async getOwnerGlobalRole() {
const ownerGlobalRole = await Db.collections.Role.findOne({
where: { name: 'owner', scope: 'global' },
});
if (!ownerGlobalRole) {
throw new Error(`Failed to find owner global role. ${UM_FIX_INSTRUCTION}`);
}
return ownerGlobalRole;
}
private async getOwnerCredentialRole() {
const ownerCredentialRole = await Db.collections.Role.findOne({
where: { name: 'owner', scope: 'credential' },
});
if (!ownerCredentialRole) {
throw new Error(`Failed to find owner credential role. ${UM_FIX_INSTRUCTION}`);
}
return ownerCredentialRole;
}
private async getOwnerWorkflowRole() {
const ownerWorkflowRole = await Db.collections.Role.findOne({
where: { name: 'owner', scope: 'workflow' },
});
if (!ownerWorkflowRole) {
throw new Error(`Failed to find owner workflow role. ${UM_FIX_INSTRUCTION}`);
}
return ownerWorkflowRole;
}
private async importCredentialsFromFiles(
userId: string,
): Promise<Array<{ id: string; name: string; type: string }>> {
const credentialFiles = await glob('*.json', {
cwd: this.credentialExportFolder,
absolute: true,
});
const existingCredentials = await Db.collections.Credentials.find();
const ownerCredentialRole = await this.getOwnerCredentialRole();
const ownerGlobalRole = await this.getOwnerGlobalRole();
const encryptionKey = await UserSettings.getEncryptionKey();
let importCredentialsResult: Array<{ id: string; name: string; type: string }> = [];
await Db.transaction(async (transactionManager) => {
importCredentialsResult = await Promise.all(
credentialFiles.map(async (file) => {
LoggerProxy.debug(`Importing credentials file ${file}`);
const credential = jsonParse<ExportableCredential>(
await fsReadFile(file, { encoding: 'utf8' }),
);
const existingCredential = existingCredentials.find(
(e) => e.id === credential.id && e.type === credential.type,
);
const sharedOwner = await Db.collections.SharedCredentials.findOne({
select: ['userId'],
where: {
credentialsId: credential.id,
roleId: In([ownerCredentialRole.id, ownerGlobalRole.id]),
},
});
const { name, type, data, id, nodesAccess } = credential;
const newCredentialObject = new Credentials({ id, name }, type, []);
if (existingCredential?.data) {
newCredentialObject.data = existingCredential.data;
} else {
newCredentialObject.setData(data, encryptionKey);
}
newCredentialObject.nodesAccess = nodesAccess || existingCredential?.nodesAccess || [];
LoggerProxy.debug(`Updating credential id ${newCredentialObject.id as string}`);
await transactionManager.upsert(CredentialsEntity, newCredentialObject, ['id']);
if (!sharedOwner) {
const newSharedCredential = new SharedCredentials();
newSharedCredential.credentialsId = newCredentialObject.id as string;
newSharedCredential.userId = userId;
newSharedCredential.roleId = ownerGlobalRole.id;
await transactionManager.upsert(SharedCredentials, { ...newSharedCredential }, [
'credentialsId',
'userId',
]);
}
return {
id: newCredentialObject.id as string,
name: newCredentialObject.name,
type: newCredentialObject.type,
};
}),
);
});
return importCredentialsResult.filter((e) => e !== undefined);
}
private async importVariablesFromFile(valueOverrides?: {
[key: string]: string;
}): Promise<{ imported: string[] }> {
const variablesFile = await glob(SOURCE_CONTROL_VARIABLES_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
});
const result: { imported: string[] } = { imported: [] };
if (variablesFile.length > 0) {
LoggerProxy.debug(`Importing variables from file ${variablesFile[0]}`);
const importedVariables = jsonParse<Array<Partial<Variables>>>(
await fsReadFile(variablesFile[0], { encoding: 'utf8' }),
{ fallbackValue: [] },
);
const overriddenKeys = Object.keys(valueOverrides ?? {});
for (const variable of importedVariables) {
if (!variable.key) {
continue;
}
// by default no value is stored remotely, so an empty string is returned
// it must be changed to undefined so as to not overwrite existing values!
if (variable.value === '') {
variable.value = undefined;
}
if (overriddenKeys.includes(variable.key) && valueOverrides) {
variable.value = valueOverrides[variable.key];
overriddenKeys.splice(overriddenKeys.indexOf(variable.key), 1);
}
try {
await Db.collections.Variables.upsert({ ...variable }, ['id']);
} catch (errorUpsert) {
if (isUniqueConstraintError(errorUpsert as Error)) {
LoggerProxy.debug(`Variable ${variable.key} already exists, updating instead`);
try {
await Db.collections.Variables.update({ key: variable.key }, { ...variable });
} catch (errorUpdate) {
LoggerProxy.debug(`Failed to update variable ${variable.key}, skipping`);
LoggerProxy.debug((errorUpdate as Error).message);
}
}
} finally {
result.imported.push(variable.key);
}
}
// add remaining overrides as new variables
if (overriddenKeys.length > 0 && valueOverrides) {
for (const key of overriddenKeys) {
result.imported.push(key);
const newVariable = new Variables({ key, value: valueOverrides[key] });
await Db.collections.Variables.save(newVariable);
}
}
}
return result;
}
private async importTagsFromFile() {
const tagsFile = await glob(SOURCE_CONTROL_TAGS_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
});
if (tagsFile.length > 0) {
LoggerProxy.debug(`Importing tags from file ${tagsFile[0]}`);
const mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>(
await fsReadFile(tagsFile[0], { encoding: 'utf8' }),
{ fallbackValue: { tags: [], mappings: [] } },
);
const existingWorkflowIds = new Set(
(
await Db.collections.Workflow.find({
select: ['id'],
})
).map((e) => e.id),
);
await Db.transaction(async (transactionManager) => {
await Promise.all(
mappedTags.tags.map(async (tag) => {
await transactionManager.upsert(
TagEntity,
{
...tag,
},
{
skipUpdateIfNoValuesChanged: true,
conflictPaths: { id: true },
},
);
}),
);
await Promise.all(
mappedTags.mappings.map(async (mapping) => {
if (!existingWorkflowIds.has(String(mapping.workflowId))) return;
await transactionManager.upsert(
WorkflowTagMapping,
{ tagId: String(mapping.tagId), workflowId: String(mapping.workflowId) },
{
skipUpdateIfNoValuesChanged: true,
conflictPaths: { tagId: true, workflowId: true },
},
);
}),
);
});
return mappedTags;
}
return { tags: [], mappings: [] };
}
private async importWorkflowsFromFiles(
userId: string,
): Promise<Array<{ id: string; name: string }>> {
const workflowFiles = await glob('*.json', {
cwd: this.workflowExportFolder,
absolute: true,
});
const existingWorkflows = await Db.collections.Workflow.find({
select: ['id', 'name', 'active', 'versionId'],
});
const ownerWorkflowRole = await this.getOwnerWorkflowRole();
const workflowRunner = Container.get(ActiveWorkflowRunner);
let importWorkflowsResult = new Array<{ id: string; name: string }>();
await Db.transaction(async (transactionManager) => {
importWorkflowsResult = await Promise.all(
workflowFiles.map(async (file) => {
LoggerProxy.debug(`Parsing workflow file ${file}`);
const importedWorkflow = jsonParse<IWorkflowToImport>(
await fsReadFile(file, { encoding: 'utf8' }),
);
const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id);
if (existingWorkflow?.versionId === importedWorkflow.versionId) {
LoggerProxy.debug(
`Skipping import of workflow ${
importedWorkflow.id ?? 'n/a'
} - versionId is up to date`,
);
return {
id: importedWorkflow.id ?? 'n/a',
name: 'skipped',
};
}
LoggerProxy.debug(`Importing workflow ${importedWorkflow.id ?? 'n/a'}`);
importedWorkflow.active = existingWorkflow?.active ?? false;
LoggerProxy.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`);
const upsertResult = await transactionManager.upsert(
WorkflowEntity,
{ ...importedWorkflow },
['id'],
);
if (upsertResult?.identifiers?.length !== 1) {
throw new Error(`Failed to upsert workflow ${importedWorkflow.id ?? 'new'}`);
}
// due to sequential IDs, this may have changed during the insert
// TODO: once IDs are unique and we removed autoincrement, remove this
const upsertedWorkflowId = upsertResult.identifiers[0].id as string;
await transactionManager.upsert(
SharedWorkflow,
{
workflowId: upsertedWorkflowId,
userId,
roleId: ownerWorkflowRole.id,
},
['workflowId', 'userId'],
);
if (existingWorkflow?.active) {
try {
// remove active pre-import workflow
LoggerProxy.debug(`Deactivating workflow id ${existingWorkflow.id}`);
await workflowRunner.remove(existingWorkflow.id);
// try activating the imported workflow
LoggerProxy.debug(`Reactivating workflow id ${existingWorkflow.id}`);
await workflowRunner.add(existingWorkflow.id, 'activate');
} catch (error) {
LoggerProxy.error(
`Failed to activate workflow ${existingWorkflow.id}`,
error as Error,
);
}
}
return {
id: importedWorkflow.id ?? 'unknown',
name: file,
};
}),
);
});
return importWorkflowsResult;
}
async importFromWorkFolder(options: SourceControllPullOptions): Promise<ImportResult> {
try {
const importedVariables = await this.importVariablesFromFile(options.variables);
const importedCredentials = await this.importCredentialsFromFiles(options.userId);
const importWorkflows = await this.importWorkflowsFromFiles(options.userId);
const importTags = await this.importTagsFromFile();
return {
variables: importedVariables,
credentials: importedCredentials,
workflows: importWorkflows,
tags: importTags,
};
} catch (error) {
throw Error(`Failed to import workflows from work folder: ${(error as Error).message}`);
}
}
}
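
Note on the variable import above: an empty string in the remote file means "no value stored remotely" and is converted to undefined so local values survive, an explicit override always wins, and overrides that match no remote key become brand-new variables. A minimal standalone sketch of those merge rules (illustrative only, not code from this PR):

type RemoteVariable = { key: string; value?: string };

// Sketch: mirrors the merge rules of importVariablesFromFile.
function mergeVariableOverrides(
	remote: RemoteVariable[],
	overrides: Record<string, string> = {},
): RemoteVariable[] {
	const pending = new Set(Object.keys(overrides));
	const merged: RemoteVariable[] = [];
	for (const variable of remote) {
		if (!variable.key) continue;
		// empty string = no value stored remotely; keep the local value intact
		let value = variable.value === '' ? undefined : variable.value;
		if (pending.has(variable.key)) {
			value = overrides[variable.key]; // explicit override wins
			pending.delete(variable.key);
		}
		merged.push({ key: variable.key, value });
	}
	// overrides that matched no remote key are added as new variables
	for (const key of pending) merged.push({ key, value: overrides[key] });
	return merged;
}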

View File

@@ -1,28 +1,28 @@
import { Service } from 'typedi';
import { VersionControlPreferences } from './types/versionControlPreferences';
import { SourceControlPreferences } from './types/sourceControlPreferences';
import type { ValidationError } from 'class-validator';
import { validate } from 'class-validator';
import { readFileSync as fsReadFileSync, existsSync as fsExistsSync } from 'fs';
import { writeFile as fsWriteFile, rm as fsRm } from 'fs/promises';
import {
generateSshKeyPair,
isVersionControlLicensed,
versionControlFoldersExistCheck,
} from './versionControlHelper.ee';
isSourceControlLicensed,
sourceControlFoldersExistCheck,
} from './sourceControlHelper.ee';
import { UserSettings } from 'n8n-core';
import { LoggerProxy, jsonParse } from 'n8n-workflow';
import * as Db from '@/Db';
import {
VERSION_CONTROL_SSH_FOLDER,
VERSION_CONTROL_GIT_FOLDER,
VERSION_CONTROL_SSH_KEY_NAME,
VERSION_CONTROL_PREFERENCES_DB_KEY,
SOURCE_CONTROL_SSH_FOLDER,
SOURCE_CONTROL_GIT_FOLDER,
SOURCE_CONTROL_SSH_KEY_NAME,
SOURCE_CONTROL_PREFERENCES_DB_KEY,
} from './constants';
import path from 'path';
@Service()
export class VersionControlPreferencesService {
private _versionControlPreferences: VersionControlPreferences = new VersionControlPreferences();
export class SourceControlPreferencesService {
private _sourceControlPreferences: SourceControlPreferences = new SourceControlPreferences();
private sshKeyName: string;
@@ -32,23 +32,24 @@ export class VersionControlPreferencesService {
constructor() {
const userFolder = UserSettings.getUserN8nFolderPath();
this.sshFolder = path.join(userFolder, VERSION_CONTROL_SSH_FOLDER);
this.gitFolder = path.join(userFolder, VERSION_CONTROL_GIT_FOLDER);
this.sshKeyName = path.join(this.sshFolder, VERSION_CONTROL_SSH_KEY_NAME);
this.sshFolder = path.join(userFolder, SOURCE_CONTROL_SSH_FOLDER);
this.gitFolder = path.join(userFolder, SOURCE_CONTROL_GIT_FOLDER);
this.sshKeyName = path.join(this.sshFolder, SOURCE_CONTROL_SSH_KEY_NAME);
}
public get versionControlPreferences(): VersionControlPreferences {
public get sourceControlPreferences(): SourceControlPreferences {
return {
...this._versionControlPreferences,
connected: this._versionControlPreferences.connected ?? false,
...this._sourceControlPreferences,
connected: this._sourceControlPreferences.connected ?? false,
publicKey: this.getPublicKey(),
};
}
public set versionControlPreferences(preferences: Partial<VersionControlPreferences>) {
this._versionControlPreferences = VersionControlPreferences.merge(
// merge the new preferences with the existing preferences when setting
public set sourceControlPreferences(preferences: Partial<SourceControlPreferences>) {
this._sourceControlPreferences = SourceControlPreferences.merge(
preferences,
this._versionControlPreferences,
this._sourceControlPreferences,
);
}
@@ -77,8 +78,8 @@ export class VersionControlPreferencesService {
* Will generate an ed25519 key pair and save it to the database and the file system
* Note: this will overwrite any existing key pair
*/
async generateAndSaveKeyPair(): Promise<VersionControlPreferences> {
versionControlFoldersExistCheck([this.gitFolder, this.sshFolder]);
async generateAndSaveKeyPair(): Promise<SourceControlPreferences> {
sourceControlFoldersExistCheck([this.gitFolder, this.sshFolder]);
const keyPair = generateSshKeyPair('ed25519');
if (keyPair.publicKey && keyPair.privateKey) {
try {
@@ -95,30 +96,30 @@ export class VersionControlPreferencesService {
}
isBranchReadOnly(): boolean {
return this._versionControlPreferences.branchReadOnly;
return this._sourceControlPreferences.branchReadOnly;
}
isVersionControlConnected(): boolean {
return this.versionControlPreferences.connected;
isSourceControlConnected(): boolean {
return this.sourceControlPreferences.connected;
}
isVersionControlLicensedAndEnabled(): boolean {
return this.isVersionControlConnected() && isVersionControlLicensed();
isSourceControlLicensedAndEnabled(): boolean {
return this.isSourceControlConnected() && isSourceControlLicensed();
}
getBranchName(): string {
return this.versionControlPreferences.branchName;
return this.sourceControlPreferences.branchName;
}
getPreferences(): VersionControlPreferences {
return this.versionControlPreferences;
getPreferences(): SourceControlPreferences {
return this.sourceControlPreferences;
}
async validateVersionControlPreferences(
preferences: Partial<VersionControlPreferences>,
async validateSourceControlPreferences(
preferences: Partial<SourceControlPreferences>,
allowMissingProperties = true,
): Promise<ValidationError[]> {
const preferencesObject = new VersionControlPreferences(preferences);
const preferencesObject = new SourceControlPreferences(preferences);
const validationResult = await validate(preferencesObject, {
forbidUnknownValues: false,
skipMissingProperties: allowMissingProperties,
@@ -126,45 +127,45 @@ export class VersionControlPreferencesService {
validationError: { target: false },
});
if (validationResult.length > 0) {
throw new Error(`Invalid version control preferences: ${JSON.stringify(validationResult)}`);
throw new Error(`Invalid source control preferences: ${JSON.stringify(validationResult)}`);
}
return validationResult;
}
async setPreferences(
preferences: Partial<VersionControlPreferences>,
preferences: Partial<SourceControlPreferences>,
saveToDb = true,
): Promise<VersionControlPreferences> {
versionControlFoldersExistCheck([this.gitFolder, this.sshFolder]);
): Promise<SourceControlPreferences> {
sourceControlFoldersExistCheck([this.gitFolder, this.sshFolder]);
if (!this.hasKeyPairFiles()) {
LoggerProxy.debug('No key pair files found, generating new pair');
await this.generateAndSaveKeyPair();
}
this.versionControlPreferences = preferences;
this.sourceControlPreferences = preferences;
if (saveToDb) {
const settingsValue = JSON.stringify(this._versionControlPreferences);
const settingsValue = JSON.stringify(this._sourceControlPreferences);
try {
await Db.collections.Settings.save({
key: VERSION_CONTROL_PREFERENCES_DB_KEY,
key: SOURCE_CONTROL_PREFERENCES_DB_KEY,
value: settingsValue,
loadOnStartup: true,
});
} catch (error) {
throw new Error(`Failed to save version control preferences: ${(error as Error).message}`);
throw new Error(`Failed to save source control preferences: ${(error as Error).message}`);
}
}
return this.versionControlPreferences;
return this.sourceControlPreferences;
}
async loadFromDbAndApplyVersionControlPreferences(): Promise<
VersionControlPreferences | undefined
async loadFromDbAndApplySourceControlPreferences(): Promise<
SourceControlPreferences | undefined
> {
const loadedPreferences = await Db.collections.Settings.findOne({
where: { key: VERSION_CONTROL_PREFERENCES_DB_KEY },
where: { key: SOURCE_CONTROL_PREFERENCES_DB_KEY },
});
if (loadedPreferences) {
try {
const preferences = jsonParse<VersionControlPreferences>(loadedPreferences.value);
const preferences = jsonParse<SourceControlPreferences>(loadedPreferences.value);
if (preferences) {
// set local preferences but don't write back to db
await this.setPreferences(preferences, false);
@@ -172,11 +173,11 @@ export class VersionControlPreferencesService {
}
} catch (error) {
LoggerProxy.warn(
`Could not parse Version Control settings from database: ${(error as Error).message}`,
`Could not parse Source Control settings from database: ${(error as Error).message}`,
);
}
}
await this.setPreferences(new VersionControlPreferences(), true);
return this.versionControlPreferences;
await this.setPreferences(new SourceControlPreferences(), true);
return this.sourceControlPreferences;
}
}
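
For reference, merge() above resolves preferences field-by-field, with the incoming value taking precedence and the existing preferences as fallback. A usage sketch (import path assumed from the diff):

import { SourceControlPreferences } from './types/sourceControlPreferences';

// Incoming partial preferences win per field; existing values are the fallback.
const merged = SourceControlPreferences.merge(
	{ branchName: 'production' },            // incoming partial update
	{ branchName: 'main', connected: true }, // current preferences
);
// merged.branchName === 'production', merged.connected === true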

View File

@@ -0,0 +1,9 @@
import type { ICredentialDataDecryptedObject, ICredentialNodeAccess } from 'n8n-workflow';
export interface ExportableCredential {
id: string;
name: string;
type: string;
data: ICredentialDataDecryptedObject;
nodesAccess: ICredentialNodeAccess[];
}
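
A made-up sample of what such a credential file could look like on disk after an export; per the export sanitizer, plain secret values are blanked unless they are expressions, and the id is now a nanoid rather than an auto-incremented integer:

import type { ExportableCredential } from './types/exportableCredential';

// Invented example of credentials/<id>.json after an export.
const example: ExportableCredential = {
	id: 'B5Jgyz5cYiYYWmdk',
	name: 'My Slack account',
	type: 'slackApi',
	data: { accessToken: '' }, // plain secrets are blanked on export
	nodesAccess: [{ nodeType: 'n8n-nodes-base.slack' }],
};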

View File

@@ -7,7 +7,7 @@ export interface ImportResult {
name: string;
}>;
credentials: Array<{ id: string; name: string; type: string }>;
variables: { added: string[]; changed: string[] };
variables: { imported: string[] };
tags: { tags: TagEntity[]; mappings: WorkflowTagMapping[] };
removedFiles?: string[];
}

View File

@@ -0,0 +1,22 @@
import type { AuthenticatedRequest } from '@/requests';
import type { SourceControlPreferences } from './sourceControlPreferences';
import type { SourceControlSetBranch } from './sourceControlSetBranch';
import type { SourceControlCommit } from './sourceControlCommit';
import type { SourceControlStage } from './sourceControlStage';
import type { SourceControlPush } from './sourceControlPush';
import type { SourceControlPushWorkFolder } from './sourceControlPushWorkFolder';
import type { SourceControlPullWorkFolder } from './sourceControlPullWorkFolder';
import type { SourceControlDisconnect } from './sourceControlDisconnect';
import type { SourceControlSetReadOnly } from './sourceControlSetReadOnly';
export declare namespace SourceControlRequest {
type UpdatePreferences = AuthenticatedRequest<{}, {}, Partial<SourceControlPreferences>, {}>;
type SetReadOnly = AuthenticatedRequest<{}, {}, SourceControlSetReadOnly, {}>;
type SetBranch = AuthenticatedRequest<{}, {}, SourceControlSetBranch, {}>;
type Commit = AuthenticatedRequest<{}, {}, SourceControlCommit, {}>;
type Stage = AuthenticatedRequest<{}, {}, SourceControlStage, {}>;
type Push = AuthenticatedRequest<{}, {}, SourceControlPush, {}>;
type Disconnect = AuthenticatedRequest<{}, {}, SourceControlDisconnect, {}>;
type PushWorkFolder = AuthenticatedRequest<{}, {}, SourceControlPushWorkFolder, {}>;
type PullWorkFolder = AuthenticatedRequest<{}, {}, SourceControlPullWorkFolder, {}>;
}

View File

@@ -1,6 +1,6 @@
import { IsString } from 'class-validator';
export class VersionControlCommit {
export class SourceControlCommit {
@IsString()
message: string;
}

View File

@@ -1,6 +1,6 @@
import { IsBoolean, IsOptional } from 'class-validator';
export class VersionControlDisconnect {
export class SourceControlDisconnect {
@IsBoolean()
@IsOptional()
keepKeyPair?: boolean;

View File

@@ -1,7 +1,7 @@
import { IsBoolean, IsEmail, IsHexColor, IsOptional, IsString } from 'class-validator';
export class VersionControlPreferences {
constructor(preferences: Partial<VersionControlPreferences> | undefined = undefined) {
export class SourceControlPreferences {
constructor(preferences: Partial<SourceControlPreferences> | undefined = undefined) {
if (preferences) Object.assign(this, preferences);
}
@@ -34,15 +34,15 @@ export class VersionControlPreferences {
@IsBoolean()
readonly initRepo?: boolean;
static fromJSON(json: Partial<VersionControlPreferences>): VersionControlPreferences {
return new VersionControlPreferences(json);
static fromJSON(json: Partial<SourceControlPreferences>): SourceControlPreferences {
return new SourceControlPreferences(json);
}
static merge(
preferences: Partial<VersionControlPreferences>,
defaultPreferences: Partial<VersionControlPreferences>,
): VersionControlPreferences {
return new VersionControlPreferences({
preferences: Partial<SourceControlPreferences>,
defaultPreferences: Partial<SourceControlPreferences>,
): SourceControlPreferences {
return new SourceControlPreferences({
connected: preferences.connected ?? defaultPreferences.connected,
repositoryUrl: preferences.repositoryUrl ?? defaultPreferences.repositoryUrl,
authorName: preferences.authorName ?? defaultPreferences.authorName,

View File

@@ -1,6 +1,6 @@
import { IsBoolean, IsObject, IsOptional, IsString } from 'class-validator';
export class VersionControlPullWorkFolder {
export class SourceControlPullWorkFolder {
@IsBoolean()
@IsOptional()
force?: boolean;
@@ -18,7 +18,7 @@ export class VersionControlPullWorkFolder {
variables?: { [key: string]: string };
}
export class VersionControllPullOptions {
export class SourceControllPullOptions {
userId: string;
force?: boolean;

View File

@@ -1,6 +1,6 @@
import { IsBoolean, IsOptional } from 'class-validator';
export class VersionControlPush {
export class SourceControlPush {
@IsBoolean()
@IsOptional()
force?: boolean;

View File

@@ -1,6 +1,6 @@
import { IsBoolean, IsOptional, IsString } from 'class-validator';
export class VersionControlPushWorkFolder {
export class SourceControlPushWorkFolder {
@IsBoolean()
@IsOptional()
force?: boolean;

View File

@@ -1,6 +1,6 @@
import { IsString } from 'class-validator';
export class VersionControlSetBranch {
export class SourceControlSetBranch {
@IsString()
branch: string;
}

View File

@@ -1,6 +1,6 @@
import { IsBoolean } from 'class-validator';
export class VersionControlSetReadOnly {
export class SourceControlSetReadOnly {
@IsBoolean()
branchReadOnly: boolean;
}

View File

@@ -1,6 +1,6 @@
import { IsOptional, IsString } from 'class-validator';
export class VersionControlStage {
export class SourceControlStage {
@IsString({ each: true })
@IsOptional()
fileNames?: Set<string>;

View File

@@ -0,0 +1,19 @@
export type SourceControlledFileStatus =
| 'new'
| 'modified'
| 'deleted'
| 'created'
| 'renamed'
| 'conflicted'
| 'unknown';
export type SourceControlledFileLocation = 'local' | 'remote';
export type SourceControlledFileType = 'credential' | 'workflow' | 'tags' | 'variables' | 'file';
export type SourceControlledFile = {
file: string;
id: string;
name: string;
type: SourceControlledFileType;
status: SourceControlledFileStatus;
location: SourceControlledFileLocation;
conflict: boolean;
};
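
An illustrative status entry of this shape, as the status endpoints might report a workflow that changed on both sides since the last sync (all values invented):

import type { SourceControlledFile } from './types/sourceControlledFile';

// Invented example: a workflow edited both locally and remotely.
const conflictedWorkflow: SourceControlledFile = {
	file: 'workflows/B5Jgyz5cYiYYWmdk.json',
	id: 'B5Jgyz5cYiYYWmdk',
	name: 'My workflow',
	type: 'workflow',
	status: 'conflicted',
	location: 'remote',
	conflict: true,
};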

View File

@@ -50,12 +50,9 @@ EEVariablesController.post(
);
EEVariablesController.patch(
'/:id(\\d+)',
'/:id(\\w+)',
ResponseHelper.send(async (req: VariablesRequest.Update) => {
const id = parseInt(req.params.id);
if (isNaN(id)) {
throw new ResponseHelper.BadRequestError('Invalid variable id ' + req.params.id);
}
const id = req.params.id;
if (req.user.globalRole.name !== 'owner') {
LoggerProxy.info('Attempt to update a variable blocked due to lack of permissions', {
id,

View File

@@ -40,12 +40,9 @@ variablesController.post(
);
variablesController.get(
'/:id(\\d+)',
'/:id(\\w+)',
ResponseHelper.send(async (req: VariablesRequest.Get) => {
const id = parseInt(req.params.id);
if (isNaN(id)) {
throw new ResponseHelper.BadRequestError('Invalid variable id ' + req.params.id);
}
const id = req.params.id;
const variable = await VariablesService.get(id);
if (variable === null) {
throw new ResponseHelper.NotFoundError(`Variable with id ${req.params.id} not found`);
@@ -55,19 +52,16 @@ variablesController.get(
);
variablesController.patch(
'/:id(\\d+)',
'/:id(\\w+)',
ResponseHelper.send(async () => {
throw new ResponseHelper.BadRequestError('No variables license found');
}),
);
variablesController.delete(
'/:id(\\d+)',
'/:id(\\w+)',
ResponseHelper.send(async (req: VariablesRequest.Delete) => {
const id = parseInt(req.params.id);
if (isNaN(id)) {
throw new ResponseHelper.BadRequestError('Invalid variable id ' + req.params.id);
}
const id = req.params.id;
if (req.user.globalRole.name !== 'owner') {
LoggerProxy.info('Attempt to delete a variable blocked due to lack of permissions', {
id,
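
Both variables controllers above widen the route parameter from '\d+' (integers only) to '\w+' so nanoid values match, and drop the parseInt step since ids are plain strings now. A standalone Express sketch of the same pattern (illustrative only):

import express from 'express';

const app = express();

// ':id(\\w+)' constrains the parameter with a regex: alphanumerics and
// underscore match, so nanoid ids pass while a hyphenated id would not.
app.get('/variables/:id(\\w+)', (req, res) => {
	res.json({ id: req.params.id }); // already a string; no parseInt needed
});

app.listen(3000);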

View File

@@ -4,6 +4,7 @@ import { InternalHooks } from '@/InternalHooks';
import Container from 'typedi';
import { canCreateNewVariable } from './enviromentHelpers';
import { VariablesService } from './variables.service';
import { generateNanoId } from '../../databases/utils/generators';
export class VariablesLicenseError extends Error {}
export class VariablesValidationError extends Error {}
@@ -32,12 +33,14 @@ export class EEVariablesService extends VariablesService {
this.validateVariable(variable);
void Container.get(InternalHooks).onVariableCreated({ variable_type: variable.type });
return collections.Variables.save(variable);
return collections.Variables.save({
...variable,
id: generateNanoId(),
});
}
static async update(id: number, variable: Omit<Variables, 'id'>): Promise<Variables> {
static async update(id: string, variable: Omit<Variables, 'id'>): Promise<Variables> {
this.validateVariable(variable);
await collections.Variables.update(id, variable);
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
return (await this.get(id))!;
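
generateNanoId comes from the new database utils and its body is outside this diff; based on the nanoid package it is presumably close to the following sketch (the alphabet and the length of 16 are assumptions here):

import { customAlphabet } from 'nanoid';

// Assumed implementation: 16-character alphanumeric ids for primary keys.
const ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
const nanoid = customAlphabet(ALPHABET, 16);

export function generateNanoId(): string {
	return nanoid();
}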

View File

@@ -10,11 +10,11 @@ export class VariablesService {
return collections.Variables.count();
}
static async get(id: number): Promise<Variables | null> {
static async get(id: string): Promise<Variables | null> {
return collections.Variables.findOne({ where: { id } });
}
static async delete(id: number): Promise<void> {
static async delete(id: string): Promise<void> {
await collections.Variables.delete(id);
}
}

View File

@@ -1,15 +0,0 @@
export const VERSION_CONTROL_PREFERENCES_DB_KEY = 'features.versionControl';
export const VERSION_CONTROL_GIT_FOLDER = 'git';
export const VERSION_CONTROL_GIT_KEY_COMMENT = 'n8n deploy key';
export const VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER = 'workflows';
export const VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER = 'credentials';
export const VERSION_CONTROL_VARIABLES_EXPORT_FILE = 'variables.json';
export const VERSION_CONTROL_TAGS_EXPORT_FILE = 'tags.json';
export const VERSION_CONTROL_SSH_FOLDER = 'ssh';
export const VERSION_CONTROL_SSH_KEY_NAME = 'key';
export const VERSION_CONTROL_DEFAULT_BRANCH = 'main';
export const VERSION_CONTROL_ORIGIN = 'origin';
export const VERSION_CONTROL_API_ROOT = 'version-control';
export const VERSION_CONTROL_README = `
# n8n Version Control
`;

View File

@@ -1,21 +0,0 @@
import type { RequestHandler } from 'express';
import { isVersionControlLicensed } from '../versionControlHelper.ee';
import Container from 'typedi';
import { VersionControlPreferencesService } from '../versionControlPreferences.service.ee';
export const versionControlLicensedAndEnabledMiddleware: RequestHandler = (req, res, next) => {
const versionControlPreferencesService = Container.get(VersionControlPreferencesService);
if (versionControlPreferencesService.isVersionControlLicensedAndEnabled()) {
next();
} else {
res.status(401).json({ status: 'error', message: 'Unauthorized' });
}
};
export const versionControlLicensedMiddleware: RequestHandler = (req, res, next) => {
if (isVersionControlLicensed()) {
next();
} else {
res.status(401).json({ status: 'error', message: 'Unauthorized' });
}
};

View File

@@ -1,8 +0,0 @@
import type { ICredentialDataDecryptedObject } from 'n8n-workflow';
export interface ExportableCredential {
id: string;
name: string;
type: string;
data: ICredentialDataDecryptedObject;
}

View File

@@ -1,22 +0,0 @@
import type { AuthenticatedRequest } from '@/requests';
import type { VersionControlPreferences } from './versionControlPreferences';
import type { VersionControlSetBranch } from './versionControlSetBranch';
import type { VersionControlCommit } from './versionControlCommit';
import type { VersionControlStage } from './versionControlStage';
import type { VersionControlPush } from './versionControlPush';
import type { VersionControlPushWorkFolder } from './versionControlPushWorkFolder';
import type { VersionControlPullWorkFolder } from './versionControlPullWorkFolder';
import type { VersionControlDisconnect } from './versionControlDisconnect';
import type { VersionControlSetReadOnly } from './versionControlSetReadOnly';
export declare namespace VersionControlRequest {
type UpdatePreferences = AuthenticatedRequest<{}, {}, Partial<VersionControlPreferences>, {}>;
type SetReadOnly = AuthenticatedRequest<{}, {}, VersionControlSetReadOnly, {}>;
type SetBranch = AuthenticatedRequest<{}, {}, VersionControlSetBranch, {}>;
type Commit = AuthenticatedRequest<{}, {}, VersionControlCommit, {}>;
type Stage = AuthenticatedRequest<{}, {}, VersionControlStage, {}>;
type Push = AuthenticatedRequest<{}, {}, VersionControlPush, {}>;
type Disconnect = AuthenticatedRequest<{}, {}, VersionControlDisconnect, {}>;
type PushWorkFolder = AuthenticatedRequest<{}, {}, VersionControlPushWorkFolder, {}>;
type PullWorkFolder = AuthenticatedRequest<{}, {}, VersionControlPullWorkFolder, {}>;
}

View File

@@ -1,19 +0,0 @@
export type VersionControlledFileStatus =
| 'new'
| 'modified'
| 'deleted'
| 'created'
| 'renamed'
| 'conflicted'
| 'unknown';
export type VersionControlledFileLocation = 'local' | 'remote';
export type VersionControlledFileType = 'credential' | 'workflow' | 'tags' | 'variables' | 'file';
export type VersionControlledFile = {
file: string;
id: string;
name: string;
type: VersionControlledFileType;
status: VersionControlledFileStatus;
location: VersionControlledFileLocation;
conflict: boolean;
};

View File

@@ -1,338 +0,0 @@
import { Authorized, Get, Post, Patch, RestController } from '@/decorators';
import {
versionControlLicensedMiddleware,
versionControlLicensedAndEnabledMiddleware,
} from './middleware/versionControlEnabledMiddleware.ee';
import { VersionControlService } from './versionControl.service.ee';
import { VersionControlRequest } from './types/requests';
import type { VersionControlPreferences } from './types/versionControlPreferences';
import { BadRequestError } from '@/ResponseHelper';
import type { PullResult, PushResult, StatusResult } from 'simple-git';
import { AuthenticatedRequest } from '../../requests';
import express from 'express';
import type { ImportResult } from './types/importResult';
import type { VersionControlPushWorkFolder } from './types/versionControlPushWorkFolder';
import { VersionControlPreferencesService } from './versionControlPreferences.service.ee';
import type { VersionControlledFile } from './types/versionControlledFile';
import { VERSION_CONTROL_API_ROOT, VERSION_CONTROL_DEFAULT_BRANCH } from './constants';
@RestController(`/${VERSION_CONTROL_API_ROOT}`)
export class VersionControlController {
constructor(
private versionControlService: VersionControlService,
private versionControlPreferencesService: VersionControlPreferencesService,
) {}
@Authorized('any')
@Get('/preferences', { middlewares: [versionControlLicensedMiddleware] })
async getPreferences(): Promise<VersionControlPreferences> {
// returns the settings with the privateKey property redacted
return this.versionControlPreferencesService.getPreferences();
}
@Authorized(['global', 'owner'])
@Post('/preferences', { middlewares: [versionControlLicensedMiddleware] })
async setPreferences(req: VersionControlRequest.UpdatePreferences) {
if (
req.body.branchReadOnly === undefined &&
this.versionControlPreferencesService.isVersionControlConnected()
) {
throw new BadRequestError(
'Cannot change preferences while connected to a version control provider. Please disconnect first.',
);
}
try {
const sanitizedPreferences: Partial<VersionControlPreferences> = {
...req.body,
initRepo: req.body.initRepo ?? true, // default to true if not specified
connected: undefined,
publicKey: undefined,
};
await this.versionControlPreferencesService.validateVersionControlPreferences(
sanitizedPreferences,
);
const updatedPreferences = await this.versionControlPreferencesService.setPreferences(
sanitizedPreferences,
);
if (sanitizedPreferences.initRepo === true) {
try {
await this.versionControlService.initializeRepository({
...updatedPreferences,
branchName:
updatedPreferences.branchName === ''
? VERSION_CONTROL_DEFAULT_BRANCH
: updatedPreferences.branchName,
initRepo: true,
});
if (this.versionControlPreferencesService.getPreferences().branchName !== '') {
await this.versionControlPreferencesService.setPreferences({
connected: true,
});
}
} catch (error) {
// if initialization fails, run cleanup to remove any intermediate state and throw the error
await this.versionControlService.disconnect({ keepKeyPair: true });
throw error;
}
}
await this.versionControlService.init();
return this.versionControlPreferencesService.getPreferences();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Patch('/preferences', { middlewares: [versionControlLicensedMiddleware] })
async updatePreferences(req: VersionControlRequest.UpdatePreferences) {
try {
const sanitizedPreferences: Partial<VersionControlPreferences> = {
...req.body,
initRepo: false,
connected: undefined,
publicKey: undefined,
repositoryUrl: undefined,
authorName: undefined,
authorEmail: undefined,
};
const currentPreferences = this.versionControlPreferencesService.getPreferences();
await this.versionControlPreferencesService.validateVersionControlPreferences(
sanitizedPreferences,
);
if (
sanitizedPreferences.branchName &&
sanitizedPreferences.branchName !== currentPreferences.branchName
) {
await this.versionControlService.setBranch(sanitizedPreferences.branchName);
}
if (sanitizedPreferences.branchColor || sanitizedPreferences.branchReadOnly !== undefined) {
await this.versionControlPreferencesService.setPreferences(
{
branchColor: sanitizedPreferences.branchColor,
branchReadOnly: sanitizedPreferences.branchReadOnly,
},
true,
);
}
await this.versionControlService.init();
return this.versionControlPreferencesService.getPreferences();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/disconnect', { middlewares: [versionControlLicensedMiddleware] })
async disconnect(req: VersionControlRequest.Disconnect) {
try {
return await this.versionControlService.disconnect(req.body);
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized('any')
@Get('/get-branches', { middlewares: [versionControlLicensedMiddleware] })
async getBranches() {
try {
return await this.versionControlService.getBranches();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/push-workfolder', { middlewares: [versionControlLicensedAndEnabledMiddleware] })
async pushWorkfolder(
req: VersionControlRequest.PushWorkFolder,
res: express.Response,
): Promise<PushResult | VersionControlledFile[]> {
if (this.versionControlPreferencesService.isBranchReadOnly()) {
throw new BadRequestError('Cannot push onto read-only branch.');
}
try {
const result = await this.versionControlService.pushWorkfolder(req.body);
if ((result as PushResult).pushed) {
res.statusCode = 200;
} else {
res.statusCode = 409;
}
return result;
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/pull-workfolder', { middlewares: [versionControlLicensedAndEnabledMiddleware] })
async pullWorkfolder(
req: VersionControlRequest.PullWorkFolder,
res: express.Response,
): Promise<VersionControlledFile[] | ImportResult | PullResult | StatusResult | undefined> {
try {
const result = await this.versionControlService.pullWorkfolder({
force: req.body.force,
variables: req.body.variables,
userId: req.user.id,
importAfterPull: req.body.importAfterPull ?? true,
});
if ((result as ImportResult)?.workflows) {
res.statusCode = 200;
} else {
res.statusCode = 409;
}
return result;
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Get('/reset-workfolder', { middlewares: [versionControlLicensedAndEnabledMiddleware] })
async resetWorkfolder(
req: VersionControlRequest.PullWorkFolder,
): Promise<ImportResult | undefined> {
try {
return await this.versionControlService.resetWorkfolder({
force: req.body.force,
variables: req.body.variables,
userId: req.user.id,
importAfterPull: req.body.importAfterPull ?? true,
});
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized('any')
@Get('/get-status', { middlewares: [versionControlLicensedAndEnabledMiddleware] })
async getStatus() {
try {
return await this.versionControlService.getStatus();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized('any')
@Get('/status', { middlewares: [versionControlLicensedMiddleware] })
async status(): Promise<StatusResult> {
try {
return await this.versionControlService.status();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/generate-key-pair', { middlewares: [versionControlLicensedMiddleware] })
async generateKeyPair(): Promise<VersionControlPreferences> {
try {
const result = await this.versionControlPreferencesService.generateAndSaveKeyPair();
return result;
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
// #region Version Control Test Functions
//TODO: SEPARATE FUNCTIONS FOR DEVELOPMENT ONLY
//TODO: REMOVE THESE FUNCTIONS AFTER TESTING
@Authorized(['global', 'owner'])
@Get('/export', { middlewares: [versionControlLicensedMiddleware] })
async export() {
try {
return await this.versionControlService.export();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Get('/import', { middlewares: [versionControlLicensedMiddleware] })
async import(req: AuthenticatedRequest) {
try {
return await this.versionControlService.import({
userId: req.user.id,
});
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized('any')
@Get('/fetch')
async fetch() {
try {
return await this.versionControlService.fetch();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized('any')
@Get('/diff')
async diff() {
try {
return await this.versionControlService.diff();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/push')
async push(req: VersionControlRequest.Push): Promise<PushResult> {
if (this.versionControlPreferencesService.isBranchReadOnly()) {
throw new BadRequestError('Cannot push onto read-only branch.');
}
try {
return await this.versionControlService.push(req.body.force);
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/commit')
async commit(req: VersionControlRequest.Commit) {
try {
return await this.versionControlService.commit(req.body.message);
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/stage')
async stage(req: VersionControlRequest.Stage): Promise<{ staged: string[] } | string> {
try {
return await this.versionControlService.stage(req.body as VersionControlPushWorkFolder);
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Post('/unstage')
async unstage(): Promise<StatusResult | string> {
try {
return await this.versionControlService.unstage();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
@Authorized(['global', 'owner'])
@Get('/pull')
async pull(): Promise<PullResult> {
try {
return await this.versionControlService.pull();
} catch (error) {
throw new BadRequestError((error as { message: string }).message);
}
}
// #endregion
}

View File

@@ -1,674 +0,0 @@
import Container, { Service } from 'typedi';
import path from 'path';
import {
VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER,
VERSION_CONTROL_GIT_FOLDER,
VERSION_CONTROL_TAGS_EXPORT_FILE,
VERSION_CONTROL_VARIABLES_EXPORT_FILE,
VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER,
} from './constants';
import * as Db from '@/Db';
import glob from 'fast-glob';
import type { ICredentialDataDecryptedObject } from 'n8n-workflow';
import { LoggerProxy, jsonParse } from 'n8n-workflow';
import { writeFile as fsWriteFile, readFile as fsReadFile, rm as fsRm } from 'fs/promises';
import { VersionControlGitService } from './versionControlGit.service.ee';
import { Credentials, UserSettings } from 'n8n-core';
import type { IWorkflowToImport } from '@/Interfaces';
import type { ExportableWorkflow } from './types/exportableWorkflow';
import type { ExportableCredential } from './types/exportableCredential';
import type { ExportResult } from './types/exportResult';
import { SharedWorkflow } from '@/databases/entities/SharedWorkflow';
import { CredentialsEntity } from '@/databases/entities/CredentialsEntity';
import { Variables } from '@/databases/entities/Variables';
import type { ImportResult } from './types/importResult';
import { UM_FIX_INSTRUCTION } from '@/commands/BaseCommand';
import config from '@/config';
import { SharedCredentials } from '@/databases/entities/SharedCredentials';
import { WorkflowEntity } from '@/databases/entities/WorkflowEntity';
import { WorkflowTagMapping } from '@/databases/entities/WorkflowTagMapping';
import { TagEntity } from '@/databases/entities/TagEntity';
import { ActiveWorkflowRunner } from '../../ActiveWorkflowRunner';
import without from 'lodash/without';
import type { VersionControllPullOptions } from './types/versionControlPullWorkFolder';
import { versionControlFoldersExistCheck } from './versionControlHelper.ee';
import { In } from 'typeorm';
@Service()
export class VersionControlExportService {
private gitFolder: string;
private workflowExportFolder: string;
private credentialExportFolder: string;
constructor(private gitService: VersionControlGitService) {
const userFolder = UserSettings.getUserN8nFolderPath();
this.gitFolder = path.join(userFolder, VERSION_CONTROL_GIT_FOLDER);
this.workflowExportFolder = path.join(this.gitFolder, VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER);
this.credentialExportFolder = path.join(
this.gitFolder,
VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER,
);
}
getWorkflowPath(workflowId: string): string {
return path.join(this.workflowExportFolder, `${workflowId}.json`);
}
getCredentialsPath(credentialsId: string): string {
return path.join(this.credentialExportFolder, `${credentialsId}.json`);
}
getTagsPath(): string {
return path.join(this.gitFolder, VERSION_CONTROL_TAGS_EXPORT_FILE);
}
getVariablesPath(): string {
return path.join(this.gitFolder, VERSION_CONTROL_VARIABLES_EXPORT_FILE);
}
async getWorkflowFromFile(
filePath: string,
root = this.gitFolder,
): Promise<IWorkflowToImport | undefined> {
try {
const importedWorkflow = jsonParse<IWorkflowToImport>(
await fsReadFile(path.join(root, filePath), { encoding: 'utf8' }),
);
return importedWorkflow;
} catch (error) {
return undefined;
}
}
async getCredentialFromFile(
filePath: string,
root = this.gitFolder,
): Promise<ExportableCredential | undefined> {
try {
const credential = jsonParse<ExportableCredential>(
await fsReadFile(path.join(root, filePath), { encoding: 'utf8' }),
);
return credential;
} catch (error) {
return undefined;
}
}
private async getOwnerGlobalRole() {
const ownerCredentiallRole = await Db.collections.Role.findOne({
where: { name: 'owner', scope: 'global' },
});
if (!ownerCredentiallRole) {
throw new Error(`Failed to find owner. ${UM_FIX_INSTRUCTION}`);
}
return ownerCredentiallRole;
}
private async getOwnerCredentialRole() {
const ownerCredentiallRole = await Db.collections.Role.findOne({
where: { name: 'owner', scope: 'credential' },
});
if (!ownerCredentiallRole) {
throw new Error(`Failed to find owner. ${UM_FIX_INSTRUCTION}`);
}
return ownerCredentiallRole;
}
private async getOwnerWorkflowRole() {
const ownerWorkflowRole = await Db.collections.Role.findOne({
where: { name: 'owner', scope: 'workflow' },
});
if (!ownerWorkflowRole) {
throw new Error(`Failed to find owner workflow role. ${UM_FIX_INSTRUCTION}`);
}
return ownerWorkflowRole;
}
async cleanWorkFolder() {
try {
const workflowFiles = await glob('*.json', {
cwd: this.workflowExportFolder,
absolute: true,
});
const credentialFiles = await glob('*.json', {
cwd: this.credentialExportFolder,
absolute: true,
});
const variablesFile = await glob(VERSION_CONTROL_VARIABLES_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
});
const tagsFile = await glob(VERSION_CONTROL_TAGS_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
});
await Promise.all(tagsFile.map(async (e) => fsRm(e)));
await Promise.all(variablesFile.map(async (e) => fsRm(e)));
await Promise.all(workflowFiles.map(async (e) => fsRm(e)));
await Promise.all(credentialFiles.map(async (e) => fsRm(e)));
LoggerProxy.debug('Cleaned work folder.');
} catch (error) {
LoggerProxy.error(`Failed to clean work folder: ${(error as Error).message}`);
}
}
async deleteRepositoryFolder() {
try {
await fsRm(this.gitFolder, { recursive: true });
} catch (error) {
LoggerProxy.error(`Failed to delete work folder: ${(error as Error).message}`);
}
}
private async rmDeletedWorkflowsFromExportFolder(
workflowsToBeExported: SharedWorkflow[],
): Promise<Set<string>> {
const sharedWorkflowsFileNames = new Set<string>(
workflowsToBeExported.map((e) => this.getWorkflowPath(e?.workflow?.name)),
);
const existingWorkflowsInFolder = new Set<string>(
await glob('*.json', {
cwd: this.workflowExportFolder,
absolute: true,
}),
);
const deletedWorkflows = new Set(existingWorkflowsInFolder);
for (const elem of sharedWorkflowsFileNames) {
deletedWorkflows.delete(elem);
}
try {
await Promise.all([...deletedWorkflows].map(async (e) => fsRm(e)));
} catch (error) {
LoggerProxy.error(`Failed to delete workflows from work folder: ${(error as Error).message}`);
}
return deletedWorkflows;
}
private async writeExportableWorkflowsToExportFolder(workflowsToBeExported: SharedWorkflow[]) {
await Promise.all(
workflowsToBeExported.map(async (e) => {
if (!e.workflow) {
LoggerProxy.debug(
`Found no corresponding workflow ${e.workflowId ?? 'unknown'}, skipping export`,
);
return;
}
const fileName = this.getWorkflowPath(e.workflow?.id);
const sanitizedWorkflow: ExportableWorkflow = {
active: e.workflow?.active,
id: e.workflow?.id,
name: e.workflow?.name,
nodes: e.workflow?.nodes,
connections: e.workflow?.connections,
settings: e.workflow?.settings,
triggerCount: e.workflow?.triggerCount,
owner: e.user.email,
versionId: e.workflow?.versionId,
};
LoggerProxy.debug(`Writing workflow ${e.workflowId} to ${fileName}`);
return fsWriteFile(fileName, JSON.stringify(sanitizedWorkflow, null, 2));
}),
);
}
async exportWorkflowsToWorkFolder(): Promise<ExportResult> {
try {
versionControlFoldersExistCheck([this.workflowExportFolder]);
const sharedWorkflows = await Db.collections.SharedWorkflow.find({
relations: ['workflow', 'role', 'user'],
where: {
role: {
name: 'owner',
scope: 'workflow',
},
},
});
// before exporting, figure out which workflows have been deleted and remove them from the export folder
const removedFiles = await this.rmDeletedWorkflowsFromExportFolder(sharedWorkflows);
// write the workflows to the export folder as json files
await this.writeExportableWorkflowsToExportFolder(sharedWorkflows);
return {
count: sharedWorkflows.length,
folder: this.workflowExportFolder,
files: sharedWorkflows.map((e) => ({
id: e?.workflow?.id,
name: this.getWorkflowPath(e?.workflow?.name),
})),
removedFiles: [...removedFiles],
};
} catch (error) {
throw Error(`Failed to export workflows to work folder: ${(error as Error).message}`);
}
}
async exportVariablesToWorkFolder(): Promise<ExportResult> {
try {
versionControlFoldersExistCheck([this.gitFolder]);
const variables = await Db.collections.Variables.find();
// do not export empty variables
if (variables.length === 0) {
return {
count: 0,
folder: this.gitFolder,
files: [],
};
}
const fileName = this.getVariablesPath();
const sanitizedVariables = variables.map((e) => ({ ...e, value: '' }));
await fsWriteFile(fileName, JSON.stringify(sanitizedVariables, null, 2));
return {
count: sanitizedVariables.length,
folder: this.gitFolder,
files: [
{
id: '',
name: fileName,
},
],
};
} catch (error) {
throw Error(`Failed to export variables to work folder: ${(error as Error).message}`);
}
}
async exportTagsToWorkFolder(): Promise<ExportResult> {
try {
versionControlFoldersExistCheck([this.gitFolder]);
const tags = await Db.collections.Tag.find();
const mappings = await Db.collections.WorkflowTagMapping.find();
const fileName = this.getTagsPath();
await fsWriteFile(
fileName,
JSON.stringify(
{
tags: tags.map((tag) => ({ id: tag.id, name: tag.name })),
mappings,
},
null,
2,
),
);
return {
count: tags.length,
folder: this.gitFolder,
files: [
{
id: '',
name: fileName,
},
],
};
} catch (error) {
throw Error(`Failed to export variables to work folder: ${(error as Error).message}`);
}
}
private replaceCredentialData = (
data: ICredentialDataDecryptedObject,
): ICredentialDataDecryptedObject => {
for (const [key] of Object.entries(data)) {
try {
if (typeof data[key] === 'object') {
data[key] = this.replaceCredentialData(data[key] as ICredentialDataDecryptedObject);
} else if (typeof data[key] === 'string') {
data[key] = (data[key] as string)?.startsWith('={{') ? data[key] : '';
} else if (typeof data[key] === 'number') {
// TODO: leaving numbers in for now, but maybe we should remove them
// data[key] = 0;
}
} catch (error) {
LoggerProxy.error(`Failed to sanitize credential data: ${(error as Error).message}`);
throw error;
}
}
return data;
};
async exportCredentialsToWorkFolder(): Promise<ExportResult> {
try {
versionControlFoldersExistCheck([this.credentialExportFolder]);
const sharedCredentials = await Db.collections.SharedCredentials.find({
relations: ['credentials', 'role', 'user'],
});
const encryptionKey = await UserSettings.getEncryptionKey();
await Promise.all(
sharedCredentials.map(async (sharedCredential) => {
const { name, type, nodesAccess, data, id } = sharedCredential.credentials;
const credentialObject = new Credentials({ id, name }, type, nodesAccess, data);
const plainData = credentialObject.getData(encryptionKey);
const sanitizedData = this.replaceCredentialData(plainData);
const fileName = path.join(
this.credentialExportFolder,
`${sharedCredential.credentials.id}.json`,
);
const sanitizedCredential: ExportableCredential = {
id: sharedCredential.credentials.id,
name: sharedCredential.credentials.name,
type: sharedCredential.credentials.type,
data: sanitizedData,
};
LoggerProxy.debug(`Writing credential ${sharedCredential.credentials.id} to ${fileName}`);
return fsWriteFile(fileName, JSON.stringify(sanitizedCredential, null, 2));
}),
);
return {
count: sharedCredentials.length,
folder: this.credentialExportFolder,
files: sharedCredentials.map((e) => ({
id: e.credentials.id,
name: path.join(this.credentialExportFolder, `${e.credentials.name}.json`),
})),
};
} catch (error) {
throw Error(`Failed to export credentials to work folder: ${(error as Error).message}`);
}
}
private async importCredentialsFromFiles(
userId: string,
): Promise<Array<{ id: string; name: string; type: string }>> {
const credentialFiles = await glob('*.json', {
cwd: this.credentialExportFolder,
absolute: true,
});
const existingCredentials = await Db.collections.Credentials.find();
const ownerCredentialRole = await this.getOwnerCredentialRole();
const ownerGlobalRole = await this.getOwnerGlobalRole();
const encryptionKey = await UserSettings.getEncryptionKey();
let importCredentialsResult: Array<{ id: string; name: string; type: string }> = [];
await Db.transaction(async (transactionManager) => {
importCredentialsResult = await Promise.all(
credentialFiles.map(async (file) => {
LoggerProxy.debug(`Importing credentials file ${file}`);
const credential = jsonParse<ExportableCredential>(
await fsReadFile(file, { encoding: 'utf8' }),
);
const existingCredential = existingCredentials.find(
(e) => e.id === credential.id && e.type === credential.type,
);
const sharedOwner = await Db.collections.SharedCredentials.findOne({
select: ['userId'],
where: {
credentialsId: credential.id,
roleId: In([ownerCredentialRole.id, ownerGlobalRole.id]),
},
});
const { name, type, data, id } = credential;
const newCredentialObject = new Credentials({ id, name }, type, []);
if (existingCredential?.data) {
newCredentialObject.data = existingCredential.data;
} else {
newCredentialObject.setData(data, encryptionKey);
}
if (existingCredential?.nodesAccess) {
newCredentialObject.nodesAccess = existingCredential.nodesAccess;
}
LoggerProxy.debug(`Updating credential id ${newCredentialObject.id as string}`);
await transactionManager.upsert(CredentialsEntity, newCredentialObject, ['id']);
if (!sharedOwner) {
const newSharedCredential = new SharedCredentials();
newSharedCredential.credentialsId = newCredentialObject.id as string;
newSharedCredential.userId = userId;
newSharedCredential.roleId = ownerGlobalRole.id;
await transactionManager.upsert(SharedCredentials, { ...newSharedCredential }, [
'credentialsId',
'userId',
]);
}
// TODO: once IDs are unique, remove this
if (config.getEnv('database.type') === 'postgresdb') {
await transactionManager.query(
"SELECT setval('credentials_entity_id_seq', (SELECT MAX(id) from credentials_entity))",
);
}
return {
id: newCredentialObject.id as string,
name: newCredentialObject.name,
type: newCredentialObject.type,
};
}),
);
});
return importCredentialsResult.filter((e) => e !== undefined);
}
private async importVariablesFromFile(valueOverrides?: {
[key: string]: string;
}): Promise<{ added: string[]; changed: string[] }> {
const variablesFile = await glob(VERSION_CONTROL_VARIABLES_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
});
if (variablesFile.length > 0) {
LoggerProxy.debug(`Importing variables from file ${variablesFile[0]}`);
const overriddenKeys = Object.keys(valueOverrides ?? {});
const importedVariables = jsonParse<Variables[]>(
await fsReadFile(variablesFile[0], { encoding: 'utf8' }),
{ fallbackValue: [] },
);
const importedKeys = importedVariables.map((variable) => variable.key);
const existingVariables = await Db.collections.Variables.find();
const existingKeys = existingVariables.map((variable) => variable.key);
const addedKeysFromImport = without(importedKeys, ...existingKeys);
const addedKeysFromOverride = without(overriddenKeys, ...existingKeys);
const addedVariables = importedVariables.filter((e) => addedKeysFromImport.includes(e.key));
addedKeysFromOverride.forEach((key) => {
addedVariables.push({
key,
value: valueOverrides ? valueOverrides[key] : '',
type: 'string',
} as Variables);
});
// first round, add missing variable keys to Db without touching values
await Db.transaction(async (transactionManager) => {
await Promise.all(
addedVariables.map(async (addedVariable) => {
await transactionManager.insert(Variables, {
...addedVariable,
id: undefined,
});
}),
);
});
// second round, update values of existing variables if overridden
if (valueOverrides) {
await Db.transaction(async (transactionManager) => {
await Promise.all(
overriddenKeys.map(async (key) => {
await transactionManager.update(Variables, { key }, { value: valueOverrides[key] });
}),
);
});
}
return {
added: [...addedKeysFromImport, ...addedKeysFromOverride],
changed: without(overriddenKeys, ...addedKeysFromOverride),
};
}
return { added: [], changed: [] };
}
private async importTagsFromFile() {
const tagsFile = await glob(VERSION_CONTROL_TAGS_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
});
if (tagsFile.length > 0) {
LoggerProxy.debug(`Importing tags from file ${tagsFile[0]}`);
const mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>(
await fsReadFile(tagsFile[0], { encoding: 'utf8' }),
{ fallbackValue: { tags: [], mappings: [] } },
);
const existingWorkflowIds = new Set(
(
await Db.collections.Workflow.find({
select: ['id'],
})
).map((e) => e.id),
);
await Db.transaction(async (transactionManager) => {
await Promise.all(
mappedTags.tags.map(async (tag) => {
await transactionManager.upsert(
TagEntity,
{
...tag,
},
{
skipUpdateIfNoValuesChanged: true,
conflictPaths: { id: true },
},
);
}),
);
await Promise.all(
mappedTags.mappings.map(async (mapping) => {
if (!existingWorkflowIds.has(String(mapping.workflowId))) return;
await transactionManager.upsert(
WorkflowTagMapping,
{ tagId: String(mapping.tagId), workflowId: String(mapping.workflowId) },
{
skipUpdateIfNoValuesChanged: true,
conflictPaths: { tagId: true, workflowId: true },
},
);
}),
);
});
return mappedTags;
}
return { tags: [], mappings: [] };
}
private async importWorkflowsFromFiles(
userId: string,
): Promise<Array<{ id: string; name: string }>> {
const workflowFiles = await glob('*.json', {
cwd: this.workflowExportFolder,
absolute: true,
});
const existingWorkflows = await Db.collections.Workflow.find({
select: ['id', 'name', 'active', 'versionId'],
});
const ownerWorkflowRole = await this.getOwnerWorkflowRole();
const workflowRunner = Container.get(ActiveWorkflowRunner);
let importWorkflowsResult = new Array<{ id: string; name: string }>();
// TODO: once IDs are unique and we removed autoincrement, remove this
if (config.getEnv('database.type') === 'postgresdb') {
await Db.transaction(async (transactionManager) => {
await transactionManager.query(
'ALTER SEQUENCE IF EXISTS "workflow_entity_id_seq" RESTART;',
);
await transactionManager.query(
"SELECT setval('workflow_entity_id_seq', (SELECT MAX(id) from workflow_entity) );",
// "SELECT setval('workflow_entity_id_seq', (SELECT MAX(v) FROM (VALUES (1), ((SELECT MAX(id) from workflow_entity))) as value(v)));",
);
});
}
await Db.transaction(async (transactionManager) => {
importWorkflowsResult = await Promise.all(
workflowFiles.map(async (file) => {
LoggerProxy.debug(`Parsing workflow file ${file}`);
const importedWorkflow = jsonParse<IWorkflowToImport>(
await fsReadFile(file, { encoding: 'utf8' }),
);
const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id);
if (existingWorkflow?.versionId === importedWorkflow.versionId) {
LoggerProxy.debug(
`Skipping import of workflow ${
importedWorkflow.id ?? 'n/a'
} - versionId is up to date`,
);
return {
id: importedWorkflow.id ?? 'n/a',
name: 'skipped',
};
}
LoggerProxy.debug(`Importing workflow ${importedWorkflow.id ?? 'n/a'}`);
importedWorkflow.active = existingWorkflow?.active ?? false;
LoggerProxy.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`);
const upsertResult = await transactionManager.upsert(
WorkflowEntity,
{ ...importedWorkflow },
['id'],
);
if (upsertResult?.identifiers?.length !== 1) {
throw new Error(`Failed to upsert workflow ${importedWorkflow.id ?? 'new'}`);
}
// due to sequential Ids, this may have changed during the insert
// TODO: once IDs are unique and we removed autoincrement, remove this
const upsertedWorkflowId = upsertResult.identifiers[0].id as string;
await transactionManager.upsert(
SharedWorkflow,
{
workflowId: upsertedWorkflowId,
userId,
roleId: ownerWorkflowRole.id,
},
['workflowId', 'userId'],
);
if (existingWorkflow?.active) {
try {
// remove active pre-import workflow
LoggerProxy.debug(`Deactivating workflow id ${existingWorkflow.id}`);
await workflowRunner.remove(existingWorkflow.id);
// try activating the imported workflow
LoggerProxy.debug(`Reactivating workflow id ${existingWorkflow.id}`);
await workflowRunner.add(existingWorkflow.id, 'activate');
} catch (error) {
LoggerProxy.error(
`Failed to activate workflow ${existingWorkflow.id}`,
error as Error,
);
}
}
return {
id: importedWorkflow.id ?? 'unknown',
name: file,
};
}),
);
});
return importWorkflowsResult;
}
async importFromWorkFolder(options: VersionControllPullOptions): Promise<ImportResult> {
try {
const importedVariables = await this.importVariablesFromFile(options.variables);
const importedCredentials = await this.importCredentialsFromFiles(options.userId);
const importWorkflows = await this.importWorkflowsFromFiles(options.userId);
const importTags = await this.importTagsFromFile();
return {
variables: importedVariables,
credentials: importedCredentials,
workflows: importWorkflows,
tags: importTags,
};
} catch (error) {
throw Error(`Failed to import workflows from work folder: ${(error as Error).message}`);
}
}
}

View File

@@ -1,7 +1,5 @@
import { parse, stringify } from 'flatted';
import type { IRun, IRunExecutionData, ITaskData } from 'n8n-workflow';
import { NodeOperationError, WorkflowOperationError } from 'n8n-workflow';
import * as Db from '@/Db';
import type { EventMessageTypes, EventNamesTypes } from '../EventMessageClasses';
import type { DateTime } from 'luxon';
import { Push } from '@/push';
@@ -11,24 +9,21 @@ import { eventBus } from './MessageEventBus';
import { Container } from 'typedi';
import { InternalHooks } from '@/InternalHooks';
import { getWorkflowHooksMain } from '@/WorkflowExecuteAdditionalData';
import { ExecutionRepository } from '@/databases/repositories';
export async function recoverExecutionDataFromEventLogMessages(
executionId: string,
messages: EventMessageTypes[],
applyToDb = true,
): Promise<IRunExecutionData | undefined> {
const executionEntry = await Db.collections.Execution.findOne({
where: {
id: executionId,
},
const executionEntry = await Container.get(ExecutionRepository).findSingleExecution(executionId, {
includeData: true,
unflattenData: true,
});
if (executionEntry && messages) {
let executionData: IRunExecutionData | undefined;
let executionData = executionEntry.data;
let workflowError: WorkflowOperationError | undefined;
try {
executionData = parse(executionEntry.data) as IRunExecutionData;
} catch {}
if (!executionData) {
executionData = { resultData: { runData: {} } };
}
@@ -156,8 +151,8 @@ export async function recoverExecutionDataFromEventLogMessages(
if (applyToDb) {
const newStatus = executionEntry.status === 'failed' ? 'failed' : 'crashed';
await Db.collections.Execution.update(executionId, {
data: stringify(executionData),
await Container.get(ExecutionRepository).updateExistingExecution(executionId, {
data: executionData,
status: newStatus,
stoppedAt: lastNodeRunTimestamp?.toJSDate(),
});
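
The repository call above replaces direct access to the Execution collection: with includeData and unflattenData set, the caller receives parsed IRunExecutionData instead of a flatted string. A usage sketch under those assumptions, using only the options shown in this diff:

import { Container } from 'typedi';
import { ExecutionRepository } from '@/databases/repositories';

// Sketch: fetch one execution with its data column already unflattened.
async function inspectExecution(executionId: string) {
	const execution = await Container.get(ExecutionRepository).findSingleExecution(executionId, {
		includeData: true,
		unflattenData: true,
	});
	if (execution?.data) {
		// data is IRunExecutionData here, no manual parse() required
		console.log(Object.keys(execution.data.resultData.runData));
	}
}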

View File

@@ -1,10 +1,10 @@
 import { Container } from 'typedi';
-import type { IExecutionFlattedDb } from '@/Interfaces';
 import type { ExecutionStatus } from 'n8n-workflow';
 import { License } from '@/License';
+import type { IExecutionFlattedDb, IExecutionResponse } from '@/Interfaces';

 export function getStatusUsingPreviousExecutionStatusMethod(
-  execution: IExecutionFlattedDb,
+  execution: IExecutionFlattedDb | IExecutionResponse,
 ): ExecutionStatus {
   if (execution.waitTill) {
     return 'waiting';
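Since the helper only reads fields common to both shapes (e.g. waitTill), widening the parameter to a union lets it back-fill statuses for the unflattened responses the repository now returns as well as the flattened DB rows. A minimal illustration, assuming the imports above:

// Sketch: both shapes are accepted after the widening
// (declare is used here to avoid fabricating values).
declare const flattened: IExecutionFlattedDb;
declare const unflattened: IExecutionResponse;

const a: ExecutionStatus = getStatusUsingPreviousExecutionStatusMethod(flattened);
const b: ExecutionStatus = getStatusUsingPreviousExecutionStatusMethod(unflattened);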


@@ -2,23 +2,13 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 import { validate as jsonSchemaValidate } from 'jsonschema';
-import { BinaryDataManager } from 'n8n-core';
-import type {
-  IDataObject,
-  IWorkflowBase,
-  JsonObject,
-  ExecutionStatus,
-  IRunExecutionData,
-  NodeOperationError,
-  IExecutionsSummary,
-} from 'n8n-workflow';
-import { deepCopy, LoggerProxy, jsonParse, Workflow } from 'n8n-workflow';
-import type { FindOperator, FindOptionsWhere } from 'typeorm';
-import { In, IsNull, LessThanOrEqual, MoreThanOrEqual, Not, Raw } from 'typeorm';
+import type { IWorkflowBase, JsonObject, ExecutionStatus } from 'n8n-workflow';
+import { LoggerProxy, jsonParse, Workflow } from 'n8n-workflow';
+import type { FindOperator } from 'typeorm';
+import { In } from 'typeorm';
 import { ActiveExecutions } from '@/ActiveExecutions';
 import config from '@/config';
-import type { User } from '@db/entities/User';
-import type { ExecutionEntity } from '@db/entities/ExecutionEntity';
 import type {
   IExecutionFlattedResponse,
   IExecutionResponse,
@@ -33,16 +23,11 @@ import { getSharedWorkflowIds } from '@/WorkflowHelpers';
 import { WorkflowRunner } from '@/WorkflowRunner';
 import * as Db from '@/Db';
 import * as GenericHelpers from '@/GenericHelpers';
-import { parse } from 'flatted';
 import { Container } from 'typedi';
-import {
-  getStatusUsingPreviousExecutionStatusMethod,
-  isAdvancedExecutionFiltersEnabled,
-} from './executionHelpers';
-import { ExecutionMetadata } from '@db/entities/ExecutionMetadata';
-import { DateUtils } from 'typeorm/util/DateUtils';
+import { getStatusUsingPreviousExecutionStatusMethod } from './executionHelpers';
+import { ExecutionRepository } from '@/databases/repositories';

-interface IGetExecutionsQueryFilter {
+export interface IGetExecutionsQueryFilter {
   id?: FindOperator<string> | string;
   finished?: boolean;
   mode?: string;
@@ -102,102 +87,6 @@ export class ExecutionsService {
     return getSharedWorkflowIds(user, ['owner']);
   }

-  /**
-   * Helper function to retrieve count of Executions
-   */
-  static async getExecutionsCount(
-    countFilter: IDataObject,
-    user: User,
-    metadata?: Array<{ key: string; value: string }>,
-  ): Promise<{ count: number; estimated: boolean }> {
-    const dbType = config.getEnv('database.type');
-    const filteredFields = Object.keys(countFilter).filter((field) => field !== 'id');
-    // For databases other than Postgres, do a regular count
-    // when filtering based on `workflowId` or `finished` fields.
-    if (
-      dbType !== 'postgresdb' ||
-      metadata?.length ||
-      filteredFields.length > 0 ||
-      user.globalRole.name !== 'owner'
-    ) {
-      const sharedWorkflowIds = await this.getWorkflowIdsForUser(user);
-      let query = Db.collections.Execution.createQueryBuilder('execution')
-        .select()
-        .orderBy('execution.id', 'DESC')
-        .where({ workflowId: In(sharedWorkflowIds) });
-      if (metadata?.length) {
-        query = query.leftJoinAndSelect(ExecutionMetadata, 'md', 'md.executionId = execution.id');
-        for (const md of metadata) {
-          query = query.andWhere('md.key = :key AND md.value = :value', md);
-        }
-      }
-      if (filteredFields.length > 0) {
-        query = query.andWhere(countFilter);
-      }
-      const count = await query.getCount();
-      return { count, estimated: false };
-    }
-    try {
-      // Get an estimate of rows count.
-      const estimateRowsNumberSql =
-        "SELECT n_live_tup FROM pg_stat_all_tables WHERE relname = 'execution_entity';";
-      const rows: Array<{ n_live_tup: string }> = await Db.collections.Execution.query(
-        estimateRowsNumberSql,
-      );
-      const estimate = parseInt(rows[0].n_live_tup, 10);
-      // If over 100k, return just an estimate.
-      if (estimate > 100_000) {
-        // if less than 100k, we get the real count as even a full
-        // table scan should not take so long.
-        return { count: estimate, estimated: true };
-      }
-    } catch (error) {
-      LoggerProxy.warn(`Failed to get executions count from Postgres: ${error}`);
-    }
-    const sharedWorkflowIds = await getSharedWorkflowIds(user);
-    const count = await Db.collections.Execution.count({
-      where: {
-        workflowId: In(sharedWorkflowIds),
-      },
-    });
-    return { count, estimated: false };
-  }
-
-  static massageFilters(filter: IDataObject): void {
-    if (filter) {
-      if (filter.waitTill === true) {
-        filter.waitTill = Not(IsNull());
-        // eslint-disable-next-line @typescript-eslint/no-unnecessary-boolean-literal-compare
-      } else if (filter.finished === false) {
-        filter.waitTill = IsNull();
-      } else {
-        delete filter.waitTill;
-      }
-      if (Array.isArray(filter.metadata)) {
-        delete filter.metadata;
-      }
-      if ('startedAfter' in filter) {
-        delete filter.startedAfter;
-      }
-      if ('startedBefore' in filter) {
-        delete filter.startedBefore;
-      }
-    }
-  }
-
   static async getExecutionsList(req: ExecutionRequest.GetAll): Promise<IExecutionsListResponse> {
     const sharedWorkflowIds = await this.getWorkflowIdsForUser(req.user);
     if (sharedWorkflowIds.length === 0) {
@@ -266,167 +155,23 @@ export class ExecutionsService {
         .map(({ id }) => id),
     );

-    const findWhere: FindOptionsWhere<ExecutionEntity> = {
-      workflowId: In(sharedWorkflowIds),
-    };
-    if (filter?.status) {
-      Object.assign(findWhere, { status: In(filter.status) });
-    }
-    if (filter?.finished) {
-      Object.assign(findWhere, { finished: filter.finished });
-    }
-
-    const rangeQuery: string[] = [];
-    const rangeQueryParams: {
-      lastId?: string;
-      firstId?: string;
-      executingWorkflowIds?: string[];
-    } = {};
-
-    if (req.query.lastId) {
-      rangeQuery.push('execution.id < :lastId');
-      rangeQueryParams.lastId = req.query.lastId;
-    }
-
-    if (req.query.firstId) {
-      rangeQuery.push('execution.id > :firstId');
-      rangeQueryParams.firstId = req.query.firstId;
-    }
-
-    if (executingWorkflowIds.length > 0) {
-      rangeQuery.push('execution.id NOT IN (:...executingWorkflowIds)');
-      rangeQueryParams.executingWorkflowIds = executingWorkflowIds;
-    }
-
-    if (rangeQuery.length) {
-      Object.assign(findWhere, {
-        id: Raw(() => rangeQuery.join(' and '), rangeQueryParams),
-      });
-    }
-
-    // Omit `data` from the Execution since it is the largest and not necessary for the list.
-    let query = Db.collections.Execution.createQueryBuilder('execution')
-      .select([
-        'execution.id',
-        'execution.finished',
-        'execution.mode',
-        'execution.retryOf',
-        'execution.retrySuccessId',
-        'execution.waitTill',
-        'execution.startedAt',
-        'execution.stoppedAt',
-        'execution.workflowData',
-        'execution.status',
-      ])
-      .orderBy('execution.id', 'DESC')
-      .take(limit)
-      .where(findWhere);
-
-    const countFilter = deepCopy(filter ?? {});
-    const metadata = isAdvancedExecutionFiltersEnabled() ? filter?.metadata : undefined;
-
-    if (metadata?.length) {
-      query = query.leftJoin(ExecutionMetadata, 'md', 'md.executionId = execution.id');
-      for (const md of metadata) {
-        query = query.andWhere('md.key = :key AND md.value = :value', md);
-      }
-    }
-
-    if (filter?.startedAfter) {
-      query = query.andWhere({
-        startedAt: MoreThanOrEqual(
-          DateUtils.mixedDateToUtcDatetimeString(new Date(filter.startedAfter)),
-        ),
-      });
-    }
-
-    if (filter?.startedBefore) {
-      query = query.andWhere({
-        startedAt: LessThanOrEqual(
-          DateUtils.mixedDateToUtcDatetimeString(new Date(filter.startedBefore)),
-        ),
-      });
-    }
-
-    // deepcopy breaks the In operator so we need to reapply it
-    if (filter?.status) {
-      Object.assign(filter, { status: In(filter.status) });
-      Object.assign(countFilter, { status: In(filter.status) });
-    }
-
-    if (filter) {
-      this.massageFilters(filter as IDataObject);
-      query = query.andWhere(filter);
-    }
-
-    this.massageFilters(countFilter as IDataObject);
-    countFilter.id = Not(In(executingWorkflowIds));
-
-    const executions = await query.getMany();
-
-    const { count, estimated } = await this.getExecutionsCount(
-      countFilter as IDataObject,
-      req.user,
-      metadata,
+    const { count, estimated } = await Container.get(ExecutionRepository).countExecutions(
+      filter,
+      sharedWorkflowIds,
+      executingWorkflowIds,
+      req.user.globalRole.name === 'owner',
     );

-    const formattedExecutions: IExecutionsSummary[] = executions.map((execution) => {
-      // inject potential node execution errors into the execution response
-      const nodeExecutionStatus = {};
-      let lastNodeExecuted;
-      let executionError;
-      // fill execution status for old executions that will return null
-      if (!execution.status) {
-        execution.status = getStatusUsingPreviousExecutionStatusMethod(execution);
-      }
-      try {
-        const data = parse(execution.data) as IRunExecutionData;
-        lastNodeExecuted = data?.resultData?.lastNodeExecuted ?? '';
-        executionError = data?.resultData?.error;
-        if (data?.resultData?.runData) {
-          for (const key of Object.keys(data.resultData.runData)) {
-            const errors = data.resultData.runData[key]
-              ?.filter((taskdata) => taskdata.error?.name)
-              ?.map((taskdata) => {
-                if (taskdata.error?.name === 'NodeOperationError') {
-                  return {
-                    name: (taskdata.error as NodeOperationError).name,
-                    message: (taskdata.error as NodeOperationError).message,
-                    description: (taskdata.error as NodeOperationError).description,
-                  };
-                } else {
-                  return {
-                    name: taskdata.error?.name,
-                  };
-                }
-              });
-            Object.assign(nodeExecutionStatus, {
-              [key]: {
-                executionStatus: data.resultData.runData[key][0].executionStatus,
-                errors,
-                data: data.resultData.runData[key][0].data ?? undefined,
-              },
-            });
-          }
-        }
-      } catch {}
-      return {
-        id: execution.id,
-        finished: execution.finished,
-        mode: execution.mode,
-        retryOf: execution.retryOf?.toString(),
-        retrySuccessId: execution?.retrySuccessId?.toString(),
-        waitTill: execution.waitTill as Date | undefined,
-        startedAt: execution.startedAt,
-        stoppedAt: execution.stoppedAt,
-        workflowId: execution.workflowData?.id ?? '',
-        workflowName: execution.workflowData?.name,
-        status: execution.status,
-        lastNodeExecuted,
-        executionError,
-        nodeExecutionStatus,
-      } as IExecutionsSummary;
-    });
+    const formattedExecutions = await Container.get(ExecutionRepository).searchExecutions(
+      filter,
+      limit,
+      executingWorkflowIds,
+      sharedWorkflowIds,
+      {
+        lastId: req.query.lastId,
+        firstId: req.query.firstId,
+      },
+    );

     return {
       count,
       results: formattedExecutions,
@@ -441,11 +186,13 @@ export class ExecutionsService {
     if (!sharedWorkflowIds.length) return undefined;

     const { id: executionId } = req.params;
-    const execution = await Db.collections.Execution.findOne({
+    const execution = await Container.get(ExecutionRepository).findSingleExecution(executionId, {
       where: {
-        id: executionId,
         workflowId: In(sharedWorkflowIds),
       },
+      includeData: true,
+      unflattenData: false,
     });

     if (!execution) {
@@ -460,11 +207,6 @@ export class ExecutionsService {
       execution.status = getStatusUsingPreviousExecutionStatusMethod(execution);
     }

-    if (req.query.unflattedResponse === 'true') {
-      return ResponseHelper.unflattenExecutionData(execution);
-    }
-
-    // @ts-ignore
     return execution;
   }
@@ -473,11 +215,12 @@ export class ExecutionsService {
     if (!sharedWorkflowIds.length) return false;

     const { id: executionId } = req.params;
-    const execution = await Db.collections.Execution.findOne({
+    const execution = await Container.get(ExecutionRepository).findSingleExecution(executionId, {
       where: {
-        id: executionId,
         workflowId: In(sharedWorkflowIds),
       },
+      includeData: true,
+      unflattenData: true,
     });

     if (!execution) {
@@ -493,22 +236,20 @@ export class ExecutionsService {
       );
     }

-    const fullExecutionData = ResponseHelper.unflattenExecutionData(execution);
-
-    if (fullExecutionData.finished) {
+    if (execution.finished) {
       throw new Error('The execution succeeded, so it cannot be retried.');
     }

     const executionMode = 'retry';

-    fullExecutionData.workflowData.active = false;
+    execution.workflowData.active = false;

     // Start the workflow
     const data: IWorkflowExecutionDataProcess = {
       executionMode,
-      executionData: fullExecutionData.data,
+      executionData: execution.data,
       retryOf: req.params.id,
-      workflowData: fullExecutionData.workflowData,
+      workflowData: execution.workflowData,
       userId: req.user.id,
     };
@@ -532,7 +273,7 @@ export class ExecutionsService {
     if (req.body.loadWorkflow) {
       // Loads the currently saved workflow to execute instead of the
       // one saved at the time of the execution.
-      const workflowId = fullExecutionData.workflowData.id as string;
+      const workflowId = execution.workflowData.id as string;
       const workflowData = (await Db.collections.Workflow.findOneBy({
         id: workflowId,
       })) as IWorkflowBase;
@@ -614,50 +355,9 @@ export class ExecutionsService {
       }
     }

     if (!deleteBefore && !ids) {
       throw new Error('Either "deleteBefore" or "ids" must be present in the request body');
     }

-    const where: FindOptionsWhere<ExecutionEntity> = { workflowId: In(sharedWorkflowIds) };
-
-    if (deleteBefore) {
-      // delete executions by date, if user may access the underlying workflows
-      where.startedAt = LessThanOrEqual(deleteBefore);
-      Object.assign(where, requestFilters);
-      if (where.status) {
-        where.status = In(requestFiltersRaw!.status as string[]);
-      }
-    } else if (ids) {
-      // delete executions by IDs, if user may access the underlying workflows
-      where.id = In(ids);
-    } else return;
-
-    const executions = await Db.collections.Execution.find({
-      select: ['id'],
-      where,
+    return Container.get(ExecutionRepository).deleteExecutions(requestFilters, sharedWorkflowIds, {
+      deleteBefore,
+      ids,
     });
-
-    if (!executions.length) {
-      if (ids) {
-        LoggerProxy.error('Failed to delete an execution due to insufficient permissions', {
-          userId: req.user.id,
-          executionIds: ids,
-        });
-      }
-      return;
-    }
-
-    const idsToDelete = executions.map(({ id }) => id);
-
-    const binaryDataManager = BinaryDataManager.getInstance();
-    await Promise.all(
-      idsToDelete.map(async (id) => binaryDataManager.deleteBinaryDataByExecutionId(id)),
-    );
-
-    do {
-      // Delete in batches to avoid "SQLITE_ERROR: Expression tree is too large (maximum depth 1000)" error
-      const batch = idsToDelete.splice(0, 500);
-      await Db.collections.Execution.delete(batch);
-    } while (idsToDelete.length > 0);
   }
 }
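Taken together, the list, retry, and delete paths now lean on three repository methods. Their signatures, as inferred from the call sites above, look roughly like the following; the actual declarations live in the repository file, which this diff does not show:

// Inferred repository surface (illustrative only; argument names are assumptions).
import type { IExecutionsSummary } from 'n8n-workflow';

interface ExecutionRepositoryListSketch {
  // Replaces the removed getExecutionsCount, presumably including
  // the Postgres pg_stat_all_tables row-estimate shortcut.
  countExecutions(
    filter: IGetExecutionsQueryFilter | undefined,
    accessibleWorkflowIds: string[],
    currentlyRunningIds: string[],
    hasGlobalOwnerRole: boolean,
  ): Promise<{ count: number; estimated: boolean }>;

  // Replaces the inline query builder plus the per-execution summary mapping.
  searchExecutions(
    filter: IGetExecutionsQueryFilter | undefined,
    limit: number,
    currentlyRunningIds: string[],
    accessibleWorkflowIds: string[],
    range: { lastId?: string; firstId?: string },
  ): Promise<IExecutionsSummary[]>;

  // Replaces the manual where-building, binary-data cleanup, and batched deletes.
  deleteExecutions(
    filter: IGetExecutionsQueryFilter | undefined,
    accessibleWorkflowIds: string[],
    deleteConditions: { deleteBefore?: Date; ids?: string[] },
  ): Promise<void>;
}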

Some files were not shown because too many files have changed in this diff.