From c3ba0123ad0913140707dbf56fafa1d4dd0f3de3 Mon Sep 17 00:00:00 2001
From: Michael Auerswald
Date: Tue, 20 Jun 2023 19:13:18 +0200
Subject: [PATCH] feat: Migrate integer primary keys to nanoids (#6345)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* first commit for postgres migration
* (not working)
* sqlite migration
* quicksave
* fix tests
* fix pg test
* fix postgres
* fix variables import
* fix execution saving
* add user settings fix
* change migration to single lines
* patch preferences endpoint
* cleanup
* improve variable import
* cleanup unused code
* Update packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts

Co-authored-by: Omar Ajoue

* address review notes
* fix var update/import
* refactor: Separate execution data to its own table (#6323)
* wip: Temporary migration process
* refactor: Create boilerplate repository methods for executions
* fix: Lint issues
* refactor: Added search endpoint to repository
* refactor: Make the execution list work again
* wip: Updating how we create and update executions everywhere
* fix: Lint issues and remove most of the direct access to execution model
* refactor: Remove includeWorkflowData flag and fix more tests
* fix: Lint issues
* fix: Fixed ordering of executions for FE, removed transaction when saving execution and removed unnecessary update
* refactor: Add comment about missing feature
* refactor: Refactor counting executions
* refactor: Add migration for other dbms and fix issues found
* refactor: Fix lint issues
* refactor: Remove unnecessary comment and auto inject repo to internal hooks
* refactor: remove type assertion
* fix: Fix broken tests
* fix: Remove unnecessary import
* Remove unnecessary toString() call

Co-authored-by: Iván Ovejero

* fix: Address comments after review
* refactor: Remove unused import
* fix: Lint issues
* fix: Add correct migration files

---------

Co-authored-by: Iván Ovejero

* remove null values from credential export
* fix: Fix an issue with queue mode where all running executions would be returned
* fix: Update n8n node to allow for workflow ids with letters
* set upstream on set branch
* remove typo
* add nodeAccess to credentials
* fix unsaved run check for undefined id
* fix(core): Rename version control feature to source control (#6480)
* rename versionControl to sourceControl
* fix source control tooltip wording

---------

Co-authored-by: Romain Minaud

* fix(editor): Pay 548 hide the set up version control button (#6485)
* feat(DebugHelper Node): Fix and include in main app (#6406)
* improve node a bit
* fixing continueOnFail() to contain error in json
* improve pairedItem
* fix random data returning object results
* fix nanoId length typo
* update pnpm-lock file

---------

Co-authored-by: Marcus

* fix(editor): Remove setup source control CTA button
* fix(editor): Remove setup source control CTA button

---------

Co-authored-by: Michael Auerswald
Co-authored-by: Marcus

* fix(editor): Update source control docs links (#6488)
* feat(DebugHelper Node): Fix and include in main app (#6406)
* improve node a bit
* fixing continueOnFail() to contain error in json
* improve pairedItem
* fix random data returning object results
* fix nanoId length typo
* update pnpm-lock file

---------

Co-authored-by: Marcus

* feat(editor): Replace root events with event bus events (no-changelog) (#6454)
* feat: replace root events with event bus
* fix: prevent cypress from replacing global with globalThis in import path
* feat: remove emitter mixin
* fix:
replace component events with event bus * fix: fix linting issue * fix: fix breaking expression switch * chore: prettify ndv e2e suite code * fix(editor): Update source control docs links --------- Co-authored-by: Michael Auerswald Co-authored-by: Marcus Co-authored-by: Alex Grozav * fix tag endpoint regex --------- Co-authored-by: Omar Ajoue Co-authored-by: Iván Ovejero Co-authored-by: Romain Minaud Co-authored-by: Csaba Tuncsik Co-authored-by: Marcus Co-authored-by: Alex Grozav --- packages/cli/package.json | 3 +- packages/cli/src/ActiveExecutions.ts | 26 +- packages/cli/src/Db.ts | 2 + packages/cli/src/GenericHelpers.ts | 8 +- packages/cli/src/Interfaces.ts | 15 +- packages/cli/src/InternalHooks.ts | 13 +- packages/cli/src/License.ts | 4 +- packages/cli/src/PublicApi/types.d.ts | 6 +- .../credentials/spec/paths/credentials.id.yml | 14 +- .../credentials/spec/schemas/credential.yml | 10 +- .../handlers/executions/executions.handler.ts | 4 +- .../handlers/executions/executions.service.ts | 104 ++- .../sourceControl/sourceControl.handler.ts | 47 ++ .../spec/paths/sourceControl.yml} | 6 +- .../spec/schemas/importResult.yml | 0 .../spec/schemas/pull.yml | 0 .../versionControl/versionControl.handler.ts | 47 -- .../spec/schemas/parameters/workflowId.yml | 2 +- .../workflows/spec/schemas/workflow.yml | 14 +- .../handlers/workflows/workflows.handler.ts | 1 + packages/cli/src/PublicApi/v1/openapi.yml | 10 +- .../v1/shared/spec/schemas/_index.yml | 4 +- packages/cli/src/ResponseHelper.ts | 4 +- packages/cli/src/Server.ts | 69 +- packages/cli/src/WaitTracker.ts | 47 +- packages/cli/src/WaitingWebhooks.ts | 17 +- .../cli/src/WorkflowExecuteAdditionalData.ts | 70 +- packages/cli/src/WorkflowRunner.ts | 43 +- packages/cli/src/api/e2e.api.ts | 2 +- .../cli/src/audit/risks/credentials.risk.ts | 14 +- packages/cli/src/commands/import/workflow.ts | 11 +- packages/cli/src/commands/worker.ts | 44 +- packages/cli/src/constants.ts | 2 +- .../cli/src/controllers/tags.controller.ts | 4 +- .../credentials/credentials.controller.ee.ts | 2 +- .../src/credentials/credentials.controller.ts | 6 +- .../databases/entities/CredentialsEntity.ts | 23 +- .../src/databases/entities/ExecutionData.ts | 27 + .../src/databases/entities/ExecutionEntity.ts | 34 +- .../databases/entities/SharedCredentials.ts | 3 +- .../src/databases/entities/SharedWorkflow.ts | 3 +- .../cli/src/databases/entities/TagEntity.ts | 23 +- .../cli/src/databases/entities/Variables.ts | 21 +- .../src/databases/entities/WebhookEntity.ts | 4 +- .../src/databases/entities/WorkflowEntity.ts | 23 +- .../databases/entities/WorkflowStatistics.ts | 3 +- .../databases/entities/WorkflowTagMapping.ts | 3 +- packages/cli/src/databases/entities/index.ts | 2 + ...690000000001-MigrateIntegerKeysToString.ts | 252 +++++++ .../1690000000030-SeparateExecutionData.ts | 43 ++ .../src/databases/migrations/mysqldb/index.ts | 4 + ...690000000000-MigrateIntegerKeysToString.ts | 262 +++++++ .../1690000000020-SeparateExecutionData.ts | 42 ++ .../databases/migrations/postgresdb/index.ts | 4 + ...690000000002-MigrateIntegerKeysToString.ts | 185 +++++ .../1690000000010-SeparateExecutionData.ts | 46 ++ .../src/databases/migrations/sqlite/index.ts | 4 + .../repositories/execution.repository.ts | 402 ++++++++++- .../repositories/executionData.repository.ts | 10 + .../cli/src/databases/repositories/index.ts | 1 + .../cli/src/databases/utils/generators.ts | 6 + .../environments/sourceControl/constants.ts | 15 + .../sourceControlEnabledMiddleware.ee.ts | 21 + 
.../sourceControl.controller.ee.ts | 235 ++++++ .../sourceControl.service.ee.ts} | 197 +++-- .../sourceControlExport.service.ee.ts | 336 +++++++++ .../sourceControlGit.service.ee.ts} | 55 +- .../sourceControlHelper.ee.ts} | 12 +- .../sourceControlImport.service.ee.ts | 363 ++++++++++ .../sourceControlPreferences.service.ee.ts} | 97 +-- .../types/exportResult.ts | 0 .../types/exportableCredential.ts | 9 + .../types/exportableWorkflow.ts | 0 .../types/importResult.ts | 2 +- .../types/keyPair.ts | 0 .../sourceControl/types/requests.ts | 22 + .../types/sourceControlCommit.ts} | 2 +- .../types/sourceControlDisconnect.ts} | 2 +- .../types/sourceControlPreferences.ts} | 16 +- .../types/sourceControlPullWorkFolder.ts} | 4 +- .../types/sourceControlPush.ts} | 2 +- .../types/sourceControlPushWorkFolder.ts} | 2 +- .../types/sourceControlSetBranch.ts} | 2 +- .../types/sourceControlSetReadOnly.ts} | 2 +- .../types/sourceControlStage.ts} | 2 +- .../types/sourceControlledFile.ts | 19 + .../variables/variables.controller.ee.ts | 7 +- .../variables/variables.controller.ts | 16 +- .../variables/variables.service.ee.ts | 9 +- .../variables/variables.service.ts | 4 +- .../environments/versionControl/constants.ts | 15 - .../versionControlEnabledMiddleware.ee.ts | 21 - .../types/exportableCredential.ts | 8 - .../versionControl/types/requests.ts | 22 - .../types/versionControlledFile.ts | 19 - .../versionControl.controller.ee.ts | 338 --------- .../versionControlExport.service.ee.ts | 674 ------------------ .../eventbus/MessageEventBus/recoverEvents.ts | 19 +- .../cli/src/executions/executionHelpers.ts | 4 +- .../cli/src/executions/executions.service.ts | 372 +--------- packages/cli/src/utils.ts | 3 +- .../src/workflows/workflows.controller.ee.ts | 6 +- .../cli/src/workflows/workflows.controller.ts | 6 +- .../audit/credentials.risk.test.ts | 30 +- .../integration/audit/database.risk.test.ts | 4 + .../integration/audit/filesystem.risk.test.ts | 7 +- .../integration/audit/instance.risk.test.ts | 4 + .../test/integration/audit/nodes.risk.test.ts | 7 +- .../integration/commands/import.cmd.test.ts | 2 +- .../environments/VersionControl.test.ts | 12 +- .../integration/publicApi/credentials.test.ts | 1 - .../integration/publicApi/workflows.test.ts | 3 +- .../cli/test/integration/shared/testDb.ts | 35 +- .../cli/test/integration/shared/types.d.ts | 2 +- packages/cli/test/integration/shared/utils.ts | 14 +- .../workflows.controller.ee.test.ts | 1 - .../integration/workflows.controller.test.ts | 3 + .../cli/test/unit/ActiveExecutions.test.ts | 29 +- packages/cli/test/unit/VersionControl.test.ts | 4 +- packages/editor-ui/src/App.vue | 8 +- packages/editor-ui/src/Interface.ts | 6 +- .../src/__tests__/server/endpoints/index.ts | 4 +- .../__tests__/server/endpoints/settings.ts | 2 +- .../{versionControl.ts => sourceControl.ts} | 26 +- packages/editor-ui/src/__tests__/utils.ts | 2 +- .../{versionControl.ts => sourceControl.ts} | 36 +- .../src/components/MainHeader/MainHeader.vue | 6 +- .../editor-ui/src/components/MainSidebar.vue | 12 +- ...ntrol.vue => MainSidebarSourceControl.vue} | 78 +- packages/editor-ui/src/components/Modals.vue | 12 +- .../src/components/SettingsSidebar.vue | 18 +- ...l.ee.vue => SourceControlPushModal.ee.vue} | 52 +- .../src/components/WorkflowSettings.vue | 6 +- ...st.ts => MainSidebarSourceControl.test.ts} | 35 +- packages/editor-ui/src/constants.ts | 6 +- .../src/plugins/i18n/locales/en.json | 162 +++-- packages/editor-ui/src/router.ts | 10 +- packages/editor-ui/src/stores/canvas.store.ts | 6 +- 
packages/editor-ui/src/stores/index.ts | 2 +- ...ontrol.store.ts => sourceControl.store.ts} | 18 +- packages/editor-ui/src/stores/ui.store.ts | 4 +- ...ils.test.ts => sourceControlUtils.test.ts} | 14 +- .../editor-ui/src/utils/executionUtils.ts | 13 +- packages/editor-ui/src/utils/index.ts | 2 +- .../editor-ui/src/utils/sourceControlUtils.ts | 27 + .../src/utils/versionControlUtils.ts | 27 - .../editor-ui/src/views/CredentialsView.vue | 10 +- packages/editor-ui/src/views/NodeView.vue | 6 +- ...nControl.vue => SettingsSourceControl.vue} | 182 +++-- .../editor-ui/src/views/VariablesView.vue | 15 +- .../editor-ui/src/views/WorkflowsView.vue | 12 +- ....test.ts => SettingsSourceControl.test.ts} | 38 +- .../nodes/CircleCi/PipelineDescription.ts | 2 +- .../nodes-base/nodes/N8n/WorkflowLocator.ts | 6 +- packages/workflow/src/Interfaces.ts | 2 +- pnpm-lock.yaml | 3 + 156 files changed, 3499 insertions(+), 2594 deletions(-) create mode 100644 packages/cli/src/PublicApi/v1/handlers/sourceControl/sourceControl.handler.ts rename packages/cli/src/PublicApi/v1/handlers/{versionControl/spec/paths/versionControl.yml => sourceControl/spec/paths/sourceControl.yml} (73%) rename packages/cli/src/PublicApi/v1/handlers/{versionControl => sourceControl}/spec/schemas/importResult.yml (100%) rename packages/cli/src/PublicApi/v1/handlers/{versionControl => sourceControl}/spec/schemas/pull.yml (100%) delete mode 100644 packages/cli/src/PublicApi/v1/handlers/versionControl/versionControl.handler.ts create mode 100644 packages/cli/src/databases/entities/ExecutionData.ts create mode 100644 packages/cli/src/databases/migrations/mysqldb/1690000000001-MigrateIntegerKeysToString.ts create mode 100644 packages/cli/src/databases/migrations/mysqldb/1690000000030-SeparateExecutionData.ts create mode 100644 packages/cli/src/databases/migrations/postgresdb/1690000000000-MigrateIntegerKeysToString.ts create mode 100644 packages/cli/src/databases/migrations/postgresdb/1690000000020-SeparateExecutionData.ts create mode 100644 packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts create mode 100644 packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts create mode 100644 packages/cli/src/databases/repositories/executionData.repository.ts create mode 100644 packages/cli/src/databases/utils/generators.ts create mode 100644 packages/cli/src/environments/sourceControl/constants.ts create mode 100644 packages/cli/src/environments/sourceControl/middleware/sourceControlEnabledMiddleware.ee.ts create mode 100644 packages/cli/src/environments/sourceControl/sourceControl.controller.ee.ts rename packages/cli/src/environments/{versionControl/versionControl.service.ee.ts => sourceControl/sourceControl.service.ee.ts} (59%) create mode 100644 packages/cli/src/environments/sourceControl/sourceControlExport.service.ee.ts rename packages/cli/src/environments/{versionControl/versionControlGit.service.ee.ts => sourceControl/sourceControlGit.service.ee.ts} (83%) rename packages/cli/src/environments/{versionControl/versionControlHelper.ee.ts => sourceControl/sourceControlHelper.ee.ts} (83%) create mode 100644 packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts rename packages/cli/src/environments/{versionControl/versionControlPreferences.service.ee.ts => sourceControl/sourceControlPreferences.service.ee.ts} (52%) rename packages/cli/src/environments/{versionControl => sourceControl}/types/exportResult.ts (100%) create mode 100644 
packages/cli/src/environments/sourceControl/types/exportableCredential.ts rename packages/cli/src/environments/{versionControl => sourceControl}/types/exportableWorkflow.ts (100%) rename packages/cli/src/environments/{versionControl => sourceControl}/types/importResult.ts (88%) rename packages/cli/src/environments/{versionControl => sourceControl}/types/keyPair.ts (100%) create mode 100644 packages/cli/src/environments/sourceControl/types/requests.ts rename packages/cli/src/environments/{versionControl/types/versionControlCommit.ts => sourceControl/types/sourceControlCommit.ts} (68%) rename packages/cli/src/environments/{versionControl/types/versionControlDisconnect.ts => sourceControl/types/sourceControlDisconnect.ts} (73%) rename packages/cli/src/environments/{versionControl/types/versionControlPreferences.ts => sourceControl/types/sourceControlPreferences.ts} (70%) rename packages/cli/src/environments/{versionControl/types/versionControlPullWorkFolder.ts => sourceControl/types/sourceControlPullWorkFolder.ts} (83%) rename packages/cli/src/environments/{versionControl/types/versionControlPush.ts => sourceControl/types/sourceControlPush.ts} (75%) rename packages/cli/src/environments/{versionControl/types/versionControlPushWorkFolder.ts => sourceControl/types/sourceControlPushWorkFolder.ts} (90%) rename packages/cli/src/environments/{versionControl/types/versionControlSetBranch.ts => sourceControl/types/sourceControlSetBranch.ts} (66%) rename packages/cli/src/environments/{versionControl/types/versionControlSetReadOnly.ts => sourceControl/types/sourceControlSetReadOnly.ts} (68%) rename packages/cli/src/environments/{versionControl/types/versionControlStage.ts => sourceControl/types/sourceControlStage.ts} (88%) create mode 100644 packages/cli/src/environments/sourceControl/types/sourceControlledFile.ts delete mode 100644 packages/cli/src/environments/versionControl/constants.ts delete mode 100644 packages/cli/src/environments/versionControl/middleware/versionControlEnabledMiddleware.ee.ts delete mode 100644 packages/cli/src/environments/versionControl/types/exportableCredential.ts delete mode 100644 packages/cli/src/environments/versionControl/types/requests.ts delete mode 100644 packages/cli/src/environments/versionControl/types/versionControlledFile.ts delete mode 100644 packages/cli/src/environments/versionControl/versionControl.controller.ee.ts delete mode 100644 packages/cli/src/environments/versionControl/versionControlExport.service.ee.ts rename packages/editor-ui/src/__tests__/server/endpoints/{versionControl.ts => sourceControl.ts} (52%) rename packages/editor-ui/src/api/{versionControl.ts => sourceControl.ts} (50%) rename packages/editor-ui/src/components/{MainSidebarVersionControl.vue => MainSidebarSourceControl.vue} (60%) rename packages/editor-ui/src/components/{VersionControlPushModal.ee.vue => SourceControlPushModal.ee.vue} (72%) rename packages/editor-ui/src/components/__tests__/{MainSidebarVersionControl.test.ts => MainSidebarSourceControl.test.ts} (64%) rename packages/editor-ui/src/stores/{versionControl.store.ts => sourceControl.store.ts} (84%) rename packages/editor-ui/src/utils/__tests__/{versionControlUtils.test.ts => sourceControlUtils.test.ts} (79%) create mode 100644 packages/editor-ui/src/utils/sourceControlUtils.ts delete mode 100644 packages/editor-ui/src/utils/versionControlUtils.ts rename packages/editor-ui/src/views/{SettingsVersionControl.vue => SettingsSourceControl.vue} (53%) rename packages/editor-ui/src/views/__tests__/{SettingsVersionControl.test.ts => 
SettingsSourceControl.test.ts} (73%) diff --git a/packages/cli/package.json b/packages/cli/package.json index 1eb626456d..a338c2633a 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -102,8 +102,8 @@ "tsconfig-paths": "^4.1.2" }, "dependencies": { - "@n8n_io/license-sdk": "~2.4.0", "@n8n/client-oauth2": "workspace:*", + "@n8n_io/license-sdk": "~2.4.0", "@oclif/command": "^1.8.16", "@oclif/core": "^1.16.4", "@oclif/errors": "^1.3.6", @@ -152,6 +152,7 @@ "n8n-editor-ui": "workspace:*", "n8n-nodes-base": "workspace:*", "n8n-workflow": "workspace:*", + "nanoid": "^3.3.6", "nodemailer": "^6.7.1", "oauth-1.0a": "^2.2.6", "open": "^7.0.0", diff --git a/packages/cli/src/ActiveExecutions.ts b/packages/cli/src/ActiveExecutions.ts index 590cba36cc..db84d1da8a 100644 --- a/packages/cli/src/ActiveExecutions.ts +++ b/packages/cli/src/ActiveExecutions.ts @@ -11,19 +11,16 @@ import type { import { createDeferredPromise, LoggerProxy } from 'n8n-workflow'; import type { ChildProcess } from 'child_process'; -import { stringify } from 'flatted'; import type PCancelable from 'p-cancelable'; -import * as Db from '@/Db'; import type { IExecutingWorkflowData, IExecutionDb, - IExecutionFlattedDb, IExecutionsCurrentSummary, IWorkflowExecutionDataProcess, } from '@/Interfaces'; -import * as ResponseHelper from '@/ResponseHelper'; import { isWorkflowIdValid } from '@/utils'; -import { Service } from 'typedi'; +import Container, { Service } from 'typedi'; +import { ExecutionRepository } from './databases/repositories'; @Service() export class ActiveExecutions { @@ -61,15 +58,10 @@ export class ActiveExecutions { fullExecutionData.workflowId = workflowId; } - const execution = ResponseHelper.flattenExecutionData(fullExecutionData); - - const executionResult = await Db.collections.Execution.save(execution as IExecutionFlattedDb); - // TODO: what is going on here? - executionId = - typeof executionResult.id === 'object' - ? 
// @ts-ignore - executionResult.id!.toString() - : executionResult.id + ''; + const executionResult = await Container.get(ExecutionRepository).createNewExecution( + fullExecutionData, + ); + executionId = executionResult.id; if (executionId === undefined) { throw new Error('There was an issue assigning an execution id to the execution'); } @@ -77,14 +69,14 @@ export class ActiveExecutions { } else { // Is an existing execution we want to finish so update in DB - const execution: Pick = { + const execution: Pick = { id: executionId, - data: stringify(executionData.executionData!), + data: executionData.executionData!, waitTill: null, status: executionStatus, }; - await Db.collections.Execution.update(executionId, execution); + await Container.get(ExecutionRepository).updateExistingExecution(executionId, execution); } this.activeExecutions[executionId] = { diff --git a/packages/cli/src/Db.ts b/packages/cli/src/Db.ts index 1e0b168688..528a9f417c 100644 --- a/packages/cli/src/Db.ts +++ b/packages/cli/src/Db.ts @@ -29,6 +29,7 @@ import { AuthProviderSyncHistoryRepository, CredentialsRepository, EventDestinationsRepository, + ExecutionDataRepository, ExecutionMetadataRepository, ExecutionRepository, InstalledNodesRepository, @@ -175,6 +176,7 @@ export async function init(testConnectionOptions?: ConnectionOptions): Promise> { Credentials: CredentialsRepository; EventDestinations: EventDestinationsRepository; Execution: ExecutionRepository; + ExecutionData: ExecutionDataRepository; ExecutionMetadata: ExecutionMetadataRepository; InstalledNodes: InstalledNodesRepository; InstalledPackages: InstalledPackagesRepository; @@ -217,19 +219,6 @@ export interface IExecutionFlattedResponse extends IExecutionFlatted { retryOf?: string; } -export interface IExecutionResponseApi { - id: string; - mode: WorkflowExecuteMode; - startedAt: Date; - stoppedAt?: Date; - workflowId?: string; - finished: boolean; - retryOf?: string; - retrySuccessId?: string; - data?: object; - waitTill?: Date | null; - workflowData: IWorkflowBase; -} export interface IExecutionsListResponse { count: number; // results: IExecutionShortResponse[]; diff --git a/packages/cli/src/InternalHooks.ts b/packages/cli/src/InternalHooks.ts index 4656658c1d..54436694f9 100644 --- a/packages/cli/src/InternalHooks.ts +++ b/packages/cli/src/InternalHooks.ts @@ -29,9 +29,9 @@ import { RoleService } from './role/role.service'; import { eventBus } from './eventbus'; import type { User } from '@db/entities/User'; import { N8N_VERSION } from '@/constants'; -import * as Db from '@/Db'; import { NodeTypes } from './NodeTypes'; import type { ExecutionMetadata } from './databases/entities/ExecutionMetadata'; +import { ExecutionRepository } from './databases/repositories'; function userToPayload(user: User): { userId: string; @@ -57,6 +57,7 @@ export class InternalHooks implements IInternalHooksClass { private telemetry: Telemetry, private nodeTypes: NodeTypes, private roleService: RoleService, + private executionRepository: ExecutionRepository, ) {} async init(instanceId: string) { @@ -236,7 +237,9 @@ export class InternalHooks implements IInternalHooksClass { data: IWorkflowExecutionDataProcess, ): Promise { void Promise.all([ - Db.collections.Execution.update(executionId, { status: 'running' }), + this.executionRepository.updateExistingExecution(executionId, { + status: 'running', + }), eventBus.sendWorkflowEvent({ eventName: 'n8n.workflow.started', payload: { @@ -425,12 +428,6 @@ export class InternalHooks implements IInternalHooksClass { } } - 
promises.push( - Db.collections.Execution.update(executionId, { - status: executionStatus, - }) as unknown as Promise, - ); - promises.push( properties.success ? eventBus.sendWorkflowEvent({ diff --git a/packages/cli/src/License.ts b/packages/cli/src/License.ts index 4d252d4ad6..7921136561 100644 --- a/packages/cli/src/License.ts +++ b/packages/cli/src/License.ts @@ -129,8 +129,8 @@ export class License { return this.isFeatureEnabled(LICENSE_FEATURES.VARIABLES); } - isVersionControlLicensed() { - return this.isFeatureEnabled(LICENSE_FEATURES.VERSION_CONTROL); + isSourceControlLicensed() { + return this.isFeatureEnabled(LICENSE_FEATURES.SOURCE_CONTROL); } isAPIDisabled() { diff --git a/packages/cli/src/PublicApi/types.d.ts b/packages/cli/src/PublicApi/types.d.ts index f5795b877b..636d160588 100644 --- a/packages/cli/src/PublicApi/types.d.ts +++ b/packages/cli/src/PublicApi/types.d.ts @@ -163,14 +163,14 @@ export interface IJsonSchema { required: string[]; } -export class VersionControlPull { +export class SourceControlPull { force?: boolean; variables?: { [key: string]: string }; } -export declare namespace PublicVersionControlRequest { - type Pull = AuthenticatedRequest<{}, {}, VersionControlPull, {}>; +export declare namespace PublicSourceControlRequest { + type Pull = AuthenticatedRequest<{}, {}, SourceControlPull, {}>; } // ---------------------------------- diff --git a/packages/cli/src/PublicApi/v1/handlers/credentials/spec/paths/credentials.id.yml b/packages/cli/src/PublicApi/v1/handlers/credentials/spec/paths/credentials.id.yml index b80f81ac02..acacd4ad98 100644 --- a/packages/cli/src/PublicApi/v1/handlers/credentials/spec/paths/credentials.id.yml +++ b/packages/cli/src/PublicApi/v1/handlers/credentials/spec/paths/credentials.id.yml @@ -12,15 +12,15 @@ delete: description: The credential ID that needs to be deleted required: true schema: - type: number + type: string responses: - '200': + "200": description: Operation successful. 
content: application/json: schema: - $ref: '../schemas/credential.yml' - '401': - $ref: '../../../../shared/spec/responses/unauthorized.yml' - '404': - $ref: '../../../../shared/spec/responses/notFound.yml' + $ref: "../schemas/credential.yml" + "401": + $ref: "../../../../shared/spec/responses/unauthorized.yml" + "404": + $ref: "../../../../shared/spec/responses/notFound.yml" diff --git a/packages/cli/src/PublicApi/v1/handlers/credentials/spec/schemas/credential.yml b/packages/cli/src/PublicApi/v1/handlers/credentials/spec/schemas/credential.yml index 9c66817475..585d6d29b5 100644 --- a/packages/cli/src/PublicApi/v1/handlers/credentials/spec/schemas/credential.yml +++ b/packages/cli/src/PublicApi/v1/handlers/credentials/spec/schemas/credential.yml @@ -5,9 +5,9 @@ required: type: object properties: id: - type: number + type: string readOnly: true - example: 42 + example: R2DjclaysHbqn778 name: type: string example: Joe's Github Credentials @@ -17,14 +17,14 @@ properties: data: type: object writeOnly: true - example: { token: 'ada612vad6fa5df4adf5a5dsf4389adsf76da7s' } + example: { token: "ada612vad6fa5df4adf5a5dsf4389adsf76da7s" } createdAt: type: string format: date-time readOnly: true - example: '2022-04-29T11:02:29.842Z' + example: "2022-04-29T11:02:29.842Z" updatedAt: type: string format: date-time readOnly: true - example: '2022-04-29T11:02:29.842Z' + example: "2022-04-29T11:02:29.842Z" diff --git a/packages/cli/src/PublicApi/v1/handlers/executions/executions.handler.ts b/packages/cli/src/PublicApi/v1/handlers/executions/executions.handler.ts index 2ac5f72a55..57d18e9bf7 100644 --- a/packages/cli/src/PublicApi/v1/handlers/executions/executions.handler.ts +++ b/packages/cli/src/PublicApi/v1/handlers/executions/executions.handler.ts @@ -37,7 +37,7 @@ export = { return res.status(404).json({ message: 'Not Found' }); } - await BinaryDataManager.getInstance().deleteBinaryDataByExecutionId(execution.id); + await BinaryDataManager.getInstance().deleteBinaryDataByExecutionId(execution.id!); await deleteExecution(execution); @@ -111,7 +111,7 @@ export = { const executions = await getExecutions(filters); - const newLastId = !executions.length ? '0' : executions.slice(-1)[0].id; + const newLastId = !executions.length ? 
'0' : (executions.slice(-1)[0].id as string); filters.lastId = newLastId; diff --git a/packages/cli/src/PublicApi/v1/handlers/executions/executions.service.ts b/packages/cli/src/PublicApi/v1/handlers/executions/executions.service.ts index 4ae709dea6..36572cb5f3 100644 --- a/packages/cli/src/PublicApi/v1/handlers/executions/executions.service.ts +++ b/packages/cli/src/PublicApi/v1/handlers/executions/executions.service.ts @@ -1,61 +1,26 @@ -import { parse } from 'flatted'; -import type { FindOptionsWhere } from 'typeorm'; -import { In, Not, Raw, LessThan, IsNull } from 'typeorm'; +import type { DeleteResult, FindOptionsWhere } from 'typeorm'; +import { In, Not, Raw, LessThan } from 'typeorm'; import * as Db from '@/Db'; -import type { IExecutionFlattedDb, IExecutionResponseApi } from '@/Interfaces'; +import type { IExecutionBase, IExecutionFlattedDb } from '@/Interfaces'; import type { ExecutionStatus } from 'n8n-workflow'; - -function prepareExecutionData( - execution: IExecutionFlattedDb | null, -): IExecutionResponseApi | undefined { - if (!execution) return undefined; - - // @ts-ignore - if (!execution.data) return execution; - - return { - ...execution, - data: parse(execution.data) as object, - }; -} +import Container from 'typedi'; +import { ExecutionRepository } from '@/databases/repositories'; function getStatusCondition(status: ExecutionStatus) { - const condition: Pick< - FindOptionsWhere, - 'finished' | 'waitTill' | 'stoppedAt' - > = {}; + const condition: Pick, 'status'> = {}; if (status === 'success') { - condition.finished = true; + condition.status = 'success'; } else if (status === 'waiting') { - condition.waitTill = Not(IsNull()); + condition.status = 'waiting'; } else if (status === 'error') { - condition.stoppedAt = Not(IsNull()); - condition.finished = false; + condition.status = In(['error', 'crashed', 'failed']); } return condition; } -function getExecutionSelectableProperties(includeData?: boolean): Array { - const selectFields: Array = [ - 'id', - 'mode', - 'retryOf', - 'retrySuccessId', - 'startedAt', - 'stoppedAt', - 'workflowId', - 'waitTill', - 'finished', - ]; - - if (includeData) selectFields.push('data'); - - return selectFields; -} - export async function getExecutions(params: { limit: number; includeData?: boolean; @@ -63,7 +28,7 @@ export async function getExecutions(params: { workflowIds?: string[]; status?: ExecutionStatus; excludedExecutionsIds?: string[]; -}): Promise { +}): Promise { let where: FindOptionsWhere = {}; if (params.lastId && params.excludedExecutionsIds?.length) { @@ -85,14 +50,29 @@ export async function getExecutions(params: { where = { ...where, workflowId: In(params.workflowIds) }; } - const executions = await Db.collections.Execution.find({ - select: getExecutionSelectableProperties(params.includeData), - where, - order: { id: 'DESC' }, - take: params.limit, - }); - - return executions.map(prepareExecutionData) as IExecutionResponseApi[]; + return Container.get(ExecutionRepository).findMultipleExecutions( + { + select: [ + 'id', + 'mode', + 'retryOf', + 'retrySuccessId', + 'startedAt', + 'stoppedAt', + 'workflowId', + 'waitTill', + 'finished', + ], + where, + order: { id: 'DESC' }, + take: params.limit, + relations: ['executionData'], + }, + { + includeData: params.includeData, + unflattenData: true, + }, + ); } export async function getExecutionsCount(data: { @@ -102,6 +82,7 @@ export async function getExecutionsCount(data: { status?: ExecutionStatus; excludedWorkflowIds?: string[]; }): Promise { + // TODO: Consider moving this to 
the repository as well const executions = await Db.collections.Execution.count({ where: { ...(data.lastId && { id: LessThan(data.lastId) }), @@ -119,21 +100,16 @@ export async function getExecutionInWorkflows( id: string, workflowIds: string[], includeData?: boolean, -): Promise { - const execution = await Db.collections.Execution.findOne({ - select: getExecutionSelectableProperties(includeData), +): Promise { + return Container.get(ExecutionRepository).findSingleExecution(id, { where: { - id, workflowId: In(workflowIds), }, + includeData, + unflattenData: true, }); - - return prepareExecutionData(execution); } -export async function deleteExecution( - execution: IExecutionResponseApi | undefined, -): Promise { - // @ts-ignore - return Db.collections.Execution.remove(execution); +export async function deleteExecution(execution: IExecutionBase): Promise { + return Container.get(ExecutionRepository).deleteExecution(execution.id as string); } diff --git a/packages/cli/src/PublicApi/v1/handlers/sourceControl/sourceControl.handler.ts b/packages/cli/src/PublicApi/v1/handlers/sourceControl/sourceControl.handler.ts new file mode 100644 index 0000000000..daeb6f22e4 --- /dev/null +++ b/packages/cli/src/PublicApi/v1/handlers/sourceControl/sourceControl.handler.ts @@ -0,0 +1,47 @@ +import type express from 'express'; +import type { StatusResult } from 'simple-git'; +import type { PublicSourceControlRequest } from '../../../types'; +import { authorize } from '../../shared/middlewares/global.middleware'; +import type { ImportResult } from '@/environments/sourceControl/types/importResult'; +import Container from 'typedi'; +import { SourceControlService } from '@/environments/sourceControl/sourceControl.service.ee'; +import { SourceControlPreferencesService } from '@/environments/sourceControl/sourceControlPreferences.service.ee'; +import { isSourceControlLicensed } from '@/environments/sourceControl/sourceControlHelper.ee'; + +export = { + pull: [ + authorize(['owner', 'member']), + async ( + req: PublicSourceControlRequest.Pull, + res: express.Response, + ): Promise> => { + const sourceControlPreferencesService = Container.get(SourceControlPreferencesService); + if (!isSourceControlLicensed()) { + return res + .status(401) + .json({ status: 'Error', message: 'Source Control feature is not licensed' }); + } + if (!sourceControlPreferencesService.isSourceControlConnected()) { + return res + .status(400) + .json({ status: 'Error', message: 'Source Control is not connected to a repository' }); + } + try { + const sourceControlService = Container.get(SourceControlService); + const result = await sourceControlService.pullWorkfolder({ + force: req.body.force, + variables: req.body.variables, + userId: req.user.id, + importAfterPull: true, + }); + if ((result as ImportResult)?.workflows) { + return res.status(200).send(result as ImportResult); + } else { + return res.status(409).send(result); + } + } catch (error) { + return res.status(400).send((error as { message: string }).message); + } + }, + ], +}; diff --git a/packages/cli/src/PublicApi/v1/handlers/versionControl/spec/paths/versionControl.yml b/packages/cli/src/PublicApi/v1/handlers/sourceControl/spec/paths/sourceControl.yml similarity index 73% rename from packages/cli/src/PublicApi/v1/handlers/versionControl/spec/paths/versionControl.yml rename to packages/cli/src/PublicApi/v1/handlers/sourceControl/spec/paths/sourceControl.yml index 0d0c8d78d2..d883ac5529 100644 --- a/packages/cli/src/PublicApi/v1/handlers/versionControl/spec/paths/versionControl.yml 
+++ b/packages/cli/src/PublicApi/v1/handlers/sourceControl/spec/paths/sourceControl.yml @@ -1,10 +1,10 @@ post: x-eov-operation-id: pull - x-eov-operation-handler: v1/handlers/versionControl/versionControl.handler + x-eov-operation-handler: v1/handlers/sourceControl/sourceControl.handler tags: - - VersionControl + - SourceControl summary: Pull changes from the remote repository - description: Requires the Version Control feature to be licensed and connected to a repository. + description: Requires the Source Control feature to be licensed and connected to a repository. requestBody: description: Pull options required: true diff --git a/packages/cli/src/PublicApi/v1/handlers/versionControl/spec/schemas/importResult.yml b/packages/cli/src/PublicApi/v1/handlers/sourceControl/spec/schemas/importResult.yml similarity index 100% rename from packages/cli/src/PublicApi/v1/handlers/versionControl/spec/schemas/importResult.yml rename to packages/cli/src/PublicApi/v1/handlers/sourceControl/spec/schemas/importResult.yml diff --git a/packages/cli/src/PublicApi/v1/handlers/versionControl/spec/schemas/pull.yml b/packages/cli/src/PublicApi/v1/handlers/sourceControl/spec/schemas/pull.yml similarity index 100% rename from packages/cli/src/PublicApi/v1/handlers/versionControl/spec/schemas/pull.yml rename to packages/cli/src/PublicApi/v1/handlers/sourceControl/spec/schemas/pull.yml diff --git a/packages/cli/src/PublicApi/v1/handlers/versionControl/versionControl.handler.ts b/packages/cli/src/PublicApi/v1/handlers/versionControl/versionControl.handler.ts deleted file mode 100644 index 90cead0fe9..0000000000 --- a/packages/cli/src/PublicApi/v1/handlers/versionControl/versionControl.handler.ts +++ /dev/null @@ -1,47 +0,0 @@ -import type express from 'express'; -import type { StatusResult } from 'simple-git'; -import type { PublicVersionControlRequest } from '../../../types'; -import { authorize } from '../../shared/middlewares/global.middleware'; -import type { ImportResult } from '@/environments/versionControl/types/importResult'; -import Container from 'typedi'; -import { VersionControlService } from '@/environments/versionControl/versionControl.service.ee'; -import { VersionControlPreferencesService } from '@/environments/versionControl/versionControlPreferences.service.ee'; -import { isVersionControlLicensed } from '@/environments/versionControl/versionControlHelper.ee'; - -export = { - pull: [ - authorize(['owner', 'member']), - async ( - req: PublicVersionControlRequest.Pull, - res: express.Response, - ): Promise> => { - const versionControlPreferencesService = Container.get(VersionControlPreferencesService); - if (!isVersionControlLicensed()) { - return res - .status(401) - .json({ status: 'Error', message: 'Version Control feature is not licensed' }); - } - if (!versionControlPreferencesService.isVersionControlConnected()) { - return res - .status(400) - .json({ status: 'Error', message: 'Version Control is not connected to a repository' }); - } - try { - const versionControlService = Container.get(VersionControlService); - const result = await versionControlService.pullWorkfolder({ - force: req.body.force, - variables: req.body.variables, - userId: req.user.id, - importAfterPull: true, - }); - if ((result as ImportResult)?.workflows) { - return res.status(200).send(result as ImportResult); - } else { - return res.status(409).send(result); - } - } catch (error) { - return res.status(400).send((error as { message: string }).message); - } - }, - ], -}; diff --git 
a/packages/cli/src/PublicApi/v1/handlers/workflows/spec/schemas/parameters/workflowId.yml b/packages/cli/src/PublicApi/v1/handlers/workflows/spec/schemas/parameters/workflowId.yml index fb802e67a2..e8389a78b0 100644 --- a/packages/cli/src/PublicApi/v1/handlers/workflows/spec/schemas/parameters/workflowId.yml +++ b/packages/cli/src/PublicApi/v1/handlers/workflows/spec/schemas/parameters/workflowId.yml @@ -3,4 +3,4 @@ in: path description: The ID of the workflow. required: true schema: - type: number + type: string diff --git a/packages/cli/src/PublicApi/v1/handlers/workflows/spec/schemas/workflow.yml b/packages/cli/src/PublicApi/v1/handlers/workflows/spec/schemas/workflow.yml index f04070f85b..acaa15f34d 100644 --- a/packages/cli/src/PublicApi/v1/handlers/workflows/spec/schemas/workflow.yml +++ b/packages/cli/src/PublicApi/v1/handlers/workflows/spec/schemas/workflow.yml @@ -7,9 +7,9 @@ required: - settings properties: id: - type: number + type: string readOnly: true - example: 1 + example: 2tUt1wbLX592XDdX name: type: string example: Workflow 1 @@ -27,23 +27,23 @@ properties: nodes: type: array items: - $ref: './node.yml' + $ref: "./node.yml" connections: type: object - example: { main: [{ node: 'Jira', type: 'main', index: 0 }] } + example: { main: [{ node: "Jira", type: "main", index: 0 }] } settings: - $ref: './workflowSettings.yml' + $ref: "./workflowSettings.yml" staticData: example: { lastId: 1 } nullable: true anyOf: - type: string - format: 'jsonString' + format: "jsonString" nullable: true - type: object nullable: true tags: type: array items: - $ref: './tag.yml' + $ref: "./tag.yml" readOnly: true diff --git a/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts b/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts index 5eaa8fd232..2049bde475 100644 --- a/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts +++ b/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts @@ -154,6 +154,7 @@ export = { const { id } = req.params; const updateData = new WorkflowEntity(); Object.assign(updateData, req.body); + updateData.id = id; const sharedWorkflow = await getSharedWorkflow(req.user, id); diff --git a/packages/cli/src/PublicApi/v1/openapi.yml b/packages/cli/src/PublicApi/v1/openapi.yml index fcb94b4092..b37e9f1315 100644 --- a/packages/cli/src/PublicApi/v1/openapi.yml +++ b/packages/cli/src/PublicApi/v1/openapi.yml @@ -1,7 +1,7 @@ --- openapi: 3.0.0 info: - title: n8n Public API11 + title: n8n Public API description: n8n Public API termsOfService: https://n8n.io/legal/terms contact: @@ -24,8 +24,8 @@ tags: description: Operations about workflows - name: Credential description: Operations about credentials - - name: VersionControl - description: Operations about version control + - name: SourceControl + description: Operations about source control paths: /audit: @@ -48,8 +48,8 @@ paths: $ref: "./handlers/workflows/spec/paths/workflows.id.activate.yml" /workflows/{id}/deactivate: $ref: "./handlers/workflows/spec/paths/workflows.id.deactivate.yml" - /version-control/pull: - $ref: "./handlers/versionControl/spec/paths/versionControl.yml" + /source-control/pull: + $ref: "./handlers/sourceControl/spec/paths/sourceControl.yml" components: schemas: $ref: "./shared/spec/schemas/_index.yml" diff --git a/packages/cli/src/PublicApi/v1/shared/spec/schemas/_index.yml b/packages/cli/src/PublicApi/v1/shared/spec/schemas/_index.yml index 686abd2384..438fb1b59a 100644 --- a/packages/cli/src/PublicApi/v1/shared/spec/schemas/_index.yml +++ 
b/packages/cli/src/PublicApi/v1/shared/spec/schemas/_index.yml @@ -21,6 +21,6 @@ CredentialType: Audit: $ref: "./../../../handlers/audit/spec/schemas/audit.yml" Pull: - $ref: "./../../../handlers/versionControl/spec/schemas/pull.yml" + $ref: "./../../../handlers/sourceControl/spec/schemas/pull.yml" ImportResult: - $ref: "./../../../handlers/versionControl/spec/schemas/importResult.yml" + $ref: "./../../../handlers/sourceControl/spec/schemas/importResult.yml" diff --git a/packages/cli/src/ResponseHelper.ts b/packages/cli/src/ResponseHelper.ts index f69349de26..907895914f 100644 --- a/packages/cli/src/ResponseHelper.ts +++ b/packages/cli/src/ResponseHelper.ts @@ -171,7 +171,7 @@ export function sendErrorResponse(res: Response, error: Error) { res.status(httpStatusCode).json(response); } -const isUniqueConstraintError = (error: Error) => +export const isUniqueConstraintError = (error: Error) => ['unique', 'duplicate'].some((s) => error.message.toLowerCase().includes(s)); /** @@ -215,6 +215,7 @@ export function send( * * @param {IExecutionDb} fullExecutionData The data to flatten */ +// TODO: Remove this functions since it's purpose should be fulfilled by the execution repository export function flattenExecutionData(fullExecutionData: IExecutionDb): IExecutionFlatted { // Flatten the data const returnData: IExecutionFlatted = { @@ -251,6 +252,7 @@ export function flattenExecutionData(fullExecutionData: IExecutionDb): IExecutio * * @param {IExecutionFlattedDb} fullExecutionData The data to unflatten */ +// TODO: Remove this functions since it's purpose should be fulfilled by the execution repository export function unflattenExecutionData(fullExecutionData: IExecutionFlattedDb): IExecutionResponse { const returnData: IExecutionResponse = { id: fullExecutionData.id, diff --git a/packages/cli/src/Server.ts b/packages/cli/src/Server.ts index 545c6449b2..58f3b63604 100644 --- a/packages/cli/src/Server.ts +++ b/packages/cli/src/Server.ts @@ -21,7 +21,7 @@ import cookieParser from 'cookie-parser'; import express from 'express'; import { engine as expressHandlebars } from 'express-handlebars'; import type { ServeStaticOptions } from 'serve-static'; -import type { FindManyOptions } from 'typeorm'; +import type { FindManyOptions, FindOptionsWhere } from 'typeorm'; import { Not, In } from 'typeorm'; import type { AxiosRequestConfig } from 'axios'; import axios from 'axios'; @@ -114,7 +114,6 @@ import type { ICredentialsDb, ICredentialsOverwrite, IDiagnosticInfo, - IExecutionFlattedDb, IExecutionsStopData, } from '@/Interfaces'; import { ActiveExecutions } from '@/ActiveExecutions'; @@ -167,10 +166,12 @@ import { isLdapCurrentAuthenticationMethod, isSamlCurrentAuthenticationMethod, } from './sso/ssoHelpers'; -import { isVersionControlLicensed } from '@/environments/versionControl/versionControlHelper.ee'; -import { VersionControlService } from '@/environments/versionControl/versionControl.service.ee'; -import { VersionControlController } from '@/environments/versionControl/versionControl.controller.ee'; -import { VersionControlPreferencesService } from './environments/versionControl/versionControlPreferences.service.ee'; +import { isSourceControlLicensed } from '@/environments/sourceControl/sourceControlHelper.ee'; +import { SourceControlService } from '@/environments/sourceControl/sourceControl.service.ee'; +import { SourceControlController } from '@/environments/sourceControl/sourceControl.controller.ee'; +import { SourceControlPreferencesService } from 
'./environments/sourceControl/sourceControlPreferences.service.ee'; +import { ExecutionRepository } from './databases/repositories'; +import type { ExecutionEntity } from './databases/entities/ExecutionEntity'; const exec = promisify(callbackExec); @@ -312,7 +313,7 @@ export class Server extends AbstractServer { logStreaming: false, advancedExecutionFilters: false, variables: false, - versionControl: false, + sourceControl: false, auditLogs: false, }, hideUsagePage: config.getEnv('hideUsagePage'), @@ -430,7 +431,7 @@ export class Server extends AbstractServer { saml: isSamlLicensed(), advancedExecutionFilters: isAdvancedExecutionFiltersEnabled(), variables: isVariablesEnabled(), - versionControl: isVersionControlLicensed(), + sourceControl: isSourceControlLicensed(), }); if (isLdapEnabled()) { @@ -467,8 +468,8 @@ export class Server extends AbstractServer { const mailer = Container.get(UserManagementMailer); const postHog = this.postHog; const samlService = Container.get(SamlService); - const versionControlService = Container.get(VersionControlService); - const versionControlPreferencesService = Container.get(VersionControlPreferencesService); + const sourceControlService = Container.get(SourceControlService); + const sourceControlPreferencesService = Container.get(SourceControlPreferencesService); const controllers: object[] = [ new EventBusController(), @@ -497,7 +498,7 @@ export class Server extends AbstractServer { postHog, }), new SamlController(samlService), - new VersionControlController(versionControlService, versionControlPreferencesService), + new SourceControlController(sourceControlService, sourceControlPreferencesService), ]; if (isLdapEnabled()) { @@ -637,15 +638,12 @@ export class Server extends AbstractServer { this.app.use(`/${this.restEndpoint}/variables`, variablesController); // ---------------------------------------- - // Version Control + // Source Control // ---------------------------------------- - - // initialize SamlService if it is licensed, even if not enabled, to - // set up the initial environment try { - await Container.get(VersionControlService).init(); + await Container.get(SourceControlService).init(); } catch (error) { - LoggerProxy.warn(`Version Control initialization failed: ${error.message}`); + LoggerProxy.warn(`Source Control initialization failed: ${error.message}`); } // ---------------------------------------- @@ -1154,7 +1152,9 @@ export class Server extends AbstractServer { if (!currentlyRunningExecutionIds.length) return []; - const findOptions: FindManyOptions = { + const findOptions: FindManyOptions & { + where: FindOptionsWhere; + } = { select: ['id', 'workflowId', 'mode', 'retryOf', 'startedAt', 'stoppedAt', 'status'], order: { id: 'DESC' }, where: { @@ -1170,19 +1170,23 @@ export class Server extends AbstractServer { if (req.query.filter) { const { workflowId, status, finished } = jsonParse(req.query.filter); if (workflowId && sharedWorkflowIds.includes(workflowId)) { - Object.assign(findOptions.where!, { workflowId }); + Object.assign(findOptions.where, { workflowId }); + } else { + Object.assign(findOptions.where, { workflowId: In(sharedWorkflowIds) }); } if (status) { - Object.assign(findOptions.where!, { status: In(status) }); + Object.assign(findOptions.where, { status: In(status) }); } if (finished) { - Object.assign(findOptions.where!, { finished }); + Object.assign(findOptions.where, { finished }); } } else { - Object.assign(findOptions.where!, { workflowId: In(sharedWorkflowIds) }); + Object.assign(findOptions.where, { 
workflowId: In(sharedWorkflowIds) }); } - const executions = await Db.collections.Execution.find(findOptions); + const executions = await Container.get(ExecutionRepository).findMultipleExecutions( + findOptions, + ); if (!executions.length) return []; @@ -1247,14 +1251,16 @@ export class Server extends AbstractServer { throw new ResponseHelper.NotFoundError('Execution not found'); } - const execution = await Db.collections.Execution.exist({ - where: { - id: executionId, - workflowId: In(sharedWorkflowIds), + const fullExecutionData = await Container.get(ExecutionRepository).findSingleExecution( + executionId, + { + where: { + workflowId: In(sharedWorkflowIds), + }, }, - }); + ); - if (!execution) { + if (!fullExecutionData) { throw new ResponseHelper.NotFoundError('Execution not found'); } @@ -1292,11 +1298,6 @@ export class Server extends AbstractServer { await queue.stopJob(job); } - const executionDb = (await Db.collections.Execution.findOneBy({ - id: req.params.id, - })) as IExecutionFlattedDb; - const fullExecutionData = ResponseHelper.unflattenExecutionData(executionDb); - const returnData: IExecutionsStopData = { mode: fullExecutionData.mode, startedAt: new Date(fullExecutionData.startedAt), diff --git a/packages/cli/src/WaitTracker.ts b/packages/cli/src/WaitTracker.ts index 0b9902b3b2..4d01e5b524 100644 --- a/packages/cli/src/WaitTracker.ts +++ b/packages/cli/src/WaitTracker.ts @@ -8,16 +8,14 @@ import { LoggerProxy as Logger, WorkflowOperationError, } from 'n8n-workflow'; -import { Service } from 'typedi'; +import Container, { Service } from 'typedi'; import type { FindManyOptions, ObjectLiteral } from 'typeorm'; import { Not, LessThanOrEqual } from 'typeorm'; import { DateUtils } from 'typeorm/util/DateUtils'; import config from '@/config'; -import * as Db from '@/Db'; import * as ResponseHelper from '@/ResponseHelper'; import type { - IExecutionFlattedDb, IExecutionResponse, IExecutionsStopData, IWorkflowExecutionDataProcess, @@ -25,6 +23,8 @@ import type { import { WorkflowRunner } from '@/WorkflowRunner'; import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper'; import { recoverExecutionDataFromEventLogMessages } from './eventbus/MessageEventBus/recoverEvents'; +import { ExecutionRepository } from './databases/repositories'; +import type { ExecutionEntity } from './databases/entities/ExecutionEntity'; @Service() export class WaitTracker { @@ -37,7 +37,7 @@ export class WaitTracker { mainTimer: NodeJS.Timeout; - constructor() { + constructor(private executionRepository: ExecutionRepository) { // Poll every 60 seconds a list of upcoming executions this.mainTimer = setInterval(() => { void this.getWaitingExecutions(); @@ -50,7 +50,7 @@ export class WaitTracker { async getWaitingExecutions() { Logger.debug('Wait tracker querying database for waiting executions'); // Find all the executions which should be triggered in the next 70 seconds - const findQuery: FindManyOptions = { + const findQuery: FindManyOptions = { select: ['id', 'waitTill'], where: { waitTill: LessThanOrEqual(new Date(Date.now() + 70000)), @@ -70,7 +70,7 @@ export class WaitTracker { ); } - const executions = await Db.collections.Execution.find(findQuery); + const executions = await this.executionRepository.findMultipleExecutions(findQuery); if (executions.length === 0) { return; @@ -106,9 +106,11 @@ export class WaitTracker { } // Also check in database - const execution = await Db.collections.Execution.findOneBy({ id: executionId }); + const execution = await 
this.executionRepository.findSingleExecution(executionId, { + includeData: true, + }); - if (execution === null) { + if (!execution) { throw new Error(`The execution ID "${executionId}" could not be found.`); } @@ -124,12 +126,17 @@ export class WaitTracker { // if the execution ended in an unforseen, non-cancelable state, try to recover it await recoverExecutionDataFromEventLogMessages(executionId, [], true); // find recovered data - const recoveredExecution = await Db.collections.Execution.findOneBy({ id: executionId }); - if (recoveredExecution) { - fullExecutionData = ResponseHelper.unflattenExecutionData(recoveredExecution); - } else { + const restoredExecution = await Container.get(ExecutionRepository).findSingleExecution( + executionId, + { + includeData: true, + unflattenData: true, + }, + ); + if (!restoredExecution) { throw new Error(`Execution ${executionId} could not be recovered or canceled.`); } + fullExecutionData = restoredExecution; } // Set in execution in DB as failed and remove waitTill time const error = new WorkflowOperationError('Workflow-Execution has been canceled!'); @@ -144,11 +151,9 @@ export class WaitTracker { fullExecutionData.waitTill = null; fullExecutionData.status = 'canceled'; - await Db.collections.Execution.update( + await Container.get(ExecutionRepository).updateExistingExecution( executionId, - ResponseHelper.flattenExecutionData({ - ...fullExecutionData, - }) as IExecutionFlattedDb, + fullExecutionData, ); return { @@ -166,16 +171,14 @@ export class WaitTracker { (async () => { // Get the data to execute - const fullExecutionDataFlatted = await Db.collections.Execution.findOneBy({ - id: executionId, + const fullExecutionData = await this.executionRepository.findSingleExecution(executionId, { + includeData: true, + unflattenData: true, }); - if (fullExecutionDataFlatted === null) { + if (!fullExecutionData) { throw new Error(`The execution with the id "${executionId}" does not exist.`); } - - const fullExecutionData = ResponseHelper.unflattenExecutionData(fullExecutionDataFlatted); - if (fullExecutionData.finished) { throw new Error('The execution did succeed and can so not be started again.'); } diff --git a/packages/cli/src/WaitingWebhooks.ts b/packages/cli/src/WaitingWebhooks.ts index e5f8497b48..bd2a6d5388 100644 --- a/packages/cli/src/WaitingWebhooks.ts +++ b/packages/cli/src/WaitingWebhooks.ts @@ -6,17 +6,17 @@ import { NodeHelpers, Workflow, LoggerProxy as Logger } from 'n8n-workflow'; import { Service } from 'typedi'; import type express from 'express'; -import * as Db from '@/Db'; import * as ResponseHelper from '@/ResponseHelper'; import * as WebhookHelpers from '@/WebhookHelpers'; import { NodeTypes } from '@/NodeTypes'; import type { IExecutionResponse, IResponseCallbackData, IWorkflowDb } from '@/Interfaces'; import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'; import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper'; +import { ExecutionRepository } from './databases/repositories'; @Service() export class WaitingWebhooks { - constructor(private nodeTypes: NodeTypes) {} + constructor(private nodeTypes: NodeTypes, private executionRepository: ExecutionRepository) {} async executeWebhook( httpMethod: WebhookHttpMethod, @@ -39,19 +39,20 @@ export class WaitingWebhooks { const executionId = pathParts.shift(); const path = pathParts.join('/'); - const execution = await Db.collections.Execution.findOneBy({ id: executionId }); + const execution = await 
this.executionRepository.findSingleExecution(executionId as string, { + includeData: true, + unflattenData: true, + }); - if (execution === null) { + if (!execution) { throw new ResponseHelper.NotFoundError(`The execution "${executionId} does not exist.`); } - const fullExecutionData = ResponseHelper.unflattenExecutionData(execution); - - if (fullExecutionData.finished || fullExecutionData.data.resultData.error) { + if (execution.finished || execution.data.resultData.error) { throw new ResponseHelper.ConflictError(`The execution "${executionId} has finished already.`); } - return this.startExecution(httpMethod, path, fullExecutionData, req, res); + return this.startExecution(httpMethod, path, execution, req, res); } async startExecution( diff --git a/packages/cli/src/WorkflowExecuteAdditionalData.ts b/packages/cli/src/WorkflowExecuteAdditionalData.ts index ca3c2acb1a..7ea2556438 100644 --- a/packages/cli/src/WorkflowExecuteAdditionalData.ts +++ b/packages/cli/src/WorkflowExecuteAdditionalData.ts @@ -54,7 +54,6 @@ import { ExternalHooks } from '@/ExternalHooks'; import type { IExecutionDb, IExecutionFlattedDb, - IExecutionResponse, IPushDataExecutionFinished, IWorkflowExecuteProcess, IWorkflowExecutionDataProcess, @@ -62,7 +61,6 @@ import type { } from '@/Interfaces'; import { NodeTypes } from '@/NodeTypes'; import { Push } from '@/push'; -import * as ResponseHelper from '@/ResponseHelper'; import * as WebhookHelpers from '@/WebhookHelpers'; import * as WorkflowHelpers from '@/WorkflowHelpers'; import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper'; @@ -72,6 +70,7 @@ import { WorkflowsService } from './workflows/workflows.services'; import { Container } from 'typedi'; import { InternalHooks } from '@/InternalHooks'; import type { ExecutionMetadata } from '@db/entities/ExecutionMetadata'; +import { ExecutionRepository } from './databases/repositories'; const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType'); @@ -185,7 +184,7 @@ export function executeErrorWorkflow( /** * Prunes Saved Execution which are older than configured. * Throttled to be executed just once in configured timeframe. 
- * + * TODO: Consider moving this whole function to the repository or at least the queries */ let throttling = false; async function pruneExecutionData(this: WorkflowHooks): Promise { @@ -220,7 +219,6 @@ async function pruneExecutionData(this: WorkflowHooks): Promise { } } - const isBinaryModeDefaultMode = config.getEnv('binaryDataManager.mode') === 'default'; try { setTimeout(() => { throttling = false; @@ -236,8 +234,7 @@ async function pruneExecutionData(this: WorkflowHooks): Promise { ).map(({ id }) => id); await Db.collections.Execution.delete({ id: In(executionIds) }); // Mark binary data for deletion for all executions - if (!isBinaryModeDefaultMode) - await BinaryDataManager.getInstance().markDataForDeletionByExecutionIds(executionIds); + await BinaryDataManager.getInstance().markDataForDeletionByExecutionIds(executionIds); } while (executionIds.length > 0); } catch (error) { ErrorReporter.error(error); @@ -435,15 +432,19 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx { executionId: this.executionId, nodeName }, ); - const execution = await Db.collections.Execution.findOneBy({ id: this.executionId }); + const fullExecutionData = await Container.get(ExecutionRepository).findSingleExecution( + this.executionId, + { + includeData: true, + unflattenData: true, + }, + ); - if (execution === null) { + if (!fullExecutionData) { // Something went badly wrong if this happens. // This check is here mostly to make typescript happy. return; } - const fullExecutionData: IExecutionResponse = - ResponseHelper.unflattenExecutionData(execution); if (fullExecutionData.finished) { // We already received ´workflowExecuteAfter´ webhook, so this is just an async call @@ -482,10 +483,9 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx fullExecutionData.status = 'running'; - const flattenedExecutionData = ResponseHelper.flattenExecutionData(fullExecutionData); - await Db.collections.Execution.update( + await Container.get(ExecutionRepository).updateExistingExecution( this.executionId, - flattenedExecutionData as IExecutionFlattedDb, + fullExecutionData, ); } catch (err) { ErrorReporter.error(err); @@ -578,10 +578,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { if (isManualMode && !saveManualExecutions && !fullRunData.waitTill) { // Data is always saved, so we remove from database - await Db.collections.Execution.delete(this.executionId); - await BinaryDataManager.getInstance().markDataForDeletionByExecutionId( - this.executionId, - ); + await Container.get(ExecutionRepository).deleteExecution(this.executionId); return; } @@ -605,6 +602,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { let workflowStatusFinal: ExecutionStatus = workflowDidSucceed ? 
'success' : 'failed'; if (workflowHasCrashed) workflowStatusFinal = 'crashed'; if (workflowWasCanceled) workflowStatusFinal = 'canceled'; + if (fullRunData.waitTill) workflowStatusFinal = 'waiting'; if ( (workflowDidSucceed && saveDataSuccessExecution === 'none') || @@ -619,10 +617,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { this.retryOf, ); // Data is always saved, so we remove from database - await Db.collections.Execution.delete(this.executionId); - await BinaryDataManager.getInstance().markDataForDeletionByExecutionId( - this.executionId, - ); + await Container.get(ExecutionRepository).deleteExecution(this.executionId); return; } @@ -671,12 +666,9 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { stoppedAt: fullExecutionData.stoppedAt, }); - const executionData = ResponseHelper.flattenExecutionData(fullExecutionData); - - // Save the Execution in DB - await Db.collections.Execution.update( + await Container.get(ExecutionRepository).updateExistingExecution( this.executionId, - executionData as IExecutionFlattedDb, + fullExecutionData, ); try { @@ -688,9 +680,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { } if (fullRunData.finished === true && this.retryOf !== undefined) { - // If the retry was successful save the reference it on the original execution - // await Db.collections.Execution.save(executionData as IExecutionFlattedDb); - await Db.collections.Execution.update(this.retryOf, { + await Container.get(ExecutionRepository).updateExistingExecution(this.retryOf, { retrySuccessId: this.executionId, }); } @@ -778,6 +768,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { let workflowStatusFinal: ExecutionStatus = workflowDidSucceed ? 
'success' : 'failed'; if (workflowHasCrashed) workflowStatusFinal = 'crashed'; if (workflowWasCanceled) workflowStatusFinal = 'canceled'; + if (fullRunData.waitTill) workflowStatusFinal = 'waiting'; if (!workflowDidSucceed) { executeErrorWorkflow( @@ -809,17 +800,15 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { fullExecutionData.workflowId = workflowId; } - const executionData = ResponseHelper.flattenExecutionData(fullExecutionData); - - // Save the Execution in DB - await Db.collections.Execution.update( + await Container.get(ExecutionRepository).updateExistingExecution( this.executionId, - executionData as IExecutionFlattedDb, + fullExecutionData, ); // For reasons(tm) the execution status is not updated correctly in the first update, so has to be written again (tbd) - await Db.collections.Execution.update(this.executionId, { - status: executionData.status, + + await Container.get(ExecutionRepository).updateExistingExecution(this.executionId, { + status: fullExecutionData.status, }); try { @@ -832,7 +821,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { if (fullRunData.finished === true && this.retryOf !== undefined) { // If the retry was successful save the reference it on the original execution - await Db.collections.Execution.update(this.retryOf, { + await Container.get(ExecutionRepository).updateExistingExecution(this.retryOf, { retrySuccessId: this.executionId, }); } @@ -1090,9 +1079,10 @@ async function executeWorkflow( // remove execution from active executions Container.get(ActiveExecutions).remove(executionId, fullRunData); - const executionData = ResponseHelper.flattenExecutionData(fullExecutionData); - - await Db.collections.Execution.update(executionId, executionData as IExecutionFlattedDb); + await Container.get(ExecutionRepository).updateExistingExecution( + executionId, + fullExecutionData, + ); throw { ...error, stack: error.stack, diff --git a/packages/cli/src/WorkflowRunner.ts b/packages/cli/src/WorkflowRunner.ts index c2ea34e908..0f636fc05f 100644 --- a/packages/cli/src/WorkflowRunner.ts +++ b/packages/cli/src/WorkflowRunner.ts @@ -11,7 +11,7 @@ /* eslint-disable @typescript-eslint/explicit-module-boundary-types */ /* eslint-disable @typescript-eslint/no-unused-vars */ import type { IProcessMessage } from 'n8n-core'; -import { BinaryDataManager, WorkflowExecute } from 'n8n-core'; +import { WorkflowExecute } from 'n8n-core'; import type { ExecutionError, @@ -34,10 +34,8 @@ import { fork } from 'child_process'; import { ActiveExecutions } from '@/ActiveExecutions'; import config from '@/config'; -import * as Db from '@/Db'; import { ExternalHooks } from '@/ExternalHooks'; import type { - IExecutionFlattedDb, IProcessMessageDataHook, IWorkflowExecutionDataProcess, IWorkflowExecutionDataProcessWithExecution, @@ -45,7 +43,6 @@ import type { import { NodeTypes } from '@/NodeTypes'; import type { Job, JobData, JobQueue, JobResponse } from '@/Queue'; import { Queue } from '@/Queue'; -import * as ResponseHelper from '@/ResponseHelper'; import * as WebhookHelpers from '@/WebhookHelpers'; import * as WorkflowHelpers from '@/WorkflowHelpers'; import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'; @@ -57,6 +54,7 @@ import { eventBus } from './eventbus'; import { recoverExecutionDataFromEventLogMessages } from './eventbus/MessageEventBus/recoverEvents'; import { Container } from 'typedi'; import { InternalHooks } from './InternalHooks'; +import { ExecutionRepository } from './databases/repositories'; export class 
WorkflowRunner { activeExecutions: ActiveExecutions; @@ -127,14 +125,22 @@ export class WorkflowRunner { } } - const executionFlattedData = await Db.collections.Execution.findOneBy({ id: executionId }); - - void Container.get(InternalHooks).onWorkflowCrashed( + const executionFlattedData = await Container.get(ExecutionRepository).findSingleExecution( executionId, - executionMode, - executionFlattedData?.workflowData, - executionFlattedData?.metadata, + { + includeData: true, + }, ); + + if (executionFlattedData) { + void Container.get(InternalHooks).onWorkflowCrashed( + executionId, + executionMode, + executionFlattedData?.workflowData, + // TODO: get metadata to be sent here + // executionFlattedData?.metadata, + ); + } } catch { // Ignore errors } @@ -566,10 +572,16 @@ export class WorkflowRunner { reject(error); } - const executionDb = (await Db.collections.Execution.findOneBy({ - id: executionId, - })) as IExecutionFlattedDb; - const fullExecutionData = ResponseHelper.unflattenExecutionData(executionDb); + const fullExecutionData = await Container.get(ExecutionRepository).findSingleExecution( + executionId, + { + includeData: true, + unflattenData: true, + }, + ); + if (!fullExecutionData) { + return reject(new Error(`Could not find execution with id "${executionId}"`)); + } const runData = { data: fullExecutionData.data, finished: fullExecutionData.finished, @@ -597,8 +609,7 @@ export class WorkflowRunner { (workflowDidSucceed && saveDataSuccessExecution === 'none') || (!workflowDidSucceed && saveDataErrorExecution === 'none') ) { - await Db.collections.Execution.delete(executionId); - await BinaryDataManager.getInstance().markDataForDeletionByExecutionId(executionId); + await Container.get(ExecutionRepository).deleteExecution(executionId); } // eslint-disable-next-line id-denylist } catch (err) { diff --git a/packages/cli/src/api/e2e.api.ts b/packages/cli/src/api/e2e.api.ts index 32b6425f1f..efecd7c34c 100644 --- a/packages/cli/src/api/e2e.api.ts +++ b/packages/cli/src/api/e2e.api.ts @@ -29,7 +29,7 @@ const enabledFeatures = { [LICENSE_FEATURES.SAML]: false, [LICENSE_FEATURES.LOG_STREAMING]: false, [LICENSE_FEATURES.ADVANCED_EXECUTION_FILTERS]: false, - [LICENSE_FEATURES.VERSION_CONTROL]: false, + [LICENSE_FEATURES.SOURCE_CONTROL]: false, }; type Feature = keyof typeof enabledFeatures; diff --git a/packages/cli/src/audit/risks/credentials.risk.ts b/packages/cli/src/audit/risks/credentials.risk.ts index 804e642a4f..7e0898b0c4 100644 --- a/packages/cli/src/audit/risks/credentials.risk.ts +++ b/packages/cli/src/audit/risks/credentials.risk.ts @@ -6,6 +6,8 @@ import config from '@/config'; import { CREDENTIALS_REPORT } from '@/audit/constants'; import type { WorkflowEntity } from '@db/entities/WorkflowEntity'; import type { Risk } from '@/audit/types'; +import Container from 'typedi'; +import { ExecutionRepository } from '@/databases/repositories'; async function getAllCredsInUse(workflows: WorkflowEntity[]) { const credsInAnyUse = new Set(); @@ -44,12 +46,14 @@ async function getExecutionsInPastDays(days: number) { const utcDate = DateUtils.mixedDateToUtcDatetimeString(date) as string; - return Db.collections.Execution.find({ - select: ['workflowData'], - where: { - startedAt: MoreThanOrEqual(utcDate) as unknown as FindOperator, + return Container.get(ExecutionRepository).findMultipleExecutions( + { + where: { + startedAt: MoreThanOrEqual(utcDate) as unknown as FindOperator, + }, }, - }); + { includeData: true }, + ); } /** diff --git a/packages/cli/src/commands/import/workflow.ts 
b/packages/cli/src/commands/import/workflow.ts index 7faa184e5e..4ac3c9a6ba 100644 --- a/packages/cli/src/commands/import/workflow.ts +++ b/packages/cli/src/commands/import/workflow.ts @@ -6,7 +6,6 @@ import glob from 'fast-glob'; import { Container } from 'typedi'; import type { EntityManager } from 'typeorm'; import { v4 as uuid } from 'uuid'; -import config from '@/config'; import * as Db from '@/Db'; import { SharedWorkflow } from '@db/entities/SharedWorkflow'; import { WorkflowEntity } from '@db/entities/WorkflowEntity'; @@ -18,6 +17,7 @@ import { disableAutoGeneratedIds } from '@db/utils/commandHelpers'; import type { ICredentialsDb, IWorkflowToImport } from '@/Interfaces'; import { replaceInvalidCredentials } from '@/WorkflowHelpers'; import { BaseCommand, UM_FIX_INSTRUCTION } from '../BaseCommand'; +import { generateNanoId } from '@/databases/utils/generators'; function assertHasWorkflowsToImport(workflows: unknown): asserts workflows is IWorkflowToImport[] { if (!Array.isArray(workflows)) { @@ -117,6 +117,9 @@ export class ImportWorkflowsCommand extends BaseCommand { const workflow = jsonParse( fs.readFileSync(file, { encoding: 'utf8' }), ); + if (!workflow.id) { + workflow.id = generateNanoId(); + } if (credentials.length > 0) { workflow.nodes.forEach((node: INode) => { @@ -227,12 +230,6 @@ export class ImportWorkflowsCommand extends BaseCommand { }, ['workflowId', 'userId'], ); - if (config.getEnv('database.type') === 'postgresdb') { - const tablePrefix = config.getEnv('database.tablePrefix'); - await this.transactionManager.query( - `SELECT setval('${tablePrefix}workflow_entity_id_seq', (SELECT MAX(id) from "${tablePrefix}workflow_entity"))`, - ); - } } private async getOwner() { diff --git a/packages/cli/src/commands/worker.ts b/packages/cli/src/commands/worker.ts index e51619e131..f346277cbd 100644 --- a/packages/cli/src/commands/worker.ts +++ b/packages/cli/src/commands/worker.ts @@ -22,6 +22,7 @@ import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper'; import { generateFailedExecutionFromError } from '@/WorkflowHelpers'; import { N8N_VERSION } from '@/constants'; import { BaseCommand } from './BaseCommand'; +import { ExecutionRepository } from '@/databases/repositories'; export class Worker extends BaseCommand { static description = '\nStarts a n8n worker'; @@ -89,9 +90,15 @@ export class Worker extends BaseCommand { async runJob(job: Job, nodeTypes: INodeTypes): Promise { const { executionId, loadStaticData } = job.data; - const executionDb = await Db.collections.Execution.findOneBy({ id: executionId }); + const fullExecutionData = await Container.get(ExecutionRepository).findSingleExecution( + executionId, + { + includeData: true, + unflattenData: true, + }, + ); - if (!executionDb) { + if (!fullExecutionData) { LoggerProxy.error( `Worker failed to find data of execution "${executionId}" in database. Cannot continue.`, { executionId }, @@ -100,15 +107,14 @@ export class Worker extends BaseCommand { `Unable to find data of execution "${executionId}" in database. 
Aborting execution.`, ); } - const currentExecutionDb = ResponseHelper.unflattenExecutionData(executionDb); - const workflowId = currentExecutionDb.workflowData.id!; + const workflowId = fullExecutionData.workflowData.id!; LoggerProxy.info( `Start job: ${job.id} (Workflow ID: ${workflowId} | Execution: ${executionId})`, ); const workflowOwner = await getWorkflowOwner(workflowId); - let { staticData } = currentExecutionDb.workflowData; + let { staticData } = fullExecutionData.workflowData; if (loadStaticData) { const workflowData = await Db.collections.Workflow.findOne({ select: ['id', 'staticData'], @@ -126,7 +132,7 @@ export class Worker extends BaseCommand { staticData = workflowData.staticData; } - const workflowSettings = currentExecutionDb.workflowData.settings ?? {}; + const workflowSettings = fullExecutionData.workflowData.settings ?? {}; let workflowTimeout = workflowSettings.executionTimeout ?? config.getEnv('executions.timeout'); // initialize with default @@ -138,13 +144,13 @@ export class Worker extends BaseCommand { const workflow = new Workflow({ id: workflowId, - name: currentExecutionDb.workflowData.name, - nodes: currentExecutionDb.workflowData.nodes, - connections: currentExecutionDb.workflowData.connections, - active: currentExecutionDb.workflowData.active, + name: fullExecutionData.workflowData.name, + nodes: fullExecutionData.workflowData.nodes, + connections: fullExecutionData.workflowData.connections, + active: fullExecutionData.workflowData.active, nodeTypes, staticData, - settings: currentExecutionDb.workflowData.settings, + settings: fullExecutionData.workflowData.settings, }); const additionalData = await WorkflowExecuteAdditionalData.getBase( @@ -153,10 +159,10 @@ export class Worker extends BaseCommand { executionTimeoutTimestamp, ); additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerExecuter( - currentExecutionDb.mode, + fullExecutionData.mode, job.data.executionId, - currentExecutionDb.workflowData, - { retryOf: currentExecutionDb.retryOf as string }, + fullExecutionData.workflowData, + { retryOf: fullExecutionData.retryOf as string }, ); try { @@ -164,7 +170,7 @@ export class Worker extends BaseCommand { } catch (error) { if (error instanceof NodeOperationError) { const failedExecution = generateFailedExecutionFromError( - currentExecutionDb.mode, + fullExecutionData.mode, error, error.node, ); @@ -192,17 +198,17 @@ export class Worker extends BaseCommand { let workflowExecute: WorkflowExecute; let workflowRun: PCancelable; - if (currentExecutionDb.data !== undefined) { + if (fullExecutionData.data !== undefined) { workflowExecute = new WorkflowExecute( additionalData, - currentExecutionDb.mode, - currentExecutionDb.data, + fullExecutionData.mode, + fullExecutionData.data, ); workflowRun = workflowExecute.processRunExecutionData(workflow); } else { // Execute all nodes // Can execute without webhook so go on - workflowExecute = new WorkflowExecute(additionalData, currentExecutionDb.mode); + workflowExecute = new WorkflowExecute(additionalData, fullExecutionData.mode); workflowRun = workflowExecute.run(workflow); } diff --git a/packages/cli/src/constants.ts b/packages/cli/src/constants.ts index 4bdfbfe2e2..821a788edc 100644 --- a/packages/cli/src/constants.ts +++ b/packages/cli/src/constants.ts @@ -77,7 +77,7 @@ export const enum LICENSE_FEATURES { LOG_STREAMING = 'feat:logStreaming', ADVANCED_EXECUTION_FILTERS = 'feat:advancedExecutionFilters', VARIABLES = 'feat:variables', - VERSION_CONTROL = 'feat:versionControl', + SOURCE_CONTROL = 
'feat:sourceControl', API_DISABLED = 'feat:apiDisabled', } diff --git a/packages/cli/src/controllers/tags.controller.ts b/packages/cli/src/controllers/tags.controller.ts index 7b68d95aaf..856f7df144 100644 --- a/packages/cli/src/controllers/tags.controller.ts +++ b/packages/cli/src/controllers/tags.controller.ts @@ -75,7 +75,7 @@ export class TagsController { } // Updates a tag - @Patch('/:id(\\d+)') + @Patch('/:id(\\w+)') async updateTag(req: TagsRequest.Update): Promise { const { name } = req.body; const { id } = req.params; @@ -93,7 +93,7 @@ export class TagsController { } @Authorized(['global', 'owner']) - @Delete('/:id(\\d+)') + @Delete('/:id(\\w+)') async deleteTag(req: TagsRequest.Delete) { const { id } = req.params; await this.externalHooks.run('tag.beforeDelete', [id]); diff --git a/packages/cli/src/credentials/credentials.controller.ee.ts b/packages/cli/src/credentials/credentials.controller.ee.ts index c780aec062..409fb2c057 100644 --- a/packages/cli/src/credentials/credentials.controller.ee.ts +++ b/packages/cli/src/credentials/credentials.controller.ee.ts @@ -51,7 +51,7 @@ EECredentialsController.get( * GET /credentials/:id */ EECredentialsController.get( - '/:id(\\d+)', + '/:id(\\w+)', (req, res, next) => (req.params.id === 'new' ? next('router') : next()), // skip ee router and use free one for naming ResponseHelper.send(async (req: CredentialRequest.Get) => { const { id: credentialId } = req.params; diff --git a/packages/cli/src/credentials/credentials.controller.ts b/packages/cli/src/credentials/credentials.controller.ts index 6a4223ba2b..ffa7cf9681 100644 --- a/packages/cli/src/credentials/credentials.controller.ts +++ b/packages/cli/src/credentials/credentials.controller.ts @@ -65,7 +65,7 @@ credentialsController.get( * GET /credentials/:id */ credentialsController.get( - '/:id(\\d+)', + '/:id(\\w+)', ResponseHelper.send(async (req: CredentialRequest.Get) => { const { id: credentialId } = req.params; const includeDecryptedData = req.query.includeData === 'true'; @@ -147,7 +147,7 @@ credentialsController.post( * PATCH /credentials/:id */ credentialsController.patch( - '/:id(\\d+)', + '/:id(\\w+)', ResponseHelper.send(async (req: CredentialRequest.Update): Promise => { const { id: credentialId } = req.params; @@ -198,7 +198,7 @@ credentialsController.patch( * DELETE /credentials/:id */ credentialsController.delete( - '/:id(\\d+)', + '/:id(\\w+)', ResponseHelper.send(async (req: CredentialRequest.Delete) => { const { id: credentialId } = req.params; diff --git a/packages/cli/src/databases/entities/CredentialsEntity.ts b/packages/cli/src/databases/entities/CredentialsEntity.ts index b02b1fb0a8..f9cb8fa49f 100644 --- a/packages/cli/src/databases/entities/CredentialsEntity.ts +++ b/packages/cli/src/databases/entities/CredentialsEntity.ts @@ -1,15 +1,28 @@ import type { ICredentialNodeAccess } from 'n8n-workflow'; -import { Column, Entity, Generated, Index, OneToMany, PrimaryColumn } from 'typeorm'; +import { BeforeInsert, Column, Entity, Index, OneToMany, PrimaryColumn } from 'typeorm'; import { IsArray, IsObject, IsString, Length } from 'class-validator'; import type { SharedCredentials } from './SharedCredentials'; import { AbstractEntity, jsonColumnType } from './AbstractEntity'; import type { ICredentialsDb } from '@/Interfaces'; -import { idStringifier } from '../utils/transformers'; - +import { generateNanoId } from '../utils/generators'; @Entity() export class CredentialsEntity extends AbstractEntity implements ICredentialsDb { - @Generated() - @PrimaryColumn({ 
transformer: idStringifier }) + constructor(data?: Partial) { + super(); + Object.assign(this, data); + if (!this.id) { + this.id = generateNanoId(); + } + } + + @BeforeInsert() + nanoId(): void { + if (!this.id) { + this.id = generateNanoId(); + } + } + + @PrimaryColumn('varchar') id: string; @Column({ length: 128 }) diff --git a/packages/cli/src/databases/entities/ExecutionData.ts b/packages/cli/src/databases/entities/ExecutionData.ts new file mode 100644 index 0000000000..06143c74fb --- /dev/null +++ b/packages/cli/src/databases/entities/ExecutionData.ts @@ -0,0 +1,27 @@ +import { Column, Entity, ManyToOne, PrimaryColumn } from 'typeorm'; +import { idStringifier } from '../utils/transformers'; +import { ExecutionEntity } from './ExecutionEntity'; +import { jsonColumnType } from './AbstractEntity'; +import { IWorkflowBase } from 'n8n-workflow'; + +@Entity() +export class ExecutionData { + @Column('text') + data: string; + + // WARNING: the workflowData column has been changed from IWorkflowDb to IWorkflowBase + // when ExecutionData was introduced as a separate entity. + // This is because manual executions of unsaved workflows have no workflow id + // and IWorkflowDb has it as a mandatory field. IWorkflowBase reflects the correct + // data structure for this entity. + @Column(jsonColumnType) + workflowData: IWorkflowBase; + + @PrimaryColumn({ transformer: idStringifier }) + executionId: string; + + @ManyToOne('ExecutionEntity', 'data', { + onDelete: 'CASCADE', + }) + execution: ExecutionEntity; +} diff --git a/packages/cli/src/databases/entities/ExecutionEntity.ts b/packages/cli/src/databases/entities/ExecutionEntity.ts index c16365bbcf..8301346a71 100644 --- a/packages/cli/src/databases/entities/ExecutionEntity.ts +++ b/packages/cli/src/databases/entities/ExecutionEntity.ts @@ -1,10 +1,20 @@ import { ExecutionStatus, WorkflowExecuteMode } from 'n8n-workflow'; -import { Column, Entity, Generated, Index, OneToMany, PrimaryColumn } from 'typeorm'; -import { datetimeColumnType, jsonColumnType } from './AbstractEntity'; -import { IWorkflowDb } from '@/Interfaces'; -import type { IExecutionFlattedDb } from '@/Interfaces'; +import { + Column, + Entity, + Generated, + Index, + ManyToOne, + OneToMany, + OneToOne, + PrimaryColumn, + Relation, +} from 'typeorm'; +import { datetimeColumnType } from './AbstractEntity'; import { idStringifier } from '../utils/transformers'; +import type { ExecutionData } from './ExecutionData'; import type { ExecutionMetadata } from './ExecutionMetadata'; +import { WorkflowEntity } from './WorkflowEntity'; @Entity() @Index(['workflowId', 'id']) @@ -12,14 +22,11 @@ import type { ExecutionMetadata } from './ExecutionMetadata'; @Index(['finished', 'id']) @Index(['workflowId', 'finished', 'id']) @Index(['workflowId', 'waitTill', 'id']) -export class ExecutionEntity implements IExecutionFlattedDb { +export class ExecutionEntity { @Generated() @PrimaryColumn({ transformer: idStringifier }) id: string; - @Column('text') - data: string; - @Column() finished: boolean; @@ -42,10 +49,7 @@ export class ExecutionEntity implements IExecutionFlattedDb { @Column({ type: datetimeColumnType, nullable: true }) stoppedAt: Date; - @Column(jsonColumnType) - workflowData: IWorkflowDb; - - @Column({ nullable: true, transformer: idStringifier }) + @Column({ nullable: true }) workflowId: string; @Column({ type: datetimeColumnType, nullable: true }) @@ -53,4 +57,10 @@ export class ExecutionEntity implements IExecutionFlattedDb { @OneToMany('ExecutionMetadata', 'execution') metadata: 
ExecutionMetadata[]; + + @OneToOne('ExecutionData', 'execution') + executionData: Relation; + + @ManyToOne('WorkflowEntity') + workflow: WorkflowEntity; } diff --git a/packages/cli/src/databases/entities/SharedCredentials.ts b/packages/cli/src/databases/entities/SharedCredentials.ts index d1c46fd8a3..bda9006125 100644 --- a/packages/cli/src/databases/entities/SharedCredentials.ts +++ b/packages/cli/src/databases/entities/SharedCredentials.ts @@ -3,7 +3,6 @@ import { CredentialsEntity } from './CredentialsEntity'; import { User } from './User'; import { Role } from './Role'; import { AbstractEntity } from './AbstractEntity'; -import { idStringifier } from '../utils/transformers'; @Entity() export class SharedCredentials extends AbstractEntity { @@ -22,6 +21,6 @@ export class SharedCredentials extends AbstractEntity { @ManyToOne('CredentialsEntity', 'shared') credentials: CredentialsEntity; - @PrimaryColumn({ transformer: idStringifier }) + @PrimaryColumn() credentialsId: string; } diff --git a/packages/cli/src/databases/entities/SharedWorkflow.ts b/packages/cli/src/databases/entities/SharedWorkflow.ts index 57d8a8b506..12b9dd1def 100644 --- a/packages/cli/src/databases/entities/SharedWorkflow.ts +++ b/packages/cli/src/databases/entities/SharedWorkflow.ts @@ -3,7 +3,6 @@ import { WorkflowEntity } from './WorkflowEntity'; import { User } from './User'; import { Role } from './Role'; import { AbstractEntity } from './AbstractEntity'; -import { idStringifier } from '../utils/transformers'; @Entity() export class SharedWorkflow extends AbstractEntity { @@ -22,6 +21,6 @@ export class SharedWorkflow extends AbstractEntity { @ManyToOne('WorkflowEntity', 'shared') workflow: WorkflowEntity; - @PrimaryColumn({ transformer: idStringifier }) + @PrimaryColumn() workflowId: string; } diff --git a/packages/cli/src/databases/entities/TagEntity.ts b/packages/cli/src/databases/entities/TagEntity.ts index de25ba482b..8e9f518f22 100644 --- a/packages/cli/src/databases/entities/TagEntity.ts +++ b/packages/cli/src/databases/entities/TagEntity.ts @@ -1,15 +1,28 @@ -import { Column, Entity, Generated, Index, ManyToMany, OneToMany, PrimaryColumn } from 'typeorm'; +import { BeforeInsert, Column, Entity, Index, ManyToMany, OneToMany, PrimaryColumn } from 'typeorm'; import { IsString, Length } from 'class-validator'; - -import { idStringifier } from '../utils/transformers'; import type { WorkflowEntity } from './WorkflowEntity'; import type { WorkflowTagMapping } from './WorkflowTagMapping'; import { AbstractEntity } from './AbstractEntity'; +import { generateNanoId } from '../utils/generators'; @Entity() export class TagEntity extends AbstractEntity { - @Generated() - @PrimaryColumn({ transformer: idStringifier }) + constructor(data?: Partial) { + super(); + Object.assign(this, data); + if (!this.id) { + this.id = generateNanoId(); + } + } + + @BeforeInsert() + nanoId() { + if (!this.id) { + this.id = generateNanoId(); + } + } + + @PrimaryColumn('varchar') id: string; @Column({ length: 24 }) diff --git a/packages/cli/src/databases/entities/Variables.ts b/packages/cli/src/databases/entities/Variables.ts index 64eef5a9fc..6564da2fd0 100644 --- a/packages/cli/src/databases/entities/Variables.ts +++ b/packages/cli/src/databases/entities/Variables.ts @@ -1,9 +1,24 @@ -import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm'; +import { BeforeInsert, Column, Entity, PrimaryColumn } from 'typeorm'; +import { generateNanoId } from '../utils/generators'; @Entity() export class Variables { - @PrimaryGeneratedColumn() - 
id: number; + constructor(data?: Partial) { + Object.assign(this, data); + if (!this.id) { + this.id = generateNanoId(); + } + } + + @BeforeInsert() + nanoId() { + if (!this.id) { + this.id = generateNanoId(); + } + } + + @PrimaryColumn('varchar') + id: string; @Column('text') key: string; diff --git a/packages/cli/src/databases/entities/WebhookEntity.ts b/packages/cli/src/databases/entities/WebhookEntity.ts index 0ef75dc84f..208de86f20 100644 --- a/packages/cli/src/databases/entities/WebhookEntity.ts +++ b/packages/cli/src/databases/entities/WebhookEntity.ts @@ -1,11 +1,9 @@ import { Column, Entity, Index, PrimaryColumn } from 'typeorm'; -import { idStringifier } from '../utils/transformers'; - @Entity() @Index(['webhookId', 'method', 'pathLength']) export class WebhookEntity { - @Column({ transformer: idStringifier }) + @Column() workflowId: string; @PrimaryColumn() diff --git a/packages/cli/src/databases/entities/WorkflowEntity.ts b/packages/cli/src/databases/entities/WorkflowEntity.ts index bee34796f1..e40c1e1b7d 100644 --- a/packages/cli/src/databases/entities/WorkflowEntity.ts +++ b/packages/cli/src/databases/entities/WorkflowEntity.ts @@ -4,9 +4,9 @@ import { IConnections, IDataObject, IWorkflowSettings } from 'n8n-workflow'; import type { IBinaryKeyData, INode, IPairedItemData } from 'n8n-workflow'; import { + BeforeInsert, Column, Entity, - Generated, Index, JoinColumn, JoinTable, @@ -20,14 +20,29 @@ import type { TagEntity } from './TagEntity'; import type { SharedWorkflow } from './SharedWorkflow'; import type { WorkflowStatistics } from './WorkflowStatistics'; import type { WorkflowTagMapping } from './WorkflowTagMapping'; -import { idStringifier, objectRetriever, sqlite } from '../utils/transformers'; +import { objectRetriever, sqlite } from '../utils/transformers'; import { AbstractEntity, jsonColumnType } from './AbstractEntity'; import type { IWorkflowDb } from '@/Interfaces'; +import { generateNanoId } from '../utils/generators'; @Entity() export class WorkflowEntity extends AbstractEntity implements IWorkflowDb { - @Generated() - @PrimaryColumn({ transformer: idStringifier }) + constructor(data?: Partial) { + super(); + Object.assign(this, data); + if (!this.id) { + this.id = generateNanoId(); + } + } + + @BeforeInsert() + nanoId() { + if (!this.id) { + this.id = generateNanoId(); + } + } + + @PrimaryColumn('varchar') id: string; // TODO: Add XSS check diff --git a/packages/cli/src/databases/entities/WorkflowStatistics.ts b/packages/cli/src/databases/entities/WorkflowStatistics.ts index 5181bb257c..177000a0b0 100644 --- a/packages/cli/src/databases/entities/WorkflowStatistics.ts +++ b/packages/cli/src/databases/entities/WorkflowStatistics.ts @@ -1,5 +1,4 @@ import { Column, Entity, ManyToOne, PrimaryColumn } from 'typeorm'; -import { idStringifier } from '../utils/transformers'; import { datetimeColumnType } from './AbstractEntity'; import { WorkflowEntity } from './WorkflowEntity'; @@ -25,6 +24,6 @@ export class WorkflowStatistics { @ManyToOne('WorkflowEntity', 'shared') workflow: WorkflowEntity; - @PrimaryColumn({ transformer: idStringifier }) + @PrimaryColumn() workflowId: string; } diff --git a/packages/cli/src/databases/entities/WorkflowTagMapping.ts b/packages/cli/src/databases/entities/WorkflowTagMapping.ts index 88b92b0b19..69f74bc8a6 100644 --- a/packages/cli/src/databases/entities/WorkflowTagMapping.ts +++ b/packages/cli/src/databases/entities/WorkflowTagMapping.ts @@ -1,11 +1,10 @@ import { Entity, JoinColumn, ManyToOne, PrimaryColumn } from 'typeorm'; -import 
{ idStringifier } from '../utils/transformers'; import type { TagEntity } from './TagEntity'; import type { WorkflowEntity } from './WorkflowEntity'; @Entity({ name: 'workflows_tags' }) export class WorkflowTagMapping { - @PrimaryColumn({ transformer: idStringifier }) + @PrimaryColumn() workflowId: string; @ManyToOne('WorkflowEntity', 'tagMappings') diff --git a/packages/cli/src/databases/entities/index.ts b/packages/cli/src/databases/entities/index.ts index 8e2fdd7587..34efac244a 100644 --- a/packages/cli/src/databases/entities/index.ts +++ b/packages/cli/src/databases/entities/index.ts @@ -18,6 +18,7 @@ import { WorkflowEntity } from './WorkflowEntity'; import { WorkflowTagMapping } from './WorkflowTagMapping'; import { WorkflowStatistics } from './WorkflowStatistics'; import { ExecutionMetadata } from './ExecutionMetadata'; +import { ExecutionData } from './ExecutionData'; export const entities = { AuthIdentity, @@ -39,4 +40,5 @@ export const entities = { WorkflowTagMapping, WorkflowStatistics, ExecutionMetadata, + ExecutionData, }; diff --git a/packages/cli/src/databases/migrations/mysqldb/1690000000001-MigrateIntegerKeysToString.ts b/packages/cli/src/databases/migrations/mysqldb/1690000000001-MigrateIntegerKeysToString.ts new file mode 100644 index 0000000000..ab12145bb2 --- /dev/null +++ b/packages/cli/src/databases/migrations/mysqldb/1690000000001-MigrateIntegerKeysToString.ts @@ -0,0 +1,252 @@ +import type { MigrationContext, ReversibleMigration } from '@db/types'; + +export class MigrateIntegerKeysToString1690000000001 implements ReversibleMigration { + async up({ queryRunner, tablePrefix }: MigrationContext) { + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_entity RENAME COLUMN id to tmp_id;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_entity ADD COLUMN id varchar(36) NOT NULL;`, + ); + await queryRunner.query(`UPDATE ${tablePrefix}workflow_entity SET id = CONVERT(tmp_id, CHAR);`); + await queryRunner.query( + `CREATE INDEX \`TMP_idx_workflow_entity_id\` ON ${tablePrefix}workflow_entity (\`id\`);`, + ); + + await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity RENAME COLUMN id to tmp_id;`); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}tag_entity ADD COLUMN id varchar(36) NOT NULL;`, + ); + await queryRunner.query(`UPDATE ${tablePrefix}tag_entity SET id = CONVERT(tmp_id, CHAR);`); + await queryRunner.query( + `CREATE INDEX \`TMP_idx_tag_entity_id\` ON ${tablePrefix}tag_entity (\`id\`);`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags RENAME COLUMN \`workflowId\` to \`tmp_workflowId\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags ADD COLUMN \`workflowId\` varchar(36) NOT NULL;`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}workflows_tags SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags RENAME COLUMN \`tagId\` to \`tmp_tagId\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags ADD COLUMN \`tagId\` varchar(36) NOT NULL;`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}workflows_tags SET \`tagId\` = CONVERT(\`tmp_tagId\`, CHAR);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`workflowId\`, \`tagId\`);`, + ); + await queryRunner.query( + `CREATE INDEX \`idx_workflows_tags_workflowid\` ON ${tablePrefix}workflows_tags (\`workflowId\`);`, + ); + await 
queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags DROP FOREIGN KEY \`FK_54b2f0343d6a2078fa137443869\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT \`fk_workflows_tags_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags DROP FOREIGN KEY \`FK_77505b341625b0b4768082e2171\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT \`fk_workflows_tags_tag_id\` FOREIGN KEY (\`tagId\`) REFERENCES tag_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags DROP COLUMN \`tmp_workflowId\`;`, + ); + await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags DROP COLUMN \`tmp_tagId\`;`); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_workflow RENAME COLUMN \`workflowId\` to \`tmp_workflowId\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_workflow ADD COLUMN \`workflowId\` varchar(36) NOT NULL;`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}shared_workflow SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_workflow DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`userId\`, \`workflowId\`);`, + ); + await queryRunner.query( + `CREATE INDEX \`idx_shared_workflow_workflow_id\` ON ${tablePrefix}shared_workflow (\`workflowId\`);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_workflow DROP FOREIGN KEY \`FK_b83f8d2530884b66a9c848c8b88\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_workflow ADD CONSTRAINT \`fk_shared_workflow_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_workflow DROP COLUMN \`tmp_workflowId\`;`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics RENAME COLUMN \`workflowId\` to \`tmp_workflowId\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics ADD COLUMN \`workflowId\` varchar(36) NOT NULL;`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}workflow_statistics SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`, + ); + await queryRunner.query( + `CREATE INDEX \`idx_workflow_statistics_workflow_id\` ON ${tablePrefix}workflow_statistics (\`workflowId\`);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics DROP FOREIGN KEY \`workflow_statistics_ibfk_1\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics ADD CONSTRAINT \`fk_workflow_statistics_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`workflowId\`, \`name\`);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics DROP COLUMN \`tmp_workflowId\`;`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}webhook_entity RENAME COLUMN \`workflowId\` to \`tmp_workflowId\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}webhook_entity ADD COLUMN \`workflowId\` varchar(36) NOT NULL;`, + ); + await queryRunner.query( + `UPDATE 
${tablePrefix}webhook_entity SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN \`tmp_workflowId\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}webhook_entity ADD CONSTRAINT \`fk_webhook_entity_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity RENAME COLUMN \`workflowId\` to \`tmp_workflowId\`;`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity ADD COLUMN \`workflowId\` varchar(36);`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}execution_entity SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`, + ); + await queryRunner.query( + `CREATE INDEX \`idx_execution_entity_workflow_id_id\` ON ${tablePrefix}execution_entity (\`workflowId\`,\`id\`);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity DROP FOREIGN KEY \`FK_execution_entity_workflowId\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity ADD CONSTRAINT \`fk_execution_entity_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `DROP INDEX \`IDX_81fc04c8a17de15835713505e4\` ON ${tablePrefix}execution_entity;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity DROP COLUMN \`tmp_workflowId\`;`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_entity MODIFY COLUMN tmp_id INT NOT NULL;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_entity DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`id\`);`, + ); + await queryRunner.query( + `DROP INDEX \`TMP_idx_workflow_entity_id\` ON ${tablePrefix}workflow_entity;`, + ); + await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity DROP COLUMN tmp_id;`); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}tag_entity MODIFY COLUMN tmp_id INT NOT NULL;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}tag_entity DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`id\`);`, + ); + await queryRunner.query(`DROP INDEX \`TMP_idx_tag_entity_id\` ON ${tablePrefix}tag_entity;`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity DROP COLUMN tmp_id;`); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}credentials_entity RENAME COLUMN id to tmp_id;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}credentials_entity ADD COLUMN id varchar(36) NOT NULL;`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}credentials_entity SET id = CONVERT(tmp_id, CHAR);`, + ); + await queryRunner.query( + `CREATE INDEX \`TMP_idx_credentials_entity_id\` ON ${tablePrefix}credentials_entity (\`id\`);`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials RENAME COLUMN credentialsId to tmp_credentialsId;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials ADD COLUMN credentialsId varchar(36) NOT NULL;`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}shared_credentials SET credentialsId = CONVERT(tmp_credentialsId, CHAR);`, + ); + await queryRunner.query( + `CREATE INDEX \`idx_shared_credentials_id\` ON ${tablePrefix}shared_credentials (\`credentialsId\`);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials DROP FOREIGN KEY 
\`FK_68661def1d4bcf2451ac8dbd949\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials ADD CONSTRAINT \`fk_shared_credentials_credentials_id\` FOREIGN KEY (\`credentialsId\`) REFERENCES credentials_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials MODIFY COLUMN tmp_credentialsId INT NOT NULL;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`userId\`,\`credentialsId\`);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials DROP COLUMN tmp_credentialsId;`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}credentials_entity MODIFY COLUMN tmp_id INT NOT NULL;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}credentials_entity DROP CONSTRAINT \`PRIMARY\`, ADD PRIMARY KEY (\`id\`);`, + ); + await queryRunner.query( + `DROP INDEX \`TMP_idx_credentials_entity_id\` ON ${tablePrefix}credentials_entity;`, + ); + await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity DROP COLUMN tmp_id;`); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}variables RENAME COLUMN \`id\` to \`tmp_id\`;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}variables ADD COLUMN \`id\` varchar(36) NOT NULL;`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}variables SET \`id\` = CONVERT(\`tmp_id\`, CHAR);`, + ); + await queryRunner.query( + `CREATE INDEX \`TMP_idx_variables_id\` ON ${tablePrefix}variables (\`id\`);`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}variables CHANGE \`tmp_id\` \`tmp_id\` int NOT NULL;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}variables DROP PRIMARY KEY, ADD PRIMARY KEY (\`id\`);`, + ); + await queryRunner.query(`ALTER TABLE ${tablePrefix}variables DROP COLUMN \`tmp_id\`;`); + } + + // eslint-disable-next-line @typescript-eslint/no-empty-function, @typescript-eslint/no-unused-vars + async down({ queryRunner, tablePrefix }: MigrationContext) {} +} diff --git a/packages/cli/src/databases/migrations/mysqldb/1690000000030-SeparateExecutionData.ts b/packages/cli/src/databases/migrations/mysqldb/1690000000030-SeparateExecutionData.ts new file mode 100644 index 0000000000..7bf58e9c7d --- /dev/null +++ b/packages/cli/src/databases/migrations/mysqldb/1690000000030-SeparateExecutionData.ts @@ -0,0 +1,43 @@ +import type { MigrationContext, ReversibleMigration } from '@db/types'; + +export class SeparateExecutionData1690000000030 implements ReversibleMigration { + async up({ queryRunner, tablePrefix }: MigrationContext) { + await queryRunner.query( + `CREATE TABLE ${tablePrefix}execution_data ( + executionId int(11) NOT NULL primary key, + workflowData json NOT NULL, + data TEXT NOT NULL, + CONSTRAINT \`${tablePrefix}execution_data_FK\` FOREIGN KEY (\`executionId\`) REFERENCES \`${tablePrefix}execution_entity\` (\`id\`) ON DELETE CASCADE + ) + ENGINE=InnoDB`, + ); + + await queryRunner.query( + `INSERT INTO ${tablePrefix}execution_data ( + executionId, + workflowData, + data) + SELECT id, workflowData, data FROM ${tablePrefix}execution_entity + `, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity DROP COLUMN workflowData, DROP COLUMN data`, + ); + } + + async down({ queryRunner, tablePrefix }: MigrationContext) { + await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity + ADD workflowData json NULL, + ADD data text NULL`, + ); + + await 
queryRunner.query( + `UPDATE ${tablePrefix}execution_entity SET workflowData = ${tablePrefix}execution_data.workflowData, data = ${tablePrefix}execution_data.data + FROM ${tablePrefix}execution_data WHERE ${tablePrefix}execution_data.executionId = ${tablePrefix}execution_entity.id`, + ); + + await queryRunner.query(`DROP TABLE ${tablePrefix}execution_data`); + } +} diff --git a/packages/cli/src/databases/migrations/mysqldb/index.ts b/packages/cli/src/databases/migrations/mysqldb/index.ts index 18542624f3..ac84b759e2 100644 --- a/packages/cli/src/databases/migrations/mysqldb/index.ts +++ b/packages/cli/src/databases/migrations/mysqldb/index.ts @@ -38,6 +38,8 @@ import { UpdateRunningExecutionStatus1677236788851 } from './1677236788851-Updat import { CreateExecutionMetadataTable1679416281779 } from './1679416281779-CreateExecutionMetadataTable'; import { CreateVariables1677501636753 } from './1677501636753-CreateVariables'; import { AddUserActivatedProperty1681134145996 } from './1681134145996-AddUserActivatedProperty'; +import { MigrateIntegerKeysToString1690000000001 } from './1690000000001-MigrateIntegerKeysToString'; +import { SeparateExecutionData1690000000030 } from './1690000000030-SeparateExecutionData'; export const mysqlMigrations: Migration[] = [ InitialMigration1588157391238, @@ -79,4 +81,6 @@ export const mysqlMigrations: Migration[] = [ CreateExecutionMetadataTable1679416281779, CreateVariables1677501636753, AddUserActivatedProperty1681134145996, + MigrateIntegerKeysToString1690000000001, + SeparateExecutionData1690000000030, ]; diff --git a/packages/cli/src/databases/migrations/postgresdb/1690000000000-MigrateIntegerKeysToString.ts b/packages/cli/src/databases/migrations/postgresdb/1690000000000-MigrateIntegerKeysToString.ts new file mode 100644 index 0000000000..e422d179a5 --- /dev/null +++ b/packages/cli/src/databases/migrations/postgresdb/1690000000000-MigrateIntegerKeysToString.ts @@ -0,0 +1,262 @@ +/* eslint-disable n8n-local-rules/no-unneeded-backticks */ +import type { MigrationContext, ReversibleMigration } from '@db/types'; + +export class MigrateIntegerKeysToString1690000000000 implements ReversibleMigration { + async up({ queryRunner, tablePrefix }: MigrationContext) { + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_entity RENAME COLUMN id to tmp_id;`, + ); + await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ADD COLUMN id varchar(36);`); + await queryRunner.query(`UPDATE ${tablePrefix}workflow_entity SET id = tmp_id::text;`); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN id SET NOT NULL;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN tmp_id DROP DEFAULT;`, + ); + await queryRunner.query(`DROP SEQUENCE IF EXISTS ${tablePrefix}workflow_entity_id_seq;`); + await queryRunner.query( + `CREATE UNIQUE INDEX "pk_workflow_entity_id" ON ${tablePrefix}workflow_entity ("id");`, + ); + + await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity RENAME COLUMN id to tmp_id;`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ADD COLUMN id varchar(36);`); + await queryRunner.query(`UPDATE ${tablePrefix}tag_entity SET id = tmp_id::text;`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN id SET NOT NULL;`); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}tag_entity ALTER COLUMN tmp_id DROP DEFAULT;`, + ); + await queryRunner.query(`DROP SEQUENCE IF EXISTS tag_entity_id_seq;`); + await queryRunner.query( + `CREATE 
UNIQUE INDEX "pk_tag_entity_id" ON ${tablePrefix}tag_entity ("id");`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags RENAME COLUMN "workflowId" to "tmp_workflowId";`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags ADD COLUMN "workflowId" varchar(36);`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}workflows_tags SET "workflowId" = "tmp_workflowId"::text;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags ALTER COLUMN "workflowId" SET NOT NULL;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags RENAME COLUMN "tagId" to "tmp_tagId";`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags ADD COLUMN "tagId" varchar(36);`, + ); + await queryRunner.query(`UPDATE ${tablePrefix}workflows_tags SET "tagId" = "tmp_tagId"::text;`); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags ALTER COLUMN "tagId" SET NOT NULL;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT IF EXISTS "FK_31140eb41f019805b40d0087449";`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT IF EXISTS "FK_5e29bfe9e22c5d6567f509d4a46";`, + ); + await queryRunner.query( + `CREATE UNIQUE INDEX "pk_workflows_tags" ON ${tablePrefix}workflows_tags ("workflowId","tagId");`, + ); + await queryRunner.query(`DROP INDEX IF EXISTS "idx_31140eb41f019805b40d008744";`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT "PK_a60448a90e51a114e95e2a125b3", + ADD CONSTRAINT "pk_workflows_tags" PRIMARY KEY USING INDEX "pk_workflows_tags";`); + await queryRunner.query( + `CREATE INDEX "idx_workflows_tags_workflow_id" ON ${tablePrefix}workflows_tags ("workflowId");`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT "fk_workflows_tags_workflow_id" FOREIGN KEY ("workflowId") REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT "fk_workflows_tags_tag_id" FOREIGN KEY ("tagId") REFERENCES tag_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflows_tags DROP COLUMN "tmp_workflowId";`, + ); + await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags DROP COLUMN "tmp_tagId";`); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_workflow RENAME COLUMN "workflowId" to "tmp_workflowId";`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_workflow ADD COLUMN "workflowId" varchar(36);`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}shared_workflow SET "workflowId" = "tmp_workflowId"::text;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_workflow ALTER COLUMN "workflowId" SET NOT NULL;`, + ); + await queryRunner.query( + `CREATE UNIQUE INDEX "pk_shared_workflow_id" ON ${tablePrefix}shared_workflow ("userId","workflowId");`, + ); + await queryRunner.query(`DROP INDEX IF EXISTS "IDX_65a0933c0f19d278881653bf81d35064";`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}shared_workflow DROP CONSTRAINT "PK_cc5d5a71c7b2591f5154ffb0c785e85e", + ADD CONSTRAINT "pk_shared_workflow_id" PRIMARY KEY USING INDEX "pk_shared_workflow_id";`); + await queryRunner.query( + `CREATE INDEX "idx_shared_workflow_workflow_id" ON ${tablePrefix}shared_workflow ("workflowId");`, + ); + await queryRunner.query( + `ALTER TABLE 
${tablePrefix}shared_workflow ADD CONSTRAINT "fk_shared_workflow_workflow_id" FOREIGN KEY ("workflowId") REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_workflow DROP COLUMN "tmp_workflowId";`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics RENAME COLUMN "workflowId" to "tmp_workflowId";`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics ADD COLUMN "workflowId" varchar(36);`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}workflow_statistics SET "workflowId" = "tmp_workflowId"::text;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics ALTER COLUMN "workflowId" SET NOT NULL;`, + ); + await queryRunner.query( + `CREATE UNIQUE INDEX "pk_workflow_statistics" ON ${tablePrefix}workflow_statistics ("workflowId","name");`, + ); + await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_statistics DROP CONSTRAINT IF EXISTS "workflow_statistics_pkey", + ADD CONSTRAINT "pk_workflow_statistics" PRIMARY KEY USING INDEX "pk_workflow_statistics";`); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics DROP COLUMN "tmp_workflowId";`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_statistics ADD CONSTRAINT "fk_workflow_statistics_workflow_id" FOREIGN KEY ("workflowId") REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}webhook_entity RENAME COLUMN "workflowId" to "tmp_workflowId";`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}webhook_entity ADD COLUMN "workflowId" varchar(36);`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}webhook_entity SET "workflowId" = "tmp_workflowId"::text;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}webhook_entity ALTER COLUMN "workflowId" SET NOT NULL;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "tmp_workflowId";`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}webhook_entity ADD CONSTRAINT "fk_webhook_entity_workflow_id" FOREIGN KEY ("workflowId") REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity RENAME COLUMN "workflowId" to "tmp_workflowId";`, + ); + // -- Intentionally NOT setting colum to NOT NULL + await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity ADD COLUMN "workflowId" varchar(36);`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}execution_entity SET "workflowId" = "tmp_workflowId"::text;`, + ); + await queryRunner.query(`DROP INDEX IF EXISTS "IDX_d160d4771aba5a0d78943edbe3";`); + await queryRunner.query(`DROP INDEX IF EXISTS "IDX_4f474ac92be81610439aaad61e";`); + await queryRunner.query(`DROP INDEX IF EXISTS "IDX_58154df94c686818c99fb754ce";`); + // -- index idx_33228da131bb1112247cf52a42 is a duplicate of IDX_33228da131bb1112247cf52a42 + await queryRunner.query(`DROP INDEX IF EXISTS "idx_33228da131bb1112247cf52a42";`); + await queryRunner.query( + `CREATE INDEX "idx_execution_entity_workflow_id_id" ON ${tablePrefix}execution_entity ("workflowId","id");`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity DROP COLUMN "tmp_workflowId";`, + ); + // -- FK was missing in prev schema - should it be added? 
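[Editorial aside, not part of the patch: each table in this Postgres migration repeats the same rename-and-copy steps. The helper below is a hypothetical condensation of that per-table pattern, written only for illustration; the real migration keeps the statements inline and drops or recreates indexes, foreign keys and primary-key constraints under each table's own historical names, so the `_id_seq` and `_pkey` names used here are assumptions.]

async function convertIdColumnToVarchar(
  queryRunner: { query(sql: string): Promise<unknown> },
  table: string,
) {
  // 1. Keep the old integer key around under a temporary name.
  await queryRunner.query(`ALTER TABLE ${table} RENAME COLUMN id TO tmp_id;`);
  // 2. Add the new string key and copy the integer values over as text.
  await queryRunner.query(`ALTER TABLE ${table} ADD COLUMN id varchar(36);`);
  await queryRunner.query(`UPDATE ${table} SET id = tmp_id::text;`);
  await queryRunner.query(`ALTER TABLE ${table} ALTER COLUMN id SET NOT NULL;`);
  // 3. Detach the serial default and its sequence (names assumed to follow the
  //    <table>_id_seq / <table>_pkey convention), move the primary key, drop tmp_id.
  await queryRunner.query(`ALTER TABLE ${table} ALTER COLUMN tmp_id DROP DEFAULT;`);
  await queryRunner.query(`DROP SEQUENCE IF EXISTS ${table}_id_seq;`);
  await queryRunner.query(`ALTER TABLE ${table} DROP CONSTRAINT IF EXISTS ${table}_pkey;`);
  await queryRunner.query(`ALTER TABLE ${table} DROP COLUMN tmp_id;`);
  await queryRunner.query(`ALTER TABLE ${table} ADD PRIMARY KEY (id);`);
}

Referencing tables (workflows_tags, shared_workflow, workflow_statistics, webhook_entity, execution_entity, shared_credentials) get the parallel treatment for their workflowId/credentialsId columns before their foreign keys are re-pointed at the new varchar keys.

[End of editorial aside.]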
+ await queryRunner.query( + `ALTER TABLE ${tablePrefix}execution_entity ADD CONSTRAINT "fk_execution_entity_workflow_id" FOREIGN KEY ("workflowId") REFERENCES workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}workflow_entity DROP CONSTRAINT IF EXISTS "pk_eded7d72664448da7745d551207";`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}tag_entity DROP CONSTRAINT IF EXISTS "PK_7a50a9b74ae6855c0dcaee25052";`, + ); + await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity DROP COLUMN tmp_id;`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity DROP COLUMN tmp_id;`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ADD PRIMARY KEY (id);`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity ADD PRIMARY KEY (id);`); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}credentials_entity RENAME COLUMN id to tmp_id;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}credentials_entity ADD COLUMN id varchar(36);`, + ); + await queryRunner.query(`UPDATE ${tablePrefix}credentials_entity SET id = tmp_id::text;`); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN id SET NOT NULL;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN tmp_id DROP DEFAULT;`, + ); + await queryRunner.query(`DROP SEQUENCE IF EXISTS credentials_entity_id_seq;`); + await queryRunner.query( + `CREATE UNIQUE INDEX "pk_credentials_entity_id" ON ${tablePrefix}credentials_entity ("id");`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials RENAME COLUMN "credentialsId" to "tmp_credentialsId";`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials ADD COLUMN "credentialsId" varchar(36);`, + ); + await queryRunner.query( + `UPDATE ${tablePrefix}shared_credentials SET "credentialsId" = "tmp_credentialsId"::text;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials ALTER COLUMN "credentialsId" SET NOT NULL;`, + ); + await queryRunner.query( + `CREATE UNIQUE INDEX "pk_shared_credentials_id" ON ${tablePrefix}shared_credentials ("userId","credentialsId");`, + ); + await queryRunner.query(`DROP INDEX IF EXISTS "IDX_829d16efa0e265cb076d50eca8d21733";`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}shared_credentials DROP CONSTRAINT "PK_10dd1527ffb639609be7aadd98f628c6", + ADD CONSTRAINT "pk_shared_credentials_id" PRIMARY KEY USING INDEX "pk_shared_credentials_id";`); + await queryRunner.query( + `CREATE INDEX "idx_shared_credentials_credentials_id" ON ${tablePrefix}shared_credentials ("credentialsId");`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials ADD CONSTRAINT "fk_shared_credentials_credentials_id" FOREIGN KEY ("credentialsId") REFERENCES credentials_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}shared_credentials DROP COLUMN "tmp_credentialsId";`, + ); + + await queryRunner.query( + `ALTER TABLE ${tablePrefix}credentials_entity DROP CONSTRAINT IF EXISTS "pk_814c3d3c36e8a27fa8edb761b0e";`, + ); + await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity DROP COLUMN tmp_id;`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ADD PRIMARY KEY (id);`); + + await queryRunner.query(`ALTER TABLE ${tablePrefix}variables RENAME COLUMN id to tmp_id;`); + await queryRunner.query(`ALTER 
TABLE ${tablePrefix}variables ADD COLUMN id varchar(36);`); + await queryRunner.query(`UPDATE ${tablePrefix}variables SET id = tmp_id::text;`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}variables ALTER COLUMN id SET NOT NULL;`); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}variables ALTER COLUMN tmp_id DROP DEFAULT;`, + ); + await queryRunner.query(`DROP SEQUENCE IF EXISTS variables_id_seq;`); + await queryRunner.query( + `CREATE UNIQUE INDEX "pk_variables_id" ON ${tablePrefix}variables ("id");`, + ); + await queryRunner.query( + `ALTER TABLE ${tablePrefix}variables DROP CONSTRAINT IF EXISTS "variables_pkey";`, + ); + await queryRunner.query(`ALTER TABLE ${tablePrefix}variables DROP COLUMN tmp_id;`); + await queryRunner.query(`ALTER TABLE ${tablePrefix}variables ADD PRIMARY KEY (id);`); + } + + // eslint-disable-next-line @typescript-eslint/no-empty-function, @typescript-eslint/no-unused-vars + async down({ queryRunner, tablePrefix }: MigrationContext) {} +} diff --git a/packages/cli/src/databases/migrations/postgresdb/1690000000020-SeparateExecutionData.ts b/packages/cli/src/databases/migrations/postgresdb/1690000000020-SeparateExecutionData.ts new file mode 100644 index 0000000000..4ee413b4b3 --- /dev/null +++ b/packages/cli/src/databases/migrations/postgresdb/1690000000020-SeparateExecutionData.ts @@ -0,0 +1,42 @@ +import type { MigrationContext, ReversibleMigration } from '@db/types'; + +export class SeparateExecutionData1690000000020 implements ReversibleMigration { + async up({ queryRunner, tablePrefix }: MigrationContext) { + await queryRunner.query( + `CREATE TABLE "${tablePrefix}execution_data" ( + "executionId" integer NOT NULL, + "workflowData" json NOT NULL, + "data" text NOT NULL, + CONSTRAINT "${tablePrefix}execution_data_fk" FOREIGN KEY ("executionId") REFERENCES ${tablePrefix}execution_entity(id) ON DELETE CASCADE + )`, + ); + + await queryRunner.query( + `INSERT INTO "${tablePrefix}execution_data" ( + "executionId", + "workflowData", + "data") + SELECT "id", "workflowData", "data" FROM "${tablePrefix}execution_entity" + `, + ); + + await queryRunner.query( + `ALTER TABLE "${tablePrefix}execution_entity" DROP COLUMN "workflowData", DROP COLUMN "data"`, + ); + } + + async down({ queryRunner, tablePrefix }: MigrationContext) { + await queryRunner.query( + `ALTER TABLE "${tablePrefix}execution_entity" + ADD "workflowData" json NULL, + ADD "data" text NULL`, + ); + + await queryRunner.query( + `UPDATE "${tablePrefix}execution_entity" SET "workflowData" = "execution_data"."workflowData", "data" = "execution_data"."data" + FROM "${tablePrefix}execution_data" WHERE "${tablePrefix}execution_data"."executionId" = "${tablePrefix}execution_entity"."id"`, + ); + + await queryRunner.query(`DROP TABLE "${tablePrefix}execution_data"`); + } +} diff --git a/packages/cli/src/databases/migrations/postgresdb/index.ts b/packages/cli/src/databases/migrations/postgresdb/index.ts index 8c047b84f2..95c025acf6 100644 --- a/packages/cli/src/databases/migrations/postgresdb/index.ts +++ b/packages/cli/src/databases/migrations/postgresdb/index.ts @@ -36,6 +36,8 @@ import { UpdateRunningExecutionStatus1677236854063 } from './1677236854063-Updat import { CreateExecutionMetadataTable1679416281778 } from './1679416281778-CreateExecutionMetadataTable'; import { CreateVariables1677501636754 } from './1677501636754-CreateVariables'; import { AddUserActivatedProperty1681134145996 } from './1681134145996-AddUserActivatedProperty'; +import { MigrateIntegerKeysToString1690000000000 } from 
'./1690000000000-MigrateIntegerKeysToString'; +import { SeparateExecutionData1690000000020 } from './1690000000020-SeparateExecutionData'; export const postgresMigrations: Migration[] = [ InitialMigration1587669153312, @@ -75,4 +77,6 @@ export const postgresMigrations: Migration[] = [ CreateExecutionMetadataTable1679416281778, CreateVariables1677501636754, AddUserActivatedProperty1681134145996, + MigrateIntegerKeysToString1690000000000, + SeparateExecutionData1690000000020, ]; diff --git a/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts b/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts new file mode 100644 index 0000000000..614252f503 --- /dev/null +++ b/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts @@ -0,0 +1,185 @@ +import type { MigrationContext, ReversibleMigration } from '@db/types'; + +export class MigrateIntegerKeysToString1690000000002 implements ReversibleMigration { + transaction = false as const; + + async up({ queryRunner, tablePrefix }: MigrationContext) { + await queryRunner.query('PRAGMA foreign_keys=OFF'); + await queryRunner.startTransaction(); + await queryRunner.query(` +CREATE TABLE "${tablePrefix}TMP_workflow_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text, "connections" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "settings" text, "staticData" text, "pinData" text, "versionId" varchar(36), "triggerCount" integer NOT NULL DEFAULT 0);`); + await queryRunner.query( + `INSERT INTO "${tablePrefix}TMP_workflow_entity" SELECT * FROM "${tablePrefix}workflow_entity";`, + ); + await queryRunner.query('DROP TABLE "workflow_entity";'); + await queryRunner.query(`ALTER TABLE "${tablePrefix}TMP_workflow_entity" RENAME TO "${tablePrefix}workflow_entity"; +`); + + await queryRunner.query(` +CREATE TABLE "${tablePrefix}TMP_tag_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(24) NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')));`); + await queryRunner.query( + `INSERT INTO "${tablePrefix}TMP_tag_entity" SELECT * FROM "${tablePrefix}tag_entity";`, + ); + await queryRunner.query('DROP TABLE "tag_entity";'); + await queryRunner.query( + `ALTER TABLE "${tablePrefix}TMP_tag_entity" RENAME TO "${tablePrefix}tag_entity";`, + ); + + await queryRunner.query(` +CREATE TABLE "${tablePrefix}TMP_workflows_tags" ("workflowId" varchar(36) NOT NULL, "tagId" integer NOT NULL, CONSTRAINT "FK_workflows_tags_workflow_entity" FOREIGN KEY ("workflowId") REFERENCES "workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, CONSTRAINT "FK_workflows_tags_tag_entity" FOREIGN KEY ("tagId") REFERENCES "${tablePrefix}tag_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("workflowId", "tagId"));`); + await queryRunner.query( + `INSERT INTO "${tablePrefix}TMP_workflows_tags" SELECT * FROM "${tablePrefix}workflows_tags";`, + ); + await queryRunner.query(`DROP TABLE "${tablePrefix}workflows_tags";`); + await queryRunner.query( + `ALTER TABLE "${tablePrefix}TMP_workflows_tags" RENAME TO "${tablePrefix}workflows_tags";`, + ); + await queryRunner.query( + `CREATE INDEX "idx_workflows_tags_tag_id" ON "${tablePrefix}workflows_tags" ("tagId");`, + 
); + await queryRunner.query( + `CREATE INDEX "idx_workflows_tags_workflow_id" ON "${tablePrefix}workflows_tags" ("workflowId");`, + ); + + await queryRunner.query(`CREATE TABLE "${tablePrefix}TMP_workflow_statistics" ( + "count" INTEGER DEFAULT 0, + "latestEvent" DATETIME, + "name" VARCHAR(128) NOT NULL, + "workflowId" VARCHAR(36), + PRIMARY KEY("workflowId", "name"), + FOREIGN KEY("workflowId") REFERENCES "${tablePrefix}workflow_entity"("id") ON DELETE CASCADE + );`); + await queryRunner.query( + `INSERT INTO "${tablePrefix}TMP_workflow_statistics" SELECT * FROM "${tablePrefix}workflow_statistics";`, + ); + await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_statistics";`); + await queryRunner.query( + `ALTER TABLE "${tablePrefix}TMP_workflow_statistics" RENAME TO "${tablePrefix}workflow_statistics";`, + ); + + await queryRunner.query( + `CREATE TABLE "${tablePrefix}TMP_shared_workflow" ( + "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + "roleId" integer NOT NULL, "userId" varchar NOT NULL, + "workflowId" VARCHAR(36) NOT NULL, + CONSTRAINT "FK_shared_workflow_role" FOREIGN KEY ("roleId") REFERENCES "role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT "FK_shared_workflow_user" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, + CONSTRAINT "FK_shared_workflow_workflow_entity" FOREIGN KEY ("workflowId") REFERENCES "workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, + PRIMARY KEY ("userId", "workflowId"));`, + ); + + await queryRunner.query( + `INSERT INTO "${tablePrefix}TMP_shared_workflow" SELECT * FROM "${tablePrefix}shared_workflow";`, + ); + + await queryRunner.query(`DROP TABLE "${tablePrefix}shared_workflow";`); + await queryRunner.query( + `ALTER TABLE "${tablePrefix}TMP_shared_workflow" RENAME TO "${tablePrefix}shared_workflow";`, + ); + await queryRunner.query( + `CREATE INDEX "idx_shared_workflow_workflow_id" ON "${tablePrefix}shared_workflow" ("workflowId");`, + ); + + await queryRunner.query( + `CREATE TABLE "${tablePrefix}TMP_webhook_entity" ("workflowId" varchar(36) NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, "webhookId" varchar, "pathLength" integer, PRIMARY KEY ("webhookPath", "method"));`, + ); + await queryRunner.query( + `INSERT INTO "${tablePrefix}TMP_webhook_entity" SELECT * FROM "${tablePrefix}webhook_entity";`, + ); + await queryRunner.query(`DROP TABLE "${tablePrefix}webhook_entity";`); + await queryRunner.query( + `ALTER TABLE "${tablePrefix}TMP_webhook_entity" RENAME TO "${tablePrefix}webhook_entity";`, + ); + await queryRunner.query( + `CREATE INDEX "idx_webhook_entity_webhook_path_method" ON "${tablePrefix}webhook_entity" ("webhookId","method","pathLength");`, + ); + + await queryRunner.query(`CREATE TABLE "${tablePrefix}TMP_execution_entity" ( + "id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, + "workflowId" varchar(36), + "finished" boolean NOT NULL, + "mode" varchar NOT NULL, + "retryOf" varchar, + "retrySuccessId" varchar, + "startedAt" datetime NOT NULL, + "stoppedAt" datetime, + "waitTill" datetime, + "workflowData" text NOT NULL, + "data" text NOT NULL, "status" varchar, + FOREIGN KEY("workflowId") REFERENCES "workflow_entity" ("id") ON DELETE CASCADE + );`); + await queryRunner.query( + `INSERT INTO "${tablePrefix}TMP_execution_entity" SELECT * FROM "${tablePrefix}execution_entity";`, + ); + await 
queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity";`); + await queryRunner.query( + `ALTER TABLE "${tablePrefix}TMP_execution_entity" RENAME TO "${tablePrefix}execution_entity";`, + ); + await queryRunner.query( + `CREATE INDEX "idx_execution_entity_stopped_at" ON "${tablePrefix}execution_entity" ("stoppedAt");`, + ); + await queryRunner.query( + `CREATE INDEX "idx_execution_entity_wait_till" ON "${tablePrefix}execution_entity" ("waitTill");`, + ); + + await queryRunner.query( + `CREATE TABLE "${tablePrefix}TMP_credentials_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(32) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')));`, + ); + await queryRunner.query( + `INSERT INTO "${tablePrefix}TMP_credentials_entity" SELECT * FROM "${tablePrefix}credentials_entity";`, + ); + await queryRunner.query(`DROP TABLE "${tablePrefix}credentials_entity";`); + await queryRunner.query( + `ALTER TABLE "${tablePrefix}TMP_credentials_entity" RENAME TO "${tablePrefix}credentials_entity";`, + ); + await queryRunner.query( + `CREATE INDEX "idx_credentials_entity_type" ON "${tablePrefix}credentials_entity" ("type");`, + ); + + await queryRunner.query( + `CREATE TABLE "${tablePrefix}TMP_shared_credentials" ("createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + "roleId" integer NOT NULL, + "userId" varchar NOT NULL, "credentialsId" varchar(36) NOT NULL, + CONSTRAINT "FK_shared_credentials_role" FOREIGN KEY ("roleId") REFERENCES "role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION, + CONSTRAINT "FK_shared_credentials_user" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, + CONSTRAINT "FK_shared_credentials_credentials" FOREIGN KEY ("credentialsId") REFERENCES "${tablePrefix}credentials_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("userId", "credentialsId"));`, + ); + await queryRunner.query( + `INSERT INTO "${tablePrefix}TMP_shared_credentials" SELECT * FROM "${tablePrefix}shared_credentials";`, + ); + await queryRunner.query(`DROP TABLE "${tablePrefix}shared_credentials";`); + await queryRunner.query( + `ALTER TABLE "${tablePrefix}TMP_shared_credentials" RENAME TO "${tablePrefix}shared_credentials";`, + ); + await queryRunner.query( + `CREATE INDEX "idx_shared_credentials_credentials" ON "${tablePrefix}shared_credentials" ("credentialsId");`, + ); + await queryRunner.query( + `CREATE UNIQUE INDEX "idx_shared_credentials_user_credentials" ON "${tablePrefix}shared_credentials" ("userId","credentialsId");`, + ); + + await queryRunner.query(`CREATE TABLE "${tablePrefix}TMP_variables" ( + id varchar(36) PRIMARY KEY NOT NULL, + "key" TEXT NOT NULL, + "type" TEXT NOT NULL DEFAULT ('string'), + value TEXT, + UNIQUE("key") + );`); + await queryRunner.query( + `INSERT INTO "${tablePrefix}TMP_variables" SELECT * FROM "${tablePrefix}variables";`, + ); + await queryRunner.query(`DROP TABLE "${tablePrefix}variables";`); + await queryRunner.query( + `ALTER TABLE "${tablePrefix}TMP_variables" RENAME TO "${tablePrefix}variables";`, + ); + await queryRunner.query(`CREATE UNIQUE INDEX "idx_variables_key" ON "${tablePrefix}variables" ("key"); +`); + await queryRunner.commitTransaction(); + await queryRunner.query('PRAGMA foreign_keys=ON'); 
+ } + + // eslint-disable-next-line @typescript-eslint/no-empty-function, @typescript-eslint/no-unused-vars + async down({ queryRunner, tablePrefix }: MigrationContext) {} +} diff --git a/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts b/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts new file mode 100644 index 0000000000..3d8943a7a6 --- /dev/null +++ b/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts @@ -0,0 +1,46 @@ +import type { MigrationContext, ReversibleMigration } from '@/databases/types'; + +export class SeparateExecutionData1690000000010 implements ReversibleMigration { + async up({ queryRunner, tablePrefix }: MigrationContext): Promise { + await queryRunner.query( + `CREATE TABLE "${tablePrefix}execution_data" ( + "executionId" int PRIMARY KEY NOT NULL, + "workflowData" text NOT NULL, + "data" text NOT NULL, + FOREIGN KEY("executionId") REFERENCES "${tablePrefix}execution_entity" ("id") ON DELETE CASCADE + )`, + ); + + await queryRunner.query( + `INSERT INTO "${tablePrefix}execution_data" ( + "executionId", + "workflowData", + "data") + SELECT "id", "workflowData", "data" FROM "${tablePrefix}execution_entity" + `, + ); + + await queryRunner.query( + `ALTER TABLE \`${tablePrefix}execution_entity\` DROP COLUMN "workflowData"`, + ); + await queryRunner.query(`ALTER TABLE \`${tablePrefix}execution_entity\` DROP COLUMN "data"`); + } + + async down({ queryRunner, tablePrefix }: MigrationContext): Promise { + await queryRunner.query( + `ALTER TABLE \`${tablePrefix}execution_entity\` ADD COLUMN "workflowData" text NULL`, + ); + await queryRunner.query( + `ALTER TABLE \`${tablePrefix}execution_entity\` ADD COLUMN "data" text NULL`, + ); + + await queryRunner.query( + `UPDATE "${tablePrefix}execution_entity" SET "workflowData" = (SELECT "workflowData" FROM "${tablePrefix}execution_data" WHERE "${tablePrefix}execution_data"."executionId" = "${tablePrefix}execution_entity"."id")`, + ); + await queryRunner.query( + `UPDATE "${tablePrefix}execution_entity" SET "data" = (SELECT "data" FROM "${tablePrefix}execution_data" WHERE "${tablePrefix}execution_data"."executionId" = "${tablePrefix}execution_entity"."id")`, + ); + + await queryRunner.query(`DROP TABLE "${tablePrefix}execution_data"`); + } +} diff --git a/packages/cli/src/databases/migrations/sqlite/index.ts b/packages/cli/src/databases/migrations/sqlite/index.ts index 57f57ba1dc..bedd8cd14a 100644 --- a/packages/cli/src/databases/migrations/sqlite/index.ts +++ b/packages/cli/src/databases/migrations/sqlite/index.ts @@ -35,6 +35,8 @@ import { UpdateRunningExecutionStatus1677237073720 } from './1677237073720-Updat import { CreateExecutionMetadataTable1679416281777 } from './1679416281777-CreateExecutionMetadataTable'; import { CreateVariables1677501636752 } from './1677501636752-CreateVariables'; import { AddUserActivatedProperty1681134145996 } from './1681134145996-AddUserActivatedProperty'; +import { MigrateIntegerKeysToString1690000000002 } from './1690000000002-MigrateIntegerKeysToString'; +import { SeparateExecutionData1690000000010 } from './1690000000010-SeparateExecutionData'; const sqliteMigrations: Migration[] = [ InitialMigration1588102412422, @@ -73,6 +75,8 @@ const sqliteMigrations: Migration[] = [ CreateVariables1677501636752, CreateExecutionMetadataTable1679416281777, AddUserActivatedProperty1681134145996, + MigrateIntegerKeysToString1690000000002, + SeparateExecutionData1690000000010, ]; export { sqliteMigrations }; 
diff --git a/packages/cli/src/databases/repositories/execution.repository.ts b/packages/cli/src/databases/repositories/execution.repository.ts index bc332ed966..6c69d316d2 100644 --- a/packages/cli/src/databases/repositories/execution.repository.ts +++ b/packages/cli/src/databases/repositories/execution.repository.ts @@ -1,10 +1,408 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, In, LessThanOrEqual, MoreThanOrEqual, Repository } from 'typeorm'; +import type { + FindManyOptions, + FindOneOptions, + FindOptionsWhere, + SelectQueryBuilder, +} from 'typeorm'; import { ExecutionEntity } from '../entities/ExecutionEntity'; +import { parse, stringify } from 'flatted'; +import type { + IExecutionBase, + IExecutionDb, + IExecutionFlattedDb, + IExecutionResponse, +} from '@/Interfaces'; +import { LoggerProxy } from 'n8n-workflow'; +import type { IExecutionsSummary, IRunExecutionData } from 'n8n-workflow'; +import { ExecutionDataRepository } from './executionData.repository'; +import type { ExecutionData } from '../entities/ExecutionData'; +import type { IGetExecutionsQueryFilter } from '@/executions/executions.service'; +import { isAdvancedExecutionFiltersEnabled } from '@/executions/executionHelpers'; +import { ExecutionMetadata } from '../entities/ExecutionMetadata'; +import { DateUtils } from 'typeorm/util/DateUtils'; +import { BinaryDataManager } from 'n8n-core'; +import config from '@/config'; + +function parseFiltersToQueryBuilder( + qb: SelectQueryBuilder, + filters?: IGetExecutionsQueryFilter, +) { + if (filters?.status) { + qb.andWhere('execution.status IN (:...workflowStatus)', { + workflowStatus: filters.status, + }); + } + if (filters?.finished) { + qb.andWhere({ finished: filters.finished }); + } + if (filters?.metadata && isAdvancedExecutionFiltersEnabled()) { + qb.leftJoin(ExecutionMetadata, 'md', 'md.executionId = execution.id'); + for (const md of filters.metadata) { + qb.andWhere('md.key = :key AND md.value = :value', md); + } + } + if (filters?.startedAfter) { + qb.andWhere({ + startedAt: MoreThanOrEqual( + DateUtils.mixedDateToUtcDatetimeString(new Date(filters.startedAfter)), + ), + }); + } + if (filters?.startedBefore) { + qb.andWhere({ + startedAt: LessThanOrEqual( + DateUtils.mixedDateToUtcDatetimeString(new Date(filters.startedBefore)), + ), + }); + } + if (filters?.workflowId) { + qb.andWhere({ + workflowId: filters.workflowId, + }); + } +} @Service() export class ExecutionRepository extends Repository { - constructor(dataSource: DataSource) { + private executionDataRepository: ExecutionDataRepository; + + constructor(dataSource: DataSource, executionDataRepository: ExecutionDataRepository) { super(ExecutionEntity, dataSource.manager); + this.executionDataRepository = executionDataRepository; + } + + async findMultipleExecutions( + queryParams: FindManyOptions, + options?: { + unflattenData: true; + includeData?: true; + }, + ): Promise; + async findMultipleExecutions( + queryParams: FindManyOptions, + options?: { + unflattenData?: false | undefined; + includeData?: true; + }, + ): Promise; + async findMultipleExecutions( + queryParams: FindManyOptions, + options?: { + unflattenData?: boolean; + includeData?: boolean; + }, + ): Promise; + async findMultipleExecutions( + queryParams: FindManyOptions, + options?: { + unflattenData?: boolean; + includeData?: boolean; + }, + ): Promise { + if (options?.includeData) { + if (!queryParams.relations) { + queryParams.relations = []; + } + (queryParams.relations as 
string[]).push('executionData'); + } + + const executions = await this.find(queryParams); + + if (options?.includeData && options?.unflattenData) { + return executions.map((execution) => { + const { executionData, ...rest } = execution; + return { + ...rest, + data: parse(executionData.data) as IRunExecutionData, + workflowData: executionData.workflowData, + } as IExecutionResponse; + }); + } else if (options?.includeData) { + return executions.map((execution) => { + const { executionData, ...rest } = execution; + return { + ...rest, + data: execution.executionData.data, + workflowData: execution.executionData.workflowData, + } as IExecutionFlattedDb; + }); + } + + return executions.map((execution) => { + const { executionData, ...rest } = execution; + return rest; + }); + } + + async findSingleExecution( + id: string, + options?: { + includeData: true; + unflattenData: true; + where?: FindOptionsWhere; + }, + ): Promise; + async findSingleExecution( + id: string, + options?: { + includeData: true; + unflattenData?: false | undefined; + where?: FindOptionsWhere; + }, + ): Promise; + async findSingleExecution( + id: string, + options?: { + includeData?: boolean; + unflattenData?: boolean; + where?: FindOptionsWhere; + }, + ): Promise; + async findSingleExecution( + id: string, + options?: { + includeData?: boolean; + unflattenData?: boolean; + where?: FindOptionsWhere; + }, + ): Promise { + const whereClause: FindOneOptions = { + where: { + id, + ...options?.where, + }, + }; + if (options?.includeData) { + whereClause.relations = ['executionData']; + } + + const execution = await this.findOne(whereClause); + + if (!execution) { + return undefined; + } + + const { executionData, ...rest } = execution; + + if (options?.includeData && options?.unflattenData) { + return { + ...rest, + data: parse(execution.executionData.data) as IRunExecutionData, + workflowData: execution.executionData.workflowData, + } as IExecutionResponse; + } else if (options?.includeData) { + return { + ...rest, + data: execution.executionData.data, + workflowData: execution.executionData.workflowData, + } as IExecutionFlattedDb; + } + + return rest; + } + + async createNewExecution(execution: IExecutionDb) { + const { data, workflowData, ...rest } = execution; + + const newExecution = await this.save(rest); + await this.executionDataRepository.save({ + execution: newExecution, + workflowData, + data: stringify(data), + }); + + return newExecution; + } + + async updateExistingExecution(executionId: string, execution: Partial) { + // We isolate startedAt because it must be set when the execution starts and should never change. + // So we prevent it from being updated if it is sent (it usually is, and overwriting it causes + // problems for executions that are resumed after waiting for some time, as a new startedAt would be set) + const { id, data, workflowData, startedAt, ...executionInformation } = execution; + if (Object.keys(executionInformation).length > 0) { + await this.update({ id: executionId }, executionInformation); + } + + if (data || workflowData) { + const executionData: Partial = {}; + if (workflowData) { + executionData.workflowData = workflowData; + } + if (data) { + executionData.data = stringify(data); + } + // @ts-ignore + await this.executionDataRepository.update({ executionId }, executionData); + } + } + + async deleteExecution(executionId: string) { + // TODO: Should this be awaited? Should we add a catch in case it fails? 
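+ // Clean up any binary data stored for this execution before removing the database row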
+ await BinaryDataManager.getInstance().deleteBinaryDataByExecutionId(executionId); + return this.delete({ id: executionId }); + } + + async countExecutions( + filters: IGetExecutionsQueryFilter | undefined, + accessibleWorkflowIds: string[], + currentlyRunningExecutions: string[], + isOwner: boolean, + ): Promise<{ count: number; estimated: boolean }> { + const dbType = config.getEnv('database.type'); + if (dbType !== 'postgresdb' || (filters && Object.keys(filters).length > 0) || !isOwner) { + const query = this.createQueryBuilder('execution').andWhere( + 'execution.workflowId IN (:...accessibleWorkflowIds)', + { accessibleWorkflowIds }, + ); + if (currentlyRunningExecutions.length > 0) { + query.andWhere('execution.id NOT IN (:...currentlyRunningExecutions)', { + currentlyRunningExecutions, + }); + } + + parseFiltersToQueryBuilder(query, filters); + + const count = await query.getCount(); + return { count, estimated: false }; + } + + try { + // Get an estimate of the row count. + const estimateRowsNumberSql = + "SELECT n_live_tup FROM pg_stat_all_tables WHERE relname = 'execution_entity';"; + const rows = (await this.query(estimateRowsNumberSql)) as Array<{ n_live_tup: string }>; + + const estimate = parseInt(rows[0].n_live_tup, 10); + // If the table has over 100k rows, return just the estimate; below that we fall through + // and get the real count, as even a full table scan should not take long. + if (estimate > 100_000) { + return { count: estimate, estimated: true }; + } + } catch (error) { + if (error instanceof Error) { + LoggerProxy.warn(`Failed to get executions count from Postgres: ${error.message}`, { + error, + }); + } + } + + const count = await this.count({ + where: { + workflowId: In(accessibleWorkflowIds), + }, + }); + + return { count, estimated: false }; + } + + async searchExecutions( + filters: IGetExecutionsQueryFilter | undefined, + limit: number, + excludedExecutionIds: string[], + accessibleWorkflowIds: string[], + additionalFilters?: { lastId?: string; firstId?: string }, + ): Promise { + if (accessibleWorkflowIds.length === 0) { + return []; + } + const query = this.createQueryBuilder('execution') + .select([ + 'execution.id', + 'execution.finished', + 'execution.mode', + 'execution.retryOf', + 'execution.retrySuccessId', + 'execution.status', + 'execution.startedAt', + 'execution.stoppedAt', + 'execution.workflowId', + 'execution.waitTill', + 'workflow.name', + ]) + .innerJoin('execution.workflow', 'workflow') + .limit(limit) + // eslint-disable-next-line @typescript-eslint/naming-convention + .orderBy({ 'execution.id': 'DESC' }) + .andWhere('execution.workflowId IN (:...accessibleWorkflowIds)', { accessibleWorkflowIds }); + + if (excludedExecutionIds.length > 0) { + query.andWhere('execution.id NOT IN (:...excludedExecutionIds)', { excludedExecutionIds }); + } + + if (additionalFilters?.lastId) { + query.andWhere('execution.id < :lastId', { lastId: additionalFilters.lastId }); + } + if (additionalFilters?.firstId) { + query.andWhere('execution.id > :firstId', { firstId: additionalFilters.firstId }); + } + + parseFiltersToQueryBuilder(query, filters); + + const executions = await query.getMany(); + + return executions.map((execution) => { + const { workflow, waitTill, ...rest } = execution; + return { + ...rest, + waitTill: waitTill ?? 
undefined, + workflowName: workflow.name, + }; + }); + } + + async deleteExecutions( + filters: IGetExecutionsQueryFilter | undefined, + accessibleWorkflowIds: string[], + deleteConditions: { + deleteBefore?: Date; + ids?: string[]; + }, + ) { + if (!deleteConditions?.deleteBefore && !deleteConditions?.ids) { + throw new Error('Either "deleteBefore" or "ids" must be present in the request body'); + } + + const query = this.createQueryBuilder('execution') + .select(['execution.id']) + .andWhere('execution.workflowId IN (:...accessibleWorkflowIds)', { accessibleWorkflowIds }); + + if (deleteConditions.deleteBefore) { + // delete executions by date, if user may access the underlying workflows + query.andWhere('execution.startedAt <= :deleteBefore', { + deleteBefore: deleteConditions.deleteBefore, + }); + // Filters are only used when filtering by date + parseFiltersToQueryBuilder(query, filters); + } else if (deleteConditions.ids) { + // delete executions by IDs, if user may access the underlying workflows + query.andWhere('execution.id IN (:...executionIds)', { executionIds: deleteConditions.ids }); + } + + const executions = await query.getMany(); + + if (!executions.length) { + if (deleteConditions.ids) { + LoggerProxy.error('Failed to delete an execution due to insufficient permissions', { + executionIds: deleteConditions.ids, + }); + } + return; + } + + const idsToDelete = executions.map(({ id }) => id); + + const binaryDataManager = BinaryDataManager.getInstance(); + await Promise.all( + idsToDelete.map(async (id) => binaryDataManager.deleteBinaryDataByExecutionId(id)), + ); + + do { + // Delete in batches to avoid "SQLITE_ERROR: Expression tree is too large (maximum depth 1000)" error + const batch = idsToDelete.splice(0, 500); + await this.delete(batch); + } while (idsToDelete.length > 0); } } diff --git a/packages/cli/src/databases/repositories/executionData.repository.ts b/packages/cli/src/databases/repositories/executionData.repository.ts new file mode 100644 index 0000000000..869267b86a --- /dev/null +++ b/packages/cli/src/databases/repositories/executionData.repository.ts @@ -0,0 +1,10 @@ +import { Service } from 'typedi'; +import { DataSource, Repository } from 'typeorm'; +import { ExecutionData } from '../entities/ExecutionData'; + +@Service() +export class ExecutionDataRepository extends Repository { + constructor(dataSource: DataSource) { + super(ExecutionData, dataSource.manager); + } +} diff --git a/packages/cli/src/databases/repositories/index.ts b/packages/cli/src/databases/repositories/index.ts index 04a11369c6..21d78f6ceb 100644 --- a/packages/cli/src/databases/repositories/index.ts +++ b/packages/cli/src/databases/repositories/index.ts @@ -2,6 +2,7 @@ export { AuthIdentityRepository } from './authIdentity.repository'; export { AuthProviderSyncHistoryRepository } from './authProviderSyncHistory.repository'; export { CredentialsRepository } from './credentials.repository'; export { EventDestinationsRepository } from './eventDestinations.repository'; +export { ExecutionDataRepository } from './executionData.repository'; export { ExecutionMetadataRepository } from './executionMetadata.repository'; export { ExecutionRepository } from './execution.repository'; export { InstalledNodesRepository } from './installedNodes.repository'; diff --git a/packages/cli/src/databases/utils/generators.ts b/packages/cli/src/databases/utils/generators.ts new file mode 100644 index 0000000000..bf257a3571 --- /dev/null +++ b/packages/cli/src/databases/utils/generators.ts @@ -0,0 +1,6 @@ 
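+// Generates 16-character alphanumeric nanoids, used as the new string primary keys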
+import { customAlphabet } from 'nanoid'; +const nanoid = customAlphabet('0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', 16); + +export function generateNanoId() { + return nanoid(); +} diff --git a/packages/cli/src/environments/sourceControl/constants.ts b/packages/cli/src/environments/sourceControl/constants.ts new file mode 100644 index 0000000000..c3f8c2390d --- /dev/null +++ b/packages/cli/src/environments/sourceControl/constants.ts @@ -0,0 +1,15 @@ +export const SOURCE_CONTROL_PREFERENCES_DB_KEY = 'features.sourceControl'; +export const SOURCE_CONTROL_GIT_FOLDER = 'git'; +export const SOURCE_CONTROL_GIT_KEY_COMMENT = 'n8n deploy key'; +export const SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER = 'workflows'; +export const SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER = 'credentials'; +export const SOURCE_CONTROL_VARIABLES_EXPORT_FILE = 'variables.json'; +export const SOURCE_CONTROL_TAGS_EXPORT_FILE = 'tags.json'; +export const SOURCE_CONTROL_SSH_FOLDER = 'ssh'; +export const SOURCE_CONTROL_SSH_KEY_NAME = 'key'; +export const SOURCE_CONTROL_DEFAULT_BRANCH = 'main'; +export const SOURCE_CONTROL_ORIGIN = 'origin'; +export const SOURCE_CONTROL_API_ROOT = 'source-control'; +export const SOURCE_CONTROL_README = ` +# n8n Source Control +`; diff --git a/packages/cli/src/environments/sourceControl/middleware/sourceControlEnabledMiddleware.ee.ts b/packages/cli/src/environments/sourceControl/middleware/sourceControlEnabledMiddleware.ee.ts new file mode 100644 index 0000000000..940d10ad02 --- /dev/null +++ b/packages/cli/src/environments/sourceControl/middleware/sourceControlEnabledMiddleware.ee.ts @@ -0,0 +1,21 @@ +import type { RequestHandler } from 'express'; +import { isSourceControlLicensed } from '../sourceControlHelper.ee'; +import Container from 'typedi'; +import { SourceControlPreferencesService } from '../sourceControlPreferences.service.ee'; + +export const sourceControlLicensedAndEnabledMiddleware: RequestHandler = (req, res, next) => { + const sourceControlPreferencesService = Container.get(SourceControlPreferencesService); + if (sourceControlPreferencesService.isSourceControlLicensedAndEnabled()) { + next(); + } else { + res.status(401).json({ status: 'error', message: 'Unauthorized' }); + } +}; + +export const sourceControlLicensedMiddleware: RequestHandler = (req, res, next) => { + if (isSourceControlLicensed()) { + next(); + } else { + res.status(401).json({ status: 'error', message: 'Unauthorized' }); + } +}; diff --git a/packages/cli/src/environments/sourceControl/sourceControl.controller.ee.ts b/packages/cli/src/environments/sourceControl/sourceControl.controller.ee.ts new file mode 100644 index 0000000000..7907e63127 --- /dev/null +++ b/packages/cli/src/environments/sourceControl/sourceControl.controller.ee.ts @@ -0,0 +1,235 @@ +import { Authorized, Get, Post, Patch, RestController } from '@/decorators'; +import { + sourceControlLicensedMiddleware, + sourceControlLicensedAndEnabledMiddleware, +} from './middleware/sourceControlEnabledMiddleware.ee'; +import { SourceControlService } from './sourceControl.service.ee'; +import { SourceControlRequest } from './types/requests'; +import type { SourceControlPreferences } from './types/sourceControlPreferences'; +import { BadRequestError } from '@/ResponseHelper'; +import type { PullResult, PushResult, StatusResult } from 'simple-git'; +import express from 'express'; +import type { ImportResult } from './types/importResult'; +import { SourceControlPreferencesService } from './sourceControlPreferences.service.ee'; +import type { 
SourceControlledFile } from './types/sourceControlledFile'; +import { SOURCE_CONTROL_API_ROOT, SOURCE_CONTROL_DEFAULT_BRANCH } from './constants'; + +@RestController(`/${SOURCE_CONTROL_API_ROOT}`) +export class SourceControlController { + constructor( + private sourceControlService: SourceControlService, + private sourceControlPreferencesService: SourceControlPreferencesService, + ) {} + + @Authorized('any') + @Get('/preferences', { middlewares: [sourceControlLicensedMiddleware] }) + async getPreferences(): Promise { + // returns the settings with the privateKey property redacted + return this.sourceControlPreferencesService.getPreferences(); + } + + @Authorized(['global', 'owner']) + @Post('/preferences', { middlewares: [sourceControlLicensedMiddleware] }) + async setPreferences(req: SourceControlRequest.UpdatePreferences) { + if ( + req.body.branchReadOnly === undefined && + this.sourceControlPreferencesService.isSourceControlConnected() + ) { + throw new BadRequestError( + 'Cannot change preferences while connected to a source control provider. Please disconnect first.', + ); + } + try { + const sanitizedPreferences: Partial = { + ...req.body, + initRepo: req.body.initRepo ?? true, // default to true if not specified + connected: undefined, + publicKey: undefined, + }; + await this.sourceControlPreferencesService.validateSourceControlPreferences( + sanitizedPreferences, + ); + const updatedPreferences = await this.sourceControlPreferencesService.setPreferences( + sanitizedPreferences, + ); + if (sanitizedPreferences.initRepo === true) { + try { + await this.sourceControlService.initializeRepository({ + ...updatedPreferences, + branchName: + updatedPreferences.branchName === '' + ? SOURCE_CONTROL_DEFAULT_BRANCH + : updatedPreferences.branchName, + initRepo: true, + }); + if (this.sourceControlPreferencesService.getPreferences().branchName !== '') { + await this.sourceControlPreferencesService.setPreferences({ + connected: true, + }); + } + } catch (error) { + // if initialization fails, run cleanup to remove any intermediate state and throw the error + await this.sourceControlService.disconnect({ keepKeyPair: true }); + throw error; + } + } + await this.sourceControlService.init(); + return this.sourceControlPreferencesService.getPreferences(); + } catch (error) { + throw new BadRequestError((error as { message: string }).message); + } + } + + @Authorized(['global', 'owner']) + @Patch('/preferences', { middlewares: [sourceControlLicensedMiddleware] }) + async updatePreferences(req: SourceControlRequest.UpdatePreferences) { + try { + const sanitizedPreferences: Partial = { + ...req.body, + initRepo: false, + connected: undefined, + publicKey: undefined, + repositoryUrl: undefined, + authorName: undefined, + authorEmail: undefined, + }; + const currentPreferences = this.sourceControlPreferencesService.getPreferences(); + await this.sourceControlPreferencesService.validateSourceControlPreferences( + sanitizedPreferences, + ); + if ( + sanitizedPreferences.branchName && + sanitizedPreferences.branchName !== currentPreferences.branchName + ) { + await this.sourceControlService.setBranch(sanitizedPreferences.branchName); + } + if (sanitizedPreferences.branchColor || sanitizedPreferences.branchReadOnly !== undefined) { + await this.sourceControlPreferencesService.setPreferences( + { + branchColor: sanitizedPreferences.branchColor, + branchReadOnly: sanitizedPreferences.branchReadOnly, + }, + true, + ); + } + await this.sourceControlService.init(); + return 
this.sourceControlPreferencesService.getPreferences(); + } catch (error) { + throw new BadRequestError((error as { message: string }).message); + } + } + + @Authorized(['global', 'owner']) + @Post('/disconnect', { middlewares: [sourceControlLicensedMiddleware] }) + async disconnect(req: SourceControlRequest.Disconnect) { + try { + return await this.sourceControlService.disconnect(req.body); + } catch (error) { + throw new BadRequestError((error as { message: string }).message); + } + } + + @Authorized('any') + @Get('/get-branches', { middlewares: [sourceControlLicensedMiddleware] }) + async getBranches() { + try { + return await this.sourceControlService.getBranches(); + } catch (error) { + throw new BadRequestError((error as { message: string }).message); + } + } + + @Authorized(['global', 'owner']) + @Post('/push-workfolder', { middlewares: [sourceControlLicensedAndEnabledMiddleware] }) + async pushWorkfolder( + req: SourceControlRequest.PushWorkFolder, + res: express.Response, + ): Promise { + if (this.sourceControlPreferencesService.isBranchReadOnly()) { + throw new BadRequestError('Cannot push onto read-only branch.'); + } + try { + const result = await this.sourceControlService.pushWorkfolder(req.body); + if ((result as PushResult).pushed) { + res.statusCode = 200; + } else { + res.statusCode = 409; + } + return result; + } catch (error) { + throw new BadRequestError((error as { message: string }).message); + } + } + + @Authorized(['global', 'owner']) + @Post('/pull-workfolder', { middlewares: [sourceControlLicensedAndEnabledMiddleware] }) + async pullWorkfolder( + req: SourceControlRequest.PullWorkFolder, + res: express.Response, + ): Promise { + try { + const result = await this.sourceControlService.pullWorkfolder({ + force: req.body.force, + variables: req.body.variables, + userId: req.user.id, + importAfterPull: req.body.importAfterPull ?? true, + }); + if ((result as ImportResult)?.workflows) { + res.statusCode = 200; + } else { + res.statusCode = 409; + } + return result; + } catch (error) { + throw new BadRequestError((error as { message: string }).message); + } + } + + @Authorized(['global', 'owner']) + @Get('/reset-workfolder', { middlewares: [sourceControlLicensedAndEnabledMiddleware] }) + async resetWorkfolder( + req: SourceControlRequest.PullWorkFolder, + ): Promise { + try { + return await this.sourceControlService.resetWorkfolder({ + force: req.body.force, + variables: req.body.variables, + userId: req.user.id, + importAfterPull: req.body.importAfterPull ?? 
true, + }); + } catch (error) { + throw new BadRequestError((error as { message: string }).message); + } + } + + @Authorized('any') + @Get('/get-status', { middlewares: [sourceControlLicensedAndEnabledMiddleware] }) + async getStatus() { + try { + return await this.sourceControlService.getStatus(); + } catch (error) { + throw new BadRequestError((error as { message: string }).message); + } + } + + @Authorized('any') + @Get('/status', { middlewares: [sourceControlLicensedMiddleware] }) + async status(): Promise { + try { + return await this.sourceControlService.status(); + } catch (error) { + throw new BadRequestError((error as { message: string }).message); + } + } + + @Authorized(['global', 'owner']) + @Post('/generate-key-pair', { middlewares: [sourceControlLicensedMiddleware] }) + async generateKeyPair(): Promise { + try { + const result = await this.sourceControlPreferencesService.generateAndSaveKeyPair(); + return result; + } catch (error) { + throw new BadRequestError((error as { message: string }).message); + } + } +} diff --git a/packages/cli/src/environments/versionControl/versionControl.service.ee.ts b/packages/cli/src/environments/sourceControl/sourceControl.service.ee.ts similarity index 59% rename from packages/cli/src/environments/versionControl/versionControl.service.ee.ts rename to packages/cli/src/environments/sourceControl/sourceControl.service.ee.ts index a2c2a30709..d1c5a3ccf6 100644 --- a/packages/cli/src/environments/versionControl/versionControl.service.ee.ts +++ b/packages/cli/src/environments/sourceControl/sourceControl.service.ee.ts @@ -1,45 +1,39 @@ import { Service } from 'typedi'; import path from 'path'; import * as Db from '@/Db'; -import { versionControlFoldersExistCheck } from './versionControlHelper.ee'; -import type { VersionControlPreferences } from './types/versionControlPreferences'; +import { sourceControlFoldersExistCheck } from './sourceControlHelper.ee'; +import type { SourceControlPreferences } from './types/sourceControlPreferences'; import { - VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER, - VERSION_CONTROL_GIT_FOLDER, - VERSION_CONTROL_README, - VERSION_CONTROL_SSH_FOLDER, - VERSION_CONTROL_SSH_KEY_NAME, - VERSION_CONTROL_TAGS_EXPORT_FILE, - VERSION_CONTROL_VARIABLES_EXPORT_FILE, - VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER, + SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, + SOURCE_CONTROL_GIT_FOLDER, + SOURCE_CONTROL_README, + SOURCE_CONTROL_SSH_FOLDER, + SOURCE_CONTROL_SSH_KEY_NAME, + SOURCE_CONTROL_TAGS_EXPORT_FILE, + SOURCE_CONTROL_VARIABLES_EXPORT_FILE, + SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER, } from './constants'; import { LoggerProxy } from 'n8n-workflow'; -import { VersionControlGitService } from './versionControlGit.service.ee'; +import { SourceControlGitService } from './sourceControlGit.service.ee'; import { UserSettings } from 'n8n-core'; -import type { - CommitResult, - DiffResult, - FetchResult, - PullResult, - PushResult, - StatusResult, -} from 'simple-git'; +import type { PushResult, StatusResult } from 'simple-git'; import type { ExportResult } from './types/exportResult'; -import { VersionControlExportService } from './versionControlExport.service.ee'; +import { SourceControlExportService } from './sourceControlExport.service.ee'; import { BadRequestError } from '../../ResponseHelper'; import type { ImportResult } from './types/importResult'; -import type { VersionControlPushWorkFolder } from './types/versionControlPushWorkFolder'; -import type { VersionControllPullOptions } from './types/versionControlPullWorkFolder'; +import type { 
SourceControlPushWorkFolder } from './types/sourceControlPushWorkFolder'; +import type { SourceControllPullOptions } from './types/sourceControlPullWorkFolder'; import type { - VersionControlledFileLocation, - VersionControlledFile, - VersionControlledFileStatus, - VersionControlledFileType, -} from './types/versionControlledFile'; -import { VersionControlPreferencesService } from './versionControlPreferences.service.ee'; + SourceControlledFileLocation, + SourceControlledFile, + SourceControlledFileStatus, + SourceControlledFileType, +} from './types/sourceControlledFile'; +import { SourceControlPreferencesService } from './sourceControlPreferences.service.ee'; import { writeFileSync } from 'fs'; +import { SourceControlImportService } from './sourceControlImport.service.ee'; @Service() -export class VersionControlService { +export class SourceControlService { private sshKeyName: string; private sshFolder: string; @@ -47,22 +41,23 @@ export class VersionControlService { private gitFolder: string; constructor( - private gitService: VersionControlGitService, - private versionControlPreferencesService: VersionControlPreferencesService, - private versionControlExportService: VersionControlExportService, + private gitService: SourceControlGitService, + private sourceControlPreferencesService: SourceControlPreferencesService, + private sourceControlExportService: SourceControlExportService, + private sourceControlImportService: SourceControlImportService, ) { const userFolder = UserSettings.getUserN8nFolderPath(); - this.sshFolder = path.join(userFolder, VERSION_CONTROL_SSH_FOLDER); - this.gitFolder = path.join(userFolder, VERSION_CONTROL_GIT_FOLDER); - this.sshKeyName = path.join(this.sshFolder, VERSION_CONTROL_SSH_KEY_NAME); + this.sshFolder = path.join(userFolder, SOURCE_CONTROL_SSH_FOLDER); + this.gitFolder = path.join(userFolder, SOURCE_CONTROL_GIT_FOLDER); + this.sshKeyName = path.join(this.sshFolder, SOURCE_CONTROL_SSH_KEY_NAME); } async init(): Promise { this.gitService.resetService(); - versionControlFoldersExistCheck([this.gitFolder, this.sshFolder]); - await this.versionControlPreferencesService.loadFromDbAndApplyVersionControlPreferences(); + sourceControlFoldersExistCheck([this.gitFolder, this.sshFolder]); + await this.sourceControlPreferencesService.loadFromDbAndApplySourceControlPreferences(); await this.gitService.initService({ - versionControlPreferences: this.versionControlPreferencesService.getPreferences(), + sourceControlPreferences: this.sourceControlPreferencesService.getPreferences(), gitFolder: this.gitFolder, sshKeyName: this.sshKeyName, sshFolder: this.sshFolder, @@ -71,22 +66,22 @@ export class VersionControlService { async disconnect(options: { keepKeyPair?: boolean } = {}) { try { - await this.versionControlPreferencesService.setPreferences({ + await this.sourceControlPreferencesService.setPreferences({ connected: false, branchName: '', }); - await this.versionControlExportService.deleteRepositoryFolder(); + await this.sourceControlExportService.deleteRepositoryFolder(); if (!options.keepKeyPair) { - await this.versionControlPreferencesService.deleteKeyPairFiles(); + await this.sourceControlPreferencesService.deleteKeyPairFiles(); } this.gitService.resetService(); - return this.versionControlPreferencesService.versionControlPreferences; + return this.sourceControlPreferencesService.sourceControlPreferences; } catch (error) { - throw Error(`Failed to disconnect from version control: ${(error as Error).message}`); + throw Error(`Failed to disconnect from source 
control: ${(error as Error).message}`); } } - async initializeRepository(preferences: VersionControlPreferences) { + async initializeRepository(preferences: SourceControlPreferences) { if (!this.gitService.git) { await this.init(); } @@ -108,7 +103,7 @@ export class VersionControlService { } else { if (getBranchesResult.branches?.length === 0) { try { - writeFileSync(path.join(this.gitFolder, '/README.md'), VERSION_CONTROL_README); + writeFileSync(path.join(this.gitFolder, '/README.md'), SOURCE_CONTROL_README); await this.gitService.stage(new Set(['README.md'])); await this.gitService.commit('Initial commit'); @@ -121,7 +116,7 @@ export class VersionControlService { LoggerProxy.error(`Failed to create initial commit: ${(fileError as Error).message}`); } } else { - await this.versionControlPreferencesService.setPreferences({ + await this.sourceControlPreferencesService.setPreferences({ branchName: '', connected: true, }); @@ -144,20 +139,20 @@ export class VersionControlService { }; try { // comment next line if needed - await this.versionControlExportService.cleanWorkFolder(); - result.tags = await this.versionControlExportService.exportTagsToWorkFolder(); - result.variables = await this.versionControlExportService.exportVariablesToWorkFolder(); - result.workflows = await this.versionControlExportService.exportWorkflowsToWorkFolder(); - result.credentials = await this.versionControlExportService.exportCredentialsToWorkFolder(); + await this.sourceControlExportService.cleanWorkFolder(); + result.tags = await this.sourceControlExportService.exportTagsToWorkFolder(); + result.variables = await this.sourceControlExportService.exportVariablesToWorkFolder(); + result.workflows = await this.sourceControlExportService.exportWorkflowsToWorkFolder(); + result.credentials = await this.sourceControlExportService.exportCredentialsToWorkFolder(); } catch (error) { throw new BadRequestError((error as { message: string }).message); } return result; } - async import(options: VersionControllPullOptions): Promise { + async import(options: SourceControllPullOptions): Promise { try { - return await this.versionControlExportService.importFromWorkFolder(options); + return await this.sourceControlImportService.importFromWorkFolder(options); } catch (error) { throw new BadRequestError((error as { message: string }).message); } @@ -170,7 +165,7 @@ export class VersionControlService { } async setBranch(branch: string): Promise<{ branches: string[]; currentBranch: string }> { - await this.versionControlPreferencesService.setPreferences({ + await this.sourceControlPreferencesService.setPreferences({ branchName: branch, connected: branch?.length > 0, }); @@ -179,9 +174,9 @@ export class VersionControlService { // will reset the branch to the remote branch and pull // this will discard all local changes - async resetWorkfolder(options: VersionControllPullOptions): Promise { + async resetWorkfolder(options: SourceControllPullOptions): Promise { const currentBranch = await this.gitService.getCurrentBranch(); - await this.versionControlExportService.cleanWorkFolder(); + await this.sourceControlExportService.cleanWorkFolder(); await this.gitService.resetBranch({ hard: true, target: currentBranch.remote, @@ -194,9 +189,9 @@ export class VersionControlService { } async pushWorkfolder( - options: VersionControlPushWorkFolder, - ): Promise { - if (this.versionControlPreferencesService.isBranchReadOnly()) { + options: SourceControlPushWorkFolder, + ): Promise { + if (this.sourceControlPreferencesService.isBranchReadOnly()) { 
throw new BadRequestError('Cannot push onto read-only branch.'); } if (!options.skipDiff) { @@ -211,13 +206,13 @@ export class VersionControlService { await this.stage(options); await this.gitService.commit(options.message ?? 'Updated Workfolder'); return this.gitService.push({ - branch: this.versionControlPreferencesService.getBranchName(), + branch: this.sourceControlPreferencesService.getBranchName(), force: options.force ?? false, }); } async pullWorkfolder( - options: VersionControllPullOptions, + options: SourceControllPullOptions, ): Promise { await this.resetWorkfolder({ importAfterPull: false, @@ -238,17 +233,17 @@ export class VersionControlService { } async stage( - options: Pick, + options: Pick, ): Promise<{ staged: string[] } | string> { const { fileNames, credentialIds, workflowIds } = options; const status = await this.gitService.status(); let mergedFileNames = new Set(); fileNames?.forEach((e) => mergedFileNames.add(e)); credentialIds?.forEach((e) => - mergedFileNames.add(this.versionControlExportService.getCredentialsPath(e)), + mergedFileNames.add(this.sourceControlExportService.getCredentialsPath(e)), ); workflowIds?.forEach((e) => - mergedFileNames.add(this.versionControlExportService.getWorkflowPath(e)), + mergedFileNames.add(this.sourceControlExportService.getWorkflowPath(e)), ); if (mergedFileNames.size === 0) { mergedFileNames = new Set([ @@ -280,16 +275,16 @@ export class VersionControlService { return this.gitService.status(); } - private async fileNameToVersionControlledFile( + private async fileNameToSourceControlledFile( fileName: string, - location: VersionControlledFileLocation, + location: SourceControlledFileLocation, statusResult: StatusResult, - ): Promise { + ): Promise { let id: string | undefined = undefined; let name = ''; let conflict = false; - let status: VersionControlledFileStatus = 'unknown'; - let type: VersionControlledFileType = 'file'; + let status: SourceControlledFileStatus = 'unknown'; + let type: SourceControlledFileType = 'file'; // initialize status from git status result if (statusResult.not_added.find((e) => e === fileName)) status = 'new'; @@ -300,11 +295,11 @@ export class VersionControlService { else if (statusResult.deleted.find((e) => e === fileName)) status = 'deleted'; else if (statusResult.modified.find((e) => e === fileName)) status = 'modified'; - if (fileName.startsWith(VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER)) { + if (fileName.startsWith(SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER)) { type = 'workflow'; if (status === 'deleted') { id = fileName - .replace(VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER, '') + .replace(SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER, '') .replace(/[\/,\\]/, '') .replace('.json', ''); if (location === 'remote') { @@ -318,13 +313,13 @@ export class VersionControlService { name = '(deleted)'; } } else { - const workflow = await this.versionControlExportService.getWorkflowFromFile(fileName); + const workflow = await this.sourceControlExportService.getWorkflowFromFile(fileName); if (!workflow?.id) { if (location === 'local') { return; } id = fileName - .replace(VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER + '/', '') + .replace(SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER + '/', '') .replace('.json', ''); status = 'created'; } else { @@ -333,11 +328,11 @@ export class VersionControlService { } } } - if (fileName.startsWith(VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER)) { + if (fileName.startsWith(SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER)) { type = 'credential'; if (status === 'deleted') { id = fileName - 
.replace(VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER, '') + .replace(SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, '') .replace(/[\/,\\]/, '') .replace('.json', ''); if (location === 'remote') { @@ -351,13 +346,13 @@ export class VersionControlService { name = '(deleted)'; } } else { - const credential = await this.versionControlExportService.getCredentialFromFile(fileName); + const credential = await this.sourceControlExportService.getCredentialFromFile(fileName); if (!credential?.id) { if (location === 'local') { return; } id = fileName - .replace(VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER + '/', '') + .replace(SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER + '/', '') .replace('.json', ''); status = 'created'; } else { @@ -367,13 +362,13 @@ export class VersionControlService { } } - if (fileName.startsWith(VERSION_CONTROL_VARIABLES_EXPORT_FILE)) { + if (fileName.startsWith(SOURCE_CONTROL_VARIABLES_EXPORT_FILE)) { id = 'variables'; name = 'variables'; type = 'variables'; } - if (fileName.startsWith(VERSION_CONTROL_TAGS_EXPORT_FILE)) { + if (fileName.startsWith(SOURCE_CONTROL_TAGS_EXPORT_FILE)) { id = 'tags'; name = 'tags'; type = 'tags'; @@ -392,29 +387,29 @@ export class VersionControlService { }; } - async getStatus(): Promise { + async getStatus(): Promise { await this.export(); await this.stage({}); await this.gitService.fetch(); - const versionControlledFiles: VersionControlledFile[] = []; + const sourceControlledFiles: SourceControlledFile[] = []; const diffRemote = await this.gitService.diffRemote(); const diffLocal = await this.gitService.diffLocal(); const status = await this.gitService.status(); await Promise.all([ ...(diffRemote?.files.map(async (e) => { - const resolvedFile = await this.fileNameToVersionControlledFile(e.file, 'remote', status); + const resolvedFile = await this.fileNameToSourceControlledFile(e.file, 'remote', status); if (resolvedFile) { - versionControlledFiles.push(resolvedFile); + sourceControlledFiles.push(resolvedFile); } }) ?? []), ...(diffLocal?.files.map(async (e) => { - const resolvedFile = await this.fileNameToVersionControlledFile(e.file, 'local', status); + const resolvedFile = await this.fileNameToSourceControlledFile(e.file, 'local', status); if (resolvedFile) { - versionControlledFiles.push(resolvedFile); + sourceControlledFiles.push(resolvedFile); } }) ?? []), ]); - versionControlledFiles.forEach((e, index, array) => { + sourceControlledFiles.forEach((e, index, array) => { const similarItems = array.filter( (f) => f.type === e.type && (f.file === e.file || f.id === e.id), ); @@ -424,34 +419,6 @@ export class VersionControlService { }); } }); - return versionControlledFiles; + return sourceControlledFiles; } - - // #region Version Control Test Functions - //TODO: SEPARATE FUNCTIONS FOR DEVELOPMENT ONLY - //TODO: REMOVE THESE FUNCTIONS AFTER TESTING - - async commit(message?: string): Promise { - return this.gitService.commit(message ?? 
'Updated Workfolder'); - } - - async fetch(): Promise { - return this.gitService.fetch(); - } - - async diff(): Promise { - return this.gitService.diff(); - } - - async pull(): Promise { - return this.gitService.pull(); - } - - async push(force = false): Promise { - return this.gitService.push({ - branch: this.versionControlPreferencesService.getBranchName(), - force, - }); - } - // #endregion } diff --git a/packages/cli/src/environments/sourceControl/sourceControlExport.service.ee.ts b/packages/cli/src/environments/sourceControl/sourceControlExport.service.ee.ts new file mode 100644 index 0000000000..1352c8c7b8 --- /dev/null +++ b/packages/cli/src/environments/sourceControl/sourceControlExport.service.ee.ts @@ -0,0 +1,336 @@ +import { Service } from 'typedi'; +import path from 'path'; +import { + SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, + SOURCE_CONTROL_GIT_FOLDER, + SOURCE_CONTROL_TAGS_EXPORT_FILE, + SOURCE_CONTROL_VARIABLES_EXPORT_FILE, + SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER, +} from './constants'; +import * as Db from '@/Db'; +import glob from 'fast-glob'; +import type { ICredentialDataDecryptedObject } from 'n8n-workflow'; +import { LoggerProxy, jsonParse } from 'n8n-workflow'; +import { writeFile as fsWriteFile, readFile as fsReadFile, rm as fsRm } from 'fs/promises'; +import { Credentials, UserSettings } from 'n8n-core'; +import type { IWorkflowToImport } from '@/Interfaces'; +import type { ExportableWorkflow } from './types/exportableWorkflow'; +import type { ExportableCredential } from './types/exportableCredential'; +import type { ExportResult } from './types/exportResult'; +import type { SharedWorkflow } from '@/databases/entities/SharedWorkflow'; +import { sourceControlFoldersExistCheck } from './sourceControlHelper.ee'; + +@Service() +export class SourceControlExportService { + private gitFolder: string; + + private workflowExportFolder: string; + + private credentialExportFolder: string; + + constructor() { + const userFolder = UserSettings.getUserN8nFolderPath(); + this.gitFolder = path.join(userFolder, SOURCE_CONTROL_GIT_FOLDER); + this.workflowExportFolder = path.join(this.gitFolder, SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER); + this.credentialExportFolder = path.join( + this.gitFolder, + SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, + ); + } + + getWorkflowPath(workflowId: string): string { + return path.join(this.workflowExportFolder, `${workflowId}.json`); + } + + getCredentialsPath(credentialsId: string): string { + return path.join(this.credentialExportFolder, `${credentialsId}.json`); + } + + getTagsPath(): string { + return path.join(this.gitFolder, SOURCE_CONTROL_TAGS_EXPORT_FILE); + } + + getVariablesPath(): string { + return path.join(this.gitFolder, SOURCE_CONTROL_VARIABLES_EXPORT_FILE); + } + + async getWorkflowFromFile( + filePath: string, + root = this.gitFolder, + ): Promise { + try { + const importedWorkflow = jsonParse( + await fsReadFile(path.join(root, filePath), { encoding: 'utf8' }), + ); + return importedWorkflow; + } catch (error) { + return undefined; + } + } + + async getCredentialFromFile( + filePath: string, + root = this.gitFolder, + ): Promise { + try { + const credential = jsonParse( + await fsReadFile(path.join(root, filePath), { encoding: 'utf8' }), + ); + return credential; + } catch (error) { + return undefined; + } + } + + async cleanWorkFolder() { + try { + const workflowFiles = await glob('*.json', { + cwd: this.workflowExportFolder, + absolute: true, + }); + const credentialFiles = await glob('*.json', { + cwd: this.credentialExportFolder, + 
absolute: true, + }); + const variablesFile = await glob(SOURCE_CONTROL_VARIABLES_EXPORT_FILE, { + cwd: this.gitFolder, + absolute: true, + }); + const tagsFile = await glob(SOURCE_CONTROL_TAGS_EXPORT_FILE, { + cwd: this.gitFolder, + absolute: true, + }); + await Promise.all(tagsFile.map(async (e) => fsRm(e))); + await Promise.all(variablesFile.map(async (e) => fsRm(e))); + await Promise.all(workflowFiles.map(async (e) => fsRm(e))); + await Promise.all(credentialFiles.map(async (e) => fsRm(e))); + LoggerProxy.debug('Cleaned work folder.'); + } catch (error) { + LoggerProxy.error(`Failed to clean work folder: ${(error as Error).message}`); + } + } + + async deleteRepositoryFolder() { + try { + await fsRm(this.gitFolder, { recursive: true }); + } catch (error) { + LoggerProxy.error(`Failed to delete work folder: ${(error as Error).message}`); + } + } + + private async rmDeletedWorkflowsFromExportFolder( + workflowsToBeExported: SharedWorkflow[], + ): Promise> { + const sharedWorkflowsFileNames = new Set( + workflowsToBeExported.map((e) => this.getWorkflowPath(e?.workflow?.name)), + ); + const existingWorkflowsInFolder = new Set( + await glob('*.json', { + cwd: this.workflowExportFolder, + absolute: true, + }), + ); + const deletedWorkflows = new Set(existingWorkflowsInFolder); + for (const elem of sharedWorkflowsFileNames) { + deletedWorkflows.delete(elem); + } + try { + await Promise.all([...deletedWorkflows].map(async (e) => fsRm(e))); + } catch (error) { + LoggerProxy.error(`Failed to delete workflows from work folder: ${(error as Error).message}`); + } + return deletedWorkflows; + } + + private async writeExportableWorkflowsToExportFolder(workflowsToBeExported: SharedWorkflow[]) { + await Promise.all( + workflowsToBeExported.map(async (e) => { + if (!e.workflow) { + LoggerProxy.debug( + `Found no corresponding workflow ${e.workflowId ?? 
'unknown'}, skipping export`, + ); + return; + } + const fileName = this.getWorkflowPath(e.workflow?.id); + const sanitizedWorkflow: ExportableWorkflow = { + active: e.workflow?.active, + id: e.workflow?.id, + name: e.workflow?.name, + nodes: e.workflow?.nodes, + connections: e.workflow?.connections, + settings: e.workflow?.settings, + triggerCount: e.workflow?.triggerCount, + owner: e.user.email, + versionId: e.workflow?.versionId, + }; + LoggerProxy.debug(`Writing workflow ${e.workflowId} to ${fileName}`); + return fsWriteFile(fileName, JSON.stringify(sanitizedWorkflow, null, 2)); + }), + ); + } + + async exportWorkflowsToWorkFolder(): Promise { + try { + sourceControlFoldersExistCheck([this.workflowExportFolder]); + const sharedWorkflows = await Db.collections.SharedWorkflow.find({ + relations: ['workflow', 'role', 'user'], + where: { + role: { + name: 'owner', + scope: 'workflow', + }, + }, + }); + + // before exporting, figure out which workflows have been deleted and remove them from the export folder + const removedFiles = await this.rmDeletedWorkflowsFromExportFolder(sharedWorkflows); + // write the workflows to the export folder as json files + await this.writeExportableWorkflowsToExportFolder(sharedWorkflows); + return { + count: sharedWorkflows.length, + folder: this.workflowExportFolder, + files: sharedWorkflows.map((e) => ({ + id: e?.workflow?.id, + name: this.getWorkflowPath(e?.workflow?.name), + })), + removedFiles: [...removedFiles], + }; + } catch (error) { + throw Error(`Failed to export workflows to work folder: ${(error as Error).message}`); + } + } + + async exportVariablesToWorkFolder(): Promise { + try { + sourceControlFoldersExistCheck([this.gitFolder]); + const variables = await Db.collections.Variables.find(); + // do not export empty variables + if (variables.length === 0) { + return { + count: 0, + folder: this.gitFolder, + files: [], + }; + } + const fileName = this.getVariablesPath(); + const sanitizedVariables = variables.map((e) => ({ ...e, value: '' })); + await fsWriteFile(fileName, JSON.stringify(sanitizedVariables, null, 2)); + return { + count: sanitizedVariables.length, + folder: this.gitFolder, + files: [ + { + id: '', + name: fileName, + }, + ], + }; + } catch (error) { + throw Error(`Failed to export variables to work folder: ${(error as Error).message}`); + } + } + + async exportTagsToWorkFolder(): Promise { + try { + sourceControlFoldersExistCheck([this.gitFolder]); + const tags = await Db.collections.Tag.find(); + // do not export empty tags + if (tags.length === 0) { + return { + count: 0, + folder: this.gitFolder, + files: [], + }; + } + const mappings = await Db.collections.WorkflowTagMapping.find(); + const fileName = this.getTagsPath(); + await fsWriteFile( + fileName, + JSON.stringify( + { + tags: tags.map((tag) => ({ id: tag.id, name: tag.name })), + mappings, + }, + null, + 2, + ), + ); + return { + count: tags.length, + folder: this.gitFolder, + files: [ + { + id: '', + name: fileName, + }, + ], + }; + } catch (error) { + throw Error(`Failed to export variables to work folder: ${(error as Error).message}`); + } + } + + private replaceCredentialData = ( + data: ICredentialDataDecryptedObject, + ): ICredentialDataDecryptedObject => { + for (const [key] of Object.entries(data)) { + try { + if (data[key] === null) { + delete data[key]; // remove invalid null values + } else if (typeof data[key] === 'object') { + data[key] = this.replaceCredentialData(data[key] as ICredentialDataDecryptedObject); + } else if (typeof data[key] === 'string') { 
+ data[key] = (data[key] as string)?.startsWith('={{') ? data[key] : ''; + } else if (typeof data[key] === 'number') { + // TODO: leaving numbers in for now, but maybe we should remove them + continue; + } + } catch (error) { + LoggerProxy.error(`Failed to sanitize credential data: ${(error as Error).message}`); + throw error; + } + } + return data; + }; + + async exportCredentialsToWorkFolder(): Promise { + try { + sourceControlFoldersExistCheck([this.credentialExportFolder]); + const sharedCredentials = await Db.collections.SharedCredentials.find({ + relations: ['credentials', 'role', 'user'], + }); + const encryptionKey = await UserSettings.getEncryptionKey(); + await Promise.all( + sharedCredentials.map(async (sharedCredential) => { + const { name, type, nodesAccess, data, id } = sharedCredential.credentials; + const credentialObject = new Credentials({ id, name }, type, nodesAccess, data); + const plainData = credentialObject.getData(encryptionKey); + const sanitizedData = this.replaceCredentialData(plainData); + const fileName = path.join( + this.credentialExportFolder, + `${sharedCredential.credentials.id}.json`, + ); + const sanitizedCredential: ExportableCredential = { + id: sharedCredential.credentials.id, + name: sharedCredential.credentials.name, + type: sharedCredential.credentials.type, + data: sanitizedData, + nodesAccess: sharedCredential.credentials.nodesAccess, + }; + LoggerProxy.debug(`Writing credential ${sharedCredential.credentials.id} to ${fileName}`); + return fsWriteFile(fileName, JSON.stringify(sanitizedCredential, null, 2)); + }), + ); + return { + count: sharedCredentials.length, + folder: this.credentialExportFolder, + files: sharedCredentials.map((e) => ({ + id: e.credentials.id, + name: path.join(this.credentialExportFolder, `${e.credentials.name}.json`), + })), + }; + } catch (error) { + throw Error(`Failed to export credentials to work folder: ${(error as Error).message}`); + } + } +} diff --git a/packages/cli/src/environments/versionControl/versionControlGit.service.ee.ts b/packages/cli/src/environments/sourceControl/sourceControlGit.service.ee.ts similarity index 83% rename from packages/cli/src/environments/versionControl/versionControlGit.service.ee.ts rename to packages/cli/src/environments/sourceControl/sourceControlGit.service.ee.ts index c038b09371..9c11963458 100644 --- a/packages/cli/src/environments/versionControl/versionControlGit.service.ee.ts +++ b/packages/cli/src/environments/sourceControl/sourceControlGit.service.ee.ts @@ -13,12 +13,12 @@ import type { StatusResult, } from 'simple-git'; import { simpleGit } from 'simple-git'; -import type { VersionControlPreferences } from './types/versionControlPreferences'; -import { VERSION_CONTROL_DEFAULT_BRANCH, VERSION_CONTROL_ORIGIN } from './constants'; -import { versionControlFoldersExistCheck } from './versionControlHelper.ee'; +import type { SourceControlPreferences } from './types/sourceControlPreferences'; +import { SOURCE_CONTROL_DEFAULT_BRANCH, SOURCE_CONTROL_ORIGIN } from './constants'; +import { sourceControlFoldersExistCheck } from './sourceControlHelper.ee'; @Service() -export class VersionControlGitService { +export class SourceControlGitService { git: SimpleGit | null = null; private gitOptions: Partial = {}; @@ -49,12 +49,17 @@ export class VersionControlGitService { } async initService(options: { - versionControlPreferences: VersionControlPreferences; + sourceControlPreferences: SourceControlPreferences; gitFolder: string; sshFolder: string; sshKeyName: string; }): Promise { - const { 
versionControlPreferences, gitFolder, sshKeyName, sshFolder } = options; + const { + sourceControlPreferences: sourceControlPreferences, + gitFolder, + sshKeyName, + sshFolder, + } = options; LoggerProxy.debug('GitService.init'); if (this.git !== null) { return; @@ -63,7 +68,7 @@ export class VersionControlGitService { this.preInitCheck(); LoggerProxy.debug('Git pre-check passed'); - versionControlFoldersExistCheck([gitFolder, sshFolder]); + sourceControlFoldersExistCheck([gitFolder, sshFolder]); const sshKnownHosts = path.join(sshFolder, 'known_hosts'); const sshCommand = `ssh -o UserKnownHostsFile=${sshKnownHosts} -o StrictHostKeyChecking=no -i ${sshKeyName}`; @@ -85,9 +90,9 @@ export class VersionControlGitService { if (!(await this.checkRepositorySetup())) { await this.git.init(); } - if (!(await this.hasRemote(versionControlPreferences.repositoryUrl))) { - if (versionControlPreferences.connected && versionControlPreferences.repositoryUrl) { - await this.initRepository(versionControlPreferences); + if (!(await this.hasRemote(sourceControlPreferences.repositoryUrl))) { + if (sourceControlPreferences.connected && sourceControlPreferences.repositoryUrl) { + await this.initRepository(sourceControlPreferences); } } } @@ -96,11 +101,6 @@ export class VersionControlGitService { this.git = null; } - resetLocalRepository() { - // TODO: Implement - this.git = null; - } - async checkRepositorySetup(): Promise { if (!this.git) { throw new Error('Git is not initialized'); @@ -123,7 +123,7 @@ export class VersionControlGitService { try { const remotes = await this.git.getRemotes(true); const foundRemote = remotes.find( - (e) => e.name === VERSION_CONTROL_ORIGIN && e.refs.push === remote, + (e) => e.name === SOURCE_CONTROL_ORIGIN && e.refs.push === remote, ); if (foundRemote) { LoggerProxy.debug(`Git remote found: ${foundRemote.name}: ${foundRemote.refs.push}`); @@ -137,15 +137,15 @@ export class VersionControlGitService { } async initRepository( - versionControlPreferences: Pick< - VersionControlPreferences, + sourceControlPreferences: Pick< + SourceControlPreferences, 'repositoryUrl' | 'authorEmail' | 'authorName' | 'branchName' | 'initRepo' >, ): Promise { if (!this.git) { throw new Error('Git is not initialized'); } - if (versionControlPreferences.initRepo) { + if (sourceControlPreferences.initRepo) { try { await this.git.init(); } catch (error) { @@ -153,7 +153,7 @@ export class VersionControlGitService { } } try { - await this.git.addRemote(VERSION_CONTROL_ORIGIN, versionControlPreferences.repositoryUrl); + await this.git.addRemote(SOURCE_CONTROL_ORIGIN, sourceControlPreferences.repositoryUrl); } catch (error) { if ((error as Error).message.includes('remote origin already exists')) { LoggerProxy.debug(`Git remote already exists: ${(error as Error).message}`); @@ -161,13 +161,13 @@ export class VersionControlGitService { throw error; } } - await this.git.addConfig('user.email', versionControlPreferences.authorEmail); - await this.git.addConfig('user.name', versionControlPreferences.authorName); - if (versionControlPreferences.initRepo) { + await this.git.addConfig('user.email', sourceControlPreferences.authorEmail); + await this.git.addConfig('user.name', sourceControlPreferences.authorName); + if (sourceControlPreferences.initRepo) { try { const branches = await this.getBranches(); if (branches.branches?.length === 0) { - await this.git.raw(['branch', '-M', versionControlPreferences.branchName]); + await this.git.raw(['branch', '-M', sourceControlPreferences.branchName]); } } catch (error) { 
LoggerProxy.debug(`Git init: ${(error as Error).message}`); @@ -203,6 +203,7 @@ export class VersionControlGitService { throw new Error('Git is not initialized'); } await this.git.checkout(branch); + await this.git.branch([`--set-upstream-to=${SOURCE_CONTROL_ORIGIN}/${branch}`, branch]); return this.getBranches(); } @@ -272,7 +273,7 @@ export class VersionControlGitService { async push( options: { force: boolean; branch: string } = { force: false, - branch: VERSION_CONTROL_DEFAULT_BRANCH, + branch: SOURCE_CONTROL_DEFAULT_BRANCH, }, ): Promise { const { force, branch } = options; @@ -280,9 +281,9 @@ export class VersionControlGitService { throw new Error('Git is not initialized'); } if (force) { - return this.git.push(VERSION_CONTROL_ORIGIN, branch, ['-f']); + return this.git.push(SOURCE_CONTROL_ORIGIN, branch, ['-f']); } - return this.git.push(VERSION_CONTROL_ORIGIN, branch); + return this.git.push(SOURCE_CONTROL_ORIGIN, branch); } async stage(files: Set, deletedFiles?: Set): Promise { diff --git a/packages/cli/src/environments/versionControl/versionControlHelper.ee.ts b/packages/cli/src/environments/sourceControl/sourceControlHelper.ee.ts similarity index 83% rename from packages/cli/src/environments/versionControl/versionControlHelper.ee.ts rename to packages/cli/src/environments/sourceControl/sourceControlHelper.ee.ts index b27f236035..656fe4ab74 100644 --- a/packages/cli/src/environments/versionControl/versionControlHelper.ee.ts +++ b/packages/cli/src/environments/sourceControl/sourceControlHelper.ee.ts @@ -5,9 +5,9 @@ import sshpk from 'sshpk'; import type { KeyPair } from './types/keyPair'; import { constants as fsConstants, mkdirSync, accessSync } from 'fs'; import { LoggerProxy } from 'n8n-workflow'; -import { VERSION_CONTROL_GIT_KEY_COMMENT } from './constants'; +import { SOURCE_CONTROL_GIT_KEY_COMMENT } from './constants'; -export function versionControlFoldersExistCheck(folders: string[]) { +export function sourceControlFoldersExistCheck(folders: string[]) { // running these file access function synchronously to avoid race conditions folders.forEach((folder) => { try { @@ -22,9 +22,9 @@ export function versionControlFoldersExistCheck(folders: string[]) { }); } -export function isVersionControlLicensed() { +export function isSourceControlLicensed() { const license = Container.get(License); - return license.isVersionControlLicensed(); + return license.isSourceControlLicensed(); } export function generateSshKeyPair(keyType: 'ed25519' | 'rsa' = 'ed25519') { @@ -55,10 +55,10 @@ export function generateSshKeyPair(keyType: 'ed25519' | 'rsa' = 'ed25519') { break; } const keyPublic = sshpk.parseKey(generatedKeyPair.publicKey, 'pem'); - keyPublic.comment = VERSION_CONTROL_GIT_KEY_COMMENT; + keyPublic.comment = SOURCE_CONTROL_GIT_KEY_COMMENT; keyPair.publicKey = keyPublic.toString('ssh'); const keyPrivate = sshpk.parsePrivateKey(generatedKeyPair.privateKey, 'pem'); - keyPrivate.comment = VERSION_CONTROL_GIT_KEY_COMMENT; + keyPrivate.comment = SOURCE_CONTROL_GIT_KEY_COMMENT; keyPair.privateKey = keyPrivate.toString('ssh-private'); return { privateKey: keyPair.privateKey, diff --git a/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts b/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts new file mode 100644 index 0000000000..af4d5946d8 --- /dev/null +++ b/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts @@ -0,0 +1,363 @@ +import Container, { Service } from 'typedi'; +import path from 'path'; +import { + 
SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, + SOURCE_CONTROL_GIT_FOLDER, + SOURCE_CONTROL_TAGS_EXPORT_FILE, + SOURCE_CONTROL_VARIABLES_EXPORT_FILE, + SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER, +} from './constants'; +import * as Db from '@/Db'; +import glob from 'fast-glob'; +import { LoggerProxy, jsonParse } from 'n8n-workflow'; +import { readFile as fsReadFile } from 'fs/promises'; +import { Credentials, UserSettings } from 'n8n-core'; +import type { IWorkflowToImport } from '@/Interfaces'; +import type { ExportableCredential } from './types/exportableCredential'; +import { SharedWorkflow } from '@/databases/entities/SharedWorkflow'; +import { CredentialsEntity } from '@/databases/entities/CredentialsEntity'; +import { Variables } from '@/databases/entities/Variables'; +import type { ImportResult } from './types/importResult'; +import { UM_FIX_INSTRUCTION } from '@/commands/BaseCommand'; +import { SharedCredentials } from '@/databases/entities/SharedCredentials'; +import { WorkflowEntity } from '@/databases/entities/WorkflowEntity'; +import { WorkflowTagMapping } from '@/databases/entities/WorkflowTagMapping'; +import { TagEntity } from '@/databases/entities/TagEntity'; +import { ActiveWorkflowRunner } from '../../ActiveWorkflowRunner'; +import type { SourceControllPullOptions } from './types/sourceControlPullWorkFolder'; +import { In } from 'typeorm'; +import { isUniqueConstraintError } from '../../ResponseHelper'; + +@Service() +export class SourceControlImportService { + private gitFolder: string; + + private workflowExportFolder: string; + + private credentialExportFolder: string; + + constructor() { + const userFolder = UserSettings.getUserN8nFolderPath(); + this.gitFolder = path.join(userFolder, SOURCE_CONTROL_GIT_FOLDER); + this.workflowExportFolder = path.join(this.gitFolder, SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER); + this.credentialExportFolder = path.join( + this.gitFolder, + SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, + ); + } + + private async getOwnerGlobalRole() { + const ownerCredentiallRole = await Db.collections.Role.findOne({ + where: { name: 'owner', scope: 'global' }, + }); + + if (!ownerCredentiallRole) { + throw new Error(`Failed to find owner. ${UM_FIX_INSTRUCTION}`); + } + + return ownerCredentiallRole; + } + + private async getOwnerCredentialRole() { + const ownerCredentiallRole = await Db.collections.Role.findOne({ + where: { name: 'owner', scope: 'credential' }, + }); + + if (!ownerCredentiallRole) { + throw new Error(`Failed to find owner. ${UM_FIX_INSTRUCTION}`); + } + + return ownerCredentiallRole; + } + + private async getOwnerWorkflowRole() { + const ownerWorkflowRole = await Db.collections.Role.findOne({ + where: { name: 'owner', scope: 'workflow' }, + }); + + if (!ownerWorkflowRole) { + throw new Error(`Failed to find owner workflow role. 
${UM_FIX_INSTRUCTION}`); + } + + return ownerWorkflowRole; + } + + private async importCredentialsFromFiles( + userId: string, + ): Promise> { + const credentialFiles = await glob('*.json', { + cwd: this.credentialExportFolder, + absolute: true, + }); + const existingCredentials = await Db.collections.Credentials.find(); + const ownerCredentialRole = await this.getOwnerCredentialRole(); + const ownerGlobalRole = await this.getOwnerGlobalRole(); + const encryptionKey = await UserSettings.getEncryptionKey(); + let importCredentialsResult: Array<{ id: string; name: string; type: string }> = []; + await Db.transaction(async (transactionManager) => { + importCredentialsResult = await Promise.all( + credentialFiles.map(async (file) => { + LoggerProxy.debug(`Importing credentials file ${file}`); + const credential = jsonParse( + await fsReadFile(file, { encoding: 'utf8' }), + ); + const existingCredential = existingCredentials.find( + (e) => e.id === credential.id && e.type === credential.type, + ); + const sharedOwner = await Db.collections.SharedCredentials.findOne({ + select: ['userId'], + where: { + credentialsId: credential.id, + roleId: In([ownerCredentialRole.id, ownerGlobalRole.id]), + }, + }); + + const { name, type, data, id, nodesAccess } = credential; + const newCredentialObject = new Credentials({ id, name }, type, []); + if (existingCredential?.data) { + newCredentialObject.data = existingCredential.data; + } else { + newCredentialObject.setData(data, encryptionKey); + } + newCredentialObject.nodesAccess = nodesAccess || existingCredential?.nodesAccess || []; + + LoggerProxy.debug(`Updating credential id ${newCredentialObject.id as string}`); + await transactionManager.upsert(CredentialsEntity, newCredentialObject, ['id']); + + if (!sharedOwner) { + const newSharedCredential = new SharedCredentials(); + newSharedCredential.credentialsId = newCredentialObject.id as string; + newSharedCredential.userId = userId; + newSharedCredential.roleId = ownerGlobalRole.id; + + await transactionManager.upsert(SharedCredentials, { ...newSharedCredential }, [ + 'credentialsId', + 'userId', + ]); + } + + return { + id: newCredentialObject.id as string, + name: newCredentialObject.name, + type: newCredentialObject.type, + }; + }), + ); + }); + return importCredentialsResult.filter((e) => e !== undefined); + } + + private async importVariablesFromFile(valueOverrides?: { + [key: string]: string; + }): Promise<{ imported: string[] }> { + const variablesFile = await glob(SOURCE_CONTROL_VARIABLES_EXPORT_FILE, { + cwd: this.gitFolder, + absolute: true, + }); + const result: { imported: string[] } = { imported: [] }; + if (variablesFile.length > 0) { + LoggerProxy.debug(`Importing variables from file ${variablesFile[0]}`); + const importedVariables = jsonParse>>( + await fsReadFile(variablesFile[0], { encoding: 'utf8' }), + { fallbackValue: [] }, + ); + const overriddenKeys = Object.keys(valueOverrides ?? {}); + + for (const variable of importedVariables) { + if (!variable.key) { + continue; + } + // by default no value is stored remotely, so an empty string is retuned + // it must be changed to undefined so as to not overwrite existing values! 
+ if (variable.value === '') { + variable.value = undefined; + } + if (overriddenKeys.includes(variable.key) && valueOverrides) { + variable.value = valueOverrides[variable.key]; + overriddenKeys.splice(overriddenKeys.indexOf(variable.key), 1); + } + try { + await Db.collections.Variables.upsert({ ...variable }, ['id']); + } catch (errorUpsert) { + if (isUniqueConstraintError(errorUpsert as Error)) { + LoggerProxy.debug(`Variable ${variable.key} already exists, updating instead`); + try { + await Db.collections.Variables.update({ key: variable.key }, { ...variable }); + } catch (errorUpdate) { + LoggerProxy.debug(`Failed to update variable ${variable.key}, skipping`); + LoggerProxy.debug((errorUpdate as Error).message); + } + } + } finally { + result.imported.push(variable.key); + } + } + + // add remaining overrides as new variables + if (overriddenKeys.length > 0 && valueOverrides) { + for (const key of overriddenKeys) { + result.imported.push(key); + const newVariable = new Variables({ key, value: valueOverrides[key] }); + await Db.collections.Variables.save(newVariable); + } + } + } + return result; + } + + private async importTagsFromFile() { + const tagsFile = await glob(SOURCE_CONTROL_TAGS_EXPORT_FILE, { + cwd: this.gitFolder, + absolute: true, + }); + if (tagsFile.length > 0) { + LoggerProxy.debug(`Importing tags from file ${tagsFile[0]}`); + const mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>( + await fsReadFile(tagsFile[0], { encoding: 'utf8' }), + { fallbackValue: { tags: [], mappings: [] } }, + ); + const existingWorkflowIds = new Set( + ( + await Db.collections.Workflow.find({ + select: ['id'], + }) + ).map((e) => e.id), + ); + + await Db.transaction(async (transactionManager) => { + await Promise.all( + mappedTags.tags.map(async (tag) => { + await transactionManager.upsert( + TagEntity, + { + ...tag, + }, + { + skipUpdateIfNoValuesChanged: true, + conflictPaths: { id: true }, + }, + ); + }), + ); + await Promise.all( + mappedTags.mappings.map(async (mapping) => { + if (!existingWorkflowIds.has(String(mapping.workflowId))) return; + await transactionManager.upsert( + WorkflowTagMapping, + { tagId: String(mapping.tagId), workflowId: String(mapping.workflowId) }, + { + skipUpdateIfNoValuesChanged: true, + conflictPaths: { tagId: true, workflowId: true }, + }, + ); + }), + ); + }); + return mappedTags; + } + return { tags: [], mappings: [] }; + } + + private async importWorkflowsFromFiles( + userId: string, + ): Promise> { + const workflowFiles = await glob('*.json', { + cwd: this.workflowExportFolder, + absolute: true, + }); + + const existingWorkflows = await Db.collections.Workflow.find({ + select: ['id', 'name', 'active', 'versionId'], + }); + + const ownerWorkflowRole = await this.getOwnerWorkflowRole(); + const workflowRunner = Container.get(ActiveWorkflowRunner); + + let importWorkflowsResult = new Array<{ id: string; name: string }>(); + await Db.transaction(async (transactionManager) => { + importWorkflowsResult = await Promise.all( + workflowFiles.map(async (file) => { + LoggerProxy.debug(`Parsing workflow file ${file}`); + const importedWorkflow = jsonParse( + await fsReadFile(file, { encoding: 'utf8' }), + ); + const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id); + if (existingWorkflow?.versionId === importedWorkflow.versionId) { + LoggerProxy.debug( + `Skipping import of workflow ${ + importedWorkflow.id ?? 'n/a' + } - versionId is up to date`, + ); + return { + id: importedWorkflow.id ?? 
'n/a', + name: 'skipped', + }; + } + LoggerProxy.debug(`Importing workflow ${importedWorkflow.id ?? 'n/a'}`); + importedWorkflow.active = existingWorkflow?.active ?? false; + LoggerProxy.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`); + const upsertResult = await transactionManager.upsert( + WorkflowEntity, + { ...importedWorkflow }, + ['id'], + ); + if (upsertResult?.identifiers?.length !== 1) { + throw new Error(`Failed to upsert workflow ${importedWorkflow.id ?? 'new'}`); + } + // due to sequential Ids, this may have changed during the insert + // TODO: once IDs are unique and we removed autoincrement, remove this + const upsertedWorkflowId = upsertResult.identifiers[0].id as string; + await transactionManager.upsert( + SharedWorkflow, + { + workflowId: upsertedWorkflowId, + userId, + roleId: ownerWorkflowRole.id, + }, + ['workflowId', 'userId'], + ); + + if (existingWorkflow?.active) { + try { + // remove active pre-import workflow + LoggerProxy.debug(`Deactivating workflow id ${existingWorkflow.id}`); + await workflowRunner.remove(existingWorkflow.id); + // try activating the imported workflow + LoggerProxy.debug(`Reactivating workflow id ${existingWorkflow.id}`); + await workflowRunner.add(existingWorkflow.id, 'activate'); + } catch (error) { + LoggerProxy.error( + `Failed to activate workflow ${existingWorkflow.id}`, + error as Error, + ); + } + } + + return { + id: importedWorkflow.id ?? 'unknown', + name: file, + }; + }), + ); + }); + return importWorkflowsResult; + } + + async importFromWorkFolder(options: SourceControllPullOptions): Promise { + try { + const importedVariables = await this.importVariablesFromFile(options.variables); + const importedCredentials = await this.importCredentialsFromFiles(options.userId); + const importWorkflows = await this.importWorkflowsFromFiles(options.userId); + const importTags = await this.importTagsFromFile(); + + return { + variables: importedVariables, + credentials: importedCredentials, + workflows: importWorkflows, + tags: importTags, + }; + } catch (error) { + throw Error(`Failed to import workflows from work folder: ${(error as Error).message}`); + } + } +} diff --git a/packages/cli/src/environments/versionControl/versionControlPreferences.service.ee.ts b/packages/cli/src/environments/sourceControl/sourceControlPreferences.service.ee.ts similarity index 52% rename from packages/cli/src/environments/versionControl/versionControlPreferences.service.ee.ts rename to packages/cli/src/environments/sourceControl/sourceControlPreferences.service.ee.ts index c17ba8d7f6..ef71321aab 100644 --- a/packages/cli/src/environments/versionControl/versionControlPreferences.service.ee.ts +++ b/packages/cli/src/environments/sourceControl/sourceControlPreferences.service.ee.ts @@ -1,28 +1,28 @@ import { Service } from 'typedi'; -import { VersionControlPreferences } from './types/versionControlPreferences'; +import { SourceControlPreferences } from './types/sourceControlPreferences'; import type { ValidationError } from 'class-validator'; import { validate } from 'class-validator'; import { readFileSync as fsReadFileSync, existsSync as fsExistsSync } from 'fs'; import { writeFile as fsWriteFile, rm as fsRm } from 'fs/promises'; import { generateSshKeyPair, - isVersionControlLicensed, - versionControlFoldersExistCheck, -} from './versionControlHelper.ee'; + isSourceControlLicensed, + sourceControlFoldersExistCheck, +} from './sourceControlHelper.ee'; import { UserSettings } from 'n8n-core'; import { LoggerProxy, jsonParse } from 'n8n-workflow'; 
import * as Db from '@/Db'; import { - VERSION_CONTROL_SSH_FOLDER, - VERSION_CONTROL_GIT_FOLDER, - VERSION_CONTROL_SSH_KEY_NAME, - VERSION_CONTROL_PREFERENCES_DB_KEY, + SOURCE_CONTROL_SSH_FOLDER, + SOURCE_CONTROL_GIT_FOLDER, + SOURCE_CONTROL_SSH_KEY_NAME, + SOURCE_CONTROL_PREFERENCES_DB_KEY, } from './constants'; import path from 'path'; @Service() -export class VersionControlPreferencesService { - private _versionControlPreferences: VersionControlPreferences = new VersionControlPreferences(); +export class SourceControlPreferencesService { + private _sourceControlPreferences: SourceControlPreferences = new SourceControlPreferences(); private sshKeyName: string; @@ -32,23 +32,24 @@ export class VersionControlPreferencesService { constructor() { const userFolder = UserSettings.getUserN8nFolderPath(); - this.sshFolder = path.join(userFolder, VERSION_CONTROL_SSH_FOLDER); - this.gitFolder = path.join(userFolder, VERSION_CONTROL_GIT_FOLDER); - this.sshKeyName = path.join(this.sshFolder, VERSION_CONTROL_SSH_KEY_NAME); + this.sshFolder = path.join(userFolder, SOURCE_CONTROL_SSH_FOLDER); + this.gitFolder = path.join(userFolder, SOURCE_CONTROL_GIT_FOLDER); + this.sshKeyName = path.join(this.sshFolder, SOURCE_CONTROL_SSH_KEY_NAME); } - public get versionControlPreferences(): VersionControlPreferences { + public get sourceControlPreferences(): SourceControlPreferences { return { - ...this._versionControlPreferences, - connected: this._versionControlPreferences.connected ?? false, + ...this._sourceControlPreferences, + connected: this._sourceControlPreferences.connected ?? false, publicKey: this.getPublicKey(), }; } - public set versionControlPreferences(preferences: Partial) { - this._versionControlPreferences = VersionControlPreferences.merge( + // merge the new preferences with the existing preferences when setting + public set sourceControlPreferences(preferences: Partial) { + this._sourceControlPreferences = SourceControlPreferences.merge( preferences, - this._versionControlPreferences, + this._sourceControlPreferences, ); } @@ -77,8 +78,8 @@ export class VersionControlPreferencesService { * Will generate an ed25519 key pair and save it to the database and the file system * Note: this will overwrite any existing key pair */ - async generateAndSaveKeyPair(): Promise { - versionControlFoldersExistCheck([this.gitFolder, this.sshFolder]); + async generateAndSaveKeyPair(): Promise { + sourceControlFoldersExistCheck([this.gitFolder, this.sshFolder]); const keyPair = generateSshKeyPair('ed25519'); if (keyPair.publicKey && keyPair.privateKey) { try { @@ -95,30 +96,30 @@ export class VersionControlPreferencesService { } isBranchReadOnly(): boolean { - return this._versionControlPreferences.branchReadOnly; + return this._sourceControlPreferences.branchReadOnly; } - isVersionControlConnected(): boolean { - return this.versionControlPreferences.connected; + isSourceControlConnected(): boolean { + return this.sourceControlPreferences.connected; } - isVersionControlLicensedAndEnabled(): boolean { - return this.isVersionControlConnected() && isVersionControlLicensed(); + isSourceControlLicensedAndEnabled(): boolean { + return this.isSourceControlConnected() && isSourceControlLicensed(); } getBranchName(): string { - return this.versionControlPreferences.branchName; + return this.sourceControlPreferences.branchName; } - getPreferences(): VersionControlPreferences { - return this.versionControlPreferences; + getPreferences(): SourceControlPreferences { + return this.sourceControlPreferences; } - async 
validateVersionControlPreferences( - preferences: Partial, + async validateSourceControlPreferences( + preferences: Partial, allowMissingProperties = true, ): Promise { - const preferencesObject = new VersionControlPreferences(preferences); + const preferencesObject = new SourceControlPreferences(preferences); const validationResult = await validate(preferencesObject, { forbidUnknownValues: false, skipMissingProperties: allowMissingProperties, @@ -126,45 +127,45 @@ export class VersionControlPreferencesService { validationError: { target: false }, }); if (validationResult.length > 0) { - throw new Error(`Invalid version control preferences: ${JSON.stringify(validationResult)}`); + throw new Error(`Invalid source control preferences: ${JSON.stringify(validationResult)}`); } return validationResult; } async setPreferences( - preferences: Partial, + preferences: Partial, saveToDb = true, - ): Promise { - versionControlFoldersExistCheck([this.gitFolder, this.sshFolder]); + ): Promise { + sourceControlFoldersExistCheck([this.gitFolder, this.sshFolder]); if (!this.hasKeyPairFiles()) { LoggerProxy.debug('No key pair files found, generating new pair'); await this.generateAndSaveKeyPair(); } - this.versionControlPreferences = preferences; + this.sourceControlPreferences = preferences; if (saveToDb) { - const settingsValue = JSON.stringify(this._versionControlPreferences); + const settingsValue = JSON.stringify(this._sourceControlPreferences); try { await Db.collections.Settings.save({ - key: VERSION_CONTROL_PREFERENCES_DB_KEY, + key: SOURCE_CONTROL_PREFERENCES_DB_KEY, value: settingsValue, loadOnStartup: true, }); } catch (error) { - throw new Error(`Failed to save version control preferences: ${(error as Error).message}`); + throw new Error(`Failed to save source control preferences: ${(error as Error).message}`); } } - return this.versionControlPreferences; + return this.sourceControlPreferences; } - async loadFromDbAndApplyVersionControlPreferences(): Promise< - VersionControlPreferences | undefined + async loadFromDbAndApplySourceControlPreferences(): Promise< + SourceControlPreferences | undefined > { const loadedPreferences = await Db.collections.Settings.findOne({ - where: { key: VERSION_CONTROL_PREFERENCES_DB_KEY }, + where: { key: SOURCE_CONTROL_PREFERENCES_DB_KEY }, }); if (loadedPreferences) { try { - const preferences = jsonParse(loadedPreferences.value); + const preferences = jsonParse(loadedPreferences.value); if (preferences) { // set local preferences but don't write back to db await this.setPreferences(preferences, false); @@ -172,11 +173,11 @@ export class VersionControlPreferencesService { } } catch (error) { LoggerProxy.warn( - `Could not parse Version Control settings from database: ${(error as Error).message}`, + `Could not parse Source Control settings from database: ${(error as Error).message}`, ); } } - await this.setPreferences(new VersionControlPreferences(), true); - return this.versionControlPreferences; + await this.setPreferences(new SourceControlPreferences(), true); + return this.sourceControlPreferences; } } diff --git a/packages/cli/src/environments/versionControl/types/exportResult.ts b/packages/cli/src/environments/sourceControl/types/exportResult.ts similarity index 100% rename from packages/cli/src/environments/versionControl/types/exportResult.ts rename to packages/cli/src/environments/sourceControl/types/exportResult.ts diff --git a/packages/cli/src/environments/sourceControl/types/exportableCredential.ts 
b/packages/cli/src/environments/sourceControl/types/exportableCredential.ts new file mode 100644 index 0000000000..917b74132c --- /dev/null +++ b/packages/cli/src/environments/sourceControl/types/exportableCredential.ts @@ -0,0 +1,9 @@ +import type { ICredentialDataDecryptedObject, ICredentialNodeAccess } from 'n8n-workflow'; + +export interface ExportableCredential { + id: string; + name: string; + type: string; + data: ICredentialDataDecryptedObject; + nodesAccess: ICredentialNodeAccess[]; +} diff --git a/packages/cli/src/environments/versionControl/types/exportableWorkflow.ts b/packages/cli/src/environments/sourceControl/types/exportableWorkflow.ts similarity index 100% rename from packages/cli/src/environments/versionControl/types/exportableWorkflow.ts rename to packages/cli/src/environments/sourceControl/types/exportableWorkflow.ts diff --git a/packages/cli/src/environments/versionControl/types/importResult.ts b/packages/cli/src/environments/sourceControl/types/importResult.ts similarity index 88% rename from packages/cli/src/environments/versionControl/types/importResult.ts rename to packages/cli/src/environments/sourceControl/types/importResult.ts index 6be9885869..541e38d0e9 100644 --- a/packages/cli/src/environments/versionControl/types/importResult.ts +++ b/packages/cli/src/environments/sourceControl/types/importResult.ts @@ -7,7 +7,7 @@ export interface ImportResult { name: string; }>; credentials: Array<{ id: string; name: string; type: string }>; - variables: { added: string[]; changed: string[] }; + variables: { imported: string[] }; tags: { tags: TagEntity[]; mappings: WorkflowTagMapping[] }; removedFiles?: string[]; } diff --git a/packages/cli/src/environments/versionControl/types/keyPair.ts b/packages/cli/src/environments/sourceControl/types/keyPair.ts similarity index 100% rename from packages/cli/src/environments/versionControl/types/keyPair.ts rename to packages/cli/src/environments/sourceControl/types/keyPair.ts diff --git a/packages/cli/src/environments/sourceControl/types/requests.ts b/packages/cli/src/environments/sourceControl/types/requests.ts new file mode 100644 index 0000000000..7e2d0d9f7b --- /dev/null +++ b/packages/cli/src/environments/sourceControl/types/requests.ts @@ -0,0 +1,22 @@ +import type { AuthenticatedRequest } from '@/requests'; +import type { SourceControlPreferences } from './sourceControlPreferences'; +import type { SourceControlSetBranch } from './sourceControlSetBranch'; +import type { SourceControlCommit } from './sourceControlCommit'; +import type { SourceControlStage } from './sourceControlStage'; +import type { SourceControlPush } from './sourceControlPush'; +import type { SourceControlPushWorkFolder } from './sourceControlPushWorkFolder'; +import type { SourceControlPullWorkFolder } from './sourceControlPullWorkFolder'; +import type { SourceControlDisconnect } from './sourceControlDisconnect'; +import type { SourceControlSetReadOnly } from './sourceControlSetReadOnly'; + +export declare namespace SourceControlRequest { + type UpdatePreferences = AuthenticatedRequest<{}, {}, Partial, {}>; + type SetReadOnly = AuthenticatedRequest<{}, {}, SourceControlSetReadOnly, {}>; + type SetBranch = AuthenticatedRequest<{}, {}, SourceControlSetBranch, {}>; + type Commit = AuthenticatedRequest<{}, {}, SourceControlCommit, {}>; + type Stage = AuthenticatedRequest<{}, {}, SourceControlStage, {}>; + type Push = AuthenticatedRequest<{}, {}, SourceControlPush, {}>; + type Disconnect = AuthenticatedRequest<{}, {}, SourceControlDisconnect, {}>; + type 
PushWorkFolder = AuthenticatedRequest<{}, {}, SourceControlPushWorkFolder, {}>; + type PullWorkFolder = AuthenticatedRequest<{}, {}, SourceControlPullWorkFolder, {}>; +} diff --git a/packages/cli/src/environments/versionControl/types/versionControlCommit.ts b/packages/cli/src/environments/sourceControl/types/sourceControlCommit.ts similarity index 68% rename from packages/cli/src/environments/versionControl/types/versionControlCommit.ts rename to packages/cli/src/environments/sourceControl/types/sourceControlCommit.ts index 0264f4cb32..cf5b5c02d9 100644 --- a/packages/cli/src/environments/versionControl/types/versionControlCommit.ts +++ b/packages/cli/src/environments/sourceControl/types/sourceControlCommit.ts @@ -1,6 +1,6 @@ import { IsString } from 'class-validator'; -export class VersionControlCommit { +export class SourceControlCommit { @IsString() message: string; } diff --git a/packages/cli/src/environments/versionControl/types/versionControlDisconnect.ts b/packages/cli/src/environments/sourceControl/types/sourceControlDisconnect.ts similarity index 73% rename from packages/cli/src/environments/versionControl/types/versionControlDisconnect.ts rename to packages/cli/src/environments/sourceControl/types/sourceControlDisconnect.ts index 2c4f8be008..229faf7ed5 100644 --- a/packages/cli/src/environments/versionControl/types/versionControlDisconnect.ts +++ b/packages/cli/src/environments/sourceControl/types/sourceControlDisconnect.ts @@ -1,6 +1,6 @@ import { IsBoolean, IsOptional } from 'class-validator'; -export class VersionControlDisconnect { +export class SourceControlDisconnect { @IsBoolean() @IsOptional() keepKeyPair?: boolean; diff --git a/packages/cli/src/environments/versionControl/types/versionControlPreferences.ts b/packages/cli/src/environments/sourceControl/types/sourceControlPreferences.ts similarity index 70% rename from packages/cli/src/environments/versionControl/types/versionControlPreferences.ts rename to packages/cli/src/environments/sourceControl/types/sourceControlPreferences.ts index 829cd0a176..5cbecfb134 100644 --- a/packages/cli/src/environments/versionControl/types/versionControlPreferences.ts +++ b/packages/cli/src/environments/sourceControl/types/sourceControlPreferences.ts @@ -1,7 +1,7 @@ import { IsBoolean, IsEmail, IsHexColor, IsOptional, IsString } from 'class-validator'; -export class VersionControlPreferences { - constructor(preferences: Partial | undefined = undefined) { +export class SourceControlPreferences { + constructor(preferences: Partial | undefined = undefined) { if (preferences) Object.assign(this, preferences); } @@ -34,15 +34,15 @@ export class VersionControlPreferences { @IsBoolean() readonly initRepo?: boolean; - static fromJSON(json: Partial): VersionControlPreferences { - return new VersionControlPreferences(json); + static fromJSON(json: Partial): SourceControlPreferences { + return new SourceControlPreferences(json); } static merge( - preferences: Partial, - defaultPreferences: Partial, - ): VersionControlPreferences { - return new VersionControlPreferences({ + preferences: Partial, + defaultPreferences: Partial, + ): SourceControlPreferences { + return new SourceControlPreferences({ connected: preferences.connected ?? defaultPreferences.connected, repositoryUrl: preferences.repositoryUrl ?? defaultPreferences.repositoryUrl, authorName: preferences.authorName ?? 
defaultPreferences.authorName, diff --git a/packages/cli/src/environments/versionControl/types/versionControlPullWorkFolder.ts b/packages/cli/src/environments/sourceControl/types/sourceControlPullWorkFolder.ts similarity index 83% rename from packages/cli/src/environments/versionControl/types/versionControlPullWorkFolder.ts rename to packages/cli/src/environments/sourceControl/types/sourceControlPullWorkFolder.ts index 937e0758b2..ef9d3d6650 100644 --- a/packages/cli/src/environments/versionControl/types/versionControlPullWorkFolder.ts +++ b/packages/cli/src/environments/sourceControl/types/sourceControlPullWorkFolder.ts @@ -1,6 +1,6 @@ import { IsBoolean, IsObject, IsOptional, IsString } from 'class-validator'; -export class VersionControlPullWorkFolder { +export class SourceControlPullWorkFolder { @IsBoolean() @IsOptional() force?: boolean; @@ -18,7 +18,7 @@ export class VersionControlPullWorkFolder { variables?: { [key: string]: string }; } -export class VersionControllPullOptions { +export class SourceControllPullOptions { userId: string; force?: boolean; diff --git a/packages/cli/src/environments/versionControl/types/versionControlPush.ts b/packages/cli/src/environments/sourceControl/types/sourceControlPush.ts similarity index 75% rename from packages/cli/src/environments/versionControl/types/versionControlPush.ts rename to packages/cli/src/environments/sourceControl/types/sourceControlPush.ts index a8efa050de..94654e0b2c 100644 --- a/packages/cli/src/environments/versionControl/types/versionControlPush.ts +++ b/packages/cli/src/environments/sourceControl/types/sourceControlPush.ts @@ -1,6 +1,6 @@ import { IsBoolean, IsOptional } from 'class-validator'; -export class VersionControlPush { +export class SourceControlPush { @IsBoolean() @IsOptional() force?: boolean; diff --git a/packages/cli/src/environments/versionControl/types/versionControlPushWorkFolder.ts b/packages/cli/src/environments/sourceControl/types/sourceControlPushWorkFolder.ts similarity index 90% rename from packages/cli/src/environments/versionControl/types/versionControlPushWorkFolder.ts rename to packages/cli/src/environments/sourceControl/types/sourceControlPushWorkFolder.ts index ff74af3fff..0fef05da1e 100644 --- a/packages/cli/src/environments/versionControl/types/versionControlPushWorkFolder.ts +++ b/packages/cli/src/environments/sourceControl/types/sourceControlPushWorkFolder.ts @@ -1,6 +1,6 @@ import { IsBoolean, IsOptional, IsString } from 'class-validator'; -export class VersionControlPushWorkFolder { +export class SourceControlPushWorkFolder { @IsBoolean() @IsOptional() force?: boolean; diff --git a/packages/cli/src/environments/versionControl/types/versionControlSetBranch.ts b/packages/cli/src/environments/sourceControl/types/sourceControlSetBranch.ts similarity index 66% rename from packages/cli/src/environments/versionControl/types/versionControlSetBranch.ts rename to packages/cli/src/environments/sourceControl/types/sourceControlSetBranch.ts index 353ca1da3e..d5a2c72507 100644 --- a/packages/cli/src/environments/versionControl/types/versionControlSetBranch.ts +++ b/packages/cli/src/environments/sourceControl/types/sourceControlSetBranch.ts @@ -1,6 +1,6 @@ import { IsString } from 'class-validator'; -export class VersionControlSetBranch { +export class SourceControlSetBranch { @IsString() branch: string; } diff --git a/packages/cli/src/environments/versionControl/types/versionControlSetReadOnly.ts b/packages/cli/src/environments/sourceControl/types/sourceControlSetReadOnly.ts similarity index 68% rename 
from packages/cli/src/environments/versionControl/types/versionControlSetReadOnly.ts rename to packages/cli/src/environments/sourceControl/types/sourceControlSetReadOnly.ts index 665c42a8d4..a5c1201563 100644 --- a/packages/cli/src/environments/versionControl/types/versionControlSetReadOnly.ts +++ b/packages/cli/src/environments/sourceControl/types/sourceControlSetReadOnly.ts @@ -1,6 +1,6 @@ import { IsBoolean } from 'class-validator'; -export class VersionControlSetReadOnly { +export class SourceControlSetReadOnly { @IsBoolean() branchReadOnly: boolean; } diff --git a/packages/cli/src/environments/versionControl/types/versionControlStage.ts b/packages/cli/src/environments/sourceControl/types/sourceControlStage.ts similarity index 88% rename from packages/cli/src/environments/versionControl/types/versionControlStage.ts rename to packages/cli/src/environments/sourceControl/types/sourceControlStage.ts index b6184bb8e2..7569abf9b6 100644 --- a/packages/cli/src/environments/versionControl/types/versionControlStage.ts +++ b/packages/cli/src/environments/sourceControl/types/sourceControlStage.ts @@ -1,6 +1,6 @@ import { IsOptional, IsString } from 'class-validator'; -export class VersionControlStage { +export class SourceControlStage { @IsString({ each: true }) @IsOptional() fileNames?: Set; diff --git a/packages/cli/src/environments/sourceControl/types/sourceControlledFile.ts b/packages/cli/src/environments/sourceControl/types/sourceControlledFile.ts new file mode 100644 index 0000000000..12b99457b7 --- /dev/null +++ b/packages/cli/src/environments/sourceControl/types/sourceControlledFile.ts @@ -0,0 +1,19 @@ +export type SourceControlledFileStatus = + | 'new' + | 'modified' + | 'deleted' + | 'created' + | 'renamed' + | 'conflicted' + | 'unknown'; +export type SourceControlledFileLocation = 'local' | 'remote'; +export type SourceControlledFileType = 'credential' | 'workflow' | 'tags' | 'variables' | 'file'; +export type SourceControlledFile = { + file: string; + id: string; + name: string; + type: SourceControlledFileType; + status: SourceControlledFileStatus; + location: SourceControlledFileLocation; + conflict: boolean; +}; diff --git a/packages/cli/src/environments/variables/variables.controller.ee.ts b/packages/cli/src/environments/variables/variables.controller.ee.ts index 3be05f3285..fa3c619781 100644 --- a/packages/cli/src/environments/variables/variables.controller.ee.ts +++ b/packages/cli/src/environments/variables/variables.controller.ee.ts @@ -50,12 +50,9 @@ EEVariablesController.post( ); EEVariablesController.patch( - '/:id(\\d+)', + '/:id(\\w+)', ResponseHelper.send(async (req: VariablesRequest.Update) => { - const id = parseInt(req.params.id); - if (isNaN(id)) { - throw new ResponseHelper.BadRequestError('Invalid variable id ' + req.params.id); - } + const id = req.params.id; if (req.user.globalRole.name !== 'owner') { LoggerProxy.info('Attempt to update a variable blocked due to lack of permissions', { id, diff --git a/packages/cli/src/environments/variables/variables.controller.ts b/packages/cli/src/environments/variables/variables.controller.ts index 931df7784d..5380ea6185 100644 --- a/packages/cli/src/environments/variables/variables.controller.ts +++ b/packages/cli/src/environments/variables/variables.controller.ts @@ -40,12 +40,9 @@ variablesController.post( ); variablesController.get( - '/:id(\\d+)', + '/:id(\\w+)', ResponseHelper.send(async (req: VariablesRequest.Get) => { - const id = parseInt(req.params.id); - if (isNaN(id)) { - throw new 
ResponseHelper.BadRequestError('Invalid variable id ' + req.params.id); - } + const id = req.params.id; const variable = await VariablesService.get(id); if (variable === null) { throw new ResponseHelper.NotFoundError(`Variable with id ${req.params.id} not found`); @@ -55,19 +52,16 @@ variablesController.get( ); variablesController.patch( - '/:id(\\d+)', + '/:id(\\w+)', ResponseHelper.send(async () => { throw new ResponseHelper.BadRequestError('No variables license found'); }), ); variablesController.delete( - '/:id(\\d+)', + '/:id(\\w+)', ResponseHelper.send(async (req: VariablesRequest.Delete) => { - const id = parseInt(req.params.id); - if (isNaN(id)) { - throw new ResponseHelper.BadRequestError('Invalid variable id ' + req.params.id); - } + const id = req.params.id; if (req.user.globalRole.name !== 'owner') { LoggerProxy.info('Attempt to delete a variable blocked due to lack of permissions', { id, diff --git a/packages/cli/src/environments/variables/variables.service.ee.ts b/packages/cli/src/environments/variables/variables.service.ee.ts index b5c48dcef0..f218329fab 100644 --- a/packages/cli/src/environments/variables/variables.service.ee.ts +++ b/packages/cli/src/environments/variables/variables.service.ee.ts @@ -4,6 +4,7 @@ import { InternalHooks } from '@/InternalHooks'; import Container from 'typedi'; import { canCreateNewVariable } from './enviromentHelpers'; import { VariablesService } from './variables.service'; +import { generateNanoId } from '../../databases/utils/generators'; export class VariablesLicenseError extends Error {} export class VariablesValidationError extends Error {} @@ -32,12 +33,14 @@ export class EEVariablesService extends VariablesService { this.validateVariable(variable); void Container.get(InternalHooks).onVariableCreated({ variable_type: variable.type }); - return collections.Variables.save(variable); + return collections.Variables.save({ + ...variable, + id: generateNanoId(), + }); } - static async update(id: number, variable: Omit): Promise { + static async update(id: string, variable: Omit): Promise { this.validateVariable(variable); - await collections.Variables.update(id, variable); // eslint-disable-next-line @typescript-eslint/no-non-null-assertion return (await this.get(id))!; diff --git a/packages/cli/src/environments/variables/variables.service.ts b/packages/cli/src/environments/variables/variables.service.ts index 646f9368f2..7cc26ee214 100644 --- a/packages/cli/src/environments/variables/variables.service.ts +++ b/packages/cli/src/environments/variables/variables.service.ts @@ -10,11 +10,11 @@ export class VariablesService { return collections.Variables.count(); } - static async get(id: number): Promise { + static async get(id: string): Promise { return collections.Variables.findOne({ where: { id } }); } - static async delete(id: number): Promise { + static async delete(id: string): Promise { await collections.Variables.delete(id); } } diff --git a/packages/cli/src/environments/versionControl/constants.ts b/packages/cli/src/environments/versionControl/constants.ts deleted file mode 100644 index 19eb138238..0000000000 --- a/packages/cli/src/environments/versionControl/constants.ts +++ /dev/null @@ -1,15 +0,0 @@ -export const VERSION_CONTROL_PREFERENCES_DB_KEY = 'features.versionControl'; -export const VERSION_CONTROL_GIT_FOLDER = 'git'; -export const VERSION_CONTROL_GIT_KEY_COMMENT = 'n8n deploy key'; -export const VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER = 'workflows'; -export const VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER = 'credentials'; 
-export const VERSION_CONTROL_VARIABLES_EXPORT_FILE = 'variables.json'; -export const VERSION_CONTROL_TAGS_EXPORT_FILE = 'tags.json'; -export const VERSION_CONTROL_SSH_FOLDER = 'ssh'; -export const VERSION_CONTROL_SSH_KEY_NAME = 'key'; -export const VERSION_CONTROL_DEFAULT_BRANCH = 'main'; -export const VERSION_CONTROL_ORIGIN = 'origin'; -export const VERSION_CONTROL_API_ROOT = 'version-control'; -export const VERSION_CONTROL_README = ` -# n8n Version Control -`; diff --git a/packages/cli/src/environments/versionControl/middleware/versionControlEnabledMiddleware.ee.ts b/packages/cli/src/environments/versionControl/middleware/versionControlEnabledMiddleware.ee.ts deleted file mode 100644 index 8a296d3e17..0000000000 --- a/packages/cli/src/environments/versionControl/middleware/versionControlEnabledMiddleware.ee.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { RequestHandler } from 'express'; -import { isVersionControlLicensed } from '../versionControlHelper.ee'; -import Container from 'typedi'; -import { VersionControlPreferencesService } from '../versionControlPreferences.service.ee'; - -export const versionControlLicensedAndEnabledMiddleware: RequestHandler = (req, res, next) => { - const versionControlPreferencesService = Container.get(VersionControlPreferencesService); - if (versionControlPreferencesService.isVersionControlLicensedAndEnabled()) { - next(); - } else { - res.status(401).json({ status: 'error', message: 'Unauthorized' }); - } -}; - -export const versionControlLicensedMiddleware: RequestHandler = (req, res, next) => { - if (isVersionControlLicensed()) { - next(); - } else { - res.status(401).json({ status: 'error', message: 'Unauthorized' }); - } -}; diff --git a/packages/cli/src/environments/versionControl/types/exportableCredential.ts b/packages/cli/src/environments/versionControl/types/exportableCredential.ts deleted file mode 100644 index b9d33e0520..0000000000 --- a/packages/cli/src/environments/versionControl/types/exportableCredential.ts +++ /dev/null @@ -1,8 +0,0 @@ -import type { ICredentialDataDecryptedObject } from 'n8n-workflow'; - -export interface ExportableCredential { - id: string; - name: string; - type: string; - data: ICredentialDataDecryptedObject; -} diff --git a/packages/cli/src/environments/versionControl/types/requests.ts b/packages/cli/src/environments/versionControl/types/requests.ts deleted file mode 100644 index d985e36b0b..0000000000 --- a/packages/cli/src/environments/versionControl/types/requests.ts +++ /dev/null @@ -1,22 +0,0 @@ -import type { AuthenticatedRequest } from '@/requests'; -import type { VersionControlPreferences } from './versionControlPreferences'; -import type { VersionControlSetBranch } from './versionControlSetBranch'; -import type { VersionControlCommit } from './versionControlCommit'; -import type { VersionControlStage } from './versionControlStage'; -import type { VersionControlPush } from './versionControlPush'; -import type { VersionControlPushWorkFolder } from './versionControlPushWorkFolder'; -import type { VersionControlPullWorkFolder } from './versionControlPullWorkFolder'; -import type { VersionControlDisconnect } from './versionControlDisconnect'; -import type { VersionControlSetReadOnly } from './versionControlSetReadOnly'; - -export declare namespace VersionControlRequest { - type UpdatePreferences = AuthenticatedRequest<{}, {}, Partial, {}>; - type SetReadOnly = AuthenticatedRequest<{}, {}, VersionControlSetReadOnly, {}>; - type SetBranch = AuthenticatedRequest<{}, {}, VersionControlSetBranch, {}>; - type Commit 
= AuthenticatedRequest<{}, {}, VersionControlCommit, {}>; - type Stage = AuthenticatedRequest<{}, {}, VersionControlStage, {}>; - type Push = AuthenticatedRequest<{}, {}, VersionControlPush, {}>; - type Disconnect = AuthenticatedRequest<{}, {}, VersionControlDisconnect, {}>; - type PushWorkFolder = AuthenticatedRequest<{}, {}, VersionControlPushWorkFolder, {}>; - type PullWorkFolder = AuthenticatedRequest<{}, {}, VersionControlPullWorkFolder, {}>; -} diff --git a/packages/cli/src/environments/versionControl/types/versionControlledFile.ts b/packages/cli/src/environments/versionControl/types/versionControlledFile.ts deleted file mode 100644 index e8e0e6f773..0000000000 --- a/packages/cli/src/environments/versionControl/types/versionControlledFile.ts +++ /dev/null @@ -1,19 +0,0 @@ -export type VersionControlledFileStatus = - | 'new' - | 'modified' - | 'deleted' - | 'created' - | 'renamed' - | 'conflicted' - | 'unknown'; -export type VersionControlledFileLocation = 'local' | 'remote'; -export type VersionControlledFileType = 'credential' | 'workflow' | 'tags' | 'variables' | 'file'; -export type VersionControlledFile = { - file: string; - id: string; - name: string; - type: VersionControlledFileType; - status: VersionControlledFileStatus; - location: VersionControlledFileLocation; - conflict: boolean; -}; diff --git a/packages/cli/src/environments/versionControl/versionControl.controller.ee.ts b/packages/cli/src/environments/versionControl/versionControl.controller.ee.ts deleted file mode 100644 index 65da1b773e..0000000000 --- a/packages/cli/src/environments/versionControl/versionControl.controller.ee.ts +++ /dev/null @@ -1,338 +0,0 @@ -import { Authorized, Get, Post, Patch, RestController } from '@/decorators'; -import { - versionControlLicensedMiddleware, - versionControlLicensedAndEnabledMiddleware, -} from './middleware/versionControlEnabledMiddleware.ee'; -import { VersionControlService } from './versionControl.service.ee'; -import { VersionControlRequest } from './types/requests'; -import type { VersionControlPreferences } from './types/versionControlPreferences'; -import { BadRequestError } from '@/ResponseHelper'; -import type { PullResult, PushResult, StatusResult } from 'simple-git'; -import { AuthenticatedRequest } from '../../requests'; -import express from 'express'; -import type { ImportResult } from './types/importResult'; -import type { VersionControlPushWorkFolder } from './types/versionControlPushWorkFolder'; -import { VersionControlPreferencesService } from './versionControlPreferences.service.ee'; -import type { VersionControlledFile } from './types/versionControlledFile'; -import { VERSION_CONTROL_API_ROOT, VERSION_CONTROL_DEFAULT_BRANCH } from './constants'; - -@RestController(`/${VERSION_CONTROL_API_ROOT}`) -export class VersionControlController { - constructor( - private versionControlService: VersionControlService, - private versionControlPreferencesService: VersionControlPreferencesService, - ) {} - - @Authorized('any') - @Get('/preferences', { middlewares: [versionControlLicensedMiddleware] }) - async getPreferences(): Promise { - // returns the settings with the privateKey property redacted - return this.versionControlPreferencesService.getPreferences(); - } - - @Authorized(['global', 'owner']) - @Post('/preferences', { middlewares: [versionControlLicensedMiddleware] }) - async setPreferences(req: VersionControlRequest.UpdatePreferences) { - if ( - req.body.branchReadOnly === undefined && - this.versionControlPreferencesService.isVersionControlConnected() - ) { - 
throw new BadRequestError( - 'Cannot change preferences while connected to a version control provider. Please disconnect first.', - ); - } - try { - const sanitizedPreferences: Partial = { - ...req.body, - initRepo: req.body.initRepo ?? true, // default to true if not specified - connected: undefined, - publicKey: undefined, - }; - await this.versionControlPreferencesService.validateVersionControlPreferences( - sanitizedPreferences, - ); - const updatedPreferences = await this.versionControlPreferencesService.setPreferences( - sanitizedPreferences, - ); - if (sanitizedPreferences.initRepo === true) { - try { - await this.versionControlService.initializeRepository({ - ...updatedPreferences, - branchName: - updatedPreferences.branchName === '' - ? VERSION_CONTROL_DEFAULT_BRANCH - : updatedPreferences.branchName, - initRepo: true, - }); - if (this.versionControlPreferencesService.getPreferences().branchName !== '') { - await this.versionControlPreferencesService.setPreferences({ - connected: true, - }); - } - } catch (error) { - // if initialization fails, run cleanup to remove any intermediate state and throw the error - await this.versionControlService.disconnect({ keepKeyPair: true }); - throw error; - } - } - await this.versionControlService.init(); - return this.versionControlPreferencesService.getPreferences(); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Patch('/preferences', { middlewares: [versionControlLicensedMiddleware] }) - async updatePreferences(req: VersionControlRequest.UpdatePreferences) { - try { - const sanitizedPreferences: Partial = { - ...req.body, - initRepo: false, - connected: undefined, - publicKey: undefined, - repositoryUrl: undefined, - authorName: undefined, - authorEmail: undefined, - }; - const currentPreferences = this.versionControlPreferencesService.getPreferences(); - await this.versionControlPreferencesService.validateVersionControlPreferences( - sanitizedPreferences, - ); - if ( - sanitizedPreferences.branchName && - sanitizedPreferences.branchName !== currentPreferences.branchName - ) { - await this.versionControlService.setBranch(sanitizedPreferences.branchName); - } - if (sanitizedPreferences.branchColor || sanitizedPreferences.branchReadOnly !== undefined) { - await this.versionControlPreferencesService.setPreferences( - { - branchColor: sanitizedPreferences.branchColor, - branchReadOnly: sanitizedPreferences.branchReadOnly, - }, - true, - ); - } - await this.versionControlService.init(); - return this.versionControlPreferencesService.getPreferences(); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Post('/disconnect', { middlewares: [versionControlLicensedMiddleware] }) - async disconnect(req: VersionControlRequest.Disconnect) { - try { - return await this.versionControlService.disconnect(req.body); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized('any') - @Get('/get-branches', { middlewares: [versionControlLicensedMiddleware] }) - async getBranches() { - try { - return await this.versionControlService.getBranches(); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Post('/push-workfolder', { middlewares: [versionControlLicensedAndEnabledMiddleware] }) - async pushWorkfolder( - req: VersionControlRequest.PushWorkFolder, - 
res: express.Response, - ): Promise { - if (this.versionControlPreferencesService.isBranchReadOnly()) { - throw new BadRequestError('Cannot push onto read-only branch.'); - } - try { - const result = await this.versionControlService.pushWorkfolder(req.body); - if ((result as PushResult).pushed) { - res.statusCode = 200; - } else { - res.statusCode = 409; - } - return result; - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Post('/pull-workfolder', { middlewares: [versionControlLicensedAndEnabledMiddleware] }) - async pullWorkfolder( - req: VersionControlRequest.PullWorkFolder, - res: express.Response, - ): Promise { - try { - const result = await this.versionControlService.pullWorkfolder({ - force: req.body.force, - variables: req.body.variables, - userId: req.user.id, - importAfterPull: req.body.importAfterPull ?? true, - }); - if ((result as ImportResult)?.workflows) { - res.statusCode = 200; - } else { - res.statusCode = 409; - } - return result; - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Get('/reset-workfolder', { middlewares: [versionControlLicensedAndEnabledMiddleware] }) - async resetWorkfolder( - req: VersionControlRequest.PullWorkFolder, - ): Promise { - try { - return await this.versionControlService.resetWorkfolder({ - force: req.body.force, - variables: req.body.variables, - userId: req.user.id, - importAfterPull: req.body.importAfterPull ?? true, - }); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized('any') - @Get('/get-status', { middlewares: [versionControlLicensedAndEnabledMiddleware] }) - async getStatus() { - try { - return await this.versionControlService.getStatus(); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized('any') - @Get('/status', { middlewares: [versionControlLicensedMiddleware] }) - async status(): Promise { - try { - return await this.versionControlService.status(); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Post('/generate-key-pair', { middlewares: [versionControlLicensedMiddleware] }) - async generateKeyPair(): Promise { - try { - const result = await this.versionControlPreferencesService.generateAndSaveKeyPair(); - return result; - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - // #region Version Control Test Functions - //TODO: SEPARATE FUNCTIONS FOR DEVELOPMENT ONLY - //TODO: REMOVE THESE FUNCTIONS AFTER TESTING - - @Authorized(['global', 'owner']) - @Get('/export', { middlewares: [versionControlLicensedMiddleware] }) - async export() { - try { - return await this.versionControlService.export(); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Get('/import', { middlewares: [versionControlLicensedMiddleware] }) - async import(req: AuthenticatedRequest) { - try { - return await this.versionControlService.import({ - userId: req.user.id, - }); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized('any') - @Get('/fetch') - async fetch() { - try { - return await this.versionControlService.fetch(); - } catch (error) { - throw new BadRequestError((error as { message: 
string }).message); - } - } - - @Authorized('any') - @Get('/diff') - async diff() { - try { - return await this.versionControlService.diff(); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Post('/push') - async push(req: VersionControlRequest.Push): Promise { - if (this.versionControlPreferencesService.isBranchReadOnly()) { - throw new BadRequestError('Cannot push onto read-only branch.'); - } - try { - return await this.versionControlService.push(req.body.force); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Post('/commit') - async commit(req: VersionControlRequest.Commit) { - try { - return await this.versionControlService.commit(req.body.message); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Post('/stage') - async stage(req: VersionControlRequest.Stage): Promise<{ staged: string[] } | string> { - try { - return await this.versionControlService.stage(req.body as VersionControlPushWorkFolder); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Post('/unstage') - async unstage(): Promise { - try { - return await this.versionControlService.unstage(); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - @Authorized(['global', 'owner']) - @Get('/pull') - async pull(): Promise { - try { - return await this.versionControlService.pull(); - } catch (error) { - throw new BadRequestError((error as { message: string }).message); - } - } - - // #endregion -} diff --git a/packages/cli/src/environments/versionControl/versionControlExport.service.ee.ts b/packages/cli/src/environments/versionControl/versionControlExport.service.ee.ts deleted file mode 100644 index 37544035f7..0000000000 --- a/packages/cli/src/environments/versionControl/versionControlExport.service.ee.ts +++ /dev/null @@ -1,674 +0,0 @@ -import Container, { Service } from 'typedi'; -import path from 'path'; -import { - VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER, - VERSION_CONTROL_GIT_FOLDER, - VERSION_CONTROL_TAGS_EXPORT_FILE, - VERSION_CONTROL_VARIABLES_EXPORT_FILE, - VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER, -} from './constants'; -import * as Db from '@/Db'; -import glob from 'fast-glob'; -import type { ICredentialDataDecryptedObject } from 'n8n-workflow'; -import { LoggerProxy, jsonParse } from 'n8n-workflow'; -import { writeFile as fsWriteFile, readFile as fsReadFile, rm as fsRm } from 'fs/promises'; -import { VersionControlGitService } from './versionControlGit.service.ee'; -import { Credentials, UserSettings } from 'n8n-core'; -import type { IWorkflowToImport } from '@/Interfaces'; -import type { ExportableWorkflow } from './types/exportableWorkflow'; -import type { ExportableCredential } from './types/exportableCredential'; -import type { ExportResult } from './types/exportResult'; -import { SharedWorkflow } from '@/databases/entities/SharedWorkflow'; -import { CredentialsEntity } from '@/databases/entities/CredentialsEntity'; -import { Variables } from '@/databases/entities/Variables'; -import type { ImportResult } from './types/importResult'; -import { UM_FIX_INSTRUCTION } from '@/commands/BaseCommand'; -import config from '@/config'; -import { SharedCredentials } from '@/databases/entities/SharedCredentials'; -import { 
WorkflowEntity } from '@/databases/entities/WorkflowEntity'; -import { WorkflowTagMapping } from '@/databases/entities/WorkflowTagMapping'; -import { TagEntity } from '@/databases/entities/TagEntity'; -import { ActiveWorkflowRunner } from '../../ActiveWorkflowRunner'; -import without from 'lodash/without'; -import type { VersionControllPullOptions } from './types/versionControlPullWorkFolder'; -import { versionControlFoldersExistCheck } from './versionControlHelper.ee'; -import { In } from 'typeorm'; - -@Service() -export class VersionControlExportService { - private gitFolder: string; - - private workflowExportFolder: string; - - private credentialExportFolder: string; - - constructor(private gitService: VersionControlGitService) { - const userFolder = UserSettings.getUserN8nFolderPath(); - this.gitFolder = path.join(userFolder, VERSION_CONTROL_GIT_FOLDER); - this.workflowExportFolder = path.join(this.gitFolder, VERSION_CONTROL_WORKFLOW_EXPORT_FOLDER); - this.credentialExportFolder = path.join( - this.gitFolder, - VERSION_CONTROL_CREDENTIAL_EXPORT_FOLDER, - ); - } - - getWorkflowPath(workflowId: string): string { - return path.join(this.workflowExportFolder, `${workflowId}.json`); - } - - getCredentialsPath(credentialsId: string): string { - return path.join(this.credentialExportFolder, `${credentialsId}.json`); - } - - getTagsPath(): string { - return path.join(this.gitFolder, VERSION_CONTROL_TAGS_EXPORT_FILE); - } - - getVariablesPath(): string { - return path.join(this.gitFolder, VERSION_CONTROL_VARIABLES_EXPORT_FILE); - } - - async getWorkflowFromFile( - filePath: string, - root = this.gitFolder, - ): Promise { - try { - const importedWorkflow = jsonParse( - await fsReadFile(path.join(root, filePath), { encoding: 'utf8' }), - ); - return importedWorkflow; - } catch (error) { - return undefined; - } - } - - async getCredentialFromFile( - filePath: string, - root = this.gitFolder, - ): Promise { - try { - const credential = jsonParse( - await fsReadFile(path.join(root, filePath), { encoding: 'utf8' }), - ); - return credential; - } catch (error) { - return undefined; - } - } - - private async getOwnerGlobalRole() { - const ownerCredentiallRole = await Db.collections.Role.findOne({ - where: { name: 'owner', scope: 'global' }, - }); - - if (!ownerCredentiallRole) { - throw new Error(`Failed to find owner. ${UM_FIX_INSTRUCTION}`); - } - - return ownerCredentiallRole; - } - - private async getOwnerCredentialRole() { - const ownerCredentiallRole = await Db.collections.Role.findOne({ - where: { name: 'owner', scope: 'credential' }, - }); - - if (!ownerCredentiallRole) { - throw new Error(`Failed to find owner. ${UM_FIX_INSTRUCTION}`); - } - - return ownerCredentiallRole; - } - - private async getOwnerWorkflowRole() { - const ownerWorkflowRole = await Db.collections.Role.findOne({ - where: { name: 'owner', scope: 'workflow' }, - }); - - if (!ownerWorkflowRole) { - throw new Error(`Failed to find owner workflow role. 
${UM_FIX_INSTRUCTION}`); - } - - return ownerWorkflowRole; - } - - async cleanWorkFolder() { - try { - const workflowFiles = await glob('*.json', { - cwd: this.workflowExportFolder, - absolute: true, - }); - const credentialFiles = await glob('*.json', { - cwd: this.credentialExportFolder, - absolute: true, - }); - const variablesFile = await glob(VERSION_CONTROL_VARIABLES_EXPORT_FILE, { - cwd: this.gitFolder, - absolute: true, - }); - const tagsFile = await glob(VERSION_CONTROL_TAGS_EXPORT_FILE, { - cwd: this.gitFolder, - absolute: true, - }); - await Promise.all(tagsFile.map(async (e) => fsRm(e))); - await Promise.all(variablesFile.map(async (e) => fsRm(e))); - await Promise.all(workflowFiles.map(async (e) => fsRm(e))); - await Promise.all(credentialFiles.map(async (e) => fsRm(e))); - LoggerProxy.debug('Cleaned work folder.'); - } catch (error) { - LoggerProxy.error(`Failed to clean work folder: ${(error as Error).message}`); - } - } - - async deleteRepositoryFolder() { - try { - await fsRm(this.gitFolder, { recursive: true }); - } catch (error) { - LoggerProxy.error(`Failed to delete work folder: ${(error as Error).message}`); - } - } - - private async rmDeletedWorkflowsFromExportFolder( - workflowsToBeExported: SharedWorkflow[], - ): Promise> { - const sharedWorkflowsFileNames = new Set( - workflowsToBeExported.map((e) => this.getWorkflowPath(e?.workflow?.name)), - ); - const existingWorkflowsInFolder = new Set( - await glob('*.json', { - cwd: this.workflowExportFolder, - absolute: true, - }), - ); - const deletedWorkflows = new Set(existingWorkflowsInFolder); - for (const elem of sharedWorkflowsFileNames) { - deletedWorkflows.delete(elem); - } - try { - await Promise.all([...deletedWorkflows].map(async (e) => fsRm(e))); - } catch (error) { - LoggerProxy.error(`Failed to delete workflows from work folder: ${(error as Error).message}`); - } - return deletedWorkflows; - } - - private async writeExportableWorkflowsToExportFolder(workflowsToBeExported: SharedWorkflow[]) { - await Promise.all( - workflowsToBeExported.map(async (e) => { - if (!e.workflow) { - LoggerProxy.debug( - `Found no corresponding workflow ${e.workflowId ?? 
'unknown'}, skipping export`, - ); - return; - } - const fileName = this.getWorkflowPath(e.workflow?.id); - const sanitizedWorkflow: ExportableWorkflow = { - active: e.workflow?.active, - id: e.workflow?.id, - name: e.workflow?.name, - nodes: e.workflow?.nodes, - connections: e.workflow?.connections, - settings: e.workflow?.settings, - triggerCount: e.workflow?.triggerCount, - owner: e.user.email, - versionId: e.workflow?.versionId, - }; - LoggerProxy.debug(`Writing workflow ${e.workflowId} to ${fileName}`); - return fsWriteFile(fileName, JSON.stringify(sanitizedWorkflow, null, 2)); - }), - ); - } - - async exportWorkflowsToWorkFolder(): Promise { - try { - versionControlFoldersExistCheck([this.workflowExportFolder]); - const sharedWorkflows = await Db.collections.SharedWorkflow.find({ - relations: ['workflow', 'role', 'user'], - where: { - role: { - name: 'owner', - scope: 'workflow', - }, - }, - }); - - // before exporting, figure out which workflows have been deleted and remove them from the export folder - const removedFiles = await this.rmDeletedWorkflowsFromExportFolder(sharedWorkflows); - // write the workflows to the export folder as json files - await this.writeExportableWorkflowsToExportFolder(sharedWorkflows); - return { - count: sharedWorkflows.length, - folder: this.workflowExportFolder, - files: sharedWorkflows.map((e) => ({ - id: e?.workflow?.id, - name: this.getWorkflowPath(e?.workflow?.name), - })), - removedFiles: [...removedFiles], - }; - } catch (error) { - throw Error(`Failed to export workflows to work folder: ${(error as Error).message}`); - } - } - - async exportVariablesToWorkFolder(): Promise { - try { - versionControlFoldersExistCheck([this.gitFolder]); - const variables = await Db.collections.Variables.find(); - // do not export empty variables - if (variables.length === 0) { - return { - count: 0, - folder: this.gitFolder, - files: [], - }; - } - const fileName = this.getVariablesPath(); - const sanitizedVariables = variables.map((e) => ({ ...e, value: '' })); - await fsWriteFile(fileName, JSON.stringify(sanitizedVariables, null, 2)); - return { - count: sanitizedVariables.length, - folder: this.gitFolder, - files: [ - { - id: '', - name: fileName, - }, - ], - }; - } catch (error) { - throw Error(`Failed to export variables to work folder: ${(error as Error).message}`); - } - } - - async exportTagsToWorkFolder(): Promise { - try { - versionControlFoldersExistCheck([this.gitFolder]); - const tags = await Db.collections.Tag.find(); - const mappings = await Db.collections.WorkflowTagMapping.find(); - const fileName = this.getTagsPath(); - await fsWriteFile( - fileName, - JSON.stringify( - { - tags: tags.map((tag) => ({ id: tag.id, name: tag.name })), - mappings, - }, - null, - 2, - ), - ); - return { - count: tags.length, - folder: this.gitFolder, - files: [ - { - id: '', - name: fileName, - }, - ], - }; - } catch (error) { - throw Error(`Failed to export variables to work folder: ${(error as Error).message}`); - } - } - - private replaceCredentialData = ( - data: ICredentialDataDecryptedObject, - ): ICredentialDataDecryptedObject => { - for (const [key] of Object.entries(data)) { - try { - if (typeof data[key] === 'object') { - data[key] = this.replaceCredentialData(data[key] as ICredentialDataDecryptedObject); - } else if (typeof data[key] === 'string') { - data[key] = (data[key] as string)?.startsWith('={{') ? 
data[key] : ''; - } else if (typeof data[key] === 'number') { - // TODO: leaving numbers in for now, but maybe we should remove them - // data[key] = 0; - } - } catch (error) { - LoggerProxy.error(`Failed to sanitize credential data: ${(error as Error).message}`); - throw error; - } - } - return data; - }; - - async exportCredentialsToWorkFolder(): Promise { - try { - versionControlFoldersExistCheck([this.credentialExportFolder]); - const sharedCredentials = await Db.collections.SharedCredentials.find({ - relations: ['credentials', 'role', 'user'], - }); - const encryptionKey = await UserSettings.getEncryptionKey(); - await Promise.all( - sharedCredentials.map(async (sharedCredential) => { - const { name, type, nodesAccess, data, id } = sharedCredential.credentials; - const credentialObject = new Credentials({ id, name }, type, nodesAccess, data); - const plainData = credentialObject.getData(encryptionKey); - const sanitizedData = this.replaceCredentialData(plainData); - const fileName = path.join( - this.credentialExportFolder, - `${sharedCredential.credentials.id}.json`, - ); - const sanitizedCredential: ExportableCredential = { - id: sharedCredential.credentials.id, - name: sharedCredential.credentials.name, - type: sharedCredential.credentials.type, - data: sanitizedData, - }; - LoggerProxy.debug(`Writing credential ${sharedCredential.credentials.id} to ${fileName}`); - return fsWriteFile(fileName, JSON.stringify(sanitizedCredential, null, 2)); - }), - ); - return { - count: sharedCredentials.length, - folder: this.credentialExportFolder, - files: sharedCredentials.map((e) => ({ - id: e.credentials.id, - name: path.join(this.credentialExportFolder, `${e.credentials.name}.json`), - })), - }; - } catch (error) { - throw Error(`Failed to export credentials to work folder: ${(error as Error).message}`); - } - } - - private async importCredentialsFromFiles( - userId: string, - ): Promise> { - const credentialFiles = await glob('*.json', { - cwd: this.credentialExportFolder, - absolute: true, - }); - const existingCredentials = await Db.collections.Credentials.find(); - const ownerCredentialRole = await this.getOwnerCredentialRole(); - const ownerGlobalRole = await this.getOwnerGlobalRole(); - const encryptionKey = await UserSettings.getEncryptionKey(); - let importCredentialsResult: Array<{ id: string; name: string; type: string }> = []; - await Db.transaction(async (transactionManager) => { - importCredentialsResult = await Promise.all( - credentialFiles.map(async (file) => { - LoggerProxy.debug(`Importing credentials file ${file}`); - const credential = jsonParse( - await fsReadFile(file, { encoding: 'utf8' }), - ); - const existingCredential = existingCredentials.find( - (e) => e.id === credential.id && e.type === credential.type, - ); - const sharedOwner = await Db.collections.SharedCredentials.findOne({ - select: ['userId'], - where: { - credentialsId: credential.id, - roleId: In([ownerCredentialRole.id, ownerGlobalRole.id]), - }, - }); - - const { name, type, data, id } = credential; - const newCredentialObject = new Credentials({ id, name }, type, []); - if (existingCredential?.data) { - newCredentialObject.data = existingCredential.data; - } else { - newCredentialObject.setData(data, encryptionKey); - } - if (existingCredential?.nodesAccess) { - newCredentialObject.nodesAccess = existingCredential.nodesAccess; - } - - LoggerProxy.debug(`Updating credential id ${newCredentialObject.id as string}`); - await transactionManager.upsert(CredentialsEntity, newCredentialObject, ['id']); - - 
if (!sharedOwner) { - const newSharedCredential = new SharedCredentials(); - newSharedCredential.credentialsId = newCredentialObject.id as string; - newSharedCredential.userId = userId; - newSharedCredential.roleId = ownerGlobalRole.id; - - await transactionManager.upsert(SharedCredentials, { ...newSharedCredential }, [ - 'credentialsId', - 'userId', - ]); - } - - // TODO: once IDs are unique, remove this - if (config.getEnv('database.type') === 'postgresdb') { - await transactionManager.query( - "SELECT setval('credentials_entity_id_seq', (SELECT MAX(id) from credentials_entity))", - ); - } - return { - id: newCredentialObject.id as string, - name: newCredentialObject.name, - type: newCredentialObject.type, - }; - }), - ); - }); - return importCredentialsResult.filter((e) => e !== undefined); - } - - private async importVariablesFromFile(valueOverrides?: { - [key: string]: string; - }): Promise<{ added: string[]; changed: string[] }> { - const variablesFile = await glob(VERSION_CONTROL_VARIABLES_EXPORT_FILE, { - cwd: this.gitFolder, - absolute: true, - }); - if (variablesFile.length > 0) { - LoggerProxy.debug(`Importing variables from file ${variablesFile[0]}`); - const overriddenKeys = Object.keys(valueOverrides ?? {}); - const importedVariables = jsonParse( - await fsReadFile(variablesFile[0], { encoding: 'utf8' }), - { fallbackValue: [] }, - ); - const importedKeys = importedVariables.map((variable) => variable.key); - const existingVariables = await Db.collections.Variables.find(); - const existingKeys = existingVariables.map((variable) => variable.key); - const addedKeysFromImport = without(importedKeys, ...existingKeys); - const addedKeysFromOverride = without(overriddenKeys, ...existingKeys); - const addedVariables = importedVariables.filter((e) => addedKeysFromImport.includes(e.key)); - addedKeysFromOverride.forEach((key) => { - addedVariables.push({ - key, - value: valueOverrides ? 
valueOverrides[key] : '', - type: 'string', - } as Variables); - }); - - // first round, add missing variable keys to Db without touching values - await Db.transaction(async (transactionManager) => { - await Promise.all( - addedVariables.map(async (addedVariable) => { - await transactionManager.insert(Variables, { - ...addedVariable, - id: undefined, - }); - }), - ); - }); - - // second round, update values of existing variables if overridden - if (valueOverrides) { - await Db.transaction(async (transactionManager) => { - await Promise.all( - overriddenKeys.map(async (key) => { - await transactionManager.update(Variables, { key }, { value: valueOverrides[key] }); - }), - ); - }); - } - return { - added: [...addedKeysFromImport, ...addedKeysFromOverride], - changed: without(overriddenKeys, ...addedKeysFromOverride), - }; - } - return { added: [], changed: [] }; - } - - private async importTagsFromFile() { - const tagsFile = await glob(VERSION_CONTROL_TAGS_EXPORT_FILE, { - cwd: this.gitFolder, - absolute: true, - }); - if (tagsFile.length > 0) { - LoggerProxy.debug(`Importing tags from file ${tagsFile[0]}`); - const mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>( - await fsReadFile(tagsFile[0], { encoding: 'utf8' }), - { fallbackValue: { tags: [], mappings: [] } }, - ); - const existingWorkflowIds = new Set( - ( - await Db.collections.Workflow.find({ - select: ['id'], - }) - ).map((e) => e.id), - ); - - await Db.transaction(async (transactionManager) => { - await Promise.all( - mappedTags.tags.map(async (tag) => { - await transactionManager.upsert( - TagEntity, - { - ...tag, - }, - { - skipUpdateIfNoValuesChanged: true, - conflictPaths: { id: true }, - }, - ); - }), - ); - await Promise.all( - mappedTags.mappings.map(async (mapping) => { - if (!existingWorkflowIds.has(String(mapping.workflowId))) return; - await transactionManager.upsert( - WorkflowTagMapping, - { tagId: String(mapping.tagId), workflowId: String(mapping.workflowId) }, - { - skipUpdateIfNoValuesChanged: true, - conflictPaths: { tagId: true, workflowId: true }, - }, - ); - }), - ); - }); - return mappedTags; - } - return { tags: [], mappings: [] }; - } - - private async importWorkflowsFromFiles( - userId: string, - ): Promise> { - const workflowFiles = await glob('*.json', { - cwd: this.workflowExportFolder, - absolute: true, - }); - - const existingWorkflows = await Db.collections.Workflow.find({ - select: ['id', 'name', 'active', 'versionId'], - }); - - const ownerWorkflowRole = await this.getOwnerWorkflowRole(); - const workflowRunner = Container.get(ActiveWorkflowRunner); - - let importWorkflowsResult = new Array<{ id: string; name: string }>(); - // TODO: once IDs are unique and we removed autoincrement, remove this - if (config.getEnv('database.type') === 'postgresdb') { - await Db.transaction(async (transactionManager) => { - await transactionManager.query( - 'ALTER SEQUENCE IF EXISTS "workflow_entity_id_seq" RESTART;', - ); - await transactionManager.query( - "SELECT setval('workflow_entity_id_seq', (SELECT MAX(id) from workflow_entity) );", - // "SELECT setval('workflow_entity_id_seq', (SELECT MAX(v) FROM (VALUES (1), ((SELECT MAX(id) from workflow_entity))) as value(v)));", - ); - }); - } - await Db.transaction(async (transactionManager) => { - importWorkflowsResult = await Promise.all( - workflowFiles.map(async (file) => { - LoggerProxy.debug(`Parsing workflow file ${file}`); - const importedWorkflow = jsonParse( - await fsReadFile(file, { encoding: 'utf8' }), - ); - const 
existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id); - if (existingWorkflow?.versionId === importedWorkflow.versionId) { - LoggerProxy.debug( - `Skipping import of workflow ${ - importedWorkflow.id ?? 'n/a' - } - versionId is up to date`, - ); - return { - id: importedWorkflow.id ?? 'n/a', - name: 'skipped', - }; - } - LoggerProxy.debug(`Importing workflow ${importedWorkflow.id ?? 'n/a'}`); - importedWorkflow.active = existingWorkflow?.active ?? false; - LoggerProxy.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`); - const upsertResult = await transactionManager.upsert( - WorkflowEntity, - { ...importedWorkflow }, - ['id'], - ); - if (upsertResult?.identifiers?.length !== 1) { - throw new Error(`Failed to upsert workflow ${importedWorkflow.id ?? 'new'}`); - } - // due to sequential Ids, this may have changed during the insert - // TODO: once IDs are unique and we removed autoincrement, remove this - const upsertedWorkflowId = upsertResult.identifiers[0].id as string; - await transactionManager.upsert( - SharedWorkflow, - { - workflowId: upsertedWorkflowId, - userId, - roleId: ownerWorkflowRole.id, - }, - ['workflowId', 'userId'], - ); - - if (existingWorkflow?.active) { - try { - // remove active pre-import workflow - LoggerProxy.debug(`Deactivating workflow id ${existingWorkflow.id}`); - await workflowRunner.remove(existingWorkflow.id); - // try activating the imported workflow - LoggerProxy.debug(`Reactivating workflow id ${existingWorkflow.id}`); - await workflowRunner.add(existingWorkflow.id, 'activate'); - } catch (error) { - LoggerProxy.error( - `Failed to activate workflow ${existingWorkflow.id}`, - error as Error, - ); - } - } - - return { - id: importedWorkflow.id ?? 'unknown', - name: file, - }; - }), - ); - }); - return importWorkflowsResult; - } - - async importFromWorkFolder(options: VersionControllPullOptions): Promise { - try { - const importedVariables = await this.importVariablesFromFile(options.variables); - const importedCredentials = await this.importCredentialsFromFiles(options.userId); - const importWorkflows = await this.importWorkflowsFromFiles(options.userId); - const importTags = await this.importTagsFromFile(); - - return { - variables: importedVariables, - credentials: importedCredentials, - workflows: importWorkflows, - tags: importTags, - }; - } catch (error) { - throw Error(`Failed to import workflows from work folder: ${(error as Error).message}`); - } - } -} diff --git a/packages/cli/src/eventbus/MessageEventBus/recoverEvents.ts b/packages/cli/src/eventbus/MessageEventBus/recoverEvents.ts index 516a3549ee..90dd30c849 100644 --- a/packages/cli/src/eventbus/MessageEventBus/recoverEvents.ts +++ b/packages/cli/src/eventbus/MessageEventBus/recoverEvents.ts @@ -1,7 +1,5 @@ -import { parse, stringify } from 'flatted'; import type { IRun, IRunExecutionData, ITaskData } from 'n8n-workflow'; import { NodeOperationError, WorkflowOperationError } from 'n8n-workflow'; -import * as Db from '@/Db'; import type { EventMessageTypes, EventNamesTypes } from '../EventMessageClasses'; import type { DateTime } from 'luxon'; import { Push } from '@/push'; @@ -11,24 +9,21 @@ import { eventBus } from './MessageEventBus'; import { Container } from 'typedi'; import { InternalHooks } from '@/InternalHooks'; import { getWorkflowHooksMain } from '@/WorkflowExecuteAdditionalData'; +import { ExecutionRepository } from '@/databases/repositories'; export async function recoverExecutionDataFromEventLogMessages( executionId: string, messages: 
EventMessageTypes[], applyToDb = true, ): Promise { - const executionEntry = await Db.collections.Execution.findOne({ - where: { - id: executionId, - }, + const executionEntry = await Container.get(ExecutionRepository).findSingleExecution(executionId, { + includeData: true, + unflattenData: true, }); if (executionEntry && messages) { - let executionData: IRunExecutionData | undefined; + let executionData = executionEntry.data; let workflowError: WorkflowOperationError | undefined; - try { - executionData = parse(executionEntry.data) as IRunExecutionData; - } catch {} if (!executionData) { executionData = { resultData: { runData: {} } }; } @@ -156,8 +151,8 @@ export async function recoverExecutionDataFromEventLogMessages( if (applyToDb) { const newStatus = executionEntry.status === 'failed' ? 'failed' : 'crashed'; - await Db.collections.Execution.update(executionId, { - data: stringify(executionData), + await Container.get(ExecutionRepository).updateExistingExecution(executionId, { + data: executionData, status: newStatus, stoppedAt: lastNodeRunTimestamp?.toJSDate(), }); diff --git a/packages/cli/src/executions/executionHelpers.ts b/packages/cli/src/executions/executionHelpers.ts index 148bd9b8b9..20105915b3 100644 --- a/packages/cli/src/executions/executionHelpers.ts +++ b/packages/cli/src/executions/executionHelpers.ts @@ -1,10 +1,10 @@ import { Container } from 'typedi'; -import type { IExecutionFlattedDb } from '@/Interfaces'; import type { ExecutionStatus } from 'n8n-workflow'; import { License } from '@/License'; +import type { IExecutionFlattedDb, IExecutionResponse } from '@/Interfaces'; export function getStatusUsingPreviousExecutionStatusMethod( - execution: IExecutionFlattedDb, + execution: IExecutionFlattedDb | IExecutionResponse, ): ExecutionStatus { if (execution.waitTill) { return 'waiting'; diff --git a/packages/cli/src/executions/executions.service.ts b/packages/cli/src/executions/executions.service.ts index 41372335f0..f6c8303b59 100644 --- a/packages/cli/src/executions/executions.service.ts +++ b/packages/cli/src/executions/executions.service.ts @@ -2,23 +2,13 @@ /* eslint-disable @typescript-eslint/no-non-null-assertion */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ import { validate as jsonSchemaValidate } from 'jsonschema'; -import { BinaryDataManager } from 'n8n-core'; -import type { - IDataObject, - IWorkflowBase, - JsonObject, - ExecutionStatus, - IRunExecutionData, - NodeOperationError, - IExecutionsSummary, -} from 'n8n-workflow'; -import { deepCopy, LoggerProxy, jsonParse, Workflow } from 'n8n-workflow'; -import type { FindOperator, FindOptionsWhere } from 'typeorm'; -import { In, IsNull, LessThanOrEqual, MoreThanOrEqual, Not, Raw } from 'typeorm'; +import type { IWorkflowBase, JsonObject, ExecutionStatus } from 'n8n-workflow'; +import { LoggerProxy, jsonParse, Workflow } from 'n8n-workflow'; +import type { FindOperator } from 'typeorm'; +import { In } from 'typeorm'; import { ActiveExecutions } from '@/ActiveExecutions'; import config from '@/config'; import type { User } from '@db/entities/User'; -import type { ExecutionEntity } from '@db/entities/ExecutionEntity'; import type { IExecutionFlattedResponse, IExecutionResponse, @@ -33,16 +23,11 @@ import { getSharedWorkflowIds } from '@/WorkflowHelpers'; import { WorkflowRunner } from '@/WorkflowRunner'; import * as Db from '@/Db'; import * as GenericHelpers from '@/GenericHelpers'; -import { parse } from 'flatted'; import { Container } from 'typedi'; -import { - 
getStatusUsingPreviousExecutionStatusMethod, - isAdvancedExecutionFiltersEnabled, -} from './executionHelpers'; -import { ExecutionMetadata } from '@db/entities/ExecutionMetadata'; -import { DateUtils } from 'typeorm/util/DateUtils'; +import { getStatusUsingPreviousExecutionStatusMethod } from './executionHelpers'; +import { ExecutionRepository } from '@/databases/repositories'; -interface IGetExecutionsQueryFilter { +export interface IGetExecutionsQueryFilter { id?: FindOperator | string; finished?: boolean; mode?: string; @@ -102,102 +87,6 @@ export class ExecutionsService { return getSharedWorkflowIds(user, ['owner']); } - /** - * Helper function to retrieve count of Executions - */ - static async getExecutionsCount( - countFilter: IDataObject, - user: User, - metadata?: Array<{ key: string; value: string }>, - ): Promise<{ count: number; estimated: boolean }> { - const dbType = config.getEnv('database.type'); - const filteredFields = Object.keys(countFilter).filter((field) => field !== 'id'); - - // For databases other than Postgres, do a regular count - // when filtering based on `workflowId` or `finished` fields. - if ( - dbType !== 'postgresdb' || - metadata?.length || - filteredFields.length > 0 || - user.globalRole.name !== 'owner' - ) { - const sharedWorkflowIds = await this.getWorkflowIdsForUser(user); - - let query = Db.collections.Execution.createQueryBuilder('execution') - .select() - .orderBy('execution.id', 'DESC') - .where({ workflowId: In(sharedWorkflowIds) }); - - if (metadata?.length) { - query = query.leftJoinAndSelect(ExecutionMetadata, 'md', 'md.executionId = execution.id'); - for (const md of metadata) { - query = query.andWhere('md.key = :key AND md.value = :value', md); - } - } - - if (filteredFields.length > 0) { - query = query.andWhere(countFilter); - } - - const count = await query.getCount(); - return { count, estimated: false }; - } - - try { - // Get an estimate of rows count. - const estimateRowsNumberSql = - "SELECT n_live_tup FROM pg_stat_all_tables WHERE relname = 'execution_entity';"; - const rows: Array<{ n_live_tup: string }> = await Db.collections.Execution.query( - estimateRowsNumberSql, - ); - - const estimate = parseInt(rows[0].n_live_tup, 10); - // If over 100k, return just an estimate. - if (estimate > 100_000) { - // if less than 100k, we get the real count as even a full - // table scan should not take so long. 
- return { count: estimate, estimated: true }; - } - } catch (error) { - LoggerProxy.warn(`Failed to get executions count from Postgres: ${error}`); - } - - const sharedWorkflowIds = await getSharedWorkflowIds(user); - - const count = await Db.collections.Execution.count({ - where: { - workflowId: In(sharedWorkflowIds), - }, - }); - - return { count, estimated: false }; - } - - static massageFilters(filter: IDataObject): void { - if (filter) { - if (filter.waitTill === true) { - filter.waitTill = Not(IsNull()); - // eslint-disable-next-line @typescript-eslint/no-unnecessary-boolean-literal-compare - } else if (filter.finished === false) { - filter.waitTill = IsNull(); - } else { - delete filter.waitTill; - } - - if (Array.isArray(filter.metadata)) { - delete filter.metadata; - } - - if ('startedAfter' in filter) { - delete filter.startedAfter; - } - - if ('startedBefore' in filter) { - delete filter.startedBefore; - } - } - } - static async getExecutionsList(req: ExecutionRequest.GetAll): Promise { const sharedWorkflowIds = await this.getWorkflowIdsForUser(req.user); if (sharedWorkflowIds.length === 0) { @@ -266,167 +155,23 @@ export class ExecutionsService { .map(({ id }) => id), ); - const findWhere: FindOptionsWhere = { - workflowId: In(sharedWorkflowIds), - }; - if (filter?.status) { - Object.assign(findWhere, { status: In(filter.status) }); - } - if (filter?.finished) { - Object.assign(findWhere, { finished: filter.finished }); - } - - const rangeQuery: string[] = []; - const rangeQueryParams: { - lastId?: string; - firstId?: string; - executingWorkflowIds?: string[]; - } = {}; - - if (req.query.lastId) { - rangeQuery.push('execution.id < :lastId'); - rangeQueryParams.lastId = req.query.lastId; - } - - if (req.query.firstId) { - rangeQuery.push('execution.id > :firstId'); - rangeQueryParams.firstId = req.query.firstId; - } - - if (executingWorkflowIds.length > 0) { - rangeQuery.push('execution.id NOT IN (:...executingWorkflowIds)'); - rangeQueryParams.executingWorkflowIds = executingWorkflowIds; - } - - if (rangeQuery.length) { - Object.assign(findWhere, { - id: Raw(() => rangeQuery.join(' and '), rangeQueryParams), - }); - } - - // Omit `data` from the Execution since it is the largest and not necessary for the list. - let query = Db.collections.Execution.createQueryBuilder('execution') - .select([ - 'execution.id', - 'execution.finished', - 'execution.mode', - 'execution.retryOf', - 'execution.retrySuccessId', - 'execution.waitTill', - 'execution.startedAt', - 'execution.stoppedAt', - 'execution.workflowData', - 'execution.status', - ]) - .orderBy('execution.id', 'DESC') - .take(limit) - .where(findWhere); - - const countFilter = deepCopy(filter ?? {}); - const metadata = isAdvancedExecutionFiltersEnabled() ? 
filter?.metadata : undefined; - - if (metadata?.length) { - query = query.leftJoin(ExecutionMetadata, 'md', 'md.executionId = execution.id'); - for (const md of metadata) { - query = query.andWhere('md.key = :key AND md.value = :value', md); - } - } - - if (filter?.startedAfter) { - query = query.andWhere({ - startedAt: MoreThanOrEqual( - DateUtils.mixedDateToUtcDatetimeString(new Date(filter.startedAfter)), - ), - }); - } - - if (filter?.startedBefore) { - query = query.andWhere({ - startedAt: LessThanOrEqual( - DateUtils.mixedDateToUtcDatetimeString(new Date(filter.startedBefore)), - ), - }); - } - - // deepcopy breaks the In operator so we need to reapply it - if (filter?.status) { - Object.assign(filter, { status: In(filter.status) }); - Object.assign(countFilter, { status: In(filter.status) }); - } - - if (filter) { - this.massageFilters(filter as IDataObject); - query = query.andWhere(filter); - } - - this.massageFilters(countFilter as IDataObject); - countFilter.id = Not(In(executingWorkflowIds)); - - const executions = await query.getMany(); - - const { count, estimated } = await this.getExecutionsCount( - countFilter as IDataObject, - req.user, - metadata, + const { count, estimated } = await Container.get(ExecutionRepository).countExecutions( + filter, + sharedWorkflowIds, + executingWorkflowIds, + req.user.globalRole.name === 'owner', ); - const formattedExecutions: IExecutionsSummary[] = executions.map((execution) => { - // inject potential node execution errors into the execution response - const nodeExecutionStatus = {}; - let lastNodeExecuted; - let executionError; - // fill execution status for old executions that will return null - if (!execution.status) { - execution.status = getStatusUsingPreviousExecutionStatusMethod(execution); - } - try { - const data = parse(execution.data) as IRunExecutionData; - lastNodeExecuted = data?.resultData?.lastNodeExecuted ?? ''; - executionError = data?.resultData?.error; - if (data?.resultData?.runData) { - for (const key of Object.keys(data.resultData.runData)) { - const errors = data.resultData.runData[key] - ?.filter((taskdata) => taskdata.error?.name) - ?.map((taskdata) => { - if (taskdata.error?.name === 'NodeOperationError') { - return { - name: (taskdata.error as NodeOperationError).name, - message: (taskdata.error as NodeOperationError).message, - description: (taskdata.error as NodeOperationError).description, - }; - } else { - return { - name: taskdata.error?.name, - }; - } - }); - Object.assign(nodeExecutionStatus, { - [key]: { - executionStatus: data.resultData.runData[key][0].executionStatus, - errors, - data: data.resultData.runData[key][0].data ?? undefined, - }, - }); - } - } - } catch {} - return { - id: execution.id, - finished: execution.finished, - mode: execution.mode, - retryOf: execution.retryOf?.toString(), - retrySuccessId: execution?.retrySuccessId?.toString(), - waitTill: execution.waitTill as Date | undefined, - startedAt: execution.startedAt, - stoppedAt: execution.stoppedAt, - workflowId: execution.workflowData?.id ?? 
'', - workflowName: execution.workflowData?.name, - status: execution.status, - lastNodeExecuted, - executionError, - nodeExecutionStatus, - } as IExecutionsSummary; - }); + const formattedExecutions = await Container.get(ExecutionRepository).searchExecutions( + filter, + limit, + executingWorkflowIds, + sharedWorkflowIds, + { + lastId: req.query.lastId, + firstId: req.query.firstId, + }, + ); return { count, results: formattedExecutions, @@ -441,11 +186,13 @@ export class ExecutionsService { if (!sharedWorkflowIds.length) return undefined; const { id: executionId } = req.params; - const execution = await Db.collections.Execution.findOne({ + const execution = await Container.get(ExecutionRepository).findSingleExecution(executionId, { where: { id: executionId, workflowId: In(sharedWorkflowIds), }, + includeData: true, + unflattenData: false, }); if (!execution) { @@ -460,11 +207,6 @@ export class ExecutionsService { execution.status = getStatusUsingPreviousExecutionStatusMethod(execution); } - if (req.query.unflattedResponse === 'true') { - return ResponseHelper.unflattenExecutionData(execution); - } - - // @ts-ignore return execution; } @@ -473,11 +215,12 @@ export class ExecutionsService { if (!sharedWorkflowIds.length) return false; const { id: executionId } = req.params; - const execution = await Db.collections.Execution.findOne({ + const execution = await Container.get(ExecutionRepository).findSingleExecution(executionId, { where: { - id: executionId, workflowId: In(sharedWorkflowIds), }, + includeData: true, + unflattenData: true, }); if (!execution) { @@ -493,22 +236,20 @@ export class ExecutionsService { ); } - const fullExecutionData = ResponseHelper.unflattenExecutionData(execution); - - if (fullExecutionData.finished) { + if (execution.finished) { throw new Error('The execution succeeded, so it cannot be retried.'); } const executionMode = 'retry'; - fullExecutionData.workflowData.active = false; + execution.workflowData.active = false; // Start the workflow const data: IWorkflowExecutionDataProcess = { executionMode, - executionData: fullExecutionData.data, + executionData: execution.data, retryOf: req.params.id, - workflowData: fullExecutionData.workflowData, + workflowData: execution.workflowData, userId: req.user.id, }; @@ -532,7 +273,7 @@ export class ExecutionsService { if (req.body.loadWorkflow) { // Loads the currently saved workflow to execute instead of the // one saved at the time of the execution. 
- const workflowId = fullExecutionData.workflowData.id as string; + const workflowId = execution.workflowData.id as string; const workflowData = (await Db.collections.Workflow.findOneBy({ id: workflowId, })) as IWorkflowBase; @@ -614,50 +355,9 @@ export class ExecutionsService { } } - if (!deleteBefore && !ids) { - throw new Error('Either "deleteBefore" or "ids" must be present in the request body'); - } - - const where: FindOptionsWhere = { workflowId: In(sharedWorkflowIds) }; - - if (deleteBefore) { - // delete executions by date, if user may access the underlying workflows - where.startedAt = LessThanOrEqual(deleteBefore); - Object.assign(where, requestFilters); - if (where.status) { - where.status = In(requestFiltersRaw!.status as string[]); - } - } else if (ids) { - // delete executions by IDs, if user may access the underlying workflows - where.id = In(ids); - } else return; - - const executions = await Db.collections.Execution.find({ - select: ['id'], - where, + return Container.get(ExecutionRepository).deleteExecutions(requestFilters, sharedWorkflowIds, { + deleteBefore, + ids, }); - - if (!executions.length) { - if (ids) { - LoggerProxy.error('Failed to delete an execution due to insufficient permissions', { - userId: req.user.id, - executionIds: ids, - }); - } - return; - } - - const idsToDelete = executions.map(({ id }) => id); - - const binaryDataManager = BinaryDataManager.getInstance(); - await Promise.all( - idsToDelete.map(async (id) => binaryDataManager.deleteBinaryDataByExecutionId(id)), - ); - - do { - // Delete in batches to avoid "SQLITE_ERROR: Expression tree is too large (maximum depth 1000)" error - const batch = idsToDelete.splice(0, 500); - await Db.collections.Execution.delete(batch); - } while (idsToDelete.length > 0); } } diff --git a/packages/cli/src/utils.ts b/packages/cli/src/utils.ts index c9eee2f4c8..5ceeeeb732 100644 --- a/packages/cli/src/utils.ts +++ b/packages/cli/src/utils.ts @@ -7,7 +7,8 @@ import { START_NODES } from './constants'; * Returns if the given id is a valid workflow id */ export function isWorkflowIdValid(id: string | null | undefined): boolean { - return !(typeof id === 'string' && isNaN(parseInt(id, 10))); + // TODO: could also check if id only contains nanoId characters + return typeof id === 'string' && id?.length <= 16; } function findWorkflowStart(executionMode: 'integrated' | 'cli') { diff --git a/packages/cli/src/workflows/workflows.controller.ee.ts b/packages/cli/src/workflows/workflows.controller.ee.ts index 399886cae2..14b4d56d96 100644 --- a/packages/cli/src/workflows/workflows.controller.ee.ts +++ b/packages/cli/src/workflows/workflows.controller.ee.ts @@ -86,7 +86,7 @@ EEWorkflowController.put( ); EEWorkflowController.get( - '/:id(\\d+)', + '/:id(\\w+)', ResponseHelper.send(async (req: WorkflowRequest.Get) => { const { id: workflowId } = req.params; @@ -215,7 +215,7 @@ EEWorkflowController.get( ); EEWorkflowController.patch( - '/:id(\\d+)', + '/:id(\\w+)', ResponseHelper.send(async (req: WorkflowRequest.Update) => { const { id: workflowId } = req.params; const forceSave = req.query.forceSave === 'true'; @@ -247,7 +247,7 @@ EEWorkflowController.post( const workflow = new WorkflowEntity(); Object.assign(workflow, req.body.workflowData); - if (workflow.id !== undefined) { + if (req.body.workflowData.id !== undefined) { const safeWorkflow = await EEWorkflows.preventTampering(workflow, workflow.id, req.user); req.body.workflowData.nodes = safeWorkflow.nodes; } diff --git a/packages/cli/src/workflows/workflows.controller.ts 
b/packages/cli/src/workflows/workflows.controller.ts index 99a46c78af..0191cf28cd 100644 --- a/packages/cli/src/workflows/workflows.controller.ts +++ b/packages/cli/src/workflows/workflows.controller.ts @@ -189,7 +189,7 @@ workflowsController.get( * GET /workflows/:id */ workflowsController.get( - '/:id(\\d+)', + '/:id(\\w+)', ResponseHelper.send(async (req: WorkflowRequest.Get) => { const { id: workflowId } = req.params; @@ -228,7 +228,7 @@ workflowsController.get( * PATCH /workflows/:id */ workflowsController.patch( - '/:id', + '/:id(\\w+)', ResponseHelper.send(async (req: WorkflowRequest.Update) => { const { id: workflowId } = req.params; @@ -254,7 +254,7 @@ workflowsController.patch( * DELETE /workflows/:id */ workflowsController.delete( - '/:id', + '/:id(\\w+)', ResponseHelper.send(async (req: WorkflowRequest.Delete) => { const { id: workflowId } = req.params; diff --git a/packages/cli/test/integration/audit/credentials.risk.test.ts b/packages/cli/test/integration/audit/credentials.risk.test.ts index 47e0cf1eee..395cfa2275 100644 --- a/packages/cli/test/integration/audit/credentials.risk.test.ts +++ b/packages/cli/test/integration/audit/credentials.risk.test.ts @@ -5,6 +5,7 @@ import { audit } from '@/audit'; import { CREDENTIALS_REPORT } from '@/audit/constants'; import { getRiskSection } from './utils'; import * as testDb from '../shared/testDb'; +import { generateNanoId } from '@/databases/utils/generators'; beforeAll(async () => { await testDb.init(); @@ -20,6 +21,7 @@ afterAll(async () => { test('should report credentials not in any use', async () => { const credentialDetails = { + id: generateNanoId(), name: 'My Slack Credential', data: 'U2FsdGVkX18WjITBG4IDqrGB1xE/uzVNjtwDAG3lP7E=', type: 'slackApi', @@ -27,6 +29,7 @@ test('should report credentials not in any use', async () => { }; const workflowDetails = { + id: generateNanoId(), name: 'My Test Workflow', active: false, connections: {}, @@ -57,13 +60,14 @@ test('should report credentials not in any use', async () => { expect(section.location).toHaveLength(1); expect(section.location[0]).toMatchObject({ - id: '1', + id: credentialDetails.id, name: 'My Slack Credential', }); }); test('should report credentials not in active use', async () => { const credentialDetails = { + id: generateNanoId(), name: 'My Slack Credential', data: 'U2FsdGVkX18WjITBG4IDqrGB1xE/uzVNjtwDAG3lP7E=', type: 'slackApi', @@ -73,6 +77,7 @@ test('should report credentials not in active use', async () => { const credential = await Db.collections.Credentials.save(credentialDetails); const workflowDetails = { + id: generateNanoId(), name: 'My Test Workflow', active: false, connections: {}, @@ -107,6 +112,7 @@ test('should report credentials not in active use', async () => { test('should report credential in not recently executed workflow', async () => { const credentialDetails = { + id: generateNanoId(), name: 'My Slack Credential', data: 'U2FsdGVkX18WjITBG4IDqrGB1xE/uzVNjtwDAG3lP7E=', type: 'slackApi', @@ -116,6 +122,7 @@ test('should report credential in not recently executed workflow', async () => { const credential = await Db.collections.Credentials.save(credentialDetails); const workflowDetails = { + id: generateNanoId(), name: 'My Test Workflow', active: false, connections: {}, @@ -142,16 +149,19 @@ test('should report credential in not recently executed workflow', async () => { const date = new Date(); date.setDate(date.getDate() - config.getEnv('security.audit.daysAbandonedWorkflow') - 1); - await Db.collections.Execution.save({ - data: '[]', + const 
savedExecution = await Db.collections.Execution.save({ finished: true, mode: 'manual', startedAt: date, stoppedAt: date, - workflowData: workflow, workflowId: workflow.id, waitTill: null, }); + await Db.collections.ExecutionData.save({ + execution: savedExecution, + data: '[]', + workflowData: workflow, + }); const testAudit = await audit(['credentials']); @@ -170,6 +180,7 @@ test('should report credential in not recently executed workflow', async () => { test('should not report credentials in recently executed workflow', async () => { const credentialDetails = { + id: generateNanoId(), name: 'My Slack Credential', data: 'U2FsdGVkX18WjITBG4IDqrGB1xE/uzVNjtwDAG3lP7E=', type: 'slackApi', @@ -179,6 +190,7 @@ test('should not report credentials in recently executed workflow', async () => const credential = await Db.collections.Credentials.save(credentialDetails); const workflowDetails = { + id: generateNanoId(), name: 'My Test Workflow', active: true, connections: {}, @@ -205,17 +217,21 @@ test('should not report credentials in recently executed workflow', async () => const date = new Date(); date.setDate(date.getDate() - config.getEnv('security.audit.daysAbandonedWorkflow') + 1); - await Db.collections.Execution.save({ - data: '[]', + const savedExecution = await Db.collections.Execution.save({ finished: true, mode: 'manual', startedAt: date, stoppedAt: date, - workflowData: workflow, workflowId: workflow.id, waitTill: null, }); + await Db.collections.ExecutionData.save({ + execution: savedExecution, + data: '[]', + workflowData: workflow, + }); + const testAudit = await audit(['credentials']); expect(testAudit).toBeEmptyArray(); diff --git a/packages/cli/test/integration/audit/database.risk.test.ts b/packages/cli/test/integration/audit/database.risk.test.ts index 7a9dae75a2..0ae783ffa7 100644 --- a/packages/cli/test/integration/audit/database.risk.test.ts +++ b/packages/cli/test/integration/audit/database.risk.test.ts @@ -8,6 +8,7 @@ import { } from '@/audit/constants'; import { getRiskSection, saveManualTriggerWorkflow } from './utils'; import * as testDb from '../shared/testDb'; +import { generateNanoId } from '@/databases/utils/generators'; beforeAll(async () => { await testDb.init(); @@ -28,6 +29,7 @@ test('should report expressions in queries', async () => { const promises = Object.entries(map).map(async ([nodeType, nodeId]) => { const details = { + id: generateNanoId(), name: 'My Test Workflow', active: false, connections: {}, @@ -80,6 +82,7 @@ test('should report expressions in query params', async () => { const promises = Object.entries(map).map(async ([nodeType, nodeId]) => { const details = { + id: generateNanoId(), name: 'My Test Workflow', active: false, connections: {}, @@ -134,6 +137,7 @@ test('should report unused query params', async () => { const promises = Object.entries(map).map(async ([nodeType, nodeId]) => { const details = { + id: generateNanoId(), name: 'My Test Workflow', active: false, connections: {}, diff --git a/packages/cli/test/integration/audit/filesystem.risk.test.ts b/packages/cli/test/integration/audit/filesystem.risk.test.ts index 57ebf38ad5..009d1c2dd0 100644 --- a/packages/cli/test/integration/audit/filesystem.risk.test.ts +++ b/packages/cli/test/integration/audit/filesystem.risk.test.ts @@ -4,6 +4,7 @@ import { audit } from '@/audit'; import { FILESYSTEM_INTERACTION_NODE_TYPES, FILESYSTEM_REPORT } from '@/audit/constants'; import { getRiskSection, saveManualTriggerWorkflow } from './utils'; import * as testDb from '../shared/testDb'; +import { 
WorkflowEntity } from '@/databases/entities/WorkflowEntity'; beforeAll(async () => { await testDb.init(); @@ -26,11 +27,10 @@ test('should report filesystem interaction nodes', async () => { ); const promises = Object.entries(map).map(async ([nodeType, nodeId]) => { - const details = { + const details = new WorkflowEntity({ name: 'My Test Workflow', active: false, connections: {}, - nodeTypes: {}, nodes: [ { id: nodeId, @@ -38,9 +38,10 @@ test('should report filesystem interaction nodes', async () => { type: nodeType, typeVersion: 1, position: [0, 0] as [number, number], + parameters: {}, }, ], - }; + }); return Db.collections.Workflow.save(details); }); diff --git a/packages/cli/test/integration/audit/instance.risk.test.ts b/packages/cli/test/integration/audit/instance.risk.test.ts index f05ab3a067..89411c1d0d 100644 --- a/packages/cli/test/integration/audit/instance.risk.test.ts +++ b/packages/cli/test/integration/audit/instance.risk.test.ts @@ -12,6 +12,7 @@ import { import * as testDb from '../shared/testDb'; import { toReportTitle } from '@/audit/utils'; import config from '@/config'; +import { generateNanoId } from '@/databases/utils/generators'; beforeAll(async () => { await testDb.init(); @@ -31,6 +32,7 @@ test('should report webhook lacking authentication', async () => { const targetNodeId = uuid(); const details = { + id: generateNanoId(), name: 'My Test Workflow', active: true, nodeTypes: {}, @@ -73,6 +75,7 @@ test('should report webhook lacking authentication', async () => { test('should not report webhooks having basic or header auth', async () => { const promises = ['basicAuth', 'headerAuth'].map(async (authType) => { const details = { + id: generateNanoId(), name: 'My Test Workflow', active: true, nodeTypes: {}, @@ -115,6 +118,7 @@ test('should not report webhooks having basic or header auth', async () => { test('should not report webhooks validated by direct children', async () => { const promises = [...WEBHOOK_VALIDATOR_NODE_TYPES].map(async (nodeType) => { const details = { + id: generateNanoId(), name: 'My Test Workflow', active: true, nodeTypes: {}, diff --git a/packages/cli/test/integration/audit/nodes.risk.test.ts b/packages/cli/test/integration/audit/nodes.risk.test.ts index 384850c559..8a3643fb7f 100644 --- a/packages/cli/test/integration/audit/nodes.risk.test.ts +++ b/packages/cli/test/integration/audit/nodes.risk.test.ts @@ -9,6 +9,7 @@ import { toReportTitle } from '@/audit/utils'; import { mockInstance } from '../shared/utils'; import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; import { NodeTypes } from '@/NodeTypes'; +import { WorkflowEntity } from '@/databases/entities/WorkflowEntity'; const nodesAndCredentials = mockInstance(LoadNodesAndCredentials); nodesAndCredentials.getCustomDirectories.mockReturnValue([]); @@ -32,11 +33,10 @@ test('should report risky official nodes', async () => { }, {}); const promises = Object.entries(map).map(async ([nodeType, nodeId]) => { - const details = { + const details = new WorkflowEntity({ name: 'My Test Workflow', active: false, connections: {}, - nodeTypes: {}, nodes: [ { id: nodeId, @@ -44,9 +44,10 @@ test('should report risky official nodes', async () => { type: nodeType, typeVersion: 1, position: [0, 0] as [number, number], + parameters: {}, }, ], - }; + }); return Db.collections.Workflow.save(details); }); diff --git a/packages/cli/test/integration/commands/import.cmd.test.ts b/packages/cli/test/integration/commands/import.cmd.test.ts index 93fbfeac8f..746af32a2f 100644 --- 
a/packages/cli/test/integration/commands/import.cmd.test.ts +++ b/packages/cli/test/integration/commands/import.cmd.test.ts @@ -1,7 +1,7 @@ import * as testDb from '../shared/testDb'; import { mockInstance } from '../shared/utils'; import { InternalHooks } from '@/InternalHooks'; -import { ImportWorkflowsCommand } from '../../../src/commands/import/workflow'; +import { ImportWorkflowsCommand } from '@/commands/import/workflow'; import * as Config from '@oclif/config'; beforeAll(async () => { diff --git a/packages/cli/test/integration/environments/VersionControl.test.ts b/packages/cli/test/integration/environments/VersionControl.test.ts index 87f6afd9e8..19c510ae10 100644 --- a/packages/cli/test/integration/environments/VersionControl.test.ts +++ b/packages/cli/test/integration/environments/VersionControl.test.ts @@ -4,14 +4,14 @@ import type { User } from '@db/entities/User'; import { License } from '@/License'; import * as testDb from '../shared/testDb'; import * as utils from '../shared/utils'; -import { VERSION_CONTROL_API_ROOT } from '@/environments/versionControl/constants'; +import { SOURCE_CONTROL_API_ROOT } from '@/environments/sourceControl/constants'; let owner: User; let authOwnerAgent: SuperAgentTest; beforeAll(async () => { - Container.get(License).isVersionControlLicensed = () => true; - const app = await utils.initTestServer({ endpointGroups: ['versionControl'] }); + Container.get(License).isSourceControlLicensed = () => true; + const app = await utils.initTestServer({ endpointGroups: ['sourceControl'] }); owner = await testDb.createOwner(); authOwnerAgent = utils.createAuthAgent(app)(owner); }); @@ -20,10 +20,10 @@ afterAll(async () => { await testDb.terminate(); }); -describe('GET /versionControl/preferences', () => { - test('should return Version Control preferences', async () => { +describe('GET /sourceControl/preferences', () => { + test('should return Source Control preferences', async () => { await authOwnerAgent - .get(`/${VERSION_CONTROL_API_ROOT}/preferences`) + .get(`/${SOURCE_CONTROL_API_ROOT}/preferences`) .expect(200) .expect((res) => { return 'repositoryUrl' in res.body && 'branchName' in res.body; diff --git a/packages/cli/test/integration/publicApi/credentials.test.ts b/packages/cli/test/integration/publicApi/credentials.test.ts index 23f52c294f..f6466ab6e5 100644 --- a/packages/cli/test/integration/publicApi/credentials.test.ts +++ b/packages/cli/test/integration/publicApi/credentials.test.ts @@ -77,7 +77,6 @@ describe('POST /credentials', () => { const response = await authOwnerAgent.post('/credentials').send(payload); expect(response.statusCode).toBe(200); - const { id, name, type } = response.body; expect(name).toBe(payload.name); diff --git a/packages/cli/test/integration/publicApi/workflows.test.ts b/packages/cli/test/integration/publicApi/workflows.test.ts index 1dd63488d2..89b0a5a8b2 100644 --- a/packages/cli/test/integration/publicApi/workflows.test.ts +++ b/packages/cli/test/integration/publicApi/workflows.test.ts @@ -10,6 +10,7 @@ import type { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; import { randomApiKey } from '../shared/random'; import * as utils from '../shared/utils'; import * as testDb from '../shared/testDb'; +// import { generateNanoId } from '@/databases/utils/generators'; let app: Application; let workflowOwnerRole: Role; @@ -177,7 +178,7 @@ describe('GET /workflows', () => { } // check that we really received a different result - expect(Number(response.body.data[0].id)).toBeLessThan(Number(response2.body.data[0].id)); + 
expect(response.body.data[0].id).not.toEqual(response2.body.data[0].id); }); test('should return all owned workflows filtered by tag', async () => { diff --git a/packages/cli/test/integration/shared/testDb.ts b/packages/cli/test/integration/shared/testDb.ts index de32f312cc..4e5f4e9528 100644 --- a/packages/cli/test/integration/shared/testDb.ts +++ b/packages/cli/test/integration/shared/testDb.ts @@ -19,7 +19,7 @@ import { InstalledPackages } from '@db/entities/InstalledPackages'; import type { Role } from '@db/entities/Role'; import type { TagEntity } from '@db/entities/TagEntity'; import type { User } from '@db/entities/User'; -import type { WorkflowEntity } from '@db/entities/WorkflowEntity'; +import { WorkflowEntity } from '@db/entities/WorkflowEntity'; import { RoleRepository } from '@db/repositories'; import type { ICredentialsDb } from '@/Interfaces'; @@ -32,6 +32,8 @@ import type { InstalledPackagePayload, PostgresSchemaSection, } from './types'; +import type { ExecutionData } from '@/databases/entities/ExecutionData'; +import { generateNanoId } from '@/databases/utils/generators'; export type TestDBType = 'postgres' | 'mysql'; @@ -332,17 +334,26 @@ export async function createManyExecutions( /** * Store a execution in the DB and assign it to a workflow. */ -async function createExecution(attributes: Partial, workflow: WorkflowEntity) { - const { data, finished, mode, startedAt, stoppedAt, waitTill } = attributes; +async function createExecution( + attributes: Partial, + workflow: WorkflowEntity, +) { + const { data, finished, mode, startedAt, stoppedAt, waitTill, status } = attributes; const execution = await Db.collections.Execution.save({ - data: data ?? '[]', finished: finished ?? true, mode: mode ?? 'manual', startedAt: startedAt ?? new Date(), - ...(workflow !== undefined && { workflowData: workflow, workflowId: workflow.id }), + ...(workflow !== undefined && { workflowId: workflow.id }), stoppedAt: stoppedAt ?? new Date(), waitTill: waitTill ?? null, + status, + }); + + await Db.collections.ExecutionData.save({ + data: data ?? '[]', + workflowData: workflow ?? {}, + executionId: execution.id, }); return execution; @@ -352,21 +363,21 @@ async function createExecution(attributes: Partial, workflow: W * Store a successful execution in the DB and assign it to a workflow. */ export async function createSuccessfulExecution(workflow: WorkflowEntity) { - return createExecution({ finished: true }, workflow); + return createExecution({ finished: true, status: 'success' }, workflow); } /** * Store an error execution in the DB and assign it to a workflow. */ export async function createErrorExecution(workflow: WorkflowEntity) { - return createExecution({ finished: false, stoppedAt: new Date() }, workflow); + return createExecution({ finished: false, stoppedAt: new Date(), status: 'failed' }, workflow); } /** * Store a waiting execution in the DB and assign it to a workflow. */ export async function createWaitingExecution(workflow: WorkflowEntity) { - return createExecution({ finished: false, waitTill: new Date() }, workflow); + return createExecution({ finished: false, waitTill: new Date(), status: 'waiting' }, workflow); } // ---------------------------------- @@ -377,6 +388,7 @@ export async function createTag(attributes: Partial = {}) { const { name } = attributes; return Db.collections.Tag.save({ + id: generateNanoId(), name: name ?? 
randomName(), ...attributes, }); @@ -403,7 +415,7 @@ export async function createManyWorkflows( export async function createWorkflow(attributes: Partial = {}, user?: User) { const { active, name, nodes, connections } = attributes; - const workflow = await Db.collections.Workflow.save({ + const workflowEntity = new WorkflowEntity({ active: active ?? false, name: name ?? 'test workflow', nodes: nodes ?? [ @@ -420,6 +432,8 @@ export async function createWorkflow(attributes: Partial = {}, u ...attributes, }); + const workflow = await Db.collections.Workflow.save(workflowEntity); + if (user) { await Db.collections.SharedWorkflow.save({ user, @@ -505,6 +519,7 @@ export async function getWorkflowSharing(workflow: WorkflowEntity) { export async function createVariable(key: string, value: string) { return Db.collections.Variables.save({ + id: generateNanoId(), key, value, }); @@ -518,7 +533,7 @@ export async function getVariableByKey(key: string) { }); } -export async function getVariableById(id: number) { +export async function getVariableById(id: string) { return Db.collections.Variables.findOne({ where: { id, diff --git a/packages/cli/test/integration/shared/types.d.ts b/packages/cli/test/integration/shared/types.d.ts index 62dfcd3a87..bfbf1fd767 100644 --- a/packages/cli/test/integration/shared/types.d.ts +++ b/packages/cli/test/integration/shared/types.d.ts @@ -23,7 +23,7 @@ type EndpointGroup = | 'nodes' | 'ldap' | 'saml' - | 'versionControl' + | 'sourceControl' | 'eventBus' | 'license' | 'variables'; diff --git a/packages/cli/test/integration/shared/utils.ts b/packages/cli/test/integration/shared/utils.ts index 359b46b2f1..ef6dbf2e96 100644 --- a/packages/cli/test/integration/shared/utils.ts +++ b/packages/cli/test/integration/shared/utils.ts @@ -79,9 +79,9 @@ import { SamlService } from '@/sso/saml/saml.service.ee'; import { SamlController } from '@/sso/saml/routes/saml.controller.ee'; import { EventBusController } from '@/eventbus/eventBus.controller'; import { License } from '@/License'; -import { VersionControlService } from '@/environments/versionControl/versionControl.service.ee'; -import { VersionControlController } from '@/environments/versionControl/versionControl.controller.ee'; -import { VersionControlPreferencesService } from '@/environments/versionControl/versionControlPreferences.service.ee'; +import { SourceControlService } from '@/environments/sourceControl/sourceControl.service.ee'; +import { SourceControlController } from '@/environments/sourceControl/sourceControl.controller.ee'; +import { SourceControlPreferencesService } from '@/environments/sourceControl/sourceControlPreferences.service.ee'; export const mockInstance = ( ctor: new (...args: any[]) => T, @@ -202,13 +202,13 @@ export async function initTestServer({ const samlService = Container.get(SamlService); registerController(testServer.app, config, new SamlController(samlService)); break; - case 'versionControl': - const versionControlService = Container.get(VersionControlService); - const versionControlPreferencesService = Container.get(VersionControlPreferencesService); + case 'sourceControl': + const sourceControlService = Container.get(SourceControlService); + const sourceControlPreferencesService = Container.get(SourceControlPreferencesService); registerController( testServer.app, config, - new VersionControlController(versionControlService, versionControlPreferencesService), + new SourceControlController(sourceControlService, sourceControlPreferencesService), ); break; case 'nodes': diff --git 
a/packages/cli/test/integration/workflows.controller.ee.test.ts b/packages/cli/test/integration/workflows.controller.ee.test.ts index 223f0bcc52..f2949c3ed5 100644 --- a/packages/cli/test/integration/workflows.controller.ee.test.ts +++ b/packages/cli/test/integration/workflows.controller.ee.test.ts @@ -586,7 +586,6 @@ describe('PATCH /workflows/:id - validate credential permissions to user', () => }, ], }); - expect(response.statusCode).toBe(400); }); diff --git a/packages/cli/test/integration/workflows.controller.test.ts b/packages/cli/test/integration/workflows.controller.test.ts index 5c53cc25db..62fe7d5e74 100644 --- a/packages/cli/test/integration/workflows.controller.test.ts +++ b/packages/cli/test/integration/workflows.controller.test.ts @@ -7,6 +7,7 @@ import * as UserManagementHelpers from '@/UserManagement/UserManagementHelper'; import * as utils from './shared/utils'; import * as testDb from './shared/testDb'; import { makeWorkflow, MOCK_PINDATA } from './shared/utils'; +import * as Db from '@/Db'; let ownerShell: User; let authOwnerAgent: SuperAgentTest; @@ -63,6 +64,8 @@ describe('GET /workflows/:id', () => { const { id } = workflowCreationResponse.body.data as { id: string }; + const sw = await Db.collections.SharedWorkflow.find(); + const workflowRetrievalResponse = await authOwnerAgent.get(`/workflows/${id}`); expect(workflowRetrievalResponse.statusCode).toBe(200); diff --git a/packages/cli/test/unit/ActiveExecutions.test.ts b/packages/cli/test/unit/ActiveExecutions.test.ts index db2e502c72..79b926441b 100644 --- a/packages/cli/test/unit/ActiveExecutions.test.ts +++ b/packages/cli/test/unit/ActiveExecutions.test.ts @@ -1,24 +1,23 @@ -import * as Db from '@/Db'; import { ActiveExecutions } from '@/ActiveExecutions'; -import { mocked } from 'jest-mock'; import PCancelable from 'p-cancelable'; import { v4 as uuid } from 'uuid'; import type { IExecuteResponsePromiseData, IRun } from 'n8n-workflow'; import { createDeferredPromise } from 'n8n-workflow'; import type { IWorkflowExecutionDataProcess } from '@/Interfaces'; +import { ExecutionRepository } from '@/databases/repositories'; +import Container from 'typedi'; const FAKE_EXECUTION_ID = '15'; const FAKE_SECOND_EXECUTION_ID = '20'; -jest.mock('@/Db', () => { - return { - collections: { - Execution: { - save: jest.fn(async () => ({ id: FAKE_EXECUTION_ID })), - update: jest.fn(), - }, - }, - }; +const updateExistingExecution = jest.fn(); +const createNewExecution = jest.fn(async () => { + return { id: FAKE_EXECUTION_ID }; +}); + +Container.set(ExecutionRepository, { + updateExistingExecution, + createNewExecution, }); describe('ActiveExecutions', () => { @@ -42,8 +41,8 @@ describe('ActiveExecutions', () => { expect(executionId).toBe(FAKE_EXECUTION_ID); expect(activeExecutions.getActiveExecutions().length).toBe(1); - expect(mocked(Db.collections.Execution.save)).toHaveBeenCalledTimes(1); - expect(mocked(Db.collections.Execution.update)).toHaveBeenCalledTimes(0); + expect(createNewExecution).toHaveBeenCalledTimes(1); + expect(updateExistingExecution).toHaveBeenCalledTimes(0); }); test('Should update execution if add is called with execution ID', async () => { @@ -56,8 +55,8 @@ describe('ActiveExecutions', () => { expect(executionId).toBe(FAKE_SECOND_EXECUTION_ID); expect(activeExecutions.getActiveExecutions().length).toBe(1); - expect(mocked(Db.collections.Execution.save)).toHaveBeenCalledTimes(0); - expect(mocked(Db.collections.Execution.update)).toHaveBeenCalledTimes(1); + expect(createNewExecution).toHaveBeenCalledTimes(0); + 
expect(updateExistingExecution).toHaveBeenCalledTimes(1); }); test('Should fail attaching execution to invalid executionId', async () => { diff --git a/packages/cli/test/unit/VersionControl.test.ts b/packages/cli/test/unit/VersionControl.test.ts index 827f5494ec..52f1da05d2 100644 --- a/packages/cli/test/unit/VersionControl.test.ts +++ b/packages/cli/test/unit/VersionControl.test.ts @@ -1,6 +1,6 @@ -import { generateSshKeyPair } from '../../src/environments/versionControl/versionControlHelper.ee'; +import { generateSshKeyPair } from '../../src/environments/sourceControl/sourceControlHelper.ee'; -describe('Version Control', () => { +describe('Source Control', () => { it('should generate an SSH key pair', () => { const keyPair = generateSshKeyPair(); expect(keyPair.privateKey).toBeTruthy(); diff --git a/packages/editor-ui/src/App.vue b/packages/editor-ui/src/App.vue index 664e1bc0d9..903aa812aa 100644 --- a/packages/editor-ui/src/App.vue +++ b/packages/editor-ui/src/App.vue @@ -46,7 +46,7 @@ import { useTemplatesStore, useNodeTypesStore, useCloudPlanStore, - useVersionControlStore, + useSourceControlStore, useUsageStore, } from '@/stores'; import { useHistoryHelper } from '@/composables/useHistoryHelper'; @@ -79,7 +79,7 @@ export default defineComponent({ useTemplatesStore, useUIStore, useUsersStore, - useVersionControlStore, + useSourceControlStore, useCloudPlanStore, useUsageStore, ), @@ -223,10 +223,10 @@ export default defineComponent({ void this.checkForCloudPlanData(); if ( - this.versionControlStore.isEnterpriseVersionControlEnabled && + this.sourceControlStore.isEnterpriseSourceControlEnabled && this.usersStore.isInstanceOwner ) { - await this.versionControlStore.getPreferences(); + await this.sourceControlStore.getPreferences(); } this.loading = false; diff --git a/packages/editor-ui/src/Interface.ts b/packages/editor-ui/src/Interface.ts index b79ca557a0..859a2ca104 100644 --- a/packages/editor-ui/src/Interface.ts +++ b/packages/editor-ui/src/Interface.ts @@ -1439,7 +1439,7 @@ export type SamlPreferencesExtractedData = { returnUrl: string; }; -export type VersionControlPreferences = { +export type SourceControlPreferences = { connected: boolean; repositoryUrl: string; authorName: string; @@ -1452,7 +1452,7 @@ export type VersionControlPreferences = { currentBranch?: string; }; -export interface VersionControlStatus { +export interface SourceControlStatus { ahead: number; behind: number; conflicted: string[]; @@ -1472,7 +1472,7 @@ export interface VersionControlStatus { tracking: null; } -export interface VersionControlAggregatedFile { +export interface SourceControlAggregatedFile { conflict: boolean; file: string; id: string; diff --git a/packages/editor-ui/src/__tests__/server/endpoints/index.ts b/packages/editor-ui/src/__tests__/server/endpoints/index.ts index 911ca9c11f..edc073f2a0 100644 --- a/packages/editor-ui/src/__tests__/server/endpoints/index.ts +++ b/packages/editor-ui/src/__tests__/server/endpoints/index.ts @@ -5,7 +5,7 @@ import { routesForCredentialTypes } from './credentialType'; import { routesForVariables } from './variable'; import { routesForSettings } from './settings'; import { routesForSSO } from './sso'; -import { routesForVersionControl } from './versionControl'; +import { routesForSourceControl } from './sourceControl'; const endpoints: Array<(server: Server) => void> = [ routesForCredentials, @@ -14,7 +14,7 @@ const endpoints: Array<(server: Server) => void> = [ routesForVariables, routesForSettings, routesForSSO, - routesForVersionControl, + 
routesForSourceControl, ]; export { endpoints }; diff --git a/packages/editor-ui/src/__tests__/server/endpoints/settings.ts b/packages/editor-ui/src/__tests__/server/endpoints/settings.ts index 07fbaf99dd..4462f6e3e5 100644 --- a/packages/editor-ui/src/__tests__/server/endpoints/settings.ts +++ b/packages/editor-ui/src/__tests__/server/endpoints/settings.ts @@ -16,7 +16,7 @@ const defaultSettings: IN8nUISettings = { logStreaming: false, advancedExecutionFilters: false, variables: true, - versionControl: false, + sourceControl: false, auditLogs: false, }, executionMode: 'regular', diff --git a/packages/editor-ui/src/__tests__/server/endpoints/versionControl.ts b/packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts similarity index 52% rename from packages/editor-ui/src/__tests__/server/endpoints/versionControl.ts rename to packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts index b3c6c60ac8..2b5fbe8799 100644 --- a/packages/editor-ui/src/__tests__/server/endpoints/versionControl.ts +++ b/packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts @@ -2,11 +2,11 @@ import type { Server, Request } from 'miragejs'; import { Response } from 'miragejs'; import { jsonParse } from 'n8n-workflow'; import type { AppSchema } from '@/__tests__/server/types'; -import type { VersionControlPreferences } from '@/Interface'; +import type { SourceControlPreferences } from '@/Interface'; -export function routesForVersionControl(server: Server) { - const versionControlApiRoot = '/rest/version-control'; - const defaultVersionControlPreferences: VersionControlPreferences = { +export function routesForSourceControl(server: Server) { + const sourceControlApiRoot = '/rest/source-control'; + const defaultSourceControlPreferences: SourceControlPreferences = { branchName: '', branches: [], authorName: '', @@ -18,37 +18,37 @@ export function routesForVersionControl(server: Server) { publicKey: 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHEX+25m', }; - server.post(`${versionControlApiRoot}/preferences`, (schema: AppSchema, request: Request) => { - const requestBody: Partial = jsonParse(request.requestBody); + server.post(`${sourceControlApiRoot}/preferences`, (schema: AppSchema, request: Request) => { + const requestBody: Partial = jsonParse(request.requestBody); return new Response( 200, {}, { data: { - ...defaultVersionControlPreferences, + ...defaultSourceControlPreferences, ...requestBody, }, }, ); }); - server.patch(`${versionControlApiRoot}/preferences`, (schema: AppSchema, request: Request) => { - const requestBody: Partial = jsonParse(request.requestBody); + server.patch(`${sourceControlApiRoot}/preferences`, (schema: AppSchema, request: Request) => { + const requestBody: Partial = jsonParse(request.requestBody); return new Response( 200, {}, { data: { - ...defaultVersionControlPreferences, + ...defaultSourceControlPreferences, ...requestBody, }, }, ); }); - server.get(`${versionControlApiRoot}/get-branches`, () => { + server.get(`${sourceControlApiRoot}/get-branches`, () => { return new Response( 200, {}, @@ -61,13 +61,13 @@ export function routesForVersionControl(server: Server) { ); }); - server.post(`${versionControlApiRoot}/disconnect`, () => { + server.post(`${sourceControlApiRoot}/disconnect`, () => { return new Response( 200, {}, { data: { - ...defaultVersionControlPreferences, + ...defaultSourceControlPreferences, branchName: '', connected: false, }, diff --git a/packages/editor-ui/src/__tests__/utils.ts b/packages/editor-ui/src/__tests__/utils.ts index 
932b955a22..24d7cd9d60 100644 --- a/packages/editor-ui/src/__tests__/utils.ts +++ b/packages/editor-ui/src/__tests__/utils.ts @@ -43,7 +43,7 @@ export const SETTINGS_STORE_DEFAULT_STATE: ISettingsState = { saml: false, logStreaming: false, variables: false, - versionControl: false, + sourceControl: false, auditLogs: false, }, executionMode: 'regular', diff --git a/packages/editor-ui/src/api/versionControl.ts b/packages/editor-ui/src/api/sourceControl.ts similarity index 50% rename from packages/editor-ui/src/api/versionControl.ts rename to packages/editor-ui/src/api/sourceControl.ts index a526ced47b..3ab78fe4b3 100644 --- a/packages/editor-ui/src/api/versionControl.ts +++ b/packages/editor-ui/src/api/sourceControl.ts @@ -1,40 +1,40 @@ import type { IRestApiContext, - VersionControlAggregatedFile, - VersionControlPreferences, - VersionControlStatus, + SourceControlAggregatedFile, + SourceControlPreferences, + SourceControlStatus, } from '@/Interface'; import { makeRestApiRequest } from '@/utils'; import type { IDataObject } from 'n8n-workflow'; -const versionControlApiRoot = '/version-control'; +const sourceControlApiRoot = '/source-control'; const createPreferencesRequestFn = (method: 'POST' | 'PATCH') => async ( context: IRestApiContext, - preferences: Partial, - ): Promise => - makeRestApiRequest(context, method, `${versionControlApiRoot}/preferences`, preferences); + preferences: Partial, + ): Promise => + makeRestApiRequest(context, method, `${sourceControlApiRoot}/preferences`, preferences); export const pushWorkfolder = async ( context: IRestApiContext, data: IDataObject, ): Promise => { - return makeRestApiRequest(context, 'POST', `${versionControlApiRoot}/push-workfolder`, data); + return makeRestApiRequest(context, 'POST', `${sourceControlApiRoot}/push-workfolder`, data); }; export const pullWorkfolder = async ( context: IRestApiContext, data: IDataObject, ): Promise => { - return makeRestApiRequest(context, 'POST', `${versionControlApiRoot}/pull-workfolder`, data); + return makeRestApiRequest(context, 'POST', `${sourceControlApiRoot}/pull-workfolder`, data); }; export const getBranches = async ( context: IRestApiContext, ): Promise<{ branches: string[]; currentBranch: string }> => { - return makeRestApiRequest(context, 'GET', `${versionControlApiRoot}/get-branches`); + return makeRestApiRequest(context, 'GET', `${sourceControlApiRoot}/get-branches`); }; export const savePreferences = createPreferencesRequestFn('POST'); @@ -42,29 +42,29 @@ export const updatePreferences = createPreferencesRequestFn('PATCH'); export const getPreferences = async ( context: IRestApiContext, -): Promise => { - return makeRestApiRequest(context, 'GET', `${versionControlApiRoot}/preferences`); +): Promise => { + return makeRestApiRequest(context, 'GET', `${sourceControlApiRoot}/preferences`); }; -export const getStatus = async (context: IRestApiContext): Promise => { - return makeRestApiRequest(context, 'GET', `${versionControlApiRoot}/status`); +export const getStatus = async (context: IRestApiContext): Promise => { + return makeRestApiRequest(context, 'GET', `${sourceControlApiRoot}/status`); }; export const getAggregatedStatus = async ( context: IRestApiContext, -): Promise => { - return makeRestApiRequest(context, 'GET', `${versionControlApiRoot}/get-status`); +): Promise => { + return makeRestApiRequest(context, 'GET', `${sourceControlApiRoot}/get-status`); }; export const disconnect = async ( context: IRestApiContext, keepKeyPair: boolean, ): Promise => { - return makeRestApiRequest(context, 
'POST', `${versionControlApiRoot}/disconnect`, { + return makeRestApiRequest(context, 'POST', `${sourceControlApiRoot}/disconnect`, { keepKeyPair, }); }; export const generateKeyPair = async (context: IRestApiContext): Promise => { - return makeRestApiRequest(context, 'POST', `${versionControlApiRoot}/generate-key-pair`); + return makeRestApiRequest(context, 'POST', `${sourceControlApiRoot}/generate-key-pair`); }; diff --git a/packages/editor-ui/src/components/MainHeader/MainHeader.vue b/packages/editor-ui/src/components/MainHeader/MainHeader.vue index 1ed7b9a974..24e5d16465 100644 --- a/packages/editor-ui/src/components/MainHeader/MainHeader.vue +++ b/packages/editor-ui/src/components/MainHeader/MainHeader.vue @@ -30,7 +30,7 @@ import { } from '@/constants'; import type { INodeUi, ITabBarItem } from '@/Interface'; import { workflowHelpers } from '@/mixins/workflowHelpers'; -import { useUIStore, useNDVStore, useVersionControlStore } from '@/stores'; +import { useUIStore, useNDVStore, useSourceControlStore } from '@/stores'; export default defineComponent({ name: 'MainHeader', @@ -53,7 +53,7 @@ export default defineComponent({ }; }, computed: { - ...mapStores(useNDVStore, useUIStore, useVersionControlStore), + ...mapStores(useNDVStore, useUIStore, useSourceControlStore), tabBarItems(): ITabBarItem[] { return [ { value: MAIN_HEADER_TABS.WORKFLOW, label: this.$locale.baseText('generic.editor') }, @@ -82,7 +82,7 @@ export default defineComponent({ return this.workflowsStore.activeWorkflowExecution as IExecutionsSummary; }, readOnly(): boolean { - return this.versionControlStore.preferences.branchReadOnly; + return this.sourceControlStore.preferences.branchReadOnly; }, }, mounted() { diff --git a/packages/editor-ui/src/components/MainSidebar.vue b/packages/editor-ui/src/components/MainSidebar.vue index b9477186bb..1657e5c888 100644 --- a/packages/editor-ui/src/components/MainSidebar.vue +++ b/packages/editor-ui/src/components/MainSidebar.vue @@ -46,7 +46,7 @@ }} - + diff --git a/packages/editor-ui/src/components/Modals.vue b/packages/editor-ui/src/components/Modals.vue index cc1ee84136..d0fccd5a52 100644 --- a/packages/editor-ui/src/components/Modals.vue +++ b/packages/editor-ui/src/components/Modals.vue @@ -112,9 +112,9 @@ - + @@ -145,7 +145,7 @@ import { IMPORT_CURL_MODAL_KEY, LOG_STREAM_MODAL_KEY, ASK_AI_MODAL_KEY, - VERSION_CONTROL_PUSH_MODAL_KEY, + SOURCE_CONTROL_PUSH_MODAL_KEY, } from '@/constants'; import AboutModal from './AboutModal.vue'; @@ -171,7 +171,7 @@ import ActivationModal from './ActivationModal.vue'; import ImportCurlModal from './ImportCurlModal.vue'; import WorkflowShareModal from './WorkflowShareModal.ee.vue'; import EventDestinationSettingsModal from '@/components/SettingsLogStreaming/EventDestinationSettingsModal.ee.vue'; -import VersionControlPushModal from '@/components/VersionControlPushModal.ee.vue'; +import SourceControlPushModal from '@/components/SourceControlPushModal.ee.vue'; export default defineComponent({ name: 'Modals', @@ -199,7 +199,7 @@ export default defineComponent({ WorkflowShareModal, ImportCurlModal, EventDestinationSettingsModal, - VersionControlPushModal, + SourceControlPushModal, }, data: () => ({ COMMUNITY_PACKAGE_CONFIRM_MODAL_KEY, @@ -224,7 +224,7 @@ export default defineComponent({ WORKFLOW_ACTIVE_MODAL_KEY, IMPORT_CURL_MODAL_KEY, LOG_STREAM_MODAL_KEY, - VERSION_CONTROL_PUSH_MODAL_KEY, + SOURCE_CONTROL_PUSH_MODAL_KEY, }), }); diff --git a/packages/editor-ui/src/components/SettingsSidebar.vue 
b/packages/editor-ui/src/components/SettingsSidebar.vue index 08f45963b4..31bc117fe5 100644 --- a/packages/editor-ui/src/components/SettingsSidebar.vue +++ b/packages/editor-ui/src/components/SettingsSidebar.vue @@ -83,12 +83,12 @@ export default defineComponent({ activateOnRouteNames: [VIEWS.AUDIT_LOGS], }, { - id: 'settings-version-control', + id: 'settings-source-control', icon: 'code-branch', - label: this.$locale.baseText('settings.versionControl.title'), + label: this.$locale.baseText('settings.sourceControl.title'), position: 'top', - available: this.canAccessVersionControl(), - activateOnRouteNames: [VIEWS.VERSION_CONTROL], + available: this.canAccessSourceControl(), + activateOnRouteNames: [VIEWS.SOURCE_CONTROL], }, { id: 'settings-sso', @@ -164,8 +164,8 @@ export default defineComponent({ canAccessUsageAndPlan(): boolean { return this.canUserAccessRouteByName(VIEWS.USAGE); }, - canAccessVersionControl(): boolean { - return this.canUserAccessRouteByName(VIEWS.VERSION_CONTROL); + canAccessSourceControl(): boolean { + return this.canUserAccessRouteByName(VIEWS.SOURCE_CONTROL); }, canAccessAuditLogs(): boolean { return this.canUserAccessRouteByName(VIEWS.AUDIT_LOGS); @@ -226,9 +226,9 @@ export default defineComponent({ void this.$router.push({ name: VIEWS.SSO_SETTINGS }); } break; - case 'settings-version-control': - if (this.$router.currentRoute.name !== VIEWS.VERSION_CONTROL) { - void this.$router.push({ name: VIEWS.VERSION_CONTROL }); + case 'settings-source-control': + if (this.$router.currentRoute.name !== VIEWS.SOURCE_CONTROL) { + void this.$router.push({ name: VIEWS.SOURCE_CONTROL }); } break; case 'settings-audit-logs': diff --git a/packages/editor-ui/src/components/VersionControlPushModal.ee.vue b/packages/editor-ui/src/components/SourceControlPushModal.ee.vue similarity index 72% rename from packages/editor-ui/src/components/VersionControlPushModal.ee.vue rename to packages/editor-ui/src/components/SourceControlPushModal.ee.vue index 0d549b1266..e27eee84ab 100644 --- a/packages/editor-ui/src/components/VersionControlPushModal.ee.vue +++ b/packages/editor-ui/src/components/SourceControlPushModal.ee.vue @@ -1,18 +1,18 @@ diff --git a/packages/editor-ui/src/views/NodeView.vue b/packages/editor-ui/src/views/NodeView.vue index fdd1eb6b3c..c9573d2b7a 100644 --- a/packages/editor-ui/src/views/NodeView.vue +++ b/packages/editor-ui/src/views/NodeView.vue @@ -281,7 +281,7 @@ import { useSettingsStore, useUIStore, useHistoryStore, - useVersionControlStore, + useSourceControlStore, } from '@/stores'; import * as NodeViewUtils from '@/utils/nodeViewUtils'; import { getAccountAge, getConnectionInfo, getNodeViewTab } from '@/utils'; @@ -483,10 +483,10 @@ export default defineComponent({ useEnvironmentsStore, useWorkflowsEEStore, useHistoryStore, - useVersionControlStore, + useSourceControlStore, ), readOnlyEnv(): boolean { - return this.versionControlStore.preferences.branchReadOnly; + return this.sourceControlStore.preferences.branchReadOnly; }, nativelyNumberSuffixedDefaults(): string[] { return this.rootStore.nativelyNumberSuffixedDefaults; diff --git a/packages/editor-ui/src/views/SettingsVersionControl.vue b/packages/editor-ui/src/views/SettingsSourceControl.vue similarity index 53% rename from packages/editor-ui/src/views/SettingsVersionControl.vue rename to packages/editor-ui/src/views/SettingsSourceControl.vue index f3b066d910..eee3020341 100644 --- a/packages/editor-ui/src/views/SettingsVersionControl.vue +++ b/packages/editor-ui/src/views/SettingsSourceControl.vue @@ -2,39 +2,39 
@@ import { computed, reactive, onBeforeMount, ref } from 'vue'; import type { Rule, RuleGroup } from 'n8n-design-system/types'; import { MODAL_CONFIRM, VALID_EMAIL_REGEX } from '@/constants'; -import { useUIStore, useVersionControlStore } from '@/stores'; +import { useUIStore, useSourceControlStore } from '@/stores'; import { useToast, useMessage, useLoadingService, useI18n } from '@/composables'; import CopyInput from '@/components/CopyInput.vue'; const { i18n: locale } = useI18n(); -const versionControlStore = useVersionControlStore(); +const sourceControlStore = useSourceControlStore(); const uiStore = useUIStore(); const toast = useToast(); const message = useMessage(); const loadingService = useLoadingService(); -const versionControlDocsSetupUrl = computed(() => - locale.baseText('settings.versionControl.docs.setup.url'), +const sourceControlDocsSetupUrl = computed(() => + locale.baseText('settings.sourceControl.docs.setup.url'), ); const isConnected = ref(false); const onConnect = async () => { loadingService.startLoading(); try { - await versionControlStore.savePreferences({ - authorName: versionControlStore.preferences.authorName, - authorEmail: versionControlStore.preferences.authorEmail, - repositoryUrl: versionControlStore.preferences.repositoryUrl, + await sourceControlStore.savePreferences({ + authorName: sourceControlStore.preferences.authorName, + authorEmail: sourceControlStore.preferences.authorEmail, + repositoryUrl: sourceControlStore.preferences.repositoryUrl, }); - await versionControlStore.getBranches(); + await sourceControlStore.getBranches(); isConnected.value = true; toast.showMessage({ - title: locale.baseText('settings.versionControl.toast.connected.title'), - message: locale.baseText('settings.versionControl.toast.connected.message'), + title: locale.baseText('settings.sourceControl.toast.connected.title'), + message: locale.baseText('settings.sourceControl.toast.connected.message'), type: 'success', }); } catch (error) { - toast.showError(error, locale.baseText('settings.versionControl.toast.connected.error')); + toast.showError(error, locale.baseText('settings.sourceControl.toast.connected.error')); } loadingService.stopLoading(); }; @@ -42,26 +42,26 @@ const onConnect = async () => { const onDisconnect = async () => { try { const confirmation = await message.confirm( - locale.baseText('settings.versionControl.modals.disconnect.message'), - locale.baseText('settings.versionControl.modals.disconnect.title'), + locale.baseText('settings.sourceControl.modals.disconnect.message'), + locale.baseText('settings.sourceControl.modals.disconnect.title'), { - confirmButtonText: locale.baseText('settings.versionControl.modals.disconnect.confirm'), - cancelButtonText: locale.baseText('settings.versionControl.modals.disconnect.cancel'), + confirmButtonText: locale.baseText('settings.sourceControl.modals.disconnect.confirm'), + cancelButtonText: locale.baseText('settings.sourceControl.modals.disconnect.cancel'), }, ); if (confirmation === MODAL_CONFIRM) { loadingService.startLoading(); - await versionControlStore.disconnect(true); + await sourceControlStore.disconnect(true); isConnected.value = false; toast.showMessage({ - title: locale.baseText('settings.versionControl.toast.disconnected.title'), - message: locale.baseText('settings.versionControl.toast.disconnected.message'), + title: locale.baseText('settings.sourceControl.toast.disconnected.title'), + message: locale.baseText('settings.sourceControl.toast.disconnected.message'), type: 'success', }); } } catch (error) { 
- toast.showError(error, locale.baseText('settings.versionControl.toast.disconnected.error')); + toast.showError(error, locale.baseText('settings.sourceControl.toast.disconnected.error')); } loadingService.stopLoading(); }; @@ -69,13 +69,13 @@ const onDisconnect = async () => { const onSave = async () => { loadingService.startLoading(); try { - await versionControlStore.updatePreferences({ - branchName: versionControlStore.preferences.branchName, - branchReadOnly: versionControlStore.preferences.branchReadOnly, - branchColor: versionControlStore.preferences.branchColor, + await sourceControlStore.updatePreferences({ + branchName: sourceControlStore.preferences.branchName, + branchReadOnly: sourceControlStore.preferences.branchReadOnly, + branchColor: sourceControlStore.preferences.branchColor, }); toast.showMessage({ - title: locale.baseText('settings.versionControl.saved.title'), + title: locale.baseText('settings.sourceControl.saved.title'), type: 'success', }); } catch (error) { @@ -85,20 +85,20 @@ const onSave = async () => { }; const onSelect = async (b: string) => { - if (b === versionControlStore.preferences.branchName) { + if (b === sourceControlStore.preferences.branchName) { return; } - versionControlStore.preferences.branchName = b; + sourceControlStore.preferences.branchName = b; }; const goToUpgrade = () => { - uiStore.goToUpgrade('version-control', 'upgrade-version-control'); + uiStore.goToUpgrade('source-control', 'upgrade-source-control'); }; onBeforeMount(() => { - if (versionControlStore.preferences.connected) { + if (sourceControlStore.preferences.connected) { isConnected.value = true; - void versionControlStore.getBranches(); + void sourceControlStore.getBranches(); } }); @@ -118,7 +118,7 @@ const repoUrlValidationRules: Array = [ name: 'MATCH_REGEX', config: { regex: /^(?!https?:\/\/)(?:git|ssh|git@[-\w.]+):(\/\/)?(.*?)(\.git)(\/?|\#[-\d\w._]+?)$/, - message: locale.baseText('settings.versionControl.repoUrlInvalid'), + message: locale.baseText('settings.sourceControl.repoUrlInvalid'), }, }, ]; @@ -131,7 +131,7 @@ const authorEmailValidationRules: Array = [ name: 'MATCH_REGEX', config: { regex: VALID_EMAIL_REGEX, - message: locale.baseText('settings.versionControl.authorEmailInvalid'), + message: locale.baseText('settings.sourceControl.authorEmailInvalid'), }, }, ]; @@ -146,35 +146,35 @@ const validForConnection = computed( async function refreshSshKey() { try { const confirmation = await message.confirm( - locale.baseText('settings.versionControl.modals.refreshSshKey.message'), - locale.baseText('settings.versionControl.modals.refreshSshKey.title'), + locale.baseText('settings.sourceControl.modals.refreshSshKey.message'), + locale.baseText('settings.sourceControl.modals.refreshSshKey.title'), { - confirmButtonText: locale.baseText('settings.versionControl.modals.refreshSshKey.confirm'), - cancelButtonText: locale.baseText('settings.versionControl.modals.refreshSshKey.cancel'), + confirmButtonText: locale.baseText('settings.sourceControl.modals.refreshSshKey.confirm'), + cancelButtonText: locale.baseText('settings.sourceControl.modals.refreshSshKey.cancel'), }, ); if (confirmation === MODAL_CONFIRM) { - await versionControlStore.generateKeyPair(); + await sourceControlStore.generateKeyPair(); toast.showMessage({ - title: locale.baseText('settings.versionControl.refreshSshKey.successful.title'), + title: locale.baseText('settings.sourceControl.refreshSshKey.successful.title'), type: 'success', }); } } catch (error) { - toast.showError(error, 
locale.baseText('settings.versionControl.refreshSshKey.error.title')); + toast.showError(error, locale.baseText('settings.sourceControl.refreshSshKey.error.title')); } } const refreshBranches = async () => { try { - await versionControlStore.getBranches(); + await sourceControlStore.getBranches(); toast.showMessage({ - title: locale.baseText('settings.versionControl.refreshBranches.success'), + title: locale.baseText('settings.sourceControl.refreshBranches.success'), type: 'success', }); } catch (error) { - toast.showError(error, locale.baseText('settings.versionControl.refreshBranches.error')); + toast.showError(error, locale.baseText('settings.sourceControl.refreshBranches.error')); } }; @@ -182,26 +182,26 @@ const refreshBranches = async () => {