Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-18 02:21:13 +00:00)
feat(core): Add folder synchronization to environments feature (#14005)
@@ -1,6 +1,6 @@
import { z } from 'zod';

const FileTypeSchema = z.enum(['credential', 'workflow', 'tags', 'variables', 'file']);
const FileTypeSchema = z.enum(['credential', 'workflow', 'tags', 'variables', 'file', 'folders']);

export const SOURCE_CONTROL_FILE_TYPE = FileTypeSchema.Values;

const FileStatusSchema = z.enum([
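Adding 'folders' to FileTypeSchema widens the derived file-type union that the rest of this diff relies on. A minimal sketch of what that buys (the type alias name is assumed from how it is used further down; zod's .Values exposes the enum members as a runtime object):

type SourceControlledFileType = z.infer<typeof FileTypeSchema>;
// 'credential' | 'workflow' | 'tags' | 'variables' | 'file' | 'folders'

// The Values object hands the literal back at runtime, e.g. when building status entries:
const folderType: SourceControlledFileType = SOURCE_CONTROL_FILE_TYPE.folders; // 'folders'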
@@ -6,6 +6,7 @@ import fsp from 'node:fs/promises';

import type { SharedCredentials } from '@/databases/entities/shared-credentials';
import type { SharedWorkflow } from '@/databases/entities/shared-workflow';
import type { FolderRepository } from '@/databases/repositories/folder.repository';
import type { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository';
import type { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
import type { TagRepository } from '@/databases/repositories/tag.repository';

@@ -23,6 +24,7 @@ describe('SourceControlExportService', () => {
  const tagRepository = mock<TagRepository>();
  const workflowTagMappingRepository = mock<WorkflowTagMappingRepository>();
  const variablesService = mock<VariablesService>();
  const folderRepository = mock<FolderRepository>();

  const service = new SourceControlExportService(
    mock(),
@@ -32,6 +34,7 @@ describe('SourceControlExportService', () => {
    sharedWorkflowRepository,
    workflowRepository,
    workflowTagMappingRepository,
    folderRepository,
    mock<InstanceSettings>({ n8nFolder: '/mock/n8n' }),
  );

@@ -190,6 +193,35 @@ describe('SourceControlExportService', () => {
    });
  });

  describe('exportFoldersToWorkFolder', () => {
    it('should export folders to work folder', async () => {
      // Arrange
      folderRepository.find.mockResolvedValue([
        mock({ updatedAt: new Date(), createdAt: new Date() }),
      ]);
      workflowRepository.find.mockResolvedValue([mock()]);

      // Act
      const result = await service.exportFoldersToWorkFolder();

      // Assert
      expect(result.count).toBe(1);
      expect(result.files).toHaveLength(1);
    });

    it('should not export empty folders', async () => {
      // Arrange
      folderRepository.find.mockResolvedValue([]);

      // Act
      const result = await service.exportFoldersToWorkFolder();

      // Assert
      expect(result.count).toBe(0);
      expect(result.files).toHaveLength(0);
    });
  });

  describe('exportVariablesToWorkFolder', () => {
    it('should export variables to work folder', async () => {
      // Arrange
@@ -4,14 +4,17 @@ import { type InstanceSettings } from 'n8n-core';
import fsp from 'node:fs/promises';

import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import type { FolderRepository } from '@/databases/repositories/folder.repository';
import type { WorkflowRepository } from '@/databases/repositories/workflow.repository';

import { SourceControlImportService } from '../source-control-import.service.ee';
import type { ExportableFolder } from '../types/exportable-folders';

jest.mock('fast-glob');

describe('SourceControlImportService', () => {
  const workflowRepository = mock<WorkflowRepository>();
  const folderRepository = mock<FolderRepository>();
  const service = new SourceControlImportService(
    mock(),
    mock(),
@@ -29,6 +32,7 @@ describe('SourceControlImportService', () => {
    mock(),
    mock(),
    mock(),
    folderRepository,
    mock<InstanceSettings>({ n8nFolder: '/mock/n8n' }),
  );

@@ -160,6 +164,43 @@ describe('SourceControlImportService', () => {
    });
  });

  describe('getRemoteFoldersAndMappingsFromFile', () => {
    it('should parse folders and mappings file correctly', async () => {
      globMock.mockResolvedValue(['/mock/folders.json']);

      const now = new Date();

      const mockFoldersData: {
        folders: ExportableFolder[];
      } = {
        folders: [
          {
            id: 'folder1',
            name: 'folder 1',
            parentFolderId: null,
            homeProjectId: 'project1',
            createdAt: now.toISOString(),
            updatedAt: now.toISOString(),
          },
        ],
      };

      fsReadFile.mockResolvedValue(JSON.stringify(mockFoldersData));

      const result = await service.getRemoteFoldersAndMappingsFromFile();

      expect(result.folders).toEqual(mockFoldersData.folders);
    });

    it('should return empty folders and mappings if no file found', async () => {
      globMock.mockResolvedValue([]);

      const result = await service.getRemoteFoldersAndMappingsFromFile();

      expect(result.folders).toHaveLength(0);
    });
  });

  describe('getLocalVersionIdsFromDb', () => {
    const now = new Date();
    jest.useFakeTimers({ now });
@@ -180,4 +221,31 @@ describe('SourceControlImportService', () => {
      expect(result[0].updatedAt).toBe(now.toISOString());
    });
  });

  describe('getLocalFoldersAndMappingsFromDb', () => {
    it('should return data from DB', async () => {
      // Arrange
      folderRepository.find.mockResolvedValue([
        mock({ createdAt: new Date(), updatedAt: new Date() }),
      ]);
      workflowRepository.find.mockResolvedValue([mock()]);

      // Act
      const result = await service.getLocalFoldersAndMappingsFromDb();

      // Assert
      expect(result.folders).toHaveLength(1);
      expect(result.folders[0]).toHaveProperty('id');
      expect(result.folders[0]).toHaveProperty('name');
      expect(result.folders[0]).toHaveProperty('parentFolderId');
      expect(result.folders[0]).toHaveProperty('homeProjectId');
    });
  });

  describe('importFoldersFromWorkFolder', () => {
    // add tests for this.
  });
});
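The importFoldersFromWorkFolder block above is still a TODO ("add tests for this"). A rough test sketch for it, assuming the fsReadFile mock used above and a projectRepository mock wired into the service the same way folderRepository is; the User, SourceControlledFile and project values below are illustrative and not part of the commit:

describe('importFoldersFromWorkFolder (sketch)', () => {
  it('should upsert folders and then link parent folders', async () => {
    // Arrange: one folder in the remote folders.json
    const foldersData = {
      folders: [
        {
          id: 'folder1',
          name: 'folder 1',
          parentFolderId: null,
          homeProjectId: 'project1',
          createdAt: new Date().toISOString(),
          updatedAt: new Date().toISOString(),
        },
      ],
    };
    fsReadFile.mockResolvedValue(JSON.stringify(foldersData));
    projectRepository.find.mockResolvedValue([mock({ id: 'project1' })]);
    projectRepository.getPersonalProjectForUserOrFail.mockResolvedValue(mock({ id: 'personal' }));

    // Act
    const result = await service.importFoldersFromWorkFolder(
      mock<User>({ id: 'user1' }),
      mock<SourceControlledFile>({ file: '/mock/n8n/git/folders.json' }),
    );

    // Assert: first pass upserts the folder, second pass sets the parent relation
    expect(folderRepository.upsert).toHaveBeenCalledTimes(1);
    expect(folderRepository.update).toHaveBeenCalledTimes(1);
    expect(result?.folders).toHaveLength(1);
  });
});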
@@ -3,9 +3,11 @@ import { Container } from '@n8n/di';
import { mock } from 'jest-mock-extended';
import { InstanceSettings } from 'n8n-core';

import type { FolderWithWorkflowAndSubFolderCount } from '@/databases/entities/folder';
import type { TagEntity } from '@/databases/entities/tag-entity';
import type { User } from '@/databases/entities/user';
import type { Variables } from '@/databases/entities/variables';
import type { FolderRepository } from '@/databases/repositories/folder.repository';
import type { TagRepository } from '@/databases/repositories/tag.repository';
import { SourceControlPreferencesService } from '@/environments.ee/source-control/source-control-preferences.service.ee';
import { SourceControlService } from '@/environments.ee/source-control/source-control.service.ee';

@@ -24,6 +26,7 @@ describe('SourceControlService', () => {
  );
  const sourceControlImportService = mock<SourceControlImportService>();
  const tagRepository = mock<TagRepository>();
  const folderRepository = mock<FolderRepository>();
  const sourceControlService = new SourceControlService(
    mock(),
    mock(),
@@ -31,6 +34,7 @@ describe('SourceControlService', () => {
    mock(),
    sourceControlImportService,
    tagRepository,
    folderRepository,
    mock(),
  );

@@ -171,6 +175,30 @@ describe('SourceControlService', () => {
        mappings: [],
      });
      // Define a folder that only exists locally.
      // Pulling this would delete it, so it should be marked as a conflict.
      // Pushing it is conflict-free.
      const folder = mock<FolderWithWorkflowAndSubFolderCount>({
        updatedAt: new Date(),
        createdAt: new Date(),
      });
      folderRepository.find.mockResolvedValue([folder]);
      sourceControlImportService.getRemoteFoldersAndMappingsFromFile.mockResolvedValue({
        folders: [],
      });
      sourceControlImportService.getLocalFoldersAndMappingsFromDb.mockResolvedValue({
        folders: [
          {
            id: folder.id,
            name: folder.name,
            parentFolderId: folder.parentFolder?.id ?? '',
            homeProjectId: folder.homeProject.id,
            createdAt: folder.createdAt.toISOString(),
            updatedAt: folder.updatedAt.toISOString(),
          },
        ],
      });

      // ACT
      const pullResult = await sourceControlService.getStatus({
        direction: 'pull',
@@ -185,8 +213,6 @@ describe('SourceControlService', () => {
      });

      // ASSERT
      console.log(pullResult);
      console.log(pushResult);

      if (!Array.isArray(pullResult)) {
        fail('Expected pullResult to be an array.');
@@ -195,8 +221,8 @@ describe('SourceControlService', () => {
        fail('Expected pushResult to be an array.');
      }

      expect(pullResult).toHaveLength(4);
      expect(pushResult).toHaveLength(4);
      expect(pullResult).toHaveLength(5);
      expect(pushResult).toHaveLength(5);

      expect(pullResult.find((i) => i.type === 'workflow')).toHaveProperty('conflict', true);
      expect(pushResult.find((i) => i.type === 'workflow')).toHaveProperty('conflict', false);
@@ -209,6 +235,9 @@ describe('SourceControlService', () => {

      expect(pullResult.find((i) => i.type === 'tags')).toHaveProperty('conflict', true);
      expect(pushResult.find((i) => i.type === 'tags')).toHaveProperty('conflict', false);

      expect(pullResult.find((i) => i.type === 'folders')).toHaveProperty('conflict', true);
      expect(pushResult.find((i) => i.type === 'folders')).toHaveProperty('conflict', false);
    });
  });
});
@@ -5,6 +5,7 @@ export const SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER = 'workflows';
export const SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER = 'credential_stubs';
export const SOURCE_CONTROL_VARIABLES_EXPORT_FILE = 'variable_stubs.json';
export const SOURCE_CONTROL_TAGS_EXPORT_FILE = 'tags.json';
export const SOURCE_CONTROL_FOLDERS_EXPORT_FILE = 'folders.json';
export const SOURCE_CONTROL_OWNERS_EXPORT_FILE = 'workflow_owners.json';
export const SOURCE_CONTROL_SSH_FOLDER = 'ssh';
export const SOURCE_CONTROL_SSH_KEY_NAME = 'key';
@@ -1,11 +1,14 @@
import type { SourceControlledFile } from '@n8n/api-types';
import { Service } from '@n8n/di';
// eslint-disable-next-line n8n-local-rules/misplaced-n8n-typeorm-import
import { In } from '@n8n/typeorm';
import { rmSync } from 'fs';
import { Credentials, InstanceSettings, Logger } from 'n8n-core';
import { UnexpectedError, type ICredentialDataDecryptedObject } from 'n8n-workflow';
import { writeFile as fsWriteFile, rm as fsRm } from 'node:fs/promises';
import path from 'path';

import { FolderRepository } from '@/databases/repositories/folder.repository';
import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository';
import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
import { TagRepository } from '@/databases/repositories/tag.repository';
@@ -22,6 +25,7 @@ import {
} from './constants';
import {
  getCredentialExportPath,
  getFoldersPath,
  getVariablesPath,
  getWorkflowExportPath,
  sourceControlFoldersExistCheck,
@@ -49,6 +53,7 @@ export class SourceControlExportService {
    private readonly sharedWorkflowRepository: SharedWorkflowRepository,
    private readonly workflowRepository: WorkflowRepository,
    private readonly workflowTagMappingRepository: WorkflowTagMappingRepository,
    private readonly folderRepository: FolderRepository,
    instanceSettings: InstanceSettings,
  ) {
    this.gitFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_GIT_FOLDER);
@@ -100,6 +105,7 @@ export class SourceControlExportService {
        triggerCount: e.triggerCount,
        versionId: e.versionId,
        owner: owners[e.id],
        parentFolderId: e.parentFolder?.id ?? null,
      };
      this.logger.debug(`Writing workflow ${e.id} to ${fileName}`);
      return await fsWriteFile(fileName, JSON.stringify(sanitizedWorkflow, null, 2));
@@ -112,7 +118,10 @@ export class SourceControlExportService {
      sourceControlFoldersExistCheck([this.workflowExportFolder]);
      const workflowIds = candidates.map((e) => e.id);
      const sharedWorkflows = await this.sharedWorkflowRepository.findByWorkflowIds(workflowIds);
      const workflows = await this.workflowRepository.findByIds(workflowIds);
      const workflows = await this.workflowRepository.find({
        where: { id: In(workflowIds) },
        relations: ['parentFolder'],
      });

      // determine owner of each workflow to be exported
      const owners: Record<string, ResourceOwner> = {};
@@ -201,6 +210,66 @@ export class SourceControlExportService {
    }
  }

  async exportFoldersToWorkFolder(): Promise<ExportResult> {
    try {
      sourceControlFoldersExistCheck([this.gitFolder]);
      const folders = await this.folderRepository.find({
        relations: ['parentFolder', 'homeProject'],
        select: {
          id: true,
          name: true,
          createdAt: true,
          updatedAt: true,
          parentFolder: {
            id: true,
          },
          homeProject: {
            id: true,
          },
        },
      });

      if (folders.length === 0) {
        return {
          count: 0,
          folder: this.gitFolder,
          files: [],
        };
      }

      const fileName = getFoldersPath(this.gitFolder);
      await fsWriteFile(
        fileName,
        JSON.stringify(
          {
            folders: folders.map((f) => ({
              id: f.id,
              name: f.name,
              parentFolderId: f.parentFolder?.id ?? null,
              homeProjectId: f.homeProject.id,
              createdAt: f.createdAt.toISOString(),
              updatedAt: f.updatedAt.toISOString(),
            })),
          },
          null,
          2,
        ),
      );
      return {
        count: folders.length,
        folder: this.gitFolder,
        files: [
          {
            id: '',
            name: fileName,
          },
        ],
      };
    } catch (error) {
      throw new UnexpectedError('Failed to export folders to work folder', { cause: error });
    }
  }

  async exportTagsToWorkFolder(): Promise<ExportResult> {
    try {
      sourceControlFoldersExistCheck([this.gitFolder]);
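For orientation, exportFoldersToWorkFolder above serializes every folder into a single folders.json in the git work folder. A sketch of the file's shape, typed with the ExportableFolder type added later in this commit (the id, name and timestamps are made up):

// Example content of <gitFolder>/folders.json (illustrative values only)
const exampleFoldersFile: { folders: ExportableFolder[] } = {
  folders: [
    {
      id: 'folder1',
      name: 'Marketing',
      parentFolderId: null,
      homeProjectId: 'project1',
      createdAt: '2024-12-04T11:29:22.095Z',
      updatedAt: '2024-12-04T11:29:22.095Z',
    },
  ],
};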
@@ -11,6 +11,7 @@ import { License } from '@/license';
import { isContainedWithin } from '@/utils/path-util';

import {
  SOURCE_CONTROL_FOLDERS_EXPORT_FILE,
  SOURCE_CONTROL_GIT_KEY_COMMENT,
  SOURCE_CONTROL_TAGS_EXPORT_FILE,
  SOURCE_CONTROL_VARIABLES_EXPORT_FILE,
@@ -41,6 +42,10 @@ export function getTagsPath(gitFolder: string): string {
  return path.join(gitFolder, SOURCE_CONTROL_TAGS_EXPORT_FILE);
}

export function getFoldersPath(gitFolder: string): string {
  return path.join(gitFolder, SOURCE_CONTROL_FOLDERS_EXPORT_FILE);
}

export function sourceControlFoldersExistCheck(
  folders: string[],
  createIfNotExists = true,
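The new getFoldersPath helper mirrors getTagsPath: it only joins the work-folder path with the folders.json constant. For example (the path below is illustrative):

getFoldersPath('/home/user/.n8n/git');
// => '/home/user/.n8n/git/folders.json' (SOURCE_CONTROL_FOLDERS_EXPORT_FILE)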
@@ -17,6 +17,7 @@ import type { User } from '@/databases/entities/user';
import type { Variables } from '@/databases/entities/variables';
import type { WorkflowTagMapping } from '@/databases/entities/workflow-tag-mapping';
import { CredentialsRepository } from '@/databases/repositories/credentials.repository';
import { FolderRepository } from '@/databases/repositories/folder.repository';
import { ProjectRepository } from '@/databases/repositories/project.repository';
import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository';
import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
@@ -33,6 +34,7 @@ import { WorkflowService } from '@/workflows/workflow.service';

import {
  SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
  SOURCE_CONTROL_FOLDERS_EXPORT_FILE,
  SOURCE_CONTROL_GIT_FOLDER,
  SOURCE_CONTROL_TAGS_EXPORT_FILE,
  SOURCE_CONTROL_VARIABLES_EXPORT_FILE,
@@ -40,6 +42,7 @@ import {
} from './constants';
import { getCredentialExportPath, getWorkflowExportPath } from './source-control-helper.ee';
import type { ExportableCredential } from './types/exportable-credential';
import type { ExportableFolder } from './types/exportable-folders';
import type { ResourceOwner } from './types/resource-owner';
import type { SourceControlWorkflowVersionId } from './types/source-control-workflow-version-id';
import { VariablesService } from '../variables/variables.service.ee';
@@ -69,6 +72,7 @@ export class SourceControlImportService {
    private readonly workflowService: WorkflowService,
    private readonly credentialsService: CredentialsService,
    private readonly tagService: TagService,
    private readonly folderRepository: FolderRepository,
    instanceSettings: InstanceSettings,
  ) {
    this.gitFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_GIT_FOLDER);
@@ -88,6 +92,7 @@ export class SourceControlImportService {
      remoteWorkflowFiles.map(async (file) => {
        this.logger.debug(`Parsing workflow file ${file}`);
        const remote = jsonParse<IWorkflowToImport>(await fsReadFile(file, { encoding: 'utf8' }));

        if (!remote?.id) {
          return undefined;
        }
@@ -95,6 +100,7 @@
          id: remote.id,
          versionId: remote.versionId,
          name: remote.name,
          parentFolderId: remote.parentFolderId,
          remoteId: remote.id,
          filename: getWorkflowExportPath(remote.id, this.workflowExportFolder),
        } as SourceControlWorkflowVersionId;
@@ -107,7 +113,16 @@

  async getLocalVersionIdsFromDb(): Promise<SourceControlWorkflowVersionId[]> {
    const localWorkflows = await this.workflowRepository.find({
      select: ['id', 'name', 'versionId', 'updatedAt'],
      relations: ['parentFolder'],
      select: {
        id: true,
        versionId: true,
        name: true,
        updatedAt: true,
        parentFolder: {
          id: true,
        },
      },
    });
    return localWorkflows.map((local) => {
      let updatedAt: Date;
@@ -127,6 +142,7 @@
        versionId: local.versionId,
        name: local.name,
        localId: local.id,
        parentFolderId: local.parentFolder?.id ?? null,
        filename: getWorkflowExportPath(local.id, this.workflowExportFolder),
        updatedAt: updatedAt.toISOString(),
      };
@@ -190,6 +206,52 @@
    return await this.variablesService.getAllCached();
  }

  async getRemoteFoldersAndMappingsFromFile(): Promise<{
    folders: ExportableFolder[];
  }> {
    const foldersFile = await glob(SOURCE_CONTROL_FOLDERS_EXPORT_FILE, {
      cwd: this.gitFolder,
      absolute: true,
    });
    if (foldersFile.length > 0) {
      this.logger.debug(`Importing folders from file ${foldersFile[0]}`);
      const mappedFolders = jsonParse<{
        folders: ExportableFolder[];
      }>(await fsReadFile(foldersFile[0], { encoding: 'utf8' }), {
        fallbackValue: { folders: [] },
      });
      return mappedFolders;
    }
    return { folders: [] };
  }

  async getLocalFoldersAndMappingsFromDb(): Promise<{
    folders: ExportableFolder[];
  }> {
    const localFolders = await this.folderRepository.find({
      relations: ['parentFolder', 'homeProject'],
      select: {
        id: true,
        name: true,
        createdAt: true,
        updatedAt: true,
        parentFolder: { id: true },
        homeProject: { id: true },
      },
    });

    return {
      folders: localFolders.map((f) => ({
        id: f.id,
        name: f.name,
        parentFolderId: f.parentFolder?.id ?? null,
        homeProjectId: f.homeProject.id,
        createdAt: f.createdAt.toISOString(),
        updatedAt: f.updatedAt.toISOString(),
      })),
    };
  }

  async getRemoteTagsAndMappingsFromFile(): Promise<{
    tags: TagEntity[];
    mappings: WorkflowTagMapping[];
@@ -229,6 +291,10 @@
    const existingWorkflows = await this.workflowRepository.findByIds(candidateIds, {
      fields: ['id', 'name', 'versionId', 'active'],
    });

    const folders = await this.folderRepository.find({ select: ['id'] });
    const existingFolderIds = folders.map((f) => f.id);

    const allSharedWorkflows = await this.sharedWorkflowRepository.findWithFields(candidateIds, {
      select: ['workflowId', 'role', 'projectId'],
    });
@@ -239,7 +305,7 @@
    // We must iterate over the array and run the whole process workflow by workflow
    for (const candidate of candidates) {
      this.logger.debug(`Parsing workflow file ${candidate.file}`);
      const importedWorkflow = jsonParse<IWorkflowToImport & { owner: string }>(
      const importedWorkflow = jsonParse<IWorkflowToImport>(
        await fsReadFile(candidate.file, { encoding: 'utf8' }),
      );
      if (!importedWorkflow?.id) {
@@ -247,8 +313,18 @@
      }
      const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id);
      importedWorkflow.active = existingWorkflow?.active ?? false;

      const parentFolderId = importedWorkflow.parentFolderId ?? '';

      this.logger.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`);
      const upsertResult = await this.workflowRepository.upsert({ ...importedWorkflow }, ['id']);

      const upsertResult = await this.workflowRepository.upsert(
        {
          ...importedWorkflow,
          parentFolder: existingFolderIds.includes(parentFolderId) ? { id: parentFolderId } : null,
        },
        ['id'],
      );
      if (upsertResult?.identifiers?.length !== 1) {
        throw new UnexpectedError('Failed to upsert workflow', {
          extra: { workflowId: importedWorkflow.id ?? 'new' },
@@ -440,6 +516,62 @@
    return mappedTags;
  }

  async importFoldersFromWorkFolder(user: User, candidate: SourceControlledFile) {
    let mappedFolders;
    const projects = await this.projectRepository.find();
    const personalProject = await this.projectRepository.getPersonalProjectForUserOrFail(user.id);

    try {
      this.logger.debug(`Importing folders from file ${candidate.file}`);
      mappedFolders = jsonParse<{
        folders: ExportableFolder[];
      }>(await fsReadFile(candidate.file, { encoding: 'utf8' }), {
        fallbackValue: { folders: [] },
      });
    } catch (e) {
      const error = ensureError(e);
      this.logger.error(`Failed to import folders from file ${candidate.file}`, { error });
      return;
    }

    if (mappedFolders.folders.length === 0) {
      return;
    }

    await Promise.all(
      mappedFolders.folders.map(async (folder) => {
        const folderCopy = this.folderRepository.create({
          id: folder.id,
          name: folder.name,
          homeProject: {
            id: projects.find((p) => p.id === folder.homeProjectId)?.id ?? personalProject.id,
          },
        });

        await this.folderRepository.upsert(folderCopy, {
          skipUpdateIfNoValuesChanged: true,
          conflictPaths: { id: true },
        });
      }),
    );

    // After folders are created, setup the parentFolder relationship
    await Promise.all(
      mappedFolders.folders.map(async (folder) => {
        await this.folderRepository.update(
          { id: folder.id },
          {
            parentFolder: folder.parentFolderId ? { id: folder.parentFolderId } : null,
            createdAt: folder.createdAt,
            updatedAt: folder.updatedAt,
          },
        );
      }),
    );

    return mappedFolders;
  }

  async importVariablesFromWorkFolder(
    candidate: SourceControlledFile,
    valueOverrides?: {
@@ -531,6 +663,12 @@
    }
  }

  async deleteFoldersNotInWorkfolder(candidates: SourceControlledFile[]) {
    for (const candidate of candidates) {
      await this.folderRepository.delete(candidate.id);
    }
  }

  private async findOrCreateOwnerProject(owner: ResourceOwner): Promise<Project | null> {
    if (typeof owner === 'string' || owner.type === 'personal') {
      const email = typeof owner === 'string' ? owner : owner.personalEmail;
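Note that importFoldersFromWorkFolder deliberately imports in two passes: it first upserts every folder without a parent, and only a second pass sets parentFolder (plus the original timestamps). A plausible reason, sketched with made-up data, is that folders.json carries no ordering guarantee, so a child can appear before its parent; linking in a separate pass means the referenced parent row already exists by then.

// Made-up example: the child is listed before its parent in folders.json.
// Pass 1 creates both rows without parents; pass 2 can then safely link them.
const foldersOutOfOrder: ExportableFolder[] = [
  {
    id: 'newsletter',
    name: 'Newsletter',
    parentFolderId: 'marketing', // parent defined further down the file
    homeProjectId: 'project1',
    createdAt: '2024-12-04T11:29:22.095Z',
    updatedAt: '2024-12-04T11:29:22.095Z',
  },
  {
    id: 'marketing',
    name: 'Marketing',
    parentFolderId: null,
    homeProjectId: 'project1',
    createdAt: '2024-12-04T11:29:22.095Z',
    updatedAt: '2024-12-04T11:29:22.095Z',
  },
];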
@@ -13,6 +13,7 @@ import type { PushResult } from 'simple-git';
import type { TagEntity } from '@/databases/entities/tag-entity';
import type { User } from '@/databases/entities/user';
import type { Variables } from '@/databases/entities/variables';
import { FolderRepository } from '@/databases/repositories/folder.repository';
import { TagRepository } from '@/databases/repositories/tag.repository';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { EventService } from '@/events/event.service';
@@ -25,6 +26,7 @@ import {
import { SourceControlExportService } from './source-control-export.service.ee';
import { SourceControlGitService } from './source-control-git.service.ee';
import {
  getFoldersPath,
  getTagsPath,
  getTrackingInformationFromPostPushResult,
  getTrackingInformationFromPrePushResult,
@@ -36,6 +38,7 @@ import {
import { SourceControlImportService } from './source-control-import.service.ee';
import { SourceControlPreferencesService } from './source-control-preferences.service.ee';
import type { ExportableCredential } from './types/exportable-credential';
import type { ExportableFolder } from './types/exportable-folders';
import type { ImportResult } from './types/import-result';
import type { SourceControlGetStatus } from './types/source-control-get-status';
import type { SourceControlPreferences } from './types/source-control-preferences';
@@ -57,6 +60,7 @@ export class SourceControlService {
    private sourceControlExportService: SourceControlExportService,
    private sourceControlImportService: SourceControlImportService,
    private tagRepository: TagRepository,
    private folderRepository: FolderRepository,
    private readonly eventService: EventService,
  ) {
    const { gitFolder, sshFolder, sshKeyName } = sourceControlPreferencesService;
@@ -255,7 +259,14 @@ export class SourceControlService {

    const filesToBePushed = new Set<string>();
    const filesToBeDeleted = new Set<string>();
    filesToPush.forEach((e) => {

    /*
      Exclude tags, variables and folders JSON file from being deleted as
      we keep track of them in a single file unlike workflows and credentials
    */
    filesToPush
      .filter((f) => ['workflow', 'credential'].includes(f.type))
      .forEach((e) => {
        if (e.status !== 'deleted') {
          filesToBePushed.add(e.file);
        } else {
@@ -284,11 +295,21 @@
      });
    }

    if (filesToPush.find((e) => e.type === 'tags')) {
    const tagChanges = filesToPush.find((e) => e.type === 'tags');
    if (tagChanges) {
      filesToBePushed.add(tagChanges.file);
      await this.sourceControlExportService.exportTagsToWorkFolder();
    }

    if (filesToPush.find((e) => e.type === 'variables')) {
    const folderChanges = filesToPush.find((e) => e.type === 'folders');
    if (folderChanges) {
      filesToBePushed.add(folderChanges.file);
      await this.sourceControlExportService.exportFoldersToWorkFolder();
    }

    const variablesChanges = filesToPush.find((e) => e.type === 'variables');
    if (variablesChanges) {
      filesToBePushed.add(variablesChanges.file);
      await this.sourceControlExportService.exportVariablesToWorkFolder();
    }

@@ -354,6 +375,14 @@
    return files.find((e) => e.type === 'variables' && e.status !== 'deleted');
  }

  private getFoldersToImport(files: SourceControlledFile[]): SourceControlledFile | undefined {
    return files.find((e) => e.type === 'folders' && e.status !== 'deleted');
  }

  private getFoldersToDelete(files: SourceControlledFile[]): SourceControlledFile[] {
    return files.filter((e) => e.type === 'folders' && e.status === 'deleted');
  }

  private getVariablesToDelete(files: SourceControlledFile[]): SourceControlledFile[] {
    return files.filter((e) => e.type === 'variables' && e.status === 'deleted');
  }
@@ -381,6 +410,12 @@
      }
    }

    // Make sure the folders get processed first as the workflows depend on them
    const foldersToBeImported = this.getFoldersToImport(statusResult);
    if (foldersToBeImported) {
      await this.sourceControlImportService.importFoldersFromWorkFolder(user, foldersToBeImported);
    }

    const workflowsToBeImported = this.getWorkflowsToImport(statusResult);
    await this.sourceControlImportService.importWorkflowFromWorkFolder(
      workflowsToBeImported,
@@ -416,6 +451,9 @@
    const variablesToBeDeleted = this.getVariablesToDelete(statusResult);
    await this.sourceControlImportService.deleteVariablesNotInWorkfolder(variablesToBeDeleted);

    const foldersToBeDeleted = this.getFoldersToDelete(statusResult);
    await this.sourceControlImportService.deleteFoldersNotInWorkfolder(foldersToBeDeleted);

    // #region Tracking Information
    this.eventService.emit(
      'source-control-user-finished-pull-ui',
@@ -469,6 +507,9 @@
      mappingsMissingInRemote,
    } = await this.getStatusTagsMappings(options, sourceControlledFiles);

    const { foldersMissingInLocal, foldersMissingInRemote, foldersModifiedInEither } =
      await this.getStatusFoldersMapping(options, sourceControlledFiles);

    // #region Tracking Information
    if (options.direction === 'push') {
      this.eventService.emit(
@@ -501,6 +542,9 @@
        tagsModifiedInEither,
        mappingsMissingInLocal,
        mappingsMissingInRemote,
        foldersMissingInLocal,
        foldersMissingInRemote,
        foldersModifiedInEither,
        sourceControlledFiles,
      };
    } else {
@@ -526,7 +570,9 @@
    const wfModifiedInEither: SourceControlWorkflowVersionId[] = [];
    wfLocalVersionIds.forEach((local) => {
      const mismatchingIds = wfRemoteVersionIds.find(
        (remote) => remote.id === local.id && remote.versionId !== local.versionId,
        (remote) =>
          remote.id === local.id &&
          (remote.versionId !== local.versionId || remote.parentFolderId !== local.parentFolderId),
      );
      let name = (options?.preferLocalVersion ? local?.name : mismatchingIds?.name) ?? 'Workflow';
      if (local.name && mismatchingIds?.name && local.name !== mismatchingIds.name) {
@@ -840,6 +886,88 @@
    };
  }

  private async getStatusFoldersMapping(
    options: SourceControlGetStatus,
    sourceControlledFiles: SourceControlledFile[],
  ) {
    const lastUpdatedFolder = await this.folderRepository.find({
      order: { updatedAt: 'DESC' },
      take: 1,
      select: ['updatedAt'],
    });

    const foldersMappingsRemote =
      await this.sourceControlImportService.getRemoteFoldersAndMappingsFromFile();
    const foldersMappingsLocal =
      await this.sourceControlImportService.getLocalFoldersAndMappingsFromDb();

    const foldersMissingInLocal = foldersMappingsRemote.folders.filter(
      (remote) => foldersMappingsLocal.folders.findIndex((local) => local.id === remote.id) === -1,
    );

    const foldersMissingInRemote = foldersMappingsLocal.folders.filter(
      (local) => foldersMappingsRemote.folders.findIndex((remote) => remote.id === local.id) === -1,
    );

    const foldersModifiedInEither: ExportableFolder[] = [];
    foldersMappingsLocal.folders.forEach((local) => {
      const mismatchingIds = foldersMappingsRemote.folders.find(
        (remote) =>
          remote.id === local.id &&
          (remote.name !== local.name || remote.parentFolderId !== local.parentFolderId),
      );

      if (!mismatchingIds) {
        return;
      }
      foldersModifiedInEither.push(options.preferLocalVersion ? local : mismatchingIds);
    });

    foldersMissingInLocal.forEach((item) => {
      sourceControlledFiles.push({
        id: item.id,
        name: item.name,
        type: 'folders',
        status: options.direction === 'push' ? 'deleted' : 'created',
        location: options.direction === 'push' ? 'local' : 'remote',
        conflict: false,
        file: getFoldersPath(this.gitFolder),
        updatedAt: lastUpdatedFolder[0]?.updatedAt.toISOString(),
      });
    });
    foldersMissingInRemote.forEach((item) => {
      sourceControlledFiles.push({
        id: item.id,
        name: item.name,
        type: 'folders',
        status: options.direction === 'push' ? 'created' : 'deleted',
        location: options.direction === 'push' ? 'local' : 'remote',
        conflict: options.direction === 'push' ? false : true,
        file: getFoldersPath(this.gitFolder),
        updatedAt: lastUpdatedFolder[0]?.updatedAt.toISOString(),
      });
    });

    foldersModifiedInEither.forEach((item) => {
      sourceControlledFiles.push({
        id: item.id,
        name: item.name,
        type: 'folders',
        status: 'modified',
        location: options.direction === 'push' ? 'local' : 'remote',
        conflict: true,
        file: getFoldersPath(this.gitFolder),
        updatedAt: lastUpdatedFolder[0]?.updatedAt.toISOString(),
      });
    });

    return {
      foldersMissingInLocal,
      foldersMissingInRemote,
      foldersModifiedInEither,
    };
  }

  async setGitUserDetails(
    name = SOURCE_CONTROL_DEFAULT_NAME,
    email = SOURCE_CONTROL_DEFAULT_EMAIL,
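getStatusFoldersMapping reports each difference as a SourceControlledFile entry, and the conflict flag depends on the direction. A worked example matching the unit test earlier in this diff, for a folder that exists only locally (values below are illustrative), gives roughly:

// direction 'pull' — a pull would delete the local-only folder, so it is flagged as a conflict
const pullEntry = {
  id: 'folder1',
  name: 'Marketing',
  type: 'folders',
  status: 'deleted',
  location: 'remote',
  conflict: true,
  file: '/home/user/.n8n/git/folders.json', // getFoldersPath(this.gitFolder)
  updatedAt: '2024-12-04T11:29:22.095Z',
};
// direction 'push' — the same folder is simply new on the remote: status 'created', conflict false.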
@@ -0,0 +1,8 @@
export type ExportableFolder = {
  id: string;
  name: string;
  parentFolderId: string | null;
  homeProjectId: string;
  createdAt: string;
  updatedAt: string;
};
@@ -11,4 +11,5 @@ export interface ExportableWorkflow {
  triggerCount: number;
  versionId?: string;
  owner: ResourceOwner;
  parentFolderId: string | null;
}
@@ -5,5 +5,6 @@ export interface SourceControlWorkflowVersionId {
  name?: string;
  localId?: string;
  remoteId?: string;
  parentFolderId: string | null;
  updatedAt?: string;
}
@@ -96,14 +96,19 @@ export interface IWorkflowDb extends IWorkflowBase {
  parentFolder?: Folder | null;
}

export interface IWorkflowToImport extends IWorkflowBase {
  tags: ITagToImport[];
}

export interface IWorkflowResponse extends IWorkflowBase {
  id: string;
}

export interface IWorkflowToImport
  extends Omit<IWorkflowBase, 'staticData' | 'pinData' | 'createdAt' | 'updatedAt'> {
  owner: {
    type: 'personal';
    personalEmail: string;
  };
  parentFolderId: string | null;
}

// ----------------------------------
// credentials
// ----------------------------------
@@ -8,6 +8,7 @@ import { nanoid } from 'nanoid';
import fsp from 'node:fs/promises';

import { CredentialsRepository } from '@/databases/repositories/credentials.repository';
import { FolderRepository } from '@/databases/repositories/folder.repository';
import { ProjectRepository } from '@/databases/repositories/project.repository';
import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository';
import { UserRepository } from '@/databases/repositories/user.repository';
@@ -26,7 +27,9 @@ describe('SourceControlImportService', () => {
  let projectRepository: ProjectRepository;
  let sharedCredentialsRepository: SharedCredentialsRepository;
  let userRepository: UserRepository;
  let folderRepository: FolderRepository;
  let service: SourceControlImportService;

  const cipher = mockInstance(Cipher);

  beforeAll(async () => {
@@ -36,6 +39,7 @@ describe('SourceControlImportService', () => {
    projectRepository = Container.get(ProjectRepository);
    sharedCredentialsRepository = Container.get(SharedCredentialsRepository);
    userRepository = Container.get(UserRepository);
    folderRepository = Container.get(FolderRepository);
    service = new SourceControlImportService(
      mock(),
      mock(),
@@ -53,6 +57,7 @@ describe('SourceControlImportService', () => {
      mock(),
      mock(),
      mock(),
      folderRepository,
      mock<InstanceSettings>({ n8nFolder: '/some-path' }),
    );
  });
@@ -55,6 +55,7 @@ const ITEM_TITLES: Record<Exclude<SourceControlledFileType, 'file'>, string> = {
  [SOURCE_CONTROL_FILE_TYPE.credential]: 'Credentials',
  [SOURCE_CONTROL_FILE_TYPE.variables]: 'Variables',
  [SOURCE_CONTROL_FILE_TYPE.tags]: 'Tags',
  [SOURCE_CONTROL_FILE_TYPE.folders]: 'Folders',
} as const;

const files = computed<ItemsList>(() =>
@@ -63,6 +64,7 @@ const files = computed<ItemsList>(() =>
    SOURCE_CONTROL_FILE_TYPE.credential,
    SOURCE_CONTROL_FILE_TYPE.variables,
    SOURCE_CONTROL_FILE_TYPE.tags,
    SOURCE_CONTROL_FILE_TYPE.folders,
  ].reduce<ItemsList>((acc, fileType) => {
    if (!groupedFilesByType.value[fileType]) {
      return acc;
@@ -214,6 +214,16 @@ describe('SourceControlPushModal', () => {
        file: '/Users/raul/.n8n/git/tags.json',
        updatedAt: '2024-12-04T11:29:22.095Z',
      },
      {
        id: 'mappings',
        name: 'folders',
        type: 'folders',
        status: 'modified',
        location: 'local',
        conflict: false,
        file: '/Users/raul/.n8n/git/folders.json',
        updatedAt: '2024-12-04T11:29:22.095Z',
      },
    ];

    const sourceControlStore = mockedStore(useSourceControlStore);
@@ -233,6 +243,7 @@ describe('SourceControlPushModal', () => {
    expect(getByRole('alert').textContent).toContain('Credentials: 1 added.');
    expect(getByRole('alert').textContent).toContain('Variables: at least one new or modified.');
    expect(getByRole('alert').textContent).toContain('Tags: at least one new or modified.');
    expect(getByRole('alert').textContent).toContain('Folders: at least one new or modified.');

    await userEvent.type(getByTestId('source-control-push-modal-commit'), commitMessage);
@@ -62,6 +62,7 @@ type Changes = {
  credentials: SourceControlledFile[];
  workflows: SourceControlledFile[];
  currentWorkflow?: SourceControlledFile;
  folders: SourceControlledFile[];
};

const classifyFilesByType = (files: SourceControlledFile[], currentWorkflowId?: string): Changes =>
@@ -86,6 +87,11 @@ const classifyFilesByType = (files: SourceControlledFile[], currentWorkflowId?:
      return acc;
    }

    if (file.type === SOURCE_CONTROL_FILE_TYPE.folders) {
      acc.folders.push(file);
      return acc;
    }

    if (file.type === SOURCE_CONTROL_FILE_TYPE.workflow && currentWorkflowId === file.id) {
      acc.currentWorkflow = file;
    }
@@ -102,7 +108,14 @@ const classifyFilesByType = (files: SourceControlledFile[], currentWorkflowId?:

      return acc;
    },
    { tags: [], variables: [], credentials: [], workflows: [], currentWorkflow: undefined },
    {
      tags: [],
      variables: [],
      credentials: [],
      workflows: [],
      folders: [],
      currentWorkflow: undefined,
    },
  );

const userNotices = computed(() => {
@@ -135,6 +148,13 @@ const userNotices = computed(() => {
    });
  }

  if (changes.value.folders.length) {
    messages.push({
      title: 'Folders',
      content: 'at least one new or modified',
    });
  }

  return messages;
});
const workflowId = computed(
@@ -220,6 +240,7 @@ const isSubmitDisabled = computed(() => {
    changes.value.credentials.length +
    changes.value.tags.length +
    changes.value.variables.length +
    changes.value.folders.length +
    selectedChanges.value.size;

  return toBePushed <= 0;
@@ -309,6 +330,10 @@ const successNotificationMessage = () => {
    messages.push(i18n.baseText('generic.variable_plural'));
  }

  if (changes.value.folders.length) {
    messages.push(i18n.baseText('generic.folders_plural'));
  }

  if (changes.value.tags.length) {
    messages.push(i18n.baseText('generic.tag_plural'));
  }
@@ -323,6 +348,7 @@ async function commitAndPush() {
  const files = changes.value.tags
    .concat(changes.value.variables)
    .concat(changes.value.credentials)
    .concat(changes.value.folders)
    .concat(changes.value.workflows.filter((file) => selectedChanges.value.has(file.id)));
  loadingService.startLoading(i18n.baseText('settings.sourceControl.loading.push'));
  close();
@@ -481,6 +507,9 @@ watch(refDebounced(search, 500), (term) => {
            <span v-if="file.type === SOURCE_CONTROL_FILE_TYPE.credential">
              Deleted Credential:
            </span>
            <span v-if="file.type === SOURCE_CONTROL_FILE_TYPE.folders">
              Deleted Folders:
            </span>
            <strong>{{ file.name || file.id }}</strong>
          </N8nText>
          <N8nText v-else tag="div" bold color="text-dark" :class="[$style.listItemName]">
@@ -517,7 +546,7 @@ watch(refDebounced(search, 500), (term) => {

    <template #footer>
      <N8nNotice v-if="userNotices.length" :compact="false" class="mt-0">
        <N8nText bold size="medium">Changes to credentials, variables and tags </N8nText>
        <N8nText bold size="medium">Changes to credentials, variables, tags and folders </N8nText>
        <br />
        <template v-for="{ title, content } in userNotices" :key="title">
          <N8nText bold size="small">{{ title }}</N8nText>
@@ -89,6 +89,7 @@
  "generic.next": "Next",
  "generic.pro": "Pro",
  "generic.variable_plural": "Variables",
  "generic.folders_plural": "Folders",
  "generic.variable": "Variable | {count} Variables",
  "generic.viewDocs": "View docs",
  "generic.workflows": "Workflows",
@@ -83,6 +83,7 @@ const pullMessage = ({
  tags,
  variables,
  workflow,
  folders,
}: Partial<Record<SourceControlledFile['type'], SourceControlledFile[]>>) => {
  const messages: string[] = [];

@@ -112,6 +113,10 @@
    messages.push(i18n.baseText('generic.tag_plural'));
  }

  if (folders?.length) {
    messages.push(i18n.baseText('generic.folders_plural'));
  }

  return [
    new Intl.ListFormat(i18n.locale, { style: 'long', type: 'conjunction' }).format(messages),
    'were pulled',
@@ -131,14 +136,14 @@ export const notifyUserAboutPullWorkFolderOutcome = async (
    return;
  }

  const { credential, tags, variables, workflow } = groupBy(files, 'type');
  const { credential, tags, variables, workflow, folders } = groupBy(files, 'type');

  const toastMessages = [
    ...(variables?.length ? [variablesToast] : []),
    ...(credential?.length ? [credentialsToast] : []),
    {
      title: i18n.baseText('settings.sourceControl.pull.success.title'),
      message: pullMessage({ credential, tags, variables, workflow }),
      message: pullMessage({ credential, tags, variables, workflow, folders }),
      type: 'success' as const,
    },
  ];
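The pull toast joins the affected resource kinds with Intl.ListFormat, so folders now appear in the success message. A quick sketch of the resulting text (the message parts are illustrative):

const parts = ['2 workflows', 'Variables', 'Folders'];
const text = [
  new Intl.ListFormat('en', { style: 'long', type: 'conjunction' }).format(parts),
  'were pulled',
].join(' ');
// => "2 workflows, Variables, and Folders were pulled"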