Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-16 09:36:44 +00:00)
feat: PAY-3773 export entities tooling (#19658)
@@ -37,3 +37,5 @@ export { DbConnection } from './connection/db-connection';
 export { DbConnectionOptions } from './connection/db-connection-options';
 
 export { AuthRolesService } from './services/auth.roles.service';
+
+export { DataSource } from '@n8n/typeorm';
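
The first hunk re-exports TypeORM's DataSource from what appears to be the @n8n/db barrel file, so downstream code can depend on @n8n/db alone. A minimal sketch of the consuming side, assuming the DataSource instance is registered with the @n8n/di container, as the new service's constructor injection implies:

import { DataSource } from '@n8n/db';
import { Container } from '@n8n/di';

// List every table known to the ORM; entityMetadatas is what the
// new ExportService iterates over below.
const dataSource = Container.get(DataSource);
console.log(dataSource.entityMetadatas.map((m) => m.tableName));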
@@ -1,9 +1,12 @@
 import { ExportEntitiesCommand } from '../entities';
-import { ensureDir } from 'fs-extra';
+import { mockInstance } from '@n8n/backend-test-utils';
+import { ExportService } from '@/services/export.service';
 
 jest.mock('fs-extra');
+jest.mock('@/services/export.service');
 
 describe('ExportEntitiesCommand', () => {
+	const mockExportService = mockInstance(ExportService);
 	describe('run', () => {
 		it('should export entities', async () => {
 			const command = new ExportEntitiesCommand();
@@ -18,11 +21,7 @@ describe('ExportEntitiesCommand', () => {
 			};
 			await command.run();
 
-			expect(ensureDir).toHaveBeenCalledWith('./exports');
-			// @ts-expect-error Protected property
-			expect(command.logger.info).toHaveBeenCalledTimes(4);
-			// @ts-expect-error Protected property
-			expect(command.logger.error).not.toHaveBeenCalled();
+			expect(mockExportService.exportEntities).toHaveBeenCalledWith('./exports');
 		});
 	});
 
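
The updated test never hands the service to the command by hand; it works because mockInstance registers its mock in the DI container, so the Container.get(ExportService) call inside run() resolves to the mock. A condensed sketch of that interaction (the registration behavior of mockInstance is assumed from its use in this diff):

import { mockInstance } from '@n8n/backend-test-utils';
import { Container } from '@n8n/di';
import { ExportService } from '@/services/export.service';

const mockExportService = mockInstance(ExportService);
// If mockInstance registers the mock, DI resolution yields the same object:
expect(Container.get(ExportService)).toBe(mockExportService);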
@@ -1,9 +1,10 @@
 import { Command } from '@n8n/decorators';
 import { z } from 'zod';
 import path from 'path';
-import { ensureDir } from 'fs-extra';
+import { Container } from '@n8n/di';
 
 import { BaseCommand } from '../base-command';
+import { ExportService } from '@/services/export.service';
 
 const flagsSchema = z.object({
 	outputDir: z
@@ -22,15 +23,7 @@ export class ExportEntitiesCommand extends BaseCommand<z.infer<typeof flagsSchema>> {
 	async run() {
 		const outputDir = this.flags.outputDir;
 
-		this.logger.info('\n⚠️⚠️ This feature is currently under development. ⚠️⚠️');
-		this.logger.info('\n🚀 Starting entity export...');
-		this.logger.info(`📁 Output directory: ${outputDir}`);
-
-		await ensureDir(outputDir);
-
-		// TODO: Export entities
-
-		this.logger.info('✅ Task completed successfully! \n');
+		await Container.get(ExportService).exportEntities(outputDir);
 	}
 
 	catch(error: Error) {
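
Because all of the behavior now lives in ExportService, the export can also be driven outside the CLI. A minimal sketch, assuming only that the service and its DataSource dependency are registered in the @n8n/di container (the standalone bootstrap is hypothetical; the resolution call mirrors the command's run() above):

import { Container } from '@n8n/di';
import { ExportService } from '@/services/export.service';

async function main() {
	// Same resolution the command performs, with a fixed output folder.
	await Container.get(ExportService).exportEntities('./exports');
}

void main();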
packages/cli/src/services/__tests__/export.service.test.ts (new file, 250 lines)
import { type Logger } from '@n8n/backend-common';
import { type DataSource } from '@n8n/typeorm';
import { mock } from 'jest-mock-extended';
import { mkdir, rm, readdir, appendFile } from 'fs/promises';

import { ExportService } from '../export.service';

// Mock fs/promises
jest.mock('fs/promises');

jest.mock('@n8n/db', () => ({
	DataSource: mock<DataSource>(),
}));

describe('ExportService', () => {
	let exportService: ExportService;
	let mockLogger: Logger;
	let mockDataSource: DataSource;

	beforeEach(() => {
		jest.clearAllMocks();

		mockLogger = mock<Logger>();
		mockDataSource = mock<DataSource>();

		exportService = new ExportService(mockLogger, mockDataSource);
	});

	describe('exportEntities', () => {
		const mockEntityMetadata = {
			tableName: 'users',
			name: 'User',
			columns: [{ databaseName: 'id' }, { databaseName: 'email' }, { databaseName: 'name' }],
		};

		const mockEntityMetadata2 = {
			tableName: 'workflows',
			name: 'Workflow',
			columns: [{ databaseName: 'id' }, { databaseName: 'name' }, { databaseName: 'active' }],
		};

		beforeEach(() => {
			// Mock DataSource entityMetadatas
			// @ts-expect-error Readonly property
			mockDataSource.entityMetadatas = [mockEntityMetadata, mockEntityMetadata2];
		});

		it('should export entities successfully', async () => {
			const outputDir = '/test/output';
			const mockEntities = [
				{ id: 1, email: 'user1@test.com', name: 'User 1' },
				{ id: 2, email: 'user2@test.com', name: 'User 2' },
			];

			// Mock file system operations
			(readdir as jest.Mock).mockResolvedValue([]);
			(mkdir as jest.Mock).mockResolvedValue(undefined);
			(appendFile as jest.Mock).mockResolvedValue(undefined);

			// Mock database queries
			jest
				.mocked(mockDataSource.query)
				.mockResolvedValueOnce(mockEntities) // First page
				.mockResolvedValueOnce([]); // No more data

			await exportService.exportEntities(outputDir);

			// Verify directory creation
			expect(mkdir).toHaveBeenCalledWith(outputDir, { recursive: true });

			// Verify database queries
			expect(mockDataSource.query).toHaveBeenCalledWith(
				'SELECT id, email, name FROM users LIMIT 500 OFFSET 0',
			);

			// Verify file writing
			expect(appendFile).toHaveBeenCalledWith(
				'/test/output/user.jsonl',
				'{"id":1,"email":"user1@test.com","name":"User 1"}\n{"id":2,"email":"user2@test.com","name":"User 2"}\n',
				'utf8',
			);
		});

		it('should handle multiple pages of data', async () => {
			const outputDir = '/test/output';
			const page1Entities = Array.from({ length: 500 }, (_, i) => ({
				id: i + 1,
				email: `user${i + 1}@test.com`,
			}));
			const page2Entities = Array.from({ length: 300 }, (_, i) => ({
				id: i + 501,
				email: `user${i + 501}@test.com`,
			}));

			// Mock file system operations
			(readdir as jest.Mock).mockResolvedValue([]);
			(mkdir as jest.Mock).mockResolvedValue(undefined);
			(appendFile as jest.Mock).mockResolvedValue(undefined);

			// Mock database queries - multiple pages
			jest
				.mocked(mockDataSource.query)
				.mockResolvedValueOnce(page1Entities) // First page
				.mockResolvedValueOnce(page2Entities) // Second page
				.mockResolvedValueOnce([]); // No more data

			await exportService.exportEntities(outputDir);

			// Verify multiple database queries
			expect(mockDataSource.query).toHaveBeenCalledWith(
				'SELECT id, email, name FROM users LIMIT 500 OFFSET 0',
			);
			expect(mockDataSource.query).toHaveBeenCalledWith(
				'SELECT id, email, name FROM users LIMIT 500 OFFSET 500',
			);

			// Verify multiple file writes
			expect(appendFile).toHaveBeenCalledTimes(2);
		});

		it('should clear existing files before export', async () => {
			const outputDir = '/test/output';
			const existingFiles = ['user.jsonl', 'user.1.jsonl', 'other.txt'];

			// Mock file system operations
			(readdir as jest.Mock).mockResolvedValue(existingFiles);
			(rm as jest.Mock).mockResolvedValue(undefined);
			(mkdir as jest.Mock).mockResolvedValue(undefined);
			(appendFile as jest.Mock).mockResolvedValue(undefined);

			// Mock database queries
			jest.mocked(mockDataSource.query).mockResolvedValue([]);

			await exportService.exportEntities(outputDir);

			// Verify existing files are deleted
			expect(rm).toHaveBeenCalledWith('/test/output/user.jsonl');
			expect(rm).toHaveBeenCalledWith('/test/output/user.1.jsonl');
			expect(rm).not.toHaveBeenCalledWith('/test/output/other.txt');

			// Verify logging
			expect(mockLogger.info).toHaveBeenCalledWith(
				' 🗑️ Found 2 existing file(s) for user, deleting...',
			);
		});

		it('should handle file splitting at 10,000 entities', async () => {
			const outputDir = '/test/output';
			const largePage = Array.from({ length: 500 }, (_, i) => ({
				id: i + 1,
				email: `user${i + 1}@test.com`,
			}));

			// Mock file system operations
			(readdir as jest.Mock).mockResolvedValue([]);
			(mkdir as jest.Mock).mockResolvedValue(undefined);
			(appendFile as jest.Mock).mockResolvedValue(undefined);

			// Mock database queries - simulate 20 pages of 500 entities each (10,000 total)
			for (let i = 0; i < 20; i++) {
				jest.mocked(mockDataSource.query).mockResolvedValueOnce(largePage);
			}
			jest.mocked(mockDataSource.query).mockResolvedValue([]); // Final empty result

			await exportService.exportEntities(outputDir);

			// Verify all entities go to the first file (user.jsonl)
			expect(appendFile).toHaveBeenCalledTimes(20);
			expect(appendFile).toHaveBeenCalledWith(
				'/test/output/user.jsonl',
				expect.any(String),
				'utf8',
			);
		});

		it('should handle empty tables', async () => {
			const outputDir = '/test/output';

			// Mock file system operations
			(readdir as jest.Mock).mockResolvedValue([]);
			(mkdir as jest.Mock).mockResolvedValue(undefined);

			// Mock database queries - empty result
			jest.mocked(mockDataSource.query).mockResolvedValue([]);

			await exportService.exportEntities(outputDir);

			// Verify no file writing for empty tables
			expect(appendFile).not.toHaveBeenCalled();
		});

		it('should handle multiple entity types', async () => {
			const outputDir = '/test/output';
			const userEntities = [{ id: 1, email: 'user@test.com' }];
			const workflowEntities = [{ id: 1, name: 'Test Workflow' }];

			// Mock file system operations
			(readdir as jest.Mock).mockResolvedValue([]);
			(mkdir as jest.Mock).mockResolvedValue(undefined);
			(appendFile as jest.Mock).mockResolvedValue(undefined);

			// Mock database queries for both entity types
			jest
				.mocked(mockDataSource.query)
				.mockResolvedValueOnce(userEntities) // Users first page
				.mockResolvedValueOnce(workflowEntities); // Workflows first page

			await exportService.exportEntities(outputDir);

			// Verify both entity types are processed
			expect(jest.mocked(appendFile).mock.calls[0]).toEqual([
				'/test/output/user.jsonl',
				'{"id":1,"email":"user@test.com"}\n',
				'utf8',
			]);

			expect(jest.mocked(appendFile).mock.calls[1]).toEqual([
				'/test/output/workflow.jsonl',
				'{"id":1,"name":"Test Workflow"}\n',
				'utf8',
			]);
		});

		it('should log export summary', async () => {
			const outputDir = '/test/output';
			const mockEntities = [{ id: 1, email: 'user@test.com' }];

			// Mock file system operations
			(readdir as jest.Mock).mockResolvedValue([]);
			(mkdir as jest.Mock).mockResolvedValue(undefined);
			(appendFile as jest.Mock).mockResolvedValue(undefined);

			// Mock database queries
			jest
				.mocked(mockDataSource.query)
				.mockResolvedValueOnce(mockEntities) // First entity type
				.mockResolvedValueOnce([]) // No more data for first type
				.mockResolvedValueOnce([]); // Second entity type is empty

			await exportService.exportEntities(outputDir);

			// Verify summary logging
			expect(mockLogger.info).toHaveBeenCalledWith('\n📊 Export Summary:');
			expect(mockLogger.info).toHaveBeenCalledWith(' Tables processed: 2');
			expect(mockLogger.info).toHaveBeenCalledWith(' Total entities exported: 1');
			expect(mockLogger.info).toHaveBeenCalledWith(' Output directory: /test/output');
			expect(mockLogger.info).toHaveBeenCalledWith('✅ Task completed successfully! \n');
		});
	});
});
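
The file-splitting test above hinges on the index arithmetic in export.service.ts below: with entitiesPerFile = 10000, the entity count seen so far selects the target file as floor(count / 10000) + 1, so exactly 10,000 entities land in user.jsonl before user.2.jsonl opens. A quick worked check (the standalone helper is extracted for illustration only):

// Mirrors the fileName computation inside exportEntities().
const entitiesPerFile = 10000;
const fileNameFor = (entityName: string, totalEntityCount: number): string => {
	const index = Math.floor(totalEntityCount / entitiesPerFile) + 1;
	return index === 1 ? `${entityName}.jsonl` : `${entityName}.${index}.jsonl`;
};

console.log(fileNameFor('user', 0)); // 'user.jsonl' - first page
console.log(fileNameFor('user', 9500)); // 'user.jsonl' - last page of file 1
console.log(fileNameFor('user', 10000)); // 'user.2.jsonl' - entity 10,001 onwards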
packages/cli/src/services/export.service.ts (new file, 141 lines)
@@ -0,0 +1,141 @@
|
|||||||
|
import { Logger } from '@n8n/backend-common';
|
||||||
|
import { mkdir, rm, readdir, appendFile } from 'fs/promises';
|
||||||
|
import path from 'path';
|
||||||
|
|
||||||
|
import { Service } from '@n8n/di';
|
||||||
|
import { type DataSource } from '@n8n/db';
|
||||||
|
|
||||||
|
@Service()
|
||||||
|
export class ExportService {
|
||||||
|
constructor(
|
||||||
|
private readonly logger: Logger,
|
||||||
|
private readonly dataSource: DataSource,
|
||||||
|
) {}
|
||||||
|
|
||||||
|
private async clearExistingEntityFiles(outputDir: string, entityName: string): Promise<void> {
|
||||||
|
const existingFiles = await readdir(outputDir);
|
||||||
|
const entityFiles = existingFiles.filter(
|
||||||
|
(file) => file.startsWith(`${entityName}.`) && file.endsWith('.jsonl'),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (entityFiles.length > 0) {
|
||||||
|
this.logger.info(
|
||||||
|
` 🗑️ Found ${entityFiles.length} existing file(s) for ${entityName}, deleting...`,
|
||||||
|
);
|
||||||
|
for (const file of entityFiles) {
|
||||||
|
await rm(path.join(outputDir, file));
|
||||||
|
this.logger.info(` Deleted: ${file}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async exportEntities(outputDir: string) {
|
||||||
|
this.logger.info('\n⚠️⚠️ This feature is currently under development. ⚠️⚠️');
|
||||||
|
this.logger.info('\n🚀 Starting entity export...');
|
||||||
|
this.logger.info(`📁 Output directory: ${outputDir}`);
|
||||||
|
|
||||||
|
// Ensure output directory exists
|
||||||
|
await mkdir(outputDir, { recursive: true });
|
||||||
|
|
||||||
|
// Get DataSource from Container and fetch all repositories
|
||||||
|
const entityMetadatas = this.dataSource.entityMetadatas;
|
||||||
|
|
||||||
|
this.logger.info('\n📋 Exporting entities from all tables:');
|
||||||
|
this.logger.info('====================================');
|
||||||
|
|
||||||
|
let totalTablesProcessed = 0;
|
||||||
|
let totalEntitiesExported = 0;
|
||||||
|
const pageSize = 500;
|
||||||
|
const entitiesPerFile = 10000;
|
||||||
|
|
||||||
|
for (const metadata of entityMetadatas) {
|
||||||
|
// Get table name and entity name
|
||||||
|
const tableName = metadata.tableName;
|
||||||
|
const entityName = metadata.name.toLowerCase();
|
||||||
|
|
||||||
|
this.logger.info(`\n📊 Processing table: ${tableName} (${entityName})`);
|
||||||
|
|
||||||
|
// Clear existing files for this entity
|
||||||
|
await this.clearExistingEntityFiles(outputDir, entityName);
|
||||||
|
|
||||||
|
// Get column information for this table
|
||||||
|
const columns = metadata.columns.map((col) => col.databaseName).join(', ');
|
||||||
|
this.logger.info(` 💭 Columns: ${columns}`);
|
||||||
|
|
||||||
|
let offset = 0;
|
||||||
|
let totalEntityCount = 0;
|
||||||
|
let hasNextPage = true;
|
||||||
|
let fileIndex = 1;
|
||||||
|
let currentFileEntityCount = 0;
|
||||||
|
|
||||||
|
do {
|
||||||
|
/*
|
||||||
|
* use raw SQL query to avoid typeorm limitations,
|
||||||
|
* typeorm repositories do not return joining table entries
|
||||||
|
*/
|
||||||
|
const pageEntities = await this.dataSource.query(
|
||||||
|
`SELECT ${columns} FROM ${tableName} LIMIT ${pageSize} OFFSET ${offset}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// If no entities returned, we've reached the end
|
||||||
|
if (pageEntities.length === 0) {
|
||||||
|
this.logger.info(` No more entities available at offset ${offset}`);
|
||||||
|
hasNextPage = false;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine which file to write to based on current entity count
|
||||||
|
const targetFileIndex = Math.floor(totalEntityCount / entitiesPerFile) + 1;
|
||||||
|
const fileName =
|
||||||
|
targetFileIndex === 1 ? `${entityName}.jsonl` : `${entityName}.${targetFileIndex}.jsonl`;
|
||||||
|
const filePath = path.join(outputDir, fileName);
|
||||||
|
|
||||||
|
// If we've moved to a new file, log the completion of the previous file
|
||||||
|
if (targetFileIndex > fileIndex) {
|
||||||
|
this.logger.info(` ✅ Completed file ${fileIndex}: ${currentFileEntityCount} entities`);
|
||||||
|
fileIndex = targetFileIndex;
|
||||||
|
currentFileEntityCount = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Append all entities in this page as JSONL (one JSON object per line)
|
||||||
|
const entitiesJsonl = pageEntities
|
||||||
|
.map((entity: unknown) => JSON.stringify(entity))
|
||||||
|
.join('\n');
|
||||||
|
await appendFile(filePath, entitiesJsonl + '\n', 'utf8');
|
||||||
|
|
||||||
|
totalEntityCount += pageEntities.length;
|
||||||
|
currentFileEntityCount += pageEntities.length;
|
||||||
|
offset += pageEntities.length;
|
||||||
|
|
||||||
|
this.logger.info(
|
||||||
|
` Fetched page containing ${pageEntities.length} entities (page size: ${pageSize}, offset: ${offset - pageEntities.length}, total processed: ${totalEntityCount})`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// If we got fewer entities than requested, we've reached the end
|
||||||
|
if (pageEntities.length < pageSize) {
|
||||||
|
this.logger.info(
|
||||||
|
` Reached end of dataset (got ${pageEntities.length} < ${pageSize} requested)`,
|
||||||
|
);
|
||||||
|
hasNextPage = false;
|
||||||
|
}
|
||||||
|
} while (hasNextPage);
|
||||||
|
|
||||||
|
// Log completion of the final file
|
||||||
|
if (currentFileEntityCount > 0) {
|
||||||
|
this.logger.info(` ✅ Completed file ${fileIndex}: ${currentFileEntityCount} entities`);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.info(
|
||||||
|
` ✅ Completed export for ${tableName}: ${totalEntityCount} entities in ${fileIndex} file(s)`,
|
||||||
|
);
|
||||||
|
totalTablesProcessed++;
|
||||||
|
totalEntitiesExported += totalEntityCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.info('\n📊 Export Summary:');
|
||||||
|
this.logger.info(` Tables processed: ${totalTablesProcessed}`);
|
||||||
|
this.logger.info(` Total entities exported: ${totalEntitiesExported}`);
|
||||||
|
this.logger.info(` Output directory: ${outputDir}`);
|
||||||
|
this.logger.info('✅ Task completed successfully! \n');
|
||||||
|
}
|
||||||
|
}
|
||||||
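
Each table ends up as one or more newline-delimited JSON files, so a consumer can stream them back a line at a time. A minimal reader sketch using Node's readline module (this reader is illustrative and not part of the commit):

import { createReadStream } from 'fs';
import { createInterface } from 'readline';

// Parse a .jsonl export produced by ExportService, one entity per line.
async function readJsonl(filePath: string): Promise<unknown[]> {
	const entities: unknown[] = [];
	const rl = createInterface({
		input: createReadStream(filePath, { encoding: 'utf8' }),
		crlfDelay: Infinity,
	});
	for await (const line of rl) {
		if (line.trim() !== '') entities.push(JSON.parse(line));
	}
	return entities;
}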