From 763d17bb1f123137566e8f26bb4455be558c4d02 Mon Sep 17 00:00:00 2001 From: Daria Date: Wed, 17 Sep 2025 10:20:33 +0300 Subject: [PATCH] feat(core): Show data table per table storage usage (no-changelog) (#19480) --- .../__tests__/data-store-filters.test.ts | 2 + .../data-store-size-validator.service.test.ts | 46 ++-- .../__tests__/data-store-size.test.ts | 198 ++++++++++++++++++ .../__tests__/data-store.controller.test.ts | 2 + .../__tests__/data-store.service.test.ts | 94 +-------- .../data-table/__tests__/test-helpers.ts | 13 ++ .../data-store-size-validator.service.ts | 57 ++--- .../data-table/data-store.repository.ts | 81 +++++-- .../modules/data-table/data-store.service.ts | 7 +- .../modules/data-table/data-store.types.ts | 5 + .../src/modules/data-table/utils/sql-utils.ts | 4 + .../dataStore/components/DataStoreCard.vue | 22 +- .../src/features/dataStore/dataStore.store.ts | 17 +- packages/workflow/src/data-store.types.ts | 1 + 14 files changed, 384 insertions(+), 165 deletions(-) create mode 100644 packages/cli/src/modules/data-table/__tests__/data-store-size.test.ts create mode 100644 packages/cli/src/modules/data-table/__tests__/test-helpers.ts diff --git a/packages/cli/src/modules/data-table/__tests__/data-store-filters.test.ts b/packages/cli/src/modules/data-table/__tests__/data-store-filters.test.ts index 6c2c61516a..0875721f97 100644 --- a/packages/cli/src/modules/data-table/__tests__/data-store-filters.test.ts +++ b/packages/cli/src/modules/data-table/__tests__/data-store-filters.test.ts @@ -4,12 +4,14 @@ import { createTeamProject, testDb, testModules } from '@n8n/backend-test-utils' import { Project } from '@n8n/db'; import { Container } from '@n8n/di'; +import { mockDataStoreSizeValidator } from './test-helpers'; import { DataStoreService } from '../data-store.service'; import { DataStoreValidationError } from '../errors/data-store-validation.error'; beforeAll(async () => { await testModules.loadModules(['data-table']); await testDb.init(); + mockDataStoreSizeValidator(); }); beforeEach(async () => { diff --git a/packages/cli/src/modules/data-table/__tests__/data-store-size-validator.service.test.ts b/packages/cli/src/modules/data-table/__tests__/data-store-size-validator.service.test.ts index c530ab7a31..af9addb2e1 100644 --- a/packages/cli/src/modules/data-table/__tests__/data-store-size-validator.service.test.ts +++ b/packages/cli/src/modules/data-table/__tests__/data-store-size-validator.service.test.ts @@ -20,7 +20,7 @@ describe('DataStoreSizeValidator', () => { describe('basic functionality', () => { it('should fetch size on first call', async () => { - fetchSizeFn.mockResolvedValue(50 * 1024 * 1024); // 50MB + fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} }); // 50MB await validator.validateSize(fetchSizeFn, new Date('2024-01-01T00:00:00Z')); @@ -28,7 +28,7 @@ describe('DataStoreSizeValidator', () => { }); it('should pass validation when size is under limit', async () => { - fetchSizeFn.mockResolvedValue(50 * 1024 * 1024); + fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} }); await expect( validator.validateSize(fetchSizeFn, new Date('2024-01-01T00:00:00Z')), @@ -36,7 +36,7 @@ describe('DataStoreSizeValidator', () => { }); it('should throw error when size exceeds limit', async () => { - fetchSizeFn.mockResolvedValue(150 * 1024 * 1024); + fetchSizeFn.mockResolvedValue({ totalBytes: 150 * 1024 * 1024, dataTables: {} }); await expect( validator.validateSize(fetchSizeFn, new Date('2024-01-01T00:00:00Z')), @@ 
-44,7 +44,7 @@ describe('DataStoreSizeValidator', () => {
 	});

 	it('should throw error when size equals limit', async () => {
-		fetchSizeFn.mockResolvedValue(100 * 1024 * 1024);
+		fetchSizeFn.mockResolvedValue({ totalBytes: 100 * 1024 * 1024, dataTables: {} });

 		await expect(
 			validator.validateSize(fetchSizeFn, new Date('2024-01-01T00:00:00Z')),
@@ -54,7 +54,7 @@ describe('DataStoreSizeValidator', () => {

 	describe('caching behavior', () => {
 		it('should use cached value within cache duration', async () => {
-			fetchSizeFn.mockResolvedValue(50);
+			fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} });

 			const time1 = new Date('2024-01-01T00:00:00Z');
 			const time2 = new Date('2024-01-01T00:00:00.500Z'); // 500ms later
@@ -65,7 +65,7 @@ describe('DataStoreSizeValidator', () => {
 		});

 		it('should refresh cache after cache duration expires', async () => {
-			fetchSizeFn.mockResolvedValue(50 * 1024 * 1024);
+			fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} });

 			const time1 = new Date('2024-01-01T00:00:00Z');
 			const time2 = new Date('2024-01-01T00:00:01.001Z'); // 1001ms later
@@ -77,7 +77,7 @@ describe('DataStoreSizeValidator', () => {

 		it('should always validate against cached value even without refresh', async () => {
 			// First call: DB at 50MB
-			fetchSizeFn.mockResolvedValue(50 * 1024 * 1024);
+			fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} });

 			const time1 = new Date('2024-01-01T00:00:00Z');
 			await validator.validateSize(fetchSizeFn, time1);
@@ -91,7 +91,7 @@ describe('DataStoreSizeValidator', () => {

 		it('should fail validation once cached value shows full DB', async () => {
 			// First call: DB becomes full (100MB)
-			fetchSizeFn.mockResolvedValue(100 * 1024 * 1024);
+			fetchSizeFn.mockResolvedValue({ totalBytes: 100 * 1024 * 1024, dataTables: {} });

 			const time1 = new Date('2024-01-01T00:00:00Z');
 			await expect(validator.validateSize(fetchSizeFn, time1)).rejects.toThrow(
@@ -111,10 +111,12 @@ describe('DataStoreSizeValidator', () => {

 	describe('concurrent calls', () => {
 		it('should handle concurrent calls correctly', async () => {
-			let resolveCheck: (value: number) => void;
-			const checkPromise = new Promise<number>((resolve) => {
-				resolveCheck = resolve;
-			});
+			let resolveCheck: (value: { totalBytes: number; dataTables: Record<string, number> }) => void;
+			const checkPromise = new Promise<{ totalBytes: number; dataTables: Record<string, number> }>(
+				(resolve) => {
+					resolveCheck = resolve;
+				},
+			);

 			fetchSizeFn.mockImplementation(async () => await checkPromise);

@@ -129,7 +131,7 @@
 			await new Promise((resolve) => setImmediate(resolve));

 			// Resolve the check with a value under the limit
-			resolveCheck!(50 * 1024 * 1024);
+			resolveCheck!({ totalBytes: 50 * 1024 * 1024, dataTables: {} });

 			await Promise.all([promise1, promise2, promise3]);

@@ -138,10 +140,12 @@
 		});

 		it('should share failure state among concurrent calls', async () => {
-			let resolveCheck: (value: number) => void;
-			const checkPromise = new Promise<number>((resolve) => {
-				resolveCheck = resolve;
-			});
+			let resolveCheck: (value: { totalBytes: number; dataTables: Record<string, number> }) => void;
+			const checkPromise = new Promise<{ totalBytes: number; dataTables: Record<string, number> }>(
+				(resolve) => {
+					resolveCheck = resolve;
+				},
+			);

 			fetchSizeFn.mockImplementation(async () => await checkPromise);

@@ -153,7 +157,7 @@
 			const promise3 = validator.validateSize(fetchSizeFn, time);

 			// Resolve with size over limit
- resolveCheck!(150 * 1024 * 1024); + resolveCheck!({ totalBytes: 150 * 1024 * 1024, dataTables: {} }); // All should fail with the same error await expect(promise1).rejects.toThrow( @@ -173,7 +177,7 @@ describe('DataStoreSizeValidator', () => { describe('reset functionality', () => { it('should clear cache when reset is called', async () => { - fetchSizeFn.mockResolvedValue(50 * 1024 * 1024); + fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} }); const time1 = new Date('2024-01-01T00:00:00Z'); // First call @@ -202,7 +206,7 @@ describe('DataStoreSizeValidator', () => { // t=1500ms: Still within new cache window, uses cached 100MB, continues to FAIL // First check: DB at 99MB (under limit) - fetchSizeFn.mockResolvedValueOnce(99 * 1024 * 1024); + fetchSizeFn.mockResolvedValueOnce({ totalBytes: 99 * 1024 * 1024, dataTables: {} }); const time1 = new Date('2024-01-01T00:00:00Z'); await expect(validator.validateSize(fetchSizeFn, time1)).resolves.toBeUndefined(); @@ -212,7 +216,7 @@ describe('DataStoreSizeValidator', () => { await expect(validator.validateSize(fetchSizeFn, time2)).resolves.toBeUndefined(); // After cache expires: new check fetches current state showing DB is now full - fetchSizeFn.mockResolvedValueOnce(100 * 1024 * 1024); + fetchSizeFn.mockResolvedValueOnce({ totalBytes: 100 * 1024 * 1024, dataTables: {} }); const time3 = new Date('2024-01-01T00:00:01.001Z'); await expect(validator.validateSize(fetchSizeFn, time3)).rejects.toThrow( 'Data store size limit exceeded: 100MB used, limit is 100MB', diff --git a/packages/cli/src/modules/data-table/__tests__/data-store-size.test.ts b/packages/cli/src/modules/data-table/__tests__/data-store-size.test.ts new file mode 100644 index 0000000000..781758301a --- /dev/null +++ b/packages/cli/src/modules/data-table/__tests__/data-store-size.test.ts @@ -0,0 +1,198 @@ +import { createTeamProject, testDb, testModules } from '@n8n/backend-test-utils'; +import { GlobalConfig } from '@n8n/config'; +import type { Project } from '@n8n/db'; +import { Container } from '@n8n/di'; + +import { DataStoreSizeValidator } from '../data-store-size-validator.service'; +import { DataStoreRepository } from '../data-store.repository'; +import { DataStoreService } from '../data-store.service'; +import { DataStoreValidationError } from '../errors/data-store-validation.error'; + +beforeAll(async () => { + await testModules.loadModules(['data-table']); + await testDb.init(); +}); + +beforeEach(async () => { + const dataStoreService = Container.get(DataStoreService); + await dataStoreService.deleteDataStoreAll(); + await testDb.truncate(['DataTable', 'DataTableColumn']); + + const dataStoreSizeValidator = Container.get(DataStoreSizeValidator); + dataStoreSizeValidator.reset(); +}); + +afterAll(async () => { + await testDb.terminate(); +}); + +describe('Data Store Size Tests', () => { + let dataStoreService: DataStoreService; + let dataStoreRepository: DataStoreRepository; + + beforeAll(() => { + dataStoreService = Container.get(DataStoreService); + dataStoreRepository = Container.get(DataStoreRepository); + }); + + let project1: Project; + + beforeEach(async () => { + project1 = await createTeamProject(); + }); + + describe('size validation', () => { + it('should prevent insertRows when size limit exceeded', async () => { + // ARRANGE + const dataStoreSizeValidator = Container.get(DataStoreSizeValidator); + dataStoreSizeValidator.reset(); + + const maxSize = Container.get(GlobalConfig).dataTable.maxSize; + + const { id: dataStoreId } = await 
dataStoreService.createDataStore(project1.id, { + name: 'dataStore', + columns: [{ name: 'data', type: 'string' }], + }); + + const mockFindDataTablesSize = jest + .spyOn(dataStoreRepository, 'findDataTablesSize') + .mockResolvedValue({ totalBytes: maxSize + 1, dataTables: {} }); + + // ACT & ASSERT + await expect( + dataStoreService.insertRows(dataStoreId, project1.id, [{ data: 'test' }]), + ).rejects.toThrow(DataStoreValidationError); + + expect(mockFindDataTablesSize).toHaveBeenCalled(); + mockFindDataTablesSize.mockRestore(); + }); + + it('should prevent updateRow when size limit exceeded', async () => { + // ARRANGE + const dataStoreSizeValidator = Container.get(DataStoreSizeValidator); + dataStoreSizeValidator.reset(); + + const maxSize = Container.get(GlobalConfig).dataTable.maxSize; + + const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, { + name: 'dataStore', + columns: [{ name: 'data', type: 'string' }], + }); + + // Now mock the size check to be over limit + const mockFindDataTablesSize = jest + .spyOn(dataStoreRepository, 'findDataTablesSize') + .mockResolvedValue({ totalBytes: maxSize + 1, dataTables: {} }); + + // ACT & ASSERT + await expect( + dataStoreService.updateRow(dataStoreId, project1.id, { + filter: { + type: 'and', + filters: [{ columnName: 'id', condition: 'eq', value: 1 }], + }, + data: { data: 'updated' }, + }), + ).rejects.toThrow(DataStoreValidationError); + + expect(mockFindDataTablesSize).toHaveBeenCalled(); + mockFindDataTablesSize.mockRestore(); + }); + + it('should prevent upsertRow when size limit exceeded (insert case)', async () => { + // ARRANGE + + const maxSize = Container.get(GlobalConfig).dataTable.maxSize; + + const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, { + name: 'dataStore', + columns: [{ name: 'data', type: 'string' }], + }); + + const mockFindDataTablesSize = jest + .spyOn(dataStoreRepository, 'findDataTablesSize') + .mockResolvedValue({ totalBytes: maxSize + 1, dataTables: {} }); + + // ACT & ASSERT + await expect( + dataStoreService.upsertRow(dataStoreId, project1.id, { + filter: { + type: 'and', + filters: [{ columnName: 'data', condition: 'eq', value: 'nonexistent' }], + }, + data: { data: 'new' }, + }), + ).rejects.toThrow(DataStoreValidationError); + + expect(mockFindDataTablesSize).toHaveBeenCalled(); + mockFindDataTablesSize.mockRestore(); + }); + }); + + describe('findDataTablesSize', () => { + it('should return size information for data tables', async () => { + // ARRANGE + const dataStore1 = await dataStoreService.createDataStore(project1.id, { + name: 'dataStore1', + columns: [{ name: 'data', type: 'string' }], + }); + + const dataStore2 = await dataStoreService.createDataStore(project1.id, { + name: 'dataStore2', + columns: [{ name: 'data', type: 'string' }], + }); + + const data = new Array(1000).fill(0).map((_, i) => ({ data: `test_data_${i}` })); + + await dataStoreService.insertRows(dataStore1.id, project1.id, data); + + await dataStoreService.insertRows(dataStore2.id, project1.id, [{ data: 'test' }]); + + // ACT + const result = await dataStoreRepository.findDataTablesSize(); + + // ASSERT + expect(result).toBeDefined(); + expect(result.totalBytes).toBeGreaterThan(0); + expect(result.dataTables).toBeDefined(); + expect(Object.keys(result.dataTables)).toHaveLength(2); + + expect(result.dataTables[dataStore1.id]).toBeGreaterThan(0); + expect(result.dataTables[dataStore2.id]).toBeGreaterThan(0); + + 
expect(result.dataTables[dataStore1.id]).toBeGreaterThan(result.dataTables[dataStore2.id]); + + // Total should be sum of individual tables + const expectedTotal = result.dataTables[dataStore1.id] + result.dataTables[dataStore2.id]; + expect(result.totalBytes).toBe(expectedTotal); + }); + + it('should return empty result when no data tables exist', async () => { + // ACT + const result = await dataStoreRepository.findDataTablesSize(); + + // ASSERT + expect(result).toBeDefined(); + expect(result.totalBytes).toBe(0); + expect(result.dataTables).toBeDefined(); + expect(Object.keys(result.dataTables)).toHaveLength(0); + }); + + it('should handle data tables with no rows', async () => { + // ARRANGE + const dataStore = await dataStoreService.createDataStore(project1.id, { + name: 'emptyDataStore', + columns: [{ name: 'data', type: 'string' }], + }); + + // ACT + const result = await dataStoreRepository.findDataTablesSize(); + + // ASSERT + expect(result).toBeDefined(); + expect(result.totalBytes).toBeGreaterThanOrEqual(0); + expect(result.dataTables).toBeDefined(); + expect(result.dataTables[dataStore.id]).toBeGreaterThanOrEqual(0); + }); + }); +}); diff --git a/packages/cli/src/modules/data-table/__tests__/data-store.controller.test.ts b/packages/cli/src/modules/data-table/__tests__/data-store.controller.test.ts index cd1cceb150..a686e2a04f 100644 --- a/packages/cli/src/modules/data-table/__tests__/data-store.controller.test.ts +++ b/packages/cli/src/modules/data-table/__tests__/data-store.controller.test.ts @@ -21,6 +21,7 @@ import * as utils from '@test-integration/utils'; import { DataStoreColumnRepository } from '../data-store-column.repository'; import { DataStoreRowsRepository } from '../data-store-rows.repository'; import { DataStoreRepository } from '../data-store.repository'; +import { mockDataStoreSizeValidator } from './test-helpers'; let owner: User; let member: User; @@ -42,6 +43,7 @@ let dataStoreRowsRepository: DataStoreRowsRepository; beforeAll(async () => { await testDb.init(); + mockDataStoreSizeValidator(); }); beforeEach(async () => { diff --git a/packages/cli/src/modules/data-table/__tests__/data-store.service.test.ts b/packages/cli/src/modules/data-table/__tests__/data-store.service.test.ts index e3f0d8f6db..0b73f703fa 100644 --- a/packages/cli/src/modules/data-table/__tests__/data-store.service.test.ts +++ b/packages/cli/src/modules/data-table/__tests__/data-store.service.test.ts @@ -1,15 +1,14 @@ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ import type { AddDataStoreColumnDto, CreateDataStoreColumnDto } from '@n8n/api-types'; import { createTeamProject, testDb, testModules } from '@n8n/backend-test-utils'; -import { GlobalConfig } from '@n8n/config'; import type { Project } from '@n8n/db'; import { Container } from '@n8n/di'; import type { DataStoreRow } from 'n8n-workflow'; import { DataStoreRowsRepository } from '../data-store-rows.repository'; -import { DataStoreSizeValidator } from '../data-store-size-validator.service'; import { DataStoreRepository } from '../data-store.repository'; import { DataStoreService } from '../data-store.service'; +import { mockDataStoreSizeValidator } from './test-helpers'; import { DataStoreColumnNameConflictError } from '../errors/data-store-column-name-conflict.error'; import { DataStoreColumnNotFoundError } from '../errors/data-store-column-not-found.error'; import { DataStoreNameConflictError } from '../errors/data-store-name-conflict.error'; @@ -20,12 +19,11 @@ import { toTableName } from '../utils/sql-utils'; 
beforeAll(async () => { await testModules.loadModules(['data-table']); await testDb.init(); + mockDataStoreSizeValidator(); }); beforeEach(async () => { await testDb.truncate(['DataTable', 'DataTableColumn']); - const dataStoreSizeValidator = Container.get(DataStoreSizeValidator); - dataStoreSizeValidator.reset(); }); afterAll(async () => { @@ -2657,92 +2655,4 @@ describe('dataStore', () => { await expect(result).rejects.toThrow(DataStoreValidationError); }); }); - - describe('size validation', () => { - it('should prevent insertRows when size limit exceeded', async () => { - // ARRANGE - const dataStoreSizeValidator = Container.get(DataStoreSizeValidator); - dataStoreSizeValidator.reset(); - - const maxSize = Container.get(GlobalConfig).dataTable.maxSize; - - const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, { - name: 'dataStore', - columns: [{ name: 'data', type: 'string' }], - }); - - const mockFindDataTablesSize = jest - .spyOn(dataStoreRepository, 'findDataTablesSize') - .mockResolvedValue(maxSize + 1); - - // ACT & ASSERT - await expect( - dataStoreService.insertRows(dataStoreId, project1.id, [{ data: 'test' }]), - ).rejects.toThrow(DataStoreValidationError); - - expect(mockFindDataTablesSize).toHaveBeenCalled(); - mockFindDataTablesSize.mockRestore(); - }); - - it('should prevent updateRow when size limit exceeded', async () => { - // ARRANGE - const dataStoreSizeValidator = Container.get(DataStoreSizeValidator); - dataStoreSizeValidator.reset(); - - const maxSize = Container.get(GlobalConfig).dataTable.maxSize; - - const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, { - name: 'dataStore', - columns: [{ name: 'data', type: 'string' }], - }); - - // Now mock the size check to be over limit - const mockFindDataTablesSize = jest - .spyOn(dataStoreRepository, 'findDataTablesSize') - .mockResolvedValue(maxSize + 1); - - // ACT & ASSERT - await expect( - dataStoreService.updateRow(dataStoreId, project1.id, { - filter: { - type: 'and', - filters: [{ columnName: 'id', condition: 'eq', value: 1 }], - }, - data: { data: 'updated' }, - }), - ).rejects.toThrow(DataStoreValidationError); - - expect(mockFindDataTablesSize).toHaveBeenCalled(); - mockFindDataTablesSize.mockRestore(); - }); - - it('should prevent upsertRow when size limit exceeded (insert case)', async () => { - // ARRANGE - - const maxSize = Container.get(GlobalConfig).dataTable.maxSize; - - const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, { - name: 'dataStore', - columns: [{ name: 'data', type: 'string' }], - }); - - const mockFindDataTablesSize = jest - .spyOn(dataStoreRepository, 'findDataTablesSize') - .mockResolvedValue(maxSize + 1); - - // ACT & ASSERT - await expect( - dataStoreService.upsertRow(dataStoreId, project1.id, { - filter: { - type: 'and', - filters: [{ columnName: 'data', condition: 'eq', value: 'nonexistent' }], - }, - data: { data: 'new' }, - }), - ).rejects.toThrow(DataStoreValidationError); - - expect(mockFindDataTablesSize).toHaveBeenCalled(); - mockFindDataTablesSize.mockRestore(); - }); - }); }); diff --git a/packages/cli/src/modules/data-table/__tests__/test-helpers.ts b/packages/cli/src/modules/data-table/__tests__/test-helpers.ts new file mode 100644 index 0000000000..6c589f73ad --- /dev/null +++ b/packages/cli/src/modules/data-table/__tests__/test-helpers.ts @@ -0,0 +1,13 @@ +import { Container } from '@n8n/di'; + +import { DataStoreSizeValidator } from '../data-store-size-validator.service'; + +export function 
mockDataStoreSizeValidator() {
+	const sizeValidator = Container.get(DataStoreSizeValidator);
+	jest.spyOn(sizeValidator, 'validateSize').mockResolvedValue();
+	jest.spyOn(sizeValidator, 'getCachedSizeData').mockResolvedValue({
+		totalBytes: 50 * 1024 * 1024, // 50MB - under the default limit
+		dataTables: {},
+	});
+	return sizeValidator;
+}
diff --git a/packages/cli/src/modules/data-table/data-store-size-validator.service.ts b/packages/cli/src/modules/data-table/data-store-size-validator.service.ts
index 4a709846ae..1ee85bcc46 100644
--- a/packages/cli/src/modules/data-table/data-store-size-validator.service.ts
+++ b/packages/cli/src/modules/data-table/data-store-size-validator.service.ts
@@ -1,40 +1,46 @@
 import { GlobalConfig } from '@n8n/config';
 import { Service } from '@n8n/di';
-
-import { DataStoreValidationError } from './errors/data-store-validation.error';
 import { DataTableSizeStatus } from 'n8n-workflow';

+import { DataTablesSizeData } from './data-store.types';
+import { DataStoreValidationError } from './errors/data-store-validation.error';
+
 @Service()
 export class DataStoreSizeValidator {
 	private lastCheck: Date | undefined;
-	private cachedSizeInBytes: number | undefined;
-	private pendingCheck: Promise<number> | null = null;
+	private cachedSizeData: DataTablesSizeData | undefined;
+	private pendingCheck: Promise<DataTablesSizeData> | null = null;

 	constructor(private readonly globalConfig: GlobalConfig) {}

-	private shouldRefresh(sizeInBytes: number | undefined, now: Date): sizeInBytes is undefined {
+	private shouldRefresh(
+		cachedData: DataTablesSizeData | undefined,
+		now: Date,
+	): cachedData is undefined {
 		if (
 			!this.lastCheck ||
+			!cachedData ||
 			now.getTime() - this.lastCheck.getTime() >= this.globalConfig.dataTable.sizeCheckCacheDuration
 		) {
-			sizeInBytes = undefined;
+			return true;
 		}

-		return sizeInBytes === undefined;
+		return false;
 	}

-	async getCachedSize(fetchSizeFn: () => Promise<number>, now = new Date()): Promise<number> {
+	async getCachedSizeData(
+		fetchSizeDataFn: () => Promise<DataTablesSizeData>,
+		now = new Date(),
+	): Promise<DataTablesSizeData> {
 		// If there's a pending check, wait for it to complete
-
 		if (this.pendingCheck) {
-			this.cachedSizeInBytes = await this.pendingCheck;
+			this.cachedSizeData = await this.pendingCheck;
 		} else {
-			// Check if we need to refresh the db size
-
-			if (this.shouldRefresh(this.cachedSizeInBytes, now)) {
-				this.pendingCheck = fetchSizeFn();
+			// Check if we need to refresh the size data
+			if (this.shouldRefresh(this.cachedSizeData, now)) {
+				this.pendingCheck = fetchSizeDataFn();
 				try {
-					this.cachedSizeInBytes = await this.pendingCheck;
+					this.cachedSizeData = await this.pendingCheck;
 					this.lastCheck = now;
 				} finally {
 					this.pendingCheck = null;
@@ -42,14 +48,17 @@ export class DataStoreSizeValidator {
 			}
 		}

-		return this.cachedSizeInBytes;
+		return this.cachedSizeData;
 	}

-	async validateSize(fetchSizeFn: () => Promise<number>, now = new Date()): Promise<void> {
-		const size = await this.getCachedSize(fetchSizeFn, now);
-		if (size >= this.globalConfig.dataTable.maxSize) {
+	async validateSize(
+		fetchSizeFn: () => Promise<DataTablesSizeData>,
+		now = new Date(),
+	): Promise<void> {
+		const size = await this.getCachedSizeData(fetchSizeFn, now);
+		if (size.totalBytes >= this.globalConfig.dataTable.maxSize) {
 			throw new DataStoreValidationError(
-				`Data store size limit exceeded: ${this.toMb(size)}MB used, limit is ${this.toMb(this.globalConfig.dataTable.maxSize)}MB`,
+				`Data store size limit exceeded: ${this.toMb(size.totalBytes)}MB used, limit is ${this.toMb(this.globalConfig.dataTable.maxSize)}MB`,
 			);
 		}
 	}
@@ -63,9 +72,9 @@ export class
DataStoreSizeValidator {
 		return 'ok';
 	}

-	async getSizeStatus(fetchSizeFn: () => Promise<number>, now = new Date()) {
-		const size = await this.getCachedSize(fetchSizeFn, now);
-		return this.sizeToState(size);
+	async getSizeStatus(fetchSizeFn: () => Promise<DataTablesSizeData>, now = new Date()) {
+		const size = await this.getCachedSizeData(fetchSizeFn, now);
+		return this.sizeToState(size.totalBytes);
 	}

 	private toMb(sizeInBytes: number): number {
@@ -74,7 +83,7 @@ export class DataStoreSizeValidator {

 	reset() {
 		this.lastCheck = undefined;
-		this.cachedSizeInBytes = undefined;
+		this.cachedSizeData = undefined;
 		this.pendingCheck = null;
 	}
 }
diff --git a/packages/cli/src/modules/data-table/data-store.repository.ts b/packages/cli/src/modules/data-table/data-store.repository.ts
index bbf3f5525c..25564ae596 100644
--- a/packages/cli/src/modules/data-table/data-store.repository.ts
+++ b/packages/cli/src/modules/data-table/data-store.repository.ts
@@ -9,9 +9,10 @@ import { DataSource, EntityManager, Repository, SelectQueryBuilder } from '@n8n/typeorm';
 import { UnexpectedError } from 'n8n-workflow';

 import { DataStoreRowsRepository } from './data-store-rows.repository';
+import { DataStoreUserTableName, DataTablesSizeData } from './data-store.types';
 import { DataTableColumn } from './data-table-column.entity';
 import { DataTable } from './data-table.entity';
-import { toTableName } from './utils/sql-utils';
+import { toTableId, toTableName } from './utils/sql-utils';

 @Service()
 export class DataStoreRepository extends Repository<DataTable> {
@@ -240,49 +241,83 @@ export class DataStoreRepository extends Repository<DataTable> {
 		return [`${alias}.id`, `${alias}.name`, `${alias}.type`, `${alias}.icon`];
 	}

-	async findDataTablesSize(): Promise<number> {
+	private parseSize = (bytes: number | string | null): number =>
+		bytes === null ? 0 : typeof bytes === 'string' ? parseInt(bytes, 10) : bytes;
+
+	async findDataTablesSize(): Promise<DataTablesSizeData> {
 		const dbType = this.globalConfig.database.type;
-		const schemaName = this.globalConfig.database.postgresdb.schema;
+		const tablePattern = toTableName('%');

 		let sql = '';

 		switch (dbType) {
 			case 'sqlite':
 				sql = `
-					SELECT SUM(pgsize) AS total_bytes
-					FROM dbstat
-					WHERE name LIKE '${toTableName('%')}'
-				`;
+				SELECT name AS table_name, SUM(pgsize) AS table_bytes
+				FROM dbstat
+				WHERE name LIKE '${tablePattern}'
+				GROUP BY name
+			`;
 				break;
-			case 'postgresdb':
+			case 'postgresdb': {
+				const schemaName = this.globalConfig.database.postgresdb?.schema;
 				sql = `
-					SELECT SUM(pg_relation_size(c.oid)) AS total_bytes
-					FROM pg_class c
-					JOIN pg_namespace n ON n.oid = c.relnamespace
-					WHERE n.nspname = '${schemaName}'
-					AND c.relname LIKE '${toTableName('%')}'
-					AND c.relkind IN ('r', 'm', 'p')
-				`;
+				SELECT c.relname AS table_name, pg_relation_size(c.oid) AS table_bytes
+				FROM pg_class c
+				JOIN pg_namespace n ON n.oid = c.relnamespace
+				WHERE n.nspname = '${schemaName}'
+				AND c.relname LIKE '${tablePattern}'
+				AND c.relkind IN ('r', 'm', 'p')
+			`;
 				break;
+			}
 			case 'mysqldb':
 			case 'mariadb': {
 				const databaseName = this.globalConfig.database.mysqldb.database;
+				const isMariaDb = dbType === 'mariadb';
+				const innodbTables = isMariaDb ? 'INNODB_SYS_TABLES' : 'INNODB_TABLES';
+				const innodbTablespaces = isMariaDb ?
'INNODB_SYS_TABLESPACES' : 'INNODB_TABLESPACES';
 				sql = `
-					SELECT SUM((DATA_LENGTH + INDEX_LENGTH)) AS total_bytes
-					FROM information_schema.tables
-					WHERE table_schema = '${databaseName}'
-					AND table_name LIKE '${toTableName('%')}'
-				`;
+				SELECT t.TABLE_NAME AS table_name,
+					COALESCE(
+						(
+							SELECT SUM(ists.ALLOCATED_SIZE)
+							FROM information_schema.${innodbTables} ist
+							JOIN information_schema.${innodbTablespaces} ists
+								ON ists.SPACE = ist.SPACE
+							WHERE ist.NAME = CONCAT(t.TABLE_SCHEMA, '/', t.TABLE_NAME)
+						),
+						(t.DATA_LENGTH + t.INDEX_LENGTH)
+					) AS table_bytes
+				FROM information_schema.TABLES t
+				WHERE t.TABLE_SCHEMA = '${databaseName}'
+				AND t.TABLE_NAME LIKE '${tablePattern}'
+			`;
 				break;
 			}
 			default:
-				return 0;
+				return { totalBytes: 0, dataTables: {} };
 		}

-		const result = (await this.query(sql)) as Array<{ total_bytes: number | null }>;
-		return result[0]?.total_bytes ?? 0;
+		const result = (await this.query(sql)) as Array<{
+			table_name: string;
+			table_bytes: number | string | null;
+		}>;
+
+		return result
+			.filter((row) => row.table_bytes !== null && row.table_name)
+			.reduce(
+				(acc, row) => {
+					const dataStoreId = toTableId(row.table_name as DataStoreUserTableName);
+					const sizeBytes = this.parseSize(row.table_bytes);
+					acc.dataTables[dataStoreId] = (acc.dataTables[dataStoreId] ?? 0) + sizeBytes;
+					acc.totalBytes += sizeBytes;
+					return acc;
+				},
+				{ dataTables: {} as Record<string, number>, totalBytes: 0 },
+			);
 	}
 }
diff --git a/packages/cli/src/modules/data-table/data-store.service.ts b/packages/cli/src/modules/data-table/data-store.service.ts
index 1c075039ef..2737e950c2 100644
--- a/packages/cli/src/modules/data-table/data-store.service.ts
+++ b/packages/cli/src/modules/data-table/data-store.service.ts
@@ -453,12 +453,13 @@ export class DataStoreService {
 	}

 	async getDataTablesSize() {
-		const sizeBytes = await this.dataStoreSizeValidator.getCachedSize(
+		const sizeData = await this.dataStoreSizeValidator.getCachedSizeData(
 			async () => await this.dataStoreRepository.findDataTablesSize(),
 		);
 		return {
-			sizeBytes,
-			sizeState: this.dataStoreSizeValidator.sizeToState(sizeBytes),
+			sizeBytes: sizeData.totalBytes,
+			sizeState: this.dataStoreSizeValidator.sizeToState(sizeData.totalBytes),
+			dataTables: sizeData.dataTables,
 		};
 	}
 }
diff --git a/packages/cli/src/modules/data-table/data-store.types.ts b/packages/cli/src/modules/data-table/data-store.types.ts
index 8bfc1b43f3..8401a0b94f 100644
--- a/packages/cli/src/modules/data-table/data-store.types.ts
+++ b/packages/cli/src/modules/data-table/data-store.types.ts
@@ -2,6 +2,11 @@ import type { FieldTypeMap } from 'n8n-workflow';

 export type DataStoreUserTableName = `${string}data_table_user_${string}`;

+export type DataTablesSizeData = {
+	totalBytes: number;
+	dataTables: Record<string, number>;
+};
+
 export const columnTypeToFieldType: Record = {
 	// eslint-disable-next-line id-denylist
 	number: 'number',
diff --git a/packages/cli/src/modules/data-table/utils/sql-utils.ts b/packages/cli/src/modules/data-table/utils/sql-utils.ts
index 5e01a85b9d..a612b7a7d9 100644
--- a/packages/cli/src/modules/data-table/utils/sql-utils.ts
+++ b/packages/cli/src/modules/data-table/utils/sql-utils.ts
@@ -293,3 +293,7 @@ export function toTableName(dataStoreId: string): DataStoreUserTableName {
 	const { tablePrefix } = Container.get(GlobalConfig).database;
 	return `${tablePrefix}data_table_user_${dataStoreId}`;
 }
+
+export function toTableId(tableName: DataStoreUserTableName) {
+	return tableName.replace(/.*data_table_user_/, '');
+}
diff --git
a/packages/frontend/editor-ui/src/features/dataStore/components/DataStoreCard.vue b/packages/frontend/editor-ui/src/features/dataStore/components/DataStoreCard.vue
index 0ef05a0362..a14c8070bb 100644
--- a/packages/frontend/editor-ui/src/features/dataStore/components/DataStoreCard.vue
+++ b/packages/frontend/editor-ui/src/features/dataStore/components/DataStoreCard.vue
@@ -4,6 +4,7 @@ import { DATA_STORE_DETAILS } from '@/features/dataStore/constants';
 import { useI18n } from '@n8n/i18n';
 import { computed } from 'vue';
 import DataStoreActions from '@/features/dataStore/components/DataStoreActions.vue';
+import { useDataStoreStore } from '@/features/dataStore/dataStore.store';

 type Props = {
 	dataStore: DataStoreResource;
@@ -12,6 +13,7 @@
 };

 const i18n = useI18n();
+const dataStoreStore = useDataStoreStore();

 const props = withDefaults(defineProps<Props>(), {
 	actions: () => [],
@@ -28,6 +30,11 @@ const dataStoreRoute = computed(() => {
 		},
 	};
 });
+
+const getDataStoreSize = computed(() => {
+	const size = dataStoreStore.dataStoreSizes[props.dataStore.id] ?? 0;
+	return size;
+});
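
Note on consuming the new shape (illustrative sketch, not part of the patch): getDataTablesSize() now returns a dataTables map of data store ID to bytes alongside the existing total, and the card above reads a per-table figure from dataStoreStore.dataStoreSizes. The TypeScript sketch below shows one way such a per-table label could be derived from that map; the DataTablesSize type name and the toMb/formatDataStoreSize helpers are assumptions made for this example, not code from the PR.

// Illustrative sketch only — mirrors the shape returned by DataStoreService.getDataTablesSize();
// the helper names below are hypothetical.
type DataTablesSize = {
	sizeBytes: number; // total bytes across all user data tables
	dataTables: Record<string, number>; // dataStoreId -> bytes used by that table
};

// Convert bytes to megabytes with one decimal, matching the MB-based limit messages.
const toMb = (bytes: number): number => Math.round((bytes / (1024 * 1024)) * 10) / 10;

// Resolve the size shown for a single data table; tables with no rows yet may be
// missing from the map, so default to 0.
function formatDataStoreSize(sizes: DataTablesSize, dataStoreId: string): string {
	const bytes = sizes.dataTables[dataStoreId] ?? 0;
	return `${toMb(bytes)} MB`;
}

// Example: formatDataStoreSize({ sizeBytes: 1_572_864, dataTables: { abc123: 1_048_576 } }, 'abc123') === '1 MB'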