feat(core): Show data table per table storage usage (no-changelog) (#19480)

This commit is contained in:
Daria
2025-09-17 10:20:33 +03:00
committed by GitHub
parent 3278b36e28
commit 763d17bb1f
14 changed files with 384 additions and 165 deletions

View File

@@ -4,12 +4,14 @@ import { createTeamProject, testDb, testModules } from '@n8n/backend-test-utils'
import { Project } from '@n8n/db';
import { Container } from '@n8n/di';
import { mockDataStoreSizeValidator } from './test-helpers';
import { DataStoreService } from '../data-store.service';
import { DataStoreValidationError } from '../errors/data-store-validation.error';
beforeAll(async () => {
await testModules.loadModules(['data-table']);
await testDb.init();
mockDataStoreSizeValidator();
});
beforeEach(async () => {

View File

@@ -20,7 +20,7 @@ describe('DataStoreSizeValidator', () => {
describe('basic functionality', () => {
it('should fetch size on first call', async () => {
fetchSizeFn.mockResolvedValue(50 * 1024 * 1024); // 50MB
fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} }); // 50MB
await validator.validateSize(fetchSizeFn, new Date('2024-01-01T00:00:00Z'));
@@ -28,7 +28,7 @@ describe('DataStoreSizeValidator', () => {
});
it('should pass validation when size is under limit', async () => {
fetchSizeFn.mockResolvedValue(50 * 1024 * 1024);
fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} });
await expect(
validator.validateSize(fetchSizeFn, new Date('2024-01-01T00:00:00Z')),
@@ -36,7 +36,7 @@ describe('DataStoreSizeValidator', () => {
});
it('should throw error when size exceeds limit', async () => {
fetchSizeFn.mockResolvedValue(150 * 1024 * 1024);
fetchSizeFn.mockResolvedValue({ totalBytes: 150 * 1024 * 1024, dataTables: {} });
await expect(
validator.validateSize(fetchSizeFn, new Date('2024-01-01T00:00:00Z')),
@@ -44,7 +44,7 @@ describe('DataStoreSizeValidator', () => {
});
it('should throw error when size equals limit', async () => {
fetchSizeFn.mockResolvedValue(100 * 1024 * 1024);
fetchSizeFn.mockResolvedValue({ totalBytes: 100 * 1024 * 1024, dataTables: {} });
await expect(
validator.validateSize(fetchSizeFn, new Date('2024-01-01T00:00:00Z')),
@@ -54,7 +54,7 @@ describe('DataStoreSizeValidator', () => {
describe('caching behavior', () => {
it('should use cached value within cache duration', async () => {
fetchSizeFn.mockResolvedValue(50);
fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} });
const time1 = new Date('2024-01-01T00:00:00Z');
const time2 = new Date('2024-01-01T00:00:00.500Z'); // 500ms later
@@ -65,7 +65,7 @@ describe('DataStoreSizeValidator', () => {
});
it('should refresh cache after cache duration expires', async () => {
fetchSizeFn.mockResolvedValue(50 * 1024 * 1024);
fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} });
const time1 = new Date('2024-01-01T00:00:00Z');
const time2 = new Date('2024-01-01T00:00:01.001Z'); // 1001ms later
@@ -77,7 +77,7 @@ describe('DataStoreSizeValidator', () => {
it('should always validate against cached value even without refresh', async () => {
// First call: DB at 50MB
fetchSizeFn.mockResolvedValue(50 * 1024 * 1024);
fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} });
const time1 = new Date('2024-01-01T00:00:00Z');
await validator.validateSize(fetchSizeFn, time1);
@@ -91,7 +91,7 @@ describe('DataStoreSizeValidator', () => {
it('should fail validation once cached value shows full DB', async () => {
// First call: DB becomes full (100MB)
fetchSizeFn.mockResolvedValue(100 * 1024 * 1024);
fetchSizeFn.mockResolvedValue({ totalBytes: 100 * 1024 * 1024, dataTables: {} });
const time1 = new Date('2024-01-01T00:00:00Z');
await expect(validator.validateSize(fetchSizeFn, time1)).rejects.toThrow(
@@ -111,10 +111,12 @@ describe('DataStoreSizeValidator', () => {
describe('concurrent calls', () => {
it('should handle concurrent calls correctly', async () => {
let resolveCheck: (value: number) => void;
const checkPromise = new Promise<number>((resolve) => {
resolveCheck = resolve;
});
let resolveCheck: (value: { totalBytes: number; dataTables: Record<string, number> }) => void;
const checkPromise = new Promise<{ totalBytes: number; dataTables: Record<string, number> }>(
(resolve) => {
resolveCheck = resolve;
},
);
fetchSizeFn.mockImplementation(async () => await checkPromise);
@@ -129,7 +131,7 @@ describe('DataStoreSizeValidator', () => {
await new Promise((resolve) => setImmediate(resolve));
// Resolve the check with a value under the limit
resolveCheck!(50 * 1024 * 1024);
resolveCheck!({ totalBytes: 50 * 1024 * 1024, dataTables: {} });
await Promise.all([promise1, promise2, promise3]);
@@ -138,10 +140,12 @@ describe('DataStoreSizeValidator', () => {
});
it('should share failure state among concurrent calls', async () => {
let resolveCheck: (value: number) => void;
const checkPromise = new Promise<number>((resolve) => {
resolveCheck = resolve;
});
let resolveCheck: (value: { totalBytes: number; dataTables: Record<string, number> }) => void;
const checkPromise = new Promise<{ totalBytes: number; dataTables: Record<string, number> }>(
(resolve) => {
resolveCheck = resolve;
},
);
fetchSizeFn.mockImplementation(async () => await checkPromise);
@@ -153,7 +157,7 @@ describe('DataStoreSizeValidator', () => {
const promise3 = validator.validateSize(fetchSizeFn, time);
// Resolve with size over limit
resolveCheck!(150 * 1024 * 1024);
resolveCheck!({ totalBytes: 150 * 1024 * 1024, dataTables: {} });
// All should fail with the same error
await expect(promise1).rejects.toThrow(
@@ -173,7 +177,7 @@ describe('DataStoreSizeValidator', () => {
describe('reset functionality', () => {
it('should clear cache when reset is called', async () => {
fetchSizeFn.mockResolvedValue(50 * 1024 * 1024);
fetchSizeFn.mockResolvedValue({ totalBytes: 50 * 1024 * 1024, dataTables: {} });
const time1 = new Date('2024-01-01T00:00:00Z');
// First call
@@ -202,7 +206,7 @@ describe('DataStoreSizeValidator', () => {
// t=1500ms: Still within new cache window, uses cached 100MB, continues to FAIL
// First check: DB at 99MB (under limit)
fetchSizeFn.mockResolvedValueOnce(99 * 1024 * 1024);
fetchSizeFn.mockResolvedValueOnce({ totalBytes: 99 * 1024 * 1024, dataTables: {} });
const time1 = new Date('2024-01-01T00:00:00Z');
await expect(validator.validateSize(fetchSizeFn, time1)).resolves.toBeUndefined();
@@ -212,7 +216,7 @@ describe('DataStoreSizeValidator', () => {
await expect(validator.validateSize(fetchSizeFn, time2)).resolves.toBeUndefined();
// After cache expires: new check fetches current state showing DB is now full
fetchSizeFn.mockResolvedValueOnce(100 * 1024 * 1024);
fetchSizeFn.mockResolvedValueOnce({ totalBytes: 100 * 1024 * 1024, dataTables: {} });
const time3 = new Date('2024-01-01T00:00:01.001Z');
await expect(validator.validateSize(fetchSizeFn, time3)).rejects.toThrow(
'Data store size limit exceeded: 100MB used, limit is 100MB',

View File

@@ -0,0 +1,198 @@
import { createTeamProject, testDb, testModules } from '@n8n/backend-test-utils';
import { GlobalConfig } from '@n8n/config';
import type { Project } from '@n8n/db';
import { Container } from '@n8n/di';
import { DataStoreSizeValidator } from '../data-store-size-validator.service';
import { DataStoreRepository } from '../data-store.repository';
import { DataStoreService } from '../data-store.service';
import { DataStoreValidationError } from '../errors/data-store-validation.error';
// Integration tests for data table storage-size accounting: the write-path
// size guard (insertRows / updateRow / upsertRow) and the per-table size
// report produced by DataStoreRepository.findDataTablesSize().

// One-time setup: load the data-table module and initialise the test DB.
beforeAll(async () => {
await testModules.loadModules(['data-table']);
await testDb.init();
});
// Start every test from a clean slate: drop all data stores, truncate the
// metadata tables, and reset the validator's cached size so no stale
// measurement leaks between tests.
beforeEach(async () => {
const dataStoreService = Container.get(DataStoreService);
await dataStoreService.deleteDataStoreAll();
await testDb.truncate(['DataTable', 'DataTableColumn']);
const dataStoreSizeValidator = Container.get(DataStoreSizeValidator);
dataStoreSizeValidator.reset();
});
afterAll(async () => {
await testDb.terminate();
});
describe('Data Store Size Tests', () => {
let dataStoreService: DataStoreService;
let dataStoreRepository: DataStoreRepository;
beforeAll(() => {
dataStoreService = Container.get(DataStoreService);
dataStoreRepository = Container.get(DataStoreRepository);
});
// Fresh project per test so data stores never collide across tests.
let project1: Project;
beforeEach(async () => {
project1 = await createTeamProject();
});
describe('size validation', () => {
// Pattern for all three tests below: mock findDataTablesSize to report
// usage one byte over the configured maximum, then assert that the write
// operation is rejected with DataStoreValidationError and that the size
// check was actually consulted.
it('should prevent insertRows when size limit exceeded', async () => {
// ARRANGE
const dataStoreSizeValidator = Container.get(DataStoreSizeValidator);
dataStoreSizeValidator.reset();
const maxSize = Container.get(GlobalConfig).dataTable.maxSize;
const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, {
name: 'dataStore',
columns: [{ name: 'data', type: 'string' }],
});
// Report usage just over the limit (empty per-table breakdown suffices —
// only totalBytes drives the validation).
const mockFindDataTablesSize = jest
.spyOn(dataStoreRepository, 'findDataTablesSize')
.mockResolvedValue({ totalBytes: maxSize + 1, dataTables: {} });
// ACT & ASSERT
await expect(
dataStoreService.insertRows(dataStoreId, project1.id, [{ data: 'test' }]),
).rejects.toThrow(DataStoreValidationError);
expect(mockFindDataTablesSize).toHaveBeenCalled();
mockFindDataTablesSize.mockRestore();
});
it('should prevent updateRow when size limit exceeded', async () => {
// ARRANGE
const dataStoreSizeValidator = Container.get(DataStoreSizeValidator);
dataStoreSizeValidator.reset();
const maxSize = Container.get(GlobalConfig).dataTable.maxSize;
const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, {
name: 'dataStore',
columns: [{ name: 'data', type: 'string' }],
});
// Now mock the size check to be over limit
const mockFindDataTablesSize = jest
.spyOn(dataStoreRepository, 'findDataTablesSize')
.mockResolvedValue({ totalBytes: maxSize + 1, dataTables: {} });
// ACT & ASSERT
await expect(
dataStoreService.updateRow(dataStoreId, project1.id, {
filter: {
type: 'and',
filters: [{ columnName: 'id', condition: 'eq', value: 1 }],
},
data: { data: 'updated' },
}),
).rejects.toThrow(DataStoreValidationError);
expect(mockFindDataTablesSize).toHaveBeenCalled();
mockFindDataTablesSize.mockRestore();
});
it('should prevent upsertRow when size limit exceeded (insert case)', async () => {
// ARRANGE
const maxSize = Container.get(GlobalConfig).dataTable.maxSize;
const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, {
name: 'dataStore',
columns: [{ name: 'data', type: 'string' }],
});
const mockFindDataTablesSize = jest
.spyOn(dataStoreRepository, 'findDataTablesSize')
.mockResolvedValue({ totalBytes: maxSize + 1, dataTables: {} });
// ACT & ASSERT
// Filter matches no row, so upsert takes the insert path — which must
// still be blocked by the size guard.
await expect(
dataStoreService.upsertRow(dataStoreId, project1.id, {
filter: {
type: 'and',
filters: [{ columnName: 'data', condition: 'eq', value: 'nonexistent' }],
},
data: { data: 'new' },
}),
).rejects.toThrow(DataStoreValidationError);
expect(mockFindDataTablesSize).toHaveBeenCalled();
mockFindDataTablesSize.mockRestore();
});
});
describe('findDataTablesSize', () => {
// These tests hit the real repository query (no mocking) against the
// test database backend.
it('should return size information for data tables', async () => {
// ARRANGE
const dataStore1 = await dataStoreService.createDataStore(project1.id, {
name: 'dataStore1',
columns: [{ name: 'data', type: 'string' }],
});
const dataStore2 = await dataStoreService.createDataStore(project1.id, {
name: 'dataStore2',
columns: [{ name: 'data', type: 'string' }],
});
// 1000 rows in store 1 vs a single row in store 2, so store 1 should
// measure strictly larger on disk.
const data = new Array(1000).fill(0).map((_, i) => ({ data: `test_data_${i}` }));
await dataStoreService.insertRows(dataStore1.id, project1.id, data);
await dataStoreService.insertRows(dataStore2.id, project1.id, [{ data: 'test' }]);
// ACT
const result = await dataStoreRepository.findDataTablesSize();
// ASSERT
expect(result).toBeDefined();
expect(result.totalBytes).toBeGreaterThan(0);
expect(result.dataTables).toBeDefined();
expect(Object.keys(result.dataTables)).toHaveLength(2);
expect(result.dataTables[dataStore1.id]).toBeGreaterThan(0);
expect(result.dataTables[dataStore2.id]).toBeGreaterThan(0);
expect(result.dataTables[dataStore1.id]).toBeGreaterThan(result.dataTables[dataStore2.id]);
// Total should be sum of individual tables
const expectedTotal = result.dataTables[dataStore1.id] + result.dataTables[dataStore2.id];
expect(result.totalBytes).toBe(expectedTotal);
});
it('should return empty result when no data tables exist', async () => {
// ACT
const result = await dataStoreRepository.findDataTablesSize();
// ASSERT
expect(result).toBeDefined();
expect(result.totalBytes).toBe(0);
expect(result.dataTables).toBeDefined();
expect(Object.keys(result.dataTables)).toHaveLength(0);
});
it('should handle data tables with no rows', async () => {
// ARRANGE
const dataStore = await dataStoreService.createDataStore(project1.id, {
name: 'emptyDataStore',
columns: [{ name: 'data', type: 'string' }],
});
// ACT
const result = await dataStoreRepository.findDataTablesSize();
// ASSERT
// An empty table may still occupy >= 0 bytes (page allocation varies by
// DB backend), so only non-negativity is asserted here.
expect(result).toBeDefined();
expect(result.totalBytes).toBeGreaterThanOrEqual(0);
expect(result.dataTables).toBeDefined();
expect(result.dataTables[dataStore.id]).toBeGreaterThanOrEqual(0);
});
});
});

View File

@@ -21,6 +21,7 @@ import * as utils from '@test-integration/utils';
import { DataStoreColumnRepository } from '../data-store-column.repository';
import { DataStoreRowsRepository } from '../data-store-rows.repository';
import { DataStoreRepository } from '../data-store.repository';
import { mockDataStoreSizeValidator } from './test-helpers';
let owner: User;
let member: User;
@@ -42,6 +43,7 @@ let dataStoreRowsRepository: DataStoreRowsRepository;
beforeAll(async () => {
await testDb.init();
mockDataStoreSizeValidator();
});
beforeEach(async () => {

View File

@@ -1,15 +1,14 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import type { AddDataStoreColumnDto, CreateDataStoreColumnDto } from '@n8n/api-types';
import { createTeamProject, testDb, testModules } from '@n8n/backend-test-utils';
import { GlobalConfig } from '@n8n/config';
import type { Project } from '@n8n/db';
import { Container } from '@n8n/di';
import type { DataStoreRow } from 'n8n-workflow';
import { DataStoreRowsRepository } from '../data-store-rows.repository';
import { DataStoreSizeValidator } from '../data-store-size-validator.service';
import { DataStoreRepository } from '../data-store.repository';
import { DataStoreService } from '../data-store.service';
import { mockDataStoreSizeValidator } from './test-helpers';
import { DataStoreColumnNameConflictError } from '../errors/data-store-column-name-conflict.error';
import { DataStoreColumnNotFoundError } from '../errors/data-store-column-not-found.error';
import { DataStoreNameConflictError } from '../errors/data-store-name-conflict.error';
@@ -20,12 +19,11 @@ import { toTableName } from '../utils/sql-utils';
beforeAll(async () => {
await testModules.loadModules(['data-table']);
await testDb.init();
mockDataStoreSizeValidator();
});
beforeEach(async () => {
await testDb.truncate(['DataTable', 'DataTableColumn']);
const dataStoreSizeValidator = Container.get(DataStoreSizeValidator);
dataStoreSizeValidator.reset();
});
afterAll(async () => {
@@ -2657,92 +2655,4 @@ describe('dataStore', () => {
await expect(result).rejects.toThrow(DataStoreValidationError);
});
});
describe('size validation', () => {
it('should prevent insertRows when size limit exceeded', async () => {
// ARRANGE
const dataStoreSizeValidator = Container.get(DataStoreSizeValidator);
dataStoreSizeValidator.reset();
const maxSize = Container.get(GlobalConfig).dataTable.maxSize;
const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, {
name: 'dataStore',
columns: [{ name: 'data', type: 'string' }],
});
const mockFindDataTablesSize = jest
.spyOn(dataStoreRepository, 'findDataTablesSize')
.mockResolvedValue(maxSize + 1);
// ACT & ASSERT
await expect(
dataStoreService.insertRows(dataStoreId, project1.id, [{ data: 'test' }]),
).rejects.toThrow(DataStoreValidationError);
expect(mockFindDataTablesSize).toHaveBeenCalled();
mockFindDataTablesSize.mockRestore();
});
it('should prevent updateRow when size limit exceeded', async () => {
// ARRANGE
const dataStoreSizeValidator = Container.get(DataStoreSizeValidator);
dataStoreSizeValidator.reset();
const maxSize = Container.get(GlobalConfig).dataTable.maxSize;
const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, {
name: 'dataStore',
columns: [{ name: 'data', type: 'string' }],
});
// Now mock the size check to be over limit
const mockFindDataTablesSize = jest
.spyOn(dataStoreRepository, 'findDataTablesSize')
.mockResolvedValue(maxSize + 1);
// ACT & ASSERT
await expect(
dataStoreService.updateRow(dataStoreId, project1.id, {
filter: {
type: 'and',
filters: [{ columnName: 'id', condition: 'eq', value: 1 }],
},
data: { data: 'updated' },
}),
).rejects.toThrow(DataStoreValidationError);
expect(mockFindDataTablesSize).toHaveBeenCalled();
mockFindDataTablesSize.mockRestore();
});
it('should prevent upsertRow when size limit exceeded (insert case)', async () => {
// ARRANGE
const maxSize = Container.get(GlobalConfig).dataTable.maxSize;
const { id: dataStoreId } = await dataStoreService.createDataStore(project1.id, {
name: 'dataStore',
columns: [{ name: 'data', type: 'string' }],
});
const mockFindDataTablesSize = jest
.spyOn(dataStoreRepository, 'findDataTablesSize')
.mockResolvedValue(maxSize + 1);
// ACT & ASSERT
await expect(
dataStoreService.upsertRow(dataStoreId, project1.id, {
filter: {
type: 'and',
filters: [{ columnName: 'data', condition: 'eq', value: 'nonexistent' }],
},
data: { data: 'new' },
}),
).rejects.toThrow(DataStoreValidationError);
expect(mockFindDataTablesSize).toHaveBeenCalled();
mockFindDataTablesSize.mockRestore();
});
});
});

View File

@@ -0,0 +1,13 @@
import { Container } from '@n8n/di';
import { DataStoreSizeValidator } from '../data-store-size-validator.service';
/**
 * Stubs the DI-registered DataStoreSizeValidator so integration tests never
 * run the real size queries: `validateSize` always resolves (writes are
 * never blocked) and `getCachedSizeData` reports a fixed 50MB usage, which
 * is under the default limit.
 *
 * @returns the stubbed validator instance, for further inspection by tests.
 */
export function mockDataStoreSizeValidator() {
	const validator = Container.get(DataStoreSizeValidator);

	// Size validation always passes in tests.
	jest.spyOn(validator, 'validateSize').mockResolvedValue();

	// Report a constant usage well below the default limit.
	const fiftyMb = 50 * 1024 * 1024;
	jest.spyOn(validator, 'getCachedSizeData').mockResolvedValue({
		totalBytes: fiftyMb,
		dataTables: {},
	});

	return validator;
}

View File

@@ -1,40 +1,46 @@
import { GlobalConfig } from '@n8n/config';
import { Service } from '@n8n/di';
import { DataStoreValidationError } from './errors/data-store-validation.error';
import { DataTableSizeStatus } from 'n8n-workflow';
import { DataTablesSizeData } from './data-store.types';
import { DataStoreValidationError } from './errors/data-store-validation.error';
@Service()
export class DataStoreSizeValidator {
private lastCheck: Date | undefined;
private cachedSizeInBytes: number | undefined;
private pendingCheck: Promise<number> | null = null;
private cachedSizeData: DataTablesSizeData | undefined;
private pendingCheck: Promise<DataTablesSizeData> | null = null;
constructor(private readonly globalConfig: GlobalConfig) {}
private shouldRefresh(sizeInBytes: number | undefined, now: Date): sizeInBytes is undefined {
private shouldRefresh(
cachedData: DataTablesSizeData | undefined,
now: Date,
): cachedData is undefined {
if (
!this.lastCheck ||
!cachedData ||
now.getTime() - this.lastCheck.getTime() >= this.globalConfig.dataTable.sizeCheckCacheDuration
) {
sizeInBytes = undefined;
return true;
}
return sizeInBytes === undefined;
return false;
}
async getCachedSize(fetchSizeFn: () => Promise<number>, now = new Date()): Promise<number> {
async getCachedSizeData(
fetchSizeDataFn: () => Promise<DataTablesSizeData>,
now = new Date(),
): Promise<DataTablesSizeData> {
// If there's a pending check, wait for it to complete
if (this.pendingCheck) {
this.cachedSizeInBytes = await this.pendingCheck;
this.cachedSizeData = await this.pendingCheck;
} else {
// Check if we need to refresh the db size
if (this.shouldRefresh(this.cachedSizeInBytes, now)) {
this.pendingCheck = fetchSizeFn();
// Check if we need to refresh the size data
if (this.shouldRefresh(this.cachedSizeData, now)) {
this.pendingCheck = fetchSizeDataFn();
try {
this.cachedSizeInBytes = await this.pendingCheck;
this.cachedSizeData = await this.pendingCheck;
this.lastCheck = now;
} finally {
this.pendingCheck = null;
@@ -42,14 +48,17 @@ export class DataStoreSizeValidator {
}
}
return this.cachedSizeInBytes;
return this.cachedSizeData;
}
async validateSize(fetchSizeFn: () => Promise<number>, now = new Date()): Promise<void> {
const size = await this.getCachedSize(fetchSizeFn, now);
if (size >= this.globalConfig.dataTable.maxSize) {
async validateSize(
fetchSizeFn: () => Promise<DataTablesSizeData>,
now = new Date(),
): Promise<void> {
const size = await this.getCachedSizeData(fetchSizeFn, now);
if (size.totalBytes >= this.globalConfig.dataTable.maxSize) {
throw new DataStoreValidationError(
`Data store size limit exceeded: ${this.toMb(size)}MB used, limit is ${this.toMb(this.globalConfig.dataTable.maxSize)}MB`,
`Data store size limit exceeded: ${this.toMb(size.totalBytes)}MB used, limit is ${this.toMb(this.globalConfig.dataTable.maxSize)}MB`,
);
}
}
@@ -63,9 +72,9 @@ export class DataStoreSizeValidator {
return 'ok';
}
async getSizeStatus(fetchSizeFn: () => Promise<number>, now = new Date()) {
const size = await this.getCachedSize(fetchSizeFn, now);
return this.sizeToState(size);
async getSizeStatus(fetchSizeFn: () => Promise<DataTablesSizeData>, now = new Date()) {
const size = await this.getCachedSizeData(fetchSizeFn, now);
return this.sizeToState(size.totalBytes);
}
private toMb(sizeInBytes: number): number {
@@ -74,7 +83,7 @@ export class DataStoreSizeValidator {
reset() {
this.lastCheck = undefined;
this.cachedSizeInBytes = undefined;
this.cachedSizeData = undefined;
this.pendingCheck = null;
}
}

View File

@@ -9,9 +9,10 @@ import { DataSource, EntityManager, Repository, SelectQueryBuilder } from '@n8n/
import { UnexpectedError } from 'n8n-workflow';
import { DataStoreRowsRepository } from './data-store-rows.repository';
import { DataStoreUserTableName, DataTablesSizeData } from './data-store.types';
import { DataTableColumn } from './data-table-column.entity';
import { DataTable } from './data-table.entity';
import { toTableName } from './utils/sql-utils';
import { toTableId, toTableName } from './utils/sql-utils';
@Service()
export class DataStoreRepository extends Repository<DataTable> {
@@ -240,49 +241,83 @@ export class DataStoreRepository extends Repository<DataTable> {
return [`${alias}.id`, `${alias}.name`, `${alias}.type`, `${alias}.icon`];
}
async findDataTablesSize(): Promise<number> {
private parseSize = (bytes: number | string | null): number =>
bytes === null ? 0 : typeof bytes === 'string' ? parseInt(bytes, 10) : bytes;
async findDataTablesSize(): Promise<DataTablesSizeData> {
const dbType = this.globalConfig.database.type;
const schemaName = this.globalConfig.database.postgresdb.schema;
const tablePattern = toTableName('%');
let sql = '';
switch (dbType) {
case 'sqlite':
sql = `
SELECT SUM(pgsize) AS total_bytes
FROM dbstat
WHERE name LIKE '${toTableName('%')}'
`;
SELECT name AS table_name, SUM(pgsize) AS table_bytes
FROM dbstat
WHERE name LIKE '${tablePattern}'
GROUP BY name
`;
break;
case 'postgresdb':
case 'postgresdb': {
const schemaName = this.globalConfig.database.postgresdb?.schema;
sql = `
SELECT SUM(pg_relation_size(c.oid)) AS total_bytes
FROM pg_class c
JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE n.nspname = '${schemaName}'
AND c.relname LIKE '${toTableName('%')}'
AND c.relkind IN ('r', 'm', 'p')
`;
SELECT c.relname AS table_name, pg_relation_size(c.oid) AS table_bytes
FROM pg_class c
JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE n.nspname = '${schemaName}'
AND c.relname LIKE '${tablePattern}'
AND c.relkind IN ('r', 'm', 'p')
`;
break;
}
case 'mysqldb':
case 'mariadb': {
const databaseName = this.globalConfig.database.mysqldb.database;
const isMariaDb = dbType === 'mariadb';
const innodbTables = isMariaDb ? 'INNODB_SYS_TABLES' : 'INNODB_TABLES';
const innodbTablespaces = isMariaDb ? 'INNODB_SYS_TABLESPACES' : 'INNODB_TABLESPACES';
sql = `
SELECT SUM((DATA_LENGTH + INDEX_LENGTH)) AS total_bytes
FROM information_schema.tables
WHERE table_schema = '${databaseName}'
AND table_name LIKE '${toTableName('%')}'
`;
SELECT t.TABLE_NAME AS table_name,
COALESCE(
(
SELECT SUM(ists.ALLOCATED_SIZE)
FROM information_schema.${innodbTables} ist
JOIN information_schema.${innodbTablespaces} ists
ON ists.SPACE = ist.SPACE
WHERE ist.NAME = CONCAT(t.TABLE_SCHEMA, '/', t.TABLE_NAME)
),
(t.DATA_LENGTH + t.INDEX_LENGTH)
) AS table_bytes
FROM information_schema.TABLES t
WHERE t.TABLE_SCHEMA = '${databaseName}'
AND t.TABLE_NAME LIKE '${tablePattern}'
`;
break;
}
default:
return 0;
return { totalBytes: 0, dataTables: {} };
}
const result = (await this.query(sql)) as Array<{ total_bytes: number | null }>;
return result[0]?.total_bytes ?? 0;
const result = (await this.query(sql)) as Array<{
table_name: string;
table_bytes: number | string | null;
}>;
return result
.filter((row) => row.table_bytes !== null && row.table_name)
.reduce(
(acc, row) => {
const dataStoreId = toTableId(row.table_name as DataStoreUserTableName);
const sizeBytes = this.parseSize(row.table_bytes);
acc.dataTables[dataStoreId] = (acc.dataTables[dataStoreId] ?? 0) + sizeBytes;
acc.totalBytes += sizeBytes;
return acc;
},
{ dataTables: {} as Record<string, number>, totalBytes: 0 },
);
}
}

View File

@@ -453,12 +453,13 @@ export class DataStoreService {
}
async getDataTablesSize() {
const sizeBytes = await this.dataStoreSizeValidator.getCachedSize(
const sizeData = await this.dataStoreSizeValidator.getCachedSizeData(
async () => await this.dataStoreRepository.findDataTablesSize(),
);
return {
sizeBytes,
sizeState: this.dataStoreSizeValidator.sizeToState(sizeBytes),
sizeBytes: sizeData.totalBytes,
sizeState: this.dataStoreSizeValidator.sizeToState(sizeData.totalBytes),
dataTables: sizeData.dataTables,
};
}
}

View File

@@ -2,6 +2,11 @@ import type { FieldTypeMap } from 'n8n-workflow';
export type DataStoreUserTableName = `${string}data_table_user_${string}`;
/**
 * Aggregated on-disk size of user data tables.
 * `totalBytes` is the sum across all tables; `dataTables` maps each data
 * store id to that table's individual size in bytes.
 */
export type DataTablesSizeData = {
totalBytes: number;
dataTables: Record<string, number>;
};
export const columnTypeToFieldType: Record<string, keyof FieldTypeMap> = {
// eslint-disable-next-line id-denylist
number: 'number',

View File

@@ -293,3 +293,7 @@ export function toTableName(dataStoreId: string): DataStoreUserTableName {
const { tablePrefix } = Container.get(GlobalConfig).database;
return `${tablePrefix}data_table_user_${dataStoreId}`;
}
/**
 * Inverse of `toTableName`: recovers the data store id from a user table
 * name by stripping everything up to and including the `data_table_user_`
 * marker, regardless of the configured table prefix.
 */
export function toTableId(tableName: DataStoreUserTableName) {
	// Greedy leading match removes the prefix together with the marker.
	const prefixAndMarker = /.*data_table_user_/;
	return tableName.replace(prefixAndMarker, '');
}

View File

@@ -4,6 +4,7 @@ import { DATA_STORE_DETAILS } from '@/features/dataStore/constants';
import { useI18n } from '@n8n/i18n';
import { computed } from 'vue';
import DataStoreActions from '@/features/dataStore/components/DataStoreActions.vue';
import { useDataStoreStore } from '@/features/dataStore/dataStore.store';
type Props = {
dataStore: DataStoreResource;
@@ -12,6 +13,7 @@ type Props = {
};
const i18n = useI18n();
const dataStoreStore = useDataStoreStore();
const props = withDefaults(defineProps<Props>(), {
actions: () => [],
@@ -28,6 +30,11 @@ const dataStoreRoute = computed(() => {
},
};
});
const getDataStoreSize = computed(() => {
const size = dataStoreStore.dataStoreSizes[props.dataStore.id] ?? 0;
return size;
});
</script>
<template>
<div data-test-id="data-store-card">
@@ -54,6 +61,18 @@ const dataStoreRoute = computed(() => {
</template>
<template #footer>
<div :class="$style['card-footer']">
<N8nText
size="small"
color="text-light"
:class="[$style['info-cell'], $style['info-cell--size']]"
data-test-id="data-store-card-size"
>
{{
i18n.baseText('dataStore.card.size', {
interpolate: { size: getDataStoreSize },
})
}}
</N8nText>
<N8nText
size="small"
color="text-light"
@@ -143,7 +162,8 @@ const dataStoreRoute = computed(() => {
flex-wrap: wrap;
}
.info-cell--created,
.info-cell--column-count {
.info-cell--column-count,
.info-cell--size {
display: none;
}
}

View File

@@ -35,11 +35,24 @@ export const useDataStoreStore = defineStore(DATA_STORE_STORE, () => {
const totalCount = ref(0);
const dataStoreSize = ref(0);
const dataStoreSizeLimitState = ref<DataTableSizeStatus>('ok');
const dataStoreTableSizes = ref<Record<string, number>>({});
const formatSize = (sizeBytes: number) => {
return Number((sizeBytes / 1024 / 1024).toFixed(2));
};
const maxSizeMB = computed(() =>
Math.floor(settingsStore.settings?.dataTables?.maxSize / 1024 / 1024),
);
const dataStoreSizes = computed(() => {
const formattedSizes: Record<string, number> = {};
for (const [dataStoreId, sizeBytes] of Object.entries(dataStoreTableSizes.value)) {
formattedSizes[dataStoreId] = formatSize(sizeBytes);
}
return formattedSizes;
});
const fetchDataStores = async (projectId: string, page: number, pageSize: number) => {
const response = await fetchDataStoresApi(rootStore.restApiContext, projectId, {
skip: (page - 1) * pageSize,
@@ -219,8 +232,9 @@ export const useDataStoreStore = defineStore(DATA_STORE_STORE, () => {
const fetchDataStoreSize = async () => {
const result = await fetchDataStoreGlobalLimitInBytes(rootStore.restApiContext);
dataStoreSize.value = Number((result.sizeBytes / 1024 / 1024).toFixed(2));
dataStoreSize.value = formatSize(result.sizeBytes);
dataStoreSizeLimitState.value = result.sizeState;
dataStoreTableSizes.value = result.dataTables;
return result;
};
@@ -231,6 +245,7 @@ export const useDataStoreStore = defineStore(DATA_STORE_STORE, () => {
fetchDataStoreSize,
dataStoreSize: computed(() => dataStoreSize.value),
dataStoreSizeLimitState: computed(() => dataStoreSizeLimitState.value),
dataStoreSizes,
maxSizeMB,
createDataStore,
deleteDataStore,

View File

@@ -106,6 +106,7 @@ export type DataTableSizeStatus = 'ok' | 'warn' | 'error';
// Response shape of the global data tables size endpoint: total usage in
// bytes, the ok/warn/error limit status, and the per-table byte counts keyed
// by data store id.
export type DataTablesSizeResult = {
sizeBytes: number;
sizeState: DataTableSizeStatus;
dataTables: Record<string, number>;
};
// APIs for a data store service operating on a specific projectId