Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git, synced 2025-12-16 17:46:45 +00:00
fix(core): Add mechanism to prevent concurrent compaction on Insights (#14988)
Co-authored-by: Danny Martini <danny@n8n.io>
Committed by: GitHub
Parent: 539f4cc5be
Commit: 392e91480a
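The mechanism the commit title describes is a simple non-reentrant guard: a boolean flag on the repository is checked before a compaction run starts, set for the duration of the run, and cleared in a finally block so a thrown error cannot leave it stuck. Below is a minimal sketch of that pattern, not the actual n8n code; the names CompactionRunner, runCompaction, and doCompaction are illustrative, while the diff further down applies the same idea inside InsightsByPeriodRepository.compactSourceDataIntoInsightPeriod.

// Minimal sketch of the guard pattern, assuming a single service instance per process.
// The flag does not coordinate across multiple processes or hosts.
class CompactionRunner {
  private isRunning = false;

  // Runs the given compaction step unless one is already in flight.
  // Returns the step's row count, or 0 when the call is skipped.
  async runCompaction(doCompaction: () => Promise<number>): Promise<number> {
    if (this.isRunning) {
      return 0;
    }
    this.isRunning = true;
    try {
      return await doCompaction();
    } finally {
      // Always clear the flag, even when the compaction step throws.
      this.isRunning = false;
    }
  }
}

Because JavaScript callers run on a single thread, checking and setting the flag before the first await is enough to make this guard race-free within one process.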
@@ -1,8 +1,10 @@
import { GlobalConfig } from '@n8n/config';
import { Container } from '@n8n/di';
import { mock } from 'jest-mock-extended';
import { DateTime } from 'luxon';

import { InsightsRawRepository } from '@/modules/insights/database/repositories/insights-raw.repository';
import { mockLogger } from '@test/mocking';
import { createTeamProject } from '@test-integration/db/projects';
import { createWorkflow } from '@test-integration/db/workflows';
import * as testDb from '@test-integration/test-db';
@@ -45,6 +47,7 @@ if (dbType === 'sqlite' && !globalConfig.database.sqlite.poolSize) {
  afterAll(async () => {
    await testDb.terminate();
  });

  describe('compaction', () => {
    describe('compactRawToHour', () => {
      type TestData = {
@@ -317,14 +320,21 @@

  describe('compactionSchedule', () => {
    test('compaction is running on schedule', async () => {
      // ARRANGE
      jest.useFakeTimers();
-     try {
-       // ARRANGE
-       const insightsCompactionService = Container.get(InsightsCompactionService);
-       insightsCompactionService.startCompactionTimer();
+     const insightsCompactionService = new InsightsCompactionService(
+       mock<InsightsByPeriodRepository>(),
+       mock<InsightsRawRepository>(),
+       mock<InsightsConfig>({
+         compactionIntervalMinutes: 60,
+       }),
+       mockLogger(),
+     );

      // spy on the compactInsights method to check if it's called
      const compactInsightsSpy = jest.spyOn(insightsCompactionService, 'compactInsights');

+     try {
+       insightsCompactionService.startCompactionTimer();
+
        // ACT
        // advance by 1 hour and 1 minute
@@ -333,6 +343,7 @@ if (dbType === 'sqlite' && !globalConfig.database.sqlite.poolSize) {
        // ASSERT
        expect(compactInsightsSpy).toHaveBeenCalledTimes(1);
      } finally {
+       insightsCompactionService.stopCompactionTimer();
        jest.useRealTimers();
      }
    });

@@ -0,0 +1,77 @@
import { Container } from '@n8n/di';
import { DateTime } from 'luxon';

import { InsightsConfig } from '@/modules/insights/insights.config';
import { createTeamProject } from '@test-integration/db/projects';
import { createWorkflow } from '@test-integration/db/workflows';
import * as testDb from '@test-integration/test-db';

import { createCompactedInsightsEvent, createMetadata } from '../../entities/__tests__/db-utils';
import { InsightsByPeriodRepository } from '../insights-by-period.repository';

describe('InsightsByPeriodRepository', () => {
  beforeAll(async () => {
    await testDb.init();
  });

  describe('Avoid deadlock error', () => {
    let defaultBatchSize: number;
    beforeAll(() => {
      // Store the original config value
      const insightsConfig = Container.get(InsightsConfig);
      defaultBatchSize = insightsConfig.compactionBatchSize;

      // Set a smaller batch size to trigger the deadlock error
      insightsConfig.compactionBatchSize = 3;
    });

    afterAll(() => {
      // Reset the config to its original state
      const insightsConfig = Container.get(InsightsConfig);
      insightsConfig.compactionBatchSize = defaultBatchSize;
    });

    test('should not throw deadlock error on concurrent compaction', async () => {
      // ARRANGE
      const insightsConfig = Container.get(InsightsConfig);
      const insightsByPeriodRepository = Container.get(InsightsByPeriodRepository);
      const transactionSpy = jest.spyOn(insightsByPeriodRepository.manager, 'transaction');
      const project = await createTeamProject();
      const workflow = await createWorkflow({}, project);
      await createMetadata(workflow);

      const batchQuery = insightsByPeriodRepository.getPeriodInsightsBatchQuery({
        periodUnitToCompactFrom: 'hour',
        compactionBatchSize: insightsConfig.compactionBatchSize,
        maxAgeInDays: insightsConfig.compactionHourlyToDailyThresholdDays,
      });

      // Create test data
      const promises = [];
      for (let i = 0; i < 100; i++) {
        await createCompactedInsightsEvent(workflow, {
          type: 'success',
          value: 1,
          periodUnit: 'hour',
          periodStart: DateTime.now().minus({ day: 91, hour: i + 1 }),
        });
      }

      // ACT
      for (let i = 0; i < 10; i++) {
        promises.push(
          insightsByPeriodRepository.compactSourceDataIntoInsightPeriod({
            sourceBatchQuery: batchQuery,
            sourceTableName: insightsByPeriodRepository.metadata.tableName,
            periodUnitToCompactInto: 'day',
          }),
        );
      }

      // ASSERT
      // await all promises concurrently
      await expect(Promise.all(promises)).resolves.toBeDefined();
      expect(transactionSpy).toHaveBeenCalledTimes(1);
    });
  });
});
@@ -57,6 +57,8 @@ const aggregatedInsightsByTimeParser = z

@Service()
export class InsightsByPeriodRepository extends Repository<InsightsByPeriod> {
+ private isRunningCompaction = false;
+
  constructor(dataSource: DataSource) {
    super(InsightsByPeriod, dataSource.manager);
  }
@@ -171,73 +173,83 @@ export class InsightsByPeriodRepository extends Repository<InsightsByPeriod> {
   */
    periodUnitToCompactInto: PeriodUnit;
  }): Promise<number> {
+   // Skip compaction if the process is already running
+   if (this.isRunningCompaction) {
+     return 0;
+   }
+   this.isRunningCompaction = true;
+
+   try {
      // Create temp table that only exists in this transaction for rows to compact
      const getBatchAndStoreInTemporaryTable = sql`
        CREATE TEMPORARY TABLE rows_to_compact AS
        ${sourceBatchQuery.getSql()};
      `;

      const countBatch = sql`
        SELECT COUNT(*) ${this.escapeField('rowsInBatch')} FROM rows_to_compact;
      `;

      const targetColumnNamesStr = ['metaId', 'type', 'periodUnit', 'periodStart']
        .map((param) => this.escapeField(param))
        .join(', ');
      const targetColumnNamesWithValue = `${targetColumnNamesStr}, value`;

      // Function to get the aggregation query
      const aggregationQuery = this.getAggregationQuery(periodUnitToCompactInto);

      // Insert or update aggregated data
      const insertQueryBase = sql`
        INSERT INTO ${this.metadata.tableName}
        (${targetColumnNamesWithValue})
        ${aggregationQuery.getSql()}
      `;

      // Database-specific duplicate key logic
      let deduplicateQuery: string;
      if (dbType === 'mysqldb' || dbType === 'mariadb') {
        deduplicateQuery = sql`
          ON DUPLICATE KEY UPDATE value = value + VALUES(value)`;
      } else {
        deduplicateQuery = sql`
          ON CONFLICT(${targetColumnNamesStr})
          DO UPDATE SET value = ${this.metadata.tableName}.value + excluded.value
          RETURNING *`;
      }

      const upsertEvents = sql`
        ${insertQueryBase}
        ${deduplicateQuery}
      `;

      // Delete the processed rows
      const deleteBatch = sql`
        DELETE FROM ${sourceTableName}
        WHERE id IN (SELECT id FROM rows_to_compact);
      `;

      // Clean up
      const dropTemporaryTable = sql`
        DROP TABLE rows_to_compact;
      `;

      const result = await this.manager.transaction(async (trx) => {
        await trx.query(getBatchAndStoreInTemporaryTable);

        await trx.query<Array<{ type: any; value: number }>>(upsertEvents);

        const rowsInBatch = await trx.query<[{ rowsInBatch: number | string }]>(countBatch);

        await trx.query(deleteBatch);
        await trx.query(dropTemporaryTable);

        return Number(rowsInBatch[0].rowsInBatch);
      });

      return result;
+   } finally {
+     this.isRunningCompaction = false;
+   }
  }

  private getAgeLimitQuery(maxAgeInDays: number) {