mirror of
https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git
synced 2025-12-17 18:12:04 +00:00
feat(core): Add logs for insights flushing and compaction (#15519)
committed by GitHub
parent bb2f675817
commit 3743a8c33d
@@ -282,11 +282,11 @@ describe('compaction', () => {
     const workflow = await createWorkflow({}, project);

     // create 100 more events than the batch size (500)
-    const batchSize = 600;
+    const numberOfEvents = 600;

     let timestamp = DateTime.utc().startOf('hour');
     const events = Array<{ type: 'success'; value: number; timestamp: DateTime }>();
-    for (let i = 0; i < batchSize; i++) {
+    for (let i = 0; i < numberOfEvents; i++) {
       events.push({ type: 'success', value: 1, timestamp });
       timestamp = timestamp.plus({ minute: 1 });
     }
@@ -296,13 +296,13 @@ describe('compaction', () => {
     await insightsCompactionService.compactInsights();

     // ASSERT
-    // compaction batch size is 500, so rawToHour should be called 3 times:
-    // 1st call: 500 events, 2nd call: 100 events, and third call that returns nothing
-    expect(rawToHourSpy).toHaveBeenCalledTimes(3);
+    // compaction batch size is 500, so rawToHour should be called 2 times:
+    // 1st call: 500 events, 2nd call: 100 events
+    expect(rawToHourSpy).toHaveBeenCalledTimes(2);
     await expect(insightsRawRepository.count()).resolves.toBe(0);
     const allCompacted = await insightsByPeriodRepository.find({ order: { periodStart: 1 } });
     const accumulatedValues = allCompacted.reduce((acc, event) => acc + event.value, 0);
-    expect(accumulatedValues).toBe(batchSize);
+    expect(accumulatedValues).toBe(numberOfEvents);
   });
 });

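A note on the test change above: with 600 events and a compaction batch size of 500, the data splits into one full batch of 500 and a partial batch of 100, which is why the spy expectation drops from 3 calls to 2 (the loop-condition change that removes the trailing empty call is in the InsightsCompactionService hunk further down). The accumulated-value assertion is plain arithmetic over the fixture. The standalone sketch below (illustrative only, assuming DateTime is luxon's as in the test) reproduces that arithmetic: one event per minute starting at the top of an hour lands in 10 hourly buckets whose values sum back to numberOfEvents; the test only asserts the sum, not the bucket count.

import { DateTime } from 'luxon';

const numberOfEvents = 600;
let timestamp = DateTime.utc().startOf('hour');

// Group the generated timestamps by their hourly period, the way raw-to-hour compaction would
const hourlyBuckets = new Map<string, number>();
for (let i = 0; i < numberOfEvents; i++) {
  const period = timestamp.startOf('hour').toISO() as string;
  hourlyBuckets.set(period, (hourlyBuckets.get(period) ?? 0) + 1);
  timestamp = timestamp.plus({ minute: 1 });
}

console.log(hourlyBuckets.size); // 10 hourly periods (600 minutes starting on an hour boundary)
console.log([...hourlyBuckets.values()].reduce((acc, value) => acc + value, 0)); // 600, i.e. numberOfEvents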
@@ -100,6 +100,7 @@ export class InsightsCollectionService {

     // Wait for all in-progress asynchronous flushes
     // Flush any remaining events
+    this.logger.debug('Flushing remaining insights before shutdown');
     await Promise.all([...this.flushesInProgress, this.flushEvents()]);
   }

@@ -144,18 +145,21 @@ export class InsightsCollectionService {
     }

     if (!this.isAsynchronouslySavingInsights) {
+      this.logger.debug('Flushing insights synchronously (shutdown in progress)');
       // If we are not asynchronously saving insights, we need to flush the events
       await this.flushEvents();
     }

     // If the buffer is full, flush the events asynchronously
     if (this.bufferedInsights.size >= this.insightsConfig.flushBatchSize) {
+      this.logger.debug(`Buffer is full (${this.bufferedInsights.size} insights), flushing events`);
       // Fire and forget flush to avoid blocking the workflow execute after handler
       void this.flushEvents();
     }
   }

   private async saveInsightsMetadataAndRaw(insightsRawToInsertBuffer: Set<BufferedInsight>) {
+    this.logger.debug(`Flushing ${insightsRawToInsertBuffer.size} insights`);
     const workflowIdNames: Map<string, string> = new Map();

     for (const event of insightsRawToInsertBuffer) {
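For orientation, here is a minimal standalone sketch (a simplified stand-in, not the actual InsightsCollectionService) of where the new debug lines above sit relative to the flush triggers: one before the final flush at shutdown, one before the synchronous flush taken when asynchronous saving is off, one when the buffer reaches flushBatchSize and a fire-and-forget flush starts, and one reporting the batch size as it is flushed. The class name, fields, and persist callback are assumptions made for the sketch.

type Insight = { type: string; value: number };
type Logger = { debug: (msg: string) => void };

class BufferedCollector {
  private readonly bufferedInsights = new Set<Insight>();
  private readonly flushesInProgress = new Set<Promise<void>>();
  private isAsynchronouslySavingInsights = true;

  constructor(
    private readonly logger: Logger,
    private readonly flushBatchSize: number,
    private readonly persist: (batch: Insight[]) => Promise<void>,
  ) {}

  async collect(insight: Insight) {
    this.bufferedInsights.add(insight);

    if (!this.isAsynchronouslySavingInsights) {
      this.logger.debug('Flushing insights synchronously (shutdown in progress)');
      // Shutdown has started, so persist right away instead of relying on the async path
      await this.flushEvents();
    }

    if (this.bufferedInsights.size >= this.flushBatchSize) {
      this.logger.debug(`Buffer is full (${this.bufferedInsights.size} insights), flushing events`);
      // Fire and forget so the caller is not blocked on persistence
      void this.flushEvents();
    }
  }

  async shutdown() {
    this.isAsynchronouslySavingInsights = false;
    this.logger.debug('Flushing remaining insights before shutdown');
    // Wait for in-flight flushes plus one final flush of whatever is still buffered
    await Promise.all([...this.flushesInProgress, this.flushEvents()]);
  }

  private async flushEvents() {
    const batch = [...this.bufferedInsights];
    this.bufferedInsights.clear();
    this.logger.debug(`Flushing ${batch.length} insights`);
    const flush = this.persist(batch);
    this.flushesInProgress.add(flush);
    try {
      await flush;
    } finally {
      this.flushesInProgress.delete(flush);
    }
  }
}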
@@ -188,6 +192,7 @@ export class InsightsCollectionService {
       return acc;
     }, [] as InsightsMetadata[]);

+    this.logger.debug(`Saving ${metadataToUpsert.length} insights metadata for workflows`);
     await this.insightsMetadataRepository.upsert(metadataToUpsert, ['workflowId']);

     const upsertMetadata = await this.insightsMetadataRepository.findBy({
@@ -215,6 +220,7 @@ export class InsightsCollectionService {
       events.push(insight);
     }

+    this.logger.debug(`Inserting ${events.length} insights raw`);
     await this.insightsRawRepository.insert(events);
   }

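The two remaining debug lines instrument the persistence step: one reports the metadata batch size before the upsert, the other the raw-event batch size before the insert. A small sketch of that shape, with assumed entity and repository types (the real ones are presumably TypeORM-backed repositories in n8n), could look like this:

type Logger = { debug: (msg: string) => void };
type InsightsMetadata = { workflowId: string; workflowName: string };
type InsightsRaw = { metaId: number; type: string; value: number };

interface UpsertRepo<T> { upsert(rows: T[], conflictColumns: string[]): Promise<unknown>; }
interface InsertRepo<T> { insert(rows: T[]): Promise<unknown>; }

async function saveInsightsBatch(
  logger: Logger,
  metadataToUpsert: InsightsMetadata[],
  events: InsightsRaw[],
  metadataRepository: UpsertRepo<InsightsMetadata>,
  rawRepository: InsertRepo<InsightsRaw>,
) {
  // Log the size of each batch just before writing it, as the new debug lines do
  logger.debug(`Saving ${metadataToUpsert.length} insights metadata for workflows`);
  await metadataRepository.upsert(metadataToUpsert, ['workflowId']);

  logger.debug(`Inserting ${events.length} insights raw`);
  await rawRepository.insert(events);
}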
@@ -44,21 +44,27 @@ export class InsightsCompactionService {

     // Compact raw data to hourly aggregates
     do {
+      this.logger.debug('Compacting raw data to hourly aggregates');
       numberOfCompactedRawData = await this.compactRawToHour();
-    } while (numberOfCompactedRawData > 0);
+      this.logger.debug(`Compacted ${numberOfCompactedRawData} raw data to hourly aggregates`);
+    } while (numberOfCompactedRawData === this.insightsConfig.compactionBatchSize);

     let numberOfCompactedHourData: number;

     // Compact hourly data to daily aggregates
     do {
+      this.logger.debug('Compacting hourly data to daily aggregates');
       numberOfCompactedHourData = await this.compactHourToDay();
-    } while (numberOfCompactedHourData > 0);
+      this.logger.debug(`Compacted ${numberOfCompactedHourData} hourly data to daily aggregates`);
+    } while (numberOfCompactedHourData === this.insightsConfig.compactionBatchSize);

     let numberOfCompactedDayData: number;
     // Compact daily data to weekly aggregates
     do {
+      this.logger.debug('Compacting daily data to weekly aggregates');
       numberOfCompactedDayData = await this.compactDayToWeek();
-    } while (numberOfCompactedDayData > 0);
+      this.logger.debug(`Compacted ${numberOfCompactedDayData} daily data to weekly aggregates`);
+    } while (numberOfCompactedDayData === this.insightsConfig.compactionBatchSize);
   }

   /**
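Besides the logging, each loop condition changes from `> 0` to `=== compactionBatchSize`: compaction now repeats only while a full batch was processed, so a partial final batch ends the loop without one more query that would find nothing. A standalone sketch (illustrative only, using a stand-in batch function rather than the real compaction queries) comparing the two conditions:

function callsWithOldCondition(rows: number, batchSize: number): number {
  // old: loop again whenever the previous batch compacted anything,
  // so a final call that finds nothing is always needed to stop
  let calls = 0;
  let compacted: number;
  do {
    compacted = Math.min(rows, batchSize);
    rows -= compacted;
    calls++;
  } while (compacted > 0);
  return calls;
}

function callsWithNewCondition(rows: number, batchSize: number): number {
  // new: loop again only when the previous batch was full
  let calls = 0;
  let compacted: number;
  do {
    compacted = Math.min(rows, batchSize);
    rows -= compacted;
    calls++;
  } while (compacted === batchSize);
  return calls;
}

console.log(callsWithOldCondition(600, 500), callsWithNewCondition(600, 500)); // 3 2 — matches the updated test
console.log(callsWithOldCondition(1000, 500), callsWithNewCondition(1000, 500)); // 3 3 — an exact multiple still needs a trailing empty pass

The saving only applies when the last batch is partial; when the row count is an exact multiple of the batch size, one zero-row pass still runs before the loop exits, as the second comparison shows.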