feat(core): Store insights data at the end of executions (no-changelog) (#13905)
Co-authored-by: Danny Martini <danny@n8n.io>
Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
commit cb5a803f9e (parent d80b49d6e5), committed via GitHub
@@ -33,6 +33,9 @@ import { WorkflowEntity } from './workflow-entity';
 import { WorkflowHistory } from './workflow-history';
 import { WorkflowStatistics } from './workflow-statistics';
 import { WorkflowTagMapping } from './workflow-tag-mapping';
+import { InsightsByPeriod } from '../../modules/insights/entities/insights-by-period';
+import { InsightsMetadata } from '../../modules/insights/entities/insights-metadata';
+import { InsightsRaw } from '../../modules/insights/entities/insights-raw';

 export const entities = {
   AnnotationTagEntity,
@@ -70,4 +73,7 @@ export const entities = {
   TestCaseExecution,
   Folder,
   FolderTagMapping,
+  InsightsRaw,
+  InsightsMetadata,
+  InsightsByPeriod,
 };
@@ -99,8 +99,8 @@ export class CreateAnalyticsTables1739549398681 implements ReversibleMigration {
   }

   async down({ schemaBuilder: { dropTable } }: MigrationContext) {
-    await dropTable(names.t.analyticsMetadata);
     await dropTable(names.t.analyticsRaw);
     await dropTable(names.t.analyticsByPeriod);
+    await dropTable(names.t.analyticsMetadata);
   }
 }
@@ -0,0 +1,112 @@
import type { IrreversibleMigration, MigrationContext } from '@/databases/types';

const names = {
  // table names
  t: {
    analyticsMetadata: 'analytics_metadata',
    analyticsRaw: 'analytics_raw',
    analyticsByPeriod: 'analytics_by_period',

    insightsMetadata: 'insights_metadata',
    insightsRaw: 'insights_raw',
    insightsByPeriod: 'insights_by_period',

    workflowEntity: 'workflow_entity',
    project: 'project',
  },
  // column names by table
  c: {
    insightsMetadata: {
      metaId: 'metaId',
      projectId: 'projectId',
      workflowId: 'workflowId',
    },
    insightsRaw: {
      metaId: 'metaId',
    },
    insightsByPeriod: {
      metaId: 'metaId',
      type: 'type',
      periodUnit: 'periodUnit',
      periodStart: 'periodStart',
    },
    project: {
      id: 'id',
    },
    workflowEntity: {
      id: 'id',
    },
  },
};

export class RenameAnalyticsToInsights1741167584277 implements IrreversibleMigration {
  async up({ schemaBuilder: { createTable, column, dropTable } }: MigrationContext) {
    // Until the insights feature is released we're dropping the tables instead
    // of migrating them.
    await dropTable(names.t.analyticsRaw);
    await dropTable(names.t.analyticsByPeriod);
    await dropTable(names.t.analyticsMetadata);

    await createTable(names.t.insightsMetadata)
      .withColumns(
        column(names.c.insightsMetadata.metaId).int.primary.autoGenerate2,
        column(names.c.insightsMetadata.workflowId).varchar(16),
        column(names.c.insightsMetadata.projectId).varchar(36),
        column('workflowName').varchar(128).notNull,
        column('projectName').varchar(255).notNull,
      )
      .withForeignKey(names.c.insightsMetadata.workflowId, {
        tableName: names.t.workflowEntity,
        columnName: names.c.workflowEntity.id,
        onDelete: 'SET NULL',
      })
      .withForeignKey(names.c.insightsMetadata.projectId, {
        tableName: names.t.project,
        columnName: names.c.project.id,
        onDelete: 'SET NULL',
      })
      .withIndexOn(names.c.insightsMetadata.workflowId, true);

    const typeComment = '0: time_saved_minutes, 1: runtime_milliseconds, 2: success, 3: failure';

    await createTable(names.t.insightsRaw)
      .withColumns(
        column('id').int.primary.autoGenerate2,
        column(names.c.insightsRaw.metaId).int.notNull,
        column('type').int.notNull.comment(typeComment),
        column('value').int.notNull,
        column('timestamp').timestampTimezone(0).default('CURRENT_TIMESTAMP').notNull,
      )
      .withForeignKey(names.c.insightsRaw.metaId, {
        tableName: names.t.insightsMetadata,
        columnName: names.c.insightsMetadata.metaId,
        onDelete: 'CASCADE',
      });

    await createTable(names.t.insightsByPeriod)
      .withColumns(
        column('id').int.primary.autoGenerate2,
        column(names.c.insightsByPeriod.metaId).int.notNull,
        column(names.c.insightsByPeriod.type).int.notNull.comment(typeComment),
        column('value').int.notNull,
        column(names.c.insightsByPeriod.periodUnit).int.notNull.comment('0: hour, 1: day, 2: week'),
        column(names.c.insightsByPeriod.periodStart)
          .default('CURRENT_TIMESTAMP')
          .timestampTimezone(0),
      )
      .withForeignKey(names.c.insightsByPeriod.metaId, {
        tableName: names.t.insightsMetadata,
        columnName: names.c.insightsMetadata.metaId,
        onDelete: 'CASCADE',
      })
      .withIndexOn(
        [
          names.c.insightsByPeriod.periodStart,
          names.c.insightsByPeriod.type,
          names.c.insightsByPeriod.periodUnit,
          names.c.insightsByPeriod.metaId,
        ],
        true,
      );
  }
}
@@ -82,6 +82,7 @@ import { CreateTestCaseExecutionTable1736947513045 } from '../common/17369475130
 import { AddErrorColumnsToTestRuns1737715421462 } from '../common/1737715421462-AddErrorColumnsToTestRuns';
 import { CreateFolderTable1738709609940 } from '../common/1738709609940-CreateFolderTable';
 import { CreateAnalyticsTables1739549398681 } from '../common/1739549398681-CreateAnalyticsTables';
+import { RenameAnalyticsToInsights1741167584277 } from '../common/1741167584277-RenameAnalyticsToInsights';
 import { UpdateParentFolderIdColumn1740445074052 } from '../mysqldb/1740445074052-UpdateParentFolderIdColumn';

 export const mysqlMigrations: Migration[] = [
@@ -168,4 +169,5 @@ export const mysqlMigrations: Migration[] = [
   FixTestDefinitionPrimaryKey1739873751194,
   CreateAnalyticsTables1739549398681,
   UpdateParentFolderIdColumn1740445074052,
+  RenameAnalyticsToInsights1741167584277,
 ];
@@ -82,6 +82,7 @@ import { CreateTestCaseExecutionTable1736947513045 } from '../common/17369475130
 import { AddErrorColumnsToTestRuns1737715421462 } from '../common/1737715421462-AddErrorColumnsToTestRuns';
 import { CreateFolderTable1738709609940 } from '../common/1738709609940-CreateFolderTable';
 import { CreateAnalyticsTables1739549398681 } from '../common/1739549398681-CreateAnalyticsTables';
+import { RenameAnalyticsToInsights1741167584277 } from '../common/1741167584277-RenameAnalyticsToInsights';

 export const postgresMigrations: Migration[] = [
   InitialMigration1587669153312,
@@ -166,4 +167,5 @@ export const postgresMigrations: Migration[] = [
   CreateFolderTable1738709609940,
   CreateAnalyticsTables1739549398681,
   UpdateParentFolderIdColumn1740445074052,
+  RenameAnalyticsToInsights1741167584277,
 ];
@@ -79,6 +79,7 @@ import { AddStatsColumnsToTestRun1736172058779 } from '../common/1736172058779-A
 import { CreateTestCaseExecutionTable1736947513045 } from '../common/1736947513045-CreateTestCaseExecutionTable';
 import { AddErrorColumnsToTestRuns1737715421462 } from '../common/1737715421462-AddErrorColumnsToTestRuns';
 import { CreateAnalyticsTables1739549398681 } from '../common/1739549398681-CreateAnalyticsTables';
+import { RenameAnalyticsToInsights1741167584277 } from '../common/1741167584277-RenameAnalyticsToInsights';

 const sqliteMigrations: Migration[] = [
   InitialMigration1588102412422,
@@ -160,6 +161,7 @@ const sqliteMigrations: Migration[] = [
   CreateFolderTable1738709609940,
   CreateAnalyticsTables1739549398681,
   UpdateParentFolderIdColumn1740445074052,
+  RenameAnalyticsToInsights1741167584277,
 ];

 export { sqliteMigrations };
@@ -0,0 +1,247 @@
import { Container } from '@n8n/di';
import { mock } from 'jest-mock-extended';
import { DateTime } from 'luxon';
import type { ExecutionLifecycleHooks } from 'n8n-core';
import type { ExecutionStatus, IRun, WorkflowExecuteMode } from 'n8n-workflow';

import type { Project } from '@/databases/entities/project';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import type { IWorkflowDb } from '@/interfaces';
import type { TypeUnits } from '@/modules/insights/entities/insights-shared';
import { InsightsMetadataRepository } from '@/modules/insights/repositories/insights-metadata.repository';
import { InsightsRawRepository } from '@/modules/insights/repositories/insights-raw.repository';
import { createTeamProject } from '@test-integration/db/projects';
import { createWorkflow } from '@test-integration/db/workflows';
import * as testDb from '@test-integration/test-db';

import { InsightsService } from '../insights.service';
import { InsightsByPeriodRepository } from '../repositories/insights-by-period.repository';

async function truncateAll() {
  const insightsRawRepository = Container.get(InsightsRawRepository);
  const insightsMetadataRepository = Container.get(InsightsMetadataRepository);
  const insightsByPeriodRepository = Container.get(InsightsByPeriodRepository);
  for (const repo of [
    insightsRawRepository,
    insightsMetadataRepository,
    insightsByPeriodRepository,
  ]) {
    await repo.delete({});
  }
}

describe('workflowExecuteAfterHandler', () => {
  let insightsService: InsightsService;
  let insightsRawRepository: InsightsRawRepository;
  let insightsMetadataRepository: InsightsMetadataRepository;
  beforeAll(async () => {
    await testDb.init();

    insightsService = Container.get(InsightsService);
    insightsRawRepository = Container.get(InsightsRawRepository);
    insightsMetadataRepository = Container.get(InsightsMetadataRepository);
  });

  let project: Project;
  let workflow: IWorkflowDb & WorkflowEntity;

  beforeEach(async () => {
    await truncateAll();

    project = await createTeamProject();
    workflow = await createWorkflow(
      {
        settings: {
          timeSavedPerExecution: 3,
        },
      },
      project,
    );
  });

  test.each<{ status: ExecutionStatus; type: TypeUnits }>([
    { status: 'success', type: 'success' },
    { status: 'error', type: 'failure' },
    { status: 'crashed', type: 'failure' },
  ])('stores events for executions with the status `$status`', async ({ status, type }) => {
    // ARRANGE
    const ctx = mock<ExecutionLifecycleHooks>({ workflowData: workflow });
    const startedAt = DateTime.utc();
    const stoppedAt = startedAt.plus({ seconds: 5 });
    const run = mock<IRun>({
      mode: 'webhook',
      status,
      startedAt: startedAt.toJSDate(),
      stoppedAt: stoppedAt.toJSDate(),
    });

    // ACT
    await insightsService.workflowExecuteAfterHandler(ctx, run);

    // ASSERT
    const metadata = await insightsMetadataRepository.findOneBy({ workflowId: workflow.id });

    if (!metadata) {
      return fail('expected metadata to exist');
    }

    expect(metadata).toMatchObject({
      workflowId: workflow.id,
      workflowName: workflow.name,
      projectId: project.id,
      projectName: project.name,
    });

    const allInsights = await insightsRawRepository.find();
    expect(allInsights).toHaveLength(status === 'success' ? 3 : 2);
    expect(allInsights).toContainEqual(
      expect.objectContaining({ metaId: metadata.metaId, type, value: 1 }),
    );
    expect(allInsights).toContainEqual(
      expect.objectContaining({
        metaId: metadata.metaId,
        type: 'runtime_ms',
        value: stoppedAt.diff(startedAt).toMillis(),
      }),
    );
    if (status === 'success') {
      expect(allInsights).toContainEqual(
        expect.objectContaining({
          metaId: metadata.metaId,
          type: 'time_saved_min',
          value: 3,
        }),
      );
    }
  });

  test.each<{ status: ExecutionStatus }>([
    { status: 'waiting' },
    { status: 'canceled' },
    { status: 'unknown' },
    { status: 'new' },
    { status: 'running' },
  ])('does not store events for executions with the status `$status`', async ({ status }) => {
    // ARRANGE
    const ctx = mock<ExecutionLifecycleHooks>({ workflowData: workflow });
    const startedAt = DateTime.utc();
    const stoppedAt = startedAt.plus({ seconds: 5 });
    const run = mock<IRun>({
      mode: 'webhook',
      status,
      startedAt: startedAt.toJSDate(),
      stoppedAt: stoppedAt.toJSDate(),
    });

    // ACT
    await insightsService.workflowExecuteAfterHandler(ctx, run);

    // ASSERT
    const metadata = await insightsMetadataRepository.findOneBy({ workflowId: workflow.id });
    const allInsights = await insightsRawRepository.find();
    expect(metadata).toBeNull();
    expect(allInsights).toHaveLength(0);
  });

  test.each<{ mode: WorkflowExecuteMode }>([{ mode: 'internal' }, { mode: 'manual' }])(
    'does not store events for executions with the mode `$mode`',
    async ({ mode }) => {
      // ARRANGE
      const ctx = mock<ExecutionLifecycleHooks>({ workflowData: workflow });
      const startedAt = DateTime.utc();
      const stoppedAt = startedAt.plus({ seconds: 5 });
      const run = mock<IRun>({
        mode,
        status: 'success',
        startedAt: startedAt.toJSDate(),
        stoppedAt: stoppedAt.toJSDate(),
      });

      // ACT
      await insightsService.workflowExecuteAfterHandler(ctx, run);

      // ASSERT
      const metadata = await insightsMetadataRepository.findOneBy({ workflowId: workflow.id });
      const allInsights = await insightsRawRepository.find();
      expect(metadata).toBeNull();
      expect(allInsights).toHaveLength(0);
    },
  );

  test.each<{ mode: WorkflowExecuteMode }>([
    { mode: 'evaluation' },
    { mode: 'error' },
    { mode: 'cli' },
    { mode: 'retry' },
    { mode: 'trigger' },
    { mode: 'webhook' },
    { mode: 'integrated' },
  ])('stores events for executions with the mode `$mode`', async ({ mode }) => {
    // ARRANGE
    const ctx = mock<ExecutionLifecycleHooks>({ workflowData: workflow });
    const startedAt = DateTime.utc();
    const stoppedAt = startedAt.plus({ seconds: 5 });
    const run = mock<IRun>({
      mode,
      status: 'success',
      startedAt: startedAt.toJSDate(),
      stoppedAt: stoppedAt.toJSDate(),
    });

    // ACT
    await insightsService.workflowExecuteAfterHandler(ctx, run);

    // ASSERT
    const metadata = await insightsMetadataRepository.findOneBy({ workflowId: workflow.id });

    if (!metadata) {
      return fail('expected metadata to exist');
    }

    expect(metadata).toMatchObject({
      workflowId: workflow.id,
      workflowName: workflow.name,
      projectId: project.id,
      projectName: project.name,
    });

    const allInsights = await insightsRawRepository.find();
    expect(allInsights).toHaveLength(3);
    expect(allInsights).toContainEqual(
      expect.objectContaining({ metaId: metadata.metaId, type: 'success', value: 1 }),
    );
    expect(allInsights).toContainEqual(
      expect.objectContaining({
        metaId: metadata.metaId,
        type: 'runtime_ms',
        value: stoppedAt.diff(startedAt).toMillis(),
      }),
    );
    expect(allInsights).toContainEqual(
      expect.objectContaining({
        metaId: metadata.metaId,
        type: 'time_saved_min',
        value: 3,
      }),
    );
  });

  test("throws UnexpectedError if the execution's workflow has no owner", async () => {
    // ARRANGE
    const workflow = await createWorkflow({});
    const ctx = mock<ExecutionLifecycleHooks>({ workflowData: workflow });
    const startedAt = DateTime.utc();
    const stoppedAt = startedAt.plus({ seconds: 5 });
    const run = mock<IRun>({
      mode: 'webhook',
      status: 'success',
      startedAt: startedAt.toJSDate(),
      stoppedAt: stoppedAt.toJSDate(),
    });

    // ACT & ASSERT
    await expect(insightsService.workflowExecuteAfterHandler(ctx, run)).rejects.toThrowError(
      `Could not find an owner for the workflow with the name '${workflow.name}' and the id '${workflow.id}'`,
    );
  });
});
@@ -0,0 +1,64 @@
import { Container } from '@n8n/di';
import type { DateTime } from 'luxon';
import type { IWorkflowBase } from 'n8n-workflow';

import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';

import { InsightsMetadata } from '../../entities/insights-metadata';
import { InsightsRaw } from '../../entities/insights-raw';
import { InsightsMetadataRepository } from '../../repositories/insights-metadata.repository';
import { InsightsRawRepository } from '../../repositories/insights-raw.repository';

async function getWorkflowSharing(workflow: IWorkflowBase) {
  return await Container.get(SharedWorkflowRepository).find({
    where: { workflowId: workflow.id },
    relations: { project: true },
  });
}

export async function createMetadata(workflow: WorkflowEntity) {
  const insightsMetadataRepository = Container.get(InsightsMetadataRepository);
  const alreadyExisting = await insightsMetadataRepository.findOneBy({ workflowId: workflow.id });

  if (alreadyExisting) {
    return alreadyExisting;
  }

  const metadata = new InsightsMetadata();
  metadata.workflowName = workflow.name;
  metadata.workflowId = workflow.id;

  const workflowSharing = (await getWorkflowSharing(workflow)).find(
    (wfs) => wfs.role === 'workflow:owner',
  );
  if (workflowSharing) {
    metadata.projectName = workflowSharing.project.name;
    metadata.projectId = workflowSharing.project.id;
  }

  await insightsMetadataRepository.save(metadata);

  return metadata;
}

export async function createRawInsightsEvent(
  workflow: WorkflowEntity,
  parameters: {
    type: InsightsRaw['type'];
    value: number;
    timestamp?: DateTime;
  },
) {
  const insightsRawRepository = Container.get(InsightsRawRepository);
  const metadata = await createMetadata(workflow);

  const event = new InsightsRaw();
  event.metaId = metadata.metaId;
  event.type = parameters.type;
  event.value = parameters.value;
  if (parameters.timestamp) {
    event.timestamp = parameters.timestamp.toUTC().toJSDate();
  }
  return await insightsRawRepository.save(event);
}
@@ -0,0 +1,51 @@
import { Container } from '@n8n/di';

import * as testDb from '@test-integration/test-db';

import { InsightsRawRepository } from '../../repositories/insights-raw.repository';
import { InsightsByPeriod } from '../insights-by-period';
import type { PeriodUnits, TypeUnits } from '../insights-shared';

let insightsRawRepository: InsightsRawRepository;

beforeAll(async () => {
  await testDb.init();
  insightsRawRepository = Container.get(InsightsRawRepository);
});

beforeEach(async () => {
  await insightsRawRepository.delete({});
});

afterAll(async () => {
  await testDb.terminate();
});

describe('Insights By Period', () => {
  test.each(['time_saved_min', 'runtime_ms', 'failure', 'success'] satisfies TypeUnits[])(
    '`%s` can be serialized and deserialized correctly',
    (typeUnit) => {
      // ARRANGE
      const insightByPeriod = new InsightsByPeriod();

      // ACT
      insightByPeriod.type = typeUnit;

      // ASSERT
      expect(insightByPeriod.type).toBe(typeUnit);
    },
  );
  test.each(['hour', 'day', 'week'] satisfies PeriodUnits[])(
    '`%s` can be serialized and deserialized correctly',
    (periodUnit) => {
      // ARRANGE
      const insightByPeriod = new InsightsByPeriod();

      // ACT
      insightByPeriod.periodUnit = periodUnit;

      // ASSERT
      expect(insightByPeriod.periodUnit).toBe(periodUnit);
    },
  );
});
@@ -0,0 +1,80 @@
import { Container } from '@n8n/di';
import { DateTime } from 'luxon';

import { createTeamProject } from '@test-integration/db/projects';
import { createWorkflow } from '@test-integration/db/workflows';
import * as testDb from '@test-integration/test-db';

import { createMetadata, createRawInsightsEvent } from './db-utils';
import { InsightsRawRepository } from '../../repositories/insights-raw.repository';
import { InsightsRaw } from '../insights-raw';
import type { TypeUnits } from '../insights-shared';

let insightsRawRepository: InsightsRawRepository;

beforeAll(async () => {
  await testDb.init();
  insightsRawRepository = Container.get(InsightsRawRepository);
});

beforeEach(async () => {
  await insightsRawRepository.delete({});
});

afterAll(async () => {
  await testDb.terminate();
});

describe('Insights Raw Entity', () => {
  test.each(['success', 'failure', 'runtime_ms', 'time_saved_min'] satisfies TypeUnits[])(
    '`%s` can be serialized and deserialized correctly',
    (typeUnit) => {
      // ARRANGE
      const rawInsight = new InsightsRaw();

      // ACT
      rawInsight.type = typeUnit;

      // ASSERT
      expect(rawInsight.type).toBe(typeUnit);
    },
  );

  test('`timestamp` can be serialized and deserialized correctly', () => {
    // ARRANGE
    const rawInsight = new InsightsRaw();
    const now = new Date();

    // ACT
    rawInsight.timestamp = now;

    // ASSERT
    now.setMilliseconds(0);
    expect(rawInsight.timestamp).toEqual(now);
  });

  test('timestamp uses the correct default value', async () => {
    // ARRANGE
    const project = await createTeamProject();
    const workflow = await createWorkflow({}, project);
    await createMetadata(workflow);
    const rawInsight = await createRawInsightsEvent(workflow, {
      type: 'success',
      value: 1,
    });

    // ACT
    const now = DateTime.utc().startOf('second');
    await insightsRawRepository.save(rawInsight);

    // ASSERT
    const timestampValue = await insightsRawRepository.find();
    expect(timestampValue).toHaveLength(1);
    const timestamp = timestampValue[0].timestamp;

    expect(
      Math.abs(now.toSeconds() - DateTime.fromJSDate(timestamp).toUTC().toSeconds()),
    ).toBeLessThan(2);
  });
});
@@ -0,0 +1,62 @@
import { BaseEntity, Column, Entity, PrimaryGeneratedColumn } from '@n8n/typeorm';
import { UnexpectedError } from 'n8n-workflow';

import type { PeriodUnits } from './insights-shared';
import {
  isValidPeriodNumber,
  isValidTypeNumber,
  NumberToPeriodUnit,
  NumberToType,
  PeriodUnitToNumber,
  TypeToNumber,
} from './insights-shared';
import { datetimeColumnType } from '../../../databases/entities/abstract-entity';

@Entity()
export class InsightsByPeriod extends BaseEntity {
  @PrimaryGeneratedColumn()
  id: number;

  @Column()
  metaId: number;

  @Column({ name: 'type', type: 'int' })
  private type_: number;

  get type() {
    if (!isValidTypeNumber(this.type_)) {
      throw new UnexpectedError(
        `Type '${this.type_}' is not a valid type for 'InsightsByPeriod.type'`,
      );
    }

    return NumberToType[this.type_];
  }

  set type(value: keyof typeof TypeToNumber) {
    this.type_ = TypeToNumber[value];
  }

  @Column()
  value: number;

  @Column({ name: 'periodUnit' })
  private periodUnit_: number;

  get periodUnit() {
    if (!isValidPeriodNumber(this.periodUnit_)) {
      throw new UnexpectedError(
        `Period unit '${this.periodUnit_}' is not a valid unit for 'InsightsByPeriod.periodUnit'`,
      );
    }

    return NumberToPeriodUnit[this.periodUnit_];
  }

  set periodUnit(value: PeriodUnits) {
    this.periodUnit_ = PeriodUnitToNumber[value];
  }

  @Column({ type: datetimeColumnType })
  periodStart: Date;
}
@@ -0,0 +1,19 @@
import { BaseEntity, Column, Entity, PrimaryGeneratedColumn } from '@n8n/typeorm';

@Entity()
export class InsightsMetadata extends BaseEntity {
  @PrimaryGeneratedColumn()
  metaId: number;

  @Column({ unique: true, type: 'varchar', length: 16 })
  workflowId: string;

  @Column({ type: 'varchar', length: 36 })
  projectId: string;

  @Column({ type: 'varchar', length: 128 })
  workflowName: string;

  @Column({ type: 'varchar', length: 255 })
  projectName: string;
}
packages/cli/src/modules/insights/entities/insights-raw.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
import { GlobalConfig } from '@n8n/config';
import { Container } from '@n8n/di';
import { BaseEntity, Column, Entity, PrimaryGeneratedColumn } from '@n8n/typeorm';
import { UnexpectedError } from 'n8n-workflow';

import { isValidTypeNumber, NumberToType, TypeToNumber } from './insights-shared';
import { datetimeColumnType } from '../../../databases/entities/abstract-entity';

export const { type: dbType } = Container.get(GlobalConfig).database;

@Entity()
export class InsightsRaw extends BaseEntity {
  constructor() {
    super();
    this.timestamp = new Date();
  }

  @PrimaryGeneratedColumn()
  id: number;

  @Column()
  metaId: number;

  @Column({ name: 'type', type: 'int' })
  private type_: number;

  get type() {
    if (!isValidTypeNumber(this.type_)) {
      throw new UnexpectedError(
        `Type '${this.type_}' is not a valid type for 'InsightsRaw.type'`,
      );
    }

    return NumberToType[this.type_];
  }

  set type(value: keyof typeof TypeToNumber) {
    this.type_ = TypeToNumber[value];
  }

  @Column()
  value: number;

  @Column({
    name: 'timestamp',
    type: datetimeColumnType,
  })
  timestamp: Date;
}
@@ -0,0 +1,46 @@
function isValid<T extends Record<number | string | symbol, unknown>>(
  value: number | string | symbol,
  constant: T,
): value is keyof T {
  return Object.keys(constant).includes(value.toString());
}

// Periods
export const PeriodUnitToNumber = {
  hour: 0,
  day: 1,
  week: 2,
} as const;
export type PeriodUnits = keyof typeof PeriodUnitToNumber;
export type PeriodUnitNumbers = (typeof PeriodUnitToNumber)[PeriodUnits];
export const NumberToPeriodUnit = Object.entries(PeriodUnitToNumber).reduce(
  (acc, [key, value]: [PeriodUnits, PeriodUnitNumbers]) => {
    acc[value] = key;
    return acc;
  },
  {} as Record<PeriodUnitNumbers, PeriodUnits>,
);
export function isValidPeriodNumber(value: number) {
  return isValid(value, NumberToPeriodUnit);
}

// Types
export const TypeToNumber = {
  time_saved_min: 0,
  runtime_ms: 1,
  success: 2,
  failure: 3,
} as const;
export type TypeUnits = keyof typeof TypeToNumber;
export type TypeUnitNumbers = (typeof TypeToNumber)[TypeUnits];
export const NumberToType = Object.entries(TypeToNumber).reduce(
  (acc, [key, value]: [TypeUnits, TypeUnitNumbers]) => {
    acc[value] = key;
    return acc;
  },
  {} as Record<TypeUnitNumbers, TypeUnits>,
);

export function isValidTypeNumber(value: number) {
  return isValid(value, NumberToType);
}
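For orientation: `TypeToNumber` and `NumberToType` are inverse lookups, so the entities can persist the compact integer form while exposing the string name. A minimal usage sketch (hypothetical call site, not part of the commit):

import { isValidTypeNumber, NumberToType, TypeToNumber } from './insights-shared';

// Store the integer, read back the name; the guard checks that a raw
// database value is one of the known type numbers before mapping it.
const stored = TypeToNumber.success; // 2
const name = NumberToType[stored]; // 'success'
console.log(isValidTypeNumber(stored), name); // true 'success'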
packages/cli/src/modules/insights/insights.module.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import type { ExecutionLifecycleHooks } from 'n8n-core';
import { InstanceSettings, Logger } from 'n8n-core';

import type { BaseN8nModule } from '@/decorators/module';
import { N8nModule } from '@/decorators/module';

import { InsightsService } from './insights.service';

@N8nModule()
export class InsightsModule implements BaseN8nModule {
  constructor(
    private readonly logger: Logger,
    private readonly insightsService: InsightsService,
    private readonly instanceSettings: InstanceSettings,
  ) {
    this.logger = this.logger.scoped('insights');
  }

  registerLifecycleHooks(hooks: ExecutionLifecycleHooks) {
    const insightsService = this.insightsService;

    // Workers should not be saving any insights
    if (this.instanceSettings.instanceType !== 'worker') {
      hooks.addHandler('workflowExecuteAfter', async function (fullRunData) {
        await insightsService.workflowExecuteAfterHandler(this, fullRunData);
      });
    }
  }
}
packages/cli/src/modules/insights/insights.service.ts (new file, 110 lines)
@@ -0,0 +1,110 @@
import { Service } from '@n8n/di';
import type { ExecutionLifecycleHooks } from 'n8n-core';
import { UnexpectedError } from 'n8n-workflow';
import type { ExecutionStatus, IRun, WorkflowExecuteMode } from 'n8n-workflow';

import { SharedWorkflow } from '@/databases/entities/shared-workflow';
import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
import { InsightsMetadata } from '@/modules/insights/entities/insights-metadata';
import { InsightsRaw } from '@/modules/insights/entities/insights-raw';

const shouldSkipStatus: Record<ExecutionStatus, boolean> = {
  success: false,
  crashed: false,
  error: false,

  canceled: true,
  new: true,
  running: true,
  unknown: true,
  waiting: true,
};

const shouldSkipMode: Record<WorkflowExecuteMode, boolean> = {
  cli: false,
  error: false,
  integrated: false,
  retry: false,
  trigger: false,
  webhook: false,
  evaluation: false,

  internal: true,
  manual: true,
};

@Service()
export class InsightsService {
  constructor(private readonly sharedWorkflowRepository: SharedWorkflowRepository) {}

  async workflowExecuteAfterHandler(ctx: ExecutionLifecycleHooks, fullRunData: IRun) {
    if (shouldSkipStatus[fullRunData.status] || shouldSkipMode[fullRunData.mode]) {
      return;
    }

    const status = fullRunData.status === 'success' ? 'success' : 'failure';

    await this.sharedWorkflowRepository.manager.transaction(async (trx) => {
      const sharedWorkflow = await trx.findOne(SharedWorkflow, {
        where: { workflowId: ctx.workflowData.id, role: 'workflow:owner' },
        relations: { project: true },
      });

      if (!sharedWorkflow) {
        throw new UnexpectedError(
          `Could not find an owner for the workflow with the name '${ctx.workflowData.name}' and the id '${ctx.workflowData.id}'`,
        );
      }

      await trx.upsert(
        InsightsMetadata,
        {
          workflowId: ctx.workflowData.id,
          workflowName: ctx.workflowData.name,
          projectId: sharedWorkflow.projectId,
          projectName: sharedWorkflow.project.name,
        },
        ['workflowId'],
      );
      const metadata = await trx.findOneBy(InsightsMetadata, {
        workflowId: ctx.workflowData.id,
      });

      if (!metadata) {
        // This can't happen, we just wrote the metadata in the same
        // transaction.
        throw new UnexpectedError(
          `Could not find metadata for the workflow with the id '${ctx.workflowData.id}'`,
        );
      }

      // success or failure event
      {
        const event = new InsightsRaw();
        event.metaId = metadata.metaId;
        event.type = status;
        event.value = 1;
        await trx.insert(InsightsRaw, event);
      }

      // run time event
      if (fullRunData.stoppedAt) {
        const value = fullRunData.stoppedAt.getTime() - fullRunData.startedAt.getTime();
        const event = new InsightsRaw();
        event.metaId = metadata.metaId;
        event.type = 'runtime_ms';
        event.value = value;
        await trx.insert(InsightsRaw, event);
      }

      // time saved event
      if (status === 'success' && ctx.workflowData.settings?.timeSavedPerExecution) {
        const event = new InsightsRaw();
        event.metaId = metadata.metaId;
        event.type = 'time_saved_min';
        event.value = ctx.workflowData.settings.timeSavedPerExecution;
        await trx.insert(InsightsRaw, event);
      }
    });
  }
}
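To make the write path concrete: for a single successful execution that ran for five seconds on a workflow with `timeSavedPerExecution: 3`, the transaction above upserts one `insights_metadata` row and inserts three `insights_raw` events, roughly like this sketch (values are illustrative; `metaId` is whatever id the upserted metadata row received, and this mirrors the integration test earlier in the diff):

// Sketch of the raw events written for one successful 5 s execution
// of a workflow that sets timeSavedPerExecution: 3 (illustrative values only).
const rawEvents = [
  { metaId: 1, type: 'success', value: 1 },
  { metaId: 1, type: 'runtime_ms', value: 5000 },
  { metaId: 1, type: 'time_saved_min', value: 3 },
];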
@@ -0,0 +1,11 @@
import { Service } from '@n8n/di';
import { DataSource, Repository } from '@n8n/typeorm';

import { InsightsByPeriod } from '../entities/insights-by-period';

@Service()
export class InsightsByPeriodRepository extends Repository<InsightsByPeriod> {
  constructor(dataSource: DataSource) {
    super(InsightsByPeriod, dataSource.manager);
  }
}
@@ -0,0 +1,11 @@
import { Service } from '@n8n/di';
import { DataSource, Repository } from '@n8n/typeorm';

import { InsightsMetadata } from '../entities/insights-metadata';

@Service()
export class InsightsMetadataRepository extends Repository<InsightsMetadata> {
  constructor(dataSource: DataSource) {
    super(InsightsMetadata, dataSource.manager);
  }
}
@@ -0,0 +1,11 @@
import { Service } from '@n8n/di';
import { DataSource, Repository } from '@n8n/typeorm';

import { InsightsRaw } from '../entities/insights-raw';

@Service()
export class InsightsRawRepository extends Repository<InsightsRaw> {
  constructor(dataSource: DataSource) {
    super(InsightsRaw, dataSource.manager);
  }
}
packages/cli/src/utils/sql.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
/**
 * Provides syntax highlighting for embedded SQL queries in template strings.
 */
export function sql(strings: TemplateStringsArray, ...values: string[]): string {
  let result = '';

  // Interleave the strings with the values
  for (let i = 0; i < values.length; i++) {
    result += strings[i];
    result += values[i];
  }

  // Add the last string
  result += strings[strings.length - 1];

  return result;
}
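A hypothetical call site for the tag (assuming the package's `@/` alias resolves to `src/`; the query text is made up for illustration):

import { sql } from '@/utils/sql';

// The tag simply interleaves the literal parts with the values, so the result
// is the same string a plain template literal would produce; editors can key
// SQL syntax highlighting off the `sql` tag.
const table = 'insights_raw';
const query = sql`SELECT type, value FROM ${table} WHERE value > 0`;
// query === 'SELECT type, value FROM insights_raw WHERE value > 0'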
@@ -24,7 +24,7 @@ export async function createManyWorkflows(
 }

 export function newWorkflow(attributes: Partial<IWorkflowDb> = {}): IWorkflowDb {
-  const { active, name, nodes, connections, versionId } = attributes;
+  const { active, name, nodes, connections, versionId, settings } = attributes;

   const workflowEntity = Container.get(WorkflowRepository).create({
     active: active ?? false,
@@ -41,7 +41,7 @@ export function newWorkflow(attributes: Partial<IWorkflowDb> = {}): IWorkflowDb
     ],
     connections: connections ?? {},
     versionId: versionId ?? uuid(),
-    settings: {},
+    settings: settings ?? {},
     ...attributes,
   });

@@ -119,8 +119,9 @@ export async function shareWorkflowWithProjects(
 }

 export async function getWorkflowSharing(workflow: IWorkflowBase) {
-  return await Container.get(SharedWorkflowRepository).findBy({
-    workflowId: workflow.id,
+  return await Container.get(SharedWorkflowRepository).find({
+    where: { workflowId: workflow.id },
+    relations: { project: true },
   });
 }
@@ -86,6 +86,9 @@ const repositories = [
   'WorkflowTagMapping',
   'ApiKey',
   'Folder',
+  'InsightsRaw',
+  'InsightsMetadata',
+  'InsightsByPeriod',
 ] as const;

 /**
@@ -2398,6 +2398,7 @@ export interface IWorkflowSettings {
   saveExecutionProgress?: 'DEFAULT' | boolean;
   executionTimeout?: number;
   executionOrder?: 'v0' | 'v1';
+  timeSavedPerExecution?: number;
 }

 export interface WorkflowFEMeta {
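For context, the new optional field is what the insights service above reads when it records the `time_saved_min` event; a minimal hedged example of a settings object using it (values are illustrative):

const workflowSettings: IWorkflowSettings = {
  executionOrder: 'v1',
  // Minutes credited per successful execution; becomes the 'time_saved_min' insight value.
  timeSavedPerExecution: 3,
};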