mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git
synced 2025-12-17 18:12:04 +00:00

feat(core): Update data model for Evaluations (no-changelog) (#15520)

Co-authored-by: Yiorgis Gozadinos <yiorgis@n8n.io>
Co-authored-by: JP van Oosten <jp@n8n.io>
@@ -22,8 +22,6 @@ import { SharedCredentials } from './shared-credentials';
import { SharedWorkflow } from './shared-workflow';
import { TagEntity } from './tag-entity';
import { TestCaseExecution } from './test-case-execution.ee';
import { TestDefinition } from './test-definition.ee';
import { TestMetric } from './test-metric.ee';
import { TestRun } from './test-run.ee';
import { User } from './user';
import { Variables } from './variables';

@@ -63,8 +61,6 @@ export {
	AnnotationTagEntity,
	ExecutionAnnotation,
	AnnotationTagMapping,
	TestDefinition,
	TestMetric,
	TestRun,
	TestCaseExecution,
	ExecutionEntity,

@@ -100,8 +96,6 @@ export const entities = {
	AnnotationTagEntity,
	ExecutionAnnotation,
	AnnotationTagMapping,
	TestDefinition,
	TestMetric,
	TestRun,
	TestCaseExecution,
	ExecutionEntity,
@@ -19,7 +19,7 @@ export type TestCaseExecutionStatus =

/**
 * This entity represents the linking between the test runs and individual executions.
 * It stores status, links to past, new and evaluation executions, and metrics produced by individual evaluation wf executions
 * It stores status, link to the evaluation execution, and metrics produced by individual test case
 * Entries in this table are meant to outlive the execution entities, which might be pruned over time.
 * This allows us to keep track of the details of test runs' status and metrics even after the executions are deleted.
 */

@@ -28,15 +28,6 @@ export class TestCaseExecution extends WithStringId {
	@ManyToOne('TestRun')
	testRun: TestRun;

	@ManyToOne('ExecutionEntity', {
		onDelete: 'SET NULL',
		nullable: true,
	})
	pastExecution: ExecutionEntity | null;

	@Column({ type: 'varchar', nullable: true })
	pastExecutionId: string | null;

	@OneToOne('ExecutionEntity', {
		onDelete: 'SET NULL',
		nullable: true,

@@ -46,15 +37,6 @@ export class TestCaseExecution extends WithStringId {
	@Column({ type: 'varchar', nullable: true })
	executionId: string | null;

	@OneToOne('ExecutionEntity', {
		onDelete: 'SET NULL',
		nullable: true,
	})
	evaluationExecution: ExecutionEntity | null;

	@Column({ type: 'varchar', nullable: true })
	evaluationExecutionId: string | null;

	@Column()
	status: TestCaseExecutionStatus;
@@ -1,63 +0,0 @@
|
||||
import { Column, Entity, Index, ManyToOne, OneToMany, RelationId } from '@n8n/typeorm';
|
||||
import { Length } from 'class-validator';
|
||||
|
||||
import { JsonColumn, WithTimestampsAndStringId } from './abstract-entity';
|
||||
import { AnnotationTagEntity } from './annotation-tag-entity.ee';
|
||||
import type { TestMetric } from './test-metric.ee';
|
||||
import type { MockedNodeItem } from './types-db';
|
||||
import { WorkflowEntity } from './workflow-entity';
|
||||
|
||||
/**
|
||||
* Entity representing a Test Definition
|
||||
* It combines:
|
||||
* - the workflow under test
|
||||
* - the workflow used to evaluate the results of test execution
|
||||
* - the filter used to select test cases from previous executions of the workflow under test - annotation tag
|
||||
*/
|
||||
@Entity()
|
||||
@Index(['workflow'])
|
||||
@Index(['evaluationWorkflow'])
|
||||
export class TestDefinition extends WithTimestampsAndStringId {
|
||||
@Column({ length: 255 })
|
||||
@Length(1, 255, {
|
||||
message: 'Test definition name must be $constraint1 to $constraint2 characters long.',
|
||||
})
|
||||
name: string;
|
||||
|
||||
@Column('text')
|
||||
description: string;
|
||||
|
||||
@JsonColumn({ default: '[]' })
|
||||
mockedNodes: MockedNodeItem[];
|
||||
|
||||
/**
|
||||
* Relation to the workflow under test
|
||||
*/
|
||||
@ManyToOne('WorkflowEntity', 'tests')
|
||||
workflow: WorkflowEntity;
|
||||
|
||||
@RelationId((test: TestDefinition) => test.workflow)
|
||||
workflowId: string;
|
||||
|
||||
/**
|
||||
* Relation to the workflow used to evaluate the results of test execution
|
||||
*/
|
||||
@ManyToOne('WorkflowEntity', 'evaluationTests')
|
||||
evaluationWorkflow: WorkflowEntity;
|
||||
|
||||
@RelationId((test: TestDefinition) => test.evaluationWorkflow)
|
||||
evaluationWorkflowId: string;
|
||||
|
||||
/**
|
||||
* Relation to the annotation tag associated with the test
|
||||
* This tag will be used to select the test cases to run from previous executions
|
||||
*/
|
||||
@ManyToOne('AnnotationTagEntity', 'test')
|
||||
annotationTag: AnnotationTagEntity;
|
||||
|
||||
@RelationId((test: TestDefinition) => test.annotationTag)
|
||||
annotationTagId: string;
|
||||
|
||||
@OneToMany('TestMetric', 'testDefinition')
|
||||
metrics: TestMetric[];
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
import { Column, Entity, Index, ManyToOne } from '@n8n/typeorm';
|
||||
import { Length } from 'class-validator';
|
||||
|
||||
import { WithTimestampsAndStringId } from './abstract-entity';
|
||||
import { TestDefinition } from './test-definition.ee';
|
||||
|
||||
/**
|
||||
* Entity representing a Test Metric
|
||||
* It represents a single metric that can be retrieved from evaluation workflow execution result
|
||||
*/
|
||||
@Entity()
|
||||
@Index(['testDefinition'])
|
||||
export class TestMetric extends WithTimestampsAndStringId {
|
||||
/**
|
||||
* Name of the metric.
|
||||
* This will be used as a property name to extract metric value from the evaluation workflow execution result object
|
||||
*/
|
||||
@Column({ length: 255 })
|
||||
@Length(1, 255, {
|
||||
message: 'Metric name must be $constraint1 to $constraint2 characters long.',
|
||||
})
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* Relation to test definition
|
||||
*/
|
||||
@ManyToOne('TestDefinition', 'metrics')
|
||||
testDefinition: TestDefinition;
|
||||
}
|
||||
@@ -1,27 +1,20 @@
import { Column, Entity, Index, ManyToOne, OneToMany, RelationId } from '@n8n/typeorm';
import { Column, Entity, OneToMany, ManyToOne } from '@n8n/typeorm';
import type { IDataObject } from 'n8n-workflow';

import { DateTimeColumn, JsonColumn, WithTimestampsAndStringId } from './abstract-entity';
import type { TestCaseExecution } from './test-case-execution.ee';
import { TestDefinition } from './test-definition.ee';
import { AggregatedTestRunMetrics } from './types-db';
import type { TestRunErrorCode, TestRunFinalResult } from './types-db';
import { WorkflowEntity } from './workflow-entity';

export type TestRunStatus = 'new' | 'running' | 'completed' | 'error' | 'cancelled';

/**
 * Entity representing a Test Run.
 * It stores info about a specific run of a test, combining the test definition with the status and collected metrics
 * It stores info about a specific run of a test, including the status and collected metrics
 */
@Entity()
@Index(['testDefinition'])
export class TestRun extends WithTimestampsAndStringId {
	@ManyToOne('TestDefinition', 'runs')
	testDefinition: TestDefinition;

	@RelationId((testRun: TestRun) => testRun.testDefinition)
	testDefinitionId: string;

	@Column('varchar')
	status: TestRunStatus;

@@ -34,25 +27,6 @@ export class TestRun extends WithTimestampsAndStringId {
	@JsonColumn({ nullable: true })
	metrics: AggregatedTestRunMetrics;

	/**
	 * Total number of the test cases, matching the filter condition of the test definition (specified annotationTag)
	 */
	@Column('integer', { nullable: true })
	totalCases: number;

	/**
	 * Number of test cases that passed (evaluation workflow was executed successfully)
	 */
	@Column('integer', { nullable: true })
	passedCases: number;

	/**
	 * Number of failed test cases
	 * (any unexpected exception happened during the execution or evaluation workflow ended with an error)
	 */
	@Column('integer', { nullable: true })
	failedCases: number;

	/**
	 * This will contain the error code if the test run failed.
	 * This is used for test run level errors, not for individual test case errors.

@@ -69,6 +43,12 @@ export class TestRun extends WithTimestampsAndStringId {
	@OneToMany('TestCaseExecution', 'testRun')
	testCaseExecutions: TestCaseExecution[];

	@ManyToOne('WorkflowEntity')
	workflow: WorkflowEntity;

	@Column('varchar', { length: 255 })
	workflowId: string;

	/**
	 * Calculated property to determine the final result of the test run
	 * depending on the statuses of test case executions
@@ -16,6 +16,7 @@ import { JsonColumn, WithTimestampsAndStringId, dbType } from './abstract-entity
import { type Folder } from './folder';
import type { SharedWorkflow } from './shared-workflow';
import type { TagEntity } from './tag-entity';
import type { TestRun } from './test-run.ee';
import type { IWorkflowDb } from './types-db';
import type { WorkflowStatistics } from './workflow-statistics';
import type { WorkflowTagMapping } from './workflow-tag-mapping';

@@ -108,6 +109,9 @@ export class WorkflowEntity extends WithTimestampsAndStringId implements IWorkfl
	})
	@JoinColumn({ name: 'parentFolderId' })
	parentFolder: Folder | null;

	@OneToMany('TestRun', 'workflow')
	testRuns: TestRun[];
}

/**
@@ -0,0 +1,68 @@
import type { MigrationContext, IrreversibleMigration } from '../migration-types';

const testRunTableName = 'test_run';
const testCaseExecutionTableName = 'test_case_execution';

export class ClearEvaluation1745322634000 implements IrreversibleMigration {
	async up({
		schemaBuilder: { dropTable, column, createTable },
		queryRunner,
		tablePrefix,
		isSqlite,
		isPostgres,
		isMysql,
	}: MigrationContext) {
		// Drop test_metric, test_definition
		await dropTable(testCaseExecutionTableName);
		await dropTable(testRunTableName);
		await dropTable('test_metric');
		if (isSqlite) {
			await queryRunner.query(`DROP TABLE IF EXISTS ${tablePrefix}test_definition;`);
		} else if (isPostgres) {
			await queryRunner.query(`DROP TABLE IF EXISTS ${tablePrefix}test_definition CASCADE;`);
		} else if (isMysql) {
			await queryRunner.query(`DROP TABLE IF EXISTS ${tablePrefix}test_definition CASCADE;`);
		}

		await createTable(testRunTableName)
			.withColumns(
				column('id').varchar(36).primary.notNull,
				column('workflowId').varchar(36).notNull,
				column('status').varchar().notNull,
				column('errorCode').varchar(),
				column('errorDetails').json,
				column('runAt').timestamp(),
				column('completedAt').timestamp(),
				column('metrics').json,
			)
			.withIndexOn('workflowId')
			.withForeignKey('workflowId', {
				tableName: 'workflow_entity',
				columnName: 'id',
				onDelete: 'CASCADE',
			}).withTimestamps;

		await createTable(testCaseExecutionTableName)
			.withColumns(
				column('id').varchar(36).primary.notNull,
				column('testRunId').varchar(36).notNull,
				column('executionId').int, // Execution of the workflow under test. Might be null if execution was deleted after the test run
				column('status').varchar().notNull,
				column('runAt').timestamp(),
				column('completedAt').timestamp(),
				column('errorCode').varchar(),
				column('errorDetails').json,
				column('metrics').json,
			)
			.withIndexOn('testRunId')
			.withForeignKey('testRunId', {
				tableName: 'test_run',
				columnName: 'id',
				onDelete: 'CASCADE',
			})
			.withForeignKey('executionId', {
				tableName: 'execution_entity',
				columnName: 'id',
				onDelete: 'SET NULL',
			}).withTimestamps;
	}
}
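For orientation, a minimal sketch of the row shapes the two createTable calls above describe. The interface names and nullability markers are illustrative assumptions, not code from this commit; .withTimestamps additionally adds the usual createdAt/updatedAt columns.

// Illustrative only: shapes implied by the schema builder calls above.
interface TestRunRow {
	id: string; // varchar(36), primary key
	workflowId: string; // FK -> workflow_entity.id, ON DELETE CASCADE
	status: string;
	errorCode: string | null;
	errorDetails: object | null;
	runAt: Date | null;
	completedAt: Date | null;
	metrics: object | null;
}

interface TestCaseExecutionRow {
	id: string; // varchar(36), primary key
	testRunId: string; // FK -> test_run.id, ON DELETE CASCADE
	executionId: number | null; // FK -> execution_entity.id, ON DELETE SET NULL
	status: string;
	runAt: Date | null;
	completedAt: Date | null;
	errorCode: string | null;
	errorDetails: object | null;
	metrics: object | null;
}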
@@ -82,6 +82,7 @@ import { CreateFolderTable1738709609940 } from '../common/1738709609940-CreateFo
import { CreateAnalyticsTables1739549398681 } from '../common/1739549398681-CreateAnalyticsTables';
import { RenameAnalyticsToInsights1741167584277 } from '../common/1741167584277-RenameAnalyticsToInsights';
import { AddScopesColumnToApiKeys1742918400000 } from '../common/1742918400000-AddScopesColumnToApiKeys';
import { ClearEvaluation1745322634000 } from '../common/1745322634000-CleanEvaluations';
import { AddWorkflowStatisticsRootCount1745587087521 } from '../common/1745587087521-AddWorkflowStatisticsRootCount';
import { AddWorkflowArchivedColumn1745934666076 } from '../common/1745934666076-AddWorkflowArchivedColumn';
import { DropRoleTable1745934666077 } from '../common/1745934666077-DropRoleTable';

@@ -177,4 +178,5 @@ export const mysqlMigrations: Migration[] = [
	AddWorkflowStatisticsRootCount1745587087521,
	AddWorkflowArchivedColumn1745934666076,
	DropRoleTable1745934666077,
	ClearEvaluation1745322634000,
];

@@ -82,6 +82,7 @@ import { CreateFolderTable1738709609940 } from '../common/1738709609940-CreateFo
import { CreateAnalyticsTables1739549398681 } from '../common/1739549398681-CreateAnalyticsTables';
import { RenameAnalyticsToInsights1741167584277 } from '../common/1741167584277-RenameAnalyticsToInsights';
import { AddScopesColumnToApiKeys1742918400000 } from '../common/1742918400000-AddScopesColumnToApiKeys';
import { ClearEvaluation1745322634000 } from '../common/1745322634000-CleanEvaluations';
import { AddWorkflowStatisticsRootCount1745587087521 } from '../common/1745587087521-AddWorkflowStatisticsRootCount';
import { AddWorkflowArchivedColumn1745934666076 } from '../common/1745934666076-AddWorkflowArchivedColumn';
import { DropRoleTable1745934666077 } from '../common/1745934666077-DropRoleTable';

@@ -175,4 +176,5 @@ export const postgresMigrations: Migration[] = [
	AddWorkflowStatisticsRootCount1745587087521,
	AddWorkflowArchivedColumn1745934666076,
	DropRoleTable1745934666077,
	ClearEvaluation1745322634000,
];

@@ -79,6 +79,7 @@ import { CreateTestCaseExecutionTable1736947513045 } from '../common/17369475130
import { AddErrorColumnsToTestRuns1737715421462 } from '../common/1737715421462-AddErrorColumnsToTestRuns';
import { CreateAnalyticsTables1739549398681 } from '../common/1739549398681-CreateAnalyticsTables';
import { RenameAnalyticsToInsights1741167584277 } from '../common/1741167584277-RenameAnalyticsToInsights';
import { ClearEvaluation1745322634000 } from '../common/1745322634000-CleanEvaluations';
import { AddWorkflowStatisticsRootCount1745587087521 } from '../common/1745587087521-AddWorkflowStatisticsRootCount';
import { AddWorkflowArchivedColumn1745934666076 } from '../common/1745934666076-AddWorkflowArchivedColumn';
import { DropRoleTable1745934666077 } from '../common/1745934666077-DropRoleTable';

@@ -169,6 +170,7 @@ const sqliteMigrations: Migration[] = [
	AddWorkflowStatisticsRootCount1745587087521,
	AddWorkflowArchivedColumn1745934666076,
	DropRoleTable1745934666077,
	ClearEvaluation1745322634000,
];

export { sqliteMigrations };
@@ -21,8 +21,6 @@ export { ProcessedDataRepository } from './processed-data.repository';
export { SettingsRepository } from './settings.repository';
export { TagRepository } from './tag.repository';
export { TestCaseExecutionRepository } from './test-case-execution.repository.ee';
export { TestDefinitionRepository } from './test-definition.repository.ee';
export { TestMetricRepository } from './test-metric.repository.ee';
export { TestRunRepository } from './test-run.repository.ee';
export { VariablesRepository } from './variables.repository';
export { WorkflowHistoryRepository } from './workflow-history.repository';
@@ -18,16 +18,10 @@ type MarkAsFailedOptions = StatusUpdateOptions & {
	errorDetails?: IDataObject;
};

type MarkAsWarningOptions = MarkAsFailedOptions;

type MarkAsRunningOptions = StatusUpdateOptions & {
	executionId: string;
};

type MarkAsEvaluationRunningOptions = StatusUpdateOptions & {
	evaluationExecutionId: string;
};

type MarkAsCompletedOptions = StatusUpdateOptions & {
	metrics: Record<string, number>;
};

@@ -38,15 +32,12 @@ export class TestCaseExecutionRepository extends Repository<TestCaseExecution> {
		super(TestCaseExecution, dataSource.manager);
	}

	async createBatch(testRunId: string, pastExecutionIds: string[]) {
	async createBatch(testRunId: string, testCases: string[]) {
		const mappings = this.create(
			pastExecutionIds.map<DeepPartial<TestCaseExecution>>((id) => ({
			testCases.map<DeepPartial<TestCaseExecution>>(() => ({
				testRun: {
					id: testRunId,
				},
				pastExecution: {
					id,
				},
				status: 'new',
			})),
		);

@@ -68,24 +59,6 @@ export class TestCaseExecutionRepository extends Repository<TestCaseExecution> {
		);
	}

	async markAsEvaluationRunning({
		testRunId,
		pastExecutionId,
		evaluationExecutionId,
		trx,
	}: MarkAsEvaluationRunningOptions) {
		trx = trx ?? this.manager;

		return await trx.update(
			TestCaseExecution,
			{ testRun: { id: testRunId }, pastExecutionId },
			{
				status: 'evaluation_running',
				evaluationExecutionId,
			},
		);
	}

	async markAsCompleted({ testRunId, pastExecutionId, metrics, trx }: MarkAsCompletedOptions) {
		trx = trx ?? this.manager;

@@ -133,21 +106,4 @@ export class TestCaseExecutionRepository extends Repository<TestCaseExecution> {
			},
		);
	}

	async markAsWarning({
		testRunId,
		pastExecutionId,
		errorCode,
		errorDetails,
	}: MarkAsWarningOptions) {
		return await this.update(
			{ testRun: { id: testRunId }, pastExecutionId },
			{
				status: 'warning',
				completedAt: new Date(),
				errorCode,
				errorDetails,
			},
		);
	}
}
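A hedged sketch of calling the reworked createBatch. Only the signature and the 'new' status come from the diff above; the DI access and the input values are assumptions for illustration.

import { TestCaseExecutionRepository } from '@n8n/db';
import { Container } from '@n8n/di'; // assumed export, mirroring the Container.get usage elsewhere in this commit

// Hypothetical inputs: one entry per dataset test case.
const testRunId = 'test-run-id';
const testCases = ['case-1', 'case-2', 'case-3'];

// Creates one TestCaseExecution per entry in status 'new'; unlike the old
// pastExecutionIds variant, no past execution is linked at creation time.
await Container.get(TestCaseExecutionRepository).createBatch(testRunId, testCases);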
@@ -1,70 +0,0 @@
|
||||
import { Service } from '@n8n/di';
|
||||
import type { FindManyOptions, FindOptionsWhere } from '@n8n/typeorm';
|
||||
import { DataSource, In, Repository } from '@n8n/typeorm';
|
||||
import { UserError } from 'n8n-workflow';
|
||||
|
||||
import { TestDefinition } from '../entities';
|
||||
import type { ListQuery } from '../entities/types-db';
|
||||
|
||||
@Service()
|
||||
export class TestDefinitionRepository extends Repository<TestDefinition> {
|
||||
constructor(dataSource: DataSource) {
|
||||
super(TestDefinition, dataSource.manager);
|
||||
}
|
||||
|
||||
async getMany(accessibleWorkflowIds: string[], options?: ListQuery.Options) {
|
||||
if (accessibleWorkflowIds.length === 0) return { tests: [], count: 0 };
|
||||
|
||||
const where: FindOptionsWhere<TestDefinition> = {};
|
||||
|
||||
if (options?.filter?.workflowId) {
|
||||
if (!accessibleWorkflowIds.includes(options.filter.workflowId as string)) {
|
||||
throw new UserError('User does not have access to the workflow');
|
||||
}
|
||||
|
||||
where.workflow = {
|
||||
id: options.filter.workflowId as string,
|
||||
};
|
||||
} else {
|
||||
where.workflow = {
|
||||
id: In(accessibleWorkflowIds),
|
||||
};
|
||||
}
|
||||
|
||||
const findManyOptions: FindManyOptions<TestDefinition> = {
|
||||
where,
|
||||
relations: ['annotationTag'],
|
||||
order: { createdAt: 'DESC' },
|
||||
};
|
||||
|
||||
if (options?.take) {
|
||||
findManyOptions.skip = options.skip;
|
||||
findManyOptions.take = options.take;
|
||||
}
|
||||
|
||||
const [testDefinitions, count] = await this.findAndCount(findManyOptions);
|
||||
|
||||
return { testDefinitions, count };
|
||||
}
|
||||
|
||||
async getOne(id: string, accessibleWorkflowIds: string[]) {
|
||||
return await this.findOne({
|
||||
where: {
|
||||
id,
|
||||
workflow: {
|
||||
id: In(accessibleWorkflowIds),
|
||||
},
|
||||
},
|
||||
relations: ['annotationTag', 'metrics'],
|
||||
});
|
||||
}
|
||||
|
||||
async deleteById(id: string, accessibleWorkflowIds: string[]) {
|
||||
return await this.delete({
|
||||
id,
|
||||
workflow: {
|
||||
id: In(accessibleWorkflowIds),
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
import { Service } from '@n8n/di';
|
||||
import { DataSource, Repository } from '@n8n/typeorm';
|
||||
|
||||
import { TestMetric } from '../entities';
|
||||
|
||||
@Service()
|
||||
export class TestMetricRepository extends Repository<TestMetric> {
|
||||
constructor(dataSource: DataSource) {
|
||||
super(TestMetric, dataSource.manager);
|
||||
}
|
||||
}
|
||||
@@ -22,22 +22,21 @@ export class TestRunRepository extends Repository<TestRun> {
		super(TestRun, dataSource.manager);
	}

	async createTestRun(testDefinitionId: string) {
	async createTestRun(workflowId: string) {
		const testRun = this.create({
			status: 'new',
			testDefinition: { id: testDefinitionId },
			workflow: {
				id: workflowId,
			},
		});

		return await this.save(testRun);
	}

	async markAsRunning(id: string, totalCases: number) {
	async markAsRunning(id: string) {
		return await this.update(id, {
			status: 'running',
			runAt: new Date(),
			totalCases,
			passedCases: 0,
			failedCases: 0,
		});
	}

@@ -65,20 +64,10 @@ export class TestRunRepository extends Repository<TestRun> {
		);
	}

	async incrementPassed(id: string, trx?: EntityManager) {
		trx = trx ?? this.manager;
		return await trx.increment(TestRun, { id }, 'passedCases', 1);
	}

	async incrementFailed(id: string, trx?: EntityManager) {
		trx = trx ?? this.manager;
		return await trx.increment(TestRun, { id }, 'failedCases', 1);
	}

	async getMany(testDefinitionId: string, options: ListQuery.Options) {
	async getMany(workflowId: string, options: ListQuery.Options) {
		// FIXME: optimize fetching final result of each test run
		const findManyOptions: FindManyOptions<TestRun> = {
			where: { testDefinition: { id: testDefinitionId } },
			where: { workflow: { id: workflowId } },
			order: { createdAt: 'DESC' },
			relations: ['testCaseExecutions'],
		};

@@ -103,12 +92,9 @@ export class TestRunRepository extends Repository<TestRun> {
	 * E.g. Test Run is considered successful if all test case executions are successful.
	 * Test Run is considered failed if at least one test case execution is failed.
	 */
	async getTestRunSummaryById(
		testDefinitionId: string,
		testRunId: string,
	): Promise<TestRunSummary> {
	async getTestRunSummaryById(testRunId: string): Promise<TestRunSummary> {
		const testRun = await this.findOne({
			where: { id: testRunId, testDefinition: { id: testDefinitionId } },
			where: { id: testRunId },
			relations: ['testCaseExecutions'],
		});
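A hedged sketch of the workflow-scoped TestRunRepository API after this change. Method names and signatures are taken from the diff above; the DI wiring and the ids are illustrative assumptions.

import { TestRunRepository } from '@n8n/db';
import { Container } from '@n8n/di'; // assumed export, as above

const testRunRepository = Container.get(TestRunRepository);

// Test runs are now created against a workflow, not a test definition.
const testRun = await testRunRepository.createTestRun('workflow-id'); // starts in status 'new'

// markAsRunning no longer seeds totalCases/passedCases/failedCases counters.
await testRunRepository.markAsRunning(testRun.id);

// Listing and summary lookups are keyed by workflow id / run id alone.
const runs = await testRunRepository.getMany('workflow-id', {});
const summary = await testRunRepository.getTestRunSummaryById(testRun.id);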
@@ -22,7 +22,7 @@ import * as CrashJournal from '@/crash-journal';
import { DbConnection } from '@/databases/db-connection';
import { getDataDeduplicationService } from '@/deduplication';
import { DeprecationService } from '@/deprecation/deprecation.service';
import { TestRunnerService } from '@/evaluation.ee/test-runner/test-runner.service.ee';
import { TestRunCleanupService } from '@/evaluation.ee/test-runner/test-run-cleanup.service.ee';
import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
import { TelemetryEventRelay } from '@/events/relays/telemetry.event-relay';
import { ExternalHooks } from '@/external-hooks';

@@ -286,7 +286,7 @@ export abstract class BaseCommand extends Command {
	}

	async cleanupTestRunner() {
		await Container.get(TestRunnerService).cleanupIncompleteRuns();
		await Container.get(TestRunCleanupService).cleanupIncompleteRuns();
	}

	async finally(error: Error | undefined) {
@@ -0,0 +1,155 @@
|
||||
import type { TestCaseExecutionRepository, TestRun, TestRunRepository, User } from '@n8n/db';
|
||||
import type { InstanceSettings } from 'n8n-core';
|
||||
|
||||
import { NotFoundError } from '@/errors/response-errors/not-found.error';
|
||||
import type { TestRunnerService } from '@/evaluation.ee/test-runner/test-runner.service.ee';
|
||||
import { TestRunsController } from '@/evaluation.ee/test-runs.controller.ee';
|
||||
import { getSharedWorkflowIds } from '@/public-api/v1/handlers/workflows/workflows.service';
|
||||
import type { Telemetry } from '@/telemetry';
|
||||
import type { WorkflowFinderService } from '@/workflows/workflow-finder.service';
|
||||
|
||||
// Mock dependencies
|
||||
jest.mock('@/public-api/v1/handlers/workflows/workflows.service');
|
||||
jest.mock('@/evaluation.ee/test-runner/test-runner.service.ee');
|
||||
|
||||
describe('TestRunsController', () => {
|
||||
let testRunsController: TestRunsController;
|
||||
let mockTestRunRepository: jest.Mocked<TestRunRepository>;
|
||||
let mockWorkflowFinderService: jest.Mocked<WorkflowFinderService>;
|
||||
let mockTestCaseExecutionRepository: jest.Mocked<TestCaseExecutionRepository>;
|
||||
let mockTestRunnerService: jest.Mocked<TestRunnerService>;
|
||||
let mockInstanceSettings: jest.Mocked<InstanceSettings>;
|
||||
let mockTelemetry: jest.Mocked<Telemetry>;
|
||||
let mockGetSharedWorkflowIds: jest.MockedFunction<typeof getSharedWorkflowIds>;
|
||||
let mockUser: User;
|
||||
let mockWorkflowId: string;
|
||||
let mockTestRunId: string;
|
||||
|
||||
beforeEach(() => {
|
||||
// Setup mocks
|
||||
mockTestRunRepository = {
|
||||
findOne: jest.fn(),
|
||||
getMany: jest.fn(),
|
||||
delete: jest.fn(),
|
||||
createTestRun: jest.fn(),
|
||||
} as unknown as jest.Mocked<TestRunRepository>;
|
||||
|
||||
mockWorkflowFinderService = {
|
||||
findWorkflowForUser: jest.fn(),
|
||||
} as unknown as jest.Mocked<WorkflowFinderService>;
|
||||
|
||||
mockTestCaseExecutionRepository = {
|
||||
find: jest.fn(),
|
||||
markAllPendingAsCancelled: jest.fn(),
|
||||
} as unknown as jest.Mocked<TestCaseExecutionRepository>;
|
||||
|
||||
mockTestRunnerService = {
|
||||
runTest: jest.fn(),
|
||||
canBeCancelled: jest.fn(),
|
||||
cancelTestRun: jest.fn(),
|
||||
} as unknown as jest.Mocked<TestRunnerService>;
|
||||
|
||||
mockInstanceSettings = {
|
||||
isMultiMain: false,
|
||||
} as unknown as jest.Mocked<InstanceSettings>;
|
||||
|
||||
mockTelemetry = {
|
||||
track: jest.fn(),
|
||||
} as unknown as jest.Mocked<Telemetry>;
|
||||
|
||||
mockGetSharedWorkflowIds = getSharedWorkflowIds as jest.MockedFunction<
|
||||
typeof getSharedWorkflowIds
|
||||
>;
|
||||
|
||||
// Create test instance
|
||||
testRunsController = new TestRunsController(
|
||||
mockTestRunRepository,
|
||||
mockWorkflowFinderService,
|
||||
mockTestCaseExecutionRepository,
|
||||
mockTestRunnerService,
|
||||
mockInstanceSettings,
|
||||
mockTelemetry,
|
||||
);
|
||||
|
||||
// Common test data
|
||||
mockUser = { id: 'user123' } as User;
|
||||
mockWorkflowId = 'workflow123';
|
||||
mockTestRunId = 'testrun123';
|
||||
|
||||
// Default mock behavior
|
||||
mockGetSharedWorkflowIds.mockResolvedValue([mockWorkflowId]);
|
||||
mockTestRunRepository.findOne.mockResolvedValue({
|
||||
id: mockTestRunId,
|
||||
status: 'running',
|
||||
} as TestRun);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('getTestRun', () => {
|
||||
it('should return test run when it exists and user has access', async () => {
|
||||
// Arrange
|
||||
const mockTestRun = {
|
||||
id: mockTestRunId,
|
||||
status: 'running',
|
||||
} as TestRun;
|
||||
mockGetSharedWorkflowIds.mockResolvedValue([mockWorkflowId]);
|
||||
mockTestRunRepository.findOne.mockResolvedValue(mockTestRun);
|
||||
|
||||
// Act
|
||||
const result = await (testRunsController as any).getTestRun(
|
||||
mockTestRunId,
|
||||
mockWorkflowId,
|
||||
mockUser,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(mockGetSharedWorkflowIds).toHaveBeenCalledWith(mockUser, ['workflow:read']);
|
||||
expect(mockTestRunRepository.findOne).toHaveBeenCalledWith({
|
||||
where: { id: mockTestRunId },
|
||||
});
|
||||
expect(result).toEqual(mockTestRun);
|
||||
});
|
||||
|
||||
it('should throw NotFoundError when user has no access to workflow', async () => {
|
||||
// Arrange
|
||||
mockGetSharedWorkflowIds.mockResolvedValue([]); // No access to any workflow
|
||||
|
||||
// Act & Assert
|
||||
await expect(
|
||||
(testRunsController as any).getTestRun(mockTestRunId, mockWorkflowId, mockUser),
|
||||
).rejects.toThrow(NotFoundError);
|
||||
expect(mockGetSharedWorkflowIds).toHaveBeenCalledWith(mockUser, ['workflow:read']);
|
||||
expect(mockTestRunRepository.findOne).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should throw NotFoundError when workflowId does not match any shared workflows', async () => {
|
||||
// Arrange
|
||||
mockGetSharedWorkflowIds.mockResolvedValue(['different-workflow-id']);
|
||||
|
||||
// Act & Assert
|
||||
await expect(
|
||||
(testRunsController as any).getTestRun(mockTestRunId, mockWorkflowId, mockUser),
|
||||
).rejects.toThrow(NotFoundError);
|
||||
expect(mockGetSharedWorkflowIds).toHaveBeenCalledWith(mockUser, ['workflow:read']);
|
||||
expect(mockTestRunRepository.findOne).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should throw NotFoundError when test run does not exist', async () => {
|
||||
// Arrange
|
||||
mockGetSharedWorkflowIds.mockResolvedValue([mockWorkflowId]);
|
||||
mockTestRunRepository.findOne.mockResolvedValue(null); // Test run not found
|
||||
|
||||
// Act & Assert
|
||||
await expect(
|
||||
(testRunsController as any).getTestRun(mockTestRunId, mockWorkflowId, mockUser),
|
||||
).rejects.toThrow(NotFoundError);
|
||||
expect(mockGetSharedWorkflowIds).toHaveBeenCalledWith(mockUser, ['workflow:read']);
|
||||
expect(mockTestRunRepository.findOne).toHaveBeenCalledWith({
|
||||
where: { id: mockTestRunId },
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,13 +0,0 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
export const testMetricCreateRequestBodySchema = z
|
||||
.object({
|
||||
name: z.string().min(1).max(255),
|
||||
})
|
||||
.strict();
|
||||
|
||||
export const testMetricPatchRequestBodySchema = z
|
||||
.object({
|
||||
name: z.string().min(1).max(255),
|
||||
})
|
||||
.strict();
|
||||
@@ -1,21 +0,0 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
export const testDefinitionCreateRequestBodySchema = z
|
||||
.object({
|
||||
name: z.string().min(1).max(255),
|
||||
workflowId: z.string().min(1),
|
||||
description: z.string().optional(),
|
||||
evaluationWorkflowId: z.string().min(1).optional(),
|
||||
annotationTagId: z.string().min(1).optional(),
|
||||
})
|
||||
.strict();
|
||||
|
||||
export const testDefinitionPatchRequestBodySchema = z
|
||||
.object({
|
||||
name: z.string().min(1).max(255).optional(),
|
||||
description: z.string().optional(),
|
||||
evaluationWorkflowId: z.string().min(1).optional(),
|
||||
annotationTagId: z.string().min(1).optional(),
|
||||
mockedNodes: z.array(z.object({ id: z.string(), name: z.string() })).optional(),
|
||||
})
|
||||
.strict();
|
||||
@@ -1,183 +0,0 @@
|
||||
import type { MockedNodeItem, TestDefinition } from '@n8n/db';
|
||||
import { AnnotationTagRepository, TestDefinitionRepository } from '@n8n/db';
|
||||
import { Service } from '@n8n/di';
|
||||
|
||||
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
|
||||
import { NotFoundError } from '@/errors/response-errors/not-found.error';
|
||||
import { validateEntity } from '@/generic-helpers';
|
||||
import type { ListQuery } from '@/requests';
|
||||
import { Telemetry } from '@/telemetry';
|
||||
|
||||
type TestDefinitionLike = Omit<
|
||||
Partial<TestDefinition>,
|
||||
'workflow' | 'evaluationWorkflow' | 'annotationTag' | 'metrics'
|
||||
> & {
|
||||
workflow?: { id: string };
|
||||
evaluationWorkflow?: { id: string };
|
||||
annotationTag?: { id: string };
|
||||
};
|
||||
|
||||
@Service()
|
||||
export class TestDefinitionService {
|
||||
constructor(
|
||||
private testDefinitionRepository: TestDefinitionRepository,
|
||||
private annotationTagRepository: AnnotationTagRepository,
|
||||
private telemetry: Telemetry,
|
||||
) {}
|
||||
|
||||
private toEntityLike(attrs: {
|
||||
name?: string;
|
||||
description?: string;
|
||||
workflowId?: string;
|
||||
evaluationWorkflowId?: string;
|
||||
annotationTagId?: string;
|
||||
id?: string;
|
||||
mockedNodes?: MockedNodeItem[];
|
||||
}) {
|
||||
const entity: TestDefinitionLike = {};
|
||||
|
||||
if (attrs.id) {
|
||||
entity.id = attrs.id;
|
||||
}
|
||||
|
||||
if (attrs.name) {
|
||||
entity.name = attrs.name?.trim();
|
||||
}
|
||||
|
||||
if (attrs.description) {
|
||||
entity.description = attrs.description.trim();
|
||||
}
|
||||
|
||||
if (attrs.workflowId) {
|
||||
entity.workflow = {
|
||||
id: attrs.workflowId,
|
||||
};
|
||||
}
|
||||
|
||||
if (attrs.evaluationWorkflowId) {
|
||||
entity.evaluationWorkflow = {
|
||||
id: attrs.evaluationWorkflowId,
|
||||
};
|
||||
}
|
||||
|
||||
if (attrs.annotationTagId) {
|
||||
entity.annotationTag = {
|
||||
id: attrs.annotationTagId,
|
||||
};
|
||||
}
|
||||
|
||||
if (attrs.mockedNodes) {
|
||||
entity.mockedNodes = attrs.mockedNodes;
|
||||
}
|
||||
|
||||
return entity;
|
||||
}
|
||||
|
||||
toEntity(attrs: {
|
||||
name?: string;
|
||||
workflowId?: string;
|
||||
evaluationWorkflowId?: string;
|
||||
annotationTagId?: string;
|
||||
id?: string;
|
||||
}) {
|
||||
const entity = this.toEntityLike(attrs);
|
||||
return this.testDefinitionRepository.create(entity);
|
||||
}
|
||||
|
||||
async findOne(id: string, accessibleWorkflowIds: string[]) {
|
||||
return await this.testDefinitionRepository.getOne(id, accessibleWorkflowIds);
|
||||
}
|
||||
|
||||
async save(test: TestDefinition) {
|
||||
await validateEntity(test);
|
||||
|
||||
return await this.testDefinitionRepository.save(test);
|
||||
}
|
||||
|
||||
async update(id: string, attrs: TestDefinitionLike) {
|
||||
const existingTestDefinition = await this.testDefinitionRepository.findOneOrFail({
|
||||
where: {
|
||||
id,
|
||||
},
|
||||
relations: ['workflow'],
|
||||
});
|
||||
|
||||
if (attrs.name) {
|
||||
const updatedTest = this.toEntity(attrs);
|
||||
await validateEntity(updatedTest);
|
||||
}
|
||||
|
||||
// Check if the annotation tag exists
|
||||
if (attrs.annotationTagId) {
|
||||
const annotationTagExists = await this.annotationTagRepository.exists({
|
||||
where: {
|
||||
id: attrs.annotationTagId,
|
||||
},
|
||||
});
|
||||
|
||||
if (!annotationTagExists) {
|
||||
throw new BadRequestError('Annotation tag not found');
|
||||
}
|
||||
}
|
||||
|
||||
// If there are mocked nodes, validate them
|
||||
if (attrs.mockedNodes && attrs.mockedNodes.length > 0) {
|
||||
const existingNodeNames = new Map(
|
||||
existingTestDefinition.workflow.nodes.map((n) => [n.name, n]),
|
||||
);
|
||||
const existingNodeIds = new Map(existingTestDefinition.workflow.nodes.map((n) => [n.id, n]));
|
||||
|
||||
// If some node was previously mocked and then removed from the workflow, it should be removed from the mocked nodes
|
||||
attrs.mockedNodes = attrs.mockedNodes.filter(
|
||||
(node) => existingNodeIds.has(node.id) || (node.name && existingNodeNames.has(node.name)),
|
||||
);
|
||||
|
||||
// Update the node names OR node ids if they are not provided
|
||||
attrs.mockedNodes = attrs.mockedNodes.map((node) => {
|
||||
return {
|
||||
id: node.id ?? (node.name && existingNodeNames.get(node.name)?.id),
|
||||
name: node.name ?? (node.id && existingNodeIds.get(node.id)?.name),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
// Update the test definition
|
||||
const queryResult = await this.testDefinitionRepository.update(id, this.toEntityLike(attrs));
|
||||
|
||||
if (queryResult.affected === 0) {
|
||||
throw new NotFoundError('Test definition not found');
|
||||
}
|
||||
|
||||
// Send the telemetry events
|
||||
if (attrs.annotationTagId && attrs.annotationTagId !== existingTestDefinition.annotationTagId) {
|
||||
this.telemetry.track('User added tag to test', {
|
||||
test_id: id,
|
||||
tag_id: attrs.annotationTagId,
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
attrs.evaluationWorkflowId &&
|
||||
existingTestDefinition.evaluationWorkflowId !== attrs.evaluationWorkflowId
|
||||
) {
|
||||
this.telemetry.track('User added evaluation workflow to test', {
|
||||
test_id: id,
|
||||
subworkflow_id: attrs.evaluationWorkflowId,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async delete(id: string, accessibleWorkflowIds: string[]) {
|
||||
const deleteResult = await this.testDefinitionRepository.deleteById(id, accessibleWorkflowIds);
|
||||
|
||||
if (deleteResult.affected === 0) {
|
||||
throw new NotFoundError('Test definition not found');
|
||||
}
|
||||
|
||||
this.telemetry.track('User deleted a test', { test_id: id });
|
||||
}
|
||||
|
||||
async getMany(options: ListQuery.Options, accessibleWorkflowIds: string[] = []) {
|
||||
return await this.testDefinitionRepository.getMany(accessibleWorkflowIds, options);
|
||||
}
|
||||
}
|
||||
@@ -1,170 +0,0 @@
|
||||
import { Get, Post, Patch, RestController, Delete } from '@n8n/decorators';
|
||||
import express from 'express';
|
||||
import { UserError } from 'n8n-workflow';
|
||||
import assert from 'node:assert';
|
||||
|
||||
import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
|
||||
import { NotFoundError } from '@/errors/response-errors/not-found.error';
|
||||
import {
|
||||
testDefinitionCreateRequestBodySchema,
|
||||
testDefinitionPatchRequestBodySchema,
|
||||
} from '@/evaluation.ee/test-definition.schema';
|
||||
import { TestRunnerService } from '@/evaluation.ee/test-runner/test-runner.service.ee';
|
||||
import { listQueryMiddleware } from '@/middlewares';
|
||||
import { getSharedWorkflowIds } from '@/public-api/v1/handlers/workflows/workflows.service';
|
||||
|
||||
import { TestDefinitionService } from './test-definition.service.ee';
|
||||
import { TestDefinitionsRequest } from './test-definitions.types.ee';
|
||||
|
||||
@RestController('/evaluation/test-definitions')
|
||||
export class TestDefinitionsController {
|
||||
constructor(
|
||||
private readonly testDefinitionService: TestDefinitionService,
|
||||
private readonly testRunnerService: TestRunnerService,
|
||||
) {}
|
||||
|
||||
@Get('/', { middlewares: listQueryMiddleware })
|
||||
async getMany(req: TestDefinitionsRequest.GetMany) {
|
||||
const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
|
||||
|
||||
try {
|
||||
return await this.testDefinitionService.getMany(
|
||||
req.listQueryOptions,
|
||||
userAccessibleWorkflowIds,
|
||||
);
|
||||
} catch (error) {
|
||||
if (error instanceof UserError) throw new ForbiddenError(error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
@Get('/:id')
|
||||
async getOne(req: TestDefinitionsRequest.GetOne) {
|
||||
const { id: testDefinitionId } = req.params;
|
||||
|
||||
const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
|
||||
|
||||
const testDefinition = await this.testDefinitionService.findOne(
|
||||
testDefinitionId,
|
||||
userAccessibleWorkflowIds,
|
||||
);
|
||||
|
||||
if (!testDefinition) throw new NotFoundError('Test definition not found');
|
||||
|
||||
return testDefinition;
|
||||
}
|
||||
|
||||
@Post('/')
|
||||
async create(req: TestDefinitionsRequest.Create, res: express.Response) {
|
||||
const bodyParseResult = testDefinitionCreateRequestBodySchema.safeParse(req.body);
|
||||
if (!bodyParseResult.success) {
|
||||
res.status(400).json({ errors: bodyParseResult.error.errors });
|
||||
return;
|
||||
}
|
||||
|
||||
const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
|
||||
|
||||
if (!userAccessibleWorkflowIds.includes(req.body.workflowId)) {
|
||||
throw new ForbiddenError('User does not have access to the workflow');
|
||||
}
|
||||
|
||||
if (
|
||||
req.body.evaluationWorkflowId &&
|
||||
!userAccessibleWorkflowIds.includes(req.body.evaluationWorkflowId)
|
||||
) {
|
||||
throw new ForbiddenError('User does not have access to the evaluation workflow');
|
||||
}
|
||||
|
||||
return await this.testDefinitionService.save(
|
||||
this.testDefinitionService.toEntity(bodyParseResult.data),
|
||||
);
|
||||
}
|
||||
|
||||
@Delete('/:id')
|
||||
async delete(req: TestDefinitionsRequest.Delete) {
|
||||
const { id: testDefinitionId } = req.params;
|
||||
|
||||
const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
|
||||
|
||||
if (userAccessibleWorkflowIds.length === 0)
|
||||
throw new ForbiddenError('User does not have access to any workflows');
|
||||
|
||||
await this.testDefinitionService.delete(testDefinitionId, userAccessibleWorkflowIds);
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Patch('/:id')
|
||||
async patch(req: TestDefinitionsRequest.Patch, res: express.Response) {
|
||||
const { id: testDefinitionId } = req.params;
|
||||
|
||||
const bodyParseResult = testDefinitionPatchRequestBodySchema.safeParse(req.body);
|
||||
if (!bodyParseResult.success) {
|
||||
res.status(400).json({ errors: bodyParseResult.error.errors });
|
||||
return;
|
||||
}
|
||||
|
||||
const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
|
||||
|
||||
// Fail fast if no workflows are accessible
|
||||
if (userAccessibleWorkflowIds.length === 0)
|
||||
throw new ForbiddenError('User does not have access to any workflows');
|
||||
|
||||
const existingTest = await this.testDefinitionService.findOne(
|
||||
testDefinitionId,
|
||||
userAccessibleWorkflowIds,
|
||||
);
|
||||
if (!existingTest) throw new NotFoundError('Test definition not found');
|
||||
|
||||
if (
|
||||
req.body.evaluationWorkflowId &&
|
||||
!userAccessibleWorkflowIds.includes(req.body.evaluationWorkflowId)
|
||||
) {
|
||||
throw new ForbiddenError('User does not have access to the evaluation workflow');
|
||||
}
|
||||
|
||||
await this.testDefinitionService.update(testDefinitionId, req.body);
|
||||
|
||||
// Respond with the updated test definition
|
||||
const testDefinition = await this.testDefinitionService.findOne(
|
||||
testDefinitionId,
|
||||
userAccessibleWorkflowIds,
|
||||
);
|
||||
|
||||
assert(testDefinition, 'Test definition not found');
|
||||
|
||||
return testDefinition;
|
||||
}
|
||||
|
||||
@Post('/:id/run')
|
||||
async runTest(req: TestDefinitionsRequest.Run, res: express.Response) {
|
||||
const { id: testDefinitionId } = req.params;
|
||||
|
||||
const workflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
|
||||
|
||||
// Check test definition exists
|
||||
const testDefinition = await this.testDefinitionService.findOne(testDefinitionId, workflowIds);
|
||||
if (!testDefinition) throw new NotFoundError('Test definition not found');
|
||||
|
||||
// We do not await for the test run to complete
|
||||
void this.testRunnerService.runTest(req.user, testDefinition);
|
||||
|
||||
res.status(202).json({ success: true });
|
||||
}
|
||||
|
||||
@Get('/:id/example-evaluation-input')
|
||||
async exampleEvaluationInput(req: TestDefinitionsRequest.ExampleEvaluationInput) {
|
||||
const { id: testDefinitionId } = req.params;
|
||||
const { annotationTagId } = req.query;
|
||||
|
||||
const workflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
|
||||
|
||||
const testDefinition = await this.testDefinitionService.findOne(testDefinitionId, workflowIds);
|
||||
if (!testDefinition) throw new NotFoundError('Test definition not found');
|
||||
|
||||
return await this.testRunnerService.getExampleEvaluationInputData(
|
||||
testDefinition,
|
||||
annotationTagId,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,77 +0,0 @@
|
||||
import type { MockedNodeItem } from '@n8n/db';
|
||||
|
||||
import type { AuthenticatedRequest, ListQuery } from '@/requests';
|
||||
|
||||
// ----------------------------------
|
||||
// /test-definitions
|
||||
// ----------------------------------
|
||||
|
||||
export declare namespace TestDefinitionsRequest {
|
||||
namespace RouteParams {
|
||||
type TestId = {
|
||||
id: string;
|
||||
};
|
||||
}
|
||||
|
||||
type GetOne = AuthenticatedRequest<RouteParams.TestId>;
|
||||
|
||||
type GetMany = AuthenticatedRequest<{}, {}, {}, ListQuery.Params> & {
|
||||
listQueryOptions: ListQuery.Options;
|
||||
};
|
||||
|
||||
type Create = AuthenticatedRequest<
|
||||
{},
|
||||
{},
|
||||
{ name: string; workflowId: string; evaluationWorkflowId?: string }
|
||||
>;
|
||||
|
||||
type Patch = AuthenticatedRequest<
|
||||
RouteParams.TestId,
|
||||
{},
|
||||
{
|
||||
name?: string;
|
||||
evaluationWorkflowId?: string;
|
||||
annotationTagId?: string;
|
||||
mockedNodes?: MockedNodeItem[];
|
||||
}
|
||||
>;
|
||||
|
||||
type Delete = AuthenticatedRequest<RouteParams.TestId>;
|
||||
|
||||
type Run = AuthenticatedRequest<RouteParams.TestId>;
|
||||
|
||||
type ExampleEvaluationInput = AuthenticatedRequest<
|
||||
RouteParams.TestId,
|
||||
{},
|
||||
{},
|
||||
{ annotationTagId: string }
|
||||
>;
|
||||
}
|
||||
|
||||
// ----------------------------------
|
||||
// /test-definitions/:testDefinitionId/runs
|
||||
// ----------------------------------
|
||||
|
||||
export declare namespace TestRunsRequest {
|
||||
namespace RouteParams {
|
||||
type TestId = {
|
||||
testDefinitionId: string;
|
||||
};
|
||||
|
||||
type TestRunId = {
|
||||
id: string;
|
||||
};
|
||||
}
|
||||
|
||||
type GetMany = AuthenticatedRequest<RouteParams.TestId, {}, {}, ListQuery.Params> & {
|
||||
listQueryOptions: ListQuery.Options;
|
||||
};
|
||||
|
||||
type GetOne = AuthenticatedRequest<RouteParams.TestId & RouteParams.TestRunId>;
|
||||
|
||||
type Delete = AuthenticatedRequest<RouteParams.TestId & RouteParams.TestRunId>;
|
||||
|
||||
type Cancel = AuthenticatedRequest<RouteParams.TestId & RouteParams.TestRunId>;
|
||||
|
||||
type GetCases = AuthenticatedRequest<RouteParams.TestId & RouteParams.TestRunId>;
|
||||
}
|
||||
@@ -2,8 +2,7 @@ import { EvaluationMetrics } from '../evaluation-metrics.ee';
|
||||
|
||||
describe('EvaluationMetrics', () => {
|
||||
test('should aggregate metrics correctly', () => {
|
||||
const testMetricNames = new Set(['metric1', 'metric2']);
|
||||
const metrics = new EvaluationMetrics(testMetricNames);
|
||||
const metrics = new EvaluationMetrics();
|
||||
|
||||
metrics.addResults({ metric1: 1, metric2: 0 });
|
||||
metrics.addResults({ metric1: 0.5, metric2: 0.2 });
|
||||
@@ -14,8 +13,7 @@ describe('EvaluationMetrics', () => {
|
||||
});
|
||||
|
||||
test('should throw when metric value is not number', () => {
|
||||
const testMetricNames = new Set(['metric1', 'metric2']);
|
||||
const metrics = new EvaluationMetrics(testMetricNames);
|
||||
const metrics = new EvaluationMetrics();
|
||||
|
||||
expect(() => metrics.addResults({ metric1: 1, metric2: 0 })).not.toThrow();
|
||||
expect(() => metrics.addResults({ metric1: '0.5', metric2: 0.2 })).toThrow('INVALID_METRICS');
|
||||
@@ -25,49 +23,21 @@ describe('EvaluationMetrics', () => {
|
||||
});
|
||||
|
||||
test('should handle empty metrics', () => {
|
||||
const testMetricNames = new Set(['metric1', 'metric2']);
|
||||
const metrics = new EvaluationMetrics(testMetricNames);
|
||||
const metrics = new EvaluationMetrics();
|
||||
|
||||
const aggregatedMetrics = metrics.getAggregatedMetrics();
|
||||
|
||||
expect(aggregatedMetrics).toEqual({});
|
||||
});
|
||||
|
||||
test('should handle empty testMetrics', () => {
|
||||
const metrics = new EvaluationMetrics(new Set());
|
||||
|
||||
metrics.addResults({ metric1: 1, metric2: 0 });
|
||||
metrics.addResults({ metric1: 0.5, metric2: 0.2 });
|
||||
|
||||
const aggregatedMetrics = metrics.getAggregatedMetrics();
|
||||
|
||||
expect(aggregatedMetrics).toEqual({});
|
||||
});
|
||||
|
||||
test('should ignore non-relevant values', () => {
|
||||
const testMetricNames = new Set(['metric1']);
|
||||
const metrics = new EvaluationMetrics(testMetricNames);
|
||||
|
||||
metrics.addResults({ metric1: 1, notRelevant: 0 });
|
||||
metrics.addResults({ metric1: 0.5, notRelevant2: { foo: 'bar' } });
|
||||
|
||||
const aggregatedMetrics = metrics.getAggregatedMetrics();
|
||||
|
||||
expect(aggregatedMetrics).toEqual({ metric1: 0.75 });
|
||||
});
|
||||
|
||||
test('should report info on added metrics', () => {
|
||||
const testMetricNames = new Set(['metric1']);
|
||||
const metrics = new EvaluationMetrics(testMetricNames);
|
||||
const metrics = new EvaluationMetrics();
|
||||
let info;
|
||||
|
||||
expect(() => (info = metrics.addResults({ metric1: 1, metric2: 0 }))).not.toThrow();
|
||||
|
||||
expect(info).toBeDefined();
|
||||
expect(info).toHaveProperty('unknownMetrics');
|
||||
expect(info!.unknownMetrics).toEqual(new Set(['metric2']));
|
||||
|
||||
expect(info).toHaveProperty('addedMetrics');
|
||||
expect(info!.addedMetrics).toEqual({ metric1: 1 });
|
||||
expect(info!.addedMetrics).toEqual({ metric1: 1, metric2: 0 });
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
import { readFileSync } from 'fs';
|
||||
import { mock } from 'jest-mock-extended';
|
||||
import path from 'path';
|
||||
|
||||
import type { TestCaseRunMetadata } from '@/evaluation.ee/test-runner/test-runner.service.ee';
|
||||
import { formatTestCaseExecutionInputData } from '@/evaluation.ee/test-runner/utils.ee';
|
||||
|
||||
const wfUnderTestJson = JSON.parse(
|
||||
readFileSync(path.join(__dirname, './mock-data/workflow.under-test.json'), { encoding: 'utf-8' }),
|
||||
);
|
||||
|
||||
const executionDataJson = JSON.parse(
|
||||
readFileSync(path.join(__dirname, './mock-data/execution-data.json'), { encoding: 'utf-8' }),
|
||||
);
|
||||
|
||||
describe('formatTestCaseExecutionInputData', () => {
|
||||
test('should format the test case execution input data correctly', () => {
|
||||
const data = formatTestCaseExecutionInputData(
|
||||
executionDataJson.resultData.runData,
|
||||
wfUnderTestJson,
|
||||
executionDataJson.resultData.runData,
|
||||
wfUnderTestJson,
|
||||
mock<TestCaseRunMetadata>({
|
||||
pastExecutionId: 'exec-id',
|
||||
highlightedData: [],
|
||||
annotation: {
|
||||
vote: 'up',
|
||||
tags: [{ id: 'tag-id', name: 'tag-name' }],
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
// Check data have all expected properties
|
||||
expect(data.json).toMatchObject({
|
||||
originalExecution: expect.anything(),
|
||||
newExecution: expect.anything(),
|
||||
annotations: expect.anything(),
|
||||
});
|
||||
|
||||
// Check original execution contains all the expected nodes
|
||||
expect(data.json.originalExecution).toHaveProperty('72256d90-3a67-4e29-b032-47df4e5768af');
|
||||
expect(data.json.originalExecution).toHaveProperty('319f29bc-1dd4-4122-b223-c584752151a4');
|
||||
expect(data.json.originalExecution).toHaveProperty('d2474215-63af-40a4-a51e-0ea30d762621');
|
||||
|
||||
// Check format of specific node data
|
||||
expect(data.json.originalExecution).toMatchObject({
|
||||
'72256d90-3a67-4e29-b032-47df4e5768af': {
|
||||
nodeName: 'When clicking ‘Execute workflow’',
|
||||
runs: [
|
||||
{
|
||||
executionTime: 0,
|
||||
rootNode: true,
|
||||
output: {
|
||||
main: [
|
||||
[
|
||||
{
|
||||
query: 'First item',
|
||||
},
|
||||
{
|
||||
query: 'Second item',
|
||||
},
|
||||
{
|
||||
query: 'Third item',
|
||||
},
|
||||
],
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
// Check annotations
|
||||
expect(data).toMatchObject({
|
||||
json: {
|
||||
annotations: {
|
||||
vote: 'up',
|
||||
tags: [{ id: 'tag-id', name: 'tag-name' }],
|
||||
highlightedData: {},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
File diff suppressed because it is too large
@@ -4,41 +4,34 @@ import { TestCaseExecutionError } from '@/evaluation.ee/test-runner/errors.ee';

export interface EvaluationMetricsAddResultsInfo {
	addedMetrics: Record<string, number>;
	missingMetrics: Set<string>;
	unknownMetrics: Set<string>;
	incorrectTypeMetrics: Set<string>;
}

export class EvaluationMetrics {
	private readonly rawMetricsByName = new Map<string, number[]>();

	constructor(private readonly metricNames: Set<string>) {
		for (const metricName of metricNames) {
			this.rawMetricsByName.set(metricName, []);
		}
	}

	addResults(result: IDataObject): EvaluationMetricsAddResultsInfo {
		const addResultsInfo: EvaluationMetricsAddResultsInfo = {
			addedMetrics: {},
			missingMetrics: new Set<string>(),
			unknownMetrics: new Set<string>(),
			incorrectTypeMetrics: new Set<string>(),
		};

		for (const [metricName, metricValue] of Object.entries(result)) {
			if (this.metricNames.has(metricName)) {
				if (typeof metricValue === 'number') {
					addResultsInfo.addedMetrics[metricName] = metricValue;
					this.rawMetricsByName.get(metricName)!.push(metricValue);
				} else {
					throw new TestCaseExecutionError('INVALID_METRICS', {
						metricName,
						metricValue,
					});
			if (typeof metricValue === 'number') {
				addResultsInfo.addedMetrics[metricName] = metricValue;

				// Initialize the array if this is the first time we see this metric
				if (!this.rawMetricsByName.has(metricName)) {
					this.rawMetricsByName.set(metricName, []);
				}

				this.rawMetricsByName.get(metricName)!.push(metricValue);
			} else {
				addResultsInfo.unknownMetrics.add(metricName);
				addResultsInfo.incorrectTypeMetrics.add(metricName);
				throw new TestCaseExecutionError('INVALID_METRICS', {
					metricName,
					metricValue,
				});
			}
		}
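A short usage sketch of the reworked EvaluationMetrics, based on the class above and its updated tests: metric names no longer need to be declared up front, every numeric value is collected, and non-numeric values still throw. The metric names and values below are hypothetical.

import { EvaluationMetrics } from '@/evaluation.ee/test-runner/evaluation-metrics.ee';

const metrics = new EvaluationMetrics(); // no predefined metric names anymore

// Arrays are created lazily the first time a metric name appears.
metrics.addResults({ accuracy: 1, latency: 120 });
metrics.addResults({ accuracy: 0.5, latency: 80 });

// Per the tests, values are aggregated per metric (e.g. { accuracy: 0.75, latency: 100 }).
const aggregated = metrics.getAggregatedMetrics();

// A non-numeric value is recorded in incorrectTypeMetrics and raises
// TestCaseExecutionError('INVALID_METRICS'):
// metrics.addResults({ accuracy: 'high' });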
@@ -0,0 +1,25 @@
import { TestRunRepository } from '@n8n/db';
import { Service } from '@n8n/di';
import { Logger } from 'n8n-core';

/**
 * This service is responsible for cleaning up pending Test Runs on application startup.
 */
@Service()
export class TestRunCleanupService {
	constructor(
		private readonly logger: Logger,
		private readonly testRunRepository: TestRunRepository,
	) {}

	/**
	 * As Test Runner does not have a recovery mechanism, it can not resume Test Runs interrupted by the server restart.
	 * All Test Runs in incomplete state will be marked as failed.
	 */
	async cleanupIncompleteRuns() {
		const result = await this.testRunRepository.markAllIncompleteAsFailed();
		if (result.affected && result.affected > 0) {
			this.logger.debug(`Marked ${result.affected} incomplete test runs as failed`);
		}
	}
}
@@ -1,53 +1,23 @@
|
||||
import type { User, ExecutionEntity, MockedNodeItem, TestDefinition, TestRun } from '@n8n/db';
|
||||
import {
|
||||
ExecutionRepository,
|
||||
TestCaseExecutionRepository,
|
||||
TestMetricRepository,
|
||||
TestRunRepository,
|
||||
WorkflowRepository,
|
||||
} from '@n8n/db';
|
||||
import type { User, TestRun } from '@n8n/db';
|
||||
import { TestCaseExecutionRepository, TestRunRepository, WorkflowRepository } from '@n8n/db';
|
||||
import { Service } from '@n8n/di';
|
||||
import { parse } from 'flatted';
|
||||
import difference from 'lodash/difference';
|
||||
import { ErrorReporter, Logger } from 'n8n-core';
|
||||
import { ExecutionCancelledError, NodeConnectionTypes, Workflow } from 'n8n-workflow';
|
||||
import type {
|
||||
AssignmentCollectionValue,
|
||||
IDataObject,
|
||||
IRun,
|
||||
IRunExecutionData,
|
||||
IWorkflowBase,
|
||||
IWorkflowExecutionDataProcess,
|
||||
} from 'n8n-workflow';
|
||||
import { ExecutionCancelledError } from 'n8n-workflow';
|
||||
import type { IRun, IWorkflowBase, IWorkflowExecutionDataProcess } from 'n8n-workflow';
|
||||
import assert from 'node:assert';
|
||||
|
||||
import { ActiveExecutions } from '@/active-executions';
|
||||
import config from '@/config';
|
||||
import { EVALUATION_METRICS_NODE } from '@/constants';
|
||||
import { TestCaseExecutionError, TestRunError } from '@/evaluation.ee/test-runner/errors.ee';
|
||||
import { NodeTypes } from '@/node-types';
|
||||
import { Telemetry } from '@/telemetry';
|
||||
import { getRunData } from '@/workflow-execute-additional-data';
|
||||
import { WorkflowRunner } from '@/workflow-runner';
|
||||
|
||||
import { EvaluationMetrics } from './evaluation-metrics.ee';
|
||||
import {
|
||||
createPinData,
|
||||
formatTestCaseExecutionInputData,
|
||||
getPastExecutionTriggerNode,
|
||||
} from './utils.ee';
|
||||
|
||||
export interface TestRunMetadata {
|
||||
testRunId: string;
|
||||
userId: string;
|
||||
}
|
||||
|
||||
export interface TestCaseRunMetadata extends TestRunMetadata {
|
||||
pastExecutionId: string;
|
||||
annotation: ExecutionEntity['annotation'];
|
||||
highlightedData: ExecutionEntity['metadata'];
|
||||
}
|
||||
|
||||
/**
|
||||
* This service orchestrates the running of test cases.
|
||||
* It uses the test definitions to find
|
||||
@@ -65,78 +35,33 @@ export class TestRunnerService {
|
||||
private readonly telemetry: Telemetry,
|
||||
private readonly workflowRepository: WorkflowRepository,
|
||||
private readonly workflowRunner: WorkflowRunner,
|
||||
private readonly executionRepository: ExecutionRepository,
|
||||
private readonly activeExecutions: ActiveExecutions,
|
||||
private readonly testRunRepository: TestRunRepository,
|
||||
private readonly testCaseExecutionRepository: TestCaseExecutionRepository,
|
||||
private readonly testMetricRepository: TestMetricRepository,
|
||||
private readonly nodeTypes: NodeTypes,
|
||||
private readonly errorReporter: ErrorReporter,
|
||||
) {}
|
||||
|
||||
/**
|
||||
	 * As the Test Runner does not have a recovery mechanism, it cannot resume Test Runs interrupted by a server restart.
	 * All Test Runs in an incomplete state will be marked as failed.
|
||||
*/
|
||||
async cleanupIncompleteRuns() {
|
||||
await this.testRunRepository.markAllIncompleteAsFailed();
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares the start nodes and trigger node data props for the `workflowRunner.run` method input.
|
||||
*/
|
||||
private getStartNodesData(
|
||||
workflow: IWorkflowBase,
|
||||
pastExecutionData: IRunExecutionData,
|
||||
pastExecutionWorkflowData: IWorkflowBase,
|
||||
): Pick<IWorkflowExecutionDataProcess, 'startNodes' | 'triggerToStartFrom'> {
|
||||
// Create a new workflow instance to use the helper functions (getChildNodes)
|
||||
const workflowInstance = new Workflow({
|
||||
nodes: workflow.nodes,
|
||||
connections: workflow.connections,
|
||||
active: false,
|
||||
nodeTypes: this.nodeTypes,
|
||||
});
|
||||
|
||||
// Create a map between node IDs and node names for the past workflow
|
||||
const pastWorkflowNodeIdByName = new Map(
|
||||
pastExecutionWorkflowData.nodes.map((node) => [node.name, node.id]),
|
||||
): Pick<IWorkflowExecutionDataProcess, 'triggerToStartFrom'> {
|
||||
// Find the dataset trigger node
|
||||
// TODO: replace with dataset trigger node
|
||||
const triggerNode = workflow.nodes.find(
|
||||
(node) => node.type === 'n8n-nodes-base.executeWorkflowTrigger',
|
||||
);
|
||||
|
||||
// Create a map between node names and IDs for the up-to-date workflow
|
||||
const workflowNodeNameById = new Map(workflow.nodes.map((node) => [node.id, node.name]));
|
||||
|
||||
// Determine the trigger node of the past execution
|
||||
const pastExecutionTriggerNode = getPastExecutionTriggerNode(pastExecutionData);
|
||||
assert(pastExecutionTriggerNode, 'Could not find the trigger node of the past execution');
|
||||
|
||||
const pastExecutionTriggerNodeId = pastWorkflowNodeIdByName.get(pastExecutionTriggerNode);
|
||||
assert(pastExecutionTriggerNodeId, 'Could not find the trigger node ID of the past execution');
|
||||
|
||||
// Check the trigger is still present in the workflow
|
||||
const triggerNode = workflowNodeNameById.get(pastExecutionTriggerNodeId);
|
||||
if (!triggerNode) {
|
||||
// TODO: Change error
|
||||
throw new TestCaseExecutionError('TRIGGER_NO_LONGER_EXISTS');
|
||||
}
|
||||
|
||||
const triggerNodeData = pastExecutionData.resultData.runData[pastExecutionTriggerNode][0];
|
||||
assert(triggerNodeData, 'Trigger node data not found');
|
||||
|
||||
const triggerToStartFrom = {
|
||||
name: triggerNode,
|
||||
data: triggerNodeData,
|
||||
name: triggerNode.name,
|
||||
};
|
||||
|
||||
// Start nodes are the nodes that are connected to the trigger node
|
||||
const startNodes = workflowInstance
|
||||
.getChildNodes(triggerNode, NodeConnectionTypes.Main, 1)
|
||||
.map((nodeName) => ({
|
||||
name: nodeName,
|
||||
sourceData: { previousNode: pastExecutionTriggerNode },
|
||||
}));
|
||||
|
||||
return {
|
||||
startNodes,
|
||||
triggerToStartFrom,
|
||||
};
|
||||
}
|
||||
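The reworked `getStartNodesData` above no longer replays a past execution's trigger; it looks up the Execute Workflow Trigger node in the workflow under test and starts from it. A minimal standalone sketch of that lookup, assuming only the node-list shape used in the surrounding diff (the error class is replaced by a plain `Error` here for self-containment):

import type { IWorkflowBase, IWorkflowExecutionDataProcess } from 'n8n-workflow';

// Simplified sketch of the lookup performed above; the node type string is
// taken from the diff, the error handling is an illustrative stand-in.
function getTriggerToStartFrom(
	workflow: IWorkflowBase,
): Pick<IWorkflowExecutionDataProcess, 'triggerToStartFrom'> {
	const triggerNode = workflow.nodes.find(
		(node) => node.type === 'n8n-nodes-base.executeWorkflowTrigger',
	);

	if (!triggerNode) {
		throw new Error('Trigger no longer exists in the workflow under test');
	}

	return { triggerToStartFrom: { name: triggerNode.name } };
}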
@@ -147,10 +72,7 @@ export class TestRunnerService {
|
||||
*/
|
||||
private async runTestCase(
|
||||
workflow: IWorkflowBase,
|
||||
pastExecutionData: IRunExecutionData,
|
||||
pastExecutionWorkflowData: IWorkflowBase,
|
||||
mockedNodes: MockedNodeItem[],
|
||||
metadata: TestCaseRunMetadata,
|
||||
metadata: TestRunMetadata,
|
||||
abortSignal: AbortSignal,
|
||||
): Promise<IRun | undefined> {
|
||||
// Do not run if the test run is cancelled
|
||||
@@ -158,19 +80,7 @@ export class TestRunnerService {
|
||||
return;
|
||||
}
|
||||
|
||||
// Create pin data from the past execution data
|
||||
const pinData = createPinData(
|
||||
workflow,
|
||||
mockedNodes,
|
||||
pastExecutionData,
|
||||
pastExecutionWorkflowData,
|
||||
);
|
||||
|
||||
const startNodesData = this.getStartNodesData(
|
||||
workflow,
|
||||
pastExecutionData,
|
||||
pastExecutionWorkflowData,
|
||||
);
|
||||
const startNodesData = this.getStartNodesData(workflow);
|
||||
|
||||
// Prepare the data to run the workflow
|
||||
// Evaluation executions should run the same way as manual,
|
||||
@@ -179,8 +89,8 @@ export class TestRunnerService {
|
||||
...startNodesData,
|
||||
executionMode: 'evaluation',
|
||||
runData: {},
|
||||
pinData,
|
||||
workflowData: { ...workflow, pinData },
|
||||
// pinData,
|
||||
workflowData: workflow,
|
||||
userId: metadata.userId,
|
||||
partialExecutionVersion: 2,
|
||||
};
|
||||
@@ -190,10 +100,10 @@ export class TestRunnerService {
|
||||
if (config.getEnv('executions.mode') === 'queue') {
|
||||
data.executionData = {
|
||||
startData: {
|
||||
startNodes: startNodesData.startNodes,
|
||||
// startNodes: startNodesData.startNodes,
|
||||
},
|
||||
resultData: {
|
||||
pinData,
|
||||
// pinData,
|
||||
runData: {},
|
||||
},
|
||||
manualData: {
|
||||
@@ -213,96 +123,7 @@ export class TestRunnerService {
|
||||
this.activeExecutions.stopExecution(executionId);
|
||||
});
|
||||
|
||||
// Update status of the test run execution mapping
|
||||
await this.testCaseExecutionRepository.markAsRunning({
|
||||
testRunId: metadata.testRunId,
|
||||
pastExecutionId: metadata.pastExecutionId,
|
||||
executionId,
|
||||
});
|
||||
|
||||
// Wait for the execution to finish
|
||||
const executePromise = this.activeExecutions.getPostExecutePromise(executionId);
|
||||
|
||||
return await executePromise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync the metrics of the test definition with the evaluation workflow.
|
||||
*/
|
||||
async syncMetrics(
|
||||
testDefinitionId: string,
|
||||
evaluationWorkflow: IWorkflowBase,
|
||||
): Promise<Set<string>> {
|
||||
const usedTestMetricNames = await this.getUsedTestMetricNames(evaluationWorkflow);
|
||||
const existingTestMetrics = await this.testMetricRepository.find({
|
||||
where: {
|
||||
testDefinition: { id: testDefinitionId },
|
||||
},
|
||||
});
|
||||
|
||||
const existingMetricNames = new Set(existingTestMetrics.map((metric) => metric.name));
|
||||
const metricsToAdd = difference(
|
||||
Array.from(usedTestMetricNames),
|
||||
Array.from(existingMetricNames),
|
||||
);
|
||||
const metricsToRemove = difference(
|
||||
Array.from(existingMetricNames),
|
||||
Array.from(usedTestMetricNames),
|
||||
);
|
||||
|
||||
// Add new metrics
|
||||
const metricsToAddEntities = metricsToAdd.map((metricName) =>
|
||||
this.testMetricRepository.create({
|
||||
name: metricName,
|
||||
testDefinition: { id: testDefinitionId },
|
||||
}),
|
||||
);
|
||||
await this.testMetricRepository.save(metricsToAddEntities);
|
||||
|
||||
// Remove no longer used metrics
|
||||
metricsToRemove.forEach(async (metricName) => {
|
||||
const metric = existingTestMetrics.find((m) => m.name === metricName);
|
||||
assert(metric, 'Existing metric not found');
|
||||
|
||||
await this.testMetricRepository.delete(metric.id);
|
||||
});
|
||||
|
||||
return usedTestMetricNames;
|
||||
}
|
||||
|
||||
/**
|
||||
* Run the evaluation workflow with the expected and actual run data.
|
||||
*/
|
||||
private async runTestCaseEvaluation(
|
||||
evaluationWorkflow: IWorkflowBase,
|
||||
evaluationInputData: any,
|
||||
abortSignal: AbortSignal,
|
||||
metadata: TestCaseRunMetadata,
|
||||
) {
|
||||
// Do not run if the test run is cancelled
|
||||
if (abortSignal.aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Prepare the data to run the evaluation workflow
|
||||
const data = await getRunData(evaluationWorkflow, [evaluationInputData]);
|
||||
data.executionMode = 'integrated';
|
||||
|
||||
// Trigger the evaluation workflow
|
||||
const executionId = await this.workflowRunner.run(data);
|
||||
assert(executionId);
|
||||
|
||||
// Listen to the abort signal to stop the execution in case test run is cancelled
|
||||
abortSignal.addEventListener('abort', () => {
|
||||
this.activeExecutions.stopExecution(executionId);
|
||||
});
|
||||
|
||||
// Update status of the test run execution mapping
|
||||
await this.testCaseExecutionRepository.markAsEvaluationRunning({
|
||||
testRunId: metadata.testRunId,
|
||||
pastExecutionId: metadata.pastExecutionId,
|
||||
evaluationExecutionId: executionId,
|
||||
});
|
||||
// TODO: Update status of the test run execution
|
||||
|
||||
// Wait for the execution to finish
|
||||
const executePromise = this.activeExecutions.getPostExecutePromise(executionId);
|
||||
@@ -317,51 +138,18 @@ export class TestRunnerService {
|
||||
return workflow.nodes.filter((node) => node.type === EVALUATION_METRICS_NODE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluation result is the first item in the output of the last node
|
||||
* executed in the evaluation workflow. Defaults to an empty object
|
||||
* in case the node doesn't produce any output items.
|
||||
*/
|
||||
private extractEvaluationResult(execution: IRun, evaluationWorkflow: IWorkflowBase): IDataObject {
|
||||
const lastNodeExecuted = execution.data.resultData.lastNodeExecuted;
|
||||
assert(lastNodeExecuted, 'Could not find the last node executed in evaluation workflow');
|
||||
const metricsNodes = TestRunnerService.getEvaluationMetricsNodes(evaluationWorkflow);
|
||||
const metricsRunData = metricsNodes.flatMap(
|
||||
(node) => execution.data.resultData.runData[node.name],
|
||||
);
|
||||
const metricsData = metricsRunData.reverse().map((data) => data.data?.main?.[0]?.[0]?.json);
|
||||
const metricsResult = metricsData.reduce((acc, curr) => ({ ...acc, ...curr }), {}) ?? {};
|
||||
|
||||
return metricsResult;
|
||||
}
|
||||
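`extractEvaluationResult` above flattens the JSON output of every metrics node and spread-merges it into one object after reversing the run order, so on a name clash the value from the run that appears first in the original order wins. A tiny worked example of that merge order (values are made up):

// Worked example of the reverse-then-spread merge used above.
const runsInOrder = [{ accuracy: 0.5 }, { accuracy: 0.9, latency: 120 }];

const merged = runsInOrder
	.slice()
	.reverse()
	.reduce<Record<string, number>>((acc, curr) => ({ ...acc, ...curr }), {});

// merged -> { accuracy: 0.5, latency: 120 }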
|
||||
/**
|
||||
* Get the metrics to collect from the evaluation workflow execution results.
|
||||
*/
|
||||
private async getUsedTestMetricNames(evaluationWorkflow: IWorkflowBase) {
|
||||
const metricsNodes = TestRunnerService.getEvaluationMetricsNodes(evaluationWorkflow);
|
||||
const metrics = metricsNodes.map((node) => {
|
||||
const metricsParameter = node.parameters?.metrics as AssignmentCollectionValue;
|
||||
assert(metricsParameter, 'Metrics parameter not found');
|
||||
|
||||
const metricsNames = metricsParameter.assignments.map((assignment) => assignment.name);
|
||||
return metricsNames;
|
||||
});
|
||||
|
||||
return new Set(metrics.flat());
|
||||
}
|
||||
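`getUsedTestMetricNames` above reads the `metrics` assignment collection of each Evaluation Metrics node and keeps only the assignment names. A small illustration of the parameter shape it expects (the field values and ids below are made up for the example):

// Hypothetical parameters of an Evaluation Metrics node; only assignments[].name
// matters for metric-name collection.
const metricsParameter = {
	assignments: [
		{ id: '1', name: 'accuracy', value: '={{ $json.score }}', type: 'number' },
		{ id: '2', name: 'latency', value: '={{ $json.tookMs }}', type: 'number' },
	],
};

const usedMetricNames = new Set(metricsParameter.assignments.map((a) => a.name));
// usedMetricNames -> Set { 'accuracy', 'latency' }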
|
||||
/**
|
||||
* Creates a new test run for the given test definition.
|
||||
*/
|
||||
async runTest(user: User, test: TestDefinition): Promise<void> {
|
||||
this.logger.debug('Starting new test run', { testId: test.id });
|
||||
async runTest(user: User, workflowId: string): Promise<void> {
|
||||
this.logger.debug('Starting new test run', { workflowId });
|
||||
|
||||
const workflow = await this.workflowRepository.findById(test.workflowId);
|
||||
const workflow = await this.workflowRepository.findById(workflowId);
|
||||
assert(workflow, 'Workflow not found');
|
||||
|
||||
// 0. Create new Test Run
|
||||
const testRun = await this.testRunRepository.createTestRun(test.id);
|
||||
// TODO: Check that createTestRun takes workflowId as an argument
|
||||
const testRun = await this.testRunRepository.createTestRun(workflowId);
|
||||
assert(testRun, 'Unable to create a test run');
|
||||
|
||||
// 0.1 Initialize AbortController
|
||||
@@ -378,217 +166,120 @@ export class TestRunnerService {
|
||||
let testRunEndStatusForTelemetry;
|
||||
|
||||
const abortSignal = abortController.signal;
|
||||
const { manager: dbManager } = this.executionRepository;
|
||||
const { manager: dbManager } = this.testRunRepository;
|
||||
|
||||
try {
|
||||
// Get the evaluation workflow
|
||||
const evaluationWorkflow = await this.workflowRepository.findById(test.evaluationWorkflowId);
|
||||
if (!evaluationWorkflow) {
|
||||
throw new TestRunError('EVALUATION_WORKFLOW_NOT_FOUND');
|
||||
}
|
||||
///
|
||||
// 1. Make test cases from previous executions
|
||||
// 1. Make test cases list
|
||||
///
|
||||
|
||||
// Select executions with the annotation tag and workflow ID of the test.
|
||||
// Fetch only ids to reduce the data transfer.
|
||||
const pastExecutions: ReadonlyArray<Pick<ExecutionEntity, 'id'>> =
|
||||
await this.executionRepository
|
||||
.createQueryBuilder('execution')
|
||||
.select('execution.id')
|
||||
.leftJoin('execution.annotation', 'annotation')
|
||||
.leftJoin('annotation.tags', 'annotationTag')
|
||||
.where('annotationTag.id = :tagId', { tagId: test.annotationTagId })
|
||||
.andWhere('execution.workflowId = :workflowId', { workflowId: test.workflowId })
|
||||
.getMany();
|
||||
// TODO: Get the test cases from the dataset trigger node
|
||||
const testCases = [{ id: 1 }];
|
||||
|
||||
this.logger.debug('Found past executions', { count: pastExecutions.length });
|
||||
this.logger.debug('Found test cases', { count: testCases.length });
|
||||
|
||||
if (pastExecutions.length === 0) {
|
||||
if (testCases.length === 0) {
|
||||
// TODO: Change error
|
||||
throw new TestRunError('PAST_EXECUTIONS_NOT_FOUND');
|
||||
}
|
||||
|
||||
// Add all past executions mappings to the test run.
|
||||
// This will be used to track the status of each test case and keep the connection between test run and all related executions (past, current, and evaluation).
|
||||
await this.testCaseExecutionRepository.createBatch(
|
||||
testRun.id,
|
||||
pastExecutions.map((e) => e.id),
|
||||
);
|
||||
// await this.testCaseExecutionRepository.createBatch(
|
||||
// testRun.id,
|
||||
// testCases.map((e) => e.id),
|
||||
// );
|
||||
|
||||
// Sync the metrics of the test definition with the evaluation workflow
|
||||
const testMetricNames = await this.syncMetrics(test.id, evaluationWorkflow);
|
||||
// TODO: Collect metric names from evaluation nodes of the workflow
|
||||
// const testMetricNames = new Set<string>();
|
||||
|
||||
// 2. Run over all the test cases
|
||||
const pastExecutionIds = pastExecutions.map((e) => e.id);
|
||||
// const pastExecutionIds = pastExecutions.map((e) => e.id);
|
||||
|
||||
// Update test run status
|
||||
await this.testRunRepository.markAsRunning(testRun.id, pastExecutions.length);
|
||||
// TODO: mark test run as running
|
||||
// await this.testRunRepository.markAsRunning(testRun.id);
|
||||
|
||||
this.telemetry.track('User ran test', {
|
||||
user_id: user.id,
|
||||
test_id: test.id,
|
||||
run_id: testRun.id,
|
||||
executions_ids: pastExecutionIds,
|
||||
workflow_id: test.workflowId,
|
||||
evaluation_workflow_id: test.evaluationWorkflowId,
|
||||
workflow_id: workflowId,
|
||||
});
|
||||
|
||||
// Initialize object to collect the results of the evaluation workflow executions
|
||||
const metrics = new EvaluationMetrics(testMetricNames);
|
||||
// const metrics = new EvaluationMetrics();
|
||||
|
||||
///
|
||||
// 2. Run over all the test cases
|
||||
///
|
||||
|
||||
for (const pastExecutionId of pastExecutionIds) {
|
||||
for (const _testCase of testCases) {
|
||||
if (abortSignal.aborted) {
|
||||
this.logger.debug('Test run was cancelled', {
|
||||
testId: test.id,
|
||||
stoppedOn: pastExecutionId,
|
||||
workflowId,
|
||||
// stoppedOn: pastExecutionId,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
this.logger.debug('Running test case', { pastExecutionId });
|
||||
this.logger.debug('Running test case');
|
||||
|
||||
try {
|
||||
// Fetch past execution with data
|
||||
const pastExecution = await this.executionRepository.findOne({
|
||||
where: { id: pastExecutionId },
|
||||
relations: ['executionData', 'metadata', 'annotation', 'annotation.tags'],
|
||||
});
|
||||
assert(pastExecution, 'Execution not found');
|
||||
|
||||
const executionData = parse(pastExecution.executionData.data) as IRunExecutionData;
|
||||
|
||||
const testCaseMetadata = {
|
||||
...testRunMetadata,
|
||||
pastExecutionId,
|
||||
highlightedData: pastExecution.metadata,
|
||||
annotation: pastExecution.annotation,
|
||||
};
|
||||
|
||||
// Run the test case and wait for it to finish
|
||||
const testCaseExecution = await this.runTestCase(
|
||||
workflow,
|
||||
executionData,
|
||||
pastExecution.executionData.workflowData,
|
||||
test.mockedNodes,
|
||||
testCaseMetadata,
|
||||
abortSignal,
|
||||
);
|
||||
const testCaseExecution = await this.runTestCase(workflow, testCaseMetadata, abortSignal);
|
||||
|
||||
this.logger.debug('Test case execution finished', { pastExecutionId });
|
||||
this.logger.debug('Test case execution finished');
|
||||
|
||||
// In case of a permission check issue, the test case execution will be undefined.
|
||||
// If that happens, or if the test case execution produced an error, mark the test case as failed.
|
||||
if (!testCaseExecution || testCaseExecution.data.resultData.error) {
|
||||
await dbManager.transaction(async (trx) => {
|
||||
await this.testRunRepository.incrementFailed(testRun.id, trx);
|
||||
await this.testCaseExecutionRepository.markAsFailed({
|
||||
testRunId: testRun.id,
|
||||
pastExecutionId,
|
||||
errorCode: 'FAILED_TO_EXECUTE_WORKFLOW',
|
||||
trx,
|
||||
});
|
||||
});
|
||||
// TODO: add failed test case execution to DB
|
||||
continue;
|
||||
}
|
||||
|
||||
// Collect the results of the test case execution
|
||||
const testCaseRunData = testCaseExecution.data.resultData.runData;
|
||||
// TODO: extract metrics
|
||||
|
||||
// Get the original runData from the test case execution data
|
||||
const originalRunData = executionData.resultData.runData;
|
||||
|
||||
const evaluationInputData = formatTestCaseExecutionInputData(
|
||||
originalRunData,
|
||||
pastExecution.executionData.workflowData,
|
||||
testCaseRunData,
|
||||
workflow,
|
||||
testCaseMetadata,
|
||||
);
|
||||
|
||||
// Run the evaluation workflow with the original and new run data
|
||||
const evalExecution = await this.runTestCaseEvaluation(
|
||||
evaluationWorkflow,
|
||||
evaluationInputData,
|
||||
abortSignal,
|
||||
testCaseMetadata,
|
||||
);
|
||||
assert(evalExecution);
|
||||
|
||||
this.logger.debug('Evaluation execution finished', { pastExecutionId });
|
||||
|
||||
// Extract the output of the last node executed in the evaluation workflow
|
||||
const { addedMetrics } = metrics.addResults(
|
||||
this.extractEvaluationResult(evalExecution, evaluationWorkflow),
|
||||
);
|
||||
|
||||
if (evalExecution.data.resultData.error) {
|
||||
await dbManager.transaction(async (trx) => {
|
||||
await this.testRunRepository.incrementFailed(testRun.id, trx);
|
||||
await this.testCaseExecutionRepository.markAsFailed({
|
||||
testRunId: testRun.id,
|
||||
pastExecutionId,
|
||||
errorCode: 'FAILED_TO_EXECUTE_EVALUATION_WORKFLOW',
|
||||
trx,
|
||||
});
|
||||
});
|
||||
} else {
|
||||
await dbManager.transaction(async (trx) => {
|
||||
await this.testRunRepository.incrementPassed(testRun.id, trx);
|
||||
|
||||
await this.testCaseExecutionRepository.markAsCompleted({
|
||||
testRunId: testRun.id,
|
||||
pastExecutionId,
|
||||
metrics: addedMetrics,
|
||||
trx,
|
||||
});
|
||||
});
|
||||
}
|
||||
// Create a new test case execution in DB
|
||||
// TODO: add successful test case execution to DB
|
||||
} catch (e) {
|
||||
// In case of an unexpected error, increment the failed count and continue with the next test case
|
||||
await dbManager.transaction(async (trx) => {
|
||||
await this.testRunRepository.incrementFailed(testRun.id, trx);
|
||||
|
||||
if (e instanceof TestCaseExecutionError) {
|
||||
await this.testCaseExecutionRepository.markAsFailed({
|
||||
testRunId: testRun.id,
|
||||
pastExecutionId,
|
||||
errorCode: e.code,
|
||||
errorDetails: e.extra as IDataObject,
|
||||
trx,
|
||||
});
|
||||
} else {
|
||||
await this.testCaseExecutionRepository.markAsFailed({
|
||||
testRunId: testRun.id,
|
||||
pastExecutionId,
|
||||
errorCode: 'UNKNOWN_ERROR',
|
||||
trx,
|
||||
});
|
||||
|
||||
// Report unexpected errors
|
||||
this.errorReporter.error(e);
|
||||
}
|
||||
// FIXME: this is a temporary log
|
||||
this.logger.error('Test case execution failed', {
|
||||
workflowId,
|
||||
testRunId: testRun.id,
|
||||
error: e,
|
||||
});
|
||||
|
||||
// In case of an unexpected error save it as failed test case execution and continue with the next test case
|
||||
if (e instanceof TestCaseExecutionError) {
|
||||
// TODO: add failed test case execution to DB
|
||||
} else {
|
||||
// TODO: add failed test case execution to DB
|
||||
|
||||
// Report unexpected errors
|
||||
this.errorReporter.error(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Mark the test run as completed or cancelled
|
||||
if (abortSignal.aborted) {
|
||||
await dbManager.transaction(async (trx) => {
|
||||
await this.testRunRepository.markAsCancelled(testRun.id, trx);
|
||||
// TODO: mark test run as cancelled
|
||||
// await this.testRunRepository.markAsCancelled(testRun.id, trx);
|
||||
await this.testCaseExecutionRepository.markAllPendingAsCancelled(testRun.id, trx);
|
||||
|
||||
testRunEndStatusForTelemetry = 'cancelled';
|
||||
});
|
||||
} else {
|
||||
const aggregatedMetrics = metrics.getAggregatedMetrics();
|
||||
// const aggregatedMetrics = metrics.getAggregatedMetrics();
|
||||
|
||||
await this.testRunRepository.markAsCompleted(testRun.id, aggregatedMetrics);
|
||||
// TODO: mark test run as completed in DB and save metrics
|
||||
|
||||
this.logger.debug('Test run finished', { testId: test.id, testRunId: testRun.id });
|
||||
this.logger.debug('Test run finished', { workflowId, testRunId: testRun.id });
|
||||
|
||||
testRunEndStatusForTelemetry = 'completed';
|
||||
}
|
||||
@@ -600,16 +291,16 @@ export class TestRunnerService {
|
||||
});
|
||||
|
||||
await dbManager.transaction(async (trx) => {
|
||||
await this.testRunRepository.markAsCancelled(testRun.id, trx);
|
||||
// TODO: mark test run as cancelled in DB
|
||||
await this.testCaseExecutionRepository.markAllPendingAsCancelled(testRun.id, trx);
|
||||
});
|
||||
|
||||
testRunEndStatusForTelemetry = 'cancelled';
|
||||
} else if (e instanceof TestRunError) {
|
||||
await this.testRunRepository.markAsError(testRun.id, e.code, e.extra as IDataObject);
|
||||
// TODO: mark test run as error
|
||||
testRunEndStatusForTelemetry = 'error';
|
||||
} else {
|
||||
await this.testRunRepository.markAsError(testRun.id, 'UNKNOWN_ERROR');
|
||||
// TODO: mark test run as error
|
||||
testRunEndStatusForTelemetry = 'error';
|
||||
throw e;
|
||||
}
|
||||
@@ -619,7 +310,7 @@ export class TestRunnerService {
|
||||
|
||||
// Send telemetry event
|
||||
this.telemetry.track('Test run finished', {
|
||||
test_id: test.id,
|
||||
workflow_id: workflowId,
|
||||
run_id: testRun.id,
|
||||
status: testRunEndStatusForTelemetry,
|
||||
});
|
||||
@@ -643,69 +334,13 @@ export class TestRunnerService {
|
||||
abortController.abort();
|
||||
this.abortControllers.delete(testRunId);
|
||||
} else {
|
||||
const { manager: dbManager } = this.executionRepository;
|
||||
const { manager: dbManager } = this.testRunRepository;
|
||||
|
||||
// If there is no abort controller, just mark the test run and all its pending test case executions as cancelled
|
||||
await dbManager.transaction(async (trx) => {
|
||||
await this.testRunRepository.markAsCancelled(testRunId, trx);
|
||||
// TODO: mark test run as cancelled in DB
|
||||
await this.testCaseExecutionRepository.markAllPendingAsCancelled(testRunId, trx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the example evaluation WF input for the test definition.
|
||||
* It uses the latest execution of a workflow under test as a source and formats it
|
||||
* the same way as the evaluation input would be formatted.
|
||||
* We explicitly provide annotation tag here (and DO NOT use the one from DB), because the test definition
|
||||
* might not be saved to the DB with the updated annotation tag at the moment we need to get the example data.
|
||||
*/
|
||||
async getExampleEvaluationInputData(test: TestDefinition, annotationTagId: string) {
|
||||
// Select the id of latest execution with the annotation tag and workflow ID of the test
|
||||
const lastPastExecution: Pick<ExecutionEntity, 'id'> | null = await this.executionRepository
|
||||
.createQueryBuilder('execution')
|
||||
.select('execution.id')
|
||||
.leftJoin('execution.annotation', 'annotation')
|
||||
.leftJoin('annotation.tags', 'annotationTag')
|
||||
.where('annotationTag.id = :tagId', { tagId: annotationTagId })
|
||||
.andWhere('execution.workflowId = :workflowId', { workflowId: test.workflowId })
|
||||
.orderBy('execution.createdAt', 'DESC')
|
||||
.getOne();
|
||||
|
||||
if (lastPastExecution === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Fetch past execution with data
|
||||
const pastExecution = await this.executionRepository.findOne({
|
||||
where: {
|
||||
id: lastPastExecution.id,
|
||||
},
|
||||
relations: ['executionData', 'metadata', 'annotation', 'annotation.tags'],
|
||||
});
|
||||
assert(pastExecution, 'Execution not found');
|
||||
|
||||
const executionData = parse(pastExecution.executionData.data) as IRunExecutionData;
|
||||
|
||||
const sampleTestCaseMetadata = {
|
||||
testRunId: 'sample-test-run-id',
|
||||
userId: 'sample-user-id',
|
||||
pastExecutionId: lastPastExecution.id,
|
||||
highlightedData: pastExecution.metadata,
|
||||
annotation: pastExecution.annotation,
|
||||
};
|
||||
|
||||
// Get the original runData from the test case execution data
|
||||
const originalRunData = executionData.resultData.runData;
|
||||
|
||||
// We use the same execution data for the original and new run data format example
|
||||
const evaluationInputData = formatTestCaseExecutionInputData(
|
||||
originalRunData,
|
||||
pastExecution.executionData.workflowData,
|
||||
originalRunData,
|
||||
pastExecution.executionData.workflowData,
|
||||
sampleTestCaseMetadata,
|
||||
);
|
||||
|
||||
return evaluationInputData.json;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,17 +1,13 @@
|
||||
import type { MockedNodeItem } from '@n8n/db';
|
||||
import assert from 'assert';
|
||||
import { mapValues, pick } from 'lodash';
|
||||
import type {
|
||||
IRunExecutionData,
|
||||
IPinData,
|
||||
IWorkflowBase,
|
||||
IRunData,
|
||||
ITaskData,
|
||||
INode,
|
||||
} from 'n8n-workflow';
|
||||
import type { IRunExecutionData, IPinData, IWorkflowBase } from 'n8n-workflow';
|
||||
|
||||
import { TestCaseExecutionError } from '@/evaluation.ee/test-runner/errors.ee';
|
||||
import type { TestCaseRunMetadata } from '@/evaluation.ee/test-runner/test-runner.service.ee';
|
||||
|
||||
// Entity representing a node in a workflow under test, for which data should be mocked during test execution
|
||||
export type MockedNodeItem = {
|
||||
name?: string;
|
||||
id: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Extracts the execution data from the past execution
|
||||
@@ -69,72 +65,3 @@ export function getPastExecutionTriggerNode(executionData: IRunExecutionData) {
|
||||
return !data[0].source || data[0].source.length === 0 || data[0].source[0] === null;
|
||||
});
|
||||
}
|
||||
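The predicate above identifies the trigger of a past execution by looking for a run that carries no `source` entry, since only downstream runs point back at the node that fed them. An illustrative snippet (node names and run data are made up):

// Illustrative run data: trigger runs carry no `source`, downstream runs do.
const runDataExample = {
	'Execute Workflow Trigger': [{ startTime: 0, executionTime: 1, source: [] }],
	'Set Fields': [{ startTime: 1, executionTime: 2, source: [{ previousNode: 'Execute Workflow Trigger' }] }],
};

const triggerName = Object.keys(runDataExample).find((name) => {
	const [firstRun] = runDataExample[name as keyof typeof runDataExample];
	return !firstRun.source || firstRun.source.length === 0;
});
// triggerName -> 'Execute Workflow Trigger'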
|
||||
/**
|
||||
* Function to check if the node is root node or sub-node.
|
||||
* Sub-node is a node which does not have the main output (the only exception is Stop and Error node)
|
||||
*/
|
||||
function isSubNode(node: INode, nodeData: ITaskData[]) {
|
||||
return (
|
||||
!node.type.endsWith('stopAndError') &&
|
||||
nodeData.some((nodeRunData) => !(nodeRunData.data && 'main' in nodeRunData.data))
|
||||
);
|
||||
}
|
||||
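`isSubNode` above treats a node as a sub-node when none of its runs produced a `main` output (with Stop and Error as the explicit exception). A self-contained illustration of that check, using made-up run data shapes:

// Illustrative run data: an AI sub-node typically emits e.g. `ai_languageModel`
// output rather than `main`, while a regular node emits `main`.
const subNodeRun = [{ executionTime: 3, data: { ai_languageModel: [[{ json: {} }]] } }];
const rootNodeRun = [{ executionTime: 5, data: { main: [[{ json: { ok: true } }]] } }];

const looksLikeSubNode = (runs: Array<{ data?: Record<string, unknown> }>) =>
	runs.some((run) => !(run.data && 'main' in run.data));

// looksLikeSubNode(subNodeRun) -> true, looksLikeSubNode(rootNodeRun) -> false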
|
||||
/**
|
||||
* Transform execution data and workflow data into a more user-friendly format to supply to evaluation workflow
|
||||
*/
|
||||
function formatExecutionData(data: IRunData, workflow: IWorkflowBase) {
|
||||
const formattedData = {} as Record<string, any>;
|
||||
|
||||
for (const [nodeName, nodeData] of Object.entries(data)) {
|
||||
const node = workflow.nodes.find((n) => n.name === nodeName);
|
||||
|
||||
assert(node, `Node "${nodeName}" not found in the workflow`);
|
||||
|
||||
const rootNode = !isSubNode(node, nodeData);
|
||||
|
||||
const runs = nodeData.map((nodeRunData) => ({
|
||||
executionTime: nodeRunData.executionTime,
|
||||
rootNode,
|
||||
output: nodeRunData.data
|
||||
? mapValues(nodeRunData.data, (connections) =>
|
||||
connections.map((singleOutputData) => singleOutputData?.map((item) => item.json) ?? []),
|
||||
)
|
||||
: null,
|
||||
}));
|
||||
|
||||
formattedData[node.id] = { nodeName, runs };
|
||||
}
|
||||
|
||||
return formattedData;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepare the evaluation wf input data.
|
||||
* Provide both the expected data (past execution) and the actual data (new execution),
|
||||
* as well as any annotations or highlighted data associated with the past execution
|
||||
*/
|
||||
export function formatTestCaseExecutionInputData(
|
||||
originalExecutionData: IRunData,
|
||||
_originalWorkflowData: IWorkflowBase,
|
||||
newExecutionData: IRunData,
|
||||
_newWorkflowData: IWorkflowBase,
|
||||
metadata: TestCaseRunMetadata,
|
||||
) {
|
||||
const annotations = {
|
||||
vote: metadata.annotation?.vote,
|
||||
tags: metadata.annotation?.tags?.map((tag) => pick(tag, ['id', 'name'])),
|
||||
highlightedData: Object.fromEntries(
|
||||
metadata.highlightedData?.map(({ key, value }) => [key, value]),
|
||||
),
|
||||
};
|
||||
|
||||
return {
|
||||
json: {
|
||||
annotations,
|
||||
originalExecution: formatExecutionData(originalExecutionData, _originalWorkflowData),
|
||||
newExecution: formatExecutionData(newExecutionData, _newWorkflowData),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
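`formatTestCaseExecutionInputData` above wraps everything into a single item for the evaluation workflow. Roughly, the item looks like the sketch below; the keys follow the functions above, while the concrete values and node ids are illustrative only.

// Rough shape of the single item handed to the evaluation workflow (values made up).
const evaluationInputExample = {
	json: {
		annotations: {
			vote: 'up',
			tags: [{ id: 'tag-1', name: 'golden' }],
			highlightedData: { customerId: '42' },
		},
		originalExecution: {
			'node-uuid-1': {
				nodeName: 'HTTP Request',
				runs: [{ executionTime: 12, rootNode: true, output: { main: [[{ status: 200 }]] } }],
			},
		},
		newExecution: {
			'node-uuid-1': {
				nodeName: 'HTTP Request',
				runs: [{ executionTime: 9, rootNode: true, output: { main: [[{ status: 200 }]] } }],
			},
		},
	},
};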
@@ -1,4 +1,5 @@
|
||||
import { TestCaseExecutionRepository, TestRunRepository } from '@n8n/db';
|
||||
import type { User } from '@n8n/db';
|
||||
import { Delete, Get, Post, RestController } from '@n8n/decorators';
|
||||
import express from 'express';
|
||||
import { InstanceSettings } from 'n8n-core';
|
||||
@@ -7,56 +8,36 @@ import { UnexpectedError } from 'n8n-workflow';
|
||||
import { ConflictError } from '@/errors/response-errors/conflict.error';
|
||||
import { NotFoundError } from '@/errors/response-errors/not-found.error';
|
||||
import { NotImplementedError } from '@/errors/response-errors/not-implemented.error';
|
||||
import { TestRunsRequest } from '@/evaluation.ee/test-definitions.types.ee';
|
||||
import { TestRunnerService } from '@/evaluation.ee/test-runner/test-runner.service.ee';
|
||||
import { TestRunsRequest } from '@/evaluation.ee/test-runs.types.ee';
|
||||
import { listQueryMiddleware } from '@/middlewares';
|
||||
import { getSharedWorkflowIds } from '@/public-api/v1/handlers/workflows/workflows.service';
|
||||
import { Telemetry } from '@/telemetry';
|
||||
import { WorkflowFinderService } from '@/workflows/workflow-finder.service';
|
||||
|
||||
import { TestDefinitionService } from './test-definition.service.ee';
|
||||
|
||||
@RestController('/evaluation/test-definitions')
|
||||
@RestController('/workflows')
|
||||
export class TestRunsController {
|
||||
constructor(
|
||||
private readonly testDefinitionService: TestDefinitionService,
|
||||
private readonly testRunRepository: TestRunRepository,
|
||||
private readonly workflowFinderService: WorkflowFinderService,
|
||||
private readonly testCaseExecutionRepository: TestCaseExecutionRepository,
|
||||
private readonly testRunnerService: TestRunnerService,
|
||||
private readonly instanceSettings: InstanceSettings,
|
||||
private readonly telemetry: Telemetry,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* This method is used in multiple places in the controller to get the test definition
|
||||
* (or just check that it exists and the user has access to it).
|
||||
*/
|
||||
private async getTestDefinition(
|
||||
req: TestRunsRequest.GetOne | TestRunsRequest.GetMany | TestRunsRequest.Delete,
|
||||
) {
|
||||
const { testDefinitionId } = req.params;
|
||||
|
||||
const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
|
||||
|
||||
const testDefinition = await this.testDefinitionService.findOne(
|
||||
testDefinitionId,
|
||||
userAccessibleWorkflowIds,
|
||||
);
|
||||
|
||||
if (!testDefinition) throw new NotFoundError('Test definition not found');
|
||||
|
||||
return testDefinition;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the test run (or just check that it exists and the user has access to it)
|
||||
*/
|
||||
private async getTestRun(
|
||||
req: TestRunsRequest.GetOne | TestRunsRequest.Delete | TestRunsRequest.Cancel,
|
||||
) {
|
||||
const { id: testRunId, testDefinitionId } = req.params;
|
||||
private async getTestRun(testRunId: string, workflowId: string, user: User) {
|
||||
const sharedWorkflowsIds = await getSharedWorkflowIds(user, ['workflow:read']);
|
||||
|
||||
if (!sharedWorkflowsIds.includes(workflowId)) {
|
||||
throw new NotFoundError('Test run not found');
|
||||
}
|
||||
|
||||
const testRun = await this.testRunRepository.findOne({
|
||||
where: { id: testRunId, testDefinition: { id: testDefinitionId } },
|
||||
where: { id: testRunId },
|
||||
});
|
||||
|
||||
if (!testRun) throw new NotFoundError('Test run not found');
|
||||
@@ -64,55 +45,50 @@ export class TestRunsController {
|
||||
return testRun;
|
||||
}
|
||||
|
||||
@Get('/:testDefinitionId/runs', { middlewares: listQueryMiddleware })
|
||||
@Get('/:workflowId/test-runs', { middlewares: listQueryMiddleware })
|
||||
async getMany(req: TestRunsRequest.GetMany) {
|
||||
const { testDefinitionId } = req.params;
|
||||
const { workflowId } = req.params;
|
||||
|
||||
await this.getTestDefinition(req);
|
||||
|
||||
return await this.testRunRepository.getMany(testDefinitionId, req.listQueryOptions);
|
||||
return await this.testRunRepository.getMany(workflowId, req.listQueryOptions);
|
||||
}
|
||||
|
||||
@Get('/:testDefinitionId/runs/:id')
|
||||
@Get('/:workflowId/test-runs/:id')
|
||||
async getOne(req: TestRunsRequest.GetOne) {
|
||||
const { testDefinitionId, id } = req.params;
|
||||
|
||||
await this.getTestDefinition(req);
|
||||
const { id } = req.params;
|
||||
|
||||
try {
|
||||
return await this.testRunRepository.getTestRunSummaryById(testDefinitionId, id);
|
||||
await this.getTestRun(req.params.id, req.params.workflowId, req.user); // FIXME: do not fetch test run twice
|
||||
return await this.testRunRepository.getTestRunSummaryById(id);
|
||||
} catch (error) {
|
||||
if (error instanceof UnexpectedError) throw new NotFoundError(error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
@Get('/:testDefinitionId/runs/:id/cases')
|
||||
@Get('/:workflowId/test-runs/:id/cases')
|
||||
async getTestCases(req: TestRunsRequest.GetCases) {
|
||||
await this.getTestDefinition(req);
|
||||
await this.getTestRun(req);
|
||||
await this.getTestRun(req.params.id, req.params.workflowId, req.user);
|
||||
|
||||
return await this.testCaseExecutionRepository.find({
|
||||
where: { testRun: { id: req.params.id } },
|
||||
});
|
||||
}
|
||||
|
||||
@Delete('/:testDefinitionId/runs/:id')
|
||||
@Delete('/:workflowId/test-runs/:id')
|
||||
async delete(req: TestRunsRequest.Delete) {
|
||||
const { id: testRunId, testDefinitionId } = req.params;
|
||||
const { id: testRunId } = req.params;
|
||||
|
||||
// Check test definition and test run exist
|
||||
await this.getTestDefinition(req);
|
||||
await this.getTestRun(req);
|
||||
// Check test run exist
|
||||
await this.getTestRun(req.params.id, req.params.workflowId, req.user);
|
||||
|
||||
await this.testRunRepository.delete({ id: testRunId });
|
||||
|
||||
this.telemetry.track('User deleted a run', { run_id: testRunId, test_id: testDefinitionId });
|
||||
this.telemetry.track('User deleted a run', { run_id: testRunId });
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Post('/:testDefinitionId/runs/:id/cancel')
|
||||
@Post('/:workflowId/test-runs/:id/cancel')
|
||||
async cancel(req: TestRunsRequest.Cancel, res: express.Response) {
|
||||
if (this.instanceSettings.isMultiMain) {
|
||||
throw new NotImplementedError('Cancelling test runs is not yet supported in multi-main mode');
|
||||
@@ -121,8 +97,7 @@ export class TestRunsController {
|
||||
const { id: testRunId } = req.params;
|
||||
|
||||
// Check test definition and test run exist
|
||||
await this.getTestDefinition(req);
|
||||
const testRun = await this.getTestRun(req);
|
||||
const testRun = await this.getTestRun(req.params.id, req.params.workflowId, req.user);
|
||||
|
||||
		if (!this.testRunnerService.canBeCancelled(testRun)) {
			const message = `The test run "${testRunId}" cannot be cancelled`;
|
||||
@@ -133,4 +108,25 @@ export class TestRunsController {
|
||||
|
||||
res.status(202).json({ success: true });
|
||||
}
|
||||
|
||||
@Post('/:workflowId/test-runs/new')
|
||||
async create(req: TestRunsRequest.Create, res: express.Response) {
|
||||
const { workflowId } = req.params;
|
||||
|
||||
const workflow = await this.workflowFinderService.findWorkflowForUser(workflowId, req.user, [
|
||||
'workflow:read',
|
||||
]);
|
||||
|
||||
if (!workflow) {
|
||||
// user trying to access a workflow they do not own
|
||||
// and was not shared to them
|
||||
// Or does not exist.
|
||||
return res.status(404).json({ message: 'Not Found' });
|
||||
}
|
||||
|
||||
// We do not await for the test run to complete
|
||||
void this.testRunnerService.runTest(req.user, workflow.id);
|
||||
|
||||
return res.status(202).json({ success: true });
|
||||
}
|
||||
}
|
||||
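The controller above moves test runs under the workflow routes. A hedged sketch of exercising the new endpoints with the supertest-style agent used elsewhere in this repo's integration tests; the paths follow the decorators above, while the response body shapes are assumptions.

// Assumes an authenticated supertest agent (authOwnerAgent) as in the test suite.
const workflowId = 'workflow-id';

// Kick off a new test run (runs asynchronously, responds with 202)
await authOwnerAgent.post(`/workflows/${workflowId}/test-runs/new`).expect(202);

// List runs for the workflow and fetch one of them
const runs = await authOwnerAgent.get(`/workflows/${workflowId}/test-runs`).expect(200);
const runId = runs.body.data[0].id; // shape of the list payload is an assumption

await authOwnerAgent.get(`/workflows/${workflowId}/test-runs/${runId}`).expect(200);

// Cancel or delete a run
await authOwnerAgent.post(`/workflows/${workflowId}/test-runs/${runId}/cancel`).expect(202);
await authOwnerAgent.delete(`/workflows/${workflowId}/test-runs/${runId}`).expect(200);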
|
||||
27 packages/cli/src/evaluation.ee/test-runs.types.ee.ts Normal file
@@ -0,0 +1,27 @@
import type { AuthenticatedRequest, ListQuery } from '@/requests';

export declare namespace TestRunsRequest {
	namespace RouteParams {
		type WorkflowId = {
			workflowId: string;
		};

		type TestRunId = {
			id: string;
		};
	}

	type Create = AuthenticatedRequest<RouteParams.WorkflowId>;

	type GetMany = AuthenticatedRequest<RouteParams.WorkflowId, {}, {}, ListQuery.Params> & {
		listQueryOptions: ListQuery.Options;
	};

	type GetOne = AuthenticatedRequest<RouteParams.WorkflowId & RouteParams.TestRunId>;

	type Delete = AuthenticatedRequest<RouteParams.WorkflowId & RouteParams.TestRunId>;

	type Cancel = AuthenticatedRequest<RouteParams.WorkflowId & RouteParams.TestRunId>;

	type GetCases = AuthenticatedRequest<RouteParams.WorkflowId & RouteParams.TestRunId>;
}
@@ -4,7 +4,6 @@ import type {
|
||||
WorkflowEntity,
|
||||
TagEntity,
|
||||
AnnotationTagEntity,
|
||||
TestDefinition,
|
||||
} from '@n8n/db';
|
||||
import { validate } from 'class-validator';
|
||||
|
||||
@@ -14,7 +13,6 @@ import { BadRequestError } from './errors/response-errors/bad-request.error';
|
||||
export async function validateEntity(
|
||||
entity:
|
||||
| WorkflowEntity
|
||||
| TestDefinition
|
||||
| CredentialsEntity
|
||||
| TagEntity
|
||||
| AnnotationTagEntity
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
import { Expose } from 'class-transformer';
|
||||
import { IsOptional, IsString } from 'class-validator';
|
||||
|
||||
import { BaseFilter } from './base.filter.dto';
|
||||
|
||||
export class TestDefinitionsFilter extends BaseFilter {
|
||||
@IsString()
|
||||
@IsOptional()
|
||||
@Expose()
|
||||
workflowId?: string;
|
||||
|
||||
static async fromString(rawFilter: string) {
|
||||
return await this.toFilter(rawFilter, TestDefinitionsFilter);
|
||||
}
|
||||
}
|
||||
@@ -5,7 +5,6 @@ import * as ResponseHelper from '@/response-helper';
|
||||
import { toError } from '@/utils';
|
||||
|
||||
import { CredentialsFilter } from './dtos/credentials.filter.dto';
|
||||
import { TestDefinitionsFilter } from './dtos/test-definitions.filter.dto';
|
||||
import { UserFilter } from './dtos/user.filter.dto';
|
||||
import { WorkflowFilter } from './dtos/workflow.filter.dto';
|
||||
|
||||
@@ -26,8 +25,6 @@ export const filterListQueryMiddleware = async (
|
||||
Filter = CredentialsFilter;
|
||||
} else if (req.baseUrl.endsWith('users')) {
|
||||
Filter = UserFilter;
|
||||
} else if (req.baseUrl.endsWith('test-definitions')) {
|
||||
Filter = TestDefinitionsFilter;
|
||||
} else {
|
||||
return next();
|
||||
}
|
||||
|
||||
@@ -58,7 +58,6 @@ import '@/events/events.controller';
|
||||
import '@/executions/executions.controller';
|
||||
import '@/external-secrets.ee/external-secrets.controller.ee';
|
||||
import '@/license/license.controller';
|
||||
import '@/evaluation.ee/test-definitions.controller.ee';
|
||||
import '@/evaluation.ee/test-runs.controller.ee';
|
||||
import '@/workflows/workflow-history.ee/workflow-history.controller.ee';
|
||||
import '@/workflows/workflows.controller';
|
||||
|
||||
@@ -1,505 +0,0 @@
|
||||
import type { User } from '@n8n/db';
|
||||
import type { AnnotationTagEntity } from '@n8n/db';
|
||||
import { TestDefinitionRepository } from '@n8n/db';
|
||||
import { Container } from '@n8n/di';
|
||||
import { mockInstance } from 'n8n-core/test/utils';
|
||||
import type { IWorkflowBase } from 'n8n-workflow';
|
||||
|
||||
import { TestRunnerService } from '@/evaluation.ee/test-runner/test-runner.service.ee';
|
||||
import { createAnnotationTags } from '@test-integration/db/executions';
|
||||
|
||||
import { createUserShell } from './../shared/db/users';
|
||||
import { createWorkflow } from './../shared/db/workflows';
|
||||
import * as testDb from './../shared/test-db';
|
||||
import type { SuperAgentTest } from './../shared/types';
|
||||
import * as utils from './../shared/utils/';
|
||||
|
||||
const testRunner = mockInstance(TestRunnerService);
|
||||
|
||||
let authOwnerAgent: SuperAgentTest;
|
||||
let workflowUnderTest: IWorkflowBase;
|
||||
let workflowUnderTest2: IWorkflowBase;
|
||||
let evaluationWorkflow: IWorkflowBase;
|
||||
let otherWorkflow: IWorkflowBase;
|
||||
let ownerShell: User;
|
||||
let annotationTag: AnnotationTagEntity;
|
||||
const testServer = utils.setupTestServer({ endpointGroups: ['evaluation'] });
|
||||
|
||||
beforeAll(async () => {
|
||||
ownerShell = await createUserShell('global:owner');
|
||||
authOwnerAgent = testServer.authAgentFor(ownerShell);
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
await testDb.truncate(['TestDefinition', 'WorkflowEntity', 'AnnotationTagEntity']);
|
||||
|
||||
workflowUnderTest = await createWorkflow({ name: 'workflow-under-test' }, ownerShell);
|
||||
workflowUnderTest2 = await createWorkflow({ name: 'workflow-under-test-2' }, ownerShell);
|
||||
evaluationWorkflow = await createWorkflow({ name: 'evaluation-workflow' }, ownerShell);
|
||||
otherWorkflow = await createWorkflow({ name: 'other-workflow' });
|
||||
annotationTag = (await createAnnotationTags(['test-tag']))[0];
|
||||
});
|
||||
|
||||
describe('GET /evaluation/test-definitions', () => {
|
||||
test('should retrieve empty test definitions list', async () => {
|
||||
const resp = await authOwnerAgent.get('/evaluation/test-definitions');
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data.count).toBe(0);
|
||||
expect(resp.body.data.testDefinitions).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('should retrieve test definitions list', async () => {
|
||||
const newTest = Container.get(TestDefinitionRepository).create({
|
||||
name: 'test',
|
||||
workflow: { id: workflowUnderTest.id },
|
||||
});
|
||||
await Container.get(TestDefinitionRepository).save(newTest);
|
||||
|
||||
const resp = await authOwnerAgent.get('/evaluation/test-definitions');
|
||||
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data).toEqual({
|
||||
count: 1,
|
||||
testDefinitions: [
|
||||
expect.objectContaining({
|
||||
name: 'test',
|
||||
workflowId: workflowUnderTest.id,
|
||||
evaluationWorkflowId: null,
|
||||
}),
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
test('should retrieve test definitions list with pagination', async () => {
|
||||
// Add a bunch of test definitions
|
||||
const testDefinitions = [];
|
||||
|
||||
for (let i = 0; i < 15; i++) {
|
||||
const newTest = Container.get(TestDefinitionRepository).create({
|
||||
name: `test-${i}`,
|
||||
workflow: { id: workflowUnderTest.id },
|
||||
});
|
||||
testDefinitions.push(newTest);
|
||||
}
|
||||
|
||||
await Container.get(TestDefinitionRepository).save(testDefinitions);
|
||||
|
||||
// Fetch the first page
|
||||
let resp = await authOwnerAgent.get('/evaluation/test-definitions?take=10');
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data.count).toBe(15);
|
||||
expect(resp.body.data.testDefinitions).toHaveLength(10);
|
||||
|
||||
// Fetch the second page
|
||||
resp = await authOwnerAgent.get('/evaluation/test-definitions?take=10&skip=10');
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data.count).toBe(15);
|
||||
expect(resp.body.data.testDefinitions).toHaveLength(5);
|
||||
});
|
||||
|
||||
test('should retrieve test definitions list for a workflow', async () => {
|
||||
// Add a bunch of test definitions for two different workflows
|
||||
const testDefinitions = [];
|
||||
|
||||
for (let i = 0; i < 15; i++) {
|
||||
const newTest = Container.get(TestDefinitionRepository).create({
|
||||
name: `test-${i}`,
|
||||
workflow: { id: workflowUnderTest.id },
|
||||
});
|
||||
|
||||
const newTest2 = Container.get(TestDefinitionRepository).create({
|
||||
name: `test-${i * 2}`,
|
||||
workflow: { id: workflowUnderTest2.id },
|
||||
});
|
||||
|
||||
testDefinitions.push(newTest, newTest2);
|
||||
}
|
||||
|
||||
await Container.get(TestDefinitionRepository).save(testDefinitions);
|
||||
|
||||
// Fetch test definitions of a second workflow
|
||||
let resp = await authOwnerAgent.get(
|
||||
`/evaluation/test-definitions?filter=${JSON.stringify({ workflowId: workflowUnderTest2.id })}`,
|
||||
);
|
||||
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data.count).toBe(15);
|
||||
});
|
||||
|
||||
test('should return error if user has no access to the workflowId specified in filter', async () => {
|
||||
let resp = await authOwnerAgent.get(
|
||||
`/evaluation/test-definitions?filter=${JSON.stringify({ workflowId: otherWorkflow.id })}`,
|
||||
);
|
||||
|
||||
expect(resp.statusCode).toBe(403);
|
||||
expect(resp.body.message).toBe('User does not have access to the workflow');
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /evaluation/test-definitions/:id', () => {
|
||||
test('should retrieve test definition', async () => {
|
||||
const newTest = Container.get(TestDefinitionRepository).create({
|
||||
name: 'test',
|
||||
workflow: { id: workflowUnderTest.id },
|
||||
});
|
||||
await Container.get(TestDefinitionRepository).save(newTest);
|
||||
|
||||
const resp = await authOwnerAgent.get(`/evaluation/test-definitions/${newTest.id}`);
|
||||
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data.name).toBe('test');
|
||||
expect(resp.body.data.workflowId).toBe(workflowUnderTest.id);
|
||||
expect(resp.body.data.evaluationWorkflowId).toBe(null);
|
||||
});
|
||||
|
||||
test('should return 404 for non-existent test definition', async () => {
|
||||
const resp = await authOwnerAgent.get('/evaluation/test-definitions/123');
|
||||
|
||||
expect(resp.statusCode).toBe(404);
|
||||
});
|
||||
|
||||
test('should retrieve test definition with evaluation workflow', async () => {
|
||||
const newTest = Container.get(TestDefinitionRepository).create({
|
||||
name: 'test',
|
||||
workflow: { id: workflowUnderTest.id },
|
||||
evaluationWorkflow: { id: evaluationWorkflow.id },
|
||||
});
|
||||
await Container.get(TestDefinitionRepository).save(newTest);
|
||||
|
||||
const resp = await authOwnerAgent.get(`/evaluation/test-definitions/${newTest.id}`);
|
||||
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data.name).toBe('test');
|
||||
expect(resp.body.data.workflowId).toBe(workflowUnderTest.id);
|
||||
expect(resp.body.data.evaluationWorkflowId).toBe(evaluationWorkflow.id);
|
||||
});
|
||||
|
||||
test('should not retrieve test definition if user does not have access to workflow under test', async () => {
|
||||
const newTest = Container.get(TestDefinitionRepository).create({
|
||||
name: 'test',
|
||||
workflow: { id: otherWorkflow.id },
|
||||
});
|
||||
await Container.get(TestDefinitionRepository).save(newTest);
|
||||
|
||||
const resp = await authOwnerAgent.get(`/evaluation/test-definitions/${newTest.id}`);
|
||||
|
||||
expect(resp.statusCode).toBe(404);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /evaluation/test-definitions', () => {
|
||||
test('should create test definition', async () => {
|
||||
const resp = await authOwnerAgent.post('/evaluation/test-definitions').send({
|
||||
name: 'test',
|
||||
workflowId: workflowUnderTest.id,
|
||||
});
|
||||
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data.name).toBe('test');
|
||||
expect(resp.body.data.workflowId).toBe(workflowUnderTest.id);
|
||||
});
|
||||
|
||||
test('should create test definition with evaluation workflow', async () => {
|
||||
const resp = await authOwnerAgent.post('/evaluation/test-definitions').send({
|
||||
name: 'test',
|
||||
workflowId: workflowUnderTest.id,
|
||||
evaluationWorkflowId: evaluationWorkflow.id,
|
||||
});
|
||||
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data).toEqual(
|
||||
expect.objectContaining({
|
||||
name: 'test',
|
||||
workflowId: workflowUnderTest.id,
|
||||
evaluationWorkflowId: evaluationWorkflow.id,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test('should create test definition with all fields', async () => {
|
||||
const resp = await authOwnerAgent.post('/evaluation/test-definitions').send({
|
||||
name: 'test',
|
||||
description: 'test description',
|
||||
workflowId: workflowUnderTest.id,
|
||||
evaluationWorkflowId: evaluationWorkflow.id,
|
||||
annotationTagId: annotationTag.id,
|
||||
});
|
||||
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data).toEqual(
|
||||
expect.objectContaining({
|
||||
name: 'test',
|
||||
description: 'test description',
|
||||
workflowId: workflowUnderTest.id,
|
||||
evaluationWorkflowId: evaluationWorkflow.id,
|
||||
annotationTag: expect.objectContaining({
|
||||
id: annotationTag.id,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test('should return error if name is empty', async () => {
|
||||
const resp = await authOwnerAgent.post('/evaluation/test-definitions').send({
|
||||
name: '',
|
||||
workflowId: workflowUnderTest.id,
|
||||
});
|
||||
|
||||
expect(resp.statusCode).toBe(400);
|
||||
expect(resp.body.errors).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
code: 'too_small',
|
||||
path: ['name'],
|
||||
}),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
test('should return error if user has no access to the workflow', async () => {
|
||||
const resp = await authOwnerAgent.post('/evaluation/test-definitions').send({
|
||||
name: 'test',
|
||||
workflowId: otherWorkflow.id,
|
||||
});
|
||||
|
||||
expect(resp.statusCode).toBe(403);
|
||||
expect(resp.body.message).toBe('User does not have access to the workflow');
|
||||
});
|
||||
|
||||
test('should return error if user has no access to the evaluation workflow', async () => {
|
||||
const resp = await authOwnerAgent.post('/evaluation/test-definitions').send({
|
||||
name: 'test',
|
||||
workflowId: workflowUnderTest.id,
|
||||
evaluationWorkflowId: otherWorkflow.id,
|
||||
});
|
||||
|
||||
expect(resp.statusCode).toBe(403);
|
||||
expect(resp.body.message).toBe('User does not have access to the evaluation workflow');
|
||||
});
|
||||
});
|
||||
|
||||
describe('PATCH /evaluation/test-definitions/:id', () => {
|
||||
test('should update test definition', async () => {
|
||||
const newTest = Container.get(TestDefinitionRepository).create({
|
||||
name: 'test',
|
||||
workflow: { id: workflowUnderTest.id },
|
||||
});
|
||||
await Container.get(TestDefinitionRepository).save(newTest);
|
||||
|
||||
const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({
|
||||
name: 'updated-test',
|
||||
});
|
||||
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data.name).toBe('updated-test');
|
||||
});
|
||||
|
||||
test('should return 404 if user has no access to the workflow', async () => {
|
||||
const newTest = Container.get(TestDefinitionRepository).create({
|
||||
name: 'test',
|
||||
workflow: { id: otherWorkflow.id },
|
||||
});
|
||||
await Container.get(TestDefinitionRepository).save(newTest);
|
||||
|
||||
const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({
|
||||
name: 'updated-test',
|
||||
});
|
||||
|
||||
expect(resp.statusCode).toBe(404);
|
||||
expect(resp.body.message).toBe('Test definition not found');
|
||||
});
|
||||
|
||||
test('should update test definition with evaluation workflow', async () => {
|
||||
const newTest = Container.get(TestDefinitionRepository).create({
|
||||
name: 'test',
|
||||
workflow: { id: workflowUnderTest.id },
|
||||
});
|
||||
await Container.get(TestDefinitionRepository).save(newTest);
|
||||
|
||||
const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({
|
||||
name: 'updated-test',
|
||||
evaluationWorkflowId: evaluationWorkflow.id,
|
||||
});
|
||||
|
||||
expect(resp.statusCode).toBe(200);
|
||||
expect(resp.body.data.name).toBe('updated-test');
|
||||
expect(resp.body.data.evaluationWorkflowId).toBe(evaluationWorkflow.id);
|
||||
});
|
||||
|
||||
test('should return error if user has no access to the evaluation workflow', async () => {
const newTest = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(newTest);

const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({
name: 'updated-test',
evaluationWorkflowId: otherWorkflow.id,
});

expect(resp.statusCode).toBe(403);
expect(resp.body.message).toBe('User does not have access to the evaluation workflow');
});

test('should disallow workflowId', async () => {
const newTest = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(newTest);

const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({
name: 'updated-test',
workflowId: otherWorkflow.id,
});

expect(resp.statusCode).toBe(400);
expect(resp.body.errors).toEqual(
expect.arrayContaining([
expect.objectContaining({
code: 'unrecognized_keys',
keys: ['workflowId'],
}),
]),
);
});

test('should update annotationTagId', async () => {
const newTest = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(newTest);

const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({
annotationTagId: annotationTag.id,
});

expect(resp.statusCode).toBe(200);
expect(resp.body.data.annotationTag.id).toBe(annotationTag.id);
});

test('should return error if annotationTagId is invalid', async () => {
const newTest = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(newTest);

const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({
annotationTagId: '123',
});

expect(resp.statusCode).toBe(400);
expect(resp.body.message).toBe('Annotation tag not found');
});

test('should update pinned nodes', async () => {
const newTest = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(newTest);

const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({
mockedNodes: [
{
id: 'uuid-1234',
name: 'Schedule Trigger',
},
],
});

expect(resp.statusCode).toBe(200);
expect(resp.body.data.mockedNodes).toEqual([{ id: 'uuid-1234', name: 'Schedule Trigger' }]);
});

test('should return error if pinned nodes are invalid', async () => {
const newTest = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(newTest);

const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({
mockedNodes: ['Simple string'],
});

expect(resp.statusCode).toBe(400);
});

test('should return error if pinned nodes are not in the workflow', async () => {
const newTest = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(newTest);

const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({
mockedNodes: [
{
name: 'Invalid Node',
},
],
});

expect(resp.statusCode).toBe(400);
});
});

describe('DELETE /evaluation/test-definitions/:id', () => {
test('should delete test definition', async () => {
const newTest = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(newTest);

const resp = await authOwnerAgent.delete(`/evaluation/test-definitions/${newTest.id}`);

expect(resp.statusCode).toBe(200);
expect(resp.body.data.success).toBe(true);
});

test('should return 404 if test definition does not exist', async () => {
const resp = await authOwnerAgent.delete('/evaluation/test-definitions/123');

expect(resp.statusCode).toBe(404);
expect(resp.body.message).toBe('Test definition not found');
});

test('should return 404 if user has no access to the workflow', async () => {
const newTest = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: otherWorkflow.id },
});
await Container.get(TestDefinitionRepository).save(newTest);

const resp = await authOwnerAgent.delete(`/evaluation/test-definitions/${newTest.id}`);

expect(resp.statusCode).toBe(404);
expect(resp.body.message).toBe('Test definition not found');
});
});

describe('POST /evaluation/test-definitions/:id/run', () => {
test('should trigger the test run', async () => {
const newTest = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(newTest);

const resp = await authOwnerAgent.post(`/evaluation/test-definitions/${newTest.id}/run`);

expect(resp.statusCode).toBe(202);
expect(resp.body).toEqual(
expect.objectContaining({
success: true,
}),
);

expect(testRunner.runTest).toHaveBeenCalledTimes(1);
});
});
@@ -1,7 +1,5 @@
import type { User } from '@n8n/db';
import type { TestDefinition } from '@n8n/db';
import { ProjectRepository } from '@n8n/db';
import { TestDefinitionRepository } from '@n8n/db';
import { TestRunRepository } from '@n8n/db';
import { Container } from '@n8n/di';
import { mockInstance } from 'n8n-core/test/utils';
@@ -17,8 +15,6 @@ import * as utils from '@test-integration/utils';
let authOwnerAgent: SuperAgentTest;
let workflowUnderTest: IWorkflowBase;
let otherWorkflow: IWorkflowBase;
let testDefinition: TestDefinition;
let otherTestDefinition: TestDefinition;
let ownerShell: User;

const testRunner = mockInstance(TestRunnerService);
@@ -34,83 +30,68 @@ beforeAll(async () => {
});

beforeEach(async () => {
await testDb.truncate(['TestDefinition', 'TestRun', 'WorkflowEntity', 'SharedWorkflow']);
await testDb.truncate(['TestRun', 'WorkflowEntity', 'SharedWorkflow']);

workflowUnderTest = await createWorkflow({ name: 'workflow-under-test' }, ownerShell);

testDefinition = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(testDefinition);

otherWorkflow = await createWorkflow({ name: 'other-workflow' });

otherTestDefinition = Container.get(TestDefinitionRepository).create({
name: 'other-test',
workflow: { id: otherWorkflow.id },
});
await Container.get(TestDefinitionRepository).save(otherTestDefinition);
});

describe('GET /evaluation/test-definitions/:testDefinitionId/runs', () => {
test('should retrieve empty list of runs for a test definition', async () => {
const resp = await authOwnerAgent.get(`/evaluation/test-definitions/${testDefinition.id}/runs`);
describe('GET /workflows/:workflowId/test-runs', () => {
test('should retrieve empty list of test runs', async () => {
const resp = await authOwnerAgent.get(`/workflows/${workflowUnderTest.id}/test-runs`);

expect(resp.statusCode).toBe(200);
expect(resp.body.data).toEqual([]);
});

test('should return 404 if test definition does not exist', async () => {
const resp = await authOwnerAgent.get('/evaluation/test-definitions/123/runs');
// TODO: replace with non existent workflow
// test('should return 404 if test definition does not exist', async () => {
// const resp = await authOwnerAgent.get('/evaluation/test-definitions/123/runs');
//
// expect(resp.statusCode).toBe(404);
// });

expect(resp.statusCode).toBe(404);
});
// TODO: replace with workflow that is not accessible to the user
// test('should return 404 if user does not have access to test definition', async () => {
// const resp = await authOwnerAgent.get(
// `/evaluation/test-definitions/${otherTestDefinition.id}/runs`,
// );
//
// expect(resp.statusCode).toBe(404);
// });

test('should return 404 if user does not have access to test definition', async () => {
const resp = await authOwnerAgent.get(
`/evaluation/test-definitions/${otherTestDefinition.id}/runs`,
);

expect(resp.statusCode).toBe(404);
});
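The two commented-out tests above are left as TODOs in this change. A minimal sketch of how they might later be rewritten against the new /workflows/:workflowId/test-runs route, reusing the agents and workflows already set up in this file (the exact expectations are an assumption, not part of this commit):

// Hypothetical follow-up tests for the TODOs above; not part of this commit.
test('should return 404 if workflow does not exist', async () => {
	const resp = await authOwnerAgent.get('/workflows/non-existent-id/test-runs');

	expect(resp.statusCode).toBe(404);
});

test('should return 404 if user does not have access to the workflow', async () => {
	// otherWorkflow is not shared with the requesting user in this suite's setup
	const resp = await authOwnerAgent.get(`/workflows/${otherWorkflow.id}/test-runs`);

	expect(resp.statusCode).toBe(404);
});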

test('should retrieve list of runs for a test definition', async () => {
test('should retrieve list of runs for a workflow', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(testDefinition.id);
const testRun = await testRunRepository.createTestRun(workflowUnderTest.id);

const resp = await authOwnerAgent.get(`/evaluation/test-definitions/${testDefinition.id}/runs`);
const resp = await authOwnerAgent.get(`/workflows/${workflowUnderTest.id}/test-runs`);

expect(resp.statusCode).toBe(200);
expect(resp.body.data).toEqual([
expect.objectContaining({
id: testRun.id,
status: 'new',
testDefinitionId: testDefinition.id,
runAt: null,
completedAt: null,
}),
]);
});

test('should retrieve list of runs for a test definition with pagination', async () => {
test('should retrieve list of test runs for a workflow with pagination', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun1 = await testRunRepository.createTestRun(testDefinition.id);
const testRun1 = await testRunRepository.createTestRun(workflowUnderTest.id);
// Mark as running just to make a slight delay between the runs
await testRunRepository.markAsRunning(testRun1.id, 10);
const testRun2 = await testRunRepository.createTestRun(testDefinition.id);
await testRunRepository.markAsRunning(testRun1.id);
const testRun2 = await testRunRepository.createTestRun(workflowUnderTest.id);

// Fetch the first page
const resp = await authOwnerAgent.get(
`/evaluation/test-definitions/${testDefinition.id}/runs?take=1`,
);
const resp = await authOwnerAgent.get(`/workflows/${workflowUnderTest.id}/test-runs?take=1`);

expect(resp.statusCode).toBe(200);
expect(resp.body.data).toEqual([
expect.objectContaining({
id: testRun2.id,
status: 'new',
testDefinitionId: testDefinition.id,
runAt: null,
completedAt: null,
}),
@@ -118,7 +99,7 @@ describe('GET /evaluation/test-definitions/:testDefinitionId/runs', () => {

// Fetch the second page
const resp2 = await authOwnerAgent.get(
`/evaluation/test-definitions/${testDefinition.id}/runs?take=1&skip=1`,
`/workflows/${workflowUnderTest.id}/test-runs?take=1&skip=1`,
);

expect(resp2.statusCode).toBe(200);
@@ -126,7 +107,6 @@ describe('GET /evaluation/test-definitions/:testDefinitionId/runs', () => {
expect.objectContaining({
id: testRun1.id,
status: 'running',
testDefinitionId: testDefinition.id,
runAt: expect.any(String),
completedAt: null,
}),
@@ -134,13 +114,13 @@ describe('GET /evaluation/test-definitions/:testDefinitionId/runs', () => {
});
});

describe('GET /evaluation/test-definitions/:testDefinitionId/runs/:id', () => {
test('should retrieve test run for a test definition', async () => {
describe('GET /workflows/:workflowId/test-runs/:id', () => {
test('should retrieve specific test run for a workflow', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(testDefinition.id);
const testRun = await testRunRepository.createTestRun(workflowUnderTest.id);

const resp = await authOwnerAgent.get(
`/evaluation/test-definitions/${testDefinition.id}/runs/${testRun.id}`,
`/workflows/${workflowUnderTest.id}/test-runs/${testRun.id}`,
);

expect(resp.statusCode).toBe(200);
@@ -148,7 +128,6 @@ describe('GET /evaluation/test-definitions/:testDefinitionId/runs/:id', () => {
expect.objectContaining({
id: testRun.id,
status: 'new',
testDefinitionId: testDefinition.id,
runAt: null,
completedAt: null,
}),
@@ -156,25 +135,21 @@ describe('GET /evaluation/test-definitions/:testDefinitionId/runs/:id', () => {
});

test('should return 404 if test run does not exist', async () => {
const resp = await authOwnerAgent.get(
`/evaluation/test-definitions/${testDefinition.id}/runs/123`,
);
const resp = await authOwnerAgent.get(`/workflows/${workflowUnderTest.id}/test-runs/123`);

expect(resp.statusCode).toBe(404);
});

test('should return 404 if user does not have access to test definition', async () => {
test('should return 404 if user does not have access to the workflow', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(otherTestDefinition.id);
const testRun = await testRunRepository.createTestRun(otherWorkflow.id);

const resp = await authOwnerAgent.get(
`/evaluation/test-definitions/${otherTestDefinition.id}/runs/${testRun.id}`,
);
const resp = await authOwnerAgent.get(`/workflows/${otherWorkflow.id}/test-runs/${testRun.id}`);

expect(resp.statusCode).toBe(404);
});

test('should retrieve test run for a test definition of a shared workflow', async () => {
test('should retrieve test run of a shared workflow', async () => {
const memberShell = await createUserShell('global:member');
const memberAgent = testServer.authAgentFor(memberShell);
const memberPersonalProject = await Container.get(
@@ -190,11 +165,11 @@ describe('GET /evaluation/test-definitions/:testDefinitionId/runs/:id', () => {

// Create a test run for the shared workflow
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(testDefinition.id);
const testRun = await testRunRepository.createTestRun(workflowUnderTest.id);

// Check if member can retrieve the test run of a shared workflow
const resp = await memberAgent.get(
`/evaluation/test-definitions/${testDefinition.id}/runs/${testRun.id}`,
`/workflows/${workflowUnderTest.id}/test-runs/${testRun.id}`,
);

expect(resp.statusCode).toBe(200);
@@ -209,10 +184,10 @@ describe('GET /evaluation/test-definitions/:testDefinitionId/runs/:id', () => {
describe('DELETE /evaluation/test-definitions/:testDefinitionId/runs/:id', () => {
test('should delete test run for a test definition', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(testDefinition.id);
const testRun = await testRunRepository.createTestRun(workflowUnderTest.id);

const resp = await authOwnerAgent.delete(
`/evaluation/test-definitions/${testDefinition.id}/runs/${testRun.id}`,
`/workflows/${workflowUnderTest.id}/test-runs/${testRun.id}`,
);

expect(resp.statusCode).toBe(200);
@@ -223,19 +198,17 @@ describe('DELETE /evaluation/test-definitions/:testDefinitionId/runs/:id', () =>
});

test('should return 404 if test run does not exist', async () => {
const resp = await authOwnerAgent.delete(
`/evaluation/test-definitions/${testDefinition.id}/runs/123`,
);
const resp = await authOwnerAgent.delete(`/workflows/${workflowUnderTest.id}/test-runs/123`);

expect(resp.statusCode).toBe(404);
});

test('should return 404 if user does not have access to test definition', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(otherTestDefinition.id);
const testRun = await testRunRepository.createTestRun(otherWorkflow.id);

const resp = await authOwnerAgent.delete(
`/evaluation/test-definitions/${otherTestDefinition.id}/runs/${testRun.id}`,
`/workflows/${otherWorkflow.id}/test-runs/${testRun.id}`,
);

expect(resp.statusCode).toBe(404);
@@ -245,12 +218,12 @@ describe('DELETE /evaluation/test-definitions/:testDefinitionId/runs/:id', () =>
describe('POST /evaluation/test-definitions/:testDefinitionId/runs/:id/cancel', () => {
test('should cancel test run', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(testDefinition.id);
const testRun = await testRunRepository.createTestRun(workflowUnderTest.id);

jest.spyOn(testRunRepository, 'markAsCancelled');

const resp = await authOwnerAgent.post(
`/evaluation/test-definitions/${testDefinition.id}/runs/${testRun.id}/cancel`,
`/workflows/${workflowUnderTest.id}/test-runs/${testRun.id}/cancel`,
);

expect(resp.statusCode).toBe(202);
@@ -261,24 +234,24 @@ describe('POST /evaluation/test-definitions/:testDefinitionId/runs/:id/cancel',

test('should return 404 if test run does not exist', async () => {
const resp = await authOwnerAgent.post(
`/evaluation/test-definitions/${testDefinition.id}/runs/123/cancel`,
`/workflows/${workflowUnderTest.id}/test-runs/123/cancel`,
);

expect(resp.statusCode).toBe(404);
});

test('should return 404 if test definition does not exist', async () => {
const resp = await authOwnerAgent.post('/evaluation/test-definitions/123/runs/123/cancel');
test('should return 404 if workflow does not exist', async () => {
const resp = await authOwnerAgent.post('/workflows/123/test-runs/123/cancel');

expect(resp.statusCode).toBe(404);
});

test('should return 404 if user does not have access to test definition', async () => {
test('should return 404 if user does not have access to the workflow', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(otherTestDefinition.id);
const testRun = await testRunRepository.createTestRun(otherWorkflow.id);

const resp = await authOwnerAgent.post(
`/evaluation/test-definitions/${otherTestDefinition.id}/runs/${testRun.id}/cancel`,
`/workflows/${otherWorkflow.id}/test-runs/${testRun.id}/cancel`,
);

expect(resp.statusCode).toBe(404);

@@ -283,7 +283,6 @@ export const setupTestServer = ({
break;

case 'evaluation':
await import('@/evaluation.ee/test-definitions.controller.ee');
await import('@/evaluation.ee/test-runs.controller.ee');
break;

@@ -52,9 +52,6 @@ export interface TestRunRecord {
errorCode?: string;
errorDetails?: Record<string, unknown>;
finalResult?: 'success' | 'error' | 'warning';
failedCases?: number;
passedCases?: number;
totalCases?: number;
}
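With failedCases, passedCases and totalCases removed from TestRunRecord, per-case results live on the test case execution records instead. A minimal sketch of how a consumer could rebuild those aggregates from per-case records, assuming a simple status field on each case (the record shape below is illustrative, not the actual API):

// Illustrative only: derive the aggregate counts that previously lived on TestRunRecord.
interface TestCaseRecord {
	status: 'success' | 'error' | string; // assumed status values
}

function summarizeTestCases(cases: TestCaseRecord[]) {
	const passedCases = cases.filter((c) => c.status === 'success').length;
	const failedCases = cases.filter((c) => c.status === 'error').length;
	return { totalCases: cases.length, passedCases, failedCases };
}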

interface GetTestRunParams {

@@ -89,7 +89,7 @@ const runSummaries = computed(() => {
class="mr-2xs"
/>
<template v-if="row.status === 'error'">
{{ row.failedCases }} {{ row.status }}
{{ row.status }}
</template>
<template v-else>
{{ row.status }}

@@ -74,9 +74,6 @@ const TEST_RUN: TestRunRecord = {
updatedAt: '2024-01-01',
runAt: '2024-01-01',
completedAt: '2024-01-01',
failedCases: 0,
totalCases: 1,
passedCases: 1,
};

describe('testDefinition.store.ee', () => {

@@ -85,9 +85,6 @@ describe('TestDefinitionEditView', () => {
createdAt: '2023-01-01',
updatedAt: '2023-01-01',
completedAt: '2023-01-01',
failedCases: 0,
passedCases: 1,
totalCases: 1,
},
};