mirror of
https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git
synced 2025-12-16 09:36:44 +00:00
ci: Test container enhancements (#17008)
@@ -46,6 +46,8 @@
     "test:backend": "turbo run test:backend --concurrency=1",
     "test:frontend": "turbo run test:frontend --concurrency=1",
     "test:nodes": "turbo run test:nodes --concurrency=1",
+    "test:with:docker": "pnpm --filter=n8n-playwright run test:standard",
+    "test:show:report": "pnpm --filter=n8n-playwright exec playwright show-report",
     "watch": "turbo run watch --parallel",
     "webhook": "./packages/cli/bin/n8n webhook",
     "worker": "./packages/cli/bin/n8n worker"
@@ -24,10 +24,10 @@ When started, you'll see:
 ### Development with Container Reuse

 ```bash
 # Enable container reuse (faster restarts)
-pnpm run dev              # SQLite
-pnpm run dev:postgres     # PostgreSQL
-pnpm run dev:queue        # Queue mode
-pnpm run dev:multi-main   # Multiple main instances
+pnpm run stack              # SQLite
+pnpm run stack:postgres     # PostgreSQL
+pnpm run stack:queue        # Queue mode
+pnpm run stack:multi-main   # Multiple main instances
 ```

 ### Queue Mode with Scaling
@@ -133,7 +133,7 @@ await stack.stop();
 ### Multi-Main with Load Balancer

 ```
              ┌──────────────┐
-        ────│    nginx     │ ← Entry point
+        ────│              │ ← Entry point
        /    │ Load Balancer│
 ┌─────────────┐ └──────────────┘
 │ n8n-main-1 │────┐
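For orientation, a minimal sketch of how the topology above is requested through the stack helper touched in this commit; the import path is taken from the Playwright fixtures later in the diff, and `mains: 2, workers: 1` are illustrative values:

```ts
import { createN8NStack } from 'n8n-containers/n8n-test-container-creation';

async function main() {
  // Two main instances behind the load balancer, one worker pulling from the queue.
  const stack = await createN8NStack({ queueMode: { mains: 2, workers: 1 } });

  // With more than one main, baseUrl points at the load balancer's mapped port.
  console.log(`n8n is reachable at ${stack.baseUrl}`);

  await stack.stop();
}

void main();
```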
packages/testing/containers/eslint.config.mjs (new file, 19 lines)
@@ -0,0 +1,19 @@
+import { defineConfig } from 'eslint/config';
+import { baseConfig } from '@n8n/eslint-config/base';
+
+export default defineConfig(baseConfig, {
+  rules: {
+    '@typescript-eslint/naming-convention': [
+      'error',
+      // Add exception for Docker Compose labels
+      {
+        selector: 'objectLiteralProperty',
+        format: null, // Allow any format
+        filter: {
+          regex: '^com\\.docker\\.',
+          match: true,
+        },
+      },
+    ],
+  },
+});
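A minimal sketch of what the naming-convention exception is for: dotted Docker Compose label keys used as object-literal properties, as in the `withLabels()` calls elsewhere in this commit. The image and label values here are illustrative only:

```ts
import { GenericContainer } from 'testcontainers';

// Without the filter above, these dotted keys would trip @typescript-eslint/naming-convention.
const caddy = new GenericContainer('caddy:2-alpine').withLabels({
  'com.docker.compose.project': 'n8n-stack-example',
  'com.docker.compose.service': 'caddy-lb',
});

void caddy;
```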
@@ -1,4 +1,5 @@
-import { ImagePullPolicy, PullPolicy } from 'testcontainers';
+import type { ImagePullPolicy } from 'testcontainers';
+import { PullPolicy } from 'testcontainers';

 /**
  * Custom pull policy for n8n images:
@@ -8,7 +9,7 @@ import { ImagePullPolicy, PullPolicy } from 'testcontainers';
 export class N8nImagePullPolicy implements ImagePullPolicy {
   constructor(private readonly image: string) {}

-  public shouldPull(): boolean {
+  shouldPull(): boolean {
     if (this.image === 'n8nio/n8n:local') {
       return false;
     }
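For reference, a minimal sketch of how this policy plugs into testcontainers, matching the `.withPullPolicy(...)` call made in the stack-creation file later in this diff. The fallback behaviour for non-local tags sits outside the hunk above, so the comment hedges on it:

```ts
import { GenericContainer } from 'testcontainers';

import { N8nImagePullPolicy } from './n8n-image-pull-policy';

// 'n8nio/n8n:local' is never pulled (it is built locally); other tags defer to whatever
// the rest of shouldPull() decides, which is not shown in the hunk above.
const image = process.env.N8N_DOCKER_IMAGE ?? 'n8nio/n8n:local';
const container = new GenericContainer(image).withPullPolicy(new N8nImagePullPolicy(image));

void container;
```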
@@ -1,9 +1,9 @@
 #!/usr/bin/env tsx
 import { parseArgs } from 'node:util';

+import { DockerImageNotFoundError } from './docker-image-not-found-error';
 import type { N8NConfig, N8NStack } from './n8n-test-container-creation';
 import { createN8NStack } from './n8n-test-container-creation';
-import { DockerImageNotFoundError } from './docker-image-not-found-error';

 // ANSI colors for terminal output
 const colors = {
@@ -165,6 +165,7 @@ function displayConfig(config: N8NConfig) {
   log.info(`Docker image: ${dockerImage}`);

   // Determine actual database
+  // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
   const usePostgres = config.postgres || config.queueMode;
   log.info(`Database: ${usePostgres ? 'PostgreSQL' : 'SQLite'}`);
@@ -175,7 +176,7 @@ function displayConfig(config: N8NConfig) {
       log.info('(PostgreSQL automatically enabled for queue mode)');
     }
     if (qm.mains && qm.mains > 1) {
-      log.info('(nginx load balancer will be configured)');
+      log.info('(load balancer will be configured)');
     }
   } else {
     log.info('Queue mode: disabled');
@@ -186,7 +187,7 @@ function displayConfig(config: N8NConfig) {
   if (envCount > 0) {
     log.info(`Environment variables: ${envCount} custom variable(s)`);
     Object.entries(config.env).forEach(([key, value]) => {
-      console.log(`   ${key}=${value as string}`);
+      console.log(`   ${key}=${value}`);
     });
   }
 }
@@ -3,47 +3,48 @@
  * This file provides a complete n8n container stack for testing with support for:
  * - Single instances (SQLite or PostgreSQL)
  * - Queue mode with Redis
- * - Multi-main instances with nginx load balancing
+ * - Multi-main instances with load balancing
  * - Parallel execution (multiple stacks running simultaneously)
  *
  * Key features for parallel execution:
- * - Dynamic port allocation to avoid conflicts (handled by testcontainers)
- * - WebSocket support through nginx load balancer
+ * - Dynamic port allocation to avoid conflicts (handled by testcontainers or get-port)
  */

+import getPort from 'get-port';
 import assert from 'node:assert';
 import type { StartedNetwork, StartedTestContainer } from 'testcontainers';
 import { GenericContainer, Network, Wait } from 'testcontainers';

-import {
-  setupNginxLoadBalancer,
-  setupPostgres,
-  setupRedis,
-} from './n8n-test-container-dependencies';
 import { DockerImageNotFoundError } from './docker-image-not-found-error';
 import { N8nImagePullPolicy } from './n8n-image-pull-policy';
+import {
+  setupPostgres,
+  setupRedis,
+  setupCaddyLoadBalancer,
+  pollContainerHttpEndpoint,
+} from './n8n-test-container-dependencies';
+import { createSilentLogConsumer } from './n8n-test-container-utils';

 // --- Constants ---

 const POSTGRES_IMAGE = 'postgres:16-alpine';
 const REDIS_IMAGE = 'redis:7-alpine';
-const NGINX_IMAGE = 'nginx:stable';
+const CADDY_IMAGE = 'caddy:2-alpine';
 const N8N_E2E_IMAGE = 'n8nio/n8n:local';

 // Default n8n image (can be overridden via N8N_DOCKER_IMAGE env var)
-const N8N_IMAGE = process.env.N8N_DOCKER_IMAGE || N8N_E2E_IMAGE;
+const N8N_IMAGE = process.env.N8N_DOCKER_IMAGE ?? N8N_E2E_IMAGE;

 // Base environment for all n8n instances
 const BASE_ENV: Record<string, string> = {
   N8N_LOG_LEVEL: 'debug',
   N8N_ENCRYPTION_KEY: 'test-encryption-key',
-  E2E_TESTS: 'true',
+  E2E_TESTS: 'false',
   QUEUE_HEALTH_CHECK_ACTIVE: 'true',
   N8N_DIAGNOSTICS_ENABLED: 'false',
   N8N_RUNNERS_ENABLED: 'true',
   NODE_ENV: 'development', // If this is set to test, the n8n container will not start, insights module is not found??
 };

 const MULTI_MAIN_LICENSE = {
-  N8N_LICENSE_TENANT_ID: '1001',
+  N8N_LICENSE_TENANT_ID: process.env.N8N_LICENSE_TENANT_ID ?? '1001',
   N8N_LICENSE_ACTIVATION_KEY: process.env.N8N_LICENSE_ACTIVATION_KEY ?? '',
 };
@@ -89,7 +90,7 @@ export interface N8NStack {
  * const stack = await createN8NStack({ queueMode: true });
  *
  * @example
- * // Custom scaling
+ * // Custom scaling (uses load balancer for multiple mains)
  * const stack = await createN8NStack({
  *   queueMode: { mains: 3, workers: 5 },
  *   env: { N8N_ENABLED_MODULES: 'insights' }
@@ -99,22 +100,34 @@ export async function createN8NStack(config: N8NConfig = {}): Promise<N8NStack>
   const { postgres = false, queueMode = false, env = {}, projectName } = config;
   const queueConfig = normalizeQueueConfig(queueMode);
   const usePostgres = postgres || !!queueConfig;
-  const uniqueProjectName = projectName ?? `n8n-${Math.random().toString(36).substring(7)}`;
+  const uniqueProjectName = projectName ?? `n8n-stack-${Math.random().toString(36).substring(7)}`;
   const containers: StartedTestContainer[] = [];

+  const mainCount = queueConfig?.mains ?? 1;
+  const needsLoadBalancer = mainCount > 1;
+  const needsNetwork = usePostgres || !!queueConfig || needsLoadBalancer;
+
   let network: StartedNetwork | undefined;
-  let nginxContainer: StartedTestContainer | undefined;
-
-  let environment: Record<string, string> = { ...BASE_ENV, ...env };
-
-  if (usePostgres || queueConfig) {
+  if (needsNetwork) {
     network = await new Network().start();
   }

+  let environment: Record<string, string> = {
+    ...BASE_ENV,
+    ...env,
+  };
+
+  // Add proxy hops only if using load balancer
+  if (needsLoadBalancer) {
+    environment.N8N_PROXY_HOPS = '1';
+  }
+
   if (usePostgres) {
+    assert(network, 'Network should be created for postgres');
     const postgresContainer = await setupPostgres({
       postgresImage: POSTGRES_IMAGE,
       projectName: uniqueProjectName,
-      network: network!,
+      network,
     });
     containers.push(postgresContainer.container);
     environment = {
@@ -131,10 +144,11 @@ export async function createN8NStack(config: N8NConfig = {}): Promise<N8NStack>
   }

   if (queueConfig) {
+    assert(network, 'Network should be created for queue mode');
     const redis = await setupRedis({
       redisImage: REDIS_IMAGE,
       projectName: uniqueProjectName,
-      network: network!,
+      network,
     });
     containers.push(redis);
     environment = {
@@ -142,6 +156,7 @@ export async function createN8NStack(config: N8NConfig = {}): Promise<N8NStack>
       EXECUTIONS_MODE: 'queue',
       QUEUE_BULL_REDIS_HOST: 'redis',
       QUEUE_BULL_REDIS_PORT: '6379',
+      OFFLOAD_MANUAL_EXECUTIONS_TO_WORKERS: 'true',
     };

     if (queueConfig.mains > 1) {
@@ -150,35 +165,59 @@ export async function createN8NStack(config: N8NConfig = {}): Promise<N8NStack>
     }
       environment = {
         ...environment,
-        N8N_PROXY_HOPS: '1',
         N8N_MULTI_MAIN_SETUP_ENABLED: 'true',
         ...MULTI_MAIN_LICENSE,
       };
     }
   }

   let baseUrl: string;

-  const instances = await createN8NInstances({
-    mainCount: queueConfig?.mains ?? 1,
-    workerCount: queueConfig?.workers ?? 0,
-    uniqueProjectName: uniqueProjectName,
-    environment,
-    network,
-  });
-  containers.push(...instances);
-
-  if (queueConfig && queueConfig.mains > 1) {
-    nginxContainer = await setupNginxLoadBalancer({
-      nginxImage: NGINX_IMAGE,
+  if (needsLoadBalancer) {
+    assert(network, 'Network should be created for load balancer');
+    const loadBalancerContainer = await setupCaddyLoadBalancer({
+      caddyImage: CADDY_IMAGE,
       projectName: uniqueProjectName,
-      mainInstances: instances.slice(0, queueConfig.mains),
-      network: network!,
+      mainCount,
+      network,
     });
-    containers.push(nginxContainer);
-    baseUrl = `http://localhost:${nginxContainer.getMappedPort(80)}`;
+    containers.push(loadBalancerContainer);
+
+    const loadBalancerPort = loadBalancerContainer.getMappedPort(80);
+    baseUrl = `http://localhost:${loadBalancerPort}`;
+    environment = {
+      ...environment,
+      WEBHOOK_URL: baseUrl,
+    };
+
+    const instances = await createN8NInstances({
+      mainCount,
+      workerCount: queueConfig?.workers ?? 0,
+      uniqueProjectName,
+      environment,
+      network,
+    });
+    containers.push(...instances);
+
+    // Wait for all containers to be ready behind the load balancer
+    await pollContainerHttpEndpoint(loadBalancerContainer, '/healthz/readiness');
   } else {
-    baseUrl = `http://localhost:${instances[0].getMappedPort(5678)}`;
+    const assignedPort = await getPort();
+    baseUrl = `http://localhost:${assignedPort}`;
+    environment = {
+      ...environment,
+      WEBHOOK_URL: baseUrl,
+      N8N_PORT: '5678', // Internal port
+    };
+
+    const instances = await createN8NInstances({
+      mainCount: 1,
+      workerCount: queueConfig?.workers ?? 0,
+      uniqueProjectName,
+      environment,
+      network,
+      directPort: assignedPort,
+    });
+    containers.push(...instances);
   }

   return {
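The single-main branch above reserves the host port before the container exists so that WEBHOOK_URL can be baked into the environment up front; a minimal sketch of that pattern, using the same `get-port` dependency added by this commit (function name here is illustrative):

```ts
import getPort from 'get-port';

// Reserve a free host port first so the public URL is known before the container starts;
// the same port is then bound explicitly via `directPort` in createN8NInstances().
async function reserveBaseUrl(): Promise<{ port: number; baseUrl: string }> {
  const port = await getPort();
  return { port, baseUrl: `http://localhost:${port}` };
}

void reserveBaseUrl;
```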
@@ -245,6 +284,7 @@ interface CreateInstancesOptions {
   uniqueProjectName: string;
   environment: Record<string, string>;
   network?: StartedNetwork;
+  directPort?: number;
 }

 async function createN8NInstances({
@@ -253,11 +293,15 @@ async function createN8NInstances({
   uniqueProjectName,
   environment,
   network,
+  /** The host port to use for the main instance */
+  directPort,
 }: CreateInstancesOptions): Promise<StartedTestContainer[]> {
   const instances: StartedTestContainer[] = [];

   // Create main instances
   for (let i = 1; i <= mainCount; i++) {
     const name = mainCount > 1 ? `${uniqueProjectName}-n8n-main-${i}` : `${uniqueProjectName}-n8n`;
+    const networkAlias = mainCount > 1 ? name : `${uniqueProjectName}-n8n-main-1`;
     const container = await createN8NContainer({
       name,
       uniqueProjectName,
@@ -265,18 +309,20 @@ async function createN8NInstances({
       network,
       isWorker: false,
       instanceNumber: i,
-      networkAlias: mainCount > 1 ? name : undefined,
+      networkAlias,
+      directPort: i === 1 ? directPort : undefined, // Only first main gets direct port
     });
     instances.push(container);
   }

   // Create worker instances
   for (let i = 1; i <= workerCount; i++) {
     const name = `${uniqueProjectName}-n8n-worker-${i}`;
     const container = await createN8NContainer({
       name,
       uniqueProjectName,
       environment,
-      network: network!,
+      network,
       isWorker: true,
       instanceNumber: i,
     });
@@ -294,6 +340,7 @@ interface CreateContainerOptions {
   isWorker: boolean;
   instanceNumber: number;
   networkAlias?: string;
+  directPort?: number;
 }

 async function createN8NContainer({
@@ -304,7 +351,10 @@ async function createN8NContainer({
   isWorker,
   instanceNumber,
   networkAlias,
+  directPort,
 }: CreateContainerOptions): Promise<StartedTestContainer> {
+  const { consumer, throwWithLogs } = createSilentLogConsumer();
+
   let container = new GenericContainer(N8N_IMAGE);

   container = container
@@ -316,14 +366,10 @@ async function createN8NContainer({
     })
     .withPullPolicy(new N8nImagePullPolicy(N8N_IMAGE))
-    .withName(name)
+    .withLogConsumer(consumer)
+    .withName(name)
     .withReuse();

-  if (isWorker) {
-    container = container.withCommand(['worker']);
-  } else {
-    container = container.withExposedPorts(5678).withWaitStrategy(N8N_WAIT_STRATEGY);
-  }
-
   if (network) {
     container = container.withNetwork(network);
     if (networkAlias) {
@@ -331,12 +377,30 @@ async function createN8NContainer({
     }
   }

+  if (isWorker) {
+    container = container.withCommand(['worker']);
+  } else {
+    container = container.withExposedPorts(5678).withWaitStrategy(N8N_WAIT_STRATEGY);
+
+    if (directPort) {
+      container = container.withExposedPorts({ container: 5678, host: directPort });
+    }
+  }
+
   try {
     return await container.start();
-  } catch (error) {
-    if (error instanceof Error && 'statusCode' in error && error.statusCode === 404) {
+  } catch (error: unknown) {
+    if (
+      error instanceof Error &&
+      'statusCode' in error &&
+      (error as Error & { statusCode: number }).statusCode === 404
+    ) {
       throw new DockerImageNotFoundError(name, error);
     }
-    throw error;
+
+    console.error(`Container "${name}" failed to start!`);
+    console.error('Original error:', error instanceof Error ? error.message : String(error));
+
+    return throwWithLogs(error);
   }
 }
@@ -3,6 +3,9 @@ import { RedisContainer } from '@testcontainers/redis';
 import type { StartedNetwork, StartedTestContainer } from 'testcontainers';
 import { GenericContainer, Wait } from 'testcontainers';

+import { createSilentLogConsumer } from './n8n-test-container-utils';
+import { setTimeout as wait } from 'node:timers/promises';
+
 export async function setupRedis({
   redisImage,
   projectName,
@@ -73,35 +76,45 @@ export async function setupPostgres({
 export async function setupNginxLoadBalancer({
   nginxImage,
   projectName,
-  mainInstances,
+  mainCount,
   network,
+  port,
 }: {
   nginxImage: string;
   projectName: string;
-  mainInstances: StartedTestContainer[];
+  mainCount: number;
   network: StartedNetwork;
+  port: number;
 }): Promise<StartedTestContainer> {
   // Generate upstream server entries from the list of main instances.
-  const upstreamServers = mainInstances
-    .map((_, index) => `    server ${projectName}-n8n-main-${index + 1}:5678;`)
-    .join('\n');
+  const upstreamServers = Array.from(
+    { length: mainCount },
+    (_, index) => `    server ${projectName}-n8n-main-${index + 1}:5678;`,
+  ).join('\n');

   // Build the NGINX configuration with dynamic upstream servers.
   // This allows us to have the port allocation be dynamic.
   const nginxConfig = buildNginxConfig(upstreamServers);

-  return await new GenericContainer(nginxImage)
-    .withNetwork(network)
-    .withExposedPorts(80)
-    .withCopyContentToContainer([{ content: nginxConfig, target: '/etc/nginx/nginx.conf' }])
-    .withWaitStrategy(Wait.forListeningPorts())
-    .withLabels({
-      'com.docker.compose.project': projectName,
-      'com.docker.compose.service': 'nginx-lb',
-    })
-    .withName(`${projectName}-nginx-lb`)
-    .withReuse()
-    .start();
+  const { consumer, throwWithLogs } = createSilentLogConsumer();
+
+  try {
+    return await new GenericContainer(nginxImage)
+      .withNetwork(network)
+      .withExposedPorts({ container: 80, host: port })
+      .withCopyContentToContainer([{ content: nginxConfig, target: '/etc/nginx/nginx.conf' }])
+      .withWaitStrategy(Wait.forListeningPorts())
+      .withLabels({
+        'com.docker.compose.project': projectName,
+        'com.docker.compose.service': 'nginx-lb',
+      })
+      .withName(`${projectName}-nginx-lb`)
+      .withReuse()
+      .withLogConsumer(consumer)
+      .start();
+  } catch (error) {
+    return throwWithLogs(error);
+  }
 }

 /**
@@ -184,6 +197,125 @@ function buildNginxConfig(upstreamServers: string): string {
 }`;
 }

+/**
+ * Builds Caddy configuration for load balancing n8n instances
+ * @param upstreamServers Array of upstream server addresses
+ * @returns The complete Caddyfile configuration as a string
+ */
+function buildCaddyConfig(upstreamServers: string[]): string {
+  const backends = upstreamServers.join(' ');
+  return `
+:80 {
+  # Reverse proxy with load balancing
+  reverse_proxy ${backends} {
+    # Enable sticky sessions using cookie
+    lb_policy cookie
+
+    # Health check (optional)
+    health_uri /healthz
+    health_interval 10s
+
+    # Timeouts
+    transport http {
+      dial_timeout 60s
+      read_timeout 60s
+      write_timeout 60s
+    }
+  }
+
+  # Set max request body size
+  request_body {
+    max_size 50MB
+  }
+}`;
+}
+
+/**
+ * Setup Caddy for multi-main instances
+ * @param caddyImage The Docker image for Caddy
+ * @param projectName Project name for container naming
+ * @param mainCount Number of main instances
+ * @param network The shared Docker network
+ * @returns A promise that resolves to the started Caddy container
+ */
+export async function setupCaddyLoadBalancer({
+  caddyImage = 'caddy:2-alpine',
+  projectName,
+  mainCount,
+  network,
+}: {
+  caddyImage?: string;
+  projectName: string;
+  mainCount: number;
+  network: StartedNetwork;
+}): Promise<StartedTestContainer> {
+  // Generate upstream server addresses
+  const upstreamServers = Array.from(
+    { length: mainCount },
+    (_, index) => `${projectName}-n8n-main-${index + 1}:5678`,
+  );
+
+  // Build the Caddy configuration
+  const caddyConfig = buildCaddyConfig(upstreamServers);
+
+  const { consumer, throwWithLogs } = createSilentLogConsumer();
+
+  try {
+    return await new GenericContainer(caddyImage)
+      .withNetwork(network)
+      .withExposedPorts(80)
+      .withCopyContentToContainer([{ content: caddyConfig, target: '/etc/caddy/Caddyfile' }])
+      .withWaitStrategy(Wait.forListeningPorts())
+      .withLabels({
+        'com.docker.compose.project': projectName,
+        'com.docker.compose.service': 'caddy-lb',
+      })
+      .withName(`${projectName}-caddy-lb`)
+      .withReuse()
+      .withLogConsumer(consumer)
+      .start();
+  } catch (error) {
+    return throwWithLogs(error);
+  }
+}
+
+/**
+ * Polls a container's HTTP endpoint until it returns a 200 status.
+ * Logs a warning if the endpoint does not return 200 within the specified timeout.
+ *
+ * @param container The started container.
+ * @param endpoint The HTTP health check endpoint (e.g., '/healthz/readiness').
+ * @param timeoutMs Total timeout in milliseconds (default: 60,000ms).
+ */
+export async function pollContainerHttpEndpoint(
+  container: StartedTestContainer,
+  endpoint: string,
+  timeoutMs: number = 60000,
+): Promise<void> {
+  const startTime = Date.now();
+  const url = `http://${container.getHost()}:${container.getFirstMappedPort()}${endpoint}`;
+  const retryIntervalMs = 1000;
+
+  while (Date.now() - startTime < timeoutMs) {
+    try {
+      const response = await fetch(url);
+      if (response.status === 200) {
+        return;
+      }
+    } catch (error) {
+      // Don't log errors, just retry
+    }
+
+    await wait(retryIntervalMs);
+  }
+
+  console.error(
+    `WARNING: HTTP endpoint at ${url} did not return 200 within ${
+      timeoutMs / 1000
+    } seconds. Proceeding with caution.`,
+  );
+}
+
+// TODO: Look at Ollama container?
+// TODO: Look at MariaDB container?
+// TODO: Look at MockServer container, could we use this for mocking out external services?
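A hypothetical wiring of the two new helpers above, mirroring what `createN8NStack()` does: Caddy satisfies `Wait.forListeningPorts()` as soon as it starts, so readiness of the n8n mains behind it is checked separately with the readiness poll (project name and main count are illustrative):

```ts
import { Network } from 'testcontainers';

import {
  pollContainerHttpEndpoint,
  setupCaddyLoadBalancer,
} from './n8n-test-container-dependencies';

async function startLoadBalancer() {
  const network = await new Network().start();
  const lb = await setupCaddyLoadBalancer({
    projectName: 'n8n-stack-example', // illustrative project name
    mainCount: 2,
    network,
  });

  // Warns (rather than throws) if /healthz/readiness never returns 200 within the default 60 s.
  await pollContainerHttpEndpoint(lb, '/healthz/readiness');
  return lb;
}

void startLoadBalancer;
```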
@@ -1,3 +1,4 @@
+import { setTimeout as wait } from 'node:timers/promises';
 import type { StartedTestContainer, StoppedTestContainer } from 'testcontainers';

 export interface LogMatch {
@@ -118,14 +119,15 @@ export class ContainerTestHelpers {

     while (Date.now() - startTime < timeoutMs) {
       iteration++;
-      await this.sleep(ContainerTestHelpers.POLL_INTERVAL_MS);
+      await wait(ContainerTestHelpers.POLL_INTERVAL_MS);

       // Capture the timestamp for this iteration to avoid race conditions
       const checkTimestamp = currentCheckTime;

       // Check all containers concurrently
-      const matchPromises = targetContainers.map((container) =>
-        this.checkContainerForMatch(container, messageRegex, checkTimestamp),
+      const matchPromises = targetContainers.map(
+        async (container) =>
+          await this.checkContainerForMatch(container, messageRegex, checkTimestamp),
       );

       const results = await Promise.all(matchPromises);
@@ -228,6 +230,7 @@ export class ContainerTestHelpers {
    * Strip ANSI escape codes from log text
    */
   private stripAnsiCodes(text: string): string {
+    // eslint-disable-next-line no-control-regex
     return text.replace(/\x1B\[[0-9;]*[mGKH]/g, '');
   }
@@ -248,7 +251,7 @@ export class ContainerTestHelpers {
     since?: number,
   ): Promise<StreamLogMatch | null> {
     try {
-      const logOptions: any = {};
+      const logOptions: { since?: number } = {};
       if (since !== undefined) {
         logOptions.since = since;
       }
@@ -311,7 +314,7 @@ export class ContainerTestHelpers {
     since?: number,
   ): Promise<string> {
     try {
-      const logOptions: any = {};
+      const logOptions: { since?: number } = {};
       if (since !== undefined) {
         logOptions.since = since;
       }
@@ -370,8 +373,4 @@ export class ContainerTestHelpers {

     return matches;
   }
-
-  private sleep(ms: number): Promise<void> {
-    return new Promise((resolve) => setTimeout(resolve, ms));
-  }
 }
packages/testing/containers/n8n-test-container-utils.ts (new file, 26 lines)
@@ -0,0 +1,26 @@
+import type { Readable } from 'stream';
+
+/**
+ * Create a log consumer that does not log to the console
+ * @returns A tuple containing the log consumer and a function to throw an error with logs
+ */
+export function createSilentLogConsumer() {
+  const logs: string[] = [];
+
+  const consumer = (stream: Readable) => {
+    stream.on('data', (chunk: Buffer | string) => {
+      logs.push(chunk.toString().trim());
+    });
+  };
+
+  const throwWithLogs = (error: unknown): never => {
+    if (logs.length > 0) {
+      console.error('\n--- Container Logs ---');
+      console.error(logs.join('\n'));
+      console.error('---------------------\n');
+    }
+    throw error;
+  };
+
+  return { consumer, throwWithLogs };
+}
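A minimal sketch of the intended usage pattern for this helper, matching how the container builders in this commit apply it (the image name is purely illustrative): logs stay out of the console on success and are printed only when `start()` rejects.

```ts
import { GenericContainer } from 'testcontainers';

import { createSilentLogConsumer } from './n8n-test-container-utils';

async function startQuietly() {
  const { consumer, throwWithLogs } = createSilentLogConsumer();
  try {
    return await new GenericContainer('alpine:3.20').withLogConsumer(consumer).start();
  } catch (error) {
    return throwWithLogs(error); // dumps the buffered logs, then rethrows
  }
}

void startQuietly;
```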
@@ -7,11 +7,15 @@
   "scripts": {
     "stack": "tsx ./n8n-start-stack.ts",
     "stack:help": "tsx ./n8n-start-stack.ts --help",
-    "dev": "TESTCONTAINERS_REUSE_ENABLE=true npm run stack",
-    "dev:postgres": "TESTCONTAINERS_REUSE_ENABLE=true npm run stack -- --postgres",
-    "dev:queue": "TESTCONTAINERS_REUSE_ENABLE=true npm run stack -- --queue",
-    "dev:multi-main": "TESTCONTAINERS_REUSE_ENABLE=true npm run stack -- --mains 2 --workers 1",
-    "stack:clean:all": "docker rm -f $(docker ps -aq --filter 'name=n8n-*') 2>/dev/null || true && docker network prune -f"
+    "stack:sqlite": "TESTCONTAINERS_REUSE_ENABLE=true npm run stack",
+    "stack:postgres": "TESTCONTAINERS_REUSE_ENABLE=true npm run stack -- --postgres",
+    "stack:queue": "TESTCONTAINERS_REUSE_ENABLE=true npm run stack -- --queue",
+    "stack:multi-main": "TESTCONTAINERS_REUSE_ENABLE=true npm run stack -- --mains 2 --workers 1",
+    "stack:clean:containers": "docker ps -aq --filter 'name=n8n-stack-*' | xargs -r docker rm -f 2>/dev/null",
+    "stack:clean:networks": "docker network ls --filter 'label=org.testcontainers=true' -q | xargs -r docker network rm 2>/dev/null",
+    "stack:clean:all": "pnpm run stack:clean:containers && pnpm run stack:clean:networks",
+    "lint": "eslint . --quiet",
+    "lintfix": "eslint . --fix"
   },
   "keywords": [],
   "author": "",
@@ -19,6 +23,7 @@
   "devDependencies": {
     "@testcontainers/postgresql": "^11.0.3",
     "@testcontainers/redis": "^11.0.3",
+    "get-port": "^7.1.0",
     "testcontainers": "^11.0.3"
   }
 }
packages/testing/containers/tsconfig.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+  "extends": "../../../tsconfig.json",
+  "compilerOptions": {
+    "sourceMap": false,
+    "declaration": false,
+    "lib": ["esnext", "dom"],
+    "types": ["node"]
+  },
+  "include": ["**/*.ts"],
+  "exclude": ["**/dist/**/*", "**/node_modules/**/*"],
+  "references": [{ "path": "../../workflow/tsconfig.build.esm.json" }]
+}
@@ -19,7 +19,7 @@ export class ProjectComposer {
     const projectNameUnique = projectName ?? `Project ${Date.now()}`;
     await this.n8n.projectSettings.fillProjectName(projectNameUnique);
     await this.n8n.projectSettings.clickSaveButton();
-    const projectId = await this.extractProjectIdFromPage('projects', 'settings');
+    const projectId = this.extractProjectIdFromPage('projects', 'settings');
     return { projectName: projectNameUnique, projectId };
   }
@@ -50,7 +50,7 @@ export class ProjectComposer {
     return match?.[1] ?? '';
   }

-  async extractProjectIdFromPage(beforeWord: string, afterWord: string): Promise<string> {
+  extractProjectIdFromPage(beforeWord: string, afterWord: string): string {
     return this.extractIdFromUrl(this.n8n.page.url(), beforeWord, afterWord);
   }
 }
@@ -1,4 +1,4 @@
-/* eslint-disable import/no-extraneous-dependencies */
+/* eslint-disable import-x/no-extraneous-dependencies */
 import type { FrontendSettings } from '@n8n/api-types';
 import type { BrowserContext, Route } from '@playwright/test';
 import cloneDeep from 'lodash/cloneDeep';
@@ -1,21 +1,10 @@
-const sharedOptions = require('@n8n/eslint-config/shared');
-
-/**
- * @type {import('@types/eslint').ESLint.ConfigData}
- */
-module.exports = {
-  extends: ['@n8n/eslint-config/base', 'plugin:playwright/recommended'],
-
-  ...sharedOptions(__dirname),
-
-  plugins: ['playwright'],
-
-  env: {
-    node: true,
-  },
+import { defineConfig, globalIgnores } from 'eslint/config';
+import { baseConfig } from '@n8n/eslint-config/base';
+import playwrightPlugin from 'eslint-plugin-playwright';
+
+export default defineConfig(baseConfig, playwrightPlugin.configs['flat/recommended'], {
+  ignores: ['playwright-report/**'],
   rules: {
     // TODO: remove these rules
     '@typescript-eslint/no-explicit-any': 'off',
     '@typescript-eslint/no-unsafe-argument': 'off',
     '@typescript-eslint/no-unsafe-assignment': 'off',
@@ -28,9 +17,9 @@ module.exports = {
     'n8n-local-rules/no-uncaught-json-parse': 'off',
     'playwright/expect-expect': 'warn',
     'playwright/max-nested-describe': 'warn',
-    'playwright/no-conditional-in-test': 'warn',
+    'playwright/no-conditional-in-test': 'error',
     'playwright/no-skipped-test': 'warn',
-    'import/no-extraneous-dependencies': [
+    'import-x/no-extraneous-dependencies': [
       'error',
       {
         devDependencies: ['**/tests/**', '**/e2e/**', '**/playwright/**'],
@@ -38,4 +27,4 @@ module.exports = {
       },
     ],
   },
-};
+});
@@ -2,6 +2,7 @@ import { test as base, expect, type TestInfo } from '@playwright/test';
 import type { N8NStack } from 'n8n-containers/n8n-test-container-creation';
 import { createN8NStack } from 'n8n-containers/n8n-test-container-creation';
 import { ContainerTestHelpers } from 'n8n-containers/n8n-test-container-helpers';
+import { setTimeout as wait } from 'node:timers/promises';

 import { setupDefaultInterceptors } from '../config/intercepts';
 import { n8nPage } from '../pages/n8nPage';
@@ -28,6 +29,7 @@ interface ContainerConfig {
     mains: number;
     workers: number;
   };
+  env?: Record<string, string>;
 }

 /**
@@ -40,6 +42,11 @@ export const test = base.extend<TestFixtures, WorkerFixtures>({
   containerConfig: [
     async ({}, use, testInfo: TestInfo) => {
       const config = (testInfo.project.use?.containerConfig as ContainerConfig) || {};
+      config.env = {
+        ...config.env,
+        E2E_TESTS: 'true',
+      };
+
       await use(config);
     },
     { scope: 'worker' },
@@ -60,7 +67,7 @@
     const container = await createN8NStack(containerConfig);

     // TODO: Remove this once we have a better way to wait for the container to be ready (e.g. healthcheck)
-    await new Promise((resolve) => setTimeout(resolve, 5000));
+    await wait(3000);

     console.log(`Container URL: ${container.baseUrl}`);
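A small sketch of the env merge the worker-scoped `containerConfig` fixture now performs. The object standing in for `testInfo.project.use` and its values are purely illustrative; the real project definitions live in the suite's Playwright config and may differ:

```ts
interface ContainerConfig {
  postgres?: boolean;
  env?: Record<string, string>;
}

// Illustrative stand-in for testInfo.project.use from a Playwright project definition.
const use: { containerConfig?: ContainerConfig } = {
  containerConfig: { postgres: true, env: { N8N_ENABLED_MODULES: 'insights' } },
};

// Same merge as the fixture above: project-level env is preserved, but E2E_TESTS is
// always forced to 'true' before the stack is created (BASE_ENV now defaults it to 'false').
const config: ContainerConfig = use.containerConfig ?? {};
config.env = { ...config.env, E2E_TESTS: 'true' };
```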
@@ -39,5 +39,5 @@ async function globalSetup() {
   console.log('🏁 Global setup completed');
 }

-// eslint-disable-next-line import/no-default-export
+// eslint-disable-next-line import-x/no-default-export
 export default globalSetup;
@@ -18,6 +18,7 @@
   "devDependencies": {
     "@currents/playwright": "1.14.1",
     "@playwright/test": "1.53.0",
+    "@types/lodash": "catalog:",
     "eslint-plugin-playwright": "2.2.0",
     "n8n-containers": "workspace:*"
   }
@@ -11,17 +11,17 @@ export class ExecutionsPage extends BasePage {
     await this.clickButtonByName('Copy to editor');
   }

-  async getExecutionItems(): Promise<Locator> {
+  getExecutionItems(): Locator {
     return this.page.locator('div.execution-card');
   }

-  async getLastExecutionItem(): Promise<Locator> {
-    const executionItems = await this.getExecutionItems();
+  getLastExecutionItem(): Locator {
+    const executionItems = this.getExecutionItems();
     return executionItems.nth(0);
   }

   async clickLastExecutionItem(): Promise<void> {
-    const executionItem = await this.getLastExecutionItem();
+    const executionItem = this.getLastExecutionItem();
     await executionItem.click();
   }
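With the getters now synchronous and returning a `Locator`, they compose directly with Playwright's auto-waiting assertions. A minimal sketch, assuming an `ExecutionsPage` instance and an illustrative import path:

```ts
import { expect } from '@playwright/test';

import type { ExecutionsPage } from '../pages/ExecutionsPage';

export async function expectLatestExecutionVisible(executions: ExecutionsPage): Promise<void> {
  // No awaits needed to obtain the locators themselves; expect() handles the waiting.
  await expect(executions.getExecutionItems()).not.toHaveCount(0);
  await expect(executions.getLastExecutionItem()).toBeVisible();
}
```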
@@ -13,6 +13,7 @@ import { CanvasComposer } from '../composables/CanvasComposer';
 import { ProjectComposer } from '../composables/ProjectComposer';
 import { WorkflowComposer } from '../composables/WorkflowComposer';

+// eslint-disable-next-line @typescript-eslint/naming-convention
 export class n8nPage {
   readonly page: Page;
@@ -1,4 +1,4 @@
-/* eslint-disable import/no-default-export */
+/* eslint-disable import-x/no-default-export */
 import type { Project } from '@playwright/test';
 import { defineConfig } from '@playwright/test';
@@ -1,5 +1,6 @@
 // services/api-helper.ts
 import type { APIRequestContext } from '@playwright/test';
+import { setTimeout as wait } from 'node:timers/promises';

 import type { UserCredentials } from '../config/test-users';
 import {
@@ -115,7 +116,7 @@ export class ApiHelpers {
       throw new TestError(errorText);
     }
     // Adding small delay to ensure database is reset
-    await new Promise((resolve) => setTimeout(resolve, 1000));
+    await wait(1000);
   }

   async signin(role: UserRole, memberIndex: number = 0): Promise<LoginResponseData> {
pnpm-lock.yaml (generated, 50 lines)
@@ -37,7 +37,7 @@ catalogs:
     specifier: ^9.0.9
     version: 9.0.9
   '@types/lodash':
-    specifier: ^4.17.17
+    specifier: 4.17.17
     version: 4.17.17
   '@types/uuid':
     specifier: ^10.0.0
@@ -2901,6 +2901,9 @@ importers:
       '@testcontainers/redis':
         specifier: ^11.0.3
        version: 11.0.3
+      get-port:
+        specifier: ^7.1.0
+        version: 7.1.0
       testcontainers:
         specifier: ^11.0.3
         version: 11.0.3
@@ -2913,6 +2916,9 @@ importers:
       '@playwright/test':
         specifier: 1.53.0
         version: 1.53.0
+      '@types/lodash':
+        specifier: 'catalog:'
+        version: 4.17.17
       eslint-plugin-playwright:
         specifier: 2.2.0
         version: 2.2.0(eslint@9.29.0(jiti@1.21.7))
@@ -14222,6 +14228,7 @@ packages:
   supertest@7.1.1:
     resolution: {integrity: sha512-aI59HBTlG9e2wTjxGJV+DygfNLgnWbGdZxiA/sgrnNNikIW8lbDvCtF6RnhZoJ82nU7qv7ZLjrvWqCEm52fAmw==}
     engines: {node: '>=14.18.0'}
+    deprecated: Please upgrade to supertest v7.1.3+, see release notes at https://github.com/forwardemail/supertest/releases/tag/v7.1.3 - maintenance is supported by Forward Email @ https://forwardemail.net

   supports-color@5.5.0:
     resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
@@ -17746,7 +17753,7 @@ snapshots:
       '@currents/commit-info': 1.0.1-beta.0
       async-retry: 1.3.3
       axios: 1.10.0(debug@4.4.1)
-      axios-retry: 4.5.0(axios@1.10.0(debug@4.4.1))
+      axios-retry: 4.5.0(axios@1.10.0)
      c12: 1.11.2(magicast@0.3.5)
      chalk: 4.1.2
      commander: 12.1.0
@@ -22268,14 +22275,9 @@ snapshots:

   axe-core@4.7.2: {}

-  axios-retry@4.5.0(axios@1.10.0(debug@4.4.1)):
-    dependencies:
-      axios: 1.10.0(debug@4.4.1)
-      is-retry-allowed: 2.2.0
-
   axios-retry@4.5.0(axios@1.10.0):
     dependencies:
-      axios: 1.10.0
+      axios: 1.10.0(debug@4.4.1)
       is-retry-allowed: 2.2.0

   axios-retry@4.5.0(axios@1.8.3):
@@ -22283,14 +22285,6 @@
       axios: 1.8.3
       is-retry-allowed: 2.2.0

-  axios@1.10.0:
-    dependencies:
-      follow-redirects: 1.15.9(debug@4.3.6)
-      form-data: 4.0.2
-      proxy-from-env: 1.1.0
-    transitivePeerDependencies:
-      - debug
-
   axios@1.10.0(debug@4.3.6):
     dependencies:
       follow-redirects: 1.15.9(debug@4.3.6)
@@ -22589,7 +22583,7 @@ snapshots:

   bundlemon@3.1.0(typescript@5.8.3):
     dependencies:
-      axios: 1.10.0
+      axios: 1.10.0(debug@4.4.1)
       axios-retry: 4.5.0(axios@1.10.0)
       brotli-size: 4.0.0
       bundlemon-utils: 2.0.1
@@ -24077,7 +24071,7 @@ snapshots:

   eslint-import-resolver-node@0.3.9:
     dependencies:
-      debug: 3.2.7(supports-color@5.5.0)
+      debug: 3.2.7(supports-color@8.1.1)
       is-core-module: 2.16.1
       resolve: 1.22.10
     transitivePeerDependencies:
@@ -24101,7 +24095,7 @@

   eslint-module-utils@2.12.1(@typescript-eslint/parser@8.35.0(eslint@9.29.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@4.4.3)(eslint@9.29.0(jiti@1.21.7)):
     dependencies:
-      debug: 3.2.7(supports-color@5.5.0)
+      debug: 3.2.7(supports-color@8.1.1)
     optionalDependencies:
       '@typescript-eslint/parser': 8.35.0(eslint@9.29.0(jiti@1.21.7))(typescript@5.8.3)
       eslint: 9.29.0(jiti@1.21.7)
@@ -24140,7 +24134,7 @@
       array.prototype.findlastindex: 1.2.6
       array.prototype.flat: 1.3.3
       array.prototype.flatmap: 1.3.3
-      debug: 3.2.7(supports-color@5.5.0)
+      debug: 3.2.7(supports-color@8.1.1)
       doctrine: 2.1.0
       eslint: 9.29.0(jiti@1.21.7)
       eslint-import-resolver-node: 0.3.9
@@ -25074,7 +25068,7 @@
       array-parallel: 0.1.3
       array-series: 0.1.5
       cross-spawn: 7.0.6
-      debug: 3.2.7(supports-color@5.5.0)
+      debug: 3.2.7(supports-color@8.1.1)
     transitivePeerDependencies:
       - supports-color
@@ -25455,7 +25449,7 @@

   infisical-node@1.3.0:
     dependencies:
-      axios: 1.10.0
+      axios: 1.10.0(debug@4.4.1)
       dotenv: 16.3.1
       tweetnacl: 1.0.3
       tweetnacl-util: 0.15.1
@@ -26617,7 +26611,7 @@
       '@langchain/groq': 0.2.3(@langchain/core@0.3.61(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))(encoding@0.1.13)
       '@langchain/mistralai': 0.2.1(@langchain/core@0.3.61(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))(zod@3.25.67)
       '@langchain/ollama': 0.2.3(@langchain/core@0.3.61(openai@5.8.1(ws@8.18.2)(zod@3.25.67)))
-      axios: 1.10.0
+      axios: 1.10.0(debug@4.4.1)
       cheerio: 1.0.0
       handlebars: 4.7.8
     transitivePeerDependencies:
@@ -28217,7 +28211,7 @@

   pdf-parse@1.1.1:
     dependencies:
-      debug: 3.2.7(supports-color@5.5.0)
+      debug: 3.2.7(supports-color@8.1.1)
       node-ensure: 0.0.0
     transitivePeerDependencies:
       - supports-color
@@ -28471,7 +28465,7 @@

   posthog-node@3.2.1:
     dependencies:
-      axios: 1.10.0
+      axios: 1.10.0(debug@4.4.1)
       rusha: 0.8.14
     transitivePeerDependencies:
       - debug
@@ -29098,7 +29092,7 @@

   retry-axios@2.6.0(axios@1.10.0):
     dependencies:
-      axios: 1.10.0
+      axios: 1.10.0(debug@4.4.1)

   retry-request@7.0.2(encoding@0.1.13):
     dependencies:
@@ -29123,7 +29117,7 @@

   rhea@1.0.24:
     dependencies:
-      debug: 3.2.7(supports-color@5.5.0)
+      debug: 3.2.7(supports-color@8.1.1)
     transitivePeerDependencies:
       - supports-color
@@ -29586,7 +29580,7 @@
       asn1.js: 5.4.1
       asn1.js-rfc2560: 5.0.1(asn1.js@5.4.1)
       asn1.js-rfc5280: 3.0.0
-      axios: 1.10.0
+      axios: 1.10.0(debug@4.4.1)
       big-integer: 1.6.52
       bignumber.js: 9.1.2
       binascii: 0.0.2
@@ -17,7 +17,7 @@ catalog:
   '@types/basic-auth': ^1.1.3
   '@types/express': ^5.0.1
   '@types/jsonwebtoken': ^9.0.9
-  '@types/lodash': ^4.17.17
+  '@types/lodash': 4.17.17
   '@types/uuid': ^10.0.0
   '@types/xml2js': ^0.4.14
   '@vitest/coverage-v8': 3.2.4