refactor(core): Make Logger a service (no-changelog) (#7494)

कारतोफ्फेलस्क्रिप्ट™
2023-10-25 16:35:22 +02:00
committed by GitHub
parent db4e61ba24
commit 05586a900d
131 changed files with 761 additions and 919 deletions
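In short, the commit replaces the static `LoggerProxy` (and the `getLogger()` singleton) with a `Logger` class registered as a typedi service, which consumers receive through constructor injection or `Container.get(Logger)`. A minimal sketch of the pattern, using a hypothetical `ExampleService` rather than an actual class from this commit:

// Sketch of the pattern applied throughout this commit (simplified;
// `ExampleService` is hypothetical and not part of the diff).

// Before: call sites reached for the static proxy.
// import { LoggerProxy as Logger } from 'n8n-workflow';
// Logger.info('something happened');

// After: Logger is a typedi service and is injected where needed.
import { Service } from 'typedi';
import { Logger } from '@/Logger';

@Service()
export class ExampleService {
	constructor(private readonly logger: Logger) {}

	doWork() {
		this.logger.info('something happened');
	}
}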

View File

@@ -4,7 +4,6 @@ import type { Server } from 'http';
 import express from 'express';
 import compression from 'compression';
 import isbot from 'isbot';
-import { LoggerProxy as Logger } from 'n8n-workflow';
 import config from '@/config';
 import { N8N_VERSION, inDevelopment, inTest } from '@/constants';
@@ -19,8 +18,11 @@ import { TestWebhooks } from '@/TestWebhooks';
 import { WaitingWebhooks } from '@/WaitingWebhooks';
 import { webhookRequestHandler } from '@/WebhookHelpers';
 import { generateHostInstanceId } from './databases/utils/generators';
+import { Logger } from '@/Logger';

 export abstract class AbstractServer {
+	protected logger: Logger;
+
 	protected server: Server;

 	readonly app: express.Application;
@@ -67,6 +69,8 @@ export abstract class AbstractServer {
 		this.endpointWebhookWaiting = config.getEnv('endpoints.webhookWaiting');

 		this.uniqueInstanceId = generateHostInstanceId(instanceType);
+
+		this.logger = Container.get(Logger);
 	}

 	async configure(): Promise<void> {
@@ -194,7 +198,7 @@ export abstract class AbstractServer {
 		this.app.use((req, res, next) => {
 			const userAgent = req.headers['user-agent'];
 			if (userAgent && checkIfBot(userAgent)) {
-				Logger.info(`Blocked ${req.method} ${req.url} for "${userAgent}"`);
+				this.logger.info(`Blocked ${req.method} ${req.url} for "${userAgent}"`);
 				res.status(204).end();
 			} else next();
 		});

View File

@@ -7,7 +7,7 @@ import type {
 	IRun,
 	ExecutionStatus,
 } from 'n8n-workflow';
-import { createDeferredPromise, LoggerProxy } from 'n8n-workflow';
+import { createDeferredPromise } from 'n8n-workflow';
 import type { ChildProcess } from 'child_process';
 import type PCancelable from 'p-cancelable';
@@ -20,6 +20,7 @@ import type {
 } from '@/Interfaces';
 import { isWorkflowIdValid } from '@/utils';
 import { ExecutionRepository } from '@db/repositories';
+import { Logger } from '@/Logger';

 @Service()
 export class ActiveExecutions {
@@ -27,6 +28,8 @@ export class ActiveExecutions {
 		[index: string]: IExecutingWorkflowData;
 	} = {};

+	constructor(private readonly logger: Logger) {}
+
 	/**
 	 * Add a new active execution
 	 */
@@ -225,7 +228,7 @@ export class ActiveExecutions {
 	async setStatus(executionId: string, status: ExecutionStatus): Promise<void> {
 		if (this.activeExecutions[executionId] === undefined) {
-			LoggerProxy.debug(
+			this.logger.debug(
 				`There is no active execution with id "${executionId}", can't update status to ${status}.`,
 			);
 			return;

View File

@@ -27,7 +27,6 @@ import {
 	NodeHelpers,
 	Workflow,
 	WorkflowActivationError,
-	LoggerProxy as Logger,
 	ErrorReporterProxy as ErrorReporter,
 	WebhookPathAlreadyTakenError,
 } from 'n8n-workflow';
@@ -66,6 +65,7 @@ import { WorkflowsService } from './workflows/workflows.services';
 import { webhookNotFoundErrorMessage } from './utils';
 import { In } from 'typeorm';
 import { WebhookService } from './services/webhook.service';
+import { Logger } from './Logger';

 const WEBHOOK_PROD_UNREGISTERED_HINT =
 	"The workflow must be active for a production URL to run successfully. You can activate the workflow using the toggle in the top-right of the editor. Note that unlike test URL calls, production URL calls aren't shown on the canvas (only in the executions list)";
@@ -83,10 +83,11 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 	} = {};

 	constructor(
-		private activeExecutions: ActiveExecutions,
-		private externalHooks: ExternalHooks,
-		private nodeTypes: NodeTypes,
-		private webhookService: WebhookService,
+		private readonly logger: Logger,
+		private readonly activeExecutions: ActiveExecutions,
+		private readonly externalHooks: ExternalHooks,
+		private readonly nodeTypes: NodeTypes,
+		private readonly webhookService: WebhookService,
 	) {}

 	async init() {
@@ -113,31 +114,31 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 		}

 		if (workflowsData.length !== 0) {
-			Logger.info(' ================================');
-			Logger.info(' Start Active Workflows:');
-			Logger.info(' ================================');
+			this.logger.info(' ================================');
+			this.logger.info(' Start Active Workflows:');
+			this.logger.info(' ================================');
 			for (const workflowData of workflowsData) {
-				Logger.info(` - ${workflowData.name} (ID: ${workflowData.id})`);
-				Logger.debug(`Initializing active workflow "${workflowData.name}" (startup)`, {
+				this.logger.info(` - ${workflowData.name} (ID: ${workflowData.id})`);
+				this.logger.debug(`Initializing active workflow "${workflowData.name}" (startup)`, {
 					workflowName: workflowData.name,
 					workflowId: workflowData.id,
 				});
 				try {
 					await this.add(workflowData.id, 'init', workflowData);
-					Logger.verbose(`Successfully started workflow "${workflowData.name}"`, {
+					this.logger.verbose(`Successfully started workflow "${workflowData.name}"`, {
 						workflowName: workflowData.name,
 						workflowId: workflowData.id,
 					});
-					Logger.info(' => Started');
+					this.logger.info(' => Started');
 				} catch (error) {
 					ErrorReporter.error(error);
-					Logger.info(
+					this.logger.info(
 						' => ERROR: Workflow could not be activated on first try, keep on trying if not an auth issue',
 					);
-					Logger.info(` ${error.message}`);
-					Logger.error(
+					this.logger.info(` ${error.message}`);
+					this.logger.error(
 						`Issue on initial workflow activation try "${workflowData.name}" (startup)`,
 						{
 							workflowName: workflowData.name,
@@ -153,7 +154,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 					}
 				}
 			}
-			Logger.verbose('Finished initializing active workflows (startup)');
+			this.logger.verbose('Finished initializing active workflows (startup)');
 		}

 		await this.externalHooks.run('activeWorkflows.initialized', []);
@@ -165,7 +166,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 	 */
 	async removeAll(): Promise<void> {
 		let activeWorkflowIds: string[] = [];
-		Logger.verbose('Call to remove all active workflows received (removeAll)');
+		this.logger.verbose('Call to remove all active workflows received (removeAll)');

 		activeWorkflowIds.push(...this.activeWorkflows.allActiveWorkflows());
@@ -192,7 +193,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 		const httpMethod = request.method;
 		let path = request.params.path;

-		Logger.debug(`Received webhook "${httpMethod}" for path "${path}"`);
+		this.logger.debug(`Received webhook "${httpMethod}" for path "${path}"`);

 		// Reset request parameters
 		request.params = {} as WebhookRequest['params'];
@@ -421,7 +422,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 			await this.removeWorkflowWebhooks(workflow.id);
 		} catch (error1) {
 			ErrorReporter.error(error1);
-			Logger.error(
+			this.logger.error(
 				`Could not remove webhooks of workflow "${workflow.id}" because of error: "${error1.message}"`,
 			);
 		}
@@ -558,7 +559,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 			responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>,
 			donePromise?: IDeferredPromise<IRun | undefined>,
 		): void => {
-			Logger.debug(`Received event to trigger execution for workflow "${workflow.name}"`);
+			this.logger.debug(`Received event to trigger execution for workflow "${workflow.name}"`);
 			void WorkflowsService.saveStaticData(workflow);
 			const executePromise = this.runWorkflow(
 				workflowData,
@@ -577,7 +578,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 						.catch(donePromise.reject);
 				});
 			} else {
-				void executePromise.catch(Logger.error);
+				void executePromise.catch((error: Error) => this.logger.error(error.message, { error }));
 			}
 		};
@@ -614,7 +615,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 			responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>,
 			donePromise?: IDeferredPromise<IRun | undefined>,
 		): void => {
-			Logger.debug(`Received trigger for workflow "${workflow.name}"`);
+			this.logger.debug(`Received trigger for workflow "${workflow.name}"`);
 			void WorkflowsService.saveStaticData(workflow);
 			const executePromise = this.runWorkflow(
@@ -634,11 +635,11 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 						.catch(donePromise.reject);
 				});
 			} else {
-				executePromise.catch(Logger.error);
+				executePromise.catch((error: Error) => this.logger.error(error.message, { error }));
 			}
 		};
 		returnFunctions.emitError = (error: Error): void => {
-			Logger.info(
+			this.logger.info(
 				`The trigger node "${node.name}" of workflow "${workflowData.name}" failed with the error: "${error.message}". Will try to reactivate.`,
 				{
 					nodeName: node.name,
@@ -728,7 +729,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 		const canBeActivated = workflowInstance.checkIfWorkflowCanBeActivated(STARTING_NODES);
 		if (!canBeActivated) {
-			Logger.error(`Unable to activate workflow "${workflowData.name}"`);
+			this.logger.error(`Unable to activate workflow "${workflowData.name}"`);
 			throw new Error(
 				'The workflow can not be activated because it does not contain any nodes which could start the workflow. Only workflows which have trigger or webhook nodes can be activated.',
 			);
@@ -771,7 +772,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 			getTriggerFunctions,
 			getPollFunctions,
 		);
-		Logger.verbose(`Successfully activated workflow "${workflowData.name}"`, {
+		this.logger.verbose(`Successfully activated workflow "${workflowData.name}"`, {
 			workflowId,
 			workflowName: workflowData.name,
 		});
@@ -828,7 +829,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 		const workflowName = workflowData.name;

 		const retryFunction = async () => {
-			Logger.info(`Try to activate workflow "${workflowName}" (${workflowId})`, {
+			this.logger.info(`Try to activate workflow "${workflowName}" (${workflowId})`, {
 				workflowId,
 				workflowName,
 			});
@@ -841,7 +842,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 					lastTimeout = Math.min(lastTimeout * 2, WORKFLOW_REACTIVATE_MAX_TIMEOUT);
 				}

-				Logger.info(
+				this.logger.info(
 					` -> Activation of workflow "${workflowName}" (${workflowId}) did fail with error: "${
 						error.message as string
 					}" | retry in ${Math.floor(lastTimeout / 1000)} seconds`,
@@ -855,10 +856,13 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 				this.queuedWorkflowActivations[workflowId].timeout = setTimeout(retryFunction, lastTimeout);
 				return;
 			}
-			Logger.info(` -> Activation of workflow "${workflowName}" (${workflowId}) was successful!`, {
-				workflowId,
-				workflowName,
-			});
+			this.logger.info(
+				` -> Activation of workflow "${workflowName}" (${workflowId}) was successful!`,
+				{
+					workflowId,
+					workflowName,
+				},
+			);
 		};

 		// Just to be sure that there is not chance that for any reason
@@ -904,7 +908,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 			await this.removeWorkflowWebhooks(workflowId);
 		} catch (error) {
 			ErrorReporter.error(error);
-			Logger.error(
+			this.logger.error(
 				`Could not remove webhooks of workflow "${workflowId}" because of error: "${error.message}"`,
 			);
 		}
@@ -923,7 +927,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 		if (this.activeWorkflows.isActive(workflowId)) {
 			const removalSuccess = await this.activeWorkflows.remove(workflowId);
 			if (removalSuccess) {
-				Logger.verbose(`Successfully deactivated workflow "${workflowId}"`, { workflowId });
+				this.logger.verbose(`Successfully deactivated workflow "${workflowId}"`, { workflowId });
 			}
 		}
 	}

View File

@@ -3,8 +3,9 @@ import { mkdir, utimes, open, rm } from 'fs/promises';
 import { join, dirname } from 'path';
 import { Container } from 'typedi';
 import { InstanceSettings } from 'n8n-core';
-import { LoggerProxy, sleep } from 'n8n-workflow';
+import { sleep } from 'n8n-workflow';
 import { inProduction } from '@/constants';
+import { Logger } from '@/Logger';

 export const touchFile = async (filePath: string): Promise<void> => {
 	await mkdir(dirname(filePath), { recursive: true });
@@ -25,7 +26,7 @@ export const init = async () => {
 	if (existsSync(journalFile)) {
 		// Crash detected
-		LoggerProxy.error('Last session crashed');
+		Container.get(Logger).error('Last session crashed');
 		// add a 10 seconds pause to slow down crash-looping
 		await sleep(10_000);
 	}

View File

@@ -3,6 +3,7 @@
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 /* eslint-disable @typescript-eslint/no-unsafe-return */
 /* eslint-disable @typescript-eslint/no-unsafe-call */
+import { Service } from 'typedi';
 import { Credentials, NodeExecuteFunctions } from 'n8n-core';
 import get from 'lodash/get';
@@ -38,7 +39,6 @@ import {
 	NodeHelpers,
 	RoutingNode,
 	Workflow,
-	LoggerProxy as Logger,
 	ErrorReporterProxy as ErrorReporter,
 } from 'n8n-workflow';
@@ -52,8 +52,8 @@ import { CredentialTypes } from '@/CredentialTypes';
 import { CredentialsOverwrites } from '@/CredentialsOverwrites';
 import { whereClause } from './UserManagement/UserManagementHelper';
 import { RESPONSE_ERROR_MESSAGES } from './constants';
-import { Service } from 'typedi';
 import { isObjectLiteral } from './utils';
+import { Logger } from '@/Logger';

 const { OAUTH2_CREDENTIAL_TEST_SUCCEEDED, OAUTH2_CREDENTIAL_TEST_FAILED } = RESPONSE_ERROR_MESSAGES;
@@ -89,6 +89,7 @@ const mockNodeTypes: INodeTypes = {
 @Service()
 export class CredentialsHelper extends ICredentialsHelper {
 	constructor(
+		private readonly logger: Logger,
 		private readonly credentialTypes: CredentialTypes,
 		private readonly nodeTypes: NodeTypes,
 		private readonly credentialsOverwrites: CredentialsOverwrites,
@@ -601,7 +602,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 				user.isOwner,
 			);
 		} catch (error) {
-			Logger.debug('Credential test failed', error);
+			this.logger.debug('Credential test failed', error);
 			return {
 				status: 'Error',
 				message: error.message.toString(),
@@ -757,7 +758,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 					message: error.cause.code,
 				};
 			}
-			Logger.debug('Credential test failed', error);
+			this.logger.debug('Credential test failed', error);
 			return {
 				status: 'Error',
 				message: error.message.toString(),

View File

@@ -1,9 +1,10 @@
 import { Service } from 'typedi';
 import type { ICredentialDataDecryptedObject } from 'n8n-workflow';
-import { deepCopy, LoggerProxy as Logger, jsonParse } from 'n8n-workflow';
+import { deepCopy, jsonParse } from 'n8n-workflow';
 import config from '@/config';
 import type { ICredentialsOverwrite } from '@/Interfaces';
 import { CredentialTypes } from '@/CredentialTypes';
+import { Logger } from '@/Logger';

 @Service()
 export class CredentialsOverwrites {
@@ -11,7 +12,10 @@ export class CredentialsOverwrites {
 	private resolvedTypes: string[] = [];

-	constructor(private credentialTypes: CredentialTypes) {
+	constructor(
+		private readonly credentialTypes: CredentialTypes,
+		private readonly logger: Logger,
+	) {
 		const data = config.getEnv('credentials.overwrite.data');
 		const overwriteData = jsonParse<ICredentialsOverwrite>(data, {
 			errorMessage: 'The credentials-overwrite is not valid JSON.',
@@ -61,7 +65,7 @@ export class CredentialsOverwrites {
 		}

 		if (!this.credentialTypes.recognizes(type)) {
-			Logger.warn(`Unknown credential type ${type} in Credential overwrites`);
+			this.logger.warn(`Unknown credential type ${type} in Credential overwrites`);
 			return;
 		}

View File

@@ -8,7 +8,7 @@ import type {
 import { Cipher } from 'n8n-core';
 import Container, { Service } from 'typedi';
-import { getLogger } from '@/Logger';
+import { Logger } from '@/Logger';
 import { jsonParse, type IDataObject } from 'n8n-workflow';
 import {
@@ -21,8 +21,6 @@ import { InternalHooks } from '@/InternalHooks';
 import { ExternalSecretsProviders } from './ExternalSecretsProviders.ee';
 import { OrchestrationMainService } from '@/services/orchestration/main/orchestration.main.service';

-const logger = getLogger();
-
 @Service()
 export class ExternalSecretsManager {
 	private providers: Record<string, SecretsProvider> = {};
@@ -38,10 +36,11 @@ export class ExternalSecretsManager {
 	initRetryTimeouts: Record<string, NodeJS.Timer> = {};

 	constructor(
-		private settingsRepo: SettingsRepository,
-		private license: License,
-		private secretsProviders: ExternalSecretsProviders,
-		private cipher: Cipher,
+		private readonly logger: Logger,
+		private readonly settingsRepo: SettingsRepository,
+		private readonly license: License,
+		private readonly secretsProviders: ExternalSecretsProviders,
+		private readonly cipher: Cipher,
 	) {}

 	async init(): Promise<void> {
@@ -72,7 +71,7 @@ export class ExternalSecretsManager {
 	}

 	async reloadAllProviders(backoff?: number) {
-		logger.debug('Reloading all external secrets providers');
+		this.logger.debug('Reloading all external secrets providers');
 		const providers = this.getProviderNames();
 		if (!providers) {
 			return;
@@ -140,7 +139,7 @@ export class ExternalSecretsManager {
 		try {
 			await provider.init(providerSettings);
 		} catch (e) {
-			logger.error(
+			this.logger.error(
 				`Error initializing secrets provider ${provider.displayName} (${provider.name}).`,
 			);
 			this.retryInitWithBackoff(name, currentBackoff);
@@ -155,7 +154,7 @@ export class ExternalSecretsManager {
 			try {
 				await provider.disconnect();
 			} catch {}
-			logger.error(
+			this.logger.error(
 				`Error initializing secrets provider ${provider.displayName} (${provider.name}).`,
 			);
 			this.retryInitWithBackoff(name, currentBackoff);
@@ -190,7 +189,7 @@ export class ExternalSecretsManager {
 						await p.update();
 					}
 				} catch {
-					logger.error(`Error updating secrets provider ${p.displayName} (${p.name}).`);
+					this.logger.error(`Error updating secrets provider ${p.displayName} (${p.name}).`);
 				}
 			}),
 		);

View File

@@ -3,10 +3,9 @@ import { SecretsProvider } from '@/Interfaces';
 import type { IDataObject, INodeProperties } from 'n8n-workflow';
 import type { AxiosInstance, AxiosResponse } from 'axios';
 import axios from 'axios';
-import { getLogger } from '@/Logger';
+import { Logger } from '@/Logger';
 import { EXTERNAL_SECRETS_NAME_REGEX } from '../constants';
+import { Container } from 'typedi';

-const logger = getLogger();

 type VaultAuthMethod = 'token' | 'usernameAndPassword' | 'appRole';
@@ -239,6 +238,10 @@ export class VaultProvider extends SecretsProvider {
 	private refreshAbort = new AbortController();

+	constructor(readonly logger = Container.get(Logger)) {
+		super();
+	}
+
 	async init(settings: SecretsProviderSettings): Promise<void> {
 		this.settings = settings.settings as unknown as VaultSettings;
@@ -274,7 +277,7 @@ export class VaultProvider extends SecretsProvider {
 				);
 			} catch {
 				this.state = 'error';
-				logger.error('Failed to connect to Vault using Username and Password credentials.');
+				this.logger.error('Failed to connect to Vault using Username and Password credentials.');
 				return;
 			}
 		} else if (this.settings.authMethod === 'appRole') {
@@ -282,7 +285,7 @@ export class VaultProvider extends SecretsProvider {
 				this.#currentToken = await this.authAppRole(this.settings.roleId, this.settings.secretId);
 			} catch {
 				this.state = 'error';
-				logger.error('Failed to connect to Vault using AppRole credentials.');
+				this.logger.error('Failed to connect to Vault using AppRole credentials.');
 				return;
 			}
 		}
@@ -297,13 +300,13 @@ export class VaultProvider extends SecretsProvider {
 			}
 		} catch (e) {
 			this.state = 'error';
-			logger.error('Failed credentials test on Vault connect.');
+			this.logger.error('Failed credentials test on Vault connect.');
 		}

 		try {
 			await this.update();
 		} catch {
-			logger.warn('Failed to update Vault secrets');
+			this.logger.warn('Failed to update Vault secrets');
 		}
 	}
@@ -343,7 +346,9 @@ export class VaultProvider extends SecretsProvider {
 		[this.#tokenInfo] = await this.getTokenInfo();
 		if (!this.#tokenInfo) {
-			logger.error('Failed to fetch token info during renewal. Cancelling all future renewals.');
+			this.logger.error(
+				'Failed to fetch token info during renewal. Cancelling all future renewals.',
+			);
 			return;
 		}
@@ -353,7 +358,7 @@ export class VaultProvider extends SecretsProvider {
 			this.setupTokenRefresh();
 		} catch {
-			logger.error('Failed to renew Vault token. Attempting to reconnect.');
+			this.logger.error('Failed to renew Vault token. Attempting to reconnect.');
 			void this.connect();
 		}
 	};
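The diff shows two ways of obtaining the new service, depending on who constructs the class: classes built by the DI container (ActiveExecutions, CredentialsHelper, WaitTracker, and so on) take the `Logger` as a constructor parameter, while classes and functions instantiated outside the container (the VaultProvider above, LdapSync, the crash-journal helpers) resolve it with `Container.get(Logger)`. A rough sketch of both styles, using hypothetical class names:

import { Container, Service } from 'typedi';
import { Logger } from '@/Logger';

// 1. Constructed by the container: plain constructor injection.
@Service()
class InjectedConsumer {
	constructor(private readonly logger: Logger) {}
}

// 2. Constructed manually (e.g. `new VaultProvider(...)`): resolve from the container,
//    here as a default parameter so tests can still pass a stub logger.
class ManualConsumer {
	constructor(readonly logger = Container.get(Logger)) {}
}

// 3. Plain functions resolve the service inline.
const reportCrash = () => Container.get(Logger).error('Last session crashed');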

View File

@@ -1,5 +1,4 @@
 import type { Entry as LdapUser } from 'ldapts';
-import { LoggerProxy as Logger } from 'n8n-workflow';
 import { QueryFailedError } from 'typeorm/error/QueryFailedError';
 import type { LdapService } from './LdapService.ee';
 import type { LdapConfig } from './types';
@@ -17,6 +16,7 @@ import type { Role } from '@db/entities/Role';
 import type { RunningMode, SyncStatus } from '@db/entities/AuthProviderSyncHistory';
 import { Container } from 'typedi';
 import { InternalHooks } from '@/InternalHooks';
+import { Logger } from '@/Logger';

 export class LdapSync {
 	private intervalId: NodeJS.Timeout | undefined = undefined;
@@ -25,6 +25,12 @@ export class LdapSync {
 	private _ldapService: LdapService;

+	private readonly logger: Logger;
+
+	constructor() {
+		this.logger = Container.get(Logger);
+	}
+
 	/**
 	 * Updates the LDAP configuration
 	 */
@@ -71,7 +77,7 @@ export class LdapSync {
 	 * else the users are not modified
 	 */
 	async run(mode: RunningMode): Promise<void> {
-		Logger.debug(`LDAP - Starting a synchronization run in ${mode} mode`);
+		this.logger.debug(`LDAP - Starting a synchronization run in ${mode} mode`);

 		let adUsers: LdapUser[] = [];
@@ -80,14 +86,14 @@ export class LdapSync {
 				createFilter(`(${this._config.loginIdAttribute}=*)`, this._config.userFilter),
 			);

-			Logger.debug('LDAP - Users return by the query', {
+			this.logger.debug('LDAP - Users return by the query', {
 				users: adUsers,
 			});

 			resolveBinaryAttributes(adUsers);
 		} catch (e) {
 			if (e instanceof Error) {
-				Logger.error(`LDAP - ${e.message}`);
+				this.logger.error(`LDAP - ${e.message}`);
 				throw e;
 			}
 		}
@@ -104,7 +110,7 @@ export class LdapSync {
 			role,
 		);

-		Logger.debug('LDAP - Users processed', {
+		this.logger.debug('LDAP - Users processed', {
 			created: usersToCreate.length,
 			updated: usersToUpdate.length,
 			disabled: usersToDisable.length,
@@ -144,7 +150,7 @@ export class LdapSync {
 			error: errorMessage,
 		});

-		Logger.debug('LDAP - Synchronization finished successfully');
+		this.logger.debug('LDAP - Synchronization finished successfully');
 	}

 	/**

View File

@@ -20,7 +20,7 @@ import {
 	LDAP_LOGIN_LABEL,
 } from './constants';
 import type { ConnectionSecurity, LdapConfig } from './types';
-import { jsonParse, LoggerProxy as Logger } from 'n8n-workflow';
+import { jsonParse } from 'n8n-workflow';
 import { License } from '@/License';
 import { InternalHooks } from '@/InternalHooks';
 import {
@@ -31,6 +31,7 @@ import {
 } from '@/sso/ssoHelpers';
 import { InternalServerError } from '../ResponseHelper';
 import { RoleService } from '@/services/role.service';
+import { Logger } from '@/Logger';

 /**
  * Check whether the LDAP feature is disabled in the instance
@@ -185,7 +186,7 @@ export const handleLdapInit = async (): Promise<void> => {
 	try {
 		await setGlobalLdapConfigVariables(ldapConfig);
 	} catch (error) {
-		Logger.warn(
+		Container.get(Logger).warn(
 			`Cannot set LDAP login enabled state when an authentication method other than email or ldap is active (current: ${getCurrentAuthenticationMethod()})`,
 			// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
 			error,
@@ -235,7 +236,7 @@ export const findAndAuthenticateLdapUser = async (
 			void Container.get(InternalHooks).onLdapLoginSyncFailed({
 				error: e.message,
 			});
-			Logger.error('LDAP - Error during search', { message: e.message });
+			Container.get(Logger).error('LDAP - Error during search', { message: e.message });
 		}
 		return undefined;
 	}
@@ -261,7 +262,9 @@ export const findAndAuthenticateLdapUser = async (
 		await ldapService.validUser(user.dn, password);
 	} catch (e) {
 		if (e instanceof Error) {
-			Logger.error('LDAP - Error validating user against LDAP server', { message: e.message });
+			Container.get(Logger).error('LDAP - Error validating user against LDAP server', {
+				message: e.message,
+			});
 		}
 		return undefined;
 	}

View File

@@ -1,7 +1,8 @@
 import type { TEntitlement, TFeatures, TLicenseBlock } from '@n8n_io/license-sdk';
 import { LicenseManager } from '@n8n_io/license-sdk';
-import type { ILogger } from 'n8n-workflow';
-import { getLogger } from './Logger';
+import { InstanceSettings, ObjectStoreService } from 'n8n-core';
+import Container, { Service } from 'typedi';
+import { Logger } from '@/Logger';
 import config from '@/config';
 import * as Db from '@/Db';
 import {
@@ -11,12 +12,10 @@ import {
 	SETTINGS_LICENSE_CERT_KEY,
 	UNLIMITED_LICENSE_QUOTA,
 } from './constants';
-import Container, { Service } from 'typedi';
 import { WorkflowRepository } from '@/databases/repositories';
 import type { BooleanLicenseFeature, N8nInstanceType, NumericLicenseFeature } from './Interfaces';
 import type { RedisServicePubSubPublisher } from './services/redis/RedisServicePubSubPublisher';
 import { RedisService } from './services/redis.service';
-import { InstanceSettings, ObjectStoreService } from 'n8n-core';

 type FeatureReturnType = Partial<
 	{
@@ -26,15 +25,14 @@ type FeatureReturnType = Partial<
 @Service()
 export class License {
-	private logger: ILogger;
-
 	private manager: LicenseManager | undefined;

 	private redisPublisher: RedisServicePubSubPublisher;

-	constructor(private readonly instanceSettings: InstanceSettings) {
-		this.logger = getLogger();
-	}
+	constructor(
+		private readonly logger: Logger,
+		private readonly instanceSettings: InstanceSettings,
+	) {}

 	async init(instanceType: N8nInstanceType = 'main') {
 		if (this.manager) {

View File

@@ -17,7 +17,7 @@ import type {
 	INodeTypeData,
 	ICredentialTypeData,
 } from 'n8n-workflow';
-import { LoggerProxy, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
+import { ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
 import config from '@/config';
 import {
@@ -27,6 +27,7 @@ import {
 	CLI_DIR,
 	inE2ETests,
 } from '@/constants';
+import { Logger } from '@/Logger';

 interface LoadedNodesAndCredentials {
 	nodes: INodeTypeData;
@@ -49,7 +50,10 @@ export class LoadNodesAndCredentials {
 	private postProcessors: Array<() => Promise<void>> = [];

-	constructor(private readonly instanceSettings: InstanceSettings) {}
+	constructor(
+		private readonly logger: Logger,
+		private readonly instanceSettings: InstanceSettings,
+	) {}

 	async init() {
 		if (inTest) throw new Error('Not available in tests');
@@ -197,7 +201,7 @@ export class LoadNodesAndCredentials {
 		return description.credentials.some(({ name }) => {
 			const credType = this.types.credentials.find((t) => t.name === name);
 			if (!credType) {
-				LoggerProxy.warn(
+				this.logger.warn(
 					`Failed to load Custom API options for the node "${description.name}": Unknown credential name "${name}"`,
 				);
 				return false;

View File

@@ -1,18 +1,17 @@
-/* eslint-disable @typescript-eslint/no-shadow */
+import { Service } from 'typedi';
+import { inspect } from 'util';
 import winston from 'winston';
-import type { IDataObject, ILogger, LogTypes } from 'n8n-workflow';
 import callsites from 'callsites';
-import { inspect } from 'util';
 import { basename } from 'path';
+import { LoggerProxy, type IDataObject, LOG_LEVELS } from 'n8n-workflow';
 import config from '@/config';

 const noOp = () => {};
-const levelNames = ['debug', 'verbose', 'info', 'warn', 'error'] as const;

-export class Logger implements ILogger {
+@Service()
+export class Logger {
 	private logger: winston.Logger;

 	constructor() {
@@ -24,7 +23,7 @@ export class Logger implements ILogger {
 		});

 		// Change all methods with higher log-level to no-op
-		for (const levelName of levelNames) {
+		for (const levelName of LOG_LEVELS) {
 			if (this.logger.levels[levelName] > this.logger.levels[level]) {
 				Object.defineProperty(this, levelName, { value: noOp });
 			}
@@ -33,7 +32,7 @@ export class Logger implements ILogger {
 		const output = config
 			.getEnv('logs.output')
 			.split(',')
-			.map((output) => output.trim());
+			.map((line) => line.trim());

 		if (output.includes('console')) {
 			let format: winston.Logform.Format;
@@ -43,8 +42,8 @@ export class Logger implements ILogger {
 					winston.format.timestamp(),
 					winston.format.colorize({ all: true }),
-					winston.format.printf(({ level, message, timestamp, metadata }) => {
-						return `${timestamp} | ${level.padEnd(18)} | ${message}${
+					winston.format.printf(({ level: logLevel, message, timestamp, metadata }) => {
+						return `${timestamp} | ${logLevel.padEnd(18)} | ${message}${
 							// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
 							Object.keys(metadata).length ? ` ${JSON.stringify(inspect(metadata))}` : ''
 						}`;
@@ -76,13 +75,15 @@ export class Logger implements ILogger {
 				}),
 			);
 		}
+
+		LoggerProxy.init(this);
 	}

-	log(type: LogTypes, message: string, meta: object = {}): void {
+	private log(level: (typeof LOG_LEVELS)[number], message: string, meta: object = {}): void {
 		const callsite = callsites();
 		// We are using the third array element as the structure is as follows:
 		// [0]: this file
-		// [1]: Should be LoggerProxy
+		// [1]: Should be Logger
 		// [2]: Should point to the caller.
 		// Note: getting line number is useless because at this point
 		// We are in runtime, so it means we are looking at compiled js files
@@ -95,38 +96,28 @@ export class Logger implements ILogger {
 				logDetails.function = functionName;
 			}
 		}
-		this.logger.log(type, message, { ...meta, ...logDetails });
+		this.logger.log(level, message, { ...meta, ...logDetails });
 	}

 	// Convenience methods below

-	debug(message: string, meta: object = {}): void {
-		this.log('debug', message, meta);
+	error(message: string, meta: object = {}): void {
+		this.log('error', message, meta);
+	}
+
+	warn(message: string, meta: object = {}): void {
+		this.log('warn', message, meta);
 	}

 	info(message: string, meta: object = {}): void {
 		this.log('info', message, meta);
 	}

-	error(message: string, meta: object = {}): void {
-		this.log('error', message, meta);
+	debug(message: string, meta: object = {}): void {
+		this.log('debug', message, meta);
 	}

 	verbose(message: string, meta: object = {}): void {
 		this.log('verbose', message, meta);
 	}
-
-	warn(message: string, meta: object = {}): void {
-		this.log('warn', message, meta);
-	}
-}
-
-let activeLoggerInstance: Logger | undefined;
-
-export function getLogger() {
-	if (activeLoggerInstance === undefined) {
-		activeLoggerInstance = new Logger();
-	}
-	return activeLoggerInstance;
 }
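The rewritten `Logger` keeps the winston setup and the callsite lookup, drops the `getLogger()` singleton, and, per the hunk above, still calls `LoggerProxy.init(this)` so code that logs through the proxy in other packages keeps working. Its log-level filtering overwrites too-verbose methods with no-ops at construction time; a stripped-down sketch of that mechanism (winston transports and config handling omitted, class name hypothetical):

const LOG_LEVELS = ['error', 'warn', 'info', 'verbose', 'debug'] as const;
type LogLevel = (typeof LOG_LEVELS)[number];

const noOp = () => {};

class MiniLogger {
	// winston-style priorities: lower number = more severe.
	private readonly priorities: Record<LogLevel, number> = {
		error: 0,
		warn: 1,
		info: 2,
		verbose: 3,
		debug: 4,
	};

	constructor(level: LogLevel) {
		for (const name of LOG_LEVELS) {
			// Any method more verbose than the configured level becomes a no-op,
			// so disabled log calls cost (almost) nothing at runtime.
			if (this.priorities[name] > this.priorities[level]) {
				Object.defineProperty(this, name, { value: noOp });
			}
		}
	}

	error(message: string) { console.error(message); }
	warn(message: string) { console.warn(message); }
	info(message: string) { console.log(message); }
	verbose(message: string) { console.log(message); }
	debug(message: string) { console.debug(message); }
}

// new MiniLogger('info').debug('not printed'); // replaced by the no-op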

View File

@@ -39,11 +39,6 @@ export = {
 			const savedCredential = await saveCredential(newCredential, req.user, encryptedData);

-			// LoggerProxy.verbose('New credential created', {
-			// 	credentialsId: newCredential.id,
-			// 	ownerId: req.user.id,
-			// });
-
 			return res.json(sanitizeCredentials(savedCredential));
 		} catch ({ message, httpStatusCode }) {
 			// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment

View File

@@ -46,7 +46,7 @@ import type {
 	ResourceMapperFields,
 	IN8nUISettings,
 } from 'n8n-workflow';
-import { LoggerProxy, jsonParse } from 'n8n-workflow';
+import { jsonParse } from 'n8n-workflow';
 // @ts-ignore
 import timezones from 'google-timezones-json';
@@ -204,7 +204,7 @@ export class Server extends AbstractServer {
 		this.endpointPresetCredentials = config.getEnv('credentials.overwrite.endpoint');

 		await super.start();
-		LoggerProxy.debug(`Server ID: ${this.uniqueInstanceId}`);
+		this.logger.debug(`Server ID: ${this.uniqueInstanceId}`);

 		const cpus = os.cpus();
 		const binaryDataConfig = config.getEnv('binaryDataManager');
@@ -270,11 +270,10 @@ export class Server extends AbstractServer {
 	}

 	private async registerControllers(ignoredEndpoints: Readonly<string[]>) {
-		const { app, externalHooks, activeWorkflowRunner, nodeTypes } = this;
+		const { app, externalHooks, activeWorkflowRunner, nodeTypes, logger } = this;
 		const repositories = Db.collections;
 		setupAuthMiddlewares(app, ignoredEndpoints, this.restEndpoint);

-		const logger = LoggerProxy;
 		const internalHooks = Container.get(InternalHooks);
 		const mailer = Container.get(UserManagementMailer);
 		const userService = Container.get(UserService);
@@ -285,7 +284,7 @@ export class Server extends AbstractServer {
 		const controllers: object[] = [
 			new EventBusController(),
 			new EventBusControllerEE(),
-			new AuthController(config, logger, internalHooks, mfaService, userService, postHog),
+			Container.get(AuthController),
 			new OwnerController(
 				config,
 				logger,
@@ -294,7 +293,7 @@ export class Server extends AbstractServer {
 				userService,
 				postHog,
 			),
-			new MeController(logger, externalHooks, internalHooks, userService),
+			Container.get(MeController),
 			new NodeTypesController(config, nodeTypes),
 			new PasswordResetController(
 				logger,
@@ -457,7 +456,7 @@ export class Server extends AbstractServer {
 		try {
 			await Container.get(SamlService).init();
 		} catch (error) {
-			LoggerProxy.warn(`SAML initialization failed: ${error.message}`);
+			this.logger.warn(`SAML initialization failed: ${error.message}`);
 		}

 		// ----------------------------------------
@@ -472,7 +471,7 @@ export class Server extends AbstractServer {
 		try {
 			await Container.get(SourceControlService).init();
 		} catch (error) {
-			LoggerProxy.warn(`Source Control initialization failed: ${error.message}`);
+			this.logger.warn(`Source Control initialization failed: ${error.message}`);
 		}

 		// ----------------------------------------
@@ -667,7 +666,7 @@ export class Server extends AbstractServer {
 				});

 				if (!shared) {
-					LoggerProxy.verbose('User attempted to access workflow errors without permissions', {
+					this.logger.verbose('User attempted to access workflow errors without permissions', {
 						workflowId,
 						userId: req.user.id,
 					});
@@ -714,14 +713,14 @@ export class Server extends AbstractServer {
 				const { id: credentialId } = req.query;

 				if (!credentialId) {
-					LoggerProxy.error('OAuth1 credential authorization failed due to missing credential ID');
+					this.logger.error('OAuth1 credential authorization failed due to missing credential ID');
 					throw new ResponseHelper.BadRequestError('Required credential ID is missing');
 				}

 				const credential = await getCredentialForUser(credentialId, req.user);

 				if (!credential) {
-					LoggerProxy.error(
+					this.logger.error(
 						'OAuth1 credential authorization failed because the current user does not have the correct permissions',
 						{ userId: req.user.id },
 					);
@@ -826,7 +825,7 @@ export class Server extends AbstractServer {
 				// Update the credentials in DB
 				await Db.collections.Credentials.update(credentialId, newCredentialsData);

-				LoggerProxy.verbose('OAuth1 authorization successful for new credential', {
+				this.logger.verbose('OAuth1 authorization successful for new credential', {
 					userId: req.user.id,
 					credentialId,
 				});
@@ -848,7 +847,7 @@ export class Server extends AbstractServer {
 							req.query,
 						)}`,
 					);
-					LoggerProxy.error(
+					this.logger.error(
 						'OAuth1 callback failed because of insufficient parameters received',
 						{
 							userId: req.user?.id,
@@ -861,7 +860,7 @@ export class Server extends AbstractServer {
 				const credential = await getCredentialWithoutUser(credentialId);

 				if (!credential) {
-					LoggerProxy.error('OAuth1 callback failed because of insufficient user permissions', {
+					this.logger.error('OAuth1 callback failed because of insufficient user permissions', {
 						userId: req.user?.id,
 						credentialId,
 					});
@@ -906,7 +905,7 @@ export class Server extends AbstractServer {
 				try {
 					oauthToken = await axios.request(options);
 				} catch (error) {
-					LoggerProxy.error('Unable to fetch tokens for OAuth1 callback', {
+					this.logger.error('Unable to fetch tokens for OAuth1 callback', {
 						userId: req.user?.id,
 						credentialId,
 					});
@@ -934,13 +933,13 @@ export class Server extends AbstractServer {
 				// Save the credentials in DB
 				await Db.collections.Credentials.update(credentialId, newCredentialsData);

-				LoggerProxy.verbose('OAuth1 callback successful for new credential', {
+				this.logger.verbose('OAuth1 callback successful for new credential', {
 					userId: req.user?.id,
 					credentialId,
 				});
 				res.sendFile(pathResolve(TEMPLATES_DIR, 'oauth-callback.html'));
 			} catch (error) {
-				LoggerProxy.error('OAuth1 callback failed because of insufficient user permissions', {
+				this.logger.error('OAuth1 callback failed because of insufficient user permissions', {
 					userId: req.user?.id,
 					credentialId: req.query.cid,
 				});

View File

@@ -1,14 +1,19 @@
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 import type { Transporter } from 'nodemailer';
 import { createTransport } from 'nodemailer';
-import { ErrorReporterProxy as ErrorReporter, LoggerProxy as Logger } from 'n8n-workflow';
+import type SMTPConnection from 'nodemailer/lib/smtp-connection';
+import { Service } from 'typedi';
+import { ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
 import config from '@/config';
 import type { MailData, SendEmailResult } from './Interfaces';
-import type SMTPConnection from 'nodemailer/lib/smtp-connection';
+import { Logger } from '@/Logger';

+@Service()
 export class NodeMailer {
 	private transport?: Transporter;

+	constructor(private readonly logger: Logger) {}
+
 	async init(): Promise<void> {
 		const transportConfig: SMTPConnection.Options = {
 			host: config.getEnv('userManagement.emails.smtp.host'),
@@ -81,12 +86,12 @@ export class NodeMailer {
 				text: mailData.textOnly,
 				html: mailData.body,
 			});
-			Logger.verbose(
+			this.logger.verbose(
 				`Email sent successfully to the following recipients: ${mailData.emailRecipients.toString()}`,
 			);
 		} catch (error) {
 			ErrorReporter.error(error);
-			Logger.error('Failed to send email', { recipients: mailData.emailRecipients, error });
+			this.logger.error('Failed to send email', { recipients: mailData.emailRecipients, error });
 			throw error;
 		}

View File

@@ -2,7 +2,7 @@ import { existsSync } from 'fs';
 import { readFile } from 'fs/promises';
 import Handlebars from 'handlebars';
 import { join as pathJoin } from 'path';
-import { Service } from 'typedi';
+import { Container, Service } from 'typedi';
 import config from '@/config';
 import type { InviteEmailData, PasswordResetData, SendEmailResult } from './Interfaces';
 import { NodeMailer } from './NodeMailer';
@@ -45,7 +45,7 @@ export class UserManagementMailer {
 		// Other implementations can be used in the future.
 		if (this.isEmailSetUp) {
-			this.mailer = new NodeMailer();
+			this.mailer = Container.get(NodeMailer);
 		}
 	}

View File

@@ -1,8 +1,4 @@
-import {
-	ErrorReporterProxy as ErrorReporter,
-	LoggerProxy as Logger,
-	WorkflowOperationError,
-} from 'n8n-workflow';
+import { ErrorReporterProxy as ErrorReporter, WorkflowOperationError } from 'n8n-workflow';
 import { Container, Service } from 'typedi';
 import type { FindManyOptions, ObjectLiteral } from 'typeorm';
 import { Not, LessThanOrEqual } from 'typeorm';
@@ -20,6 +16,7 @@ import { recoverExecutionDataFromEventLogMessages } from './eventbus/MessageEven
 import { ExecutionRepository } from '@db/repositories';
 import type { ExecutionEntity } from '@db/entities/ExecutionEntity';
 import { OwnershipService } from './services/ownership.service';
+import { Logger } from '@/Logger';

 @Service()
 export class WaitTracker {
@@ -33,8 +30,9 @@ export class WaitTracker {
 	mainTimer: NodeJS.Timeout;

 	constructor(
-		private executionRepository: ExecutionRepository,
-		private ownershipService: OwnershipService,
+		private readonly logger: Logger,
+		private readonly executionRepository: ExecutionRepository,
+		private readonly ownershipService: OwnershipService,
 	) {
 		// Poll every 60 seconds a list of upcoming executions
 		this.mainTimer = setInterval(() => {
@@ -45,7 +43,7 @@ export class WaitTracker {
 	}

 	async getWaitingExecutions() {
-		Logger.debug('Wait tracker querying database for waiting executions');
+		this.logger.debug('Wait tracker querying database for waiting executions');

 		// Find all the executions which should be triggered in the next 70 seconds
 		const findQuery: FindManyOptions<ExecutionEntity> = {
 			select: ['id', 'waitTill'],
@@ -74,7 +72,7 @@ export class WaitTracker {
 		}

 		const executionIds = executions.map((execution) => execution.id).join(', ');
-		Logger.debug(
+		this.logger.debug(
 			`Wait tracker found ${executions.length} executions. Setting timer for IDs: ${executionIds}`,
 		);
@@ -163,7 +161,7 @@ export class WaitTracker {
 	}

 	startExecution(executionId: string) {
-		Logger.debug(`Wait tracker resuming execution ${executionId}`, { executionId });
+		this.logger.debug(`Wait tracker resuming execution ${executionId}`, { executionId });
 		delete this.waitingExecutions[executionId];

 		(async () => {
@@ -198,7 +196,7 @@ export class WaitTracker {
 			await workflowRunner.run(data, false, false, executionId);
 		})().catch((error: Error) => {
 			ErrorReporter.error(error);
-			Logger.error(
+			this.logger.error(
 				`There was a problem starting the waiting execution with id "${executionId}": "${error.message}"`,
 				{ executionId },
 			);

View File

@@ -1,4 +1,4 @@
import { NodeHelpers, Workflow, LoggerProxy as Logger } from 'n8n-workflow'; import { NodeHelpers, Workflow } from 'n8n-workflow';
import { Service } from 'typedi'; import { Service } from 'typedi';
import type express from 'express'; import type express from 'express';
@@ -14,13 +14,15 @@ import type {
import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'; import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData';
import { ExecutionRepository } from '@db/repositories'; import { ExecutionRepository } from '@db/repositories';
import { OwnershipService } from './services/ownership.service'; import { OwnershipService } from './services/ownership.service';
import { Logger } from '@/Logger';
@Service() @Service()
export class WaitingWebhooks implements IWebhookManager { export class WaitingWebhooks implements IWebhookManager {
constructor( constructor(
private nodeTypes: NodeTypes, private readonly logger: Logger,
private executionRepository: ExecutionRepository, private readonly nodeTypes: NodeTypes,
private ownershipService: OwnershipService, private readonly executionRepository: ExecutionRepository,
private readonly ownershipService: OwnershipService,
) {} ) {}
// TODO: implement `getWebhookMethods` for CORS support // TODO: implement `getWebhookMethods` for CORS support
@@ -30,7 +32,7 @@ export class WaitingWebhooks implements IWebhookManager {
res: express.Response, res: express.Response,
): Promise<IResponseCallbackData> { ): Promise<IResponseCallbackData> {
const { path: executionId, suffix } = req.params; const { path: executionId, suffix } = req.params;
Logger.debug(`Received waiting-webhook "${req.method}" for execution "${executionId}"`); this.logger.debug(`Received waiting-webhook "${req.method}" for execution "${executionId}"`);
// Reset request parameters // Reset request parameters
req.params = {} as WaitingWebhookRequest['params']; req.params = {} as WaitingWebhookRequest['params'];

View File

@@ -39,7 +39,6 @@ import {
createDeferredPromise, createDeferredPromise,
ErrorReporterProxy as ErrorReporter, ErrorReporterProxy as ErrorReporter,
FORM_TRIGGER_PATH_IDENTIFIER, FORM_TRIGGER_PATH_IDENTIFIER,
LoggerProxy as Logger,
NodeHelpers, NodeHelpers,
} from 'n8n-workflow'; } from 'n8n-workflow';
@@ -64,6 +63,7 @@ import { EventsService } from '@/services/events.service';
import { OwnershipService } from './services/ownership.service'; import { OwnershipService } from './services/ownership.service';
import { parseBody } from './middlewares'; import { parseBody } from './middlewares';
import { WorkflowsService } from './workflows/workflows.services'; import { WorkflowsService } from './workflows/workflows.services';
import { Logger } from './Logger';
const pipeline = promisify(stream.pipeline); const pipeline = promisify(stream.pipeline);
@@ -534,7 +534,7 @@ export async function executeWebhook(
}) })
.catch(async (error) => { .catch(async (error) => {
ErrorReporter.error(error); ErrorReporter.error(error);
Logger.error( Container.get(Logger).error(
`Error with Webhook-Response for execution "${executionId}": "${error.message}"`, `Error with Webhook-Response for execution "${executionId}": "${error.message}"`,
{ executionId, workflowId: workflow.id }, { executionId, workflowId: workflow.id },
); );
@@ -551,7 +551,7 @@ export async function executeWebhook(
responsePromise, responsePromise,
); );
Logger.verbose( Container.get(Logger).verbose(
`Started execution of workflow "${workflow.name}" from webhook with execution ID ${executionId}`, `Started execution of workflow "${workflow.name}" from webhook with execution ID ${executionId}`,
{ executionId }, { executionId },
); );
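
`executeWebhook` is a plain exported function rather than a class, so there is no constructor to inject into; the hunks above therefore resolve the logger from the container at the call site. A small sketch of that fallback pattern, where the `notifyFailure` helper is illustrative, not part of the commit.

import { Container } from 'typedi';
import { Logger } from '@/Logger';

// For free functions, resolve the service lazily where it is needed.
function notifyFailure(executionId: string, workflowId: string, error: Error) {
	Container.get(Logger).error(
		`Error with Webhook-Response for execution "${executionId}": "${error.message}"`,
		{ executionId, workflowId },
	);
}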

View File

@@ -27,7 +27,6 @@ import type {
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
ErrorReporterProxy as ErrorReporter, ErrorReporterProxy as ErrorReporter,
LoggerProxy as Logger,
NodeOperationError, NodeOperationError,
Workflow, Workflow,
WorkflowHooks, WorkflowHooks,
@@ -64,6 +63,7 @@ import {
updateExistingExecution, updateExistingExecution,
} from './executionLifecycleHooks/shared/sharedHookFunctions'; } from './executionLifecycleHooks/shared/sharedHookFunctions';
import { restoreBinaryDataId } from './executionLifecycleHooks/restoreBinaryDataId'; import { restoreBinaryDataId } from './executionLifecycleHooks/restoreBinaryDataId';
import { Logger } from './Logger';
const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType'); const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');
@@ -118,8 +118,9 @@ export function executeErrorWorkflow(
executionId?: string, executionId?: string,
retryOf?: string, retryOf?: string,
): void { ): void {
// Check if there was an error and if so if an errorWorkflow or a trigger is set const logger = Container.get(Logger);
// Check if there was an error and if so if an errorWorkflow or a trigger is set
let pastExecutionUrl: string | undefined; let pastExecutionUrl: string | undefined;
if (executionId !== undefined) { if (executionId !== undefined) {
pastExecutionUrl = `${WebhookHelpers.getWebhookBaseUrl()}workflow/${ pastExecutionUrl = `${WebhookHelpers.getWebhookBaseUrl()}workflow/${
@@ -165,7 +166,7 @@ export function executeErrorWorkflow(
// To avoid an infinite loop do not run the error workflow again if the error-workflow itself failed and it is its own error-workflow. // To avoid an infinite loop do not run the error workflow again if the error-workflow itself failed and it is its own error-workflow.
const { errorWorkflow } = workflowData.settings ?? {}; const { errorWorkflow } = workflowData.settings ?? {};
if (errorWorkflow && !(mode === 'error' && workflowId && errorWorkflow === workflowId)) { if (errorWorkflow && !(mode === 'error' && workflowId && errorWorkflow === workflowId)) {
Logger.verbose('Start external error workflow', { logger.verbose('Start external error workflow', {
executionId, executionId,
errorWorkflowId: errorWorkflow, errorWorkflowId: errorWorkflow,
workflowId, workflowId,
@@ -187,7 +188,7 @@ export function executeErrorWorkflow(
}) })
.catch((error: Error) => { .catch((error: Error) => {
ErrorReporter.error(error); ErrorReporter.error(error);
Logger.error( logger.error(
`Could not execute ErrorWorkflow for execution ID ${this.executionId} because of error querying the workflow owner`, `Could not execute ErrorWorkflow for execution ID ${this.executionId} because of error querying the workflow owner`,
{ {
executionId, executionId,
@@ -203,7 +204,7 @@ export function executeErrorWorkflow(
workflowId !== undefined && workflowId !== undefined &&
workflowData.nodes.some((node) => node.type === ERROR_TRIGGER_TYPE) workflowData.nodes.some((node) => node.type === ERROR_TRIGGER_TYPE)
) { ) {
Logger.verbose('Start internal error workflow', { executionId, workflowId }); logger.verbose('Start internal error workflow', { executionId, workflowId });
void Container.get(OwnershipService) void Container.get(OwnershipService)
.getWorkflowOwnerCached(workflowId) .getWorkflowOwnerCached(workflowId)
.then((user) => { .then((user) => {
@@ -218,6 +219,7 @@ export function executeErrorWorkflow(
* *
*/ */
function hookFunctionsPush(): IWorkflowExecuteHooks { function hookFunctionsPush(): IWorkflowExecuteHooks {
const logger = Container.get(Logger);
const pushInstance = Container.get(Push); const pushInstance = Container.get(Push);
return { return {
nodeExecuteBefore: [ nodeExecuteBefore: [
@@ -229,7 +231,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
return; return;
} }
Logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, { logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, {
executionId, executionId,
sessionId, sessionId,
workflowId: this.workflowData.id, workflowId: this.workflowData.id,
@@ -246,7 +248,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
return; return;
} }
Logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, { logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, {
executionId, executionId,
sessionId, sessionId,
workflowId: this.workflowData.id, workflowId: this.workflowData.id,
@@ -259,7 +261,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
async function (this: WorkflowHooks): Promise<void> { async function (this: WorkflowHooks): Promise<void> {
const { sessionId, executionId } = this; const { sessionId, executionId } = this;
const { id: workflowId, name: workflowName } = this.workflowData; const { id: workflowId, name: workflowName } = this.workflowData;
Logger.debug('Executing hook (hookFunctionsPush)', { logger.debug('Executing hook (hookFunctionsPush)', {
executionId, executionId,
sessionId, sessionId,
workflowId, workflowId,
@@ -291,7 +293,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
): Promise<void> { ): Promise<void> {
const { sessionId, executionId, retryOf } = this; const { sessionId, executionId, retryOf } = this;
const { id: workflowId } = this.workflowData; const { id: workflowId } = this.workflowData;
Logger.debug('Executing hook (hookFunctionsPush)', { logger.debug('Executing hook (hookFunctionsPush)', {
executionId, executionId,
sessionId, sessionId,
workflowId, workflowId,
@@ -322,7 +324,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
} }
// Push data to editor-ui once workflow finished // Push data to editor-ui once workflow finished
Logger.debug(`Save execution progress to database for execution ID ${executionId} `, { logger.debug(`Save execution progress to database for execution ID ${executionId} `, {
executionId, executionId,
workflowId, workflowId,
}); });
@@ -340,6 +342,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
} }
export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowExecuteHooks { export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowExecuteHooks {
const logger = Container.get(Logger);
const externalHooks = Container.get(ExternalHooks); const externalHooks = Container.get(ExternalHooks);
return { return {
workflowExecuteBefore: [ workflowExecuteBefore: [
@@ -368,7 +371,7 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
} }
try { try {
Logger.debug( logger.debug(
`Save execution progress to database for execution ID ${this.executionId} `, `Save execution progress to database for execution ID ${this.executionId} `,
{ executionId: this.executionId, nodeName }, { executionId: this.executionId, nodeName },
); );
@@ -436,7 +439,7 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
// For busy machines, we may get "Database is locked" errors. // For busy machines, we may get "Database is locked" errors.
// We do this to prevent crashes and executions ending in `unknown` state. // We do this to prevent crashes and executions ending in `unknown` state.
Logger.error( logger.error(
`Failed saving execution progress to database for execution ID ${this.executionId} (hookFunctionsPreExecute, nodeExecuteAfter)`, `Failed saving execution progress to database for execution ID ${this.executionId} (hookFunctionsPreExecute, nodeExecuteAfter)`,
{ {
...err, ...err,
@@ -456,6 +459,7 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
* *
*/ */
function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
const logger = Container.get(Logger);
const internalHooks = Container.get(InternalHooks); const internalHooks = Container.get(InternalHooks);
const eventsService = Container.get(EventsService); const eventsService = Container.get(EventsService);
return { return {
@@ -476,7 +480,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
fullRunData: IRun, fullRunData: IRun,
newStaticData: IDataObject, newStaticData: IDataObject,
): Promise<void> { ): Promise<void> {
Logger.debug('Executing hook (hookFunctionsSave)', { logger.debug('Executing hook (hookFunctionsSave)', {
executionId: this.executionId, executionId: this.executionId,
workflowId: this.workflowData.id, workflowId: this.workflowData.id,
}); });
@@ -497,7 +501,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
); );
} catch (e) { } catch (e) {
ErrorReporter.error(e); ErrorReporter.error(e);
Logger.error( logger.error(
`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (hookFunctionsSave)`, `There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (hookFunctionsSave)`,
{ executionId: this.executionId, workflowId: this.workflowData.id }, { executionId: this.executionId, workflowId: this.workflowData.id },
); );
@@ -581,7 +585,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
} }
} catch (error) { } catch (error) {
ErrorReporter.error(error); ErrorReporter.error(error);
Logger.error(`Failed saving execution data to DB on execution ID ${this.executionId}`, { logger.error(`Failed saving execution data to DB on execution ID ${this.executionId}`, {
executionId: this.executionId, executionId: this.executionId,
workflowId: this.workflowData.id, workflowId: this.workflowData.id,
error, error,
@@ -615,6 +619,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
* *
*/ */
function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
const logger = Container.get(Logger);
const internalHooks = Container.get(InternalHooks); const internalHooks = Container.get(InternalHooks);
const eventsService = Container.get(EventsService); const eventsService = Container.get(EventsService);
return { return {
@@ -639,7 +644,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
fullRunData: IRun, fullRunData: IRun,
newStaticData: IDataObject, newStaticData: IDataObject,
): Promise<void> { ): Promise<void> {
Logger.debug('Executing hook (hookFunctionsSaveWorker)', { logger.debug('Executing hook (hookFunctionsSaveWorker)', {
executionId: this.executionId, executionId: this.executionId,
workflowId: this.workflowData.id, workflowId: this.workflowData.id,
}); });
@@ -653,7 +658,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
); );
} catch (e) { } catch (e) {
ErrorReporter.error(e); ErrorReporter.error(e);
Logger.error( logger.error(
`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (workflowExecuteAfter)`, `There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (workflowExecuteAfter)`,
{ sessionId: this.sessionId, workflowId: this.workflowData.id }, { sessionId: this.sessionId, workflowId: this.workflowData.id },
); );
@@ -986,15 +991,16 @@ async function executeWorkflow(
} }
export function setExecutionStatus(status: ExecutionStatus) { export function setExecutionStatus(status: ExecutionStatus) {
const logger = Container.get(Logger);
if (this.executionId === undefined) { if (this.executionId === undefined) {
Logger.debug(`Setting execution status "${status}" failed because executionId is undefined`); logger.debug(`Setting execution status "${status}" failed because executionId is undefined`);
return; return;
} }
Logger.debug(`Setting execution status for ${this.executionId} to "${status}"`); logger.debug(`Setting execution status for ${this.executionId} to "${status}"`);
Container.get(ActiveExecutions) Container.get(ActiveExecutions)
.setStatus(this.executionId, status) .setStatus(this.executionId, status)
.catch((error) => { .catch((error) => {
Logger.debug(`Setting execution status "${status}" failed: ${error.message}`); logger.debug(`Setting execution status "${status}" failed: ${error.message}`);
}); });
} }
@@ -1009,7 +1015,8 @@ export function sendDataToUI(type: string, data: IDataObject | IDataObject[]) {
const pushInstance = Container.get(Push); const pushInstance = Container.get(Push);
pushInstance.send(type as IPushDataType, data, sessionId); pushInstance.send(type as IPushDataType, data, sessionId);
} catch (error) { } catch (error) {
Logger.warn(`There was a problem sending message to UI: ${error.message}`); const logger = Container.get(Logger);
logger.warn(`There was a problem sending message to UI: ${error.message}`);
} }
} }
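
The hook factories above (`hookFunctionsPush`, `hookFunctionsPreExecute`, `hookFunctionsSave`, `hookFunctionsSaveWorker`) resolve the logger once at the top and let the returned hooks close over it, instead of touching a static proxy inside every hook body. A reduced sketch of that shape, with hypothetical hook names and simplified signatures.

import { Container } from 'typedi';
import { Logger } from '@/Logger';

function exampleHookFactory() {
	// Resolve once; every hook below shares the same instance via closure.
	const logger = Container.get(Logger);
	return {
		nodeExecuteBefore: [
			async (executionId: string, nodeName: string) => {
				logger.debug(`Executing hook on node "${nodeName}"`, { executionId });
			},
		],
		workflowExecuteAfter: [
			async (executionId: string) => {
				logger.debug('Executing hook (workflowExecuteAfter)', { executionId });
			},
		],
	};
}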

View File

@@ -16,7 +16,6 @@ import type {
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
ErrorReporterProxy as ErrorReporter, ErrorReporterProxy as ErrorReporter,
LoggerProxy as Logger,
NodeOperationError, NodeOperationError,
SubworkflowOperationError, SubworkflowOperationError,
Workflow, Workflow,
@@ -44,6 +43,7 @@ import { RoleService } from './services/role.service';
import { ExecutionRepository, RoleRepository } from './databases/repositories'; import { ExecutionRepository, RoleRepository } from './databases/repositories';
import { VariablesService } from './environments/variables/variables.service'; import { VariablesService } from './environments/variables/variables.service';
import type { CredentialsEntity } from './databases/entities/CredentialsEntity'; import type { CredentialsEntity } from './databases/entities/CredentialsEntity';
import { Logger } from './Logger';
const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType'); const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');
@@ -143,13 +143,14 @@ export async function executeErrorWorkflow(
workflowErrorData: IWorkflowErrorData, workflowErrorData: IWorkflowErrorData,
runningUser: User, runningUser: User,
): Promise<void> { ): Promise<void> {
const logger = Container.get(Logger);
// Wrap everything in try/catch to make sure that no errors bubble up and all get caught here // Wrap everything in try/catch to make sure that no errors bubble up and all get caught here
try { try {
const workflowData = await Db.collections.Workflow.findOneBy({ id: workflowId }); const workflowData = await Db.collections.Workflow.findOneBy({ id: workflowId });
if (workflowData === null) { if (workflowData === null) {
// The error workflow could not be found // The error workflow could not be found
Logger.error( logger.error(
`Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find error workflow "${workflowId}"`, `Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find error workflow "${workflowId}"`,
{ workflowId }, { workflowId },
); );
@@ -205,7 +206,7 @@ export async function executeErrorWorkflow(
await Container.get(ExecutionRepository).createNewExecution(fullExecutionData); await Container.get(ExecutionRepository).createNewExecution(fullExecutionData);
} }
Logger.info('Error workflow execution blocked due to subworkflow settings', { logger.info('Error workflow execution blocked due to subworkflow settings', {
erroredWorkflowId: workflowErrorData.workflow.id, erroredWorkflowId: workflowErrorData.workflow.id,
errorWorkflowId: workflowId, errorWorkflowId: workflowId,
}); });
@@ -222,7 +223,7 @@ export async function executeErrorWorkflow(
} }
if (workflowStartNode === undefined) { if (workflowStartNode === undefined) {
Logger.error( logger.error(
`Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find "${ERROR_TRIGGER_TYPE}" in workflow "${workflowId}"`, `Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find "${ERROR_TRIGGER_TYPE}" in workflow "${workflowId}"`,
); );
return; return;
@@ -271,7 +272,7 @@ export async function executeErrorWorkflow(
await workflowRunner.run(runData); await workflowRunner.run(runData);
} catch (error) { } catch (error) {
ErrorReporter.error(error); ErrorReporter.error(error);
Logger.error( logger.error(
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
`Calling Error Workflow for "${workflowErrorData.workflow.id}": "${error.message}"`, `Calling Error Workflow for "${workflowErrorData.workflow.id}": "${error.message}"`,
{ workflowId: workflowErrorData.workflow.id }, { workflowId: workflowErrorData.workflow.id },
@@ -533,9 +534,10 @@ export function validateWorkflowCredentialUsage(
const isTamperingAttempt = (inaccessibleCredNodeId: string) => const isTamperingAttempt = (inaccessibleCredNodeId: string) =>
!previouslyExistingNodeIds.includes(inaccessibleCredNodeId); !previouslyExistingNodeIds.includes(inaccessibleCredNodeId);
const logger = Container.get(Logger);
nodesWithCredentialsUserDoesNotHaveAccessTo.forEach((node) => { nodesWithCredentialsUserDoesNotHaveAccessTo.forEach((node) => {
if (isTamperingAttempt(node.id)) { if (isTamperingAttempt(node.id)) {
Logger.verbose('Blocked workflow update due to tampering attempt', { logger.verbose('Blocked workflow update due to tampering attempt', {
nodeType: node.type, nodeType: node.type,
nodeName: node.name, nodeName: node.name,
nodeId: node.id, nodeId: node.id,
@@ -553,7 +555,7 @@ export function validateWorkflowCredentialUsage(
(newWorkflowNode) => newWorkflowNode.id === node.id, (newWorkflowNode) => newWorkflowNode.id === node.id,
); );
Logger.debug('Replacing node with previous version when saving updated workflow', { logger.debug('Replacing node with previous version when saving updated workflow', {
nodeType: node.type, nodeType: node.type,
nodeName: node.name, nodeName: node.name,
nodeId: node.id, nodeId: node.id,

View File

@@ -18,7 +18,6 @@ import type {
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
ErrorReporterProxy as ErrorReporter, ErrorReporterProxy as ErrorReporter,
LoggerProxy as Logger,
Workflow, Workflow,
WorkflowOperationError, WorkflowOperationError,
} from 'n8n-workflow'; } from 'n8n-workflow';
@@ -54,8 +53,11 @@ import { recoverExecutionDataFromEventLogMessages } from './eventbus/MessageEven
import { Container } from 'typedi'; import { Container } from 'typedi';
import { InternalHooks } from './InternalHooks'; import { InternalHooks } from './InternalHooks';
import { ExecutionRepository } from '@db/repositories'; import { ExecutionRepository } from '@db/repositories';
import { Logger } from './Logger';
export class WorkflowRunner { export class WorkflowRunner {
logger: Logger;
activeExecutions: ActiveExecutions; activeExecutions: ActiveExecutions;
push: Push; push: Push;
@@ -63,6 +65,7 @@ export class WorkflowRunner {
jobQueue: Queue; jobQueue: Queue;
constructor() { constructor() {
this.logger = Container.get(Logger);
this.push = Container.get(Push); this.push = Container.get(Push);
this.activeExecutions = Container.get(ActiveExecutions); this.activeExecutions = Container.get(ActiveExecutions);
} }
@@ -298,14 +301,14 @@ export class WorkflowRunner {
const executionId = await this.activeExecutions.add(data, undefined, restartExecutionId); const executionId = await this.activeExecutions.add(data, undefined, restartExecutionId);
additionalData.executionId = executionId; additionalData.executionId = executionId;
Logger.verbose( this.logger.verbose(
`Execution for workflow ${data.workflowData.name} was assigned id ${executionId}`, `Execution for workflow ${data.workflowData.name} was assigned id ${executionId}`,
{ executionId }, { executionId },
); );
let workflowExecution: PCancelable<IRun>; let workflowExecution: PCancelable<IRun>;
try { try {
Logger.verbose( this.logger.verbose(
`Execution for workflow ${data.workflowData.name} was assigned id ${executionId}`, `Execution for workflow ${data.workflowData.name} was assigned id ${executionId}`,
{ executionId }, { executionId },
); );
@@ -349,7 +352,7 @@ export class WorkflowRunner {
}); });
if (data.executionData !== undefined) { if (data.executionData !== undefined) {
Logger.debug(`Execution ID ${executionId} had Execution data. Running with payload.`, { this.logger.debug(`Execution ID ${executionId} had Execution data. Running with payload.`, {
executionId, executionId,
}); });
const workflowExecute = new WorkflowExecute( const workflowExecute = new WorkflowExecute(
@@ -363,7 +366,9 @@ export class WorkflowRunner {
data.startNodes === undefined || data.startNodes === undefined ||
data.startNodes.length === 0 data.startNodes.length === 0
) { ) {
Logger.debug(`Execution ID ${executionId} will run executing all nodes.`, { executionId }); this.logger.debug(`Execution ID ${executionId} will run executing all nodes.`, {
executionId,
});
// Execute all nodes // Execute all nodes
const startNode = WorkflowHelpers.getExecutionStartNode(data, workflow); const startNode = WorkflowHelpers.getExecutionStartNode(data, workflow);
@@ -377,7 +382,7 @@ export class WorkflowRunner {
data.pinData, data.pinData,
); );
} else { } else {
Logger.debug(`Execution ID ${executionId} is a partial execution.`, { executionId }); this.logger.debug(`Execution ID ${executionId} is a partial execution.`, { executionId });
// Execute only the nodes between start and destination nodes // Execute only the nodes between start and destination nodes
const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); const workflowExecute = new WorkflowExecute(additionalData, data.executionMode);
workflowExecution = workflowExecute.runPartialWorkflow( workflowExecution = workflowExecute.runPartialWorkflow(
@@ -576,7 +581,7 @@ export class WorkflowRunner {
data.workflowData, data.workflowData,
{ retryOf: data.retryOf ? data.retryOf.toString() : undefined }, { retryOf: data.retryOf ? data.retryOf.toString() : undefined },
); );
Logger.error(`Problem with execution ${executionId}: ${error.message}. Aborting.`); this.logger.error(`Problem with execution ${executionId}: ${error.message}. Aborting.`);
if (clearWatchdogInterval !== undefined) { if (clearWatchdogInterval !== undefined) {
clearWatchdogInterval(); clearWatchdogInterval();
} }
@@ -590,11 +595,11 @@ export class WorkflowRunner {
this.activeExecutions.getPostExecutePromiseCount(executionId) > 0; this.activeExecutions.getPostExecutePromiseCount(executionId) > 0;
if (executionHasPostExecutionPromises) { if (executionHasPostExecutionPromises) {
Logger.debug( this.logger.debug(
`Reading execution data for execution ${executionId} from db for PostExecutionPromise.`, `Reading execution data for execution ${executionId} from db for PostExecutionPromise.`,
); );
} else { } else {
Logger.debug( this.logger.debug(
`Skipping execution data for execution ${executionId} since there are no PostExecutionPromise.`, `Skipping execution data for execution ${executionId} since there are no PostExecutionPromise.`,
); );
} }
@@ -737,7 +742,7 @@ export class WorkflowRunner {
// Listen to data from the subprocess // Listen to data from the subprocess
subprocess.on('message', async (message: IProcessMessage) => { subprocess.on('message', async (message: IProcessMessage) => {
Logger.debug( this.logger.debug(
`Received child process message of type ${message.type} for execution ID ${executionId}.`, `Received child process message of type ${message.type} for execution ID ${executionId}.`,
{ executionId }, { executionId },
); );
@@ -811,7 +816,7 @@ export class WorkflowRunner {
// Also get informed when the processes does exit especially when it did crash or timed out // Also get informed when the processes does exit especially when it did crash or timed out
subprocess.on('exit', async (code, signal) => { subprocess.on('exit', async (code, signal) => {
if (signal === 'SIGTERM') { if (signal === 'SIGTERM') {
Logger.debug(`Subprocess for execution ID ${executionId} timed out.`, { executionId }); this.logger.debug(`Subprocess for execution ID ${executionId} timed out.`, { executionId });
// Execution timed out and its process has been terminated // Execution timed out and its process has been terminated
const timeoutError = new WorkflowOperationError('Workflow execution timed out!'); const timeoutError = new WorkflowOperationError('Workflow execution timed out!');
@@ -823,7 +828,7 @@ export class WorkflowRunner {
workflowHooks, workflowHooks,
); );
} else if (code !== 0) { } else if (code !== 0) {
Logger.debug( this.logger.debug(
`Subprocess for execution ID ${executionId} finished with error code ${code}.`, `Subprocess for execution ID ${executionId} finished with error code ${code}.`,
{ executionId }, { executionId },
); );
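
WorkflowRunner is still created with `new` elsewhere in the codebase, so instead of constructor parameters it assigns `this.logger = Container.get(Logger)` alongside its other dependencies. A sketch of that intermediate style, using a trimmed-down, illustrative class.

import { Container } from 'typedi';
import { Logger } from '@/Logger';

class ExampleRunner {
	logger: Logger;

	constructor() {
		// Pulled from the container manually because callers still write `new ExampleRunner()`.
		this.logger = Container.get(Logger);
	}

	run(executionId: string) {
		this.logger.verbose(`Execution was assigned id ${executionId}`, { executionId });
	}
}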

View File

@@ -17,7 +17,6 @@ import type {
IDataObject, IDataObject,
IExecuteResponsePromiseData, IExecuteResponsePromiseData,
IExecuteWorkflowInfo, IExecuteWorkflowInfo,
ILogger,
INode, INode,
INodeExecutionData, INodeExecutionData,
IRun, IRun,
@@ -30,7 +29,6 @@ import type {
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
ErrorReporterProxy as ErrorReporter, ErrorReporterProxy as ErrorReporter,
LoggerProxy,
Workflow, Workflow,
WorkflowHooks, WorkflowHooks,
WorkflowOperationError, WorkflowOperationError,
@@ -46,7 +44,7 @@ import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
import * as WebhookHelpers from '@/WebhookHelpers'; import * as WebhookHelpers from '@/WebhookHelpers';
import * as WorkflowHelpers from '@/WorkflowHelpers'; import * as WorkflowHelpers from '@/WorkflowHelpers';
import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'; import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData';
import { getLogger } from '@/Logger'; import { Logger } from '@/Logger';
import config from '@/config'; import config from '@/config';
import { generateFailedExecutionFromError } from '@/WorkflowHelpers'; import { generateFailedExecutionFromError } from '@/WorkflowHelpers';
@@ -63,7 +61,7 @@ if (process.env.NODEJS_PREFER_IPV4 === 'true') {
class WorkflowRunnerProcess { class WorkflowRunnerProcess {
data: IWorkflowExecutionDataProcessWithExecution | undefined; data: IWorkflowExecutionDataProcessWithExecution | undefined;
logger: ILogger; logger: Logger;
startedAt = new Date(); startedAt = new Date();
@@ -85,19 +83,20 @@ class WorkflowRunnerProcess {
}, 30000); }, 30000);
} }
constructor() {
this.logger = Container.get(Logger);
}
async runWorkflow(inputData: IWorkflowExecutionDataProcessWithExecution): Promise<IRun> { async runWorkflow(inputData: IWorkflowExecutionDataProcessWithExecution): Promise<IRun> {
process.once('SIGTERM', WorkflowRunnerProcess.stopProcess); process.once('SIGTERM', WorkflowRunnerProcess.stopProcess);
process.once('SIGINT', WorkflowRunnerProcess.stopProcess); process.once('SIGINT', WorkflowRunnerProcess.stopProcess);
await initErrorHandling(); await initErrorHandling();
const logger = (this.logger = getLogger());
LoggerProxy.init(logger);
this.data = inputData; this.data = inputData;
const { userId } = inputData; const { userId } = inputData;
logger.verbose('Initializing n8n sub-process', { this.logger.verbose('Initializing n8n sub-process', {
pid: process.pid, pid: process.pid,
workflowId: this.data.workflowData.id, workflowId: this.data.workflowData.id,
}); });

View File

@@ -2,10 +2,10 @@ import 'reflect-metadata';
import { Command } from '@oclif/command'; import { Command } from '@oclif/command';
import { ExitError } from '@oclif/errors'; import { ExitError } from '@oclif/errors';
import { Container } from 'typedi'; import { Container } from 'typedi';
import { LoggerProxy, ErrorReporterProxy as ErrorReporter, sleep } from 'n8n-workflow'; import { ErrorReporterProxy as ErrorReporter, sleep } from 'n8n-workflow';
import { BinaryDataService, InstanceSettings, ObjectStoreService } from 'n8n-core'; import { BinaryDataService, InstanceSettings, ObjectStoreService } from 'n8n-core';
import type { AbstractServer } from '@/AbstractServer'; import type { AbstractServer } from '@/AbstractServer';
import { getLogger } from '@/Logger'; import { Logger } from '@/Logger';
import config from '@/config'; import config from '@/config';
import * as Db from '@/Db'; import * as Db from '@/Db';
import * as CrashJournal from '@/CrashJournal'; import * as CrashJournal from '@/CrashJournal';
@@ -24,7 +24,7 @@ import { generateHostInstanceId } from '../databases/utils/generators';
import { WorkflowHistoryManager } from '@/workflows/workflowHistory/workflowHistoryManager.ee'; import { WorkflowHistoryManager } from '@/workflows/workflowHistory/workflowHistoryManager.ee';
export abstract class BaseCommand extends Command { export abstract class BaseCommand extends Command {
protected logger = LoggerProxy.init(getLogger()); protected logger = Container.get(Logger);
protected externalHooks: IExternalHooksClass; protected externalHooks: IExternalHooksClass;
@@ -64,12 +64,12 @@ export abstract class BaseCommand extends Command {
const dbType = config.getEnv('database.type'); const dbType = config.getEnv('database.type');
if (['mysqldb', 'mariadb'].includes(dbType)) { if (['mysqldb', 'mariadb'].includes(dbType)) {
LoggerProxy.warn( this.logger.warn(
'Support for MySQL/MariaDB has been deprecated and will be removed with an upcoming version of n8n. Please migrate to PostgreSQL.', 'Support for MySQL/MariaDB has been deprecated and will be removed with an upcoming version of n8n. Please migrate to PostgreSQL.',
); );
} }
if (process.env.EXECUTIONS_PROCESS === 'own') { if (process.env.EXECUTIONS_PROCESS === 'own') {
LoggerProxy.warn( this.logger.warn(
'Own mode has been deprecated and will be removed in a future version of n8n. If you need the isolation and performance gains, please consider using queue mode.', 'Own mode has been deprecated and will be removed in a future version of n8n. If you need the isolation and performance gains, please consider using queue mode.',
); );
} }
@@ -129,7 +129,7 @@ export abstract class BaseCommand extends Command {
const isLicensed = Container.get(License).isFeatureEnabled(LICENSE_FEATURES.BINARY_DATA_S3); const isLicensed = Container.get(License).isFeatureEnabled(LICENSE_FEATURES.BINARY_DATA_S3);
if (isSelected && isAvailable && isLicensed) { if (isSelected && isAvailable && isLicensed) {
LoggerProxy.debug( this.logger.debug(
'License found for external storage - object store to init in read-write mode', 'License found for external storage - object store to init in read-write mode',
); );
@@ -139,7 +139,7 @@ export abstract class BaseCommand extends Command {
} }
if (isSelected && isAvailable && !isLicensed) { if (isSelected && isAvailable && !isLicensed) {
LoggerProxy.debug( this.logger.debug(
'No license found for external storage - object store to init with writes blocked. To enable writes, please upgrade to a license that supports this feature.', 'No license found for external storage - object store to init with writes blocked. To enable writes, please upgrade to a license that supports this feature.',
); );
@@ -149,7 +149,7 @@ export abstract class BaseCommand extends Command {
} }
if (!isSelected && isAvailable) { if (!isSelected && isAvailable) {
LoggerProxy.debug( this.logger.debug(
'External storage unselected but available - object store to init with writes unused', 'External storage unselected but available - object store to init with writes unused',
); );
@@ -204,17 +204,17 @@ export abstract class BaseCommand extends Command {
); );
} }
LoggerProxy.debug('Initializing object store service'); this.logger.debug('Initializing object store service');
try { try {
await objectStoreService.init(host, bucket, credentials); await objectStoreService.init(host, bucket, credentials);
objectStoreService.setReadonly(options.isReadOnly); objectStoreService.setReadonly(options.isReadOnly);
LoggerProxy.debug('Object store init completed'); this.logger.debug('Object store init completed');
} catch (e) { } catch (e) {
const error = e instanceof Error ? e : new Error(`${e}`); const error = e instanceof Error ? e : new Error(`${e}`);
LoggerProxy.debug('Object store init failed', { error }); this.logger.debug('Object store init failed', { error });
} }
} }
@@ -223,7 +223,7 @@ export abstract class BaseCommand extends Command {
await this.initObjectStoreService(); await this.initObjectStoreService();
} catch (e) { } catch (e) {
const error = e instanceof Error ? e : new Error(`${e}`); const error = e instanceof Error ? e : new Error(`${e}`);
LoggerProxy.error(`Failed to init object store: ${error.message}`, { error }); this.logger.error(`Failed to init object store: ${error.message}`, { error });
process.exit(1); process.exit(1);
} }
@@ -246,14 +246,14 @@ export abstract class BaseCommand extends Command {
const hasCert = (await license.loadCertStr()).length > 0; const hasCert = (await license.loadCertStr()).length > 0;
if (hasCert) { if (hasCert) {
return LoggerProxy.debug('Skipping license activation'); return this.logger.debug('Skipping license activation');
} }
try { try {
LoggerProxy.debug('Attempting license activation'); this.logger.debug('Attempting license activation');
await license.activate(activationKey); await license.activate(activationKey);
} catch (e) { } catch (e) {
LoggerProxy.error('Could not activate license', e as Error); this.logger.error('Could not activate license', e as Error);
} }
} }
} }
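
For the CLI commands, `protected logger = LoggerProxy.init(getLogger())` collapses to a plain property initializer that asks the container for the already-constructed service; no explicit init step remains. A minimal sketch, with an illustrative command class and method.

import { Command } from '@oclif/command';
import { Container } from 'typedi';
import { Logger } from '@/Logger';

abstract class ExampleBaseCommand extends Command {
	// One shared Logger instance per process; subclasses use `this.logger` directly.
	protected logger = Container.get(Logger);

	protected warnOnDeprecatedDb(dbType: string) {
		if (['mysqldb', 'mariadb'].includes(dbType)) {
			this.logger.warn('Support for MySQL/MariaDB has been deprecated ...');
		}
	}
}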

View File

@@ -1,8 +1,8 @@
import { Command, flags } from '@oclif/command'; import { Command, flags } from '@oclif/command';
import type { DataSourceOptions as ConnectionOptions } from 'typeorm'; import type { DataSourceOptions as ConnectionOptions } from 'typeorm';
import { DataSource as Connection } from 'typeorm'; import { DataSource as Connection } from 'typeorm';
import { LoggerProxy } from 'n8n-workflow'; import { Container } from 'typedi';
import { getLogger } from '@/Logger'; import { Logger } from '@/Logger';
import { getConnectionOptions } from '@/Db'; import { getConnectionOptions } from '@/Db';
import type { Migration } from '@db/types'; import type { Migration } from '@db/types';
import { wrapMigration } from '@db/utils/migrationHelpers'; import { wrapMigration } from '@db/utils/migrationHelpers';
@@ -17,7 +17,7 @@ export class DbRevertMigrationCommand extends Command {
help: flags.help({ char: 'h' }), help: flags.help({ char: 'h' }),
}; };
protected logger = LoggerProxy.init(getLogger()); protected logger = Container.get(Logger);
private connection: Connection; private connection: Connection;

View File

@@ -1,5 +1,5 @@
import { flags } from '@oclif/command'; import { flags } from '@oclif/command';
import { LoggerProxy, sleep } from 'n8n-workflow'; import { sleep } from 'n8n-workflow';
import config from '@/config'; import config from '@/config';
import { ActiveExecutions } from '@/ActiveExecutions'; import { ActiveExecutions } from '@/ActiveExecutions';
import { WebhookServer } from '@/WebhookServer'; import { WebhookServer } from '@/WebhookServer';
@@ -36,7 +36,7 @@ export class Webhook extends BaseCommand {
* get removed. * get removed.
*/ */
async stopProcess() { async stopProcess() {
LoggerProxy.info('\nStopping n8n...'); this.logger.info('\nStopping n8n...');
try { try {
await this.externalHooks.run('n8n.stop', []); await this.externalHooks.run('n8n.stop', []);
@@ -54,7 +54,7 @@ export class Webhook extends BaseCommand {
let count = 0; let count = 0;
while (executingWorkflows.length !== 0) { while (executingWorkflows.length !== 0) {
if (count++ % 4 === 0) { if (count++ % 4 === 0) {
LoggerProxy.info( this.logger.info(
`Waiting for ${executingWorkflows.length} active executions to finish...`, `Waiting for ${executingWorkflows.length} active executions to finish...`,
); );
} }

View File

@@ -13,7 +13,7 @@ import type {
INodeTypes, INodeTypes,
IRun, IRun,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { Workflow, NodeOperationError, LoggerProxy, sleep } from 'n8n-workflow'; import { Workflow, NodeOperationError, sleep } from 'n8n-workflow';
import * as Db from '@/Db'; import * as Db from '@/Db';
import * as ResponseHelper from '@/ResponseHelper'; import * as ResponseHelper from '@/ResponseHelper';
@@ -71,7 +71,7 @@ export class Worker extends BaseCommand {
* get removed. * get removed.
*/ */
async stopProcess() { async stopProcess() {
LoggerProxy.info('Stopping n8n...'); this.logger.info('Stopping n8n...');
// Stop accepting new jobs // Stop accepting new jobs
await Worker.jobQueue.pause(true); await Worker.jobQueue.pause(true);
@@ -94,7 +94,7 @@ export class Worker extends BaseCommand {
while (Object.keys(Worker.runningJobs).length !== 0) { while (Object.keys(Worker.runningJobs).length !== 0) {
if (count++ % 4 === 0) { if (count++ % 4 === 0) {
const waitLeft = Math.ceil((stopTime - new Date().getTime()) / 1000); const waitLeft = Math.ceil((stopTime - new Date().getTime()) / 1000);
LoggerProxy.info( this.logger.info(
`Waiting for ${ `Waiting for ${
Object.keys(Worker.runningJobs).length Object.keys(Worker.runningJobs).length
} active executions to finish... (wait ${waitLeft} more seconds)`, } active executions to finish... (wait ${waitLeft} more seconds)`,
@@ -121,7 +121,7 @@ export class Worker extends BaseCommand {
); );
if (!fullExecutionData) { if (!fullExecutionData) {
LoggerProxy.error( this.logger.error(
`Worker failed to find data of execution "${executionId}" in database. Cannot continue.`, `Worker failed to find data of execution "${executionId}" in database. Cannot continue.`,
{ executionId }, { executionId },
); );
@@ -130,7 +130,7 @@ export class Worker extends BaseCommand {
); );
} }
const workflowId = fullExecutionData.workflowData.id!; const workflowId = fullExecutionData.workflowData.id!;
LoggerProxy.info( this.logger.info(
`Start job: ${job.id} (Workflow ID: ${workflowId} | Execution: ${executionId})`, `Start job: ${job.id} (Workflow ID: ${workflowId} | Execution: ${executionId})`,
); );
@@ -145,7 +145,7 @@ export class Worker extends BaseCommand {
}, },
}); });
if (workflowData === null) { if (workflowData === null) {
LoggerProxy.error( this.logger.error(
'Worker execution failed because workflow could not be found in database.', 'Worker execution failed because workflow could not be found in database.',
{ workflowId, executionId }, { workflowId, executionId },
); );
@@ -217,7 +217,7 @@ export class Worker extends BaseCommand {
additionalData.setExecutionStatus = (status: ExecutionStatus) => { additionalData.setExecutionStatus = (status: ExecutionStatus) => {
// Can't set the status directly in the queued worker, but it will happen in InternalHook.onWorkflowPostExecute // Can't set the status directly in the queued worker, but it will happen in InternalHook.onWorkflowPostExecute
LoggerProxy.debug(`Queued worker execution status for ${executionId} is "${status}"`); this.logger.debug(`Queued worker execution status for ${executionId} is "${status}"`);
}; };
let workflowExecute: WorkflowExecute; let workflowExecute: WorkflowExecute;
@@ -400,7 +400,7 @@ export class Worker extends BaseCommand {
'/healthz', '/healthz',
async (req: express.Request, res: express.Response) => { async (req: express.Request, res: express.Response) => {
LoggerProxy.debug('Health check started!'); this.logger.debug('Health check started!');
const connection = Db.getConnection(); const connection = Db.getConnection();
@@ -412,7 +412,7 @@ export class Worker extends BaseCommand {
// DB ping // DB ping
await connection.query('SELECT 1'); await connection.query('SELECT 1');
} catch (e) { } catch (e) {
LoggerProxy.error('No Database connection!', e as Error); this.logger.error('No Database connection!', e as Error);
const error = new ResponseHelper.ServiceUnavailableError('No Database connection!'); const error = new ResponseHelper.ServiceUnavailableError('No Database connection!');
return ResponseHelper.sendErrorResponse(res, error); return ResponseHelper.sendErrorResponse(res, error);
} }
@@ -423,7 +423,7 @@ export class Worker extends BaseCommand {
// Redis ping // Redis ping
await Worker.jobQueue.ping(); await Worker.jobQueue.ping();
} catch (e) { } catch (e) {
LoggerProxy.error('No Redis connection!', e as Error); this.logger.error('No Redis connection!', e as Error);
const error = new ResponseHelper.ServiceUnavailableError('No Redis connection!'); const error = new ResponseHelper.ServiceUnavailableError('No Redis connection!');
return ResponseHelper.sendErrorResponse(res, error); return ResponseHelper.sendErrorResponse(res, error);
} }
@@ -433,7 +433,7 @@ export class Worker extends BaseCommand {
status: 'ok', status: 'ok',
}; };
LoggerProxy.debug('Health check completed successfully!'); this.logger.debug('Health check completed successfully!');
ResponseHelper.sendSuccessResponse(res, responseData, true, 200); ResponseHelper.sendSuccessResponse(res, responseData, true, 200);
}, },

View File

@@ -2,7 +2,7 @@ import path from 'path';
import convict from 'convict'; import convict from 'convict';
import { Container } from 'typedi'; import { Container } from 'typedi';
import { InstanceSettings } from 'n8n-core'; import { InstanceSettings } from 'n8n-core';
import { jsonParse } from 'n8n-workflow'; import { LOG_LEVELS, jsonParse } from 'n8n-workflow';
import { ensureStringArray } from './utils'; import { ensureStringArray } from './utils';
convict.addFormat({ convict.addFormat({
@@ -856,7 +856,7 @@ export const schema = {
logs: { logs: {
level: { level: {
doc: 'Log output level', doc: 'Log output level',
format: ['error', 'warn', 'info', 'verbose', 'debug', 'silent'] as const, format: LOG_LEVELS,
default: 'info', default: 'info',
env: 'N8N_LOG_LEVEL', env: 'N8N_LOG_LEVEL',
}, },
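
In the config schema, the inline tuple of log levels is replaced by the `LOG_LEVELS` constant now exported from `n8n-workflow`, so the schema and the logger cannot drift apart. A hedged sketch of the same idea using a local constant; convict's `format` accepts an array of allowed values.

import convict from 'convict';

// Single source of truth for the allowed values; the real commit imports
// LOG_LEVELS from 'n8n-workflow' instead of declaring it locally.
const LOG_LEVELS = ['error', 'warn', 'info', 'verbose', 'debug', 'silent'] as const;

const exampleConfig = convict({
	logs: {
		level: {
			doc: 'Log output level',
			format: [...LOG_LEVELS],
			default: 'info',
			env: 'N8N_LOG_LEVEL',
		},
	},
});

exampleConfig.validate({ allowed: 'strict' });
console.log(exampleConfig.get('logs.level'));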

View File

@@ -1,6 +1,6 @@
import validator from 'validator'; import validator from 'validator';
import { In } from 'typeorm'; import { In } from 'typeorm';
import { Container } from 'typedi'; import { Service } from 'typedi';
import { Authorized, Get, Post, RestController } from '@/decorators'; import { Authorized, Get, Post, RestController } from '@/decorators';
import { import {
AuthError, AuthError,
@@ -11,12 +11,10 @@ import {
import { issueCookie, resolveJwt } from '@/auth/jwt'; import { issueCookie, resolveJwt } from '@/auth/jwt';
import { AUTH_COOKIE_NAME, RESPONSE_ERROR_MESSAGES } from '@/constants'; import { AUTH_COOKIE_NAME, RESPONSE_ERROR_MESSAGES } from '@/constants';
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import { ILogger } from 'n8n-workflow';
import type { User } from '@db/entities/User'; import type { User } from '@db/entities/User';
import { LoginRequest, UserRequest } from '@/requests'; import { LoginRequest, UserRequest } from '@/requests';
import type { PublicUser } from '@/Interfaces'; import type { PublicUser } from '@/Interfaces';
import { Config } from '@/config'; import config from '@/config';
import { IInternalHooksClass } from '@/Interfaces';
import { handleEmailLogin, handleLdapLogin } from '@/auth'; import { handleEmailLogin, handleLdapLogin } from '@/auth';
import { PostHogClient } from '@/posthog'; import { PostHogClient } from '@/posthog';
import { import {
@@ -28,15 +26,17 @@ import { InternalHooks } from '../InternalHooks';
import { License } from '@/License'; import { License } from '@/License';
import { UserService } from '@/services/user.service'; import { UserService } from '@/services/user.service';
import { MfaService } from '@/Mfa/mfa.service'; import { MfaService } from '@/Mfa/mfa.service';
import { Logger } from '@/Logger';
@Service()
@RestController() @RestController()
export class AuthController { export class AuthController {
constructor( constructor(
private readonly config: Config, private readonly logger: Logger,
private readonly logger: ILogger, private readonly internalHooks: InternalHooks,
private readonly internalHooks: IInternalHooksClass,
private readonly mfaService: MfaService, private readonly mfaService: MfaService,
private readonly userService: UserService, private readonly userService: UserService,
private readonly license: License,
private readonly postHog?: PostHogClient, private readonly postHog?: PostHogClient,
) {} ) {}
@@ -93,14 +93,14 @@ export class AuthController {
} }
await issueCookie(res, user); await issueCookie(res, user);
void Container.get(InternalHooks).onUserLoginSuccess({ void this.internalHooks.onUserLoginSuccess({
user, user,
authenticationMethod: usedAuthenticationMethod, authenticationMethod: usedAuthenticationMethod,
}); });
return this.userService.toPublic(user, { posthog: this.postHog }); return this.userService.toPublic(user, { posthog: this.postHog });
} }
void Container.get(InternalHooks).onUserLoginFailed({ void this.internalHooks.onUserLoginFailed({
user: email, user: email,
authenticationMethod: usedAuthenticationMethod, authenticationMethod: usedAuthenticationMethod,
reason: 'wrong credentials', reason: 'wrong credentials',
@@ -129,7 +129,7 @@ export class AuthController {
} }
} }
if (this.config.getEnv('userManagement.isInstanceOwnerSetUp')) { if (config.getEnv('userManagement.isInstanceOwnerSetUp')) {
throw new AuthError('Not logged in'); throw new AuthError('Not logged in');
} }
@@ -155,7 +155,7 @@ export class AuthController {
@Get('/resolve-signup-token') @Get('/resolve-signup-token')
async resolveSignupToken(req: UserRequest.ResolveSignUp) { async resolveSignupToken(req: UserRequest.ResolveSignUp) {
const { inviterId, inviteeId } = req.query; const { inviterId, inviteeId } = req.query;
const isWithinUsersLimit = Container.get(License).isWithinUsersLimit(); const isWithinUsersLimit = this.license.isWithinUsersLimit();
if (!isWithinUsersLimit) { if (!isWithinUsersLimit) {
this.logger.debug('Request to resolve signup token failed because of users quota reached', { this.logger.debug('Request to resolve signup token failed because of users quota reached', {

View File

@@ -1,32 +1,35 @@
import validator from 'validator'; import validator from 'validator';
import { plainToInstance } from 'class-transformer'; import { plainToInstance } from 'class-transformer';
import { Response } from 'express';
import { Service } from 'typedi';
import { randomBytes } from 'crypto';
import { Authorized, Delete, Get, Patch, Post, RestController } from '@/decorators'; import { Authorized, Delete, Get, Patch, Post, RestController } from '@/decorators';
import { compareHash, hashPassword, validatePassword } from '@/UserManagement/UserManagementHelper'; import { compareHash, hashPassword, validatePassword } from '@/UserManagement/UserManagementHelper';
import { BadRequestError } from '@/ResponseHelper'; import { BadRequestError } from '@/ResponseHelper';
import { validateEntity } from '@/GenericHelpers'; import { validateEntity } from '@/GenericHelpers';
import { issueCookie } from '@/auth/jwt'; import { issueCookie } from '@/auth/jwt';
import type { User } from '@db/entities/User'; import type { User } from '@db/entities/User';
import { Response } from 'express';
import { ILogger } from 'n8n-workflow';
import { import {
AuthenticatedRequest, AuthenticatedRequest,
MeRequest, MeRequest,
UserSettingsUpdatePayload, UserSettingsUpdatePayload,
UserUpdatePayload, UserUpdatePayload,
} from '@/requests'; } from '@/requests';
import { IExternalHooksClass, IInternalHooksClass } from '@/Interfaces';
import type { PublicUser } from '@/Interfaces'; import type { PublicUser } from '@/Interfaces';
import { randomBytes } from 'crypto';
import { isSamlLicensedAndEnabled } from '../sso/saml/samlHelpers'; import { isSamlLicensedAndEnabled } from '../sso/saml/samlHelpers';
import { UserService } from '@/services/user.service'; import { UserService } from '@/services/user.service';
import { Logger } from '@/Logger';
import { ExternalHooks } from '@/ExternalHooks';
import { InternalHooks } from '@/InternalHooks';
@Service()
@Authorized() @Authorized()
@RestController('/me') @RestController('/me')
export class MeController { export class MeController {
constructor( constructor(
private readonly logger: ILogger, private readonly logger: Logger,
private readonly externalHooks: IExternalHooksClass, private readonly externalHooks: ExternalHooks,
private readonly internalHooks: IInternalHooksClass, private readonly internalHooks: InternalHooks,
private readonly userService: UserService, private readonly userService: UserService,
) {} ) {}
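
The controllers previously declared `private readonly logger: ILogger`, but an interface is erased at compile time and cannot act as a typedi injection token; switching the parameter type to the concrete `Logger` class (and adding `@Service()`) lets the container resolve it. A compact sketch of such a decorated constructor, with `ExampleController` and its method as illustrative names; the hook services are imported exactly as in the hunks above.

import { Service } from 'typedi';
import { Logger } from '@/Logger';
import { ExternalHooks } from '@/ExternalHooks';
import { InternalHooks } from '@/InternalHooks';

// Interfaces like ILogger vanish after compilation, so DI needs class tokens.
@Service()
class ExampleController {
	constructor(
		private readonly logger: Logger,
		private readonly externalHooks: ExternalHooks,
		private readonly internalHooks: InternalHooks,
	) {}

	logRequest(route: string) {
		this.logger.debug(`Handling request for ${route}`);
	}
}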

View File

@@ -5,20 +5,20 @@ import { BadRequestError } from '@/ResponseHelper';
import { hashPassword, validatePassword } from '@/UserManagement/UserManagementHelper'; import { hashPassword, validatePassword } from '@/UserManagement/UserManagementHelper';
import { issueCookie } from '@/auth/jwt'; import { issueCookie } from '@/auth/jwt';
import { Response } from 'express'; import { Response } from 'express';
import { ILogger } from 'n8n-workflow';
import { Config } from '@/config'; import { Config } from '@/config';
import { OwnerRequest } from '@/requests'; import { OwnerRequest } from '@/requests';
import { IInternalHooksClass } from '@/Interfaces'; import { IInternalHooksClass } from '@/Interfaces';
import { SettingsRepository } from '@db/repositories'; import { SettingsRepository } from '@db/repositories';
import { PostHogClient } from '@/posthog'; import { PostHogClient } from '@/posthog';
import { UserService } from '@/services/user.service'; import { UserService } from '@/services/user.service';
import { Logger } from '@/Logger';
@Authorized(['global', 'owner']) @Authorized(['global', 'owner'])
@RestController('/owner') @RestController('/owner')
export class OwnerController { export class OwnerController {
constructor( constructor(
private readonly config: Config, private readonly config: Config,
private readonly logger: ILogger, private readonly logger: Logger,
private readonly internalHooks: IInternalHooksClass, private readonly internalHooks: IInternalHooksClass,
private readonly settingsRepository: SettingsRepository, private readonly settingsRepository: SettingsRepository,
private readonly userService: UserService, private readonly userService: UserService,

View File

@@ -16,7 +16,6 @@ import {
import { UserManagementMailer } from '@/UserManagement/email'; import { UserManagementMailer } from '@/UserManagement/email';
import { Response } from 'express'; import { Response } from 'express';
import { ILogger } from 'n8n-workflow';
import { PasswordResetRequest } from '@/requests'; import { PasswordResetRequest } from '@/requests';
import { IExternalHooksClass, IInternalHooksClass } from '@/Interfaces'; import { IExternalHooksClass, IInternalHooksClass } from '@/Interfaces';
import { issueCookie } from '@/auth/jwt'; import { issueCookie } from '@/auth/jwt';
@@ -30,11 +29,12 @@ import { TokenExpiredError } from 'jsonwebtoken';
import type { JwtPayload } from '@/services/jwt.service'; import type { JwtPayload } from '@/services/jwt.service';
import { JwtService } from '@/services/jwt.service'; import { JwtService } from '@/services/jwt.service';
import { MfaService } from '@/Mfa/mfa.service'; import { MfaService } from '@/Mfa/mfa.service';
import { Logger } from '@/Logger';
@RestController() @RestController()
export class PasswordResetController { export class PasswordResetController {
constructor( constructor(
private readonly logger: ILogger, private readonly logger: Logger,
private readonly externalHooks: IExternalHooksClass, private readonly externalHooks: IExternalHooksClass,
private readonly internalHooks: IInternalHooksClass, private readonly internalHooks: IInternalHooksClass,
private readonly mailer: UserManagementMailer, private readonly mailer: UserManagementMailer,

View File

@@ -1,7 +1,7 @@
import validator from 'validator'; import validator from 'validator';
import type { FindManyOptions } from 'typeorm'; import type { FindManyOptions } from 'typeorm';
import { In, Not } from 'typeorm'; import { In, Not } from 'typeorm';
import { ILogger, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; import { ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
import { User } from '@db/entities/User'; import { User } from '@db/entities/User';
import { SharedCredentials } from '@db/entities/SharedCredentials'; import { SharedCredentials } from '@db/entities/SharedCredentials';
import { SharedWorkflow } from '@db/entities/SharedWorkflow'; import { SharedWorkflow } from '@db/entities/SharedWorkflow';
@@ -38,13 +38,14 @@ import { JwtService } from '@/services/jwt.service';
import { RoleService } from '@/services/role.service'; import { RoleService } from '@/services/role.service';
import { UserService } from '@/services/user.service'; import { UserService } from '@/services/user.service';
import { listQueryMiddleware } from '@/middlewares'; import { listQueryMiddleware } from '@/middlewares';
import { Logger } from '@/Logger';
@Authorized(['global', 'owner']) @Authorized(['global', 'owner'])
@RestController('/users') @RestController('/users')
export class UsersController { export class UsersController {
constructor( constructor(
private readonly config: Config, private readonly config: Config,
private readonly logger: ILogger, private readonly logger: Logger,
private readonly externalHooks: IExternalHooksClass, private readonly externalHooks: IExternalHooksClass,
private readonly internalHooks: IInternalHooksClass, private readonly internalHooks: IInternalHooksClass,
private readonly sharedCredentialsRepository: SharedCredentialsRepository, private readonly sharedCredentialsRepository: SharedCredentialsRepository,

View File

@@ -1,6 +1,5 @@
import { Service } from 'typedi'; import { Service } from 'typedi';
import { Response, NextFunction } from 'express'; import { Response, NextFunction } from 'express';
import { ILogger } from 'n8n-workflow';
import { Get, Middleware, RestController } from '@/decorators'; import { Get, Middleware, RestController } from '@/decorators';
import type { WorkflowStatistics } from '@db/entities/WorkflowStatistics'; import type { WorkflowStatistics } from '@db/entities/WorkflowStatistics';
import { StatisticsNames } from '@db/entities/WorkflowStatistics'; import { StatisticsNames } from '@db/entities/WorkflowStatistics';
@@ -9,6 +8,7 @@ import { ExecutionRequest } from '@/requests';
import { whereClause } from '@/UserManagement/UserManagementHelper'; import { whereClause } from '@/UserManagement/UserManagementHelper';
import { NotFoundError } from '@/ResponseHelper'; import { NotFoundError } from '@/ResponseHelper';
import type { IWorkflowStatisticsDataLoaded } from '@/Interfaces'; import type { IWorkflowStatisticsDataLoaded } from '@/Interfaces';
import { Logger } from '@/Logger';
interface WorkflowStatisticsData<T> { interface WorkflowStatisticsData<T> {
productionSuccess: T; productionSuccess: T;
@@ -23,7 +23,7 @@ export class WorkflowStatisticsController {
constructor( constructor(
private sharedWorkflowRepository: SharedWorkflowRepository, private sharedWorkflowRepository: SharedWorkflowRepository,
private workflowStatisticsRepository: WorkflowStatisticsRepository, private workflowStatisticsRepository: WorkflowStatisticsRepository,
private readonly logger: ILogger, private readonly logger: Logger,
) {} ) {}
/** /**

View File

@@ -1,11 +1,10 @@
import express from 'express'; import express from 'express';
import type { INodeCredentialTestResult } from 'n8n-workflow'; import type { INodeCredentialTestResult } from 'n8n-workflow';
import { deepCopy, LoggerProxy } from 'n8n-workflow'; import { deepCopy } from 'n8n-workflow';
import * as GenericHelpers from '@/GenericHelpers'; import * as GenericHelpers from '@/GenericHelpers';
import * as ResponseHelper from '@/ResponseHelper'; import * as ResponseHelper from '@/ResponseHelper';
import config from '@/config'; import config from '@/config';
import { getLogger } from '@/Logger';
import { EECredentialsController } from './credentials.controller.ee'; import { EECredentialsController } from './credentials.controller.ee';
import { CredentialsService } from './credentials.service'; import { CredentialsService } from './credentials.service';
@@ -14,21 +13,9 @@ import type { CredentialRequest, ListQuery } from '@/requests';
import { Container } from 'typedi'; import { Container } from 'typedi';
import { InternalHooks } from '@/InternalHooks'; import { InternalHooks } from '@/InternalHooks';
import { listQueryMiddleware } from '@/middlewares'; import { listQueryMiddleware } from '@/middlewares';
import { Logger } from '@/Logger';
export const credentialsController = express.Router(); export const credentialsController = express.Router();
/**
* Initialize Logger if needed
*/
credentialsController.use((req, res, next) => {
try {
LoggerProxy.getInstance();
} catch (error) {
LoggerProxy.init(getLogger());
}
next();
});
credentialsController.use('/', EECredentialsController); credentialsController.use('/', EECredentialsController);
/** /**
@@ -151,10 +138,13 @@ credentialsController.patch(
const sharing = await CredentialsService.getSharing(req.user, credentialId); const sharing = await CredentialsService.getSharing(req.user, credentialId);
if (!sharing) { if (!sharing) {
LoggerProxy.info('Attempt to update credential blocked due to lack of permissions', { Container.get(Logger).info(
credentialId, 'Attempt to update credential blocked due to lack of permissions',
userId: req.user.id, {
}); credentialId,
userId: req.user.id,
},
);
throw new ResponseHelper.NotFoundError( throw new ResponseHelper.NotFoundError(
'Credential to be updated not found. You can only update credentials owned by you', 'Credential to be updated not found. You can only update credentials owned by you',
); );
@@ -183,7 +173,7 @@ credentialsController.patch(
// Remove the encrypted data as it is not needed in the frontend // Remove the encrypted data as it is not needed in the frontend
const { data: _, ...rest } = responseData; const { data: _, ...rest } = responseData;
LoggerProxy.verbose('Credential updated', { credentialId }); Container.get(Logger).verbose('Credential updated', { credentialId });
return { ...rest }; return { ...rest };
}), }),
@@ -200,10 +190,13 @@ credentialsController.delete(
const sharing = await CredentialsService.getSharing(req.user, credentialId); const sharing = await CredentialsService.getSharing(req.user, credentialId);
if (!sharing) { if (!sharing) {
LoggerProxy.info('Attempt to delete credential blocked due to lack of permissions', { Container.get(Logger).info(
credentialId, 'Attempt to delete credential blocked due to lack of permissions',
userId: req.user.id, {
}); credentialId,
userId: req.user.id,
},
);
throw new ResponseHelper.NotFoundError( throw new ResponseHelper.NotFoundError(
'Credential to be deleted not found. You can only removed credentials owned by you', 'Credential to be deleted not found. You can only removed credentials owned by you',
); );

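Files that export a plain express.Router rather than a class cannot take constructor parameters, so there the logger is resolved from the container at call time with Container.get(Logger); that is also why the old "Initialize Logger if needed" middleware can be dropped. A rough sketch of the resulting shape, with a placeholder route and message:

import express from 'express';
import { Container } from 'typedi';
import { Logger } from '@/Logger';

export const exampleController = express.Router();

exampleController.get('/example', (req, res) => {
	// Resolve the shared Logger lazily; no LoggerProxy.init() bootstrapping is needed anymore.
	Container.get(Logger).info('Example route hit');
	res.sendStatus(204);
});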
View File

@@ -6,7 +6,7 @@ import type {
INodeCredentialTestResult, INodeCredentialTestResult,
INodeProperties, INodeProperties,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { CREDENTIAL_EMPTY_VALUE, deepCopy, LoggerProxy, NodeHelpers } from 'n8n-workflow'; import { CREDENTIAL_EMPTY_VALUE, deepCopy, NodeHelpers } from 'n8n-workflow';
import { Container } from 'typedi'; import { Container } from 'typedi';
import type { FindManyOptions, FindOptionsWhere } from 'typeorm'; import type { FindManyOptions, FindOptionsWhere } from 'typeorm';
import { In, Like } from 'typeorm'; import { In, Like } from 'typeorm';
@@ -24,6 +24,7 @@ import type { CredentialRequest, ListQuery } from '@/requests';
import { CredentialTypes } from '@/CredentialTypes'; import { CredentialTypes } from '@/CredentialTypes';
import { RoleService } from '@/services/role.service'; import { RoleService } from '@/services/role.service';
import { OwnershipService } from '@/services/ownership.service'; import { OwnershipService } from '@/services/ownership.service';
import { Logger } from '@/Logger';
export class CredentialsService { export class CredentialsService {
static async get( static async get(
@@ -270,7 +271,7 @@ export class CredentialsService {
return savedCredential; return savedCredential;
}); });
LoggerProxy.verbose('New credential created', { Container.get(Logger).verbose('New credential created', {
credentialId: newCredential.id, credentialId: newCredential.id,
ownerId: user.id, ownerId: user.id,
}); });

View File

@@ -11,7 +11,7 @@ import split from 'lodash/split';
import unset from 'lodash/unset'; import unset from 'lodash/unset';
import { Credentials } from 'n8n-core'; import { Credentials } from 'n8n-core';
import type { WorkflowExecuteMode, INodeCredentialsDetails } from 'n8n-workflow'; import type { WorkflowExecuteMode, INodeCredentialsDetails } from 'n8n-workflow';
import { LoggerProxy, jsonStringify } from 'n8n-workflow'; import { jsonStringify } from 'n8n-workflow';
import { resolve as pathResolve } from 'path'; import { resolve as pathResolve } from 'path';
import * as Db from '@/Db'; import * as Db from '@/Db';
@@ -23,7 +23,6 @@ import {
getCredentialForUser, getCredentialForUser,
getCredentialWithoutUser, getCredentialWithoutUser,
} from '@/CredentialsHelper'; } from '@/CredentialsHelper';
import { getLogger } from '@/Logger';
import type { OAuthRequest } from '@/requests'; import type { OAuthRequest } from '@/requests';
import { ExternalHooks } from '@/ExternalHooks'; import { ExternalHooks } from '@/ExternalHooks';
import config from '@/config'; import config from '@/config';
@@ -31,21 +30,10 @@ import { getInstanceBaseUrl } from '@/UserManagement/UserManagementHelper';
import { Container } from 'typedi'; import { Container } from 'typedi';
import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'; import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData';
import { Logger } from '@/Logger';
export const oauth2CredentialController = express.Router(); export const oauth2CredentialController = express.Router();
/**
* Initialize Logger if needed
*/
oauth2CredentialController.use((req, res, next) => {
try {
LoggerProxy.getInstance();
} catch (error) {
LoggerProxy.init(getLogger());
}
next();
});
const restEndpoint = config.getEnv('endpoints.rest'); const restEndpoint = config.getEnv('endpoints.rest');
/** /**
@@ -65,7 +53,7 @@ oauth2CredentialController.get(
const credential = await getCredentialForUser(credentialId, req.user); const credential = await getCredentialForUser(credentialId, req.user);
if (!credential) { if (!credential) {
LoggerProxy.error('Failed to authorize OAuth2 due to lack of permissions', { Container.get(Logger).error('Failed to authorize OAuth2 due to lack of permissions', {
userId: req.user.id, userId: req.user.id,
credentialId, credentialId,
}); });
@@ -164,7 +152,7 @@ oauth2CredentialController.get(
// Update the credentials in DB // Update the credentials in DB
await Db.collections.Credentials.update(req.query.id, newCredentialsData); await Db.collections.Credentials.update(req.query.id, newCredentialsData);
LoggerProxy.verbose('OAuth2 authorization url created for credential', { Container.get(Logger).verbose('OAuth2 authorization url created for credential', {
userId: req.user.id, userId: req.user.id,
credentialId, credentialId,
}); });
@@ -210,7 +198,7 @@ oauth2CredentialController.get(
if (!credential) { if (!credential) {
const errorMessage = 'OAuth2 callback failed because of insufficient permissions'; const errorMessage = 'OAuth2 callback failed because of insufficient permissions';
LoggerProxy.error(errorMessage, { Container.get(Logger).error(errorMessage, {
userId: req.user?.id, userId: req.user?.id,
credentialId: state.cid, credentialId: state.cid,
}); });
@@ -244,7 +232,7 @@ oauth2CredentialController.get(
!token.verify(decryptedDataOriginal.csrfSecret as string, state.token) !token.verify(decryptedDataOriginal.csrfSecret as string, state.token)
) { ) {
const errorMessage = 'The OAuth2 callback state is invalid!'; const errorMessage = 'The OAuth2 callback state is invalid!';
LoggerProxy.debug(errorMessage, { Container.get(Logger).debug(errorMessage, {
userId: req.user?.id, userId: req.user?.id,
credentialId: state.cid, credentialId: state.cid,
}); });
@@ -298,7 +286,7 @@ oauth2CredentialController.get(
if (oauthToken === undefined) { if (oauthToken === undefined) {
const errorMessage = 'Unable to get OAuth2 access tokens!'; const errorMessage = 'Unable to get OAuth2 access tokens!';
LoggerProxy.error(errorMessage, { Container.get(Logger).error(errorMessage, {
userId: req.user?.id, userId: req.user?.id,
credentialId: state.cid, credentialId: state.cid,
}); });
@@ -327,7 +315,7 @@ oauth2CredentialController.get(
newCredentialsData.updatedAt = new Date(); newCredentialsData.updatedAt = new Date();
// Save the credentials in DB // Save the credentials in DB
await Db.collections.Credentials.update(state.cid, newCredentialsData); await Db.collections.Credentials.update(state.cid, newCredentialsData);
LoggerProxy.verbose('OAuth2 callback successful for new credential', { Container.get(Logger).verbose('OAuth2 callback successful for new credential', {
userId: req.user?.id, userId: req.user?.id,
credentialId: state.cid, credentialId: state.cid,
}); });

View File

@@ -17,7 +17,6 @@ import type {
SelectQueryBuilder, SelectQueryBuilder,
} from 'typeorm'; } from 'typeorm';
import { parse, stringify } from 'flatted'; import { parse, stringify } from 'flatted';
import { LoggerProxy as Logger } from 'n8n-workflow';
import type { IExecutionsSummary, IRunExecutionData } from 'n8n-workflow'; import type { IExecutionsSummary, IRunExecutionData } from 'n8n-workflow';
import { BinaryDataService } from 'n8n-core'; import { BinaryDataService } from 'n8n-core';
import type { import type {
@@ -35,6 +34,7 @@ import { ExecutionEntity } from '../entities/ExecutionEntity';
import { ExecutionMetadata } from '../entities/ExecutionMetadata'; import { ExecutionMetadata } from '../entities/ExecutionMetadata';
import { ExecutionDataRepository } from './executionData.repository'; import { ExecutionDataRepository } from './executionData.repository';
import { TIME, inTest } from '@/constants'; import { TIME, inTest } from '@/constants';
import { Logger } from '@/Logger';
function parseFiltersToQueryBuilder( function parseFiltersToQueryBuilder(
qb: SelectQueryBuilder<ExecutionEntity>, qb: SelectQueryBuilder<ExecutionEntity>,
@@ -77,8 +77,6 @@ function parseFiltersToQueryBuilder(
@Service() @Service()
export class ExecutionRepository extends Repository<ExecutionEntity> { export class ExecutionRepository extends Repository<ExecutionEntity> {
private logger = Logger;
private hardDeletionBatchSize = 100; private hardDeletionBatchSize = 100;
private rates: Record<string, number> = { private rates: Record<string, number> = {
@@ -96,6 +94,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
constructor( constructor(
dataSource: DataSource, dataSource: DataSource,
private readonly logger: Logger,
private readonly executionDataRepository: ExecutionDataRepository, private readonly executionDataRepository: ExecutionDataRepository,
private readonly binaryDataService: BinaryDataService, private readonly binaryDataService: BinaryDataService,
) { ) {
@@ -360,7 +359,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
} }
} catch (error) { } catch (error) {
if (error instanceof Error) { if (error instanceof Error) {
Logger.warn(`Failed to get executions count from Postgres: ${error.message}`, { this.logger.warn(`Failed to get executions count from Postgres: ${error.message}`, {
error, error,
}); });
} }
@@ -461,7 +460,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
if (!executions.length) { if (!executions.length) {
if (deleteConditions.ids) { if (deleteConditions.ids) {
Logger.error('Failed to delete an execution due to insufficient permissions', { this.logger.error('Failed to delete an execution due to insufficient permissions', {
executionIds: deleteConditions.ids, executionIds: deleteConditions.ids,
}); });
} }
@@ -480,7 +479,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
* Mark executions as deleted based on age and count, in a pruning cycle. * Mark executions as deleted based on age and count, in a pruning cycle.
*/ */
async softDeleteOnPruningCycle() { async softDeleteOnPruningCycle() {
Logger.debug('Starting soft-deletion of executions (pruning cycle)'); this.logger.debug('Starting soft-deletion of executions (pruning cycle)');
const maxAge = config.getEnv('executions.pruneDataMaxAge'); // in h const maxAge = config.getEnv('executions.pruneDataMaxAge'); // in h
const maxCount = config.getEnv('executions.pruneDataMaxCount'); const maxCount = config.getEnv('executions.pruneDataMaxCount');
@@ -527,7 +526,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
.execute(); .execute();
if (result.affected === 0) { if (result.affected === 0) {
Logger.debug('Found no executions to soft-delete (pruning cycle)'); this.logger.debug('Found no executions to soft-delete (pruning cycle)');
} }
} }

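In the repositories the Logger becomes one more constructor parameter next to the DataSource; the removed `private logger = Logger` field pointed at the imported LoggerProxy rather than at an injected instance. A minimal sketch of the resulting constructor, assuming the class extends TypeORM's Repository as above (the entity and method names here are illustrative):

import { Service } from 'typedi';
import { DataSource, Repository } from 'typeorm';
import { Logger } from '@/Logger';
import { ExecutionEntity } from '../entities/ExecutionEntity';

@Service()
export class ExampleRepository extends Repository<ExecutionEntity> {
	constructor(
		dataSource: DataSource,
		private readonly logger: Logger,
	) {
		// TypeORM repositories need the target entity and an EntityManager.
		super(ExecutionEntity, dataSource.manager);
	}

	markReady() {
		this.logger.debug('ExampleRepository initialized');
	}
}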
View File

@@ -8,10 +8,8 @@ import config from '@/config';
import { inTest } from '@/constants'; import { inTest } from '@/constants';
import type { BaseMigration, Migration, MigrationContext, MigrationFn } from '@db/types'; import type { BaseMigration, Migration, MigrationContext, MigrationFn } from '@db/types';
import { createSchemaBuilder } from '@db/dsl'; import { createSchemaBuilder } from '@db/dsl';
import { getLogger } from '@/Logger';
import { NodeTypes } from '@/NodeTypes'; import { NodeTypes } from '@/NodeTypes';
import { Logger } from '@/Logger';
const logger = getLogger();
const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json'; const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json';
@@ -48,6 +46,7 @@ let runningMigrations = false;
function logMigrationStart(migrationName: string): void { function logMigrationStart(migrationName: string): void {
if (inTest) return; if (inTest) return;
const logger = Container.get(Logger);
if (!runningMigrations) { if (!runningMigrations) {
logger.warn('Migrations in progress, please do NOT stop the process.'); logger.warn('Migrations in progress, please do NOT stop the process.');
runningMigrations = true; runningMigrations = true;
@@ -59,6 +58,7 @@ function logMigrationStart(migrationName: string): void {
function logMigrationEnd(migrationName: string): void { function logMigrationEnd(migrationName: string): void {
if (inTest) return; if (inTest) return;
const logger = Container.get(Logger);
logger.debug(`Finished migration ${migrationName}`); logger.debug(`Finished migration ${migrationName}`);
} }
@@ -94,7 +94,7 @@ const dbName = config.getEnv(`database.${dbType === 'mariadb' ? 'mysqldb' : dbTy
const tablePrefix = config.getEnv('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
const createContext = (queryRunner: QueryRunner, migration: Migration): MigrationContext => ({ const createContext = (queryRunner: QueryRunner, migration: Migration): MigrationContext => ({
logger, logger: Container.get(Logger),
tablePrefix, tablePrefix,
dbType, dbType,
isMysql, isMysql,

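The migration helpers drop the module-level `const logger = getLogger()` and instead call Container.get(Logger) inside each function, so no logger is constructed merely by importing the module and the same instance is shared with the rest of the application. A small sketch of that lazy-resolution style, with an illustrative function name:

import { Container } from 'typedi';
import { Logger } from '@/Logger';

export function logMigrationStep(migrationName: string): void {
	// Resolve at call time; typedi returns the single shared Logger instance.
	const logger = Container.get(Logger);
	logger.debug(`Running migration step: ${migrationName}`);
}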
View File

@@ -1,3 +1,6 @@
import { Container, Service } from 'typedi';
import type { PullResult } from 'simple-git';
import express from 'express';
import { Authorized, Get, Post, Patch, RestController } from '@/decorators'; import { Authorized, Get, Post, Patch, RestController } from '@/decorators';
import { import {
sourceControlLicensedMiddleware, sourceControlLicensedMiddleware,
@@ -10,10 +13,7 @@ import type { SourceControlPreferences } from './types/sourceControlPreferences'
import type { SourceControlledFile } from './types/sourceControlledFile'; import type { SourceControlledFile } from './types/sourceControlledFile';
import { SOURCE_CONTROL_API_ROOT, SOURCE_CONTROL_DEFAULT_BRANCH } from './constants'; import { SOURCE_CONTROL_API_ROOT, SOURCE_CONTROL_DEFAULT_BRANCH } from './constants';
import { BadRequestError } from '@/ResponseHelper'; import { BadRequestError } from '@/ResponseHelper';
import type { PullResult } from 'simple-git';
import express from 'express';
import type { ImportResult } from './types/importResult'; import type { ImportResult } from './types/importResult';
import Container, { Service } from 'typedi';
import { InternalHooks } from '../../InternalHooks'; import { InternalHooks } from '../../InternalHooks';
import { getRepoType } from './sourceControlHelper.ee'; import { getRepoType } from './sourceControlHelper.ee';
import { SourceControlGetStatus } from './types/sourceControlGetStatus'; import { SourceControlGetStatus } from './types/sourceControlGetStatus';

View File

@@ -14,7 +14,6 @@ import {
SOURCE_CONTROL_DEFAULT_NAME, SOURCE_CONTROL_DEFAULT_NAME,
SOURCE_CONTROL_README, SOURCE_CONTROL_README,
} from './constants'; } from './constants';
import { LoggerProxy } from 'n8n-workflow';
import { SourceControlGitService } from './sourceControlGit.service.ee'; import { SourceControlGitService } from './sourceControlGit.service.ee';
import type { PushResult } from 'simple-git'; import type { PushResult } from 'simple-git';
import { SourceControlExportService } from './sourceControlExport.service.ee'; import { SourceControlExportService } from './sourceControlExport.service.ee';
@@ -35,6 +34,7 @@ import type { SourceControlWorkflowVersionId } from './types/sourceControlWorkfl
import type { ExportableCredential } from './types/exportableCredential'; import type { ExportableCredential } from './types/exportableCredential';
import { InternalHooks } from '@/InternalHooks'; import { InternalHooks } from '@/InternalHooks';
import { TagRepository } from '@/databases/repositories'; import { TagRepository } from '@/databases/repositories';
import { Logger } from '@/Logger';
@Service() @Service()
export class SourceControlService { export class SourceControlService {
@@ -45,6 +45,7 @@ export class SourceControlService {
private gitFolder: string; private gitFolder: string;
constructor( constructor(
private readonly logger: Logger,
private gitService: SourceControlGitService, private gitService: SourceControlGitService,
private sourceControlPreferencesService: SourceControlPreferencesService, private sourceControlPreferencesService: SourceControlPreferencesService,
private sourceControlExportService: SourceControlExportService, private sourceControlExportService: SourceControlExportService,
@@ -123,14 +124,14 @@ export class SourceControlService {
if (!this.gitService.git) { if (!this.gitService.git) {
await this.initGitService(); await this.initGitService();
} }
LoggerProxy.debug('Initializing repository...'); this.logger.debug('Initializing repository...');
await this.gitService.initRepository(preferences, user); await this.gitService.initRepository(preferences, user);
let getBranchesResult; let getBranchesResult;
try { try {
getBranchesResult = await this.getBranches(); getBranchesResult = await this.getBranches();
} catch (error) { } catch (error) {
if ((error as Error).message.includes('Warning: Permanently added')) { if ((error as Error).message.includes('Warning: Permanently added')) {
LoggerProxy.debug('Added repository host to the list of known hosts. Retrying...'); this.logger.debug('Added repository host to the list of known hosts. Retrying...');
getBranchesResult = await this.getBranches(); getBranchesResult = await this.getBranches();
} else { } else {
throw error; throw error;
@@ -152,7 +153,7 @@ export class SourceControlService {
getBranchesResult = await this.getBranches(); getBranchesResult = await this.getBranches();
await this.gitService.setBranch(preferences.branchName); await this.gitService.setBranch(preferences.branchName);
} catch (fileError) { } catch (fileError) {
LoggerProxy.error(`Failed to create initial commit: ${(fileError as Error).message}`); this.logger.error(`Failed to create initial commit: ${(fileError as Error).message}`);
} }
} }
} }
@@ -193,7 +194,7 @@ export class SourceControlService {
await this.gitService.resetBranch(); await this.gitService.resetBranch();
await this.gitService.pull(); await this.gitService.pull();
} catch (error) { } catch (error) {
LoggerProxy.error(`Failed to reset workfolder: ${(error as Error).message}`); this.logger.error(`Failed to reset workfolder: ${(error as Error).message}`);
throw new Error( throw new Error(
'Unable to fetch updates from git - your folder might be out of sync. Try reconnecting from the Source Control settings page.', 'Unable to fetch updates from git - your folder might be out of sync. Try reconnecting from the Source Control settings page.',
); );

View File

@@ -8,7 +8,6 @@ import {
} from './constants'; } from './constants';
import * as Db from '@/Db'; import * as Db from '@/Db';
import type { ICredentialDataDecryptedObject } from 'n8n-workflow'; import type { ICredentialDataDecryptedObject } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';
import { writeFile as fsWriteFile, rm as fsRm } from 'fs/promises'; import { writeFile as fsWriteFile, rm as fsRm } from 'fs/promises';
import { rmSync } from 'fs'; import { rmSync } from 'fs';
import { Credentials, InstanceSettings } from 'n8n-core'; import { Credentials, InstanceSettings } from 'n8n-core';
@@ -27,6 +26,7 @@ import { In } from 'typeorm';
import type { SourceControlledFile } from './types/sourceControlledFile'; import type { SourceControlledFile } from './types/sourceControlledFile';
import { VariablesService } from '../variables/variables.service'; import { VariablesService } from '../variables/variables.service';
import { TagRepository } from '@/databases/repositories'; import { TagRepository } from '@/databases/repositories';
import { Logger } from '@/Logger';
@Service() @Service()
export class SourceControlExportService { export class SourceControlExportService {
@@ -37,6 +37,7 @@ export class SourceControlExportService {
private credentialExportFolder: string; private credentialExportFolder: string;
constructor( constructor(
private readonly logger: Logger,
private readonly variablesService: VariablesService, private readonly variablesService: VariablesService,
private readonly tagRepository: TagRepository, private readonly tagRepository: TagRepository,
instanceSettings: InstanceSettings, instanceSettings: InstanceSettings,
@@ -61,7 +62,7 @@ export class SourceControlExportService {
try { try {
await fsRm(this.gitFolder, { recursive: true }); await fsRm(this.gitFolder, { recursive: true });
} catch (error) { } catch (error) {
LoggerProxy.error(`Failed to delete work folder: ${(error as Error).message}`); this.logger.error(`Failed to delete work folder: ${(error as Error).message}`);
} }
} }
@@ -69,7 +70,7 @@ export class SourceControlExportService {
try { try {
filesToBeDeleted.forEach((e) => rmSync(e)); filesToBeDeleted.forEach((e) => rmSync(e));
} catch (error) { } catch (error) {
LoggerProxy.error(`Failed to delete workflows from work folder: ${(error as Error).message}`); this.logger.error(`Failed to delete workflows from work folder: ${(error as Error).message}`);
} }
return filesToBeDeleted; return filesToBeDeleted;
} }
@@ -91,7 +92,7 @@ export class SourceControlExportService {
versionId: e.versionId, versionId: e.versionId,
owner: owners[e.id], owner: owners[e.id],
}; };
LoggerProxy.debug(`Writing workflow ${e.id} to ${fileName}`); this.logger.debug(`Writing workflow ${e.id} to ${fileName}`);
return fsWriteFile(fileName, JSON.stringify(sanitizedWorkflow, null, 2)); return fsWriteFile(fileName, JSON.stringify(sanitizedWorkflow, null, 2));
}), }),
); );
@@ -224,7 +225,7 @@ export class SourceControlExportService {
continue; continue;
} }
} catch (error) { } catch (error) {
LoggerProxy.error(`Failed to sanitize credential data: ${(error as Error).message}`); this.logger.error(`Failed to sanitize credential data: ${(error as Error).message}`);
throw error; throw error;
} }
} }
@@ -262,7 +263,7 @@ export class SourceControlExportService {
data: sanitizedData, data: sanitizedData,
nodesAccess: sharedCredential.credentials.nodesAccess, nodesAccess: sharedCredential.credentials.nodesAccess,
}; };
LoggerProxy.debug(`Writing credential ${sharedCredential.credentials.id} to ${fileName}`); this.logger.debug(`Writing credential ${sharedCredential.credentials.id} to ${fileName}`);
return fsWriteFile(fileName, JSON.stringify(sanitizedCredential, null, 2)); return fsWriteFile(fileName, JSON.stringify(sanitizedCredential, null, 2));
}), }),
); );

View File

@@ -1,6 +1,5 @@
import { Service } from 'typedi'; import { Service } from 'typedi';
import { execSync } from 'child_process'; import { execSync } from 'child_process';
import { LoggerProxy } from 'n8n-workflow';
import path from 'path'; import path from 'path';
import type { import type {
CommitResult, CommitResult,
@@ -20,8 +19,9 @@ import {
SOURCE_CONTROL_ORIGIN, SOURCE_CONTROL_ORIGIN,
} from './constants'; } from './constants';
import { sourceControlFoldersExistCheck } from './sourceControlHelper.ee'; import { sourceControlFoldersExistCheck } from './sourceControlHelper.ee';
import type { User } from '../../databases/entities/User'; import type { User } from '@db/entities/User';
import { getInstanceOwner } from '../../UserManagement/UserManagementHelper'; import { getInstanceOwner } from '../../UserManagement/UserManagementHelper';
import { Logger } from '@/Logger';
@Service() @Service()
export class SourceControlGitService { export class SourceControlGitService {
@@ -29,17 +29,19 @@ export class SourceControlGitService {
private gitOptions: Partial<SimpleGitOptions> = {}; private gitOptions: Partial<SimpleGitOptions> = {};
constructor(private readonly logger: Logger) {}
/** /**
* Run pre-checks before initialising git * Run pre-checks before initialising git
* Checks for existence of required binaries (git and ssh) * Checks for existence of required binaries (git and ssh)
*/ */
private preInitCheck(): boolean { private preInitCheck(): boolean {
LoggerProxy.debug('GitService.preCheck'); this.logger.debug('GitService.preCheck');
try { try {
const gitResult = execSync('git --version', { const gitResult = execSync('git --version', {
stdio: ['pipe', 'pipe', 'pipe'], stdio: ['pipe', 'pipe', 'pipe'],
}); });
LoggerProxy.debug(`Git binary found: ${gitResult.toString()}`); this.logger.debug(`Git binary found: ${gitResult.toString()}`);
} catch (error) { } catch (error) {
throw new Error(`Git binary not found: ${(error as Error).message}`); throw new Error(`Git binary not found: ${(error as Error).message}`);
} }
@@ -47,7 +49,7 @@ export class SourceControlGitService {
const sshResult = execSync('ssh -V', { const sshResult = execSync('ssh -V', {
stdio: ['pipe', 'pipe', 'pipe'], stdio: ['pipe', 'pipe', 'pipe'],
}); });
LoggerProxy.debug(`SSH binary found: ${sshResult.toString()}`); this.logger.debug(`SSH binary found: ${sshResult.toString()}`);
} catch (error) { } catch (error) {
throw new Error(`SSH binary not found: ${(error as Error).message}`); throw new Error(`SSH binary not found: ${(error as Error).message}`);
} }
@@ -66,13 +68,13 @@ export class SourceControlGitService {
sshKeyName, sshKeyName,
sshFolder, sshFolder,
} = options; } = options;
LoggerProxy.debug('GitService.init'); this.logger.debug('GitService.init');
if (this.git !== null) { if (this.git !== null) {
return; return;
} }
this.preInitCheck(); this.preInitCheck();
LoggerProxy.debug('Git pre-check passed'); this.logger.debug('Git pre-check passed');
sourceControlFoldersExistCheck([gitFolder, sshFolder]); sourceControlFoldersExistCheck([gitFolder, sshFolder]);
@@ -135,13 +137,13 @@ export class SourceControlGitService {
(e) => e.name === SOURCE_CONTROL_ORIGIN && e.refs.push === remote, (e) => e.name === SOURCE_CONTROL_ORIGIN && e.refs.push === remote,
); );
if (foundRemote) { if (foundRemote) {
LoggerProxy.debug(`Git remote found: ${foundRemote.name}: ${foundRemote.refs.push}`); this.logger.debug(`Git remote found: ${foundRemote.name}: ${foundRemote.refs.push}`);
return true; return true;
} }
} catch (error) { } catch (error) {
throw new Error(`Git is not initialized ${(error as Error).message}`); throw new Error(`Git is not initialized ${(error as Error).message}`);
} }
LoggerProxy.debug(`Git remote not found: ${remote}`); this.logger.debug(`Git remote not found: ${remote}`);
return false; return false;
} }
@@ -159,14 +161,14 @@ export class SourceControlGitService {
try { try {
await this.git.init(); await this.git.init();
} catch (error) { } catch (error) {
LoggerProxy.debug(`Git init: ${(error as Error).message}`); this.logger.debug(`Git init: ${(error as Error).message}`);
} }
} }
try { try {
await this.git.addRemote(SOURCE_CONTROL_ORIGIN, sourceControlPreferences.repositoryUrl); await this.git.addRemote(SOURCE_CONTROL_ORIGIN, sourceControlPreferences.repositoryUrl);
} catch (error) { } catch (error) {
if ((error as Error).message.includes('remote origin already exists')) { if ((error as Error).message.includes('remote origin already exists')) {
LoggerProxy.debug(`Git remote already exists: ${(error as Error).message}`); this.logger.debug(`Git remote already exists: ${(error as Error).message}`);
} else { } else {
throw error; throw error;
} }
@@ -182,7 +184,7 @@ export class SourceControlGitService {
await this.git.raw(['branch', '-M', sourceControlPreferences.branchName]); await this.git.raw(['branch', '-M', sourceControlPreferences.branchName]);
} }
} catch (error) { } catch (error) {
LoggerProxy.debug(`Git init: ${(error as Error).message}`); this.logger.debug(`Git init: ${(error as Error).message}`);
} }
} }
} }
@@ -305,7 +307,7 @@ export class SourceControlGitService {
try { try {
await this.git.rm(Array.from(deletedFiles)); await this.git.rm(Array.from(deletedFiles));
} catch (error) { } catch (error) {
LoggerProxy.debug(`Git rm: ${(error as Error).message}`); this.logger.debug(`Git rm: ${(error as Error).message}`);
} }
} }
return this.git.add(Array.from(files)); return this.git.add(Array.from(files));

View File

@@ -1,9 +1,8 @@
import Container from 'typedi'; import { Container } from 'typedi';
import { License } from '@/License'; import { License } from '@/License';
import { generateKeyPairSync } from 'crypto'; import { generateKeyPairSync } from 'crypto';
import type { KeyPair } from './types/keyPair'; import type { KeyPair } from './types/keyPair';
import { constants as fsConstants, mkdirSync, accessSync } from 'fs'; import { constants as fsConstants, mkdirSync, accessSync } from 'fs';
import { LoggerProxy } from 'n8n-workflow';
import { import {
SOURCE_CONTROL_GIT_KEY_COMMENT, SOURCE_CONTROL_GIT_KEY_COMMENT,
SOURCE_CONTROL_TAGS_EXPORT_FILE, SOURCE_CONTROL_TAGS_EXPORT_FILE,
@@ -12,6 +11,7 @@ import {
import type { SourceControlledFile } from './types/sourceControlledFile'; import type { SourceControlledFile } from './types/sourceControlledFile';
import path from 'path'; import path from 'path';
import type { KeyPairType } from './types/keyPairType'; import type { KeyPairType } from './types/keyPairType';
import { Logger } from '@/Logger';
export function stringContainsExpression(testString: string): boolean { export function stringContainsExpression(testString: string): boolean {
return /^=.*\{\{.*\}\}/.test(testString); return /^=.*\{\{.*\}\}/.test(testString);
@@ -51,7 +51,7 @@ export function sourceControlFoldersExistCheck(
try { try {
mkdirSync(folder, { recursive: true }); mkdirSync(folder, { recursive: true });
} catch (error) { } catch (error) {
LoggerProxy.error((error as Error).message); Container.get(Logger).error((error as Error).message);
} }
} }
} }

View File

@@ -1,4 +1,4 @@
import Container, { Service } from 'typedi'; import { Container, Service } from 'typedi';
import path from 'path'; import path from 'path';
import { import {
SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
@@ -9,7 +9,7 @@ import {
} from './constants'; } from './constants';
import * as Db from '@/Db'; import * as Db from '@/Db';
import glob from 'fast-glob'; import glob from 'fast-glob';
import { LoggerProxy, jsonParse } from 'n8n-workflow'; import { jsonParse } from 'n8n-workflow';
import { readFile as fsReadFile } from 'fs/promises'; import { readFile as fsReadFile } from 'fs/promises';
import { Credentials, InstanceSettings } from 'n8n-core'; import { Credentials, InstanceSettings } from 'n8n-core';
import type { IWorkflowToImport } from '@/Interfaces'; import type { IWorkflowToImport } from '@/Interfaces';
@@ -28,6 +28,7 @@ import { RoleService } from '@/services/role.service';
import { VariablesService } from '../variables/variables.service'; import { VariablesService } from '../variables/variables.service';
import { TagRepository } from '@/databases/repositories'; import { TagRepository } from '@/databases/repositories';
import { UM_FIX_INSTRUCTION } from '@/constants'; import { UM_FIX_INSTRUCTION } from '@/constants';
import { Logger } from '@/Logger';
@Service() @Service()
export class SourceControlImportService { export class SourceControlImportService {
@@ -38,6 +39,7 @@ export class SourceControlImportService {
private credentialExportFolder: string; private credentialExportFolder: string;
constructor( constructor(
private readonly logger: Logger,
private readonly variablesService: VariablesService, private readonly variablesService: VariablesService,
private readonly activeWorkflowRunner: ActiveWorkflowRunner, private readonly activeWorkflowRunner: ActiveWorkflowRunner,
private readonly tagRepository: TagRepository, private readonly tagRepository: TagRepository,
@@ -88,7 +90,7 @@ export class SourceControlImportService {
}); });
const remoteWorkflowFilesParsed = await Promise.all( const remoteWorkflowFilesParsed = await Promise.all(
remoteWorkflowFiles.map(async (file) => { remoteWorkflowFiles.map(async (file) => {
LoggerProxy.debug(`Parsing workflow file ${file}`); this.logger.debug(`Parsing workflow file ${file}`);
const remote = jsonParse<IWorkflowToImport>(await fsReadFile(file, { encoding: 'utf8' })); const remote = jsonParse<IWorkflowToImport>(await fsReadFile(file, { encoding: 'utf8' }));
if (!remote?.id) { if (!remote?.id) {
return undefined; return undefined;
@@ -130,7 +132,7 @@ export class SourceControlImportService {
}); });
const remoteCredentialFilesParsed = await Promise.all( const remoteCredentialFilesParsed = await Promise.all(
remoteCredentialFiles.map(async (file) => { remoteCredentialFiles.map(async (file) => {
LoggerProxy.debug(`Parsing credential file ${file}`); this.logger.debug(`Parsing credential file ${file}`);
const remote = jsonParse<ExportableCredential>( const remote = jsonParse<ExportableCredential>(
await fsReadFile(file, { encoding: 'utf8' }), await fsReadFile(file, { encoding: 'utf8' }),
); );
@@ -169,7 +171,7 @@ export class SourceControlImportService {
absolute: true, absolute: true,
}); });
if (variablesFile.length > 0) { if (variablesFile.length > 0) {
LoggerProxy.debug(`Importing variables from file ${variablesFile[0]}`); this.logger.debug(`Importing variables from file ${variablesFile[0]}`);
return jsonParse<Variables[]>(await fsReadFile(variablesFile[0], { encoding: 'utf8' }), { return jsonParse<Variables[]>(await fsReadFile(variablesFile[0], { encoding: 'utf8' }), {
fallbackValue: [], fallbackValue: [],
}); });
@@ -190,7 +192,7 @@ export class SourceControlImportService {
absolute: true, absolute: true,
}); });
if (tagsFile.length > 0) { if (tagsFile.length > 0) {
LoggerProxy.debug(`Importing tags from file ${tagsFile[0]}`); this.logger.debug(`Importing tags from file ${tagsFile[0]}`);
const mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>( const mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>(
await fsReadFile(tagsFile[0], { encoding: 'utf8' }), await fsReadFile(tagsFile[0], { encoding: 'utf8' }),
{ fallbackValue: { tags: [], mappings: [] } }, { fallbackValue: { tags: [], mappings: [] } },
@@ -232,7 +234,7 @@ export class SourceControlImportService {
const cachedOwnerIds = new Map<string, string>(); const cachedOwnerIds = new Map<string, string>();
const importWorkflowsResult = await Promise.all( const importWorkflowsResult = await Promise.all(
candidates.map(async (candidate) => { candidates.map(async (candidate) => {
LoggerProxy.debug(`Parsing workflow file ${candidate.file}`); this.logger.debug(`Parsing workflow file ${candidate.file}`);
const importedWorkflow = jsonParse<IWorkflowToImport & { owner: string }>( const importedWorkflow = jsonParse<IWorkflowToImport & { owner: string }>(
await fsReadFile(candidate.file, { encoding: 'utf8' }), await fsReadFile(candidate.file, { encoding: 'utf8' }),
); );
@@ -241,7 +243,7 @@ export class SourceControlImportService {
} }
const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id); const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id);
importedWorkflow.active = existingWorkflow?.active ?? false; importedWorkflow.active = existingWorkflow?.active ?? false;
LoggerProxy.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`); this.logger.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`);
const upsertResult = await Db.collections.Workflow.upsert({ ...importedWorkflow }, ['id']); const upsertResult = await Db.collections.Workflow.upsert({ ...importedWorkflow }, ['id']);
if (upsertResult?.identifiers?.length !== 1) { if (upsertResult?.identifiers?.length !== 1) {
throw new Error(`Failed to upsert workflow ${importedWorkflow.id ?? 'new'}`); throw new Error(`Failed to upsert workflow ${importedWorkflow.id ?? 'new'}`);
@@ -299,14 +301,14 @@ export class SourceControlImportService {
if (existingWorkflow?.active) { if (existingWorkflow?.active) {
try { try {
// remove active pre-import workflow // remove active pre-import workflow
LoggerProxy.debug(`Deactivating workflow id ${existingWorkflow.id}`); this.logger.debug(`Deactivating workflow id ${existingWorkflow.id}`);
await workflowRunner.remove(existingWorkflow.id); await workflowRunner.remove(existingWorkflow.id);
// try activating the imported workflow // try activating the imported workflow
LoggerProxy.debug(`Reactivating workflow id ${existingWorkflow.id}`); this.logger.debug(`Reactivating workflow id ${existingWorkflow.id}`);
await workflowRunner.add(existingWorkflow.id, 'activate'); await workflowRunner.add(existingWorkflow.id, 'activate');
// update the versionId of the workflow to match the imported workflow // update the versionId of the workflow to match the imported workflow
} catch (error) { } catch (error) {
LoggerProxy.error(`Failed to activate workflow ${existingWorkflow.id}`, error as Error); this.logger.error(`Failed to activate workflow ${existingWorkflow.id}`, error as Error);
} finally { } finally {
await Db.collections.Workflow.update( await Db.collections.Workflow.update(
{ id: existingWorkflow.id }, { id: existingWorkflow.id },
@@ -347,7 +349,7 @@ export class SourceControlImportService {
let importCredentialsResult: Array<{ id: string; name: string; type: string }> = []; let importCredentialsResult: Array<{ id: string; name: string; type: string }> = [];
importCredentialsResult = await Promise.all( importCredentialsResult = await Promise.all(
candidates.map(async (candidate) => { candidates.map(async (candidate) => {
LoggerProxy.debug(`Importing credentials file ${candidate.file}`); this.logger.debug(`Importing credentials file ${candidate.file}`);
const credential = jsonParse<ExportableCredential>( const credential = jsonParse<ExportableCredential>(
await fsReadFile(candidate.file, { encoding: 'utf8' }), await fsReadFile(candidate.file, { encoding: 'utf8' }),
); );
@@ -367,7 +369,7 @@ export class SourceControlImportService {
} }
newCredentialObject.nodesAccess = nodesAccess || existingCredential?.nodesAccess || []; newCredentialObject.nodesAccess = nodesAccess || existingCredential?.nodesAccess || [];
LoggerProxy.debug(`Updating credential id ${newCredentialObject.id as string}`); this.logger.debug(`Updating credential id ${newCredentialObject.id as string}`);
await Db.collections.Credentials.upsert(newCredentialObject, ['id']); await Db.collections.Credentials.upsert(newCredentialObject, ['id']);
if (!sharedOwner) { if (!sharedOwner) {
@@ -395,13 +397,13 @@ export class SourceControlImportService {
public async importTagsFromWorkFolder(candidate: SourceControlledFile) { public async importTagsFromWorkFolder(candidate: SourceControlledFile) {
let mappedTags; let mappedTags;
try { try {
LoggerProxy.debug(`Importing tags from file ${candidate.file}`); this.logger.debug(`Importing tags from file ${candidate.file}`);
mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>( mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>(
await fsReadFile(candidate.file, { encoding: 'utf8' }), await fsReadFile(candidate.file, { encoding: 'utf8' }),
{ fallbackValue: { tags: [], mappings: [] } }, { fallbackValue: { tags: [], mappings: [] } },
); );
} catch (error) { } catch (error) {
LoggerProxy.error(`Failed to import tags from file ${candidate.file}`, error as Error); this.logger.error(`Failed to import tags from file ${candidate.file}`, error as Error);
return; return;
} }
@@ -462,13 +464,13 @@ export class SourceControlImportService {
const result: { imported: string[] } = { imported: [] }; const result: { imported: string[] } = { imported: [] };
let importedVariables; let importedVariables;
try { try {
LoggerProxy.debug(`Importing variables from file ${candidate.file}`); this.logger.debug(`Importing variables from file ${candidate.file}`);
importedVariables = jsonParse<Array<Partial<Variables>>>( importedVariables = jsonParse<Array<Partial<Variables>>>(
await fsReadFile(candidate.file, { encoding: 'utf8' }), await fsReadFile(candidate.file, { encoding: 'utf8' }),
{ fallbackValue: [] }, { fallbackValue: [] },
); );
} catch (error) { } catch (error) {
LoggerProxy.error(`Failed to import tags from file ${candidate.file}`, error as Error); this.logger.error(`Failed to import tags from file ${candidate.file}`, error as Error);
return; return;
} }
const overriddenKeys = Object.keys(valueOverrides ?? {}); const overriddenKeys = Object.keys(valueOverrides ?? {});
@@ -490,12 +492,12 @@ export class SourceControlImportService {
await Db.collections.Variables.upsert({ ...variable }, ['id']); await Db.collections.Variables.upsert({ ...variable }, ['id']);
} catch (errorUpsert) { } catch (errorUpsert) {
if (isUniqueConstraintError(errorUpsert as Error)) { if (isUniqueConstraintError(errorUpsert as Error)) {
LoggerProxy.debug(`Variable ${variable.key} already exists, updating instead`); this.logger.debug(`Variable ${variable.key} already exists, updating instead`);
try { try {
await Db.collections.Variables.update({ key: variable.key }, { ...variable }); await Db.collections.Variables.update({ key: variable.key }, { ...variable });
} catch (errorUpdate) { } catch (errorUpdate) {
LoggerProxy.debug(`Failed to update variable ${variable.key}, skipping`); this.logger.debug(`Failed to update variable ${variable.key}, skipping`);
LoggerProxy.debug((errorUpdate as Error).message); this.logger.debug((errorUpdate as Error).message);
} }
} }
} finally { } finally {

View File

@@ -10,7 +10,7 @@ import {
sourceControlFoldersExistCheck, sourceControlFoldersExistCheck,
} from './sourceControlHelper.ee'; } from './sourceControlHelper.ee';
import { InstanceSettings } from 'n8n-core'; import { InstanceSettings } from 'n8n-core';
import { LoggerProxy, jsonParse } from 'n8n-workflow'; import { jsonParse } from 'n8n-workflow';
import * as Db from '@/Db'; import * as Db from '@/Db';
import { import {
SOURCE_CONTROL_SSH_FOLDER, SOURCE_CONTROL_SSH_FOLDER,
@@ -21,6 +21,7 @@ import {
import path from 'path'; import path from 'path';
import type { KeyPairType } from './types/keyPairType'; import type { KeyPairType } from './types/keyPairType';
import config from '@/config'; import config from '@/config';
import { Logger } from '@/Logger';
@Service() @Service()
export class SourceControlPreferencesService { export class SourceControlPreferencesService {
@@ -32,7 +33,10 @@ export class SourceControlPreferencesService {
readonly gitFolder: string; readonly gitFolder: string;
constructor(instanceSettings: InstanceSettings) { constructor(
instanceSettings: InstanceSettings,
private readonly logger: Logger,
) {
this.sshFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_SSH_FOLDER); this.sshFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_SSH_FOLDER);
this.gitFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_GIT_FOLDER); this.gitFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_GIT_FOLDER);
this.sshKeyName = path.join(this.sshFolder, SOURCE_CONTROL_SSH_KEY_NAME); this.sshKeyName = path.join(this.sshFolder, SOURCE_CONTROL_SSH_KEY_NAME);
@@ -66,7 +70,7 @@ export class SourceControlPreferencesService {
try { try {
return fsReadFileSync(this.sshKeyName + '.pub', { encoding: 'utf8' }); return fsReadFileSync(this.sshKeyName + '.pub', { encoding: 'utf8' });
} catch (error) { } catch (error) {
LoggerProxy.error(`Failed to read public key: ${(error as Error).message}`); this.logger.error(`Failed to read public key: ${(error as Error).message}`);
} }
return ''; return '';
} }
@@ -79,7 +83,7 @@ export class SourceControlPreferencesService {
try { try {
await fsRm(this.sshFolder, { recursive: true }); await fsRm(this.sshFolder, { recursive: true });
} catch (error) { } catch (error) {
LoggerProxy.error(`Failed to delete ssh folder: ${(error as Error).message}`); this.logger.error(`Failed to delete ssh folder: ${(error as Error).message}`);
} }
} }
@@ -160,7 +164,7 @@ export class SourceControlPreferencesService {
const keyPairType = const keyPairType =
preferences.keyGeneratorType ?? preferences.keyGeneratorType ??
(config.get('sourceControl.defaultKeyPairType') as KeyPairType); (config.get('sourceControl.defaultKeyPairType') as KeyPairType);
LoggerProxy.debug(`No key pair files found, generating new pair using type: ${keyPairType}`); this.logger.debug(`No key pair files found, generating new pair using type: ${keyPairType}`);
await this.generateAndSaveKeyPair(keyPairType); await this.generateAndSaveKeyPair(keyPairType);
} }
this.sourceControlPreferences = preferences; this.sourceControlPreferences = preferences;
@@ -194,7 +198,7 @@ export class SourceControlPreferencesService {
return preferences; return preferences;
} }
} catch (error) { } catch (error) {
LoggerProxy.warn( this.logger.warn(
`Could not parse Source Control settings from database: ${(error as Error).message}`, `Could not parse Source Control settings from database: ${(error as Error).message}`,
); );
} }

View File

@@ -1,5 +1,5 @@
import express from 'express'; import express from 'express';
import { LoggerProxy } from 'n8n-workflow'; import { Container } from 'typedi';
import * as ResponseHelper from '@/ResponseHelper'; import * as ResponseHelper from '@/ResponseHelper';
import type { VariablesRequest } from '@/requests'; import type { VariablesRequest } from '@/requests';
@@ -9,14 +9,11 @@ import {
VariablesValidationError, VariablesValidationError,
} from './variables.service.ee'; } from './variables.service.ee';
import { isVariablesEnabled } from './enviromentHelpers'; import { isVariablesEnabled } from './enviromentHelpers';
import Container from 'typedi'; import { Logger } from '@/Logger';
// eslint-disable-next-line @typescript-eslint/naming-convention // eslint-disable-next-line @typescript-eslint/naming-convention
export const EEVariablesController = express.Router(); export const EEVariablesController = express.Router();
/**
* Initialize Logger if needed
*/
EEVariablesController.use((req, res, next) => { EEVariablesController.use((req, res, next) => {
if (!isVariablesEnabled()) { if (!isVariablesEnabled()) {
next('router'); next('router');
@@ -30,9 +27,12 @@ EEVariablesController.post(
'/', '/',
ResponseHelper.send(async (req: VariablesRequest.Create) => { ResponseHelper.send(async (req: VariablesRequest.Create) => {
if (req.user.globalRole.name !== 'owner') { if (req.user.globalRole.name !== 'owner') {
LoggerProxy.info('Attempt to update a variable blocked due to lack of permissions', { Container.get(Logger).info(
userId: req.user.id, 'Attempt to update a variable blocked due to lack of permissions',
}); {
userId: req.user.id,
},
);
throw new ResponseHelper.AuthError('Unauthorized'); throw new ResponseHelper.AuthError('Unauthorized');
} }
const variable = req.body; const variable = req.body;
@@ -55,10 +55,13 @@ EEVariablesController.patch(
ResponseHelper.send(async (req: VariablesRequest.Update) => { ResponseHelper.send(async (req: VariablesRequest.Update) => {
const id = req.params.id; const id = req.params.id;
if (req.user.globalRole.name !== 'owner') { if (req.user.globalRole.name !== 'owner') {
LoggerProxy.info('Attempt to update a variable blocked due to lack of permissions', { Container.get(Logger).info(
id, 'Attempt to update a variable blocked due to lack of permissions',
userId: req.user.id, {
}); id,
userId: req.user.id,
},
);
throw new ResponseHelper.AuthError('Unauthorized'); throw new ResponseHelper.AuthError('Unauthorized');
} }
const variable = req.body; const variable = req.body;

View File

@@ -1,29 +1,15 @@
import express from 'express'; import express from 'express';
import { LoggerProxy } from 'n8n-workflow'; import { Container } from 'typedi';
import { getLogger } from '@/Logger';
import * as ResponseHelper from '@/ResponseHelper'; import * as ResponseHelper from '@/ResponseHelper';
import type { VariablesRequest } from '@/requests'; import type { VariablesRequest } from '@/requests';
import { VariablesService } from './variables.service'; import { VariablesService } from './variables.service';
import { EEVariablesController } from './variables.controller.ee'; import { EEVariablesController } from './variables.controller.ee';
import Container from 'typedi'; import { Logger } from '@/Logger';
export const variablesController = express.Router(); export const variablesController = express.Router();
variablesController.use('/', EEVariablesController); variablesController.use('/', EEVariablesController);
/**
* Initialize Logger if needed
*/
variablesController.use((req, res, next) => {
try {
LoggerProxy.getInstance();
} catch (error) {
LoggerProxy.init(getLogger());
}
next();
});
variablesController.use(EEVariablesController); variablesController.use(EEVariablesController);
variablesController.get( variablesController.get(
@@ -64,10 +50,13 @@ variablesController.delete(
ResponseHelper.send(async (req: VariablesRequest.Delete) => { ResponseHelper.send(async (req: VariablesRequest.Delete) => {
const id = req.params.id; const id = req.params.id;
if (req.user.globalRole.name !== 'owner') { if (req.user.globalRole.name !== 'owner') {
LoggerProxy.info('Attempt to delete a variable blocked due to lack of permissions', { Container.get(Logger).info(
id, 'Attempt to delete a variable blocked due to lack of permissions',
userId: req.user.id, {
}); id,
userId: req.user.id,
},
);
throw new ResponseHelper.AuthError('Unauthorized'); throw new ResponseHelper.AuthError('Unauthorized');
} }
await Container.get(VariablesService).delete(id); await Container.get(VariablesService).delete(id);

View File

@@ -1,4 +1,4 @@
import { LoggerProxy, jsonParse } from 'n8n-workflow'; import { jsonParse } from 'n8n-workflow';
import type { MessageEventBusDestinationOptions } from 'n8n-workflow'; import type { MessageEventBusDestinationOptions } from 'n8n-workflow';
import type { DeleteResult } from 'typeorm'; import type { DeleteResult } from 'typeorm';
import { In } from 'typeorm'; import { In } from 'typeorm';
@@ -28,11 +28,12 @@ import {
} from '../EventMessageClasses/EventMessageGeneric'; } from '../EventMessageClasses/EventMessageGeneric';
import { recoverExecutionDataFromEventLogMessages } from './recoverEvents'; import { recoverExecutionDataFromEventLogMessages } from './recoverEvents';
import { METRICS_EVENT_NAME } from '../MessageEventBusDestination/Helpers.ee'; import { METRICS_EVENT_NAME } from '../MessageEventBusDestination/Helpers.ee';
import Container, { Service } from 'typedi'; import { Container, Service } from 'typedi';
import { ExecutionRepository, WorkflowRepository } from '@/databases/repositories'; import { ExecutionRepository, WorkflowRepository } from '@/databases/repositories';
import type { AbstractEventMessageOptions } from '../EventMessageClasses/AbstractEventMessageOptions'; import type { AbstractEventMessageOptions } from '../EventMessageClasses/AbstractEventMessageOptions';
import { getEventMessageObjectByType } from '../EventMessageClasses/Helpers'; import { getEventMessageObjectByType } from '../EventMessageClasses/Helpers';
import { OrchestrationMainService } from '@/services/orchestration/main/orchestration.main.service'; import { OrchestrationMainService } from '@/services/orchestration/main/orchestration.main.service';
import { Logger } from '@/Logger';
export type EventMessageReturnMode = 'sent' | 'unsent' | 'all' | 'unfinished'; export type EventMessageReturnMode = 'sent' | 'unsent' | 'all' | 'unfinished';
@@ -48,8 +49,6 @@ export interface MessageEventBusInitializeOptions {
@Service() @Service()
export class MessageEventBus extends EventEmitter { export class MessageEventBus extends EventEmitter {
private static instance: MessageEventBus;
isInitialized: boolean; isInitialized: boolean;
logWriter: MessageEventBusLogWriter; logWriter: MessageEventBusLogWriter;
@@ -60,18 +59,11 @@ export class MessageEventBus extends EventEmitter {
private pushIntervalTimer: NodeJS.Timer; private pushIntervalTimer: NodeJS.Timer;
constructor() { constructor(private readonly logger: Logger) {
super(); super();
this.isInitialized = false; this.isInitialized = false;
} }
static getInstance(): MessageEventBus {
if (!MessageEventBus.instance) {
MessageEventBus.instance = new MessageEventBus();
}
return MessageEventBus.instance;
}
/** /**
* Needs to be called once at startup to set the event bus instance up. Will launch the event log writer and, * Needs to be called once at startup to set the event bus instance up. Will launch the event log writer and,
* if configured to do so, the previously stored event destinations. * if configured to do so, the previously stored event destinations.
@@ -85,7 +77,7 @@ export class MessageEventBus extends EventEmitter {
return; return;
} }
LoggerProxy.debug('Initializing event bus...'); this.logger.debug('Initializing event bus...');
const savedEventDestinations = await Db.collections.EventDestinations.find({}); const savedEventDestinations = await Db.collections.EventDestinations.find({});
if (savedEventDestinations.length > 0) { if (savedEventDestinations.length > 0) {
@@ -97,12 +89,12 @@ export class MessageEventBus extends EventEmitter {
} }
} catch (error) { } catch (error) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
if (error.message) LoggerProxy.debug(error.message as string); if (error.message) this.logger.debug(error.message as string);
} }
} }
} }
LoggerProxy.debug('Initializing event writer'); this.logger.debug('Initializing event writer');
if (options?.workerId) { if (options?.workerId) {
// only add 'worker' to log file name since the ID changes on every start and we // only add 'worker' to log file name since the ID changes on every start and we
// would not be able to recover the log files from the previous run not knowing it // would not be able to recover the log files from the previous run not knowing it
@@ -115,19 +107,19 @@ export class MessageEventBus extends EventEmitter {
} }
if (!this.logWriter) { if (!this.logWriter) {
LoggerProxy.warn('Could not initialize event writer'); this.logger.warn('Could not initialize event writer');
} }
if (options?.skipRecoveryPass) { if (options?.skipRecoveryPass) {
LoggerProxy.debug('Skipping unsent event check'); this.logger.debug('Skipping unsent event check');
} else { } else {
// unsent event check: // unsent event check:
// - find unsent messages in current event log(s) // - find unsent messages in current event log(s)
// - cycle event logs and start the logging to a fresh file // - cycle event logs and start the logging to a fresh file
// - retry sending events // - retry sending events
LoggerProxy.debug('Checking for unsent event messages'); this.logger.debug('Checking for unsent event messages');
const unsentAndUnfinished = await this.getUnsentAndUnfinishedExecutions(); const unsentAndUnfinished = await this.getUnsentAndUnfinishedExecutions();
LoggerProxy.debug( this.logger.debug(
`Start logging into ${this.logWriter?.getLogFileName() ?? 'unknown filename'} `, `Start logging into ${this.logWriter?.getLogFileName() ?? 'unknown filename'} `,
); );
this.logWriter?.startLogging(); this.logWriter?.startLogging();
@@ -152,16 +144,16 @@ export class MessageEventBus extends EventEmitter {
} }
if (unfinishedExecutionIds.length > 0) { if (unfinishedExecutionIds.length > 0) {
LoggerProxy.warn(`Found unfinished executions: ${unfinishedExecutionIds.join(', ')}`); this.logger.warn(`Found unfinished executions: ${unfinishedExecutionIds.join(', ')}`);
LoggerProxy.info('This could be due to a crash of an active workflow or a restart of n8n.'); this.logger.info('This could be due to a crash of an active workflow or a restart of n8n.');
const activeWorkflows = await Container.get(WorkflowRepository).find({ const activeWorkflows = await Container.get(WorkflowRepository).find({
where: { active: true }, where: { active: true },
select: ['id', 'name'], select: ['id', 'name'],
}); });
if (activeWorkflows.length > 0) { if (activeWorkflows.length > 0) {
LoggerProxy.info('Currently active workflows:'); this.logger.info('Currently active workflows:');
for (const workflowData of activeWorkflows) { for (const workflowData of activeWorkflows) {
LoggerProxy.info(` - ${workflowData.name} (ID: ${workflowData.id})`); this.logger.info(` - ${workflowData.name} (ID: ${workflowData.id})`);
} }
} }
const recoveryAlreadyAttempted = this.logWriter?.isRecoveryProcessRunning(); const recoveryAlreadyAttempted = this.logWriter?.isRecoveryProcessRunning();
@@ -171,14 +163,14 @@ export class MessageEventBus extends EventEmitter {
// a possible reason would be that recreating the workflow data itself caused e.g an OOM error // a possible reason would be that recreating the workflow data itself caused e.g an OOM error
// in that case, we do not want to retry the recovery process, but rather mark the executions as crashed // in that case, we do not want to retry the recovery process, but rather mark the executions as crashed
if (recoveryAlreadyAttempted) if (recoveryAlreadyAttempted)
LoggerProxy.warn('Skipped recovery process since it previously failed.'); this.logger.warn('Skipped recovery process since it previously failed.');
} else { } else {
// start actual recovery process and write recovery process flag file // start actual recovery process and write recovery process flag file
this.logWriter?.startRecoveryProcess(); this.logWriter?.startRecoveryProcess();
for (const executionId of unfinishedExecutionIds) { for (const executionId of unfinishedExecutionIds) {
LoggerProxy.warn(`Attempting to recover execution ${executionId}`); this.logger.warn(`Attempting to recover execution ${executionId}`);
if (!unsentAndUnfinished.unfinishedExecutions[executionId]?.length) { if (!unsentAndUnfinished.unfinishedExecutions[executionId]?.length) {
LoggerProxy.debug( this.logger.debug(
`No event messages found, marking execution ${executionId} as 'crashed'`, `No event messages found, marking execution ${executionId} as 'crashed'`,
); );
await Container.get(ExecutionRepository).markAsCrashed([executionId]); await Container.get(ExecutionRepository).markAsCrashed([executionId]);
@@ -205,7 +197,7 @@ export class MessageEventBus extends EventEmitter {
}, config.getEnv('eventBus.checkUnsentInterval')); }, config.getEnv('eventBus.checkUnsentInterval'));
} }
LoggerProxy.debug('MessageEventBus initialized'); this.logger.debug('MessageEventBus initialized');
this.isInitialized = true; this.isInitialized = true;
} }
@@ -263,25 +255,25 @@ export class MessageEventBus extends EventEmitter {
private async trySendingUnsent(msgs?: EventMessageTypes[]) { private async trySendingUnsent(msgs?: EventMessageTypes[]) {
const unsentMessages = msgs ?? (await this.getEventsUnsent()); const unsentMessages = msgs ?? (await this.getEventsUnsent());
if (unsentMessages.length > 0) { if (unsentMessages.length > 0) {
LoggerProxy.debug(`Found unsent event messages: ${unsentMessages.length}`); this.logger.debug(`Found unsent event messages: ${unsentMessages.length}`);
for (const unsentMsg of unsentMessages) { for (const unsentMsg of unsentMessages) {
LoggerProxy.debug(`Retrying: ${unsentMsg.id} ${unsentMsg.__type}`); this.logger.debug(`Retrying: ${unsentMsg.id} ${unsentMsg.__type}`);
await this.emitMessage(unsentMsg); await this.emitMessage(unsentMsg);
} }
} }
} }
async close() { async close() {
LoggerProxy.debug('Shutting down event writer...'); this.logger.debug('Shutting down event writer...');
await this.logWriter?.close(); await this.logWriter?.close();
for (const destinationName of Object.keys(this.destinations)) { for (const destinationName of Object.keys(this.destinations)) {
LoggerProxy.debug( this.logger.debug(
`Shutting down event destination ${this.destinations[destinationName].getId()}...`, `Shutting down event destination ${this.destinations[destinationName].getId()}...`,
); );
await this.destinations[destinationName].close(); await this.destinations[destinationName].close();
} }
this.isInitialized = false; this.isInitialized = false;
LoggerProxy.debug('EventBus shut down.'); this.logger.debug('EventBus shut down.');
} }
async restart() { async restart() {
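Taken together, the MessageEventBus edits swap a hand-rolled singleton (a private static instance plus getInstance()) for a typedi-managed @Service whose Logger arrives through the constructor, so every former LoggerProxy.* call becomes this.logger.*. A stripped-down sketch of that pattern, with an illustrative class name:

import { EventEmitter } from 'events';
import { Service } from 'typedi';
import { Logger } from '@/Logger';

@Service()
export class ExampleEventBus extends EventEmitter {
	// typedi constructs the single shared instance and injects the Logger,
	// so callers use Container.get(ExampleEventBus) instead of getInstance().
	constructor(private readonly logger: Logger) {
		super();
	}

	initialize() {
		this.logger.debug('Initializing event bus...');
	}
}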

View File

@@ -1,14 +1,15 @@
import { v4 as uuid } from 'uuid'; import { v4 as uuid } from 'uuid';
import { Container } from 'typedi';
import type { DeleteResult, InsertResult } from 'typeorm';
import type { INodeCredentials } from 'n8n-workflow'; import type { INodeCredentials } from 'n8n-workflow';
import { import {
LoggerProxy,
MessageEventBusDestinationTypeNames, MessageEventBusDestinationTypeNames,
MessageEventBusDestinationOptions, MessageEventBusDestinationOptions,
} from 'n8n-workflow'; } from 'n8n-workflow';
import * as Db from '@/Db'; import * as Db from '@/Db';
import { Logger } from '@/Logger';
import type { AbstractEventMessage } from '../EventMessageClasses/AbstractEventMessage'; import type { AbstractEventMessage } from '../EventMessageClasses/AbstractEventMessage';
import type { EventMessageTypes } from '../EventMessageClasses'; import type { EventMessageTypes } from '../EventMessageClasses';
import type { DeleteResult, InsertResult } from 'typeorm';
import type { EventMessageConfirmSource } from '../EventMessageClasses/EventMessageConfirm'; import type { EventMessageConfirmSource } from '../EventMessageClasses/EventMessageConfirm';
import { MessageEventBus } from '../MessageEventBus/MessageEventBus'; import { MessageEventBus } from '../MessageEventBus/MessageEventBus';
import type { MessageWithCallback } from '../MessageEventBus/MessageEventBus'; import type { MessageWithCallback } from '../MessageEventBus/MessageEventBus';
@@ -20,6 +21,8 @@ export abstract class MessageEventBusDestination implements MessageEventBusDesti
readonly eventBusInstance: MessageEventBus; readonly eventBusInstance: MessageEventBus;
protected readonly logger: Logger;
__type: MessageEventBusDestinationTypeNames; __type: MessageEventBusDestinationTypeNames;
label: string; label: string;
@@ -33,6 +36,7 @@ export abstract class MessageEventBusDestination implements MessageEventBusDesti
anonymizeAuditMessages: boolean; anonymizeAuditMessages: boolean;
constructor(eventBusInstance: MessageEventBus, options: MessageEventBusDestinationOptions) { constructor(eventBusInstance: MessageEventBus, options: MessageEventBusDestinationOptions) {
this.logger = Container.get(Logger);
this.eventBusInstance = eventBusInstance; this.eventBusInstance = eventBusInstance;
this.id = !options.id || options.id.length !== 36 ? uuid() : options.id; this.id = !options.id || options.id.length !== 36 ? uuid() : options.id;
this.__type = options.__type ?? MessageEventBusDestinationTypeNames.abstract; this.__type = options.__type ?? MessageEventBusDestinationTypeNames.abstract;
@@ -41,7 +45,7 @@ export abstract class MessageEventBusDestination implements MessageEventBusDesti
this.subscribedEvents = options.subscribedEvents ?? []; this.subscribedEvents = options.subscribedEvents ?? [];
this.anonymizeAuditMessages = options.anonymizeAuditMessages ?? false; this.anonymizeAuditMessages = options.anonymizeAuditMessages ?? false;
if (options.credentials) this.credentials = options.credentials; if (options.credentials) this.credentials = options.credentials;
LoggerProxy.debug(`${this.__type}(${this.id}) event destination constructed`); this.logger.debug(`${this.__type}(${this.id}) event destination constructed`);
} }
startListening() { startListening() {
@@ -55,7 +59,7 @@ export abstract class MessageEventBusDestination implements MessageEventBusDesti
await this.receiveFromEventBus({ msg, confirmCallback }); await this.receiveFromEventBus({ msg, confirmCallback });
}, },
); );
LoggerProxy.debug(`${this.id} listener started`); this.logger.debug(`${this.id} listener started`);
} }
} }
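Destinations take a different route, presumably because each one is constructed with runtime options (an event-bus reference plus per-destination settings) rather than being built by the container: the abstract base class resolves the Logger from the container in its own constructor and exposes it as this.logger, which the Sentry, Syslog and Webhook subclasses then use. Roughly, with an illustrative base class:

import { Container } from 'typedi';
import { Logger } from '@/Logger';

export abstract class ExampleDestination {
	protected readonly logger: Logger;

	constructor(readonly id: string) {
		// Not a @Service itself, so the Logger is pulled from the container
		// here and becomes available to every subclass as this.logger.
		this.logger = Container.get(Logger);
		this.logger.debug(`destination ${this.id} constructed`);
	}
}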

View File

@@ -1,10 +1,12 @@
import { MessageEventBusDestinationTypeNames, LoggerProxy } from 'n8n-workflow'; import { MessageEventBusDestinationTypeNames } from 'n8n-workflow';
import type { EventDestinations } from '@/databases/entities/EventDestinations'; import type { EventDestinations } from '@/databases/entities/EventDestinations';
import type { MessageEventBus } from '../MessageEventBus/MessageEventBus'; import type { MessageEventBus } from '../MessageEventBus/MessageEventBus';
import type { MessageEventBusDestination } from './MessageEventBusDestination.ee'; import type { MessageEventBusDestination } from './MessageEventBusDestination.ee';
import { MessageEventBusDestinationSentry } from './MessageEventBusDestinationSentry.ee'; import { MessageEventBusDestinationSentry } from './MessageEventBusDestinationSentry.ee';
import { MessageEventBusDestinationSyslog } from './MessageEventBusDestinationSyslog.ee'; import { MessageEventBusDestinationSyslog } from './MessageEventBusDestinationSyslog.ee';
import { MessageEventBusDestinationWebhook } from './MessageEventBusDestinationWebhook.ee'; import { MessageEventBusDestinationWebhook } from './MessageEventBusDestinationWebhook.ee';
import { Container } from 'typedi';
import { Logger } from '@/Logger';
export function messageEventBusDestinationFromDb( export function messageEventBusDestinationFromDb(
eventBusInstance: MessageEventBus, eventBusInstance: MessageEventBus,
@@ -20,7 +22,7 @@ export function messageEventBusDestinationFromDb(
case MessageEventBusDestinationTypeNames.webhook: case MessageEventBusDestinationTypeNames.webhook:
return MessageEventBusDestinationWebhook.deserialize(eventBusInstance, destinationData); return MessageEventBusDestinationWebhook.deserialize(eventBusInstance, destinationData);
default: default:
LoggerProxy.debug('MessageEventBusDestination __type unknown'); Container.get(Logger).debug('MessageEventBusDestination __type unknown');
} }
} }
return null; return null;

View File

@@ -3,7 +3,6 @@
import { MessageEventBusDestination } from './MessageEventBusDestination.ee'; import { MessageEventBusDestination } from './MessageEventBusDestination.ee';
import * as Sentry from '@sentry/node'; import * as Sentry from '@sentry/node';
import { import {
LoggerProxy,
MessageEventBusDestinationTypeNames, MessageEventBusDestinationTypeNames,
MessageEventBusDestinationSentryOptions, MessageEventBusDestinationSentryOptions,
} from 'n8n-workflow'; } from 'n8n-workflow';
@@ -90,7 +89,7 @@ export class MessageEventBusDestinationSentry
sendResult = true; sendResult = true;
} }
} catch (error) { } catch (error) {
if (error.message) LoggerProxy.debug(error.message as string); if (error.message) this.logger.debug(error.message as string);
} }
return sendResult; return sendResult;
} }

View File

@@ -1,8 +1,8 @@
/* eslint-disable @typescript-eslint/no-unsafe-member-access */ /* eslint-disable @typescript-eslint/no-unsafe-member-access */
import syslog from 'syslog-client'; import syslog from 'syslog-client';
import type { MessageEventBusDestinationOptions } from 'n8n-workflow'; import type { MessageEventBusDestinationOptions } from 'n8n-workflow';
import { import {
LoggerProxy,
MessageEventBusDestinationTypeNames, MessageEventBusDestinationTypeNames,
MessageEventBusDestinationSyslogOptions, MessageEventBusDestinationSyslogOptions,
} from 'n8n-workflow'; } from 'n8n-workflow';
@@ -11,7 +11,6 @@ import { isLogStreamingEnabled } from '../MessageEventBus/MessageEventBusHelper'
import { eventMessageGenericDestinationTestEvent } from '../EventMessageClasses/EventMessageGeneric'; import { eventMessageGenericDestinationTestEvent } from '../EventMessageClasses/EventMessageGeneric';
import { MessageEventBus } from '../MessageEventBus/MessageEventBus'; import { MessageEventBus } from '../MessageEventBus/MessageEventBus';
import type { MessageWithCallback } from '../MessageEventBus/MessageEventBus'; import type { MessageWithCallback } from '../MessageEventBus/MessageEventBus';
export const isMessageEventBusDestinationSyslogOptions = ( export const isMessageEventBusDestinationSyslogOptions = (
candidate: unknown, candidate: unknown,
): candidate is MessageEventBusDestinationSyslogOptions => { ): candidate is MessageEventBusDestinationSyslogOptions => {
@@ -63,7 +62,7 @@ export class MessageEventBusDestinationSyslog
? syslog.Transport.Tcp ? syslog.Transport.Tcp
: syslog.Transport.Udp, : syslog.Transport.Udp,
}); });
LoggerProxy.debug(`MessageEventBusDestinationSyslog with id ${this.getId()} initialized`); this.logger.debug(`MessageEventBusDestinationSyslog with id ${this.getId()} initialized`);
this.client.on('error', function (error) { this.client.on('error', function (error) {
console.error(error); console.error(error);
}); });
@@ -93,7 +92,7 @@ export class MessageEventBusDestinationSyslog
}, },
async (error) => { async (error) => {
if (error?.message) { if (error?.message) {
LoggerProxy.debug(error.message); this.logger.debug(error.message);
} else { } else {
// eventBus.confirmSent(msg, { id: this.id, name: this.label }); // eventBus.confirmSent(msg, { id: this.id, name: this.label });
confirmCallback(msg, { id: this.id, name: this.label }); confirmCallback(msg, { id: this.id, name: this.label });
@@ -102,7 +101,7 @@ export class MessageEventBusDestinationSyslog
}, },
); );
} catch (error) { } catch (error) {
if (error.message) LoggerProxy.debug(error.message as string); if (error.message) this.logger.debug(error.message as string);
} }
if (msg.eventName === eventMessageGenericDestinationTestEvent) { if (msg.eventName === eventMessageGenericDestinationTestEvent) {
await new Promise((resolve) => setTimeout(resolve, 500)); await new Promise((resolve) => setTimeout(resolve, 500));

View File

@@ -7,7 +7,6 @@ import axios from 'axios';
import type { AxiosRequestConfig, Method } from 'axios'; import type { AxiosRequestConfig, Method } from 'axios';
import { import {
jsonParse, jsonParse,
LoggerProxy,
MessageEventBusDestinationTypeNames, MessageEventBusDestinationTypeNames,
MessageEventBusDestinationWebhookOptions, MessageEventBusDestinationWebhookOptions,
} from 'n8n-workflow'; } from 'n8n-workflow';
@@ -102,7 +101,7 @@ export class MessageEventBusDestinationWebhook
if (options.sendPayload) this.sendPayload = options.sendPayload; if (options.sendPayload) this.sendPayload = options.sendPayload;
if (options.options) this.options = options.options; if (options.options) this.options = options.options;
LoggerProxy.debug(`MessageEventBusDestinationWebhook with id ${this.getId()} initialized`); this.logger.debug(`MessageEventBusDestinationWebhook with id ${this.getId()} initialized`);
} }
async matchDecryptedCredentialType(credentialType: string) { async matchDecryptedCredentialType(credentialType: string) {
@@ -359,7 +358,7 @@ export class MessageEventBusDestinationWebhook
} }
} }
} catch (error) { } catch (error) {
LoggerProxy.warn( this.logger.warn(
`Webhook destination ${this.label} failed to send message to: ${this.url} - ${ `Webhook destination ${this.label} failed to send message to: ${this.url} - ${
(error as Error).message (error as Error).message
}`, }`,

View File

@@ -6,7 +6,7 @@ import path, { parse } from 'path';
import { Worker } from 'worker_threads'; import { Worker } from 'worker_threads';
import { createReadStream, existsSync, rmSync } from 'fs'; import { createReadStream, existsSync, rmSync } from 'fs';
import readline from 'readline'; import readline from 'readline';
import { jsonParse, LoggerProxy } from 'n8n-workflow'; import { jsonParse } from 'n8n-workflow';
import remove from 'lodash/remove'; import remove from 'lodash/remove';
import config from '@/config'; import config from '@/config';
import { getEventMessageObjectByType } from '../EventMessageClasses/Helpers'; import { getEventMessageObjectByType } from '../EventMessageClasses/Helpers';
@@ -19,6 +19,7 @@ import {
} from '../EventMessageClasses/EventMessageConfirm'; } from '../EventMessageClasses/EventMessageConfirm';
import { once as eventOnce } from 'events'; import { once as eventOnce } from 'events';
import { inTest } from '@/constants'; import { inTest } from '@/constants';
import { Logger } from '@/Logger';
import Container from 'typedi'; import Container from 'typedi';
interface MessageEventBusLogWriterConstructorOptions { interface MessageEventBusLogWriterConstructorOptions {
@@ -48,8 +49,14 @@ export class MessageEventBusLogWriter {
static options: Required<MessageEventBusLogWriterOptions>; static options: Required<MessageEventBusLogWriterOptions>;
private readonly logger: Logger;
private _worker: Worker | undefined; private _worker: Worker | undefined;
constructor() {
this.logger = Container.get(Logger);
}
public get worker(): Worker | undefined { public get worker(): Worker | undefined {
return this._worker; return this._worker;
} }
@@ -136,7 +143,7 @@ export class MessageEventBusLogWriter {
this._worker = new Worker(workerFileName); this._worker = new Worker(workerFileName);
if (this.worker) { if (this.worker) {
this.worker.on('messageerror', async (error) => { this.worker.on('messageerror', async (error) => {
LoggerProxy.error('Event Bus Log Writer thread error, attempting to restart...', error); this.logger.error('Event Bus Log Writer thread error, attempting to restart...', error);
await MessageEventBusLogWriter.instance.startThread(); await MessageEventBusLogWriter.instance.startThread();
}); });
return true; return true;
@@ -235,7 +242,7 @@ export class MessageEventBusLogWriter {
} }
} }
} catch (error) { } catch (error) {
LoggerProxy.error( this.logger.error(
`Error reading line messages from file: ${logFileName}, line: ${line}, ${error.message}}`, `Error reading line messages from file: ${logFileName}, line: ${line}, ${error.message}}`,
); );
} }
@@ -243,7 +250,7 @@ export class MessageEventBusLogWriter {
// wait for stream to finish before continue // wait for stream to finish before continue
await eventOnce(rl, 'close'); await eventOnce(rl, 'close');
} catch { } catch {
LoggerProxy.error(`Error reading logged messages from file: ${logFileName}`); this.logger.error(`Error reading logged messages from file: ${logFileName}`);
} }
} }
return results; return results;
@@ -308,7 +315,7 @@ export class MessageEventBusLogWriter {
if (msg !== null) messages.push(msg); if (msg !== null) messages.push(msg);
} }
} catch { } catch {
LoggerProxy.error( this.logger.error(
`Error reading line messages from file: ${logFileName}, line: ${line}`, `Error reading line messages from file: ${logFileName}, line: ${line}`,
); );
} }
@@ -316,7 +323,7 @@ export class MessageEventBusLogWriter {
// wait for stream to finish before continue // wait for stream to finish before continue
await eventOnce(rl, 'close'); await eventOnce(rl, 'close');
} catch { } catch {
LoggerProxy.error(`Error reading logged messages from file: ${logFileName}`); this.logger.error(`Error reading logged messages from file: ${logFileName}`);
} }
} }
return messages; return messages;

View File

@@ -1,11 +1,11 @@
import { Container } from 'typedi';
import type { ExecutionStatus, IRun, IWorkflowBase } from 'n8n-workflow'; import type { ExecutionStatus, IRun, IWorkflowBase } from 'n8n-workflow';
import type { ExecutionPayload, IExecutionDb } from '@/Interfaces'; import type { ExecutionPayload, IExecutionDb } from '@/Interfaces';
import pick from 'lodash/pick'; import pick from 'lodash/pick';
import { isWorkflowIdValid } from '@/utils'; import { isWorkflowIdValid } from '@/utils';
import { LoggerProxy } from 'n8n-workflow'; import { ExecutionRepository } from '@db/repositories';
import Container from 'typedi'; import { ExecutionMetadataService } from '@/services/executionMetadata.service';
import { ExecutionRepository } from '../../databases/repositories'; import { Logger } from '@/Logger';
import { ExecutionMetadataService } from '../../services/executionMetadata.service';
export function determineFinalExecutionStatus(runData: IRun): ExecutionStatus { export function determineFinalExecutionStatus(runData: IRun): ExecutionStatus {
const workflowHasCrashed = runData.status === 'crashed'; const workflowHasCrashed = runData.status === 'crashed';
@@ -69,9 +69,10 @@ export async function updateExistingExecution(parameters: {
workflowId: string; workflowId: string;
executionData: Partial<IExecutionDb>; executionData: Partial<IExecutionDb>;
}) { }) {
const logger = Container.get(Logger);
const { executionId, workflowId, executionData } = parameters; const { executionId, workflowId, executionData } = parameters;
// Leave log message before flatten as that operation increased memory usage a lot and the chance of a crash is highest here // Leave log message before flatten as that operation increased memory usage a lot and the chance of a crash is highest here
LoggerProxy.debug(`Save execution data to database for execution ID ${executionId}`, { logger.debug(`Save execution data to database for execution ID ${executionId}`, {
executionId, executionId,
workflowId, workflowId,
finished: executionData.finished, finished: executionData.finished,
@@ -88,7 +89,7 @@ export async function updateExistingExecution(parameters: {
); );
} }
} catch (e) { } catch (e) {
LoggerProxy.error(`Failed to save metadata for execution ID ${executionId}`, e as Error); logger.error(`Failed to save metadata for execution ID ${executionId}`, e as Error);
} }
if (executionData.finished === true && executionData.retryOf !== undefined) { if (executionData.finished === true && executionData.retryOf !== undefined) {

View File

@@ -1,30 +1,15 @@
import express from 'express'; import express from 'express';
import { LoggerProxy } from 'n8n-workflow';
import type { import type {
IExecutionFlattedResponse, IExecutionFlattedResponse,
IExecutionResponse, IExecutionResponse,
IExecutionsListResponse, IExecutionsListResponse,
} from '@/Interfaces'; } from '@/Interfaces';
import * as ResponseHelper from '@/ResponseHelper'; import * as ResponseHelper from '@/ResponseHelper';
import { getLogger } from '@/Logger';
import type { ExecutionRequest } from '@/requests'; import type { ExecutionRequest } from '@/requests';
import { EEExecutionsController } from './executions.controller.ee'; import { EEExecutionsController } from './executions.controller.ee';
import { ExecutionsService } from './executions.service'; import { ExecutionsService } from './executions.service';
export const executionsController = express.Router(); export const executionsController = express.Router();
/**
* Initialise Logger if needed
*/
executionsController.use((req, res, next) => {
try {
LoggerProxy.getInstance();
} catch (error) {
LoggerProxy.init(getLogger());
}
next();
});
executionsController.use('/', EEExecutionsController); executionsController.use('/', EEExecutionsController);
/** /**

View File

@@ -1,6 +1,6 @@
import { validate as jsonSchemaValidate } from 'jsonschema'; import { validate as jsonSchemaValidate } from 'jsonschema';
import type { IWorkflowBase, JsonObject, ExecutionStatus } from 'n8n-workflow'; import type { IWorkflowBase, JsonObject, ExecutionStatus } from 'n8n-workflow';
import { LoggerProxy, jsonParse, Workflow } from 'n8n-workflow'; import { jsonParse, Workflow } from 'n8n-workflow';
import type { FindOperator } from 'typeorm'; import type { FindOperator } from 'typeorm';
import { In } from 'typeorm'; import { In } from 'typeorm';
import { ActiveExecutions } from '@/ActiveExecutions'; import { ActiveExecutions } from '@/ActiveExecutions';
@@ -23,6 +23,7 @@ import * as GenericHelpers from '@/GenericHelpers';
import { Container } from 'typedi'; import { Container } from 'typedi';
import { getStatusUsingPreviousExecutionStatusMethod } from './executionHelpers'; import { getStatusUsingPreviousExecutionStatusMethod } from './executionHelpers';
import { ExecutionRepository } from '@db/repositories'; import { ExecutionRepository } from '@db/repositories';
import { Logger } from '@/Logger';
export interface IGetExecutionsQueryFilter { export interface IGetExecutionsQueryFilter {
id?: FindOperator<string> | string; id?: FindOperator<string> | string;
@@ -110,7 +111,7 @@ export class ExecutionsService {
} }
} }
} catch (error) { } catch (error) {
LoggerProxy.error('Failed to parse filter', { Container.get(Logger).error('Failed to parse filter', {
userId: req.user.id, userId: req.user.id,
filter: req.query.filter, filter: req.query.filter,
}); });
@@ -123,7 +124,7 @@ export class ExecutionsService {
// safeguard against querying workflowIds not shared with the user // safeguard against querying workflowIds not shared with the user
const workflowId = filter?.workflowId?.toString(); const workflowId = filter?.workflowId?.toString();
if (workflowId !== undefined && !sharedWorkflowIds.includes(workflowId)) { if (workflowId !== undefined && !sharedWorkflowIds.includes(workflowId)) {
LoggerProxy.verbose( Container.get(Logger).verbose(
`User ${req.user.id} attempted to query non-shared workflow ${workflowId}`, `User ${req.user.id} attempted to query non-shared workflow ${workflowId}`,
); );
return { return {
@@ -193,10 +194,13 @@ export class ExecutionsService {
}); });
if (!execution) { if (!execution) {
LoggerProxy.info('Attempt to read execution was blocked due to insufficient permissions', { Container.get(Logger).info(
userId: req.user.id, 'Attempt to read execution was blocked due to insufficient permissions',
executionId, {
}); userId: req.user.id,
executionId,
},
);
return undefined; return undefined;
} }
@@ -221,7 +225,7 @@ export class ExecutionsService {
}); });
if (!execution) { if (!execution) {
LoggerProxy.info( Container.get(Logger).info(
'Attempt to retry an execution was blocked due to insufficient permissions', 'Attempt to retry an execution was blocked due to insufficient permissions',
{ {
userId: req.user.id, userId: req.user.id,
@@ -299,11 +303,14 @@ export class ExecutionsService {
// Find the data of the last executed node in the new workflow // Find the data of the last executed node in the new workflow
const node = workflowInstance.getNode(stack.node.name); const node = workflowInstance.getNode(stack.node.name);
if (node === null) { if (node === null) {
LoggerProxy.error('Failed to retry an execution because a node could not be found', { Container.get(Logger).error(
userId: req.user.id, 'Failed to retry an execution because a node could not be found',
executionId, {
nodeName: stack.node.name, userId: req.user.id,
}); executionId,
nodeName: stack.node.name,
},
);
throw new Error( throw new Error(
`Could not find the node "${stack.node.name}" in workflow. It probably got deleted or renamed. Without it the workflow can sadly not be retried.`, `Could not find the node "${stack.node.name}" in workflow. It probably got deleted or renamed. Without it the workflow can sadly not be retried.`,
); );

View File

@@ -1,38 +1,25 @@
import express from 'express'; import express from 'express';
import { LoggerProxy } from 'n8n-workflow'; import { Container } from 'typedi';
import { getLogger } from '@/Logger'; import { Logger } from '@/Logger';
import * as ResponseHelper from '@/ResponseHelper'; import * as ResponseHelper from '@/ResponseHelper';
import type { ILicensePostResponse, ILicenseReadResponse } from '@/Interfaces'; import type { ILicensePostResponse, ILicenseReadResponse } from '@/Interfaces';
import { LicenseService } from './License.service'; import { LicenseService } from './License.service';
import { License } from '@/License'; import { License } from '@/License';
import type { AuthenticatedRequest, LicenseRequest } from '@/requests'; import type { AuthenticatedRequest, LicenseRequest } from '@/requests';
import { Container } from 'typedi';
import { InternalHooks } from '@/InternalHooks'; import { InternalHooks } from '@/InternalHooks';
export const licenseController = express.Router(); export const licenseController = express.Router();
const OWNER_ROUTES = ['/activate', '/renew']; const OWNER_ROUTES = ['/activate', '/renew'];
/**
* Initialize Logger if needed
*/
licenseController.use((req, res, next) => {
try {
LoggerProxy.getInstance();
} catch (error) {
LoggerProxy.init(getLogger());
}
next();
});
/** /**
* Owner checking * Owner checking
*/ */
licenseController.use((req: AuthenticatedRequest, res, next) => { licenseController.use((req: AuthenticatedRequest, res, next) => {
if (OWNER_ROUTES.includes(req.path) && req.user) { if (OWNER_ROUTES.includes(req.path) && req.user) {
if (!req.user.isOwner) { if (!req.user.isOwner) {
LoggerProxy.info('Non-owner attempted to activate or renew a license', { Container.get(Logger).info('Non-owner attempted to activate or renew a license', {
userId: req.user.id, userId: req.user.id,
}); });
ResponseHelper.sendErrorResponse( ResponseHelper.sendErrorResponse(
@@ -95,7 +82,7 @@ licenseController.post(
break; break;
default: default:
message += `: ${error.message}`; message += `: ${error.message}`;
getLogger().error(message, { stack: error.stack ?? 'n/a' }); Container.get(Logger).error(message, { stack: error.stack ?? 'n/a' });
} }
throw new ResponseHelper.BadRequestError(message); throw new ResponseHelper.BadRequestError(message);

View File

@@ -1,15 +1,16 @@
import type { Application, NextFunction, Request, RequestHandler, Response } from 'express'; import type { Application, NextFunction, Request, RequestHandler, Response } from 'express';
import { Container } from 'typedi';
import jwt from 'jsonwebtoken'; import jwt from 'jsonwebtoken';
import passport from 'passport'; import passport from 'passport';
import { Strategy } from 'passport-jwt'; import { Strategy } from 'passport-jwt';
import { sync as globSync } from 'fast-glob'; import { sync as globSync } from 'fast-glob';
import { LoggerProxy as Logger } from 'n8n-workflow';
import type { JwtPayload } from '@/Interfaces'; import type { JwtPayload } from '@/Interfaces';
import type { AuthenticatedRequest } from '@/requests'; import type { AuthenticatedRequest } from '@/requests';
import config from '@/config'; import config from '@/config';
import { AUTH_COOKIE_NAME, EDITOR_UI_DIST_DIR } from '@/constants'; import { AUTH_COOKIE_NAME, EDITOR_UI_DIST_DIR } from '@/constants';
import { issueCookie, resolveJwtContent } from '@/auth/jwt'; import { issueCookie, resolveJwtContent } from '@/auth/jwt';
import { canSkipAuth } from '@/decorators/registerController'; import { canSkipAuth } from '@/decorators/registerController';
import { Logger } from '@/Logger';
const jwtFromRequest = (req: Request) => { const jwtFromRequest = (req: Request) => {
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
@@ -27,7 +28,7 @@ const userManagementJwtAuth = (): RequestHandler => {
const user = await resolveJwtContent(jwtPayload); const user = await resolveJwtContent(jwtPayload);
return done(null, user); return done(null, user);
} catch (error) { } catch (error) {
Logger.debug('Failed to extract user from JWT payload', { jwtPayload }); Container.get(Logger).debug('Failed to extract user from JWT payload', { jwtPayload });
return done(null, false, { message: 'User not found' }); return done(null, false, { message: 'User not found' });
} }
}, },

View File

@@ -1,5 +1,6 @@
import { jsonStringify, LoggerProxy as Logger } from 'n8n-workflow'; import { jsonStringify } from 'n8n-workflow';
import type { IPushDataType } from '@/Interfaces'; import type { IPushDataType } from '@/Interfaces';
import { Logger } from '@/Logger';
export abstract class AbstractPush<T> { export abstract class AbstractPush<T> {
protected connections: Record<string, T> = {}; protected connections: Record<string, T> = {};
@@ -7,9 +8,11 @@ export abstract class AbstractPush<T> {
protected abstract close(connection: T): void; protected abstract close(connection: T): void;
protected abstract sendToOne(connection: T, data: string): void; protected abstract sendToOne(connection: T, data: string): void;
constructor(private readonly logger: Logger) {}
protected add(sessionId: string, connection: T): void { protected add(sessionId: string, connection: T): void {
const { connections } = this; const { connections } = this;
Logger.debug('Add editor-UI session', { sessionId }); this.logger.debug('Add editor-UI session', { sessionId });
const existingConnection = connections[sessionId]; const existingConnection = connections[sessionId];
if (existingConnection) { if (existingConnection) {
@@ -22,7 +25,7 @@ export abstract class AbstractPush<T> {
protected remove(sessionId?: string): void { protected remove(sessionId?: string): void {
if (sessionId !== undefined) { if (sessionId !== undefined) {
Logger.debug('Remove editor-UI session', { sessionId }); this.logger.debug('Remove editor-UI session', { sessionId });
delete this.connections[sessionId]; delete this.connections[sessionId];
} }
} }
@@ -30,11 +33,11 @@ export abstract class AbstractPush<T> {
send<D>(type: IPushDataType, data: D, sessionId: string | undefined) { send<D>(type: IPushDataType, data: D, sessionId: string | undefined) {
const { connections } = this; const { connections } = this;
if (sessionId !== undefined && connections[sessionId] === undefined) { if (sessionId !== undefined && connections[sessionId] === undefined) {
Logger.error(`The session "${sessionId}" is not registered.`, { sessionId }); this.logger.error(`The session "${sessionId}" is not registered.`, { sessionId });
return; return;
} }
Logger.debug(`Send data of type "${type}" to editor-UI`, { dataType: type, sessionId }); this.logger.debug(`Send data of type "${type}" to editor-UI`, { dataType: type, sessionId });
const sendData = jsonStringify({ type, data }, { replaceCircularRefs: true }); const sendData = jsonStringify({ type, data }, { replaceCircularRefs: true });

View File

@@ -18,7 +18,7 @@ const useWebSockets = config.getEnv('push.backend') === 'websocket';
@Service() @Service()
export class Push extends EventEmitter { export class Push extends EventEmitter {
private backend = useWebSockets ? new WebSocketPush() : new SSEPush(); private backend = useWebSockets ? Container.get(WebSocketPush) : Container.get(SSEPush);
handleRequest(req: SSEPushRequest | WebSocketPushRequest, res: PushResponse) { handleRequest(req: SSEPushRequest | WebSocketPushRequest, res: PushResponse) {
if (req.ws) { if (req.ws) {

View File

@@ -1,16 +1,19 @@
import SSEChannel from 'sse-channel'; import SSEChannel from 'sse-channel';
import { Service } from 'typedi';
import { Logger } from '@/Logger';
import { AbstractPush } from './abstract.push'; import { AbstractPush } from './abstract.push';
import type { PushRequest, PushResponse } from './types'; import type { PushRequest, PushResponse } from './types';
type Connection = { req: PushRequest; res: PushResponse }; type Connection = { req: PushRequest; res: PushResponse };
@Service()
export class SSEPush extends AbstractPush<Connection> { export class SSEPush extends AbstractPush<Connection> {
readonly channel = new SSEChannel(); readonly channel = new SSEChannel();
readonly connections: Record<string, Connection> = {}; readonly connections: Record<string, Connection> = {};
constructor() { constructor(logger: Logger) {
super(); super(logger);
this.channel.on('disconnect', (channel, { req }) => { this.channel.on('disconnect', (channel, { req }) => {
this.remove(req?.query?.sessionId); this.remove(req?.query?.sessionId);
}); });

View File

@@ -1,13 +1,16 @@
import type WebSocket from 'ws'; import type WebSocket from 'ws';
import { Service } from 'typedi';
import { Logger } from '@/Logger';
import { AbstractPush } from './abstract.push'; import { AbstractPush } from './abstract.push';
function heartbeat(this: WebSocket) { function heartbeat(this: WebSocket) {
this.isAlive = true; this.isAlive = true;
} }
@Service()
export class WebSocketPush extends AbstractPush<WebSocket> { export class WebSocketPush extends AbstractPush<WebSocket> {
constructor() { constructor(logger: Logger) {
super(); super(logger);
// Ping all connected clients every 60 seconds // Ping all connected clients every 60 seconds
setInterval(() => this.pingAll(), 60 * 1000); setInterval(() => this.pingAll(), 60 * 1000);
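The push layer applies the same split in two steps: AbstractPush accepts the Logger through its constructor, while SSEPush and WebSocketPush become @Service classes that receive the Logger from the container and forward it to super(); Push then obtains both backends via Container.get instead of new. A compact sketch with illustrative names:

import { Service } from 'typedi';
import { Logger } from '@/Logger';

abstract class ExamplePush<T> {
	protected connections: Record<string, T> = {};

	constructor(protected readonly logger: Logger) {}

	protected add(sessionId: string, connection: T): void {
		this.logger.debug('Add session', { sessionId });
		this.connections[sessionId] = connection;
	}
}

@Service()
export class ExampleWebSocketPush extends ExamplePush<unknown> {
	constructor(logger: Logger) {
		// The container injects the Logger here; the subclass only hands it
		// up to the base class, which performs the actual this.logger calls.
		super(logger);
	}
}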

View File

@@ -1,10 +1,10 @@
import { exec } from 'child_process'; import { exec } from 'child_process';
import { access as fsAccess, mkdir as fsMkdir } from 'fs/promises'; import { access as fsAccess, mkdir as fsMkdir } from 'fs/promises';
import { Service } from 'typedi'; import { Service } from 'typedi';
import { promisify } from 'util'; import { promisify } from 'util';
import axios from 'axios'; import axios from 'axios';
import { LoggerProxy as Logger } from 'n8n-workflow';
import type { PublicInstalledPackage } from 'n8n-workflow'; import type { PublicInstalledPackage } from 'n8n-workflow';
import { InstanceSettings } from 'n8n-core'; import { InstanceSettings } from 'n8n-core';
import type { PackageDirectoryLoader } from 'n8n-core'; import type { PackageDirectoryLoader } from 'n8n-core';
@@ -21,6 +21,7 @@ import {
} from '@/constants'; } from '@/constants';
import type { CommunityPackages } from '@/Interfaces'; import type { CommunityPackages } from '@/Interfaces';
import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
import { Logger } from '@/Logger';
const { const {
PACKAGE_NAME_NOT_PROVIDED, PACKAGE_NAME_NOT_PROVIDED,
@@ -48,6 +49,7 @@ export class CommunityPackagesService {
constructor( constructor(
private readonly instanceSettings: InstanceSettings, private readonly instanceSettings: InstanceSettings,
private readonly logger: Logger,
private readonly installedPackageRepository: InstalledPackagesRepository, private readonly installedPackageRepository: InstalledPackagesRepository,
private readonly loadNodesAndCredentials: LoadNodesAndCredentials, private readonly loadNodesAndCredentials: LoadNodesAndCredentials,
) {} ) {}
@@ -81,7 +83,7 @@ export class CommunityPackagesService {
} catch (maybeError) { } catch (maybeError) {
const error = toError(maybeError); const error = toError(maybeError);
Logger.error('Failed to save installed packages and nodes', { this.logger.error('Failed to save installed packages and nodes', {
error, error,
packageName: packageLoader.packageJson.name, packageName: packageLoader.packageJson.name,
}); });
@@ -156,7 +158,7 @@ export class CommunityPackagesService {
if (errorMessage.includes(npmMessage)) throw new Error(n8nMessage); if (errorMessage.includes(npmMessage)) throw new Error(n8nMessage);
}); });
Logger.warn('npm command failed', { errorMessage }); this.logger.warn('npm command failed', { errorMessage });
throw new Error(PACKAGE_FAILED_TO_INSTALL); throw new Error(PACKAGE_FAILED_TO_INSTALL);
} }
@@ -269,12 +271,12 @@ export class CommunityPackagesService {
if (missingPackages.size === 0) return; if (missingPackages.size === 0) return;
Logger.error( this.logger.error(
'n8n detected that some packages are missing. For more information, visit https://docs.n8n.io/integrations/community-nodes/troubleshooting/', 'n8n detected that some packages are missing. For more information, visit https://docs.n8n.io/integrations/community-nodes/troubleshooting/',
); );
if (reinstallMissingPackages || process.env.N8N_REINSTALL_MISSING_PACKAGES) { if (reinstallMissingPackages || process.env.N8N_REINSTALL_MISSING_PACKAGES) {
Logger.info('Attempting to reinstall missing packages', { missingPackages }); this.logger.info('Attempting to reinstall missing packages', { missingPackages });
try { try {
// Optimistic approach - stop if any installation fails // Optimistic approach - stop if any installation fails
@@ -283,9 +285,9 @@ export class CommunityPackagesService {
missingPackages.delete(missingPackage); missingPackages.delete(missingPackage);
} }
Logger.info('Packages reinstalled successfully. Resuming regular initialization.'); this.logger.info('Packages reinstalled successfully. Resuming regular initialization.');
} catch (error) { } catch (error) {
Logger.error('n8n was unable to install the missing packages.'); this.logger.error('n8n was unable to install the missing packages.');
} }
} }

View File

@@ -1,17 +1,18 @@
import { EventEmitter } from 'events'; import { EventEmitter } from 'events';
import Container, { Service } from 'typedi'; import { Container, Service } from 'typedi';
import type { INode, IRun, IWorkflowBase } from 'n8n-workflow'; import type { INode, IRun, IWorkflowBase } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';
import { StatisticsNames } from '@db/entities/WorkflowStatistics'; import { StatisticsNames } from '@db/entities/WorkflowStatistics';
import { WorkflowStatisticsRepository } from '@db/repositories'; import { WorkflowStatisticsRepository } from '@db/repositories';
import { UserService } from '@/services/user.service'; import { UserService } from '@/services/user.service';
import { Logger } from '@/Logger';
import { OwnershipService } from './ownership.service'; import { OwnershipService } from './ownership.service';
@Service() @Service()
export class EventsService extends EventEmitter { export class EventsService extends EventEmitter {
constructor( constructor(
private repository: WorkflowStatisticsRepository, private readonly logger: Logger,
private ownershipService: OwnershipService, private readonly repository: WorkflowStatisticsRepository,
private readonly ownershipService: OwnershipService,
) { ) {
super({ captureRejections: true }); super({ captureRejections: true });
if ('SKIP_STATISTICS_EVENTS' in process.env) return; if ('SKIP_STATISTICS_EVENTS' in process.env) return;
@@ -43,7 +44,7 @@ export class EventsService extends EventEmitter {
try { try {
const upsertResult = await this.repository.upsertWorkflowStatistics(name, workflowId); const upsertResult = await this.repository.upsertWorkflowStatistics(name, workflowId);
if (name === 'production_success' && upsertResult === 'insert') { if (name === StatisticsNames.productionSuccess && upsertResult === 'insert') {
const owner = await Container.get(OwnershipService).getWorkflowOwnerCached(workflowId); const owner = await Container.get(OwnershipService).getWorkflowOwnerCached(workflowId);
const metrics = { const metrics = {
user_id: owner.id, user_id: owner.id,
@@ -61,7 +62,7 @@ export class EventsService extends EventEmitter {
this.emit('telemetry.onFirstProductionWorkflowSuccess', metrics); this.emit('telemetry.onFirstProductionWorkflowSuccess', metrics);
} }
} catch (error) { } catch (error) {
LoggerProxy.verbose('Unable to fire first workflow success telemetry event'); this.logger.verbose('Unable to fire first workflow success telemetry event');
} }
} }

View File

@@ -19,7 +19,6 @@ import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
import { License } from '@/License'; import { License } from '@/License';
import { getInstanceBaseUrl } from '@/UserManagement/UserManagementHelper'; import { getInstanceBaseUrl } from '@/UserManagement/UserManagementHelper';
import * as WebhookHelpers from '@/WebhookHelpers'; import * as WebhookHelpers from '@/WebhookHelpers';
import { LoggerProxy } from 'n8n-workflow';
import config from '@/config'; import config from '@/config';
import { getCurrentAuthenticationMethod } from '@/sso/ssoHelpers'; import { getCurrentAuthenticationMethod } from '@/sso/ssoHelpers';
import { getLdapLoginLabel } from '@/Ldap/helpers'; import { getLdapLoginLabel } from '@/Ldap/helpers';
@@ -31,6 +30,7 @@ import {
} from '@/workflows/workflowHistory/workflowHistoryHelper.ee'; } from '@/workflows/workflowHistory/workflowHistoryHelper.ee';
import { UserManagementMailer } from '@/UserManagement/email'; import { UserManagementMailer } from '@/UserManagement/email';
import type { CommunityPackagesService } from '@/services/communityPackages.service'; import type { CommunityPackagesService } from '@/services/communityPackages.service';
import { Logger } from '@/Logger';
@Service() @Service()
export class FrontendService { export class FrontendService {
@@ -39,6 +39,7 @@ export class FrontendService {
private communityPackagesService?: CommunityPackagesService; private communityPackagesService?: CommunityPackagesService;
constructor( constructor(
private readonly logger: Logger,
private readonly loadNodesAndCredentials: LoadNodesAndCredentials, private readonly loadNodesAndCredentials: LoadNodesAndCredentials,
private readonly credentialTypes: CredentialTypes, private readonly credentialTypes: CredentialTypes,
private readonly credentialsOverwrites: CredentialsOverwrites, private readonly credentialsOverwrites: CredentialsOverwrites,
@@ -72,7 +73,7 @@ export class FrontendService {
const [key, url] = conf.split(';'); const [key, url] = conf.split(';');
if (!key || !url) { if (!key || !url) {
LoggerProxy.warn('Diagnostics frontend config is invalid'); this.logger.warn('Diagnostics frontend config is invalid');
telemetrySettings.enabled = false; telemetrySettings.enabled = false;
} }

View File

@@ -6,7 +6,6 @@ import promClient, { type Counter } from 'prom-client';
import semverParse from 'semver/functions/parse'; import semverParse from 'semver/functions/parse';
import { Service } from 'typedi'; import { Service } from 'typedi';
import EventEmitter from 'events'; import EventEmitter from 'events';
import { LoggerProxy } from 'n8n-workflow';
import { CacheService } from '@/services/cache.service'; import { CacheService } from '@/services/cache.service';
import type { EventMessageTypes } from '@/eventbus/EventMessageClasses'; import type { EventMessageTypes } from '@/eventbus/EventMessageClasses';
@@ -15,10 +14,14 @@ import {
getLabelsForEvent, getLabelsForEvent,
} from '@/eventbus/MessageEventBusDestination/Helpers.ee'; } from '@/eventbus/MessageEventBusDestination/Helpers.ee';
import { eventBus } from '@/eventbus'; import { eventBus } from '@/eventbus';
import { Logger } from '@/Logger';
@Service() @Service()
export class MetricsService extends EventEmitter { export class MetricsService extends EventEmitter {
constructor(private readonly cacheService: CacheService) { constructor(
private readonly logger: Logger,
private readonly cacheService: CacheService,
) {
super(); super();
} }
@@ -130,7 +133,7 @@ export class MetricsService extends EventEmitter {
prefix + event.eventName.replace('n8n.', '').replace(/\./g, '_') + '_total'; prefix + event.eventName.replace('n8n.', '').replace(/\./g, '_') + '_total';
if (!promClient.validateMetricName(metricName)) { if (!promClient.validateMetricName(metricName)) {
LoggerProxy.debug(`Invalid metric name: ${metricName}. Ignoring it!`); this.logger.debug(`Invalid metric name: ${metricName}. Ignoring it!`);
this.counters[event.eventName] = null; this.counters[event.eventName] = null;
return null; return null;
} }

View File

@@ -1,4 +1,6 @@
import { LoggerProxy, jsonParse } from 'n8n-workflow'; import { Container } from 'typedi';
import { jsonParse } from 'n8n-workflow';
import { Logger } from '@/Logger';
import type { RedisServiceCommandObject } from '../redis/RedisServiceCommands'; import type { RedisServiceCommandObject } from '../redis/RedisServiceCommands';
import { COMMAND_REDIS_CHANNEL } from '../redis/RedisServiceHelper'; import { COMMAND_REDIS_CHANNEL } from '../redis/RedisServiceHelper';
import * as os from 'os'; import * as os from 'os';
@@ -13,7 +15,7 @@ export function messageToRedisServiceCommandObject(messageString: string) {
try { try {
message = jsonParse<RedisServiceCommandObject>(messageString); message = jsonParse<RedisServiceCommandObject>(messageString);
} catch { } catch {
LoggerProxy.debug( Container.get(Logger).debug(
`Received invalid message via channel ${COMMAND_REDIS_CHANNEL}: "${messageString}"`, `Received invalid message via channel ${COMMAND_REDIS_CHANNEL}: "${messageString}"`,
); );
return; return;

View File

@@ -1,18 +1,19 @@
import { LoggerProxy } from 'n8n-workflow'; import { Container } from 'typedi';
import { debounceMessageReceiver, messageToRedisServiceCommandObject } from '../helpers'; import { debounceMessageReceiver, messageToRedisServiceCommandObject } from '../helpers';
import config from '@/config'; import config from '@/config';
import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus';
import Container from 'typedi';
import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee'; import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee';
import { License } from '@/License'; import { License } from '@/License';
import { Logger } from '@/Logger';
export async function handleCommandMessageMain(messageString: string) { export async function handleCommandMessageMain(messageString: string) {
const queueModeId = config.get('redis.queueModeId'); const queueModeId = config.get('redis.queueModeId');
const isMainInstance = config.get('generic.instanceType') === 'main'; const isMainInstance = config.get('generic.instanceType') === 'main';
const message = messageToRedisServiceCommandObject(messageString); const message = messageToRedisServiceCommandObject(messageString);
const logger = Container.get(Logger);
if (message) { if (message) {
LoggerProxy.debug( logger.debug(
`RedisCommandHandler(main): Received command message ${message.command} from ${message.senderId}`, `RedisCommandHandler(main): Received command message ${message.command} from ${message.senderId}`,
); );
if ( if (
@@ -20,7 +21,7 @@ export async function handleCommandMessageMain(messageString: string) {
(message.targets && !message.targets.includes(queueModeId)) (message.targets && !message.targets.includes(queueModeId))
) { ) {
// Skipping command message because it's not for this instance // Skipping command message because it's not for this instance
LoggerProxy.debug( logger.debug(
`Skipping command message ${message.command} because it's not for this instance.`, `Skipping command message ${message.command} because it's not for this instance.`,
); );
return message; return message;
@@ -35,7 +36,7 @@ export async function handleCommandMessageMain(messageString: string) {
} }
if (isMainInstance) { if (isMainInstance) {
// at this point in time, only a single main instance is supported, thus this command _should_ never be caught currently // at this point in time, only a single main instance is supported, thus this command _should_ never be caught currently
LoggerProxy.error( logger.error(
'Received command to reload license via Redis, but this should not have happened and is not supported on the main instance yet.', 'Received command to reload license via Redis, but this should not have happened and is not supported on the main instance yet.',
); );
return message; return message;
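Free functions that log repeatedly, like this Redis command handler, resolve the service once at the top and reuse the local reference instead of calling Container.get(Logger) at every log statement. A compact sketch (the function name is illustrative):

import { Container } from 'typedi';
import { Logger } from '@/Logger';

export function handleExampleMessage(messageString: string) {
	// Resolve once per invocation; typedi hands back the same shared Logger
	// instance, so the lookup is cheap and not repeated at each call site.
	const logger = Container.get(Logger);

	if (!messageString) {
		logger.debug('Skipping empty message');
		return;
	}
	logger.debug(`Received message: "${messageString}"`);
}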

View File

@@ -1,11 +1,13 @@
import { jsonParse, LoggerProxy } from 'n8n-workflow'; import { jsonParse } from 'n8n-workflow';
import Container from 'typedi';
import { Logger } from '@/Logger';
import type { RedisServiceWorkerResponseObject } from '../../redis/RedisServiceCommands'; import type { RedisServiceWorkerResponseObject } from '../../redis/RedisServiceCommands';
export async function handleWorkerResponseMessageMain(messageString: string) { export async function handleWorkerResponseMessageMain(messageString: string) {
const workerResponse = jsonParse<RedisServiceWorkerResponseObject>(messageString); const workerResponse = jsonParse<RedisServiceWorkerResponseObject>(messageString);
if (workerResponse) { if (workerResponse) {
// TODO: Handle worker response // TODO: Handle worker response
LoggerProxy.debug( Container.get(Logger).debug(
`Received worker response ${workerResponse.command} from ${workerResponse.workerId}`, `Received worker response ${workerResponse.command} from ${workerResponse.workerId}`,
); );
} }

View File

@@ -1,29 +1,31 @@
import { jsonParse, LoggerProxy } from 'n8n-workflow'; import { jsonParse } from 'n8n-workflow';
import Container from 'typedi';
import type { RedisServiceCommandObject } from '@/services/redis/RedisServiceCommands'; import type { RedisServiceCommandObject } from '@/services/redis/RedisServiceCommands';
import { COMMAND_REDIS_CHANNEL } from '@/services/redis/RedisServiceHelper'; import { COMMAND_REDIS_CHANNEL } from '@/services/redis/RedisServiceHelper';
import * as os from 'os'; import * as os from 'os';
import Container from 'typedi';
import { License } from '@/License'; import { License } from '@/License';
import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus';
import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee'; import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee';
import { debounceMessageReceiver, getOsCpuString } from '../helpers'; import { debounceMessageReceiver, getOsCpuString } from '../helpers';
import type { WorkerCommandReceivedHandlerOptions } from './types'; import type { WorkerCommandReceivedHandlerOptions } from './types';
import { Logger } from '@/Logger';
export function getWorkerCommandReceivedHandler(options: WorkerCommandReceivedHandlerOptions) { export function getWorkerCommandReceivedHandler(options: WorkerCommandReceivedHandlerOptions) {
return async (channel: string, messageString: string) => { return async (channel: string, messageString: string) => {
if (channel === COMMAND_REDIS_CHANNEL) { if (channel === COMMAND_REDIS_CHANNEL) {
if (!messageString) return; if (!messageString) return;
const logger = Container.get(Logger);
let message: RedisServiceCommandObject; let message: RedisServiceCommandObject;
try { try {
message = jsonParse<RedisServiceCommandObject>(messageString); message = jsonParse<RedisServiceCommandObject>(messageString);
} catch { } catch {
LoggerProxy.debug( logger.debug(
`Received invalid message via channel ${COMMAND_REDIS_CHANNEL}: "${messageString}"`, `Received invalid message via channel ${COMMAND_REDIS_CHANNEL}: "${messageString}"`,
); );
return; return;
} }
if (message) { if (message) {
LoggerProxy.debug( logger.debug(
`RedisCommandHandler(worker): Received command message ${message.command} from ${message.senderId}`, `RedisCommandHandler(worker): Received command message ${message.command} from ${message.senderId}`,
); );
if (message.targets && !message.targets.includes(options.queueModeId)) { if (message.targets && !message.targets.includes(options.queueModeId)) {
@@ -115,7 +117,7 @@ export function getWorkerCommandReceivedHandler(options: WorkerCommandReceivedHa
// await this.stopProcess(); // await this.stopProcess();
break; break;
default: default:
LoggerProxy.debug( logger.debug(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Received unknown command via channel ${COMMAND_REDIS_CHANNEL}: "${message.command}"`, `Received unknown command via channel ${COMMAND_REDIS_CHANNEL}: "${message.command}"`,
); );

View File

@@ -1,8 +1,9 @@
import type Redis from 'ioredis'; import type Redis from 'ioredis';
import type { Cluster } from 'ioredis'; import type { Cluster } from 'ioredis';
import { getDefaultRedisClient } from './RedisServiceHelper'; import { Service } from 'typedi';
import { LoggerProxy } from 'n8n-workflow';
import config from '@/config'; import config from '@/config';
import { Logger } from '@/Logger';
import { getDefaultRedisClient } from './RedisServiceHelper';
export type RedisClientType = export type RedisClientType =
| 'subscriber' | 'subscriber'
@@ -22,11 +23,14 @@ export type RedisServiceMessageHandler =
| ((channel: string, message: string) => void) | ((channel: string, message: string) => void)
| ((stream: string, id: string, message: string[]) => void); | ((stream: string, id: string, message: string[]) => void);
@Service()
class RedisServiceBase { class RedisServiceBase {
redisClient: Redis | Cluster | undefined; redisClient: Redis | Cluster | undefined;
isInitialized = false; isInitialized = false;
constructor(protected readonly logger: Logger) {}
async init(type: RedisClientType = 'client'): Promise<void> { async init(type: RedisClientType = 'client'): Promise<void> {
if (this.redisClient && this.isInitialized) { if (this.redisClient && this.isInitialized) {
return; return;
@@ -34,13 +38,13 @@ class RedisServiceBase {
this.redisClient = await getDefaultRedisClient(undefined, type); this.redisClient = await getDefaultRedisClient(undefined, type);
this.redisClient.on('close', () => { this.redisClient.on('close', () => {
LoggerProxy.warn('Redis unavailable - trying to reconnect...'); this.logger.warn('Redis unavailable - trying to reconnect...');
}); });
this.redisClient.on('error', (error) => { this.redisClient.on('error', (error) => {
if (!String(error).includes('ECONNREFUSED')) { if (!String(error).includes('ECONNREFUSED')) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
LoggerProxy.warn('Error with Redis: ', error); this.logger.warn('Error with Redis: ', error);
} }
}); });
} }
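Editor's note: for class-based services such as RedisServiceBase, the logger is no longer imported as a proxy but injected through the constructor, and typedi wires it in because the class is decorated with @Service(). A rough sketch of that shape, with the Redis client omitted and '@/Logger' assumed as in the diff:

import 'reflect-metadata';
import { Container, Service } from 'typedi';
import { Logger } from '@/Logger'; // assumed path, as in the diff

@Service()
export class RedisServiceBaseSketch {
  // typedi resolves Logger from the container when this class is constructed
  constructor(protected readonly logger: Logger) {}

  protected onClose() {
    this.logger.warn('Redis unavailable - trying to reconnect...');
  }
}

// Consumers never call `new` themselves; the container wires the dependency:
const service = Container.get(RedisServiceBaseSketch);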

View File

@@ -1,8 +1,9 @@
import type Redis from 'ioredis'; import type Redis from 'ioredis';
import type { Cluster, RedisOptions } from 'ioredis'; import type { Cluster, RedisOptions } from 'ioredis';
import config from '@/config'; import config from '@/config';
import { LoggerProxy } from 'n8n-workflow';
import type { RedisClientType } from './RedisServiceBaseClasses'; import type { RedisClientType } from './RedisServiceBaseClasses';
import Container from 'typedi';
import { Logger } from '@/Logger';
export const EVENT_BUS_REDIS_STREAM = 'n8n:eventstream'; export const EVENT_BUS_REDIS_STREAM = 'n8n:eventstream';
export const COMMAND_REDIS_STREAM = 'n8n:commandstream'; export const COMMAND_REDIS_STREAM = 'n8n:commandstream';
@@ -56,7 +57,9 @@ export function getRedisStandardClient(
maxRetriesPerRequest: null, maxRetriesPerRequest: null,
}; };
if (config.getEnv('queue.bull.redis.tls')) sharedRedisOptions.tls = {}; if (config.getEnv('queue.bull.redis.tls')) sharedRedisOptions.tls = {};
LoggerProxy.debug(
const logger = Container.get(Logger);
logger.debug(
`Initialising Redis client${redisType ? ` of type ${redisType}` : ''} connection with host: ${ `Initialising Redis client${redisType ? ` of type ${redisType}` : ''} connection with host: ${
host ?? 'localhost' host ?? 'localhost'
} and port: ${port ?? '6379'}`, } and port: ${port ?? '6379'}`,
@@ -73,7 +76,7 @@ export function getRedisStandardClient(
cumulativeTimeout += now - lastTimer; cumulativeTimeout += now - lastTimer;
lastTimer = now; lastTimer = now;
if (cumulativeTimeout > redisConnectionTimeoutLimit) { if (cumulativeTimeout > redisConnectionTimeoutLimit) {
LoggerProxy.error( logger.error(
`Unable to connect to Redis after ${redisConnectionTimeoutLimit}. Exiting process.`, `Unable to connect to Redis after ${redisConnectionTimeoutLimit}. Exiting process.`,
); );
process.exit(1); process.exit(1);
@@ -103,7 +106,9 @@ export function getRedisClusterClient(
maxRetriesPerRequest: null, maxRetriesPerRequest: null,
}; };
if (config.getEnv('queue.bull.redis.tls')) sharedRedisOptions.tls = {}; if (config.getEnv('queue.bull.redis.tls')) sharedRedisOptions.tls = {};
LoggerProxy.debug(
const logger = Container.get(Logger);
logger.debug(
`Initialising Redis cluster${ `Initialising Redis cluster${
redisType ? ` of type ${redisType}` : '' redisType ? ` of type ${redisType}` : ''
} connection with nodes: ${clusterNodes.map((e) => `${e.host}:${e.port}`).join(',')}`, } connection with nodes: ${clusterNodes.map((e) => `${e.host}:${e.port}`).join(',')}`,
@@ -122,7 +127,7 @@ export function getRedisClusterClient(
cumulativeTimeout += now - lastTimer; cumulativeTimeout += now - lastTimer;
lastTimer = now; lastTimer = now;
if (cumulativeTimeout > redisConnectionTimeoutLimit) { if (cumulativeTimeout > redisConnectionTimeoutLimit) {
LoggerProxy.error( logger.error(
`Unable to connect to Redis after ${redisConnectionTimeoutLimit}. Exiting process.`, `Unable to connect to Redis after ${redisConnectionTimeoutLimit}. Exiting process.`,
); );
process.exit(1); process.exit(1);
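Editor's note: the Redis helper keeps its reconnect logic; only the logging call sites change. The hunk context shows a retry strategy that accumulates elapsed reconnect time and aborts the process once a configured limit is exceeded. A simplified sketch under those assumptions; the reset between separate outages and the config plumbing are omitted, and the limit value is illustrative:

import Redis from 'ioredis';
import { Container } from 'typedi';
import { Logger } from '@/Logger'; // assumed path, as in the diff

const redisConnectionTimeoutLimit = 30_000; // illustrative; n8n reads this from config

let lastTimer = Date.now();
let cumulativeTimeout = 0;

const client = new Redis({
  maxRetriesPerRequest: null,
  retryStrategy: () => {
    const now = Date.now();
    cumulativeTimeout += now - lastTimer;
    lastTimer = now;
    if (cumulativeTimeout > redisConnectionTimeoutLimit) {
      Container.get(Logger).error(
        `Unable to connect to Redis after ${redisConnectionTimeoutLimit}. Exiting process.`,
      );
      process.exit(1);
    }
    return 500; // retry delay in ms
  },
});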

View File

@@ -1,8 +1,8 @@
import { Service } from 'typedi'; import { Service } from 'typedi';
import { jsonParse } from 'n8n-workflow';
import { WORKER_RESPONSE_REDIS_LIST } from './RedisServiceHelper'; import { WORKER_RESPONSE_REDIS_LIST } from './RedisServiceHelper';
import type { RedisServiceWorkerResponseObject } from './RedisServiceCommands'; import type { RedisServiceWorkerResponseObject } from './RedisServiceCommands';
import { RedisServiceBaseReceiver } from './RedisServiceBaseClasses'; import { RedisServiceBaseReceiver } from './RedisServiceBaseClasses';
import { LoggerProxy, jsonParse } from 'n8n-workflow';
@Service() @Service()
export class RedisServiceListReceiver extends RedisServiceBaseReceiver { export class RedisServiceListReceiver extends RedisServiceBaseReceiver {
@@ -37,7 +37,7 @@ export class RedisServiceListReceiver extends RedisServiceBaseReceiver {
} }
return workerResponse; return workerResponse;
} catch (error) { } catch (error) {
LoggerProxy.warn( this.logger.warn(
`Error parsing worker response on list ${list}: ${(error as Error).message}`, `Error parsing worker response on list ${list}: ${(error as Error).message}`,
); );
} }

View File

@@ -1,5 +1,4 @@
import { Service } from 'typedi'; import { Service } from 'typedi';
import { LoggerProxy as Logger } from 'n8n-workflow';
import { import {
COMMAND_REDIS_CHANNEL, COMMAND_REDIS_CHANNEL,
EVENT_BUS_REDIS_CHANNEL, EVENT_BUS_REDIS_CHANNEL,
@@ -25,9 +24,9 @@ export class RedisServicePubSubSubscriber extends RedisServiceBaseReceiver {
} }
await this.redisClient?.subscribe(channel, (error, _count: number) => { await this.redisClient?.subscribe(channel, (error, _count: number) => {
if (error) { if (error) {
Logger.error(`Error subscribing to channel ${channel}`); this.logger.error(`Error subscribing to channel ${channel}`);
} else { } else {
Logger.debug(`Subscribed Redis PubSub client to channel: ${channel}`); this.logger.debug(`Subscribed Redis PubSub client to channel: ${channel}`);
} }
}); });
} }
@@ -38,9 +37,9 @@ export class RedisServicePubSubSubscriber extends RedisServiceBaseReceiver {
} }
await this.redisClient?.unsubscribe(channel, (error, _count: number) => { await this.redisClient?.unsubscribe(channel, (error, _count: number) => {
if (error) { if (error) {
Logger.error(`Error unsubscribing from channel ${channel}`); this.logger.error(`Error unsubscribing from channel ${channel}`);
} else { } else {
Logger.debug(`Unsubscribed Redis PubSub client from channel: ${channel}`); this.logger.debug(`Unsubscribed Redis PubSub client from channel: ${channel}`);
} }
}); });
} }

View File

@@ -1,5 +1,4 @@
import { Service } from 'typedi'; import { Service } from 'typedi';
import { LoggerProxy } from 'n8n-workflow';
import { RedisServiceBaseReceiver } from './RedisServiceBaseClasses'; import { RedisServiceBaseReceiver } from './RedisServiceBaseClasses';
type LastId = string; type LastId = string;
@@ -26,7 +25,7 @@ export class RedisServiceStreamConsumer extends RedisServiceBaseReceiver {
if (!this.redisClient) { if (!this.redisClient) {
await this.init(); await this.init();
} }
LoggerProxy.debug(`Redis client now listening to stream ${stream} starting with id ${lastId}`); this.logger.debug(`Redis client now listening to stream ${stream} starting with id ${lastId}`);
this.setLastId(stream, lastId); this.setLastId(stream, lastId);
const interval = this.streams.get(stream)?.pollingInterval ?? 1000; const interval = this.streams.get(stream)?.pollingInterval ?? 1000;
const waiter = setInterval(async () => { const waiter = setInterval(async () => {
@@ -54,7 +53,7 @@ export class RedisServiceStreamConsumer extends RedisServiceBaseReceiver {
} }
stopListeningToStream(stream: StreamName): void { stopListeningToStream(stream: StreamName): void {
LoggerProxy.debug(`Redis client stopped listening to stream ${stream}`); this.logger.debug(`Redis client stopped listening to stream ${stream}`);
const existing = this.streams.get(stream); const existing = this.streams.get(stream);
if (existing?.waiter) { if (existing?.waiter) {
clearInterval(existing.waiter); clearInterval(existing.waiter);

View File

@@ -2,7 +2,7 @@ import type express from 'express';
import { Service } from 'typedi'; import { Service } from 'typedi';
import * as Db from '@/Db'; import * as Db from '@/Db';
import type { User } from '@db/entities/User'; import type { User } from '@db/entities/User';
import { jsonParse, LoggerProxy } from 'n8n-workflow'; import { jsonParse } from 'n8n-workflow';
import { AuthError, BadRequestError } from '@/ResponseHelper'; import { AuthError, BadRequestError } from '@/ResponseHelper';
import { getServiceProviderInstance } from './serviceProvider.ee'; import { getServiceProviderInstance } from './serviceProvider.ee';
import type { SamlUserAttributes } from './types/samlUserAttributes'; import type { SamlUserAttributes } from './types/samlUserAttributes';
@@ -27,6 +27,7 @@ import https from 'https';
import type { SamlLoginBinding } from './types'; import type { SamlLoginBinding } from './types';
import { validateMetadata, validateResponse } from './samlValidator'; import { validateMetadata, validateResponse } from './samlValidator';
import { getInstanceBaseUrl } from '@/UserManagement/UserManagementHelper'; import { getInstanceBaseUrl } from '@/UserManagement/UserManagementHelper';
import { Logger } from '@/Logger';
@Service() @Service()
export class SamlService { export class SamlService {
@@ -70,6 +71,8 @@ export class SamlService {
}; };
} }
constructor(private readonly logger: Logger) {}
async init(): Promise<void> { async init(): Promise<void> {
// load preferences first but do not apply so as to not load samlify unnecessarily // load preferences first but do not apply so as to not load samlify unnecessarily
await this.loadFromDbAndApplySamlPreferences(false); await this.loadFromDbAndApplySamlPreferences(false);
@@ -81,7 +84,7 @@ export class SamlService {
async loadSamlify() { async loadSamlify() {
if (this.samlify === undefined) { if (this.samlify === undefined) {
LoggerProxy.debug('Loading samlify library into memory'); this.logger.debug('Loading samlify library into memory');
this.samlify = await import('samlify'); this.samlify = await import('samlify');
} }
this.samlify.setSchemaValidator({ this.samlify.setSchemaValidator({

View File

@@ -1,5 +1,6 @@
import { LoggerProxy } from 'n8n-workflow'; import { Container } from 'typedi';
import type { XMLFileInfo } from 'xmllint-wasm'; import type { XMLFileInfo } from 'xmllint-wasm';
import { Logger } from '@/Logger';
let xml: XMLFileInfo; let xml: XMLFileInfo;
let xmldsigCore: XMLFileInfo; let xmldsigCore: XMLFileInfo;
@@ -14,7 +15,7 @@ let xmllintWasm: typeof import('xmllint-wasm') | undefined;
// dynamically load schema files // dynamically load schema files
async function loadSchemas(): Promise<void> { async function loadSchemas(): Promise<void> {
if (!xml || xml.contents === '') { if (!xml || xml.contents === '') {
LoggerProxy.debug('Loading XML schema files for SAML validation into memory'); Container.get(Logger).debug('Loading XML schema files for SAML validation into memory');
const f = await import('./schema/xml.xsd'); const f = await import('./schema/xml.xsd');
xml = { xml = {
fileName: 'xml.xsd', fileName: 'xml.xsd',
@@ -61,12 +62,13 @@ async function loadSchemas(): Promise<void> {
// dynamically load xmllint-wasm // dynamically load xmllint-wasm
async function loadXmllintWasm(): Promise<void> { async function loadXmllintWasm(): Promise<void> {
if (xmllintWasm === undefined) { if (xmllintWasm === undefined) {
LoggerProxy.debug('Loading xmllint-wasm library into memory'); Container.get(Logger).debug('Loading xmllint-wasm library into memory');
xmllintWasm = await import('xmllint-wasm'); xmllintWasm = await import('xmllint-wasm');
} }
} }
export async function validateMetadata(metadata: string): Promise<boolean> { export async function validateMetadata(metadata: string): Promise<boolean> {
const logger = Container.get(Logger);
try { try {
await loadXmllintWasm(); await loadXmllintWasm();
await loadSchemas(); await loadSchemas();
@@ -82,20 +84,21 @@ export async function validateMetadata(metadata: string): Promise<boolean> {
preload: [xmlProtocol, xmlAssertion, xmldsigCore, xmlXenc, xml], preload: [xmlProtocol, xmlAssertion, xmldsigCore, xmlXenc, xml],
}); });
if (validationResult?.valid) { if (validationResult?.valid) {
LoggerProxy.debug('SAML Metadata is valid'); logger.debug('SAML Metadata is valid');
return true; return true;
} else { } else {
LoggerProxy.warn('SAML Validate Metadata: Invalid metadata'); logger.warn('SAML Validate Metadata: Invalid metadata');
LoggerProxy.warn(validationResult ? validationResult.errors.join('\n') : ''); logger.warn(validationResult ? validationResult.errors.join('\n') : '');
} }
} catch (error) { } catch (error) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
LoggerProxy.warn(error); logger.warn(error);
} }
return false; return false;
} }
export async function validateResponse(response: string): Promise<boolean> { export async function validateResponse(response: string): Promise<boolean> {
const logger = Container.get(Logger);
try { try {
await loadXmllintWasm(); await loadXmllintWasm();
await loadSchemas(); await loadSchemas();
@@ -111,15 +114,15 @@ export async function validateResponse(response: string): Promise<boolean> {
preload: [xmlMetadata, xmlAssertion, xmldsigCore, xmlXenc, xml], preload: [xmlMetadata, xmlAssertion, xmldsigCore, xmlXenc, xml],
}); });
if (validationResult?.valid) { if (validationResult?.valid) {
LoggerProxy.debug('SAML Response is valid'); logger.debug('SAML Response is valid');
return true; return true;
} else { } else {
LoggerProxy.warn('SAML Validate Response: Failed'); logger.warn('SAML Validate Response: Failed');
LoggerProxy.warn(validationResult ? validationResult.errors.join('\n') : ''); logger.warn(validationResult ? validationResult.errors.join('\n') : '');
} }
} catch (error) { } catch (error) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
LoggerProxy.warn(error); logger.warn(error);
} }
return false; return false;
} }
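Editor's note: in the SAML validator the logger is resolved per function call, since this file exports plain functions rather than a service class; the exported API is unchanged. A hedged usage sketch, with the module path assumed and the input illustrative:

import { validateMetadata } from '@/sso/saml/samlValidator'; // assumed module path

async function checkIdpMetadata(metadataXml: string) {
  // Lazily loads xmllint-wasm and the XSD schemas on first use,
  // then logs the outcome via the injected Logger service.
  const metadataOk = await validateMetadata(metadataXml);
  if (!metadataOk) {
    throw new Error('SAML metadata failed schema validation');
  }
}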

View File

@@ -1,14 +1,13 @@
import type RudderStack from '@rudderstack/rudder-sdk-node'; import type RudderStack from '@rudderstack/rudder-sdk-node';
import { PostHogClient } from '@/posthog'; import { PostHogClient } from '@/posthog';
import { Container, Service } from 'typedi';
import type { ITelemetryTrackProperties } from 'n8n-workflow'; import type { ITelemetryTrackProperties } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';
import config from '@/config'; import config from '@/config';
import type { IExecutionTrackProperties } from '@/Interfaces'; import type { IExecutionTrackProperties } from '@/Interfaces';
import { getLogger } from '@/Logger'; import { Logger } from '@/Logger';
import { License } from '@/License'; import { License } from '@/License';
import { LicenseService } from '@/license/License.service'; import { LicenseService } from '@/license/License.service';
import { N8N_VERSION } from '@/constants'; import { N8N_VERSION } from '@/constants';
import Container, { Service } from 'typedi';
import { SourceControlPreferencesService } from '../environments/sourceControl/sourceControlPreferences.service.ee'; import { SourceControlPreferencesService } from '../environments/sourceControl/sourceControlPreferences.service.ee';
import { InstanceSettings } from 'n8n-core'; import { InstanceSettings } from 'n8n-core';
@@ -38,6 +37,7 @@ export class Telemetry {
private executionCountsBuffer: IExecutionsBuffer = {}; private executionCountsBuffer: IExecutionsBuffer = {};
constructor( constructor(
private readonly logger: Logger,
private postHog: PostHogClient, private postHog: PostHogClient,
private license: License, private license: License,
private readonly instanceSettings: InstanceSettings, private readonly instanceSettings: InstanceSettings,
@@ -50,9 +50,7 @@ export class Telemetry {
const [key, url] = conf.split(';'); const [key, url] = conf.split(';');
if (!key || !url) { if (!key || !url) {
const logger = getLogger(); this.logger.warn('Diagnostics backend config is invalid');
LoggerProxy.init(logger);
logger.warn('Diagnostics backend config is invalid');
return; return;
} }
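Editor's note: Telemetry previously had to bootstrap the proxy (getLogger() plus LoggerProxy.init) before it could warn about a bad diagnostics config; with the service it simply receives the logger in its constructor alongside its other dependencies. A trimmed sketch of the shape after the change; other dependencies are elided and the method name is illustrative:

import { Service } from 'typedi';
import { Logger } from '@/Logger'; // assumed path, as in the diff

@Service()
export class TelemetrySketch {
  constructor(private readonly logger: Logger) {}

  init(diagnosticsConfig: string) {
    const [key, url] = diagnosticsConfig.split(';');
    if (!key || !url) {
      // no proxy initialisation needed any more; the injected logger is always ready
      this.logger.warn('Diagnostics backend config is invalid');
      return;
    }
    // ... set up the telemetry backend here
  }
}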

View File

@@ -6,7 +6,7 @@ import { SharedWorkflowRepository } from '@/databases/repositories';
import { WorkflowHistoryRepository } from '@db/repositories/workflowHistory.repository'; import { WorkflowHistoryRepository } from '@db/repositories/workflowHistory.repository';
import { Service } from 'typedi'; import { Service } from 'typedi';
import { isWorkflowHistoryEnabled } from './workflowHistoryHelper.ee'; import { isWorkflowHistoryEnabled } from './workflowHistoryHelper.ee';
import { getLogger } from '@/Logger'; import { Logger } from '@/Logger';
export class SharedWorkflowNotFoundError extends Error {} export class SharedWorkflowNotFoundError extends Error {}
export class HistoryVersionNotFoundError extends Error {} export class HistoryVersionNotFoundError extends Error {}
@@ -14,6 +14,7 @@ export class HistoryVersionNotFoundError extends Error {}
@Service() @Service()
export class WorkflowHistoryService { export class WorkflowHistoryService {
constructor( constructor(
private readonly logger: Logger,
private readonly workflowHistoryRepository: WorkflowHistoryRepository, private readonly workflowHistoryRepository: WorkflowHistoryRepository,
private readonly sharedWorkflowRepository: SharedWorkflowRepository, private readonly sharedWorkflowRepository: SharedWorkflowRepository,
) {} ) {}
@@ -76,7 +77,7 @@ export class WorkflowHistoryService {
workflowId, workflowId,
}); });
} catch (e) { } catch (e) {
getLogger().error( this.logger.error(
`Failed to save workflow history version for workflow ${workflowId}`, `Failed to save workflow history version for workflow ${workflowId}`,
e as Error, e as Error,
); );
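Editor's note: across these call sites the service mirrors the proxy's API: debug, verbose, info, warn, and error, each taking a message plus optional metadata (a plain object or an Error). A brief sketch of the calling convention, assuming those signatures; the ids and error are illustrative:

import { Container } from 'typedi';
import { Logger } from '@/Logger'; // assumed path, as in the diff

const logger = Container.get(Logger);

logger.debug('Loading samlify library into memory');
logger.verbose('User attempted to access a workflow without permissions', {
  workflowId: '42', // illustrative ids
  userId: 'user-1',
});
logger.error(
  'Failed to save workflow history version for workflow 42',
  new Error('db timeout'), // illustrative error
);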

View File

@@ -11,7 +11,6 @@ import { isSharingEnabled, rightDiff } from '@/UserManagement/UserManagementHelp
import { EEWorkflowsService as EEWorkflows } from './workflows.services.ee'; import { EEWorkflowsService as EEWorkflows } from './workflows.services.ee';
import { ExternalHooks } from '@/ExternalHooks'; import { ExternalHooks } from '@/ExternalHooks';
import { SharedWorkflow } from '@db/entities/SharedWorkflow'; import { SharedWorkflow } from '@db/entities/SharedWorkflow';
import { LoggerProxy } from 'n8n-workflow';
import { CredentialsService } from '../credentials/credentials.service'; import { CredentialsService } from '../credentials/credentials.service';
import type { IExecutionPushResponse } from '@/Interfaces'; import type { IExecutionPushResponse } from '@/Interfaces';
import * as GenericHelpers from '@/GenericHelpers'; import * as GenericHelpers from '@/GenericHelpers';
@@ -22,6 +21,7 @@ import { RoleService } from '@/services/role.service';
import * as utils from '@/utils'; import * as utils from '@/utils';
import { listQueryMiddleware } from '@/middlewares'; import { listQueryMiddleware } from '@/middlewares';
import { TagService } from '@/services/tag.service'; import { TagService } from '@/services/tag.service';
import { Logger } from '@/Logger';
import { WorkflowHistoryService } from './workflowHistory/workflowHistory.service.ee'; import { WorkflowHistoryService } from './workflowHistory/workflowHistory.service.ee';
// eslint-disable-next-line @typescript-eslint/naming-convention // eslint-disable-next-line @typescript-eslint/naming-convention
@@ -181,7 +181,7 @@ EEWorkflowController.post(
}); });
if (!savedWorkflow) { if (!savedWorkflow) {
LoggerProxy.error('Failed to create workflow', { userId: req.user.id }); Container.get(Logger).error('Failed to create workflow', { userId: req.user.id });
throw new ResponseHelper.InternalServerError( throw new ResponseHelper.InternalServerError(
'An error occurred while saving your workflow. Please try again.', 'An error occurred while saving your workflow. Please try again.',
); );

View File

@@ -1,6 +1,5 @@
import express from 'express'; import express from 'express';
import { v4 as uuid } from 'uuid'; import { v4 as uuid } from 'uuid';
import { LoggerProxy } from 'n8n-workflow';
import axios from 'axios'; import axios from 'axios';
import * as Db from '@/Db'; import * as Db from '@/Db';
@@ -13,7 +12,6 @@ import { SharedWorkflow } from '@db/entities/SharedWorkflow';
import { WorkflowEntity } from '@db/entities/WorkflowEntity'; import { WorkflowEntity } from '@db/entities/WorkflowEntity';
import { validateEntity } from '@/GenericHelpers'; import { validateEntity } from '@/GenericHelpers';
import { ExternalHooks } from '@/ExternalHooks'; import { ExternalHooks } from '@/ExternalHooks';
import { getLogger } from '@/Logger';
import type { ListQuery, WorkflowRequest } from '@/requests'; import type { ListQuery, WorkflowRequest } from '@/requests';
import { isBelowOnboardingThreshold } from '@/WorkflowHelpers'; import { isBelowOnboardingThreshold } from '@/WorkflowHelpers';
import { EEWorkflowController } from './workflows.controller.ee'; import { EEWorkflowController } from './workflows.controller.ee';
@@ -27,21 +25,9 @@ import * as utils from '@/utils';
import { listQueryMiddleware } from '@/middlewares'; import { listQueryMiddleware } from '@/middlewares';
import { TagService } from '@/services/tag.service'; import { TagService } from '@/services/tag.service';
import { WorkflowHistoryService } from './workflowHistory/workflowHistory.service.ee'; import { WorkflowHistoryService } from './workflowHistory/workflowHistory.service.ee';
import { Logger } from '@/Logger';
export const workflowsController = express.Router(); export const workflowsController = express.Router();
/**
* Initialize Logger if needed
*/
workflowsController.use((req, res, next) => {
try {
LoggerProxy.getInstance();
} catch (error) {
LoggerProxy.init(getLogger());
}
next();
});
workflowsController.use('/', EEWorkflowController); workflowsController.use('/', EEWorkflowController);
/** /**
@@ -96,7 +82,7 @@ workflowsController.post(
}); });
if (!savedWorkflow) { if (!savedWorkflow) {
LoggerProxy.error('Failed to create workflow', { userId: req.user.id }); Container.get(Logger).error('Failed to create workflow', { userId: req.user.id });
throw new ResponseHelper.InternalServerError('Failed to save workflow'); throw new ResponseHelper.InternalServerError('Failed to save workflow');
} }
@@ -230,7 +216,7 @@ workflowsController.get(
}); });
if (!shared) { if (!shared) {
LoggerProxy.verbose('User attempted to access a workflow without permissions', { Container.get(Logger).verbose('User attempted to access a workflow without permissions', {
workflowId, workflowId,
userId: req.user.id, userId: req.user.id,
}); });
@@ -280,7 +266,7 @@ workflowsController.delete(
const workflow = await WorkflowsService.delete(req.user, workflowId); const workflow = await WorkflowsService.delete(req.user, workflowId);
if (!workflow) { if (!workflow) {
LoggerProxy.verbose('User attempted to delete a workflow without permissions', { Container.get(Logger).verbose('User attempted to delete a workflow without permissions', {
workflowId, workflowId,
userId: req.user.id, userId: req.user.id,
}); });
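Editor's note: with the proxy gone, workflowsController no longer needs the middleware that lazily initialised LoggerProxy on the first request; route handlers resolve the service directly when they need it. A minimal sketch of a handler in that style; the route, status code, and delete call are illustrative:

import express from 'express';
import { Container } from 'typedi';
import { Logger } from '@/Logger'; // assumed path, as in the diff

const router = express.Router();

router.delete('/:workflowId', async (req, res) => {
  const deleted = false; // placeholder for the real delete call
  if (!deleted) {
    Container.get(Logger).verbose('User attempted to delete a workflow without permissions', {
      workflowId: req.params.workflowId,
    });
    return res.status(403).send();
  }
  return res.json({ success: true });
});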

View File

@@ -1,11 +1,6 @@
import { Container } from 'typedi'; import { Container } from 'typedi';
import type { IDataObject, INode, IPinData } from 'n8n-workflow'; import type { IDataObject, INode, IPinData } from 'n8n-workflow';
import { import { NodeApiError, ErrorReporterProxy as ErrorReporter, Workflow } from 'n8n-workflow';
NodeApiError,
ErrorReporterProxy as ErrorReporter,
LoggerProxy,
Workflow,
} from 'n8n-workflow';
import type { FindManyOptions, FindOptionsSelect, FindOptionsWhere, UpdateResult } from 'typeorm'; import type { FindManyOptions, FindOptionsSelect, FindOptionsWhere, UpdateResult } from 'typeorm';
import { In, Like } from 'typeorm'; import { In, Like } from 'typeorm';
import pick from 'lodash/pick'; import pick from 'lodash/pick';
@@ -35,6 +30,7 @@ import { OwnershipService } from '@/services/ownership.service';
import { isStringArray, isWorkflowIdValid } from '@/utils'; import { isStringArray, isWorkflowIdValid } from '@/utils';
import { WorkflowHistoryService } from './workflowHistory/workflowHistory.service.ee'; import { WorkflowHistoryService } from './workflowHistory/workflowHistory.service.ee';
import { BinaryDataService } from 'n8n-core'; import { BinaryDataService } from 'n8n-core';
import { Logger } from '@/Logger';
export class WorkflowsService { export class WorkflowsService {
static async getSharing( static async getSharing(
@@ -200,8 +196,9 @@ export class WorkflowsService {
}), }),
}); });
const logger = Container.get(Logger);
if (!shared) { if (!shared) {
LoggerProxy.verbose('User attempted to update a workflow without permissions', { logger.verbose('User attempted to update a workflow without permissions', {
workflowId, workflowId,
userId: user.id, userId: user.id,
}); });
@@ -237,7 +234,7 @@ export class WorkflowsService {
} else { } else {
// Update the workflow's version // Update the workflow's version
workflow.versionId = uuid(); workflow.versionId = uuid();
LoggerProxy.verbose( logger.verbose(
`Updating versionId for workflow ${workflowId} for user ${user.id} after saving`, `Updating versionId for workflow ${workflowId} for user ${user.id} after saving`,
{ {
previousVersionId: shared.workflow.versionId, previousVersionId: shared.workflow.versionId,
@@ -514,7 +511,7 @@ export class WorkflowsService {
workflow.staticData.__dataChanged = false; workflow.staticData.__dataChanged = false;
} catch (error) { } catch (error) {
ErrorReporter.error(error); ErrorReporter.error(error);
LoggerProxy.error( Container.get(Logger).error(
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
`There was a problem saving the workflow with id "${workflow.id}" to save changed staticData: "${error.message}"`, `There was a problem saving the workflow with id "${workflow.id}" to save changed staticData: "${error.message}"`,
{ workflowId: workflow.id }, { workflowId: workflow.id },

View File

@@ -5,7 +5,7 @@ import * as utils from '../shared/utils/';
import type { ExternalSecretsSettings, SecretsProviderState } from '@/Interfaces'; import type { ExternalSecretsSettings, SecretsProviderState } from '@/Interfaces';
import { Cipher } from 'n8n-core'; import { Cipher } from 'n8n-core';
import { SettingsRepository } from '@/databases/repositories/settings.repository'; import { SettingsRepository } from '@/databases/repositories/settings.repository';
import Container from 'typedi'; import { Container } from 'typedi';
import { ExternalSecretsProviders } from '@/ExternalSecrets/ExternalSecretsProviders.ee'; import { ExternalSecretsProviders } from '@/ExternalSecrets/ExternalSecretsProviders.ee';
import { import {
DummyProvider, DummyProvider,
@@ -17,6 +17,7 @@ import config from '@/config';
import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee'; import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee';
import { CREDENTIAL_BLANKING_VALUE } from '@/constants'; import { CREDENTIAL_BLANKING_VALUE } from '@/constants';
import { jsonParse, type IDataObject } from 'n8n-workflow'; import { jsonParse, type IDataObject } from 'n8n-workflow';
import { mock } from 'jest-mock-extended';
let authOwnerAgent: SuperAgentTest; let authOwnerAgent: SuperAgentTest;
let authMemberAgent: SuperAgentTest; let authMemberAgent: SuperAgentTest;
@@ -52,6 +53,7 @@ const resetManager = async () => {
Container.set( Container.set(
ExternalSecretsManager, ExternalSecretsManager,
new ExternalSecretsManager( new ExternalSecretsManager(
mock(),
Container.get(SettingsRepository), Container.get(SettingsRepository),
licenseLike, licenseLike,
mockProvidersInstance, mockProvidersInstance,

View File

@@ -7,11 +7,6 @@ import { getRiskSection } from './utils';
import * as testDb from '../shared/testDb'; import * as testDb from '../shared/testDb';
import { generateNanoId } from '@db/utils/generators'; import { generateNanoId } from '@db/utils/generators';
import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';
LoggerProxy.init(getLogger());
beforeAll(async () => { beforeAll(async () => {
await testDb.init(); await testDb.init();
}); });
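Editor's note: the test suites show the other half of the refactor. The LoggerProxy.init(getLogger()) boilerplate disappears, and tests that construct services directly pass a mock logger instead, as ActiveExecutions and ExternalSecretsManager do further down in this diff, and as setupTestServer does with mockInstance(Logger). A hedged sketch of the direct-construction style using jest-mock-extended; SomeService is hypothetical:

import { mock } from 'jest-mock-extended';
import { Logger } from '@/Logger'; // assumed path, as in the diff

// Hypothetical service under test that takes the logger as its first constructor argument.
class SomeService {
  constructor(private readonly logger: Logger) {}

  doWork() {
    this.logger.debug('working');
  }
}

test('logs while working', () => {
  const logger = mock<Logger>();
  const service = new SomeService(logger);
  service.doWork();
  expect(logger.debug).toHaveBeenCalledWith('working');
});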

View File

@@ -10,11 +10,6 @@ import { getRiskSection, saveManualTriggerWorkflow } from './utils';
import * as testDb from '../shared/testDb'; import * as testDb from '../shared/testDb';
import { generateNanoId } from '@db/utils/generators'; import { generateNanoId } from '@db/utils/generators';
import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';
LoggerProxy.init(getLogger());
beforeAll(async () => { beforeAll(async () => {
await testDb.init(); await testDb.init();
}); });

View File

@@ -5,11 +5,6 @@ import { FILESYSTEM_INTERACTION_NODE_TYPES, FILESYSTEM_REPORT } from '@/audit/co
import { getRiskSection, saveManualTriggerWorkflow } from './utils'; import { getRiskSection, saveManualTriggerWorkflow } from './utils';
import * as testDb from '../shared/testDb'; import * as testDb from '../shared/testDb';
import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';
LoggerProxy.init(getLogger());
beforeAll(async () => { beforeAll(async () => {
await testDb.init(); await testDb.init();
}); });

View File

@@ -14,11 +14,6 @@ import { toReportTitle } from '@/audit/utils';
import config from '@/config'; import config from '@/config';
import { generateNanoId } from '@db/utils/generators'; import { generateNanoId } from '@db/utils/generators';
import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';
LoggerProxy.init(getLogger());
beforeAll(async () => { beforeAll(async () => {
await testDb.init(); await testDb.init();

View File

@@ -1,4 +1,5 @@
import { v4 as uuid } from 'uuid'; import { v4 as uuid } from 'uuid';
import { Container } from 'typedi';
import * as Db from '@/Db'; import * as Db from '@/Db';
import { audit } from '@/audit'; import { audit } from '@/audit';
import { OFFICIAL_RISKY_NODE_TYPES, NODES_REPORT } from '@/audit/constants'; import { OFFICIAL_RISKY_NODE_TYPES, NODES_REPORT } from '@/audit/constants';
@@ -9,12 +10,6 @@ import { mockInstance } from '../shared/utils/';
import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
import { NodeTypes } from '@/NodeTypes'; import { NodeTypes } from '@/NodeTypes';
import { CommunityPackagesService } from '@/services/communityPackages.service'; import { CommunityPackagesService } from '@/services/communityPackages.service';
import Container from 'typedi';
import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';
LoggerProxy.init(getLogger());
const nodesAndCredentials = mockInstance(LoadNodesAndCredentials); const nodesAndCredentials = mockInstance(LoadNodesAndCredentials);
nodesAndCredentials.getCustomDirectories.mockReturnValue([]); nodesAndCredentials.getCustomDirectories.mockReturnValue([]);

View File

@@ -1,6 +1,4 @@
import * as Config from '@oclif/config'; import * as Config from '@oclif/config';
import { mock } from 'jest-mock-extended';
import { type ILogger, LoggerProxy } from 'n8n-workflow';
import { InternalHooks } from '@/InternalHooks'; import { InternalHooks } from '@/InternalHooks';
import { ImportWorkflowsCommand } from '@/commands/import/workflow'; import { ImportWorkflowsCommand } from '@/commands/import/workflow';
@@ -8,8 +6,6 @@ import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
import * as testDb from '../shared/testDb'; import * as testDb from '../shared/testDb';
import { mockInstance } from '../shared/utils/'; import { mockInstance } from '../shared/utils/';
LoggerProxy.init(mock<ILogger>());
beforeAll(async () => { beforeAll(async () => {
mockInstance(InternalHooks); mockInstance(InternalHooks);
mockInstance(LoadNodesAndCredentials); mockInstance(LoadNodesAndCredentials);

View File

@@ -2,9 +2,7 @@ import { mockInstance } from '../shared/utils/';
import { Worker } from '@/commands/worker'; import { Worker } from '@/commands/worker';
import * as Config from '@oclif/config'; import * as Config from '@oclif/config';
import config from '@/config'; import config from '@/config';
import { LoggerProxy } from 'n8n-workflow';
import { Telemetry } from '@/telemetry'; import { Telemetry } from '@/telemetry';
import { getLogger } from '@/Logger';
import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee'; import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee';
import { BinaryDataService } from 'n8n-core'; import { BinaryDataService } from 'n8n-core';
import { CacheService } from '@/services/cache.service'; import { CacheService } from '@/services/cache.service';
@@ -23,7 +21,6 @@ import { OrchestrationWorkerService } from '@/services/orchestration/worker/orch
const oclifConfig: Config.IConfig = new Config.Config({ root: __dirname }); const oclifConfig: Config.IConfig = new Config.Config({ root: __dirname });
beforeAll(async () => { beforeAll(async () => {
LoggerProxy.init(getLogger());
config.set('executions.mode', 'queue'); config.set('executions.mode', 'queue');
config.set('binaryDataManager.availableModes', 'filesystem'); config.set('binaryDataManager.availableModes', 'filesystem');
mockInstance(Telemetry); mockInstance(Telemetry);

View File

@@ -1,8 +1,7 @@
import type { SuperAgentTest } from 'supertest'; import type { SuperAgentTest } from 'supertest';
import type { Entry as LdapUser } from 'ldapts'; import type { Entry as LdapUser } from 'ldapts';
import { Not } from 'typeorm'; import { Not } from 'typeorm';
import { type ILogger, jsonParse, LoggerProxy } from 'n8n-workflow'; import { jsonParse } from 'n8n-workflow';
import { mock } from 'jest-mock-extended';
import config from '@/config'; import config from '@/config';
import * as Db from '@/Db'; import * as Db from '@/Db';
@@ -27,8 +26,6 @@ let globalMemberRole: Role;
let owner: User; let owner: User;
let authOwnerAgent: SuperAgentTest; let authOwnerAgent: SuperAgentTest;
LoggerProxy.init(mock<ILogger>());
const defaultLdapConfig = { const defaultLdapConfig = {
...LDAP_DEFAULT_CONFIGURATION, ...LDAP_DEFAULT_CONFIGURATION,
loginEnabled: true, loginEnabled: true,

View File

@@ -2,8 +2,8 @@ import { setupTestServer } from './shared/utils';
import config from '@/config'; import config from '@/config';
import request from 'supertest'; import request from 'supertest';
import Container from 'typedi'; import Container from 'typedi';
import { MetricsService } from '../../src/services/metrics.service'; import { MetricsService } from '@/services/metrics.service';
import { N8N_VERSION } from '../../src/constants'; import { N8N_VERSION } from '@/constants';
import { parse as semverParse } from 'semver'; import { parse as semverParse } from 'semver';
jest.unmock('@/eventbus/MessageEventBus/MessageEventBus'); jest.unmock('@/eventbus/MessageEventBus/MessageEventBus');
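Editor's note: a few test files also switch from deep relative imports to the '@/' and '@db/' path aliases. Assuming the usual setup where '@/' maps to the package's src/ directory, the alias is mirrored for Jest via moduleNameMapper; a sketch of what such a mapping typically looks like (values are assumptions, not copied from the repo):

// jest.config.ts - illustrative mapping; the actual n8n config may differ
export default {
  moduleNameMapper: {
    // lets tests import '@/services/metrics.service' instead of '../../src/services/metrics.service'
    '^@/(.*)$': '<rootDir>/src/$1',
    // and '@db/...' for the database layer, as used by the tests above
    '^@db/(.*)$': '<rootDir>/src/databases/$1',
  },
};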

View File

@@ -5,8 +5,6 @@ import type { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner';
import { randomApiKey } from '../shared/random'; import { randomApiKey } from '../shared/random';
import * as utils from '../shared/utils/'; import * as utils from '../shared/utils/';
import * as testDb from '../shared/testDb'; import * as testDb from '../shared/testDb';
import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';
let owner: User; let owner: User;
let user1: User; let user1: User;
@@ -16,8 +14,6 @@ let authUser1Agent: SuperAgentTest;
let authUser2Agent: SuperAgentTest; let authUser2Agent: SuperAgentTest;
let workflowRunner: ActiveWorkflowRunner; let workflowRunner: ActiveWorkflowRunner;
LoggerProxy.init(getLogger());
const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] }); const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] });
beforeAll(async () => { beforeAll(async () => {

View File

@@ -3,11 +3,9 @@ import * as Db from '@/Db';
import * as testDb from '../shared/testDb'; import * as testDb from '../shared/testDb';
import type { ExecutionStatus } from 'n8n-workflow'; import type { ExecutionStatus } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow'; import type { ExecutionRepository } from '@/databases/repositories';
import { getLogger } from '@/Logger'; import type { ExecutionEntity } from '@/databases/entities/ExecutionEntity';
import type { ExecutionRepository } from '../../../src/databases/repositories'; import { TIME } from '@/constants';
import type { ExecutionEntity } from '../../../src/databases/entities/ExecutionEntity';
import { TIME } from '../../../src/constants';
describe('softDeleteOnPruningCycle()', () => { describe('softDeleteOnPruningCycle()', () => {
const now = new Date(); const now = new Date();
@@ -16,7 +14,6 @@ describe('softDeleteOnPruningCycle()', () => {
let workflow: Awaited<ReturnType<typeof testDb.createWorkflow>>; let workflow: Awaited<ReturnType<typeof testDb.createWorkflow>>;
beforeAll(async () => { beforeAll(async () => {
LoggerProxy.init(getLogger());
await testDb.init(); await testDb.init();
const { Execution } = Db.collections; const { Execution } = Db.collections;

View File

@@ -1,7 +1,6 @@
import { Container } from 'typedi'; import { Container } from 'typedi';
import cookieParser from 'cookie-parser'; import cookieParser from 'cookie-parser';
import express from 'express'; import express from 'express';
import { LoggerProxy } from 'n8n-workflow';
import type superagent from 'superagent'; import type superagent from 'superagent';
import request from 'supertest'; import request from 'supertest';
import { URL } from 'url'; import { URL } from 'url';
@@ -13,7 +12,6 @@ import { workflowsController } from '@/workflows/workflows.controller';
import { AUTH_COOKIE_NAME } from '@/constants'; import { AUTH_COOKIE_NAME } from '@/constants';
import { credentialsController } from '@/credentials/credentials.controller'; import { credentialsController } from '@/credentials/credentials.controller';
import type { User } from '@db/entities/User'; import type { User } from '@db/entities/User';
import { getLogger } from '@/Logger';
import { loadPublicApiVersions } from '@/PublicApi/'; import { loadPublicApiVersions } from '@/PublicApi/';
import { issueJWT } from '@/auth/jwt'; import { issueJWT } from '@/auth/jwt';
import { UserManagementMailer } from '@/UserManagement/email/UserManagementMailer'; import { UserManagementMailer } from '@/UserManagement/email/UserManagementMailer';
@@ -61,6 +59,7 @@ import { UserService } from '@/services/user.service';
import { executionsController } from '@/executions/executions.controller'; import { executionsController } from '@/executions/executions.controller';
import { WorkflowHistoryController } from '@/workflows/workflowHistory/workflowHistory.controller.ee'; import { WorkflowHistoryController } from '@/workflows/workflowHistory/workflowHistory.controller.ee';
import { BinaryDataController } from '@/controllers/binaryData.controller'; import { BinaryDataController } from '@/controllers/binaryData.controller';
import { Logger } from '@/Logger';
/** /**
* Plugin to prefix a path segment into a request URL pathname. * Plugin to prefix a path segment into a request URL pathname.
@@ -137,8 +136,10 @@ export const setupTestServer = ({
app.use(rawBodyReader); app.use(rawBodyReader);
app.use(cookieParser()); app.use(cookieParser());
const logger = getLogger(); // Mock all telemetry and logging
LoggerProxy.init(logger); const logger = mockInstance(Logger);
mockInstance(InternalHooks);
mockInstance(PostHogClient);
const testServer: TestServer = { const testServer: TestServer = {
app, app,
@@ -151,10 +152,6 @@ export const setupTestServer = ({
beforeAll(async () => { beforeAll(async () => {
await testDb.init(); await testDb.init();
// Mock all telemetry.
mockInstance(InternalHooks);
mockInstance(PostHogClient);
config.set('userManagement.jwtSecret', 'My JWT secret'); config.set('userManagement.jwtSecret', 'My JWT secret');
config.set('userManagement.isInstanceOwnerSetUp', true); config.set('userManagement.isInstanceOwnerSetUp', true);
@@ -213,11 +210,7 @@ export const setupTestServer = ({
registerController(app, config, new EventBusControllerEE()); registerController(app, config, new EventBusControllerEE());
break; break;
case 'auth': case 'auth':
registerController( registerController(app, config, Container.get(AuthController));
app,
config,
new AuthController(config, logger, internalHooks, mfaService, userService),
);
break; break;
case 'mfa': case 'mfa':
registerController(app, config, new MFAController(mfaService)); registerController(app, config, new MFAController(mfaService));
@@ -240,11 +233,7 @@ export const setupTestServer = ({
); );
registerController(app, config, Container.get(CommunityPackagesController)); registerController(app, config, Container.get(CommunityPackagesController));
case 'me': case 'me':
registerController( registerController(app, config, Container.get(MeController));
app,
config,
new MeController(logger, externalHooks, internalHooks, userService),
);
break; break;
case 'passwordReset': case 'passwordReset':
registerController( registerController(

View File

@@ -2,12 +2,10 @@ import { readFileSync } from 'fs';
import type { SuperAgentTest } from 'supertest'; import type { SuperAgentTest } from 'supertest';
import { agent as testAgent } from 'supertest'; import { agent as testAgent } from 'supertest';
import type { INodeType, INodeTypeDescription, IWebhookFunctions } from 'n8n-workflow'; import type { INodeType, INodeTypeDescription, IWebhookFunctions } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';
import { AbstractServer } from '@/AbstractServer'; import { AbstractServer } from '@/AbstractServer';
import { ExternalHooks } from '@/ExternalHooks'; import { ExternalHooks } from '@/ExternalHooks';
import { InternalHooks } from '@/InternalHooks'; import { InternalHooks } from '@/InternalHooks';
import { getLogger } from '@/Logger';
import { NodeTypes } from '@/NodeTypes'; import { NodeTypes } from '@/NodeTypes';
import { Push } from '@/push'; import { Push } from '@/push';
import type { WorkflowEntity } from '@db/entities/WorkflowEntity'; import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
@@ -19,7 +17,6 @@ describe('Webhook API', () => {
mockInstance(ExternalHooks); mockInstance(ExternalHooks);
mockInstance(InternalHooks); mockInstance(InternalHooks);
mockInstance(Push); mockInstance(Push);
LoggerProxy.init(getLogger());
let agent: SuperAgentTest; let agent: SuperAgentTest;

View File

@@ -6,6 +6,7 @@ import type { IExecuteResponsePromiseData, IRun } from 'n8n-workflow';
import { createDeferredPromise } from 'n8n-workflow'; import { createDeferredPromise } from 'n8n-workflow';
import type { IWorkflowExecutionDataProcess } from '@/Interfaces'; import type { IWorkflowExecutionDataProcess } from '@/Interfaces';
import { ExecutionRepository } from '@db/repositories'; import { ExecutionRepository } from '@db/repositories';
import { mock } from 'jest-mock-extended';
const FAKE_EXECUTION_ID = '15'; const FAKE_EXECUTION_ID = '15';
const FAKE_SECOND_EXECUTION_ID = '20'; const FAKE_SECOND_EXECUTION_ID = '20';
@@ -24,7 +25,7 @@ describe('ActiveExecutions', () => {
let activeExecutions: ActiveExecutions; let activeExecutions: ActiveExecutions;
beforeEach(() => { beforeEach(() => {
activeExecutions = new ActiveExecutions(); activeExecutions = new ActiveExecutions(mock());
}); });
afterEach(() => { afterEach(() => {

Some files were not shown because too many files have changed in this diff.