feat(core): Integrate object store as binary data manager (#7253)
Depends on: #7225 | Story: [PAY-848](https://linear.app/n8n/issue/PAY-848)

This PR integrates the object store service as a new binary data manager for Enterprise.
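For orientation, the binary data ID convention the changes below revolve around: a storage-mode prefix followed by a mode-specific file ID. A minimal sketch, using the example IDs that appear later in this diff (illustrative, not part of the commit itself):

```ts
// Sketch only: the two non-default binary data ID shapes seen in this PR.
const filesystemId = 'filesystem:11869055-83c4-4493-876a-9092c4708b9b';
const s3Id = 's3:workflows/123/executions/390/binary_data/69055-83c4-4493-876a-9092c4708b9b';

// Both the reworked download route and restoreBinaryDataId() recover the mode like this:
const [mode, fileId] = s3Id.split(':') as ['filesystem' | 's3', string];
// mode === 's3'; fileId is the S3 object path within the configured bucket
```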
@@ -15,6 +15,7 @@ import Container, { Service } from 'typedi';
 import type { BooleanLicenseFeature, N8nInstanceType, NumericLicenseFeature } from './Interfaces';
 import type { RedisServicePubSubPublisher } from './services/redis/RedisServicePubSubPublisher';
 import { RedisService } from './services/redis.service';
+import { ObjectStoreService } from 'n8n-core';

 type FeatureReturnType = Partial<
   {
@@ -103,6 +104,18 @@ export class License {
         command: 'reloadLicense',
       });
     }
+
+    const isS3Selected = config.getEnv('binaryDataManager.mode') === 's3';
+    const isS3Available = config.getEnv('binaryDataManager.availableModes').includes('s3');
+    const isS3Licensed = _features['feat:binaryDataS3'];
+
+    if (isS3Selected && isS3Available && !isS3Licensed) {
+      this.logger.debug(
+        'License changed with no support for external storage - blocking writes on object store. To restore writes, please upgrade to a license that supports this feature.',
+      );
+
+      Container.get(ObjectStoreService).setReadonly(true);
+    }
   }

   async saveCertStr(value: TLicenseBlock): Promise<void> {
@@ -1446,28 +1446,39 @@ export class Server extends AbstractServer {
     // Binary data
     // ----------------------------------------

-    // Download binary
+    // View or download binary file
     this.app.get(
-      `/${this.restEndpoint}/data/:path`,
+      `/${this.restEndpoint}/data`,
       async (req: BinaryDataRequest, res: express.Response): Promise<void> => {
         // TODO UM: check if this needs permission check for UM
-        const identifier = req.params.path;
+        const { id: binaryDataId, action } = req.query;
+        let { fileName, mimeType } = req.query;
+        const [mode] = binaryDataId.split(':') as ['filesystem' | 's3', string];
+
         try {
-          const binaryPath = this.binaryDataService.getPath(identifier);
-          let { mode, fileName, mimeType } = req.query;
+          const binaryPath = this.binaryDataService.getPath(binaryDataId);
+
           if (!fileName || !mimeType) {
             try {
-              const metadata = await this.binaryDataService.getMetadata(identifier);
+              const metadata = await this.binaryDataService.getMetadata(binaryDataId);
               fileName = metadata.fileName;
               mimeType = metadata.mimeType;
               res.setHeader('Content-Length', metadata.fileSize);
             } catch {}
           }

           if (mimeType) res.setHeader('Content-Type', mimeType);
-          if (mode === 'download') {
+
+          if (action === 'download') {
             res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`);
           }
-          res.sendFile(binaryPath);
+
+          if (mode === 's3') {
+            const readStream = await this.binaryDataService.getAsStream(binaryDataId);
+            readStream.pipe(res);
+            return;
+          } else {
+            res.sendFile(binaryPath);
+          }
         } catch (error) {
           if (error instanceof FileNotFoundError) res.writeHead(404).end();
           else throw error;
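To illustrate the reworked route above, a hypothetical client call, assuming the default `rest` endpoint prefix and a made-up file name (a sketch, not code from the commit):

```ts
// Old shape: the file was addressed by a path parameter.
//   GET /rest/data/:path
// New shape: the binary data ID and the intended action travel as query parameters.
const binaryDataId = 's3:workflows/123/executions/390/binary_data/69055-83c4-4493-876a-9092c4708b9b';

const query = new URLSearchParams({
  id: binaryDataId,
  action: 'download', // or 'view'
  fileName: 'report.pdf', // optional; the server falls back to stored metadata
  mimeType: 'application/pdf', // optional; the server falls back to stored metadata
});

// For 'filesystem' IDs the server responds via res.sendFile(); for 's3' IDs it pipes a read stream.
const res = await fetch(`/rest/data?${query.toString()}`);
```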
@@ -485,7 +485,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
           workflowId: this.workflowData.id,
         });

-        if (this.mode === 'webhook' && config.getEnv('binaryDataManager.mode') === 'filesystem') {
+        if (this.mode === 'webhook' && config.getEnv('binaryDataManager.mode') !== 'default') {
           await restoreBinaryDataId(fullRunData, this.executionId);
         }
@@ -3,13 +3,13 @@ import { ExitError } from '@oclif/errors';
 import { Container } from 'typedi';
 import { LoggerProxy, ErrorReporterProxy as ErrorReporter, sleep } from 'n8n-workflow';
 import type { IUserSettings } from 'n8n-core';
-import { BinaryDataService, UserSettings } from 'n8n-core';
+import { BinaryDataService, ObjectStoreService, UserSettings } from 'n8n-core';
 import type { AbstractServer } from '@/AbstractServer';
 import { getLogger } from '@/Logger';
 import config from '@/config';
 import * as Db from '@/Db';
 import * as CrashJournal from '@/CrashJournal';
-import { inTest } from '@/constants';
+import { LICENSE_FEATURES, inTest } from '@/constants';
 import { CredentialTypes } from '@/CredentialTypes';
 import { CredentialsOverwrites } from '@/CredentialsOverwrites';
 import { initErrorHandling } from '@/ErrorReporting';
@@ -125,7 +125,119 @@ export abstract class BaseCommand extends Command {
     process.exit(1);
   }

+  async initObjectStoreService() {
+    const isSelected = config.getEnv('binaryDataManager.mode') === 's3';
+    const isAvailable = config.getEnv('binaryDataManager.availableModes').includes('s3');
+
+    if (!isSelected && !isAvailable) return;
+
+    if (isSelected && !isAvailable) {
+      throw new Error(
+        'External storage selected but unavailable. Please make external storage available by adding "s3" to `N8N_AVAILABLE_BINARY_DATA_MODES`.',
+      );
+    }
+
+    const isLicensed = Container.get(License).isFeatureEnabled(LICENSE_FEATURES.BINARY_DATA_S3);
+
+    if (isSelected && isAvailable && isLicensed) {
+      LoggerProxy.debug(
+        'License found for external storage - object store to init in read-write mode',
+      );
+
+      await this._initObjectStoreService();
+
+      return;
+    }
+
+    if (isSelected && isAvailable && !isLicensed) {
+      LoggerProxy.debug(
+        'No license found for external storage - object store to init with writes blocked. To enable writes, please upgrade to a license that supports this feature.',
+      );
+
+      await this._initObjectStoreService({ isReadOnly: true });
+
+      return;
+    }
+
+    if (!isSelected && isAvailable) {
+      LoggerProxy.debug(
+        'External storage unselected but available - object store to init with writes unused',
+      );
+
+      await this._initObjectStoreService();
+
+      return;
+    }
+  }
+
+  private async _initObjectStoreService(options = { isReadOnly: false }) {
+    const objectStoreService = Container.get(ObjectStoreService);
+
+    const host = config.getEnv('externalStorage.s3.host');
+
+    if (host === '') {
+      throw new Error(
+        'External storage host not configured. Please set `N8N_EXTERNAL_STORAGE_S3_HOST`.',
+      );
+    }
+
+    const bucket = {
+      name: config.getEnv('externalStorage.s3.bucket.name'),
+      region: config.getEnv('externalStorage.s3.bucket.region'),
+    };
+
+    if (bucket.name === '') {
+      throw new Error(
+        'External storage bucket name not configured. Please set `N8N_EXTERNAL_STORAGE_S3_BUCKET_NAME`.',
+      );
+    }
+
+    if (bucket.region === '') {
+      throw new Error(
+        'External storage bucket region not configured. Please set `N8N_EXTERNAL_STORAGE_S3_BUCKET_REGION`.',
+      );
+    }
+
+    const credentials = {
+      accessKey: config.getEnv('externalStorage.s3.credentials.accessKey'),
+      accessSecret: config.getEnv('externalStorage.s3.credentials.accessSecret'),
+    };
+
+    if (credentials.accessKey === '') {
+      throw new Error(
+        'External storage access key not configured. Please set `N8N_EXTERNAL_STORAGE_S3_ACCESS_KEY`.',
+      );
+    }
+
+    if (credentials.accessSecret === '') {
+      throw new Error(
+        'External storage access secret not configured. Please set `N8N_EXTERNAL_STORAGE_S3_ACCESS_SECRET`.',
+      );
+    }
+
+    LoggerProxy.debug('Initializing object store service');
+
+    try {
+      await objectStoreService.init(host, bucket, credentials);
+      objectStoreService.setReadonly(options.isReadOnly);
+
+      LoggerProxy.debug('Object store init completed');
+    } catch (e) {
+      const error = e instanceof Error ? e : new Error(`${e}`);
+
+      LoggerProxy.debug('Object store init failed', { error });
+    }
+  }
+
   async initBinaryDataService() {
+    try {
+      await this.initObjectStoreService();
+    } catch (e) {
+      const error = e instanceof Error ? e : new Error(`${e}`);
+      LoggerProxy.error(`Failed to init object store: ${error.message}`, { error });
+      process.exit(1);
+    }
+
     const binaryDataConfig = config.getEnv('binaryDataManager');
     await Container.get(BinaryDataService).init(binaryDataConfig);
   }
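The branching in `initObjectStoreService()` above reduces to a small decision table over three flags. A condensed sketch of that logic (illustrative only; the real method also performs the initialization and logging shown above):

```ts
type ObjectStoreBoot = 'skip' | 'read-write' | 'read-only' | 'config-error';

// isSelected:  binaryDataManager.mode === 's3'
// isAvailable: binaryDataManager.availableModes includes 's3'
// isLicensed:  feat:binaryDataS3 is enabled on the license
function objectStoreBootMode(isSelected: boolean, isAvailable: boolean, isLicensed: boolean): ObjectStoreBoot {
  if (!isSelected && !isAvailable) return 'skip'; // object store not initialized at all
  if (isSelected && !isAvailable) return 'config-error'; // add "s3" to N8N_AVAILABLE_BINARY_DATA_MODES
  if (isSelected && isLicensed) return 'read-write';
  if (isSelected && !isLicensed) return 'read-only'; // writes blocked until a suitable license is installed
  return 'read-write'; // available but unselected: initialized, writes simply unused
}
```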
@@ -908,7 +908,7 @@ export const schema = {
       doc: 'Available modes of binary data storage, as comma separated strings',
     },
     mode: {
-      format: ['default', 'filesystem'] as const,
+      format: ['default', 'filesystem', 's3'] as const,
       default: 'default',
       env: 'N8N_DEFAULT_BINARY_DATA_MODE',
       doc: 'Storage mode for binary data',
@@ -921,6 +921,45 @@ export const schema = {
     },
   },

+  externalStorage: {
+    s3: {
+      host: {
+        format: String,
+        default: '',
+        env: 'N8N_EXTERNAL_STORAGE_S3_HOST',
+        doc: 'Host of the n8n bucket in S3-compatible external storage, e.g. `s3.us-east-1.amazonaws.com`',
+      },
+      bucket: {
+        name: {
+          format: String,
+          default: '',
+          env: 'N8N_EXTERNAL_STORAGE_S3_BUCKET_NAME',
+          doc: 'Name of the n8n bucket in S3-compatible external storage',
+        },
+        region: {
+          format: String,
+          default: '',
+          env: 'N8N_EXTERNAL_STORAGE_S3_BUCKET_REGION',
+          doc: 'Region of the n8n bucket in S3-compatible external storage, e.g. `us-east-1`',
+        },
+      },
+      credentials: {
+        accessKey: {
+          format: String,
+          default: '',
+          env: 'N8N_EXTERNAL_STORAGE_S3_ACCESS_KEY',
+          doc: 'Access key in S3-compatible external storage',
+        },
+        accessSecret: {
+          format: String,
+          default: '',
+          env: 'N8N_EXTERNAL_STORAGE_S3_ACCESS_SECRET',
+          doc: 'Access secret in S3-compatible external storage',
+        },
+      },
+    },
+  },
+
   deployment: {
     type: {
       format: String,
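Taken together, the schema entries above imply an environment along these lines for an S3-backed instance (the variable names are the ones declared above; all values are illustrative placeholders). Note that without the `feat:binaryDataS3` license flag the store is still initialized, but in read-only mode, per the `License` and `BaseCommand` changes earlier in this diff:

```ts
// Illustrative values only; bucket name and credentials are placeholders.
process.env.N8N_DEFAULT_BINARY_DATA_MODE = 's3';
process.env.N8N_AVAILABLE_BINARY_DATA_MODES = 'filesystem,s3'; // comma-separated, must include 's3'
process.env.N8N_EXTERNAL_STORAGE_S3_HOST = 's3.us-east-1.amazonaws.com';
process.env.N8N_EXTERNAL_STORAGE_S3_BUCKET_NAME = 'my-n8n-binary-data';
process.env.N8N_EXTERNAL_STORAGE_S3_BUCKET_REGION = 'us-east-1';
process.env.N8N_EXTERNAL_STORAGE_S3_ACCESS_KEY = '<access key>';
process.env.N8N_EXTERNAL_STORAGE_S3_ACCESS_SECRET = '<access secret>';
```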
@@ -81,6 +81,7 @@ export const LICENSE_FEATURES = {
   SHOW_NON_PROD_BANNER: 'feat:showNonProdBanner',
   WORKFLOW_HISTORY: 'feat:workflowHistory',
   DEBUG_IN_EDITOR: 'feat:debugInEditor',
+  BINARY_DATA_S3: 'feat:binaryDataS3',
 } as const;

 export const LICENSE_QUOTAS = {
@@ -68,6 +68,7 @@ export class E2EController {
     [LICENSE_FEATURES.SHOW_NON_PROD_BANNER]: false,
     [LICENSE_FEATURES.WORKFLOW_HISTORY]: false,
     [LICENSE_FEATURES.DEBUG_IN_EDITOR]: false,
+    [LICENSE_FEATURES.BINARY_DATA_S3]: false,
   };

   constructor(
@@ -1,13 +1,14 @@
 import Container from 'typedi';
 import { BinaryDataService } from 'n8n-core';
 import type { IRun } from 'n8n-workflow';
+import type { BinaryData } from 'n8n-core';

-export function isMissingExecutionId(binaryDataId: string) {
-  const UUID_CHAR_LENGTH = 36;
-
-  return [UUID_CHAR_LENGTH + 'filesystem:'.length, UUID_CHAR_LENGTH + 's3:'.length].some(
-    (incorrectLength) => binaryDataId.length === incorrectLength,
-  );
+export function isMissingExecutionId(
+  fileId: string,
+  mode: BinaryData.NonDefaultMode,
+  uuidV4CharLength = 36,
+) {
+  return mode === 'filesystem' ? uuidV4CharLength === fileId.length : fileId.includes('/temp/');
 }

 /**
@@ -19,6 +20,9 @@ export function isMissingExecutionId(binaryDataId: string) {
  * ```txt
  * filesystem:11869055-83c4-4493-876a-9092c4708b9b ->
  * filesystem:39011869055-83c4-4493-876a-9092c4708b9b
+ *
+ * s3:workflows/123/executions/temp/binary_data/69055-83c4-4493-876a-9092c4708b9b ->
+ * s3:workflows/123/executions/390/binary_data/69055-83c4-4493-876a-9092c4708b9b
  * ```
  */
 export async function restoreBinaryDataId(run: IRun, executionId: string) {
@@ -27,14 +31,19 @@ export async function restoreBinaryDataId(run: IRun, executionId: string) {
   const promises = Object.keys(runData).map(async (nodeName) => {
     const binaryDataId = runData[nodeName]?.[0]?.data?.main?.[0]?.[0]?.binary?.data.id;

-    if (!binaryDataId || !isMissingExecutionId(binaryDataId)) return;
+    if (!binaryDataId) return;
+
+    const [mode, fileId] = binaryDataId.split(':') as [BinaryData.NonDefaultMode, string];
+
+    if (!isMissingExecutionId(fileId, mode)) return;
+
+    const correctFileId =
+      mode === 'filesystem' ? `${executionId}${fileId}` : fileId.replace('temp', executionId);
+
+    await Container.get(BinaryDataService).rename(fileId, correctFileId);

-    const [mode, incorrectFileId] = binaryDataId.split(':');
-    const correctFileId = `${executionId}${incorrectFileId}`;
     const correctBinaryDataId = `${mode}:${correctFileId}`;

-    await Container.get(BinaryDataService).rename(incorrectFileId, correctFileId);
-
     // @ts-expect-error Validated at the top
     run.data.resultData.runData[nodeName][0].data.main[0][0].binary.data.id = correctBinaryDataId;
   });
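A worked illustration of the two rename rules implemented above, reusing the IDs from the doc comment (a sketch, not part of the commit):

```ts
// filesystem mode: the execution ID is prepended to the bare 36-char UUID file ID.
// 'filesystem:11869055-83c4-4493-876a-9092c4708b9b'
//   -> rename('11869055-83c4-4493-876a-9092c4708b9b', '39011869055-83c4-4493-876a-9092c4708b9b')
//
// s3 mode: the 'temp' placeholder path segment is replaced by the execution ID.
const executionId = '390';
const fileId = 'workflows/123/executions/temp/binary_data/69055-83c4-4493-876a-9092c4708b9b';
const correctFileId = fileId.replace('temp', executionId);
// 'workflows/123/executions/390/binary_data/69055-83c4-4493-876a-9092c4708b9b'
```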
@@ -492,11 +492,12 @@ export declare namespace LicenseRequest {
 }

 export type BinaryDataRequest = AuthenticatedRequest<
-  { path: string },
+  {},
   {},
   {},
   {
-    mode: 'view' | 'download';
+    id: string;
+    action: 'view' | 'download';
     fileName?: string;
     mimeType?: string;
   }
@@ -23,6 +23,7 @@ const oclifConfig: Config.IConfig = new Config.Config({ root: __dirname });
 beforeAll(async () => {
   LoggerProxy.init(getLogger());
   config.set('executions.mode', 'queue');
+  config.set('binaryDataManager.availableModes', 'filesystem');
   mockInstance(Telemetry);
   mockInstance(PostHogClient);
   mockInstance(InternalHooks);
@@ -74,11 +74,13 @@ export async function initNodeTypes() {
 /**
  * Initialize a BinaryDataService for test runs.
  */
-export async function initBinaryDataService() {
+export async function initBinaryDataService(mode: 'default' | 'filesystem' = 'default') {
   const binaryDataService = new BinaryDataService();

-  await binaryDataService.init(config.getEnv('binaryDataManager'));
-
+  await binaryDataService.init({
+    mode,
+    availableModes: [mode],
+    localStoragePath: '',
+  });
   Container.set(BinaryDataService, binaryDataService);
 }
@@ -2,6 +2,7 @@ import { restoreBinaryDataId } from '@/executionLifecycleHooks/restoreBinaryData
 import { BinaryDataService } from 'n8n-core';
 import { mockInstance } from '../integration/shared/utils/mocking';
 import type { IRun } from 'n8n-workflow';
+import config from '@/config';

 function toIRun(item?: object) {
   return {
@@ -27,62 +28,141 @@ function getDataId(run: IRun, kind: 'binary' | 'json') {
   return run.data.resultData.runData.myNode[0].data.main[0][0][kind].data.id;
 }

-describe('restoreBinaryDataId()', () => {
-  const binaryDataService = mockInstance(BinaryDataService);
+const binaryDataService = mockInstance(BinaryDataService);

-  beforeEach(() => {
-    jest.clearAllMocks();
-  });
-
-  it('should restore if binary data ID is missing execution ID', async () => {
-    const executionId = '999';
-    const incorrectFileId = 'a5c3f1ed-9d59-4155-bc68-9a370b3c51f6';
-    const run = toIRun({
-      binary: {
-        data: { id: `filesystem:${incorrectFileId}` },
-      },
-    });
-
-    await restoreBinaryDataId(run, executionId);
-
-    const correctFileId = `${executionId}${incorrectFileId}`;
-    const correctBinaryDataId = `filesystem:${correctFileId}`;
-
-    expect(binaryDataService.rename).toHaveBeenCalledWith(incorrectFileId, correctFileId);
-    expect(getDataId(run, 'binary')).toBe(correctBinaryDataId);
-  });
-
-  it('should do nothing if binary data ID is not missing execution ID', async () => {
-    const executionId = '999';
-    const fileId = `${executionId}a5c3f1ed-9d59-4155-bc68-9a370b3c51f6`;
-    const binaryDataId = `filesystem:${fileId}`;
-    const run = toIRun({
-      binary: {
-        data: {
-          id: binaryDataId,
-        },
-      },
-    });
-
-    await restoreBinaryDataId(run, executionId);
-
-    expect(binaryDataService.rename).not.toHaveBeenCalled();
-    expect(getDataId(run, 'binary')).toBe(binaryDataId);
-  });
-
-  it('should do nothing if no binary data ID', async () => {
-    const executionId = '999';
-    const dataId = '123';
-    const run = toIRun({
-      json: {
-        data: { id: dataId },
-      },
-    });
-
-    await restoreBinaryDataId(run, executionId);
-
-    expect(binaryDataService.rename).not.toHaveBeenCalled();
-    expect(getDataId(run, 'json')).toBe(dataId);
+describe('on filesystem mode', () => {
+  describe('restoreBinaryDataId()', () => {
+    beforeAll(() => {
+      config.set('binaryDataManager.mode', 'filesystem');
+    });
+
+    afterEach(() => {
+      jest.clearAllMocks();
+    });
+
+    it('should restore if binary data ID is missing execution ID', async () => {
+      const executionId = '999';
+      const incorrectFileId = 'a5c3f1ed-9d59-4155-bc68-9a370b3c51f6';
+      const run = toIRun({
+        binary: {
+          data: { id: `filesystem:${incorrectFileId}` },
+        },
+      });
+
+      await restoreBinaryDataId(run, executionId);
+
+      const correctFileId = `${executionId}${incorrectFileId}`;
+      const correctBinaryDataId = `filesystem:${correctFileId}`;
+
+      expect(binaryDataService.rename).toHaveBeenCalledWith(incorrectFileId, correctFileId);
+      expect(getDataId(run, 'binary')).toBe(correctBinaryDataId);
+    });
+
+    it('should do nothing if binary data ID is not missing execution ID', async () => {
+      const executionId = '999';
+      const fileId = `${executionId}a5c3f1ed-9d59-4155-bc68-9a370b3c51f6`;
+      const binaryDataId = `filesystem:${fileId}`;
+      const run = toIRun({
+        binary: {
+          data: {
+            id: binaryDataId,
+          },
+        },
+      });
+
+      await restoreBinaryDataId(run, executionId);
+
+      expect(binaryDataService.rename).not.toHaveBeenCalled();
+      expect(getDataId(run, 'binary')).toBe(binaryDataId);
+    });
+
+    it('should do nothing if no binary data ID', async () => {
+      const executionId = '999';
+      const dataId = '123';
+      const run = toIRun({
+        json: {
+          data: { id: dataId },
+        },
+      });
+
+      await restoreBinaryDataId(run, executionId);
+
+      expect(binaryDataService.rename).not.toHaveBeenCalled();
+      expect(getDataId(run, 'json')).toBe(dataId);
+    });
+  });
+});
+
+describe('on s3 mode', () => {
+  describe('restoreBinaryDataId()', () => {
+    beforeAll(() => {
+      config.set('binaryDataManager.mode', 's3');
+    });
+
+    afterEach(() => {
+      jest.clearAllMocks();
+    });
+
+    it('should restore if binary data ID is missing execution ID', async () => {
+      const workflowId = '6HYhhKmJch2cYxGj';
+      const executionId = 'temp';
+      const binaryDataFileUuid = 'a5c3f1ed-9d59-4155-bc68-9a370b3c51f6';
+
+      const incorrectFileId = `workflows/${workflowId}/executions/temp/binary_data/${binaryDataFileUuid}`;
+
+      const run = toIRun({
+        binary: {
+          data: { id: `s3:${incorrectFileId}` },
+        },
+      });
+
+      await restoreBinaryDataId(run, executionId);
+
+      const correctFileId = incorrectFileId.replace('temp', executionId);
+      const correctBinaryDataId = `s3:${correctFileId}`;
+
+      expect(binaryDataService.rename).toHaveBeenCalledWith(incorrectFileId, correctFileId);
+      expect(getDataId(run, 'binary')).toBe(correctBinaryDataId);
+    });
+
+    it('should do nothing if binary data ID is not missing execution ID', async () => {
+      const workflowId = '6HYhhKmJch2cYxGj';
+      const executionId = '999';
+      const binaryDataFileUuid = 'a5c3f1ed-9d59-4155-bc68-9a370b3c51f6';
+
+      const fileId = `workflows/${workflowId}/executions/${executionId}/binary_data/${binaryDataFileUuid}`;
+
+      const binaryDataId = `s3:${fileId}`;
+
+      const run = toIRun({
+        binary: {
+          data: {
+            id: binaryDataId,
+          },
+        },
+      });
+
+      await restoreBinaryDataId(run, executionId);
+
+      expect(binaryDataService.rename).not.toHaveBeenCalled();
+      expect(getDataId(run, 'binary')).toBe(binaryDataId);
+    });
+
+    it('should do nothing if no binary data ID', async () => {
+      const executionId = '999';
+      const dataId = '123';
+
+      const run = toIRun({
+        json: {
+          data: { id: dataId },
+        },
+      });
+
+      await restoreBinaryDataId(run, executionId);
+
+      expect(binaryDataService.rename).not.toHaveBeenCalled();
+      expect(getDataId(run, 'json')).toBe(dataId);
+    });
   });

   it('should do nothing on itemless case', async () => {