feat(core): Add InstanceRole auth support for binary-data object-storage backend (#14800)

Author: कारतोफ्फेलस्क्रिप्ट™
Date: 2025-04-23 16:56:13 +02:00
Committed by: GitHub
Parent: 9082adf89a
Commit: 271024ded0
13 changed files with 644 additions and 572 deletions
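
In short: when automatic credential detection is enabled, the new `ObjectStoreService` builds its `S3ClientConfig` without static credentials, so the AWS SDK resolves them on its own. A sketch of the two client configurations this toggles between (illustrative only, not part of the diff):

```typescript
import { S3Client, type S3ClientConfig } from '@aws-sdk/client-s3';

// Before (and still the default): static credentials from
// N8N_EXTERNAL_STORAGE_S3_ACCESS_KEY / N8N_EXTERNAL_STORAGE_S3_ACCESS_SECRET.
const staticCredentialsConfig: S3ClientConfig = {
	region: 'us-east-1',
	credentials: { accessKeyId: 'example-access-key', secretAccessKey: 'example-secret' },
};

// With N8N_EXTERNAL_STORAGE_S3_AUTH_AUTO_DETECT=true the credentials block is omitted,
// so the SDK resolves credentials via its default provider chain (env vars, shared
// config, container/EC2 instance metadata), which is what enables instance-role auth.
const autoDetectConfig: S3ClientConfig = {
	region: 'us-east-1',
};

export const s3 = new S3Client(autoDetectConfig);
export const s3WithStaticCredentials = new S3Client(staticCredentialsConfig);
```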

View File

@@ -10,7 +10,6 @@ import { EndpointsConfig } from './configs/endpoints.config';
 import { EventBusConfig } from './configs/event-bus.config';
 import { ExecutionsConfig } from './configs/executions.config';
 import { ExternalHooksConfig } from './configs/external-hooks.config';
-import { ExternalStorageConfig } from './configs/external-storage.config';
 import { GenericConfig } from './configs/generic.config';
 import { LicenseConfig } from './configs/license.config';
 import { LoggingConfig } from './configs/logging.config';
@@ -34,7 +33,6 @@ export { Config, Env, Nested } from './decorators';
 export { TaskRunnersConfig } from './configs/runners.config';
 export { SecurityConfig } from './configs/security.config';
 export { ExecutionsConfig } from './configs/executions.config';
-export { S3Config } from './configs/external-storage.config';
 export { LOG_SCOPES } from './configs/logging.config';
 export type { LogScope } from './configs/logging.config';
 export { WorkflowsConfig } from './configs/workflows.config';
@@ -76,9 +74,6 @@ export class GlobalConfig {
 @Nested
 nodes: NodesConfig;
-@Nested
-externalStorage: ExternalStorageConfig;
 @Nested
 workflows: WorkflowsConfig;

View File

@@ -139,20 +139,6 @@ describe('GlobalConfig', () => {
 endpoint: 'https://api.n8n.io/api/versions/',
 infoUrl: 'https://docs.n8n.io/hosting/installation/updating/',
 },
-externalStorage: {
-s3: {
-host: '',
-protocol: 'https',
-bucket: {
-name: '',
-region: '',
-},
-credentials: {
-accessKey: '',
-accessSecret: '',
-},
-},
-},
 workflows: {
 defaultName: 'My workflow',
 callerPolicyDefaultOption: 'workflowsFromSameOwner',

View File

@@ -28,8 +28,6 @@
 ],
 "devDependencies": {
 "@n8n/typescript-config": "workspace:*",
-"@types/aws4": "^1.5.1",
-"@types/concat-stream": "^2.0.0",
 "@types/express": "catalog:",
 "@types/jsonwebtoken": "catalog:",
 "@types/lodash": "catalog:",
@@ -38,16 +36,15 @@
 "@types/xml2js": "catalog:"
 },
 "dependencies": {
+"@aws-sdk/client-s3": "3.666.0",
 "@langchain/core": "catalog:",
 "@n8n/client-oauth2": "workspace:*",
 "@n8n/config": "workspace:*",
 "@n8n/di": "workspace:*",
 "@sentry/node": "catalog:",
-"aws4": "1.11.0",
 "axios": "catalog:",
 "callsites": "catalog:",
 "chardet": "2.0.0",
-"concat-stream": "2.0.0",
 "cron": "3.1.7",
 "fast-glob": "catalog:",
 "file-type": "16.5.4",

View File

@@ -2,9 +2,9 @@ import fs from 'node:fs';
 import fsp from 'node:fs/promises';
 import { tmpdir } from 'node:os';
 import path from 'node:path';
+import { Readable } from 'node:stream';
 import { FileSystemManager } from '@/binary-data/file-system.manager';
-import { isStream } from '@/binary-data/object-store/utils';
 import { toFileId, toStream } from '@test/utils';
 jest.mock('fs');
@@ -70,7 +70,7 @@ describe('getAsStream()', () => {
 const stream = await fsManager.getAsStream(fileId);
-expect(isStream(stream)).toBe(true);
+expect(stream).toBeInstanceOf(Readable);
 expect(fs.createReadStream).toHaveBeenCalledWith(toFullFilePath(fileId), {
 highWaterMark: undefined,
 });

View File

@@ -1,9 +1,9 @@
 import { mock } from 'jest-mock-extended';
 import fs from 'node:fs/promises';
+import { Readable } from 'node:stream';
 import { ObjectStoreService } from '@/binary-data/object-store/object-store.service.ee';
 import type { MetadataResponseHeaders } from '@/binary-data/object-store/types';
-import { isStream } from '@/binary-data/object-store/utils';
 import { ObjectStoreManager } from '@/binary-data/object-store.manager';
 import { mockInstance, toFileId, toStream } from '@test/utils';
@@ -67,7 +67,7 @@ describe('getAsStream()', () => {
 const stream = await objectStoreManager.getAsStream(fileId);
-expect(isStream(stream)).toBe(true);
+expect(stream).toBeInstanceOf(Readable);
 expect(objectStoreService.get).toHaveBeenCalledWith(fileId, { mode: 'stream' });
 });
 });

View File

@@ -1,3 +1,4 @@
+import { UnexpectedError } from 'n8n-workflow';
 import { Readable } from 'node:stream';
 import { createGunzip } from 'node:zlib';
@@ -27,7 +28,7 @@ describe('BinaryData/utils', () => {
 const gunzip = createGunzip();
 const body = Readable.from(Buffer.from('0001f8b080000000000000000', 'hex')).pipe(gunzip);
 await expect(binaryToBuffer(body)).rejects.toThrow(
-new Error('Failed to decompress response'),
+new UnexpectedError('Failed to decompress response'),
 );
 });
 });

View File

@@ -1,34 +1,51 @@
-import type { S3Config } from '@n8n/config';
-import axios from 'axios';
-import { mock } from 'jest-mock-extended';
+import {
+DeleteObjectCommand,
+DeleteObjectsCommand,
+GetObjectCommand,
+HeadBucketCommand,
+HeadObjectCommand,
+ListObjectsV2Command,
+PutObjectCommand,
+type S3Client,
+} from '@aws-sdk/client-s3';
+import { captor, mock } from 'jest-mock-extended';
 import { Readable } from 'stream';
-import { ObjectStoreService } from '@/binary-data/object-store/object-store.service.ee';
-jest.mock('axios');
-const mockAxios = axios as jest.Mocked<typeof axios>;
+import type { ObjectStoreConfig } from '../object-store.config';
+import { ObjectStoreService } from '../object-store.service.ee';
+const mockS3Send = jest.fn();
+const s3Client = mock<S3Client>({ send: mockS3Send });
+jest.mock('@aws-sdk/client-s3', () => ({
+...jest.requireActual('@aws-sdk/client-s3'),
+S3Client: class {
+constructor() {
+return s3Client;
+}
+},
+}));
+describe('ObjectStoreService', () => {
 const mockBucket = { region: 'us-east-1', name: 'test-bucket' };
 const mockHost = `s3.${mockBucket.region}.amazonaws.com`;
-const mockCredentials = { accessKey: 'mock-access-key', accessSecret: 'mock-secret-key' };
-const mockUrl = `https://${mockHost}/${mockBucket.name}`;
 const FAILED_REQUEST_ERROR_MESSAGE = 'Request to S3 failed';
 const mockError = new Error('Something went wrong!');
-const fileId =
-'workflows/ObogjVbqpNOQpiyV/executions/999/binary_data/71f6209b-5d48-41a2-a224-80d529d8bb32';
+const workflowId = 'workflow-id';
+const executionId = 999;
+const binaryDataId = '71f6209b-5d48-41a2-a224-80d529d8bb32';
+const fileId = `workflows/${workflowId}/executions/${executionId}/binary_data/${binaryDataId}`;
 const mockBuffer = Buffer.from('Test data');
-const s3Config = mock<S3Config>({
+const s3Config = mock<ObjectStoreConfig>({
 host: mockHost,
 bucket: mockBucket,
-credentials: mockCredentials,
+credentials: {
+accessKey: 'mock-access-key',
+accessSecret: 'mock-secret-key',
+authAutoDetect: false,
+},
 protocol: 'https',
 });
-const toDeletionXml = (filename: string) => `<Delete>
-<Object><Key>${filename}</Key></Object>
-</Delete>`;
 let objectStoreService: ObjectStoreService;
 const now = new Date('2024-02-01T01:23:45.678Z');
@@ -36,36 +53,70 @@ jest.useFakeTimers({ now });
 beforeEach(async () => {
 objectStoreService = new ObjectStoreService(mock(), s3Config);
-mockAxios.request.mockResolvedValueOnce({ status: 200 }); // for checkConnection
 await objectStoreService.init();
 jest.restoreAllMocks();
 });
+describe('getClientConfig()', () => {
+const credentials = {
+accessKeyId: s3Config.credentials.accessKey,
+secretAccessKey: s3Config.credentials.accessSecret,
+};
+it('should return client config with endpoint and forcePathStyle when custom host is provided', () => {
+s3Config.host = 'example.com';
+const clientConfig = objectStoreService.getClientConfig();
+expect(clientConfig).toEqual({
+endpoint: 'https://example.com',
+forcePathStyle: true,
+region: mockBucket.region,
+credentials,
+});
+});
+it('should return client config without endpoint when host is not provided', () => {
+s3Config.host = '';
+const clientConfig = objectStoreService.getClientConfig();
+expect(clientConfig).toEqual({
+region: mockBucket.region,
+credentials,
+});
+});
+it('should return client config without credentials when authAutoDetect is true', () => {
+s3Config.credentials.authAutoDetect = true;
+const clientConfig = objectStoreService.getClientConfig();
+expect(clientConfig).toEqual({
+region: mockBucket.region,
+});
+});
+});
 describe('checkConnection()', () => {
-it('should send a HEAD request to the correct host', async () => {
-mockAxios.request.mockResolvedValue({ status: 200 });
+it('should send a HEAD request to the correct bucket', async () => {
+mockS3Send.mockResolvedValueOnce({});
 objectStoreService.setReady(false);
 await objectStoreService.checkConnection();
-expect(mockAxios.request).toHaveBeenCalledWith({
-method: 'HEAD',
-url: 'https://s3.us-east-1.amazonaws.com/test-bucket',
-headers: {
-Host: 's3.us-east-1.amazonaws.com',
-'X-Amz-Content-Sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
-'X-Amz-Date': '20240201T012345Z',
-Authorization:
-'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=a5240c11a706e9e6c60e7033a848fc934911b12330e5a4609b0b943f97d9781b',
-},
-});
+const commandCaptor = captor<HeadObjectCommand>();
+expect(mockS3Send).toHaveBeenCalledWith(commandCaptor);
+const command = commandCaptor.value;
+expect(command).toBeInstanceOf(HeadBucketCommand);
+expect(command.input).toEqual({ Bucket: 'test-bucket' });
 });
 it('should throw an error on request failure', async () => {
 objectStoreService.setReady(false);
-mockAxios.request.mockRejectedValue(mockError);
+mockS3Send.mockRejectedValueOnce(mockError);
 const promise = objectStoreService.checkConnection();
@@ -74,26 +125,29 @@ describe('checkConnection()', () => {
 });
 describe('getMetadata()', () => {
-it('should send a HEAD request to the correct host and path', async () => {
-mockAxios.request.mockResolvedValue({ status: 200 });
+it('should send a HEAD request to the correct bucket and key', async () => {
+mockS3Send.mockResolvedValueOnce({
+ContentType: 'text/plain',
+ContentLength: 1024,
+ETag: '"abc123"',
+LastModified: new Date(),
+Metadata: { filename: 'test.txt' },
+});
 await objectStoreService.getMetadata(fileId);
-expect(mockAxios.request).toHaveBeenCalledWith({
-method: 'HEAD',
-url: `${mockUrl}/${fileId}`,
-headers: {
-Host: mockHost,
-'X-Amz-Content-Sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
-'X-Amz-Date': '20240201T012345Z',
-Authorization:
-'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=60e11c39580ad7dd3a3d549523e7115cdff018540f24c6412ed40053e52a21d0',
-},
+const commandCaptor = captor<HeadObjectCommand>();
+expect(mockS3Send).toHaveBeenCalledWith(commandCaptor);
+const command = commandCaptor.value;
+expect(command).toBeInstanceOf(HeadObjectCommand);
+expect(command.input).toEqual({
+Bucket: 'test-bucket',
+Key: fileId,
 });
 });
 it('should throw an error on request failure', async () => {
-mockAxios.request.mockRejectedValue(mockError);
+mockS3Send.mockRejectedValueOnce(mockError);
 const promise = objectStoreService.getMetadata(fileId);
@@ -105,32 +159,29 @@ describe('put()', () => {
 it('should send a PUT request to upload an object', async () => {
 const metadata = { fileName: 'file.txt', mimeType: 'text/plain' };
-mockAxios.request.mockResolvedValue({ status: 200 });
+mockS3Send.mockResolvedValueOnce({});
 await objectStoreService.put(fileId, mockBuffer, metadata);
-expect(mockAxios.request).toHaveBeenCalledWith({
-method: 'PUT',
-url: 'https://s3.us-east-1.amazonaws.com/test-bucket/workflows/ObogjVbqpNOQpiyV/executions/999/binary_data/71f6209b-5d48-41a2-a224-80d529d8bb32',
-headers: {
-'Content-Length': 9,
-'Content-MD5': 'yh6gLBC3w39CW5t92G1eEQ==',
-'x-amz-meta-filename': 'file.txt',
-'Content-Type': 'text/plain',
-Host: 's3.us-east-1.amazonaws.com',
-'X-Amz-Content-Sha256': 'e27c8214be8b7cf5bccc7c08247e3cb0c1514a48ee1f63197fe4ef3ef51d7e6f',
-'X-Amz-Date': '20240201T012345Z',
-Authorization:
-'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-meta-filename, Signature=6b0fbb51a35dbfa73ac79a964ffc7203b40517a062efc5b01f5f9b7ad553fa7a',
-},
-data: mockBuffer,
+const commandCaptor = captor<PutObjectCommand>();
+expect(mockS3Send).toHaveBeenCalledWith(commandCaptor);
+const command = commandCaptor.value;
+expect(command).toBeInstanceOf(PutObjectCommand);
+expect(command.input).toEqual({
+Bucket: 'test-bucket',
+Key: fileId,
+Body: mockBuffer,
+ContentLength: mockBuffer.length,
+ContentMD5: 'yh6gLBC3w39CW5t92G1eEQ==',
+ContentType: 'text/plain',
+Metadata: { filename: 'file.txt' },
 });
 });
 it('should throw an error on request failure', async () => {
 const metadata = { fileName: 'file.txt', mimeType: 'text/plain' };
-mockAxios.request.mockRejectedValue(mockError);
+mockS3Send.mockRejectedValueOnce(mockError);
 const promise = objectStoreService.put(fileId, mockBuffer, metadata);
@@ -141,50 +192,46 @@ describe('put()', () => {
 describe('get()', () => {
 it('should send a GET request to download an object as a buffer', async () => {
 const fileId = 'file.txt';
-mockAxios.request.mockResolvedValue({ status: 200, data: Buffer.from('Test content') });
+const body = Readable.from(mockBuffer);
+mockS3Send.mockResolvedValueOnce({ Body: body });
 const result = await objectStoreService.get(fileId, { mode: 'buffer' });
-expect(mockAxios.request).toHaveBeenCalledWith({
-method: 'GET',
-url: `${mockUrl}/${fileId}`,
-responseType: 'arraybuffer',
-headers: {
-Authorization:
-'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=5f69680786e0ad9f0a0324eb5e4b8fe8c78562afc924489ea423632a2ad2187d',
-Host: 's3.us-east-1.amazonaws.com',
-'X-Amz-Content-Sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
-'X-Amz-Date': '20240201T012345Z',
-},
+const commandCaptor = captor<GetObjectCommand>();
+expect(mockS3Send).toHaveBeenCalledWith(commandCaptor);
+const command = commandCaptor.value;
+expect(command).toBeInstanceOf(GetObjectCommand);
+expect(command.input).toEqual({
+Bucket: 'test-bucket',
+Key: fileId,
 });
 expect(Buffer.isBuffer(result)).toBe(true);
 });
 it('should send a GET request to download an object as a stream', async () => {
-mockAxios.request.mockResolvedValue({ status: 200, data: new Readable() });
+const body = new Readable();
+mockS3Send.mockResolvedValueOnce({ Body: body });
 const result = await objectStoreService.get(fileId, { mode: 'stream' });
-expect(mockAxios.request).toHaveBeenCalledWith({
-method: 'GET',
-url: `${mockUrl}/${fileId}`,
-responseType: 'stream',
-headers: {
-Authorization:
-'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=3ef579ebe2ae89303a89c0faf3ce8ef8e907295dc538d59e95bcf35481c0d03e',
-Host: 's3.us-east-1.amazonaws.com',
-'X-Amz-Content-Sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
-'X-Amz-Date': '20240201T012345Z',
-},
+const commandCaptor = captor<GetObjectCommand>();
+expect(mockS3Send).toHaveBeenCalledWith(commandCaptor);
+const command = commandCaptor.value;
+expect(command).toBeInstanceOf(GetObjectCommand);
+expect(command.input).toEqual({
+Bucket: 'test-bucket',
+Key: fileId,
 });
 expect(result instanceof Readable).toBe(true);
+expect(result).toBe(body);
 });
 it('should throw an error on request failure', async () => {
-mockAxios.request.mockRejectedValue(mockError);
+mockS3Send.mockRejectedValueOnce(mockError);
 const promise = objectStoreService.get(fileId, { mode: 'buffer' });
@@ -194,25 +241,22 @@ describe('get()', () => {
 describe('deleteOne()', () => {
 it('should send a DELETE request to delete a single object', async () => {
-mockAxios.request.mockResolvedValue({ status: 204 });
+mockS3Send.mockResolvedValueOnce({});
 await objectStoreService.deleteOne(fileId);
-expect(mockAxios.request).toHaveBeenCalledWith({
-method: 'DELETE',
-url: `${mockUrl}/${fileId}`,
-headers: {
-Authorization:
-'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=4ad61b1b4da335c6c49772d28e54a301f787d199c9403055b217f890f7aec7fc',
-Host: 's3.us-east-1.amazonaws.com',
-'X-Amz-Content-Sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
-'X-Amz-Date': '20240201T012345Z',
-},
+const commandCaptor = captor<DeleteObjectCommand>();
+expect(mockS3Send).toHaveBeenCalledWith(commandCaptor);
+const command = commandCaptor.value;
+expect(command).toBeInstanceOf(DeleteObjectCommand);
+expect(command.input).toEqual({
+Bucket: 'test-bucket',
+Key: fileId,
 });
 });
 it('should throw an error on request failure', async () => {
-mockAxios.request.mockRejectedValue(mockError);
+mockS3Send.mockRejectedValueOnce(mockError);
 const promise = objectStoreService.deleteOne(fileId);
@@ -221,7 +265,7 @@ describe('deleteOne()', () => {
 });
 describe('deleteMany()', () => {
-it('should send a POST request to delete multiple objects', async () => {
+it('should send a DELETE request to delete multiple objects', async () => {
 const prefix = 'test-dir/';
 const fileName = 'file.txt';
@@ -236,25 +280,19 @@ describe('deleteMany()', () => {
 ];
 objectStoreService.list = jest.fn().mockResolvedValue(mockList);
-mockAxios.request.mockResolvedValue({ status: 204 });
+mockS3Send.mockResolvedValueOnce({});
 await objectStoreService.deleteMany(prefix);
-expect(mockAxios.request).toHaveBeenCalledWith({
-method: 'POST',
-url: `${mockUrl}?delete=`,
-headers: {
-'Content-Type': 'application/xml',
-'Content-Length': 55,
-'Content-MD5': 'ybYDrpQxwYvNIGBQs7PJNA==',
-Host: 's3.us-east-1.amazonaws.com',
-'X-Amz-Content-Sha256': '5708e5c935cb75eb528e41ef1548e08b26c5b3b7504b67dc911abc1ff1881f76',
-'X-Amz-Date': '20240201T012345Z',
-Authorization:
-'AWS4-HMAC-SHA256 Credential=mock-access-key/20240201/us-east-1/s3/aws4_request, SignedHeaders=content-length;content-md5;content-type;host;x-amz-content-sha256;x-amz-date, Signature=039168f10927b31624f3a5edae8eb4c89405f7c594eb2d6e00257c1462363f99',
+const commandCaptor = captor<DeleteObjectsCommand>();
+expect(mockS3Send).toHaveBeenCalledWith(commandCaptor);
+const command = commandCaptor.value;
+expect(command).toBeInstanceOf(DeleteObjectsCommand);
+expect(command.input).toEqual({
+Bucket: 'test-bucket',
+Delete: {
+Objects: [{ Key: fileName }],
 },
-data: toDeletionXml(fileName),
 });
 });
@@ -264,10 +302,12 @@ describe('deleteMany()', () => {
 const result = await objectStoreService.deleteMany('non-matching-prefix');
 expect(result).toBeUndefined();
+expect(mockS3Send).not.toHaveBeenCalled();
 });
 it('should throw an error on request failure', async () => {
-mockAxios.request.mockRejectedValue(mockError);
+objectStoreService.list = jest.fn().mockResolvedValue([{ key: 'file.txt' }]);
+mockS3Send.mockRejectedValueOnce(mockError);
 const promise = objectStoreService.deleteMany('test-dir/');
@@ -286,8 +326,6 @@ describe('list()', () => {
 objectStoreService.getListPage = jest.fn().mockResolvedValue(mockListPage);
-mockAxios.request.mockResolvedValue({ status: 200 });
 const result = await objectStoreService.list(prefix);
 expect(result).toEqual(mockListPage.contents);
@@ -312,18 +350,80 @@ describe('list()', () => {
 .mockResolvedValueOnce(mockFirstListPage)
 .mockResolvedValueOnce(mockSecondListPage);
-mockAxios.request.mockResolvedValue({ status: 200 });
 const result = await objectStoreService.list(prefix);
 expect(result).toEqual([...mockFirstListPage.contents, ...mockSecondListPage.contents]);
 });
 it('should throw an error on request failure', async () => {
-mockAxios.request.mockRejectedValue(mockError);
+objectStoreService.getListPage = jest.fn().mockRejectedValueOnce(mockError);
 const promise = objectStoreService.list('test-dir/');
 await expect(promise).rejects.toThrowError(FAILED_REQUEST_ERROR_MESSAGE);
 });
 });
+describe('getListPage()', () => {
+it('should fetch a page of objects with a common prefix', async () => {
+const prefix = 'test-dir/';
+const mockContents = [
+{
+Key: `${prefix}file1.txt`,
+LastModified: new Date(),
+ETag: '"abc123"',
+Size: 123,
+StorageClass: 'STANDARD',
+},
+];
+mockS3Send.mockResolvedValueOnce({
+Contents: mockContents,
+IsTruncated: false,
+});
+const result = await objectStoreService.getListPage(prefix);
+const commandCaptor = captor<ListObjectsV2Command>();
+expect(mockS3Send).toHaveBeenCalledWith(commandCaptor);
+const command = commandCaptor.value;
+expect(command).toBeInstanceOf(ListObjectsV2Command);
+expect(command.input).toEqual({
+Bucket: 'test-bucket',
+Prefix: prefix,
+});
+expect(result.contents).toHaveLength(1);
+expect(result.isTruncated).toBe(false);
+});
+it('should use continuation token when provided', async () => {
+const prefix = 'test-dir/';
+const token = 'next-page-token';
+mockS3Send.mockResolvedValueOnce({
+Contents: [],
+IsTruncated: false,
+});
+await objectStoreService.getListPage(prefix, token);
+const commandCaptor = captor<ListObjectsV2Command>();
+expect(mockS3Send).toHaveBeenCalledWith(commandCaptor);
+const command = commandCaptor.value;
+expect(command.input).toEqual({
+Bucket: 'test-bucket',
+Prefix: prefix,
+ContinuationToken: token,
+});
+});
+it('should throw an error on request failure', async () => {
+mockS3Send.mockRejectedValueOnce(mockError);
+const promise = objectStoreService.getListPage('test-dir/');
+await expect(promise).rejects.toThrowError(FAILED_REQUEST_ERROR_MESSAGE);
+});
+});
+});
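
The rewritten tests above lean on `captor()` from jest-mock-extended to grab the command object that was passed to the mocked `send()`. A minimal, self-contained sketch of that pattern (the `FakeClient` type here is hypothetical, standing in for the mocked `S3Client`):

```typescript
import { captor, mock } from 'jest-mock-extended';

// Hypothetical client shape, standing in for the mocked S3Client above.
type FakeClient = { send: (command: { input: Record<string, unknown> }) => Promise<unknown> };

test('captures the argument passed to a mocked method', async () => {
	const client = mock<FakeClient>();
	await client.send({ input: { Bucket: 'test-bucket' } });

	// The captor matches any argument and records it for later assertions.
	const commandCaptor = captor<{ input: Record<string, unknown> }>();
	expect(client.send).toHaveBeenCalledWith(commandCaptor);
	expect(commandCaptor.value.input).toEqual({ Bucket: 'test-bucket' });
});
```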

View File

@@ -1,13 +1,12 @@
+import { Config, Env, Nested } from '@n8n/config';
 import { z } from 'zod';
-import { Config, Env, Nested } from '../decorators';
 const protocolSchema = z.enum(['http', 'https']);
 export type Protocol = z.infer<typeof protocolSchema>;
 @Config
-class S3BucketConfig {
+class ObjectStoreBucketConfig {
 /** Name of the n8n bucket in S3-compatible external storage */
 @Env('N8N_EXTERNAL_STORAGE_S3_BUCKET_NAME')
 name: string = '';
@@ -18,7 +17,7 @@ class S3BucketConfig {
 }
 @Config
-class S3CredentialsConfig {
+class ObjectStoreCredentialsConfig {
 /** Access key in S3-compatible external storage */
 @Env('N8N_EXTERNAL_STORAGE_S3_ACCESS_KEY')
 accessKey: string = '';
@@ -26,11 +25,22 @@ class S3CredentialsConfig {
 /** Access secret in S3-compatible external storage */
 @Env('N8N_EXTERNAL_STORAGE_S3_ACCESS_SECRET')
 accessSecret: string = '';
+/**
+* Use automatic credential detection to authenticate S3 calls for external storage
+* This will ignore accessKey/accessSecret and use the default credential provider chain
+* https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/setting-credentials-node.html#credchain
+*/
+@Env('N8N_EXTERNAL_STORAGE_S3_AUTH_AUTO_DETECT')
+authAutoDetect: boolean = false;
 }
 @Config
-export class S3Config {
-/** Host of the n8n bucket in S3-compatible external storage @example "s3.us-east-1.amazonaws.com" */
+export class ObjectStoreConfig {
+/**
+* Host of the object-store bucket in S3-compatible external storage
+* @example "s3.us-east-1.amazonaws.com"
+**/
 @Env('N8N_EXTERNAL_STORAGE_S3_HOST')
 host: string = '';
@@ -38,14 +48,8 @@ export class S3Config {
 protocol: Protocol = 'https';
 @Nested
-bucket: S3BucketConfig;
+bucket: ObjectStoreBucketConfig = {} as ObjectStoreBucketConfig;
 @Nested
-credentials: S3CredentialsConfig;
+credentials: ObjectStoreCredentialsConfig = {} as ObjectStoreCredentialsConfig;
 }
-@Config
-export class ExternalStorageConfig {
-@Nested
-s3: S3Config;
-}
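
For reference, these are the environment variables wired up by the `@Env` decorators above, with illustrative values (a sketch, not part of the diff; the bucket-region variable is omitted here because its hunk is not shown):

```typescript
// Illustrative values only; variable names are taken from the @Env decorators above.
const exampleEnv: Record<string, string> = {
	// Leave the host empty to use the default AWS endpoint for the bucket's region;
	// set it (e.g. 'minio.internal:9000') for S3-compatible storage, which also
	// switches the client to path-style addressing.
	N8N_EXTERNAL_STORAGE_S3_HOST: '',
	N8N_EXTERNAL_STORAGE_S3_BUCKET_NAME: 'n8n-binary-data',
	// When auto-detect is on, the access key/secret below are ignored and the
	// AWS SDK default credential provider chain (including the instance role) is used.
	N8N_EXTERNAL_STORAGE_S3_AUTH_AUTO_DETECT: 'true',
	N8N_EXTERNAL_STORAGE_S3_ACCESS_KEY: '',
	N8N_EXTERNAL_STORAGE_S3_ACCESS_SECRET: '',
};

export default exampleEnv;
```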

View File

@@ -1,44 +1,73 @@
-import { S3Config } from '@n8n/config';
+import type {
+PutObjectCommandInput,
+DeleteObjectsCommandInput,
+ListObjectsV2CommandInput,
+S3ClientConfig,
+} from '@aws-sdk/client-s3';
+import {
+S3Client,
+HeadBucketCommand,
+PutObjectCommand,
+GetObjectCommand,
+HeadObjectCommand,
+DeleteObjectCommand,
+DeleteObjectsCommand,
+ListObjectsV2Command,
+} from '@aws-sdk/client-s3';
 import { Service } from '@n8n/di';
-import { sign } from 'aws4';
-import type { Request as Aws4Options } from 'aws4';
-import axios from 'axios';
-import type { AxiosRequestConfig, Method } from 'axios';
-import { ApplicationError } from 'n8n-workflow';
+import { UnexpectedError } from 'n8n-workflow';
 import { createHash } from 'node:crypto';
-import type { Readable } from 'stream';
+import { Readable } from 'node:stream';
 import { Logger } from '@/logging/logger';
-import type { ListPage, MetadataResponseHeaders, RawListPage, RequestOptions } from './types';
-import { isStream, parseXml } from './utils';
+import { ObjectStoreConfig } from './object-store.config';
+import type { MetadataResponseHeaders } from './types';
 import type { BinaryData } from '../types';
+import { streamToBuffer } from '../utils';
 @Service()
 export class ObjectStoreService {
-private baseUrl: URL;
+private s3Client: S3Client;
 private isReady = false;
+private bucket: string;
 constructor(
 private readonly logger: Logger,
-private readonly s3Config: S3Config,
+private readonly s3Config: ObjectStoreConfig,
 ) {
-const { host, bucket, protocol } = s3Config;
-if (host === '') {
-throw new ApplicationError(
-'External storage host not configured. Please set `N8N_EXTERNAL_STORAGE_S3_HOST`.',
-);
-}
+const { bucket } = s3Config;
 if (bucket.name === '') {
-throw new ApplicationError(
+throw new UnexpectedError(
 'External storage bucket name not configured. Please set `N8N_EXTERNAL_STORAGE_S3_BUCKET_NAME`.',
 );
 }
-this.baseUrl = new URL(`${protocol}://${host}/${bucket.name}`);
+this.bucket = bucket.name;
+this.s3Client = new S3Client(this.getClientConfig());
+}
+/** This generates the config for the S3Client to make it work in all various auth configurations */
+getClientConfig() {
+const { host, bucket, protocol, credentials } = this.s3Config;
+const clientConfig: S3ClientConfig = {};
+const endpoint = host ? `${protocol}://${host}` : undefined;
+if (endpoint) {
+clientConfig.endpoint = endpoint;
+clientConfig.forcePathStyle = true; // Needed for non-AWS S3 compatible services
+}
+if (bucket.region.length) {
+clientConfig.region = bucket.region;
+}
+if (!credentials.authAutoDetect) {
+clientConfig.credentials = {
+accessKeyId: credentials.accessKey,
+secretAccessKey: credentials.accessSecret,
+};
+}
+return clientConfig;
 }
 async init() {
@@ -52,92 +81,153 @@ export class ObjectStoreService {
 /**
 * Confirm that the configured bucket exists and the caller has permission to access it.
-*
-* @doc https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadBucket.html
 */
 async checkConnection() {
 if (this.isReady) return;
-return await this.request('HEAD', '');
+try {
+this.logger.debug('Checking connection to S3 bucket', { bucket: this.bucket });
+const command = new HeadBucketCommand({ Bucket: this.bucket });
+await this.s3Client.send(command);
+} catch (e) {
+throw new UnexpectedError('Request to S3 failed', { cause: e });
+}
 }
 /**
 * Upload an object to the configured bucket.
-*
-* @doc https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html
 */
 async put(filename: string, buffer: Buffer, metadata: BinaryData.PreWriteMetadata = {}) {
-const headers: Record<string, string | number> = {
-'Content-Length': buffer.length,
-'Content-MD5': createHash('md5').update(buffer).digest('base64'),
+try {
+const params: PutObjectCommandInput = {
+Bucket: this.bucket,
+Key: filename,
+Body: buffer,
+ContentLength: buffer.length,
+ContentMD5: createHash('md5').update(buffer).digest('base64'),
 };
-if (metadata.fileName) headers['x-amz-meta-filename'] = metadata.fileName;
-if (metadata.mimeType) headers['Content-Type'] = metadata.mimeType;
-return await this.request('PUT', filename, { headers, body: buffer });
+if (metadata.fileName) {
+params.Metadata = { filename: metadata.fileName };
+}
+if (metadata.mimeType) {
+params.ContentType = metadata.mimeType;
+}
+this.logger.debug('Sending PUT request to S3', { params });
+const command = new PutObjectCommand(params);
+return await this.s3Client.send(command);
+} catch (e) {
+throw new UnexpectedError('Request to S3 failed', { cause: e });
+}
 }
 /**
 * Download an object as a stream or buffer from the configured bucket.
-*
-* @doc https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html
 */
 async get(fileId: string, { mode }: { mode: 'buffer' }): Promise<Buffer>;
 async get(fileId: string, { mode }: { mode: 'stream' }): Promise<Readable>;
-async get(fileId: string, { mode }: { mode: 'stream' | 'buffer' }) {
-const { data } = await this.request('GET', fileId, {
-responseType: mode === 'buffer' ? 'arraybuffer' : 'stream',
+async get(fileId: string, { mode }: { mode: 'stream' | 'buffer' }): Promise<Buffer | Readable> {
+this.logger.debug('Sending GET request to S3', { bucket: this.bucket, key: fileId });
+const command = new GetObjectCommand({
+Bucket: this.bucket,
+Key: fileId,
 });
-if (mode === 'stream' && isStream(data)) return data;
-if (mode === 'buffer' && Buffer.isBuffer(data)) return data;
-throw new TypeError(`Expected ${mode} but received ${typeof data}.`);
+try {
+const { Body: body } = await this.s3Client.send(command);
+if (!body) throw new UnexpectedError('Received empty response body');
+if (mode === 'stream') {
+if (body instanceof Readable) return body;
+throw new UnexpectedError(`Expected stream but received ${typeof body}.`);
+}
+return await streamToBuffer(body as Readable);
+} catch (e) {
+throw new UnexpectedError('Request to S3 failed', { cause: e });
+}
 }
 /**
 * Retrieve metadata for an object in the configured bucket.
-*
-* @doc https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingMetadata.html
 */
-async getMetadata(fileId: string) {
-const response = await this.request('HEAD', fileId);
-return response.headers as MetadataResponseHeaders;
+async getMetadata(fileId: string): Promise<MetadataResponseHeaders> {
+try {
+const command = new HeadObjectCommand({
+Bucket: this.bucket,
+Key: fileId,
+});
+this.logger.debug('Sending HEAD request to S3', { bucket: this.bucket, key: fileId });
+const response = await this.s3Client.send(command);
+// Convert response to the expected format for backward compatibility
+const headers: MetadataResponseHeaders = {};
+if (response.ContentType) headers['content-type'] = response.ContentType;
+if (response.ContentLength) headers['content-length'] = String(response.ContentLength);
+if (response.ETag) headers.etag = response.ETag;
+if (response.LastModified) headers['last-modified'] = response.LastModified.toUTCString();
+// Add metadata with the expected prefix format
+if (response.Metadata) {
+Object.entries(response.Metadata).forEach(([key, value]) => {
+headers[`x-amz-meta-${key.toLowerCase()}`] = value;
+});
+}
+return headers;
+} catch (e) {
+throw new UnexpectedError('Request to S3 failed', { cause: e });
+}
 }
 /**
 * Delete a single object in the configured bucket.
-*
-* @doc https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html
 */
 async deleteOne(fileId: string) {
-return await this.request('DELETE', fileId);
+try {
+const command = new DeleteObjectCommand({
+Bucket: this.bucket,
+Key: fileId,
+});
+this.logger.debug('Sending DELETE request to S3', { bucket: this.bucket, key: fileId });
+return await this.s3Client.send(command);
+} catch (e) {
+throw new UnexpectedError('Request to S3 failed', { cause: e });
+}
 }
 /**
 * Delete objects with a common prefix in the configured bucket.
-*
-* @doc https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html
 */
 async deleteMany(prefix: string) {
+try {
 const objects = await this.list(prefix);
 if (objects.length === 0) return;
-const innerXml = objects.map(({ key }) => `<Object><Key>${key}</Key></Object>`).join('\n');
-const body = ['<Delete>', innerXml, '</Delete>'].join('\n');
-const headers = {
-'Content-Type': 'application/xml',
-'Content-Length': body.length,
-'Content-MD5': createHash('md5').update(body).digest('base64'),
+const params: DeleteObjectsCommandInput = {
+Bucket: this.bucket,
+Delete: {
+Objects: objects.map(({ key }) => ({ Key: key })),
+},
 };
-return await this.request('POST', '', { headers, body, qs: { delete: '' } });
+this.logger.debug('Sending DELETE MANY request to S3', {
+bucket: this.bucket,
+objectCount: objects.length,
+});
+const command = new DeleteObjectsCommand(params);
+return await this.s3Client.send(command);
+} catch (e) {
+throw new UnexpectedError('Request to S3 failed', { cause: e });
+}
 }
 /**
@@ -145,108 +235,62 @@ export class ObjectStoreService {
 */
 async list(prefix: string) {
 const items = [];
-let isTruncated;
-let nextPageToken;
-do {
-const listPage = await this.getListPage(prefix, nextPageToken);
-if (listPage.contents?.length > 0) items.push(...listPage.contents);
+let isTruncated = true;
+let continuationToken;
+try {
+while (isTruncated) {
+const listPage = await this.getListPage(prefix, continuationToken);
+if (listPage.contents?.length > 0) {
+items.push(...listPage.contents);
+}
 isTruncated = listPage.isTruncated;
-nextPageToken = listPage.nextContinuationToken;
-} while (isTruncated && nextPageToken);
+continuationToken = listPage.nextContinuationToken;
+}
 return items;
+} catch (e) {
+throw new UnexpectedError('Request to S3 failed', { cause: e });
+}
 }
 /**
 * Fetch a page of objects with a common prefix in the configured bucket.
-*
-* Max 1000 objects per page - set by AWS.
-*
-* @doc https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html
 */
-async getListPage(prefix: string, nextPageToken?: string) {
-const qs: Record<string, string | number> = { 'list-type': 2, prefix };
-if (nextPageToken) qs['continuation-token'] = nextPageToken;
-const { data } = await this.request('GET', '', { qs });
-if (typeof data !== 'string') {
-throw new TypeError(`Expected XML string but received ${typeof data}`);
-}
-const { listBucketResult: page } = await parseXml<RawListPage>(data);
-if (!page.contents) return { ...page, contents: [] };
-// `explicitArray: false` removes array wrapper on single item array, so restore it
-if (!Array.isArray(page.contents)) page.contents = [page.contents];
-// remove null prototype - https://github.com/Leonidas-from-XIV/node-xml2js/issues/670
-page.contents.forEach((item) => {
-Object.setPrototypeOf(item, Object.prototype);
-});
-return page as ListPage;
-}
-private async request<T>(
-method: Method,
-rawPath = '',
-{ qs, headers, body, responseType }: RequestOptions = {},
-) {
-const url = new URL(this.baseUrl);
-if (rawPath && rawPath !== '/') {
-url.pathname = `${url.pathname}/${rawPath}`;
-}
-Object.entries(qs ?? {}).forEach(([key, value]) => {
-url.searchParams.set(key, String(value));
-});
-const optionsToSign: Aws4Options = {
-method,
-service: 's3',
-region: this.s3Config.bucket.region,
-host: this.s3Config.host,
-path: `${url.pathname}${url.search}`,
-};
-if (headers) optionsToSign.headers = headers;
-if (body) optionsToSign.body = body;
-const { accessKey, accessSecret } = this.s3Config.credentials;
-const signedOptions = sign(optionsToSign, {
-accessKeyId: accessKey,
-secretAccessKey: accessSecret,
-});
-const config: AxiosRequestConfig = {
-method,
-url: url.toString(),
-headers: signedOptions.headers,
-};
-if (body) config.data = body;
-if (responseType) config.responseType = responseType;
+async getListPage(prefix: string, continuationToken?: string) {
 try {
-this.logger.debug('Sending request to S3', { config });
-return await axios.request<T>(config);
+const params: ListObjectsV2CommandInput = {
+Bucket: this.bucket,
+Prefix: prefix,
+};
+if (continuationToken) {
+params.ContinuationToken = continuationToken;
+}
+this.logger.debug('Sending list request to S3', { bucket: this.bucket, prefix });
+const command = new ListObjectsV2Command(params);
+const response = await this.s3Client.send(command);
+// Convert response to match expected format for compatibility
+const contents =
+response.Contents?.map((item) => ({
+key: item.Key ?? '',
+lastModified: item.LastModified?.toISOString() ?? '',
+eTag: item.ETag ?? '',
+size: item.Size ?? 0,
+storageClass: item.StorageClass ?? '',
+})) ?? [];
+return {
+contents,
+isTruncated: response.IsTruncated ?? false,
+nextContinuationToken: response.NextContinuationToken,
+};
 } catch (e) {
-const error = e instanceof Error ? e : new Error(`${e}`);
-const message = `Request to S3 failed: ${error.message}`;
-this.logger.error(message, { config });
-throw new ApplicationError(message, { cause: error, extra: { config } });
+throw new UnexpectedError('Request to S3 failed', { cause: e });
 }
 }
 }
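
Because getMetadata() now rebuilds the old header-shaped map from the HeadObjectCommand response, existing callers keep working. A sketch of a consumer, assuming the `Container` exported by `@n8n/di` and the `@/binary-data/...` path alias used by the tests above:

```typescript
// Assumes the '@/binary-data/...' path alias used by the tests above.
import { Container } from '@n8n/di';
import { ObjectStoreService } from '@/binary-data/object-store/object-store.service.ee';

export async function describeStoredObject(fileId: string) {
	const objectStore = Container.get(ObjectStoreService);
	const headers = await objectStore.getMetadata(fileId);

	return {
		mimeType: headers['content-type'], // mapped from ContentType
		sizeInBytes: Number(headers['content-length'] ?? 0), // ContentLength, stringified by the service
		fileName: headers['x-amz-meta-filename'], // user metadata, re-prefixed with x-amz-meta-
	};
}
```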

View File

@@ -1,38 +1,9 @@
-import type { AxiosResponseHeaders, ResponseType } from 'axios';
 import type { BinaryData } from '../types';
-export type RawListPage = {
-listBucketResult: {
-name: string;
-prefix: string;
-keyCount: number;
-maxKeys: number;
-isTruncated: boolean;
-nextContinuationToken?: string; // only if isTruncated is true
-contents?: Item | Item[];
-};
-};
-type Item = {
-key: string;
-lastModified: string;
-eTag: string;
-size: number; // bytes
-storageClass: string;
-};
-export type ListPage = Omit<RawListPage['listBucketResult'], 'contents'> & { contents: Item[] };
-export type RequestOptions = {
-qs?: Record<string, string | number>;
-headers?: Record<string, string | number>;
-body?: string | Buffer;
-responseType?: ResponseType;
-};
-export type MetadataResponseHeaders = AxiosResponseHeaders & {
-'content-length': string;
+export type MetadataResponseHeaders = Record<string, string> & {
+'content-length'?: string;
 'content-type'?: string;
 'x-amz-meta-filename'?: string;
+etag?: string;
+'last-modified'?: string;
 } & BinaryData.PreWriteMetadata;

View File

@@ -1,16 +0,0 @@
-import { Stream } from 'node:stream';
-import { parseStringPromise } from 'xml2js';
-import { firstCharLowerCase, parseBooleans, parseNumbers } from 'xml2js/lib/processors';
-export function isStream(maybeStream: unknown): maybeStream is Stream {
-return maybeStream instanceof Stream;
-}
-export async function parseXml<T>(xml: string): Promise<T> {
-return await (parseStringPromise(xml, {
-explicitArray: false,
-ignoreAttrs: true,
-tagNameProcessors: [firstCharLowerCase],
-valueProcessors: [parseNumbers, parseBooleans],
-}) as Promise<T>);
-}

View File

@@ -1,4 +1,4 @@
-import concatStream from 'concat-stream';
+import { UnexpectedError } from 'n8n-workflow';
 import fs from 'node:fs/promises';
 import type { Readable } from 'node:stream';
@@ -33,16 +33,22 @@ export async function doesNotExist(dir: string) {
 }
 }
+/** Converts a readable stream to a buffer */
+export async function streamToBuffer(stream: Readable) {
+return await new Promise<Buffer>((resolve, reject) => {
+const chunks: Buffer[] = [];
+stream.on('data', (chunk: Buffer) => chunks.push(chunk));
+stream.on('end', () => resolve(Buffer.concat(chunks)));
+stream.once('error', (cause) => {
+if ('code' in cause && cause.code === 'Z_DATA_ERROR')
+reject(new UnexpectedError('Failed to decompress response', { cause }));
+else reject(cause);
+});
+});
+}
 /** Converts a buffer or a readable stream to a buffer */
 export async function binaryToBuffer(body: Buffer | Readable) {
 if (Buffer.isBuffer(body)) return body;
-return await new Promise<Buffer>((resolve, reject) => {
-body
-.once('error', (cause) => {
-if ('code' in cause && cause.code === 'Z_DATA_ERROR')
-reject(new Error('Failed to decompress response', { cause }));
-else reject(cause);
-})
-.pipe(concatStream(resolve));
-});
+return await streamToBuffer(body);
 }
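
The new streamToBuffer helper replaces the concat-stream dependency and is also used by the object-store GET path. A short usage sketch (the import path is assumed; it mirrors the `@/binary-data/...` alias used in the tests above):

```typescript
// Import path assumed; it mirrors the '@/binary-data/...' alias used in the tests above.
import { Readable } from 'node:stream';
import { binaryToBuffer, streamToBuffer } from '@/binary-data/utils';

export async function example(): Promise<string> {
	// A plain Readable is drained into a single Buffer.
	const fromStream = await streamToBuffer(Readable.from(Buffer.from('hello ')));

	// binaryToBuffer returns Buffers as-is and delegates Readables to streamToBuffer.
	const fromBuffer = await binaryToBuffer(Buffer.from('world'));

	return Buffer.concat([fromStream, fromBuffer]).toString(); // 'hello world'
}
```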

pnpm-lock.yaml (generated)
View File

@@ -1306,6 +1306,9 @@ importers:
 packages/core:
 dependencies:
+'@aws-sdk/client-s3':
+specifier: 3.666.0
+version: 3.666.0
 '@langchain/core':
 specifier: 'catalog:'
 version: 0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))
@@ -1321,9 +1324,6 @@ importers:
 '@sentry/node':
 specifier: 'catalog:'
 version: 8.52.1
-aws4:
-specifier: 1.11.0
-version: 1.11.0
 axios:
 specifier: 'catalog:'
 version: 1.8.2(debug@4.3.6)
@@ -1333,9 +1333,6 @@ importers:
 chardet:
 specifier: 2.0.0
 version: 2.0.0
-concat-stream:
-specifier: 2.0.0
-version: 2.0.0
 cron:
 specifier: 3.1.7
 version: 3.1.7
@@ -1403,12 +1400,6 @@ importers:
 '@n8n/typescript-config':
 specifier: workspace:*
 version: link:../@n8n/typescript-config
-'@types/aws4':
-specifier: ^1.5.1
-version: 1.11.2
-'@types/concat-stream':
-specifier: ^2.0.0
-version: 2.0.0
 '@types/express':
 specifier: 'catalog:'
 version: 5.0.1
@@ -6014,9 +6005,6 @@ packages:
 '@types/compression@1.7.5':
 resolution: {integrity: sha512-AAQvK5pxMpaT+nDvhHrsBhLSYG5yQdtkaJE1WYieSNY2mVFKAgmU4ks65rkZD5oqnGCFLyQpUr1CqI4DmUMyDg==}
-'@types/concat-stream@2.0.0':
-resolution: {integrity: sha512-t3YCerNM7NTVjLuICZo5gYAXYoDvpuuTceCcFQWcDQz26kxUR5uIWolxbIR5jRNIXpMqhOpW/b8imCR1LEmuJw==}
 '@types/connect@3.4.36':
 resolution: {integrity: sha512-P63Zd/JUGq+PdrM1lv0Wv5SBYeA2+CORvbrXbngriYY0jzLUWfQMQQxOhjONEz/wlHOAxOdY7CY65rgQdTjq2w==}
@@ -18855,10 +18843,6 @@ snapshots:
 dependencies:
 '@types/express': 5.0.1
-'@types/concat-stream@2.0.0':
-dependencies:
-'@types/node': 18.16.16
 '@types/connect@3.4.36':
 dependencies:
 '@types/node': 18.16.16