Initial changes for binary data management (#2105)

* introduce binary data management

* cr

* add binary data changes to awsS3 node

* add binary data changes to Box node

* add binary data changes to CiscoWebex node

* add binary data changes to HumanticAi node

* add binary data changes to Jira node

* add binary data changes to Line node

* add binary data changes to MicrosoftOneDrive node

* add binary data changes to MicrosoftOutlook node

* add binary data changes to Mindee node

* add binary data changes to NocoDB node

* add binary data changes to Pushbullet node

* add binary data changes to Pushover node

* add binary data changes to Raindrop node

* add binary data changes to S3 node

* add binary data changes to Salesforce node

* add binary data changes to Ssh node

* add binary data changes to TheHive node

* add binary data changes to Twist node

* add binary data changes to Twitter node

* remove changes not needed right now

* 🐛 Fix issue with multiple runs

* 🐛 Revert fix and add support for multiple inputs

Co-authored-by: Jan Oberhauser <jan.oberhauser@gmail.com>
Ahsan Virani authored on 2021-08-20 18:08:40 +02:00, committed by GitHub
parent 7da86641d5
commit 178235e148
24 changed files with 103 additions and 66 deletions
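
The recurring change across the touched nodes is the same substitution: instead of decoding the payload stored on the item with Buffer.from(binaryData.data, BINARY_ENCODING), a node now requests the buffer via the new getBinaryDataBuffer helper. A minimal before/after sketch, using only identifiers that appear in the diff below (the item index i and binaryPropertyName come from the surrounding loop):

// before: decode the serialized payload directly in the node
const binaryData = (items[i].binary as IBinaryKeyData)[binaryPropertyName];
const buffer = Buffer.from(binaryData.data as string, BINARY_ENCODING);

// after: ask the execution helpers to resolve the data and hand back a Buffer
const binaryDataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);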

@@ -1,5 +1,4 @@
 import {
-BINARY_ENCODING,
 IExecuteFunctions,
 } from 'n8n-core';
@@ -226,9 +225,10 @@ export class Compression implements INodeType {
 }
 const binaryData = (items[i].binary as IBinaryKeyData)[binaryPropertyName];
+const binaryDataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);
 if (binaryData.fileExtension === 'zip') {
-const files = await unzip(Buffer.from(binaryData.data as string, BINARY_ENCODING));
+const files = await unzip(binaryDataBuffer);
 for (const key of Object.keys(files)) {
 // when files are compresed using MACOSX for some reason they are duplicated under __MACOSX
@@ -241,7 +241,7 @@ export class Compression implements INodeType {
 binaryObject[`${outputPrefix}${zipIndex++}`] = data;
 }
 } else if (binaryData.fileExtension === 'gz') {
-const file = await gunzip(Buffer.from(binaryData.data as string, BINARY_ENCODING));
+const file = await gunzip(binaryDataBuffer);
 const fileName = binaryData.fileName?.split('.')[0];
@@ -280,10 +280,11 @@ export class Compression implements INodeType {
 }
 const binaryData = (items[i].binary as IBinaryKeyData)[binaryPropertyName];
+const binaryDataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);
 if (outputFormat === 'zip') {
 zipData[binaryData.fileName as string] = [
-Buffer.from(binaryData.data, BINARY_ENCODING), {
+binaryDataBuffer, {
 level: ALREADY_COMPRESSED.includes(binaryData.fileExtension as string) ? 0 : 6,
 },
 ];
@@ -291,7 +292,7 @@ export class Compression implements INodeType {
 } else if (outputFormat === 'gzip') {
 const outputPrefix = this.getNodeParameter('outputPrefix', 0) as string;
-const data = await gzip(Buffer.from(binaryData.data, BINARY_ENCODING)) as Uint8Array;
+const data = await gzip(binaryDataBuffer) as Uint8Array;
 const fileName = binaryData.fileName?.split('.')[0];
@@ -323,10 +324,10 @@ export class Compression implements INodeType {
 });
 }
 }
 } catch (error) {
 if (this.continueOnFail()) {
-returnData.push({json:{ error: error.message }});
+returnData.push({ json: { error: error.message } });
 continue;
 }
 throw error;
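
Condensed, the execute loop of a migrated node now has this shape (a composite sketch of the hunks above, not literal code from the commit; the binaryPropertyName lookup is an assumption, the diff only shows the variable being used):

for (let i = 0; i < items.length; i++) {
	try {
		// assumed: the property name is read from a node parameter, as in most binary-handling nodes
		const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i) as string;
		const binaryData = (items[i].binary as IBinaryKeyData)[binaryPropertyName];
		// new in this commit: resolve the raw Buffer through the core helper
		// instead of decoding binaryData.data with BINARY_ENCODING in the node
		const binaryDataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);
		// ...node-specific work on binaryDataBuffer (zip/gzip handling in the Compression node)...
	} catch (error) {
		if (this.continueOnFail()) {
			returnData.push({ json: { error: error.message } });
			continue;
		}
		throw error;
	}
}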