Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-18 02:21:13 +00:00)
feat(core): Improvements/overhaul for nodes working with binary data (#7651)
Github issue / Community forum post (link here to close automatically):

---------

Co-authored-by: Giulio Andreini <andreini@netseven.it>
Co-authored-by: Marcus <marcus@n8n.io>
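The diff below restructures the Spreadsheet File node around per-operation modules: the node file keeps a shared `operationProperty` and a thin `execute` dispatcher, while each operation (`fromFile`, `toFile`) exports its own `description` (UI properties) and `execute` function from a dedicated `*.operation.ts` file. A minimal sketch of that pattern, reusing the n8n-workflow types seen elsewhere in this diff (the module and operation names here are illustrative, not part of the commit):

// exampleOp.operation.ts (illustrative module name)
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';

// UI fields shown only when this operation is selected
export const description: INodeProperties[] = [
	{
		displayName: 'Example Field',
		name: 'exampleField',
		type: 'string',
		default: '',
		displayOptions: { show: { operation: ['exampleOp'] } },
	},
];

// Operation logic, invoked with the node's execution context via .call(this, items)
export async function execute(this: IExecuteFunctions, items: INodeExecutionData[]) {
	const returnData: INodeExecutionData[] = [];
	for (let i = 0; i < items.length; i++) {
		returnData.push({ json: items[i].json, pairedItem: { item: i } });
	}
	return returnData;
}

// In the node class, operations are then composed and dispatched roughly like this:
//   properties: [operationProperty, ...exampleOp.description]
//   if (operation === 'exampleOp') returnData = await exampleOp.execute.call(this, items);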
@@ -7,6 +7,7 @@ import { SpreadsheetFileV2 } from './v2/SpreadsheetFileV2.node';
export class SpreadsheetFile extends VersionedNodeType {
	constructor() {
		const baseDescription: INodeTypeBaseDescription = {
			hidden: true,
			displayName: 'Spreadsheet File',
			name: 'spreadsheetFile',
			icon: 'fa:table',
@@ -1,97 +1,41 @@
import type { INodeProperties } from 'n8n-workflow';

export const operationProperties: INodeProperties[] = [
	{
		displayName: 'Operation',
		name: 'operation',
		type: 'options',
		noDataExpression: true,
		options: [
			{
				name: 'Read From File',
				value: 'fromFile',
				description: 'Reads data from a spreadsheet file',
				action: 'Read data from a spreadsheet file',
			},
			{
				name: 'Write to File',
				value: 'toFile',
				description: 'Writes the workflow data to a spreadsheet file',
				action: 'Write data to a spreadsheet file',
			},
		],
		default: 'fromFile',
	},
];

export const fromFileProperties: INodeProperties[] = [
	{
		displayName: 'Binary Property',
		name: 'binaryPropertyName',
		type: 'string',
		default: 'data',
		required: true,
		displayOptions: {
			show: {
				operation: ['fromFile'],
			},
export const operationProperty: INodeProperties = {
	displayName: 'Operation',
	name: 'operation',
	type: 'options',
	noDataExpression: true,
	options: [
		{
			name: 'Read From File',
			value: 'fromFile',
			description: 'Reads data from a spreadsheet file',
			action: 'Read data from a spreadsheet file',
		},
		placeholder: '',
		description:
			'Name of the binary property from which to read the binary data of the spreadsheet file',
	},
];

export const fromFileV2Properties: INodeProperties[] = [
	{
		displayName: 'File Format',
		name: 'fileFormat',
		type: 'options',
		options: [
			{
				name: 'Autodetect',
				value: 'autodetect',
			},
			{
				name: 'CSV',
				value: 'csv',
				description: 'Comma-separated values',
			},
			{
				name: 'HTML',
				value: 'html',
				description: 'HTML Table',
			},
			{
				name: 'ODS',
				value: 'ods',
				description: 'OpenDocument Spreadsheet',
			},
			{
				name: 'RTF',
				value: 'rtf',
				description: 'Rich Text Format',
			},
			{
				name: 'XLS',
				value: 'xls',
				description: 'Excel',
			},
			{
				name: 'XLSX',
				value: 'xlsx',
				description: 'Excel',
			},
		],
		default: 'autodetect',
		displayOptions: {
			show: {
				operation: ['fromFile'],
			},
		{
			name: 'Write to File',
			value: 'toFile',
			description: 'Writes the workflow data to a spreadsheet file',
			action: 'Write data to a spreadsheet file',
		},
	],
	default: 'fromFile',
};

export const binaryProperty: INodeProperties = {
	displayName: 'Input Binary Field',
	name: 'binaryPropertyName',
	type: 'string',
	default: 'data',
	required: true,
	placeholder: '',
	hint: 'The name of the input field containing the file data to be processed',
	displayOptions: {
		show: {
			operation: ['fromFile'],
		},
		description: 'The format of the binary data to read from',
	},
];
};

export const toFileProperties: INodeProperties[] = [
	{
@@ -139,7 +83,7 @@ export const toFileProperties: INodeProperties[] = [
		description: 'The format of the file to save the data as',
	},
	{
		displayName: 'Binary Property',
		displayName: 'Put Output File in Field',
		name: 'binaryPropertyName',
		type: 'string',
		default: 'data',
@@ -150,189 +94,176 @@ export const toFileProperties: INodeProperties[] = [
			},
		},
		placeholder: '',
		description:
			'Name of the binary property in which to save the binary data of the spreadsheet file',
		hint: 'The name of the output binary field to put the file in',
	},
];

export const optionsProperties: INodeProperties[] = [
	{
		displayName: 'Options',
		name: 'options',
		type: 'collection',
		placeholder: 'Add Option',
		default: {},
		options: [
			{
				displayName: 'Compression',
				name: 'compression',
				type: 'boolean',
				displayOptions: {
					show: {
						'/operation': ['toFile'],
						'/fileFormat': ['xlsx', 'ods'],
					},
				},
				default: false,
				description: 'Whether compression will be applied or not',
			},
			{
				displayName: 'File Name',
				name: 'fileName',
				type: 'string',
				displayOptions: {
					show: {
						'/operation': ['toFile'],
					},
				},
				default: '',
				description:
					'File name to set in binary data. By default will "spreadsheet.<fileFormat>" be used.',
			},
			{
				displayName: 'Header Row',
				name: 'headerRow',
				type: 'boolean',
				displayOptions: {
					show: {
						'/operation': ['fromFile', 'toFile'],
					},
				},
				default: true,
				description: 'Whether the first row of the file contains the header names',
			},
			{
				displayName: 'Delimiter',
				name: 'delimiter',
				type: 'string',
				displayOptions: {
					show: {
						'/operation': ['fromFile'],
						'/fileFormat': ['csv'],
					},
				},
				default: ',',
				description: 'Set the field delimiter',
			},
			{
				displayName: 'Starting Line',
				name: 'fromLine',
				type: 'number',
				displayOptions: {
					show: {
						'/operation': ['fromFile'],
						'/fileFormat': ['csv'],
					},
				},
				default: 0,
				description: 'Start handling records from the requested line number',
			},
			{
				displayName: 'Max Number of Rows to Load',
				name: 'maxRowCount',
				type: 'number',
				displayOptions: {
					show: {
						'/operation': ['fromFile'],
						'/fileFormat': ['csv'],
					},
				},
				default: -1,
				description: 'Stop handling records after the requested number of rows are read',
			},
			{
				displayName: 'Exclude Byte Order Mark (BOM)',
				name: 'enableBOM',
				type: 'boolean',
				displayOptions: {
					show: {
						'/operation': ['fromFile'],
						'/fileFormat': ['csv'],
					},
				},
				default: false,
				description:
					'Whether to detect and exclude the byte-order-mark from the CSV Input if present',
			},
			{
				displayName: 'Include Empty Cells',
				name: 'includeEmptyCells',
				type: 'boolean',
				displayOptions: {
					show: {
						'/operation': ['fromFile'],
					},
				},
				default: false,
				// eslint-disable-next-line n8n-nodes-base/node-param-description-boolean-without-whether
				description:
					'When reading from file the empty cells will be filled with an empty string in the JSON',
			},
			{
				displayName: 'RAW Data',
				name: 'rawData',
				type: 'boolean',
				displayOptions: {
					show: {
						'/operation': ['fromFile'],
					},
				},
				default: false,
				description: 'Whether the data should be returned RAW instead of parsed',
			},
			{
				displayName: 'Read As String',
				name: 'readAsString',
				type: 'boolean',
				displayOptions: {
					show: {
						'/operation': ['fromFile'],
					},
				},
				default: false,
				// eslint-disable-next-line n8n-nodes-base/node-param-description-boolean-without-whether
				description:
					'In some cases and file formats, it is necessary to read specifically as string else some special character get interpreted wrong',
			},
			{
				displayName: 'Range',
				name: 'range',
				type: 'string',
				displayOptions: {
					show: {
						'/operation': ['fromFile'],
					},
				},
				default: '',
				description:
					'The range to read from the table. If set to a number it will be the starting row. If set to string it will be used as A1-style bounded range.',
			},
			{
				displayName: 'Sheet Name',
				name: 'sheetName',
				type: 'string',
				displayOptions: {
					show: {
						'/operation': ['fromFile'],
					},
				},
				default: 'Sheet',
				description:
					'Name of the sheet to read from in the spreadsheet (if supported). If not set, the first one gets chosen.',
			},
			{
				displayName: 'Sheet Name',
				name: 'sheetName',
				type: 'string',
				displayOptions: {
					show: {
						'/operation': ['toFile'],
						'/fileFormat': ['ods', 'xls', 'xlsx'],
					},
				},
				default: 'Sheet',
				description: 'Name of the sheet to create in the spreadsheet',
			},
		],
export const toFileOptions: INodeProperties = {
	displayName: 'Options',
	name: 'options',
	type: 'collection',
	placeholder: 'Add Option',
	default: {},
	displayOptions: {
		show: {
			operation: ['toFile'],
		},
	},
];
	options: [
		{
			displayName: 'Compression',
			name: 'compression',
			type: 'boolean',
			displayOptions: {
				show: {
					'/fileFormat': ['xlsx', 'ods'],
				},
			},
			default: false,
			description: 'Whether compression will be applied or not',
		},
		{
			displayName: 'File Name',
			name: 'fileName',
			type: 'string',
			default: '',
			description:
				'File name to set in binary data. By default will "spreadsheet.<fileFormat>" be used.',
		},
		{
			displayName: 'Header Row',
			name: 'headerRow',
			type: 'boolean',
			default: true,
			description: 'Whether the first row of the file contains the header names',
		},
		{
			displayName: 'Sheet Name',
			name: 'sheetName',
			type: 'string',
			displayOptions: {
				show: {
					'/fileFormat': ['ods', 'xls', 'xlsx'],
				},
			},
			default: 'Sheet',
			description: 'Name of the sheet to create in the spreadsheet',
		},
	],
};

export const fromFileOptions: INodeProperties = {
	displayName: 'Options',
	name: 'options',
	type: 'collection',
	placeholder: 'Add Option',
	default: {},
	displayOptions: {
		show: {
			operation: ['fromFile'],
		},
	},
	options: [
		{
			displayName: 'Delimiter',
			name: 'delimiter',
			type: 'string',
			displayOptions: {
				show: {
					'/fileFormat': ['csv'],
				},
			},
			default: ',',
			placeholder: 'e.g. ,',
			description: 'Set the field delimiter, usually a comma',
		},
		{
			displayName: 'Exclude Byte Order Mark (BOM)',
			name: 'enableBOM',
			type: 'boolean',
			displayOptions: {
				show: {
					'/fileFormat': ['csv'],
				},
			},
			default: false,
			description:
				'Whether to detect and exclude the byte-order-mark from the CSV Input if present',
		},
		{
			displayName: 'Header Row',
			name: 'headerRow',
			type: 'boolean',
			default: true,
			description: 'Whether the first row of the file contains the header names',
		},
		{
			displayName: 'Include Empty Cells',
			name: 'includeEmptyCells',
			type: 'boolean',
			default: false,
			description:
				'Whether to include empty cells when reading from file. They will be filled with an empty string.',
		},
		{
			displayName: 'Max Number of Rows to Load',
			name: 'maxRowCount',
			type: 'number',
			displayOptions: {
				show: {
					'/fileFormat': ['csv'],
				},
			},
			default: -1,
			placeholder: 'e.g. 10',
			description:
				'Stop handling records after the requested number of rows are read. Use -1 if you want to load all rows.',
		},
		{
			displayName: 'Range',
			name: 'range',
			type: 'string',
			default: '',
			description:
				'The range to read from the table. If set to a number it will be the starting row. If set to string it will be used as A1-style notation range.',
		},
		{
			displayName: 'RAW Data',
			name: 'rawData',
			type: 'boolean',
			default: false,
			description: 'Whether to return RAW data, instead of parsing it',
		},
		{
			displayName: 'Read As String',
			name: 'readAsString',
			type: 'boolean',
			default: false,
			// eslint-disable-next-line n8n-nodes-base/node-param-description-boolean-without-whether
			description:
				'In some cases and file formats, it is necessary to read as string to ensure special characters are interpreted correctly',
		},
		{
			displayName: 'Sheet Name',
			name: 'sheetName',
			type: 'string',
			default: 'Sheet',
			placeholder: 'e.g. mySheet',
			description:
				'Name of the sheet to read from in the spreadsheet (if supported). If not set, the first one will be chosen.',
		},
		{
			displayName: 'Starting Line',
			name: 'fromLine',
			type: 'number',
			displayOptions: {
				show: {
					'/fileFormat': ['csv'],
				},
			},
			default: 0,
			placeholder: 'e.g. 0',
			description: 'Start handling records from the requested line number. Starts at 0.',
		},
	],
};
@@ -23,10 +23,11 @@ import {
} from 'xlsx';

import {
	operationProperties,
	fromFileProperties,
	operationProperty,
	binaryProperty,
	toFileProperties,
	optionsProperties,
	fromFileOptions,
	toFileOptions,
} from '../description';
import { flattenObject, generatePairedItemData } from '@utils/utilities';
import { oldVersionNotice } from '@utils/descriptions';
@@ -46,10 +47,11 @@ export class SpreadsheetFileV1 implements INodeType {
			outputs: ['main'],
			properties: [
				oldVersionNotice,
				...operationProperties,
				...fromFileProperties,
				operationProperty,
				binaryProperty,
				...toFileProperties,
				...optionsProperties,
				fromFileOptions,
				toFileOptions,
			],
		};
	}
@@ -1,37 +1,14 @@
import type {
	IDataObject,
	IExecuteFunctions,
	INodeExecutionData,
	INodeType,
	INodeTypeBaseDescription,
	INodeTypeDescription,
} from 'n8n-workflow';
import { BINARY_ENCODING, NodeOperationError } from 'n8n-workflow';

import type {
	JSON2SheetOpts,
	Sheet2JSONOpts,
	WorkBook,
	WritingOptions,
	ParsingOptions,
} from 'xlsx';

import {
	read as xlsxRead,
	readFile as xlsxReadFile,
	utils as xlsxUtils,
	write as xlsxWrite,
} from 'xlsx';
import { parse as createCSVParser } from 'csv-parse';

import {
	operationProperties,
	fromFileProperties,
	toFileProperties,
	optionsProperties,
	fromFileV2Properties,
} from '../description';
import { flattenObject, generatePairedItemData } from '@utils/utilities';
import { operationProperty } from '../description';
import * as fromFile from './fromFile.operation';
import * as toFile from './toFile.operation';

export class SpreadsheetFileV2 implements INodeType {
	description: INodeTypeDescription;
@@ -46,271 +23,23 @@ export class SpreadsheetFileV2 implements INodeType {
			},
			inputs: ['main'],
			outputs: ['main'],
			properties: [
				...operationProperties,
				...fromFileProperties,
				...fromFileV2Properties,
				...toFileProperties,
				...optionsProperties,
			],
			properties: [operationProperty, ...fromFile.description, ...toFile.description],
		};
	}

	async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
	async execute(this: IExecuteFunctions) {
		const items = this.getInputData();

		const operation = this.getNodeParameter('operation', 0);

		const newItems: INodeExecutionData[] = [];
		let returnData: INodeExecutionData[] = [];

		if (operation === 'fromFile') {
			// Read data from spreadsheet file to workflow
			for (let i = 0; i < items.length; i++) {
				try {
					const options = this.getNodeParameter('options', i, {});
					let fileFormat = this.getNodeParameter('fileFormat', i, {});
					const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i);
					const binaryData = this.helpers.assertBinaryData(i, binaryPropertyName);

					let rows: unknown[] = [];

					if (
						fileFormat === 'autodetect' &&
						(binaryData.mimeType === 'text/csv' ||
							(binaryData.mimeType === 'text/plain' && binaryData.fileExtension === 'csv'))
					) {
						fileFormat = 'csv';
					}

					if (fileFormat === 'csv') {
						const maxRowCount = options.maxRowCount as number;
						const parser = createCSVParser({
							delimiter: options.delimiter as string,
							fromLine: options.fromLine as number,
							bom: options.enableBOM as boolean,
							to: maxRowCount > -1 ? maxRowCount : undefined,
							columns: options.headerRow !== false,
							onRecord: (record) => {
								if (!options.includeEmptyCells) {
									record = Object.fromEntries(
										Object.entries(record).filter(([_key, value]) => value !== ''),
									);
								}
								rows.push(record);
							},
						});
						if (binaryData.id) {
							const stream = await this.helpers.getBinaryStream(binaryData.id);
							await new Promise<void>(async (resolve, reject) => {
								parser.on('error', reject);
								parser.on('readable', () => {
									stream.unpipe(parser);
									stream.destroy();
									resolve();
								});
								stream.pipe(parser);
							});
						} else {
							parser.write(binaryData.data, BINARY_ENCODING);
							parser.end();
						}
					} else {
						let workbook: WorkBook;
						const xlsxOptions: ParsingOptions = { raw: options.rawData as boolean };
						if (options.readAsString) xlsxOptions.type = 'string';

						if (binaryData.id) {
							const binaryPath = this.helpers.getBinaryPath(binaryData.id);
							workbook = xlsxReadFile(binaryPath, xlsxOptions);
						} else {
							const binaryDataBuffer = Buffer.from(binaryData.data, BINARY_ENCODING);
							workbook = xlsxRead(
								options.readAsString ? binaryDataBuffer.toString() : binaryDataBuffer,
								xlsxOptions,
							);
						}

						if (workbook.SheetNames.length === 0) {
							throw new NodeOperationError(
								this.getNode(),
								'Spreadsheet does not have any sheets!',
								{
									itemIndex: i,
								},
							);
						}

						let sheetName = workbook.SheetNames[0];
						if (options.sheetName) {
							if (!workbook.SheetNames.includes(options.sheetName as string)) {
								throw new NodeOperationError(
									this.getNode(),
									`Spreadsheet does not contain sheet called "${options.sheetName}"!`,
									{ itemIndex: i },
								);
							}
							sheetName = options.sheetName as string;
						}

						// Convert it to json
						const sheetToJsonOptions: Sheet2JSONOpts = {};
						if (options.range) {
							if (isNaN(options.range as number)) {
								sheetToJsonOptions.range = options.range;
							} else {
								sheetToJsonOptions.range = parseInt(options.range as string, 10);
							}
						}

						if (options.includeEmptyCells) {
							sheetToJsonOptions.defval = '';
						}

						if (options.headerRow === false) {
							sheetToJsonOptions.header = 1; // Consider the first row as a data row
						}

						rows = xlsxUtils.sheet_to_json(workbook.Sheets[sheetName], sheetToJsonOptions);

						// Check if data could be found in file
						if (rows.length === 0) {
							continue;
						}
					}

					// Add all the found data columns to the workflow data
					if (options.headerRow === false) {
						// Data was returned as an array - https://github.com/SheetJS/sheetjs#json
						for (const rowData of rows) {
							newItems.push({
								json: {
									row: rowData,
								},
								pairedItem: {
									item: i,
								},
							} as INodeExecutionData);
						}
					} else {
						for (const rowData of rows) {
							newItems.push({
								json: rowData,
								pairedItem: {
									item: i,
								},
							} as INodeExecutionData);
						}
					}
				} catch (error) {
					if (this.continueOnFail()) {
						newItems.push({
							json: {
								error: error.message,
							},
							pairedItem: {
								item: i,
							},
						});
						continue;
					}
					throw new NodeOperationError(this.getNode(), error, { itemIndex: i });
				}
			}

			return [newItems];
		} else if (operation === 'toFile') {
			const pairedItem = generatePairedItemData(items.length);
			try {
				// Write the workflow data to spreadsheet file
				const binaryPropertyName = this.getNodeParameter('binaryPropertyName', 0);
				const fileFormat = this.getNodeParameter('fileFormat', 0) as string;
				const options = this.getNodeParameter('options', 0, {});
				const sheetToJsonOptions: JSON2SheetOpts = {};
				if (options.headerRow === false) {
					sheetToJsonOptions.skipHeader = true;
				}
				// Get the json data of the items and flatten it
				let item: INodeExecutionData;
				const itemData: IDataObject[] = [];
				for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
					item = items[itemIndex];
					itemData.push(flattenObject(item.json));
				}

				const ws = xlsxUtils.json_to_sheet(itemData, sheetToJsonOptions);

				const wopts: WritingOptions = {
					bookSST: false,
					type: 'buffer',
				};

				if (fileFormat === 'csv') {
					wopts.bookType = 'csv';
				} else if (fileFormat === 'html') {
					wopts.bookType = 'html';
				} else if (fileFormat === 'rtf') {
					wopts.bookType = 'rtf';
				} else if (fileFormat === 'ods') {
					wopts.bookType = 'ods';
					if (options.compression) {
						wopts.compression = true;
					}
				} else if (fileFormat === 'xls') {
					wopts.bookType = 'xls';
				} else if (fileFormat === 'xlsx') {
					wopts.bookType = 'xlsx';
					if (options.compression) {
						wopts.compression = true;
					}
				}

				// Convert the data in the correct format
				const sheetName = (options.sheetName as string) || 'Sheet';
				const wb: WorkBook = {
					SheetNames: [sheetName],
					Sheets: {
						[sheetName]: ws,
					},
				};
				const wbout: Buffer = xlsxWrite(wb, wopts);

				// Create a new item with only the binary spreadsheet data
				const newItem: INodeExecutionData = {
					json: {},
					binary: {},
					pairedItem,
				};

				let fileName = `spreadsheet.${fileFormat}`;
				if (options.fileName !== undefined) {
					fileName = options.fileName as string;
				}

				newItem.binary![binaryPropertyName] = await this.helpers.prepareBinaryData(wbout, fileName);

				newItems.push(newItem);
			} catch (error) {
				if (this.continueOnFail()) {
					newItems.push({
						json: {
							error: error.message,
						},
						pairedItem,
					});
				} else {
					throw error;
				}
			}
		} else {
			if (this.continueOnFail()) {
				return [[{ json: { error: `The operation "${operation}" is not supported!` } }]];
			} else {
				throw new NodeOperationError(
					this.getNode(),
					`The operation "${operation}" is not supported!`,
				);
			}
			returnData = await fromFile.execute.call(this, items);
		}
		return [newItems];

		if (operation === 'toFile') {
			returnData = await toFile.execute.call(this, items);
		}

		return [returnData];
	}
}
@@ -0,0 +1,230 @@
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
import { BINARY_ENCODING, NodeOperationError } from 'n8n-workflow';

import type { Sheet2JSONOpts, WorkBook, ParsingOptions } from 'xlsx';
import { read as xlsxRead, readFile as xlsxReadFile, utils as xlsxUtils } from 'xlsx';

import { parse as createCSVParser } from 'csv-parse';
import { binaryProperty, fromFileOptions } from '../description';

export const description: INodeProperties[] = [
	binaryProperty,
	{
		displayName: 'File Format',
		name: 'fileFormat',
		type: 'options',
		options: [
			{
				name: 'Autodetect',
				value: 'autodetect',
			},
			{
				name: 'CSV',
				value: 'csv',
				description: 'Comma-separated values',
			},
			{
				name: 'HTML',
				value: 'html',
				description: 'HTML Table',
			},
			{
				name: 'ODS',
				value: 'ods',
				description: 'OpenDocument Spreadsheet',
			},
			{
				name: 'RTF',
				value: 'rtf',
				description: 'Rich Text Format',
			},
			{
				name: 'XLS',
				value: 'xls',
				description: 'Excel',
			},
			{
				name: 'XLSX',
				value: 'xlsx',
				description: 'Excel',
			},
		],
		default: 'autodetect',
		description: 'The format of the binary data to read from',
		displayOptions: {
			show: {
				operation: ['fromFile'],
			},
		},
	},
	fromFileOptions,
];

export async function execute(
	this: IExecuteFunctions,
	items: INodeExecutionData[],
	fileFormatProperty = 'fileFormat',
) {
	const returnData: INodeExecutionData[] = [];
	let fileExtension;
	let fileFormat;

	for (let i = 0; i < items.length; i++) {
		try {
			const options = this.getNodeParameter('options', i, {});
			fileFormat = this.getNodeParameter(fileFormatProperty, i, '');
			const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i);
			const binaryData = this.helpers.assertBinaryData(i, binaryPropertyName);
			fileExtension = binaryData.fileExtension;

			let rows: unknown[] = [];

			if (
				fileFormat === 'autodetect' &&
				(binaryData.mimeType === 'text/csv' ||
					(binaryData.mimeType === 'text/plain' && binaryData.fileExtension === 'csv'))
			) {
				fileFormat = 'csv';
			}

			if (fileFormat === 'csv') {
				const maxRowCount = options.maxRowCount as number;
				const parser = createCSVParser({
					delimiter: options.delimiter as string,
					fromLine: options.fromLine as number,
					bom: options.enableBOM as boolean,
					to: maxRowCount > -1 ? maxRowCount : undefined,
					columns: options.headerRow !== false,
					onRecord: (record) => {
						if (!options.includeEmptyCells) {
							record = Object.fromEntries(
								Object.entries(record).filter(([_key, value]) => value !== ''),
							);
						}
						rows.push(record);
					},
				});
				if (binaryData.id) {
					const stream = await this.helpers.getBinaryStream(binaryData.id);
					await new Promise<void>(async (resolve, reject) => {
						parser.on('error', reject);
						parser.on('readable', () => {
							stream.unpipe(parser);
							stream.destroy();
							resolve();
						});
						stream.pipe(parser);
					});
				} else {
					parser.write(binaryData.data, BINARY_ENCODING);
					parser.end();
				}
			} else {
				let workbook: WorkBook;
				const xlsxOptions: ParsingOptions = { raw: options.rawData as boolean };
				if (options.readAsString) xlsxOptions.type = 'string';

				if (binaryData.id) {
					const binaryPath = this.helpers.getBinaryPath(binaryData.id);
					workbook = xlsxReadFile(binaryPath, xlsxOptions);
				} else {
					const binaryDataBuffer = Buffer.from(binaryData.data, BINARY_ENCODING);
					workbook = xlsxRead(
						options.readAsString ? binaryDataBuffer.toString() : binaryDataBuffer,
						xlsxOptions,
					);
				}

				if (workbook.SheetNames.length === 0) {
					throw new NodeOperationError(this.getNode(), 'Spreadsheet does not have any sheets!', {
						itemIndex: i,
					});
				}

				let sheetName = workbook.SheetNames[0];
				if (options.sheetName) {
					if (!workbook.SheetNames.includes(options.sheetName as string)) {
						throw new NodeOperationError(
							this.getNode(),
							`Spreadsheet does not contain sheet called "${options.sheetName}"!`,
							{ itemIndex: i },
						);
					}
					sheetName = options.sheetName as string;
				}

				// Convert it to json
				const sheetToJsonOptions: Sheet2JSONOpts = {};
				if (options.range) {
					if (isNaN(options.range as number)) {
						sheetToJsonOptions.range = options.range;
					} else {
						sheetToJsonOptions.range = parseInt(options.range as string, 10);
					}
				}

				if (options.includeEmptyCells) {
					sheetToJsonOptions.defval = '';
				}

				if (options.headerRow === false) {
					sheetToJsonOptions.header = 1; // Consider the first row as a data row
				}

				rows = xlsxUtils.sheet_to_json(workbook.Sheets[sheetName], sheetToJsonOptions);

				// Check if data could be found in file
				if (rows.length === 0) {
					continue;
				}
			}

			// Add all the found data columns to the workflow data
			if (options.headerRow === false) {
				// Data was returned as an array - https://github.com/SheetJS/sheetjs#json
				for (const rowData of rows) {
					returnData.push({
						json: {
							row: rowData,
						},
						pairedItem: {
							item: i,
						},
					} as INodeExecutionData);
				}
			} else {
				for (const rowData of rows) {
					returnData.push({
						json: rowData,
						pairedItem: {
							item: i,
						},
					} as INodeExecutionData);
				}
			}
		} catch (error) {
			let errorDescription = error.description;
			if (fileExtension && fileExtension !== fileFormat) {
				error.message = `The file selected in 'Input Binary Field' is not in ${fileFormat} format`;
				errorDescription = `Try to change the operation or select a ${fileFormat} file in 'Input Binary Field'`;
			}
			if (this.continueOnFail()) {
				returnData.push({
					json: {
						error: error.message,
					},
					pairedItem: {
						item: i,
					},
				});
				continue;
			}
			throw new NodeOperationError(this.getNode(), error, {
				itemIndex: i,
				description: errorDescription,
			});
		}
	}

	return returnData;
}
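The fromFile operation above also shows the binary-data access pattern this overhaul standardizes on: assert that the named binary property exists, then stream it via getBinaryStream when the item only carries a binary-data id (data stored outside the execution payload), and fall back to the base64-encoded data field otherwise. A condensed sketch of just that branch, assuming the same helpers used in the diff (the wrapper function itself is illustrative, not part of the commit):

import type { IExecuteFunctions } from 'n8n-workflow';
import { BINARY_ENCODING } from 'n8n-workflow';

// Illustrative helper: resolve an item's binary property to a Buffer,
// streaming when the data lives in the binary-data manager (binaryData.id is set).
async function readBinaryToBuffer(this: IExecuteFunctions, itemIndex: number, propertyName: string) {
	const binaryData = this.helpers.assertBinaryData(itemIndex, propertyName);

	if (binaryData.id) {
		const stream = await this.helpers.getBinaryStream(binaryData.id);
		const chunks: Buffer[] = [];
		for await (const chunk of stream) {
			chunks.push(Buffer.from(chunk));
		}
		return Buffer.concat(chunks);
	}

	// Data is embedded in the item as a base64 string
	return Buffer.from(binaryData.data, BINARY_ENCODING);
}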
@@ -0,0 +1,44 @@
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';

import { generatePairedItemData } from '@utils/utilities';
import type { JsonToSpreadsheetBinaryFormat, JsonToSpreadsheetBinaryOptions } from '@utils/binary';
import { convertJsonToSpreadsheetBinary } from '@utils/binary';
import { toFileOptions, toFileProperties } from '../description';

export const description: INodeProperties[] = [...toFileProperties, toFileOptions];

export async function execute(this: IExecuteFunctions, items: INodeExecutionData[]) {
	const returnData: INodeExecutionData[] = [];

	const pairedItem = generatePairedItemData(items.length);

	try {
		const binaryPropertyName = this.getNodeParameter('binaryPropertyName', 0);
		const fileFormat = this.getNodeParameter('fileFormat', 0) as JsonToSpreadsheetBinaryFormat;
		const options = this.getNodeParameter('options', 0, {}) as JsonToSpreadsheetBinaryOptions;

		const binaryData = await convertJsonToSpreadsheetBinary.call(this, items, fileFormat, options);

		const newItem: INodeExecutionData = {
			json: {},
			binary: {
				[binaryPropertyName]: binaryData,
			},
			pairedItem,
		};

		returnData.push(newItem);
	} catch (error) {
		if (this.continueOnFail()) {
			returnData.push({
				json: {
					error: error.message,
				},
				pairedItem,
			});
		} else {
			throw error;
		}
	}
	return returnData;
}
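The implementation of convertJsonToSpreadsheetBinary is not shown in this diff; it lives in the shared @utils/binary module that the new toFile operation imports. Judging from the inline toFile logic it replaces in the V2 hunk above, it roughly maps the items to rows, builds an xlsx workbook, serializes it to a Buffer, and wraps it with prepareBinaryData. A hedged sketch under that assumption (the function name and option shape here are illustrative, not the actual utility):

import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type { WorkBook, WritingOptions } from 'xlsx';
import { utils as xlsxUtils, write as xlsxWrite } from 'xlsx';

type SpreadsheetFormat = 'csv' | 'html' | 'rtf' | 'ods' | 'xls' | 'xlsx';

// Illustrative stand-in for the real helper: items -> workbook -> Buffer -> binary data
async function jsonToSpreadsheetBinary(
	this: IExecuteFunctions,
	items: INodeExecutionData[],
	fileFormat: SpreadsheetFormat,
	options: { compression?: boolean; fileName?: string; headerRow?: boolean; sheetName?: string },
) {
	// The real code also flattens nested JSON (flattenObject) before building the sheet
	const rows = items.map((item) => item.json);
	const sheet = xlsxUtils.json_to_sheet(rows, { skipHeader: options.headerRow === false });

	const sheetName = options.sheetName ?? 'Sheet';
	const workbook: WorkBook = { SheetNames: [sheetName], Sheets: { [sheetName]: sheet } };

	const writingOptions: WritingOptions = { bookSST: false, type: 'buffer', bookType: fileFormat };
	if (options.compression && (fileFormat === 'xlsx' || fileFormat === 'ods')) {
		writingOptions.compression = true;
	}

	const buffer = xlsxWrite(workbook, writingOptions) as Buffer;
	const fileName = options.fileName ?? `spreadsheet.${fileFormat}`;
	return await this.helpers.prepareBinaryData(buffer, fileName);
}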