
Refactor processRow method to handle columns filtering and improve error handling

Branch: node16
Onja, 12 months ago
commit 56b4caf42f
src/models/file.js: 76 changed lines

@@ -134,40 +134,42 @@ class File {
    const columnsIndex = {};
    for (let column of columns) {
      columnsIndex[column] = {
        index: -1,
        exist: false,
        main: (String(column)).split('.')[0],
        value: column,
        rest: (String(column)).split('.').slice(1).join('.'),
        last: (String(column)).split('.').pop(),
      };
    }
    let i = 0;
    const columnsFiltered = [];
    let count = 1;
    fs.createReadStream(this.filepath)
      .pipe(csvParser({ separator: ';' }))
      .on('headers', (headers) => {
        headers = headers.map(header => typeof header === 'string' ? header.trim() : header);
        const result = [];
        for (let key in columnsIndex) {
          columnsIndex[key].exist = headers.includes(columnsIndex[key].main);
          if ( columnsIndex[key].exist ) {
            columnsFiltered.push(columnsIndex[key].value);
            result.push(columnsIndex[key].last);
          }
        }
        // Emit a parse.start event with the url and filepath
        emitter.emit('parse.start', { url: this.url, filepath: this.filepath, headers });
        emitter.emit('parse.start', { url: this.url, filepath: this.filepath, headers, result: result });
        stream.write(result.join(';') + "\n");
      })
      .on('data', (row) => {
        // Emit a parse.data event with the url, filepath and data
        emitter.emit('parse.data', { url: this.url, filepath: this.filepath, data: row, index: i });
        let result = [];
        if ( i === 0 ) {
          for(let i = 0; i < row.length; i++) {
            for (let key of columnsIndex) {
              if (row[i] === columnsIndex[key].main) {
                columnsIndex[key].index = i;
                result.push(columnsIndex[key].last);
              }
            }
          }
        } else {
          // Loop through indexes and push the value of the column to result
          result = this.processRow(row, columnsIndex);
        }
        stream.write(result.join(';'));
        i++;
        let result = this.processRow(row, columnsIndex, columnsFiltered);
        emitter.emit('parse.data', { url: this.url, filepath: this.filepath, data: row, result, index: count });
        stream.write(result.join(';') + "\n");
        count++;
      })
      .on('error', (err) => {
        // Emit a parse.error event with the error
@@ -175,7 +177,7 @@ class File {
      })
      .on('end', () => {
        // Emit a parse.end event with the url and filepath
        emitter.emit('parse.end', { url: this.url, filepath: this.filepath, count: i });
        emitter.emit('parse.end', { url: this.url, filepath: this.filepath, count: count - 1 });
        stream.end();
      });
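
For context, the two hunks above follow the usual csv-parser streaming pattern: read the file, parse ';'-separated rows, and relay headers/data/error/end through an EventEmitter while writing the filtered output. A minimal standalone sketch of that pattern (the file names, the emitter, and the output stream below are illustrative stand-ins, not the repository's actual wiring):

const fs = require('fs');
const csvParser = require('csv-parser');        // same parser used in the diff
const { EventEmitter } = require('events');

const emitter = new EventEmitter();             // stand-in for the module's emitter
const output = fs.createWriteStream('out.csv'); // stand-in for `stream`

let count = 1;
fs.createReadStream('input.csv')                // stand-in for this.filepath
  .pipe(csvParser({ separator: ';' }))
  .on('headers', (headers) => {
    // csv-parser emits the header row once, before any data rows
    emitter.emit('parse.start', { headers });
    output.write(headers.join(';') + '\n');
  })
  .on('data', (row) => {
    // each data row arrives as an object keyed by header name
    emitter.emit('parse.data', { data: row, index: count });
    output.write(Object.values(row).join(';') + '\n');
    count++;
  })
  .on('error', (err) => emitter.emit('parse.error', { error: err }))
  .on('end', () => {
    emitter.emit('parse.end', { count: count - 1 });
    output.end();
  });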
@@ -190,6 +192,10 @@ class File {
   * @return string
   */
  getValueByPath(obj, path) {
    if ( !path || typeof path !== 'string' ) {
      return obj;
    }
    const parts = path.split('.');
    let result = obj;
    for (let part of parts) {
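
Only the new guard clause at the top of getValueByPath is visible in this hunk; the rest of the method is cut off at the hunk boundary. For reference, a plausible standalone version of such a dotted-path getter looks like the sketch below (the loop body and the empty-string fallback are assumptions, not code from the diff):

// Hypothetical reconstruction of a dotted-path getter like getValueByPath;
// only the guard clause and the first loop lines appear in the diff above.
function getValueByPath(obj, path) {
  if (!path || typeof path !== 'string') {
    return obj;                                 // guard shown in the diff
  }
  const parts = path.split('.');
  let result = obj;
  for (const part of parts) {
    if (result === null || typeof result !== 'object' || !(part in result)) {
      return '';                                // assumed fallback for missing keys
    }
    result = result[part];
  }
  return result;
}

// Example: getValueByPath({ address: { city: 'Oslo' } }, 'address.city') -> 'Oslo'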
@@ -208,37 +214,39 @@
  /**
   * Generate exported row
   *
   * @param array row
   * @param object row
   * @param object columnsIndex
   * @return array
   */
  processRow(row, columnsIndex) {
  processRow(row, columnsIndex, columnsFiltered) {
    const result = [];
    for (let i = 0; i < row.length; i++) {
    for (let key in row) {
      try {
        row[i] = JSON.parse(row[i]);
        if ( typeof row[key] === 'string' && row[key].startsWith('{') && row[key].endsWith('}') ) {
          row[key] = JSON.parse(row[key]);
        }
        row[key.trim()] = row[key]
      } catch (err) {
        // result.push('');
        console.log(err);
        console.log(err.message);
      }
    }
    const validIndexes = Object.keys(columnsIndex).filter(key => columnsIndex[key].index !== -1);
    for (let i=0; i < row.length; i++) {
      if (!validIndexes.includes(i)) {
    for (let key of columnsFiltered) {
      if (!columnsIndex[key].exist) {
        continue;
      }
      const column = columnsIndex[i];
      const item = row[i];
      const column = columnsIndex[key];
      const item = row[column.main] || '';
      if ( column.primary === column.value ) {
        result.push(item);
      } else {
        if ( typeof item === 'object' ) {
          result.push(this.getValueByPath(item, column.value));
          result.push(this.getValueByPath(item, column.rest));
        } else {
          result.push(item);
        }
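
Putting the pieces together: the refactor precomputes one columnsIndex entry per requested column (splitting dotted names into main, rest and last) and then has processRow project each parsed row onto the filtered columns, parsing JSON-looking cells so dotted paths can resolve. A rough, self-contained sketch of that flow with made-up column names and data (a simplification, not the repository's exact logic):

// Hypothetical, self-contained sketch of the projection done by processRow.
// A requested column is either a plain header ('name') or a dotted path into
// a JSON cell ('address.city'). Names and data below are made up.
function buildColumnsIndex(columns) {
  const columnsIndex = {};
  for (const column of columns) {
    columnsIndex[column] = {
      exist: false,
      main: String(column).split('.')[0],                 // CSV header name
      value: column,
      rest: String(column).split('.').slice(1).join('.'), // path inside a JSON cell
      last: String(column).split('.').pop(),              // label for the output header
    };
  }
  return columnsIndex;
}

function projectRow(row, columnsIndex, columnsFiltered) {
  const result = [];
  for (const key of columnsFiltered) {
    const column = columnsIndex[key];
    let item = row[column.main] || '';
    // Cells that look like JSON objects are parsed so dotted paths can resolve
    if (typeof item === 'string' && item.startsWith('{') && item.endsWith('}')) {
      try { item = JSON.parse(item); } catch (err) { /* keep the raw string */ }
    }
    if (typeof item === 'object' && item !== null && column.rest) {
      result.push(item[column.rest] || '');               // simplified: treats rest as one key
    } else {
      result.push(item);
    }
  }
  return result;
}

// Example with made-up data:
const columns = ['id', 'address.city'];
const columnsIndex = buildColumnsIndex(columns);
const columnsFiltered = columns;                          // assume both headers exist
const row = { id: '42', address: '{"city":"Oslo"}' };
console.log(projectRow(row, columnsIndex, columnsFiltered)); // [ '42', 'Oslo' ]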
