From cd747af78ad9bd379e4cebee101b44732727847c Mon Sep 17 00:00:00 2001
From: Onja
Date: Tue, 17 Oct 2023 17:15:12 +0300
Subject: [PATCH] Refactored the `FileService.parseFromUrl` method to return
 the filepath instead of a stream, and updated the `routes/index.js` file to
 handle the new return value

---
 src/models/file.js   | 147 +++++++++++++++++++++----------------------
 src/routes/index.js  |  24 +++----
 src/services/file.js |  17 ++---
 3 files changed, 90 insertions(+), 98 deletions(-)

diff --git a/src/models/file.js b/src/models/file.js
index 86600df..6199a94 100644
--- a/src/models/file.js
+++ b/src/models/file.js
@@ -18,15 +18,6 @@ const { basedir } = require('../config/constants');
 
 const dest = path.join(basedir, 'public/csv');
 
-// Create a generateFilePath function witch returns a path with a filename and datetime
-function generateFilePath(filename) {
-  return {
-    filepath: path.join(dest, `${filename}-${Date.now()}.csv`),
-    generatedpath: path.join(dest, `${filename}-generated-${Date.now()}.csv`),
-  };
-}
-
-
 // Create a class File that extends EventEmitter
 class File {
 
@@ -38,7 +29,14 @@ class File {
     this.url = url;
   }
 
-
+  // Create a generateFilePath method which returns paths with the filename and date
+  generateFilePath(filename) {
+    const date = new Date();
+    return {
+      filepath: path.join(dest, `${filename}-${date.toISOString().split('T')[0]}.csv`),
+      generatedpath: path.join(dest, `${filename}-generated-${date.toISOString().split('T')[0]}.csv`),
+    };
+  }
 
   /**
    * Download a file from a url
@@ -47,7 +45,7 @@ class File {
   async download() {
     const url = URL.parse(this.url);
     this.filename = slugify(url.hostname, { lower: true });
-    const { filepath, generatedpath } = generateFilePath(this.filename);
+    const { filepath, generatedpath } = this.generateFilePath(this.filename);
     this.filepath = filepath;
     this.generatedpath = generatedpath;
 
@@ -121,74 +119,73 @@ class File {
 
   // create a parse method which read the file and return a stream
   parse(columns) {
-    const stream = new PassThrough();
-    const fileStream = fs.createWriteStream(this.generatedpath);
-
-    // check if columns is valid
-    if (!columns || !columns.length) {
-      // return Promise.reject(new Error('Invalid columns'));
-      emitter.emit('parse.error', { url: this.url, filepath: this.filepath, error: 'Invalid columns' });
-      return false;
-    }
+    return new Promise((resolve, reject) => {
+      const fileStream = fs.createWriteStream(this.generatedpath);
+
+      // check if columns is valid
+      if (!columns || !columns.length) {
+        // return Promise.reject(new Error('Invalid columns'));
+        emitter.emit('parse.error', { url: this.url, filepath: this.filepath, error: 'Invalid columns' });
+        reject(new Error('Invalid columns'));
+        return false;
+      }
 
-    // Create a variable to hold csv columns indexes
-    const columnsIndex = {};
-    for (let column of columns) {
-      columnsIndex[column] = {
-        exist: false,
-        main: (String(column)).split('.')[0],
-        value: column,
-        rest: (String(column)).split('.').slice(1).join('.'),
-        last: (String(column)).split('.').pop(),
-      };
-    }
+      // Create a variable to hold csv columns indexes
+      const columnsIndex = {};
+      for (let column of columns) {
+        columnsIndex[column] = {
+          exist: false,
+          main: (String(column)).split('.')[0],
+          value: column,
+          rest: (String(column)).split('.').slice(1).join('.'),
+          last: (String(column)).split('.').pop(),
+        };
+      }
+
+      const columnsFiltered = [];
 
-    const columnsFiltered = [];
-
-    let count = 1;
-    fs.createReadStream(this.filepath)
-      .pipe(csvParser({ separator: ';' }))
-      .on('headers', (headers) => {
-        headers = headers.map(header => typeof header === 'string' ? header.trim() : header);
-
-        const result = [];
-        for (let key in columnsIndex) {
-          columnsIndex[key].exist = headers.includes(columnsIndex[key].main);
-          if ( columnsIndex[key].exist ) {
-            columnsFiltered.push(columnsIndex[key].value);
-            result.push(columnsIndex[key].last);
+      let count = 1;
+      fs.createReadStream(this.filepath)
+        .pipe(csvParser({ separator: ';' }))
+        .on('headers', (headers) => {
+          headers = headers.map(header => typeof header === 'string' ? header.trim() : header);
+
+          const result = [];
+          for (let key in columnsIndex) {
+            columnsIndex[key].exist = headers.includes(columnsIndex[key].main);
+            if ( columnsIndex[key].exist ) {
+              columnsFiltered.push(columnsIndex[key].value);
+              result.push(columnsIndex[key].last);
+            }
           }
-          }
-
-        // Emit a parse.start event with the url and filepath
-        emitter.emit('parse.start', { url: this.url, filepath: this.filepath, headers, result: result });
-
-        stream.write(result.join(';') + "\n");
-        fileStream.write(result.join(';') + "\n");
-      })
-      .on('data', (row) => {
-        // Emit a parse.data event with the url, filepath and data
-        let result = this.processRow(row, columnsIndex, columnsFiltered);
-        emitter.emit('parse.data', { url: this.url, filepath: this.filepath, data: row, result, index: count });
-        stream.write(result.join(';') + "\n");
-        fileStream.write(result.join(';') + "\n");
-        count++;
-      })
-      .on('error', (err) => {
-        // Emit a parse.error event with the error
-        emitter.emit('parse.error', { url: this.url, filepath: this.filepath, error: err.message });
-
-        fileStream.close();
-        fs.unlink(this.generatedpath, () => {});
-      })
-      .on('end', () => {
-        // Emit a parse.end event with the url and filepath
-        stream.end();
-        fileStream.close();
-        emitter.emit('parse.end', { url: this.url, filepath: this.filepath, count: count - 1, generated: path.basename(this.generatedpath) });
-      });
+
+          // Emit a parse.start event with the url and filepath
+          emitter.emit('parse.start', { url: this.url, filepath: this.filepath, headers, result: result });
 
-    return stream;
+          fileStream.write(result.join(';') + "\n");
+        })
+        .on('data', (row) => {
+          // Emit a parse.data event with the url, filepath and data
+          let result = this.processRow(row, columnsIndex, columnsFiltered);
+          emitter.emit('parse.data', { url: this.url, filepath: this.filepath, data: row, result, index: count });
+          fileStream.write(result.join(';') + "\n");
+          count++;
+        })
+        .on('error', (err) => {
+          // Emit a parse.error event with the error
+          emitter.emit('parse.error', { url: this.url, filepath: this.filepath, error: err.message });
+
+          fileStream.close();
+          reject(err);
+          fs.unlink(this.generatedpath, () => {});
+        })
+        .on('end', () => {
+          // Emit a parse.end event with the url and filepath
+          fileStream.close();
+          resolve(this.generatedpath);
+          emitter.emit('parse.end', { url: this.url, filepath: this.filepath, count: count - 1, generated: path.basename(this.generatedpath) });
+        });
+    });
   }
 
   /**
diff --git a/src/routes/index.js b/src/routes/index.js
index 1d8e98a..1b939f6 100644
--- a/src/routes/index.js
+++ b/src/routes/index.js
@@ -105,7 +105,7 @@ router.get('/', async function(req, res, next) {
   return res;
 });
 
-router.post('/', async function(req, res, next) {
+router.post('/', function(req, res, next) {
   // const url = 'https://bodacc-datadila.opendatasoft.com/api/explore/v2.1/catalog/datasets/annonces-commerciales/exports/csv?lang=fr&refine=publicationavis%3A%22A%22&refine=publicationavis_facette%3A%22Bodacc%20A%22&refine=familleavis_lib%3A%22Ventes%20et%20cessions%22&timezone=Asia%2FBaghdad&use_labels=true&delimiter=%3B';
 
   // get url from form
@@ -120,22 +120,16 @@ router.post('/', async function(req, res, next) {
     return res.status(500).send('Invalid columns');
   }
 
-  let stream = null;
-  try {
-    stream = await fileService.parseFromUrl(url, columns);
-  } catch (err) {
+  fileService.parseFromUrl(url, columns)
+    .then((filepath) => {
+      res.send({
+        success: true
+      });
+    })
+    .catch(err => {
     console.error('routes [/] error', err.message);
-  }
-
-  if ( !stream ) {
     return res.status(500).send('Invalid stream');
-  }
-
-  res.setHeader('Content-Disposition', 'attachment; filename="mon_fichier.csv"');
-  res.setHeader('Content-Type', 'text/csv; charset=utf-8');
-  stream.pipe(res);
-
-  // res.render('index', { title: 'Express' });
+    });
 });
 
 module.exports = router;
diff --git a/src/services/file.js b/src/services/file.js
index 26461f1..5cf446f 100644
--- a/src/services/file.js
+++ b/src/services/file.js
@@ -29,14 +29,15 @@ class FileService {
     const file = new File(url);
     const filepath = await file.download();
 
-    const stream = file.parse(columns);
-    if ( !stream ) {
-      emitter.emit('parseFromUrl.error', { url, columns, error: 'Invalid stream' });
-      return Promise.reject(new Error('Invalid stream'));
-    }
-
-    emitter.emit('parseFromUrl.end', { url, columns, filepath });
-    return Promise.resolve(stream);
+    return file.parse(columns)
+      .then((filepath) => {
+        emitter.emit('parseFromUrl.end', { url, columns, filepath });
+        return filepath;
+      })
+      .catch((err) => {
+        emitter.emit('parseFromUrl.error', { url, columns, error: err.message });
+        throw err;
+      });
   }
 
   /**
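
Note (not part of the patch): below is a minimal sketch of how a caller can consume the
promise-based parseFromUrl after this change. It assumes src/services/file.js exports a
ready-to-use FileService instance (required here as fileService) and that form bodies are
parsed into req.body, as the existing route expects; the /export path and the res.download
response are illustrative choices, not behaviour this patch installs.

    const express = require('express');
    const path = require('path');
    // Assumed module path and export shape; adjust to how FileService is actually exposed.
    const fileService = require('../services/file');

    const router = express.Router();

    // Hypothetical route: the real app wires this logic into POST / in src/routes/index.js.
    router.post('/export', (req, res) => {
      const { url, columns } = req.body;

      fileService.parseFromUrl(url, columns)
        .then((filepath) => {
          // parseFromUrl now resolves with the path of the generated CSV on disk,
          // so the route can report it or send the file back itself.
          res.download(filepath, path.basename(filepath));
        })
        .catch((err) => {
          console.error('export error', err.message);
          res.status(500).send('Failed to parse file');
        });
    });

    module.exports = router;

Because the service rethrows parse errors instead of swallowing them, the route's .catch is
the single place that decides the HTTP status, while the parse.* and parseFromUrl.* events
remain available for logging.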