linux - Node.js - exec command for tar files works correctly the first time, but produces corrupted tar contents on subsequent executions
I am building a web app with Node.js, and at one point I need to produce a tar archive of a directory of PDFs. The application is running on a VM running Ubuntu 14.04 Server. The code is shown below:
function tardirectory(path, token, callback) { var exec = require('child_process').exec; var cmd = 'cd ' + path + ' && tar -cvf genericname-' + token + '.tar' + ' ' + token; exec(cmd, function(error, stdout, stderr) { console.log(stdout); console.log(stderr); if (error) { console.error(error); } if(callback) callback(); }); }
and the tardirectory function is called by the following code:
router.post('/files/generate', function(req, res, next) { idlist = req.body['ids[]']; token = req.body['token']; // if single file being generated if (typeof req.body['ids[]'] === "string"){ filehelper.generatefile(idlist[0], req.app.locals.site.basedir + "temp/", token); } // if multiple files being generated else { idlist.foreach(function(id) { filehelper.generatefile(id, req.app.locals.site.basedir + "temp/", token); }); } filehelper.tardirectory(req.app.locals.site.basedir + "temp/", token, res.end); });
The code expects a POST request with dynamic data generated by a button click in the web app, and it creates files based on that data and tars the directory. It works fine and well... the first time. When I click the button for the first time in a while, the tar is produced, and when I open it, the client-side PDFs are identical to the ones on the server. When I click again within an hour or so, though, I still receive a tar file, but when I open the archive and unpack it, the PDFs are corrupted and half the expected byte size. I am at a loss here... I had a suspicion it might be related to improper handling of stream closing, but I'm not sure.
This is the code that generates the PDFs into the directory, which is tarred after generation:
function generatefile(id, path, token) { var dirpath = path + token; var filepath = path + token + "/file" + id + ".pdf"; console.log("creating file for: " + id); try{ fs.statsync(dirpath).isdirectory(); } catch (err) { fs.mkdirsync(dirpath); } // start file pdf generation file = new pdfdocument(); output = fs.createwritestream(filepath); output.on('close', function(){ return; }); file.pipe(output); // handle intricacies of file generation file.text("file" + id + ".pdf"); // end file file.end(); }
- Are the PDF files okay on the server before compressing them?
- In the
generatefile
function you have a write stream, which is asynchronous. But the calling function is synchronous, and you start the .tar compression without waiting for the PDF generation to complete, which may cause the issue. - As a recommendation: try wrapping
generatefile
in a promise, or iterate asynchronously, and start the compression only after all files have been generated.
An example with bluebird:
var promise = require('bluebird'); function generatefile(id, path, token) { return new promise(function(resolve, reject) { var dirpath = path + token; var filepath = path + token + "/file" + id + ".pdf"; console.log("creating file for: " + id); try{ fs.statsync(dirpath).isdirectory(); } catch (err) { fs.mkdirsync(dirpath); } // start file pdf generation file = new pdfdocument(); output = fs.createwritestream(filepath); output.on('close', function(){ return resolve(); }); output.on('error', function(error) { return reject(error); }); file.pipe(output); // handle intricacies of file generation file.text("file" + id + ".pdf"); // end file file.end(); }); }
Combining the PDF generation and the compression:
var promise = require('bluebird'); .... //idlist.foreach(function(id) { // filehelper.generatefile(id, req.app.locals.site.basedir + "temp/", //token);}); //replace promise.map(idlist, function(id) { return filehelper.generatefile(id, req.app.locals.site.basedir + "temp/", token); }) .then(function() { //all files ready, start compressing }) .catch(function(error) { //we have error });
Comments
Post a Comment