javascript - Attempting synchronous behaviour with shell script execution in node.js
I'm quite new to Node.js. I'm using it as a server that receives HTTP GET requests from the browser, does some processing, and returns the result to the browser.
The processing is scraping a website using PhantomJS/CasperJS, and because of complications with using CasperJS inside Node.js I'm executing it as a sub-process by means of a shell script.
When I run the code below and make a request from the browser, the response seems to appear before the shell script has run. I tried using the async library to run the steps in series, so that run.sh runs and returns its value before it's added to the response, but it doesn't seem to work that way. Can anyone spot my stupid mistake?
Thanks in advance!
var express = require('express');
var app = express();
var sys = require('sys');
var exec = require('child_process').exec;
var async = require('async');

var auth = express.basicAuth('test', 'pass');

var server = app.listen(3000, function() {
    console.log('Listening on port %d', server.address().port);
});

var result;

//app.use(auth);

app.get('/free/:u/:p', auth, function(req, res) {
    async.series([
        function(callback){
            var puts = function (error, stdout, stderr) {
                result = stdout;
            };
            exec("./run.sh " + req.params.u + " " + req.params.p, puts);
            callback(null);
        },
        function(callback){
            res.writeHead(200, {'Content-Type': 'application/json'});
            res.send(result);
            callback(null);
        }
    ]);
});

app.get('/', function(req, res) {
    res.send('Hello!');
});
You're not calling callback(null) asynchronously. You need to put it inside puts, so that the series only moves on once exec has finished.
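For illustration, here is a sketch of what the corrected async.series version could look like (it keeps run.sh, req.params.u and req.params.p from your code, but scopes result to the request); the key change is that the callback is only invoked from inside the exec callback:

app.get('/free/:u/:p', auth, function(req, res) {
    var result; // scoped to this request, not shared between clients
    async.series([
        function(callback) {
            exec("./run.sh " + req.params.u + " " + req.params.p,
                function (error, stdout, stderr) {
                    result = stdout;
                    callback(error); // signal completion only after run.sh is done
                });
        },
        function(callback) {
            res.writeHead(200, {'Content-Type': 'application/json'});
            res.send(result);
            callback(null);
        }
    ]);
});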
Also, you shouldn't use a global result variable (it's shared across requests, possibly leaking data between different clients - argh!). And there's hardly a reason to use async.js for such a trivial chain.
app.get('/free/:u/:p', auth, function(req, res) {
    exec("./run.sh " + req.params.u + " " + req.params.p, function (error, stdout, stderr) {
        res.writeHead(200, {'Content-Type': 'application/json'});
        res.send(stdout);
    });
});
Notice the security issue you've introduced by not escaping the shell parameters.
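If you want to keep the shell-script approach, one way to sidestep the injection risk is to avoid the shell entirely and pass the arguments as an array via execFile. A minimal sketch, assuming run.sh is executable and sits in the working directory:

var execFile = require('child_process').execFile;

app.get('/free/:u/:p', auth, function(req, res) {
    // execFile passes the arguments directly to the program without invoking
    // a shell, so the URL parameters can't be used to inject extra commands.
    execFile('./run.sh', [req.params.u, req.params.p],
        function (error, stdout, stderr) {
            if (error) {
                return res.send(500);
            }
            res.writeHead(200, {'Content-Type': 'application/json'});
            res.send(stdout);
        });
});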