node.js - Warning from mongodb - "(node) warning: Recursive process.nextTick detected. This will break in the next version of node. Please use setImmediate..." -
Hi there,
I am running Node 0.10.24. I'm trying to fetch records from a MongoDB collection, and once the collection goes over 1000 elements I get this error:
node.js:375 throw new error(msg); ^ error: (node) warning: recursive process.nexttick detected. break in next version of node. please use setimmediate recursive deferral. @ maxtickwarn (node.js:375:15) @ process.nexttick (node.js:480:9) @ cursorstream._next (/var/www/html/node_modules/mongodb/lib/mongodb/cursorstream.js:76:11) @ cursorstream._onnextobject (/var/www/html/node_modules/mongodb/lib/mongodb/cursorstream.js:98:8) @ /var/www/html/node_modules/mongodb/lib/mongodb/cursorstream.js:78:12 @ cursor.nextobject (/var/www/html/node_modules/mongodb/lib/mongodb/cursor.js:540:5) @ /var/www/html/node_modules/mongodb/lib/mongodb/cursorstream.js:77:18 @ process._tickcallback (node.js:415:13)
I tried 2 ways of fetching data from MongoDB without success.
version 1:
exports.findall = function(req, res) { db.collection('products', function(err, collection) { var first = true; var stream = collection.find().batchsize(10000).stream(); var results = []; stream.on('data', function(item){ results.push(item); }).on('error', function (err) { // handle error console.log(err); }).on('close', function () { // stream closed, complete response res.end(); res.send(results); }); }); };
version 2:
// Version 2: standalone HTTP server that returns the whole 'products'
// collection as a JSON array on every request.
var sys = require('sys'),
    http = require('http'),
    mongodb = require('mongodb');

// Driver classes are capitalized: Server / Db / Collection. The scraped
// original's lowercase `mongodb.server` etc. would throw at startup.
var server = new mongodb.Server("localhost", 27017, {});
var db = new mongodb.Db('productdb', server, {});
var client = null;

db.open(function (error, clientParam) {
  if (error) {
    db.close();
    throw error;
  }
  client = clientParam;
});

http.createServer(function (req, res) {
  // NOTE(review): if a request arrives before db.open() completes,
  // `client` is still null — consider starting listen() in the open
  // callback instead.
  var collection = new mongodb.Collection(client, 'products');
  collection.find({}, {}, function (err, cursor) {
    if (err == null) {
      cursor.toArray(function (err, docs) {
        res.writeHead(200, { 'Content-Type': 'application/json' });
        // docs is an array of objects; http's res.write() accepts only a
        // string or Buffer, so it must be serialized first — the original
        // res.write(docs) threw a TypeError.
        res.write(JSON.stringify(docs));
        res.end();
      });
    } else {
      // Report failure with a 5xx status (the original sent 200), and do
      // not `throw err` after responding — an uncaught throw inside an
      // async callback brings down the whole server.
      res.writeHead(500, { 'Content-Type': 'text/html' });
      res.write('An error occurred!');
      res.end();
      console.log(err);
    }
  });
}).listen(8088);
In version 3, when I add a stream pause and resume it after a timeout, I get the result I want, but with a significant delay.
version 3:
exports.findall = function(req, res) { db.collection('products', function(err, collection) { var first = true; var stream = collection.find().batchsize(10000).stream(); var results = []; stream.on('data', function(item){ results.push(item); stream.pause(); settimeout(function(){ stream.resume(); }, 1); }).on('error', function (err) { // handle error console.log(err); }).on('close', function () { // stream closed, complete response res.end(); res.send(results); }); }); };
What is the correct way to read a lot of rows fast from MongoDB using Node.js without encountering this issue?
thank you.
update:
I have found another way that still does not work; this time it gives me a warning after the 500th row is returned:
[warning] streamrecords method deprecated, please use stream method faster
and code is:
// Workaround attempt: read the collection via the (deprecated)
// streamRecords() cursor API and reply with the accumulated array.
// NOTE(review): this fragment references `db` and `res` from an enclosing
// scope not shown here — confirm against the surrounding handler.
db.collection('products', function(err, collection) {
  if (err) {
    // The original ignored this error and would have crashed below.
    console.log(err);
    return res.end();
  }
  // streamRecords is camelCase in the driver API — the scraped lowercase
  // `streamrecords` is not a function.
  var stream = collection.find().streamRecords();
  var results = [];
  stream.on('data', function(item) {
    results.push(item);
  }).on('error', function(err) {
    // handle error
    console.log(err);
  }).on('end', function() {
    // Stream finished, complete the response. res.send() both writes and
    // ends the response; the original's res.end() before res.send()
    // delivered an empty body.
    res.send(results);
  });
});
If I change the streamRecords() call to stream(), I get the initial problem from version 1. If I leave it this way, I only get the first 500 rows. :|
After 3 days of pulling my hair out I decided to do a clean install on the server, and after installing the latest and greatest:
"express": "3.x", "mongodb": "1.3.23", "connect-timeout": "0.0.1", "npm": "1.x"
My previous MongoDB driver for Node.js was version 1.1.8, which I think was responsible for the grief.
My version 1 now works like a charm!
Comments
Post a Comment