I am trying to determine whether this is a fair benchmark. The goal is to see how many concurrent connections Node.js can handle at various payload sizes. The code is below.
var express = require('express');
var Sequelize = require('sequelize');
var fs = require('fs');

var app = express();
var data;
var filename = process.argv[2] || './start.json';

console.log("Using: " + filename);
data = fs.readFileSync(filename);

var blockSize = 250000;

app.get('/start', function (req, res) {
    // Break the data up into blocks. Works up to very high concurrency.
    // for (var i = 0; i < data.length; i += blockSize)
    //     res.write(data.slice(i, i + blockSize));
    // res.end();

    // Can only handle about 600 concurrent requests if the data size > 500KB.
    res.send(data);
});

app.listen(3000, function () {
    console.log('Listening on 3000.');
});
As stated in the comments, if the payload size is larger than about 500KB and there are around 500 concurrent connections, the load-testing client starts reporting "connection reset by peer". If the data is sliced up and written in chunks, the server survives much higher concurrency before that starts happening. Both stock Node and Express exhibit this behavior.
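For reference, this is a minimal sketch of the stock-Node variant (no Express) with the chunked-write path wired up, including backpressure handling via the 'drain' event; the file path, block size, and port here are just placeholders mirroring the Express version above:

var http = require('http');
var fs = require('fs');

var data = fs.readFileSync(process.argv[2] || './start.json');
var blockSize = 250000;

http.createServer(function (req, res) {
    res.writeHead(200, { 'Content-Type': 'application/json' });

    var i = 0;
    function writeChunk() {
        // Write blocks until the socket buffer fills up, then wait for 'drain'.
        while (i < data.length) {
            var ok = res.write(data.slice(i, i + blockSize));
            i += blockSize;
            if (!ok) {
                res.once('drain', writeChunk);
                return;
            }
        }
        res.end();
    }
    writeChunk();
}).listen(3000);

The only difference from the Express handler is that this one writes the payload in blockSize chunks and respects the return value of res.write() instead of handing the whole buffer to res.send() at once.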