I'm trying to insert over 1 million rows into Postgres Table using NodeJs The problem is when i start script, the memory constantly keep increasing till it reach 1.5 GB of RAM and then I get error: FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - process out of memory
The result always the same - about 7000 inserted rows instead of 1 million
Here is the code
var pg = require('pg');
var fs = require('fs');
var config = require('./config.js');
var PgClient = new pg.Client(config.pg);
PgClient.connect();
var lineReader = require('readline').createInterface({
input: require('fs').createReadStream('resources/database.csv') //file contains over 1 million lines
});
var n=0;
lineReader.on('line', function(line) {
n++;
var insert={"firstname":"John","lastname":"Conor"};
//No matter what data we insert, the point is that the number of inserted rows much less than it should be
PgClient.query('INSERT INTO HUMANS (firstname,lastname) values ($1,$2)', [insert.firstname,insert.lastname]);
});
lineReader.on('close',function() {
console.log('end '+n);
});
PgClient.query()call and inside there is where you calledlineReader.resume()?