I am reading all the JSON files in a directory and performing CRUD operations on them in Couchbase in a cyclic manner for some performance benchmarks.
const fs = require('fs');

function readFiles(dirName, crudOps, onError) {
  fs.readdir(dirName, function (err, filenames) {
    if (err) {
      onError(err);
      return;
    }
    var circularIterator = cyclicIterator(filenames);
    while (1) {
      fname = circularIterator.next().value;
      fs.readFile(dirName + fname, function (err, content) {
        console.log(fname); // NEVER REACHES HERE
        if (err) {
          console.log(err);
          return;
        }
        crudOps(fname, content);
      });
    }
  });
}
However, it never seems to execute the fs.readFile callback. How can I iterate through the list of filenames in a circular fashion and pass each file's content to my crudOps function?
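(cyclicIterator is not shown in the question; for the sketches in this post it is assumed to be a generator that yields the filenames forever, wrapping around at the end of the list. circularIterator in the edited snippet below refers to the same helper.)

// Hypothetical implementation of cyclicIterator, not part of the original code:
// a generator that yields the array's elements and wraps back to the start.
function* cyclicIterator(filenames) {
  let i = 0;
  while (true) {
    yield filenames[i];
    i = (i + 1) % filenames.length;
  }
}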
EDIT:
Per the suggestion by Ry, I have used a promisified readFile.
const fs = require('fs');
const util = require('util');
const readFile = util.promisify(fs.readFile);

async function getStuff(filename) {
  return await readFile(filename);
}

function readFiles(dirName, onFileContent, onError) {
  fs.readdir(dirName, function (err, filenames) {
    if (err) {
      onError(err);
      return;
    }
    var iterator = circularIterator(filenames);
    while (1) {
      fname = iterator.next().value;
      getStuff(dirName + fname).then(data => {
        console.log(data);
        onFileContent(fname, data);
      });
    }
  });
}
This hangs and causes my Chrome browser to crash, with the error below on my console:
FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - JavaScript heap out of memory
Is there a way to make this better?
fs.readFile is asynchronous, so it enqueues a task and moves on to the next iteration of the loop. The loop runs forever, so infinite tasks get enqueued and they never run. You'll need to use one of the usual ways to wait for the fs.readFile before continuing with the loop, such as an async function and a promise wrapper for fs.readFile. The await would have to be directly inside the loop: drop getStuff, put await readFile(…) inside the loop, and make the callback to fs.readdir an async function for now.
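A minimal sketch of that suggestion, assuming the cyclicIterator generator sketched above and keeping the original dirName + fname concatenation (so dirName needs a trailing slash):

const fs = require('fs');
const util = require('util');
const readFile = util.promisify(fs.readFile);

function readFiles(dirName, onFileContent, onError) {
  // Make the readdir callback an async function so await can be used inside the loop.
  fs.readdir(dirName, async function (err, filenames) {
    if (err) {
      onError(err);
      return;
    }
    const iterator = cyclicIterator(filenames);
    while (true) {
      const fname = iterator.next().value;
      try {
        // Wait for each read to finish before starting the next iteration,
        // so the loop no longer floods the task queue.
        const content = await readFile(dirName + fname);
        onFileContent(fname, content);
      } catch (readErr) {
        console.log(readErr);
      }
    }
  });
}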