I have a CSV file with 8 million records and want to read it as a stream, without loading the whole file into memory, do some data manipulation on each record, and then send the records to an API. The API accepts at most 1,000 records per request, so can anyone help me: how do I send the data in batches?

Below is the code (the endpoint URL is a placeholder). It works fine for regular files, but the ingestion into the API is very slow.

Appreciate any kind of help here. Thanks.

const fs = require('fs');
const csv = require('csv-parser'); // csv() below comes from csv-parser
const axios = require('axios');

const BASE_URL = 'https://example.com/ingest?tk=xyz'; // placeholder; real endpoint omitted
const failed_requests = [];

function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}

async function readcsv(path) {
    return new Promise((resolve, reject) => {
        let index = -1;
        fs.createReadStream(path)
            .on('error', (err) => {
                reject(err);
            })
            .pipe(csv())
            .on('data', async (data) => {
                index++;
                // crude throttle: delay each row a bit longer than the last
                await sleep(index * 50);
                // build the query string fresh for every record
                let url = BASE_URL;
                Object.entries(data).forEach(([key, value]) => {
                    url += '&ky=' + key.toLowerCase() + '&vl=' + value + '&tp=s';
                });
                const action = await axios.get(url);
                if (action.statusText !== 'OK') {
                    failed_requests.push(url);
                }
            })
            .on('end', () => {
                resolve('Streaming completed');
            });
    });
}

readcsv('somehuge.csv').then((status) => {
    console.log(`Status -- ${status}`);
}).catch((err) => {
    console.log(`Error - ${err.stack}`);
});
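
For reference, here is a rough sketch of the batching approach I have in mind. It is untested; it assumes the csv() stream from csv-parser can be consumed with for await, and it posts to a hypothetical bulk endpoint (https://example.com/bulk) that accepts a JSON array of up to 1,000 records:

const fs = require('fs');
const csv = require('csv-parser');
const axios = require('axios');

// Hypothetical bulk endpoint; the real API and payload shape may differ
async function sendBatch(batch) {
    await axios.post('https://example.com/bulk', batch);
}

async function readCsvInBatches(path, batchSize = 1000) {
    const stream = fs.createReadStream(path).pipe(csv());
    let batch = [];
    // for await pulls one row at a time and pauses the stream
    // while we await the API call, so memory stays bounded
    for await (const row of stream) {
        batch.push(row);
        if (batch.length === batchSize) {
            await sendBatch(batch);
            batch = [];
        }
    }
    if (batch.length > 0) {
        await sendBatch(batch); // flush the last partial batch
    }
}

readCsvInBatches('somehuge.csv')
    .then(() => console.log('Streaming completed'))
    .catch((err) => console.log(`Error - ${err.stack}`));

Is this the right direction for applying backpressure while batching, or is there a better pattern?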
