ozzyzig

Reputation: 719

Write final JSON to a file from repeated requests to a REST API

I am trying to build a file of JSON data from repeated calls to a REST API. The final file to be written is the sum of the data received from all the calls. At present the file is written with the contents of the first call and then overwritten by the contents of the first + second call (see the console output below the code).

As I have to make many calls, once the code is working I would like to write the file only once, after the requests have finished and the JSON string has been built. Does anyone know how I would go about doing this? Maybe with a callback (which I still don't have the hang of) that runs once the requests have finished or the JSON string has finished being built.

"use strict";
const fs = require('fs');
const request = require('request'); 
var parse = require('csv-parse');

const path = "../path tocsv.csv";
const pathJSON = "../pathtoJSON.json";
var shapes = "https://url";

var options = {
    url: '',
    method: 'GET',
    accept: "application/json", 
    json: true,  
};

var csvData = [];
var jsonData = "[";
fs.createReadStream(path)
    .pipe(parse({delimiter: ','}))
    .on('data', function(data) {
        csvData.push(data[1]);        
    })
    .on('end',function() {
    var start = Date.now(); 
    var records = csvData.length; // 2212 objects
    console.log(records);
    var dataLength = 2; // set low at the moment

    for (var i = 0; i < dataLength; i += 1) {
        var url = shapes + csvData[i];
        options.url = url; //set url query
        request(options, function(error, response, body) {   

            var time = Date.now() - start;
            var s = JSON.stringify(body.response);
            console.log( '\n' + (Buffer.byteLength(s)/1000).toFixed(2)+ 
            " kilobytes downloaded in: " + (time/1000) + " sec");
            console.log(i)
            buildJSON(s);
        });
    }

    function buildJSON(s) {
        var newStr = s.substring(1, s.length - 1);
        jsonData += newStr + ',';
        writeFile(jsonData);
    }

    function writeFile(jsonData) {
        fs.writeFile(pathJSON, jsonData, function(err) {
            if (err) {
                return console.log(err);
                } else {
                    console.log("file complete")
                }
            });
        }   

    });

128.13 kilobytes downloaded in: 2.796 sec
2
file complete

256.21 kilobytes downloaded in: 3.167 sec
2
file complete

Upvotes: 0

Views: 90

Answers (1)

Chirag Ravindra

Reputation: 4830

Perhaps writing to the file after all requests are complete will help. In the current code, the writeFile function is called each time a request completes, which overwrites the file each time.

A quick way to fix this is to count the requests (and failures) and write to the file only after all of them have completed.

"use strict";
const fs = require('fs');
const request = require('request');
var parse = require('csv-parse');

const path = "../path tocsv.csv";
const pathJSON = "../pathtoJSON.json";
var shapes = "https://url";

var options = {
    url: '',
    method: 'GET',
    accept: "application/json",
    json: true,
};

var csvData = [];
var jsonData = "[";
fs.createReadStream(path)
    .pipe(parse({
        delimiter: ','
    }))
    .on('data', function (data) {
        csvData.push(data[1]);
    })
    .on('end', function () {
        var start = Date.now();
        var records = csvData.length; // 2212 objects
        console.log(records);
        var dataLength = 2; // set low at the moment
        var jsonsDownloaded = 0; // Counter to track complete JSON requests
        var jsonsFailed = 0; // Counter to handle failed JSON requests
        for (var i = 0; i < dataLength; i += 1) {
            var url = shapes + csvData[i];
            options.url = url; //set url query
            request(options, function (error, response, body) {
                if (error) {
                    // count the failure so writeFile can still tell when all requests are done
                    jsonsFailed++;
                    writeFile(jsonData);
                    return;
                }
                jsonsDownloaded++;
                var time = Date.now() - start;
                var s = JSON.stringify(body.response);
                console.log('\n' + (Buffer.byteLength(s) / 1000).toFixed(2) +
                    " kilobytes downloaded in: " + (time / 1000) + " sec");
                console.log(i)
                buildJSON(s);
            });
        }

        function buildJSON(s) {
            var newStr = s.substring(1, s.length - 1);
            jsonData += newStr + ',';
            writeFile(jsonData);
        }

        function writeFile(jsonData) {
            // some requests are still outstanding; don't write yet
            if (dataLength - (jsonsDownloaded + jsonsFailed) > 0) {
                return;
            }
            fs.writeFile(pathJSON, jsonData, function (err) {
                if (err) {
                    return console.log(err);
                } else {
                    console.log("file complete")
                }
            });
        }

    });

Note:

Requests fired in quick succession (e.g. 2000 requests in a for loop) do not, in my experience, work well; try batching them, as sketched below. Also, doing it this way does not guarantee the order of the responses (if that is important in your use case).
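
For example, batching might look something like the sketch below. The fetchAll helper is hypothetical (not part of the code above); it fires the requests in fixed-size groups, stores each response by its input index so order is preserved, and hands everything back in a single callback once the last batch has finished.

"use strict";
const fs = require('fs');
const request = require('request');

// Hypothetical helper: fetch all urls in batches of batchSize,
// then call done(err, results) once, with responses in input order.
function fetchAll(urls, batchSize, done) {
    var results = new Array(urls.length);
    var index = 0;

    function runBatch() {
        if (index >= urls.length) {
            return done(null, results); // everything finished
        }
        var batch = urls.slice(index, index + batchSize);
        var offset = index;
        var pending = batch.length;
        index += batchSize;

        batch.forEach(function (url, j) {
            // fresh options object per request so the url is not
            // overwritten by later loop iterations
            request({ url: url, method: 'GET', json: true }, function (error, response, body) {
                if (!error) {
                    results[offset + j] = body.response; // failed requests just leave a hole
                }
                pending -= 1;
                if (pending === 0) {
                    runBatch(); // start the next batch only when this one is done
                }
            });
        });
    }

    runBatch();
}

// Usage with csvData, shapes and pathJSON as defined above:
// fetchAll(csvData.map(function (id) { return shapes + id; }), 50, function (err, results) {
//     fs.writeFile(pathJSON, JSON.stringify(results), function (err) {
//         if (err) { return console.log(err); }
//         console.log("file complete");
//     });
// });

Writing JSON.stringify(results) in one go at the end also avoids building the JSON string by hand with string concatenation.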

An alternative would be to open your file in append mode. You can do this by passing an extra options object with the flag property set to 'a' in your fs.writeFile call.

fs.writeFile(pathJSON, jsonData, {
    flag: 'a'
}, function (err) {
    if (err) {
        return console.log(err);
    } 
});
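
If you take the append route, append only the chunk built from each response rather than the whole accumulated jsonData string, otherwise every earlier response gets written to the file again on each call. A minimal sketch, reusing pathJSON and the newStr built in buildJSON above (appendChunk is a hypothetical name):

function appendChunk(newStr) {
    // write just this response's objects; earlier appends stay in the file
    fs.writeFile(pathJSON, newStr + ',', { flag: 'a' }, function (err) {
        if (err) {
            return console.log(err);
        }
    });
}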

References:

fs.writeFile Docs

File system flags

Upvotes: 1
