Yehuda Makarov
Yehuda Makarov

Reputation: 605

How to make one async request for all repositories of a user on Github

The following is probably almost what I need to be doing. But this is written in Python, and I'm not grasping some of the details: How to get all repositories of a specific GitHub user.

This is my code so far:

// Fetches every page of a user's repositories with blocking (synchronous)
// requests, accumulating the parsed pages until an empty page is returned,
// then hands the combined array to showRepositories (defined elsewhere).
function loadRepositories() {

  let arrOfRepos = []

  // i is the page number being requested; j holds the length of the most
  // recently fetched page. The loop stops when a page comes back empty.
  // NOTE: `var j` is declared in the loop body, but var hoisting makes it
  // visible to the for-header condition above.
  for (var i = 1, j = 1; j > 0; i++) {
    var req = new XMLHttpRequest();

    // Must be synchronous (third argument false) or else j is assigned below
    // before the response is received from send(). While send() is in flight,
    // j would take the length of the still-empty responseText and the loop
    // would not continue.

    req.open('GET', 'https://api.github.com/users/-user-/repos?page=' + i, false)
    req.send()
    arrOfRepos = arrOfRepos.concat(JSON.parse(req.responseText))
    var j = JSON.parse(req.responseText).length
  }
  showRepositories(arrOfRepos)
}

I would like to only make one request to the URL somehow. It seems like the only way to use the pagination is by contacting the URL multiple times, and testing to see if I should make another call.

I tried to test the req for a links key inside my showRepositories() but this.links returns undefined.

What I have works, but it is extremely slow and ugly. Is there a way to make one call to the API and build my separate responses faster?

Upvotes: 2

Views: 99

Answers (1)

Bertrand Martel
Bertrand Martel

Reputation: 45503

One way to do this is to use Promise and wait for all the promises to resolve with Promise.all. You will have to make one initial call to read the Link header and determine which page is the last. Then make (number_of_pages - 1) asynchronous calls, receiving each response in a callback function.

// Fetch all repositories for a user: one initial request reads the Link
// header to learn the last page number, then the remaining pages are
// requested in parallel and gathered with Promise.all.
var url = "https://api.github.com/users/google/repos?per_page=100";
var repos = [];

/**
 * GET `url` and resolve with the parsed JSON body.
 * When `parseLink` is true, resolve with { lastPage, data } instead, where
 * `lastPage` is the last page number taken from the response's Link header.
 * Rejects with the HTTP status code on any non-200 response.
 *
 * @param {string} url - full request URL
 * @param {boolean} [parseLink] - also extract pagination info from headers
 * @returns {Promise<Object|Array>}
 */
function sendRequest(url, parseLink) {
  return new Promise(function(resolve, reject) {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", url, true);
    // Attach the handler BEFORE send() so no state change can be missed.
    xhr.onreadystatechange = function() {
      if (xhr.readyState !== 4) return;
      if (xhr.status === 200) {
        var respJson = JSON.parse(xhr.responseText);
        if (parseLink) {
          // A promise resolves with a single value; the original
          // resolve(lastPage, respJson) silently discarded respJson,
          // so bundle both pieces into one object.
          resolve({
            lastPage: getLastPage(xhr.getResponseHeader("Link")),
            data: respJson
          });
        } else {
          resolve(respJson);
        }
      } else {
        reject(xhr.status);
      }
    };
    xhr.send();
  });
}

/**
 * Extract the number of the last page from a GitHub Link header.
 * Returns 1 when the header is absent or contains no rel="last" entry
 * (i.e. everything fit on a single page).
 *
 * @param {?string} link - value of the Link response header
 * @returns {number} last page number
 */
function getLastPage(link) {
  if (link) {
    var parts = link.split(',');
    for (var i = 0; i < parts.length; i++) {
      if (parts[i].indexOf('rel="last"') !== -1) {
        var target = parts[i].substring(parts[i].lastIndexOf("<") + 1, parts[i].lastIndexOf(">"));
        // Return a number (searchParams.get yields a string) so callers can
        // compare and iterate without coercion.
        return parseInt(new URL(target).searchParams.get('page'), 10);
      }
    }
  }
  return 1;
}

sendRequest(url, true).then(function(result) {
  var lastPage = result.lastPage;
  console.log("last page is " + lastPage);
  repos = repos.concat(result.data); // page 1 arrived with the initial call
  if (lastPage > 1) {
    var requests = [];
    for (var i = 2; i <= lastPage; i++) {
      requests.push(sendRequest(url + "&page=" + i).then(function(data) {
        repos = repos.concat(data);
      }));
    }
    Promise.all(requests).then(function() {
        console.log("all promises have been resolved, total repo count : " + repos.length);
      })
      .catch(function(err) {
        console.log('Error executing promisses', err);
      });
  } else {
    console.log("only 1 request was necessary, total repo count : " + repos.length);
  }
}).catch(function(err) {
  // Surface a failed initial request instead of leaving the rejection floating.
  console.log('Error executing promisses', err);
});

In the snippet above, the result is concatenated to the repos array each time a promise resolves, and once all promises have resolved you can process your data.

Note that you can use per_page=100 to return a maximum of 100 repos per request instead of the default 30.

Upvotes: 1

Related Questions