I've been working on a Google Apps Script that fetches data from Clockify's API and inserts it into a Google Sheet. Clockify's API returns at most 50 time entries per request, and I need all of the entries for 2023.
My initial script made a single request and then processed the data, but I realized I was only getting the first 50 entries. To fix this, I switched to a paginated approach, incrementing the page parameter on each request to fetch the next batch until all entries are retrieved. I also added a delay between requests to avoid hitting the rate limit.
After these modifications, I seem to be stuck in an infinite loop, retrieving the same first 50 entries over and over, even though I increment the page parameter on every iteration.
Can anyone shed light on what might be going wrong or provide insight on how I can better debug or refine this paginated fetching process?
function pullDataFromClockify() {
  var apiKey = "XXXX";
  var workspaceId = "XXXX";
  var apiUrl = "https://reports.api.clockify.me/v1/workspaces/" + workspaceId + "/reports/detailed";
  var startDate = "2023-01-01T00:00:00Z";
  var endDate = "2023-12-31T23:59:59Z";
  var headers = {
    "X-Api-Key": apiKey,
    "Content-Type": "application/json"
  };

  var spreadsheet = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = spreadsheet.getSheetByName("Clocky_Exp");
  if (!sheet) {
    sheet = spreadsheet.insertSheet("Clocky_Exp");
  }

  var page = 1; // Start with the first page
  var pageSize = 50; // Clockify's limit per request

  while (true) {
    var params = {
      "dateRangeStart": startDate,
      "dateRangeEnd": endDate,
      "dateRangeType": "ABSOLUTE",
      "detailedFilter": {
        "sortOrder": "DESCENDING",
        "sortColumn": "DATE"
      },
      "users": {
        "status": "ACTIVE"
      },
      "page": page,
      "pageSize": pageSize
    };

    Logger.log("Fetching page: " + page);
    Logger.log(JSON.stringify(params));

    var options = {
      "method": "POST",
      "headers": headers,
      "muteHttpExceptions": true,
      "payload": JSON.stringify(params)
    };

    var response = UrlFetchApp.fetch(apiUrl + "?_t=" + new Date().getTime(), options);
    var jsonResponse = JSON.parse(response.getContentText());

    if (!jsonResponse.timeentries || jsonResponse.timeentries.length === 0) {
      break; // Exit the loop if no more entries are found
    }

    var entries = jsonResponse.timeentries;
    for (var i = 0; i < entries.length; i++) {
      var start = new Date(entries[i].timeInterval.start);
      var end = new Date(entries[i].timeInterval.end);
      var duration = (end - start) / (1000 * 60 * 60); // Duration in decimal hours
      var formattedStartDate = Utilities.formatDate(start, "GMT", "yyyy.MM.dd");
      var row = [
        entries[i].description,
        entries[i].taskName,
        entries[i].userEmail,
        formattedStartDate,
        duration
      ];
      sheet.appendRow(row);
    }

    // If the number of entries is less than the limit, then it was the last page
    if (entries.length < pageSize) {
      Logger.log("Fetched the last page, breaking out of loop."); // <-- LOGGING
      break;
    }

    page++; // Increment the page number for the next request
    Logger.log("Incrementing to next page: " + page); // <-- LOGGING
    Logger.log(response.getContentText());
    Utilities.sleep(5000); // Sleep for five seconds to avoid hitting rate limits
  }
}
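For reference, one quick way to see whether the API is actually honouring the page parameter is to log a fingerprint of each page right after parsing the response, using only fields the script already reads (the first entry's description and start time). This is just a debugging sketch; logPageFingerprint is a hypothetical helper, not part of Clockify's API:

function logPageFingerprint(page, entries) {
  // Log the first entry of each page; if every page prints the same
  // description and start time, the API is returning page 1 every time.
  if (entries.length > 0) {
    Logger.log(
      "Page " + page + ": " + entries.length + " entries, first = " +
      entries[0].description + " @ " + entries[0].timeInterval.start
    );
  } else {
    Logger.log("Page " + page + ": empty");
  }
}

Calling logPageFingerprint(page, entries) right after var entries = jsonResponse.timeentries; would make it obvious from the execution log whether consecutive pages really differ.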
The problem was that the page and pageSize parameters were not nested in the right place in the params object: for this endpoint they have to go inside the detailedFilter block rather than at the top level of the request body, which is why the API kept returning the first page.
Modified section:
var params = {
  "dateRangeStart": startDate,
  "dateRangeEnd": endDate,
  "dateRangeType": "ABSOLUTE",
  "detailedFilter": {
    "sortOrder": "DESCENDING",
    "sortColumn": "DATE",
    "page": page,         // moved here: pagination belongs inside detailedFilter
    "pageSize": pageSize  // moved here: pagination belongs inside detailedFilter
  },
  "users": {
    "status": "ACTIVE"
  }
};
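As a side note, once pagination works, calling appendRow for every entry can be slow for a full year of data. A sketch of the usual Apps Script pattern is to collect one page's rows and write them in a single call, assuming the same five-column layout as the original script:

// Sketch: batch-write one page of rows instead of calling appendRow per entry.
var rows = [];
for (var i = 0; i < entries.length; i++) {
  var start = new Date(entries[i].timeInterval.start);
  var end = new Date(entries[i].timeInterval.end);
  rows.push([
    entries[i].description,
    entries[i].taskName,
    entries[i].userEmail,
    Utilities.formatDate(start, "GMT", "yyyy.MM.dd"),
    (end - start) / (1000 * 60 * 60) // duration in decimal hours
  ]);
}
if (rows.length > 0) {
  sheet.getRange(sheet.getLastRow() + 1, 1, rows.length, rows[0].length).setValues(rows);
}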