Reputation: 1
I fetch data from another service and save it to Google Drive in CSV format (.csv files). Each file is around 5 MB and has more than 17,000 rows. I tried to use an import script, but it didn't work — it only worked with smaller files. I just want to copy the whole CSV into my spreadsheet without caring about the old data.
function import() {
var fSource = DriveApp.getFolderById("****"); // reports_folder_id = id of folder where csv reports are saved
var fi = fSource.getFilesByName('report.csv'); // latest report file
var ss = SpreadsheetApp.openById("***"); // data_sheet_id = id of spreadsheet that holds the data to be updated with new report data
var sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName("Sheet1")
if ( fi.hasNext() ) { // proceed if "report.csv" file exists in the reports folder
var file = fi.next();
var csv = file.getBlob().getDataAsString();
var csvData = CSVToArray(csv); // see below for CSVToArray function
for ( var i=0, lenCsv=csvData.length; i<lenCsv; i++ ) {
sheet.getRange(i+1, 1, 1, csvData[i].length).setValues(new Array(csvData[i]));
}
/*
** report data is now in 'NEWDATA' sheet in the spreadsheet - process it as needed,
** then delete 'NEWDATA' sheet using ss.deleteSheet(newsheet)
*/
// rename the report.csv file so it is not processed on next scheduled run
// file.setName("report-"+(new Date().toString())+".csv");
}
};
Upvotes: 0
Views: 175
Reputation: 2788
I'm guessing your script is probably timing out if it is processing that many lines. Here are a few things you can do to optimise your code:
Upvotes: 1