Reputation: 15
I have this script that iterates over a folder and retrieves the name, creation date, size, and ID of each file.
function listFilesInFolder(id) {
  var folder = DriveApp.getFolderById(''); // folder ID goes here
  var contents = folder.getFiles();
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName("Imported");
  sheet.clear();
  sheet.appendRow(["Name", "Date", "Size", "Id"]);
  while (contents.hasNext()) {
    var file = contents.next();
    var name = file.getName();
    var date = file.getDateCreated();
    var size = file.getSize();
    var fileId = file.getId();
    // One appendRow call per file.
    sheet.appendRow([name, date, size, fileId]);
  }
}
My problem is that it takes too much time to go through the whole folder; it usually hits the run-time limit before it finishes. The folder contains 1000+ different files, and we automatically upload new files daily.
Is there any way to make this script more efficient?
Upvotes: 1
Views: 233
Reputation: 201643
I believe your goal is to reduce the process cost of this script so that it finishes within the run-time limit.
In your script, appendRow is used in a loop. In this case, the process cost becomes high, because every appendRow call is a separate write to the spreadsheet.
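For illustration, here is a minimal sketch of just that change: it keeps your DriveApp loop but collects the rows in an array and writes them with a single setValues call. The sheet name "Imported" is taken from your question; the folder ID placeholder is an assumption.

function listFilesBatched() {
  var folder = DriveApp.getFolderById("###"); // placeholder folder ID (assumption)
  var files = folder.getFiles();
  var rows = [["Name", "Date", "Size", "Id"]];
  while (files.hasNext()) {
    var file = files.next();
    rows.push([file.getName(), file.getDateCreated(), file.getSize(), file.getId()]);
  }
  var sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName("Imported");
  sheet.clear();
  // One setValues call replaces 1000+ appendRow calls.
  sheet.getRange(1, 1, rows.length, rows[0].length).setValues(rows);
}

Retrieving the file list with the Drive API instead of iterating over the files with DriveApp reduces the process cost further. When these points are reflected in your script, it becomes as follows.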
Before you use this script, please enable the Drive API in Advanced Google services.
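If you prefer enabling it through the project manifest rather than the script editor UI, this is a sketch of the appsscript.json entry; Drive API v2 is an assumption here, matching the fields (items, title, createdDate) used in the script below.

{
  "dependencies": {
    "enabledAdvancedServices": [
      { "userSymbol": "Drive", "serviceId": "drive", "version": "v2" }
    ]
  }
}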
function listFilesInFolder(id) {
  var folderId = "###"; // Please set the folder ID. If you want to use "id" for this, you can use: var folderId = id;

  // 1. Retrieve the file list using the Drive API.
  var ar = [["Name", "Date", "Size", "Id"]];
  var pageToken = "";
  do {
    const res = Drive.Files.list({
      corpora: "allDrives",
      includeTeamDriveItems: true,
      supportsAllDrives: true,
      maxResults: 1000,
      pageToken: pageToken,
      q: `'${folderId}' in parents`
    });
    ar = ar.concat(res.items.map(({title, createdDate, fileSize, id}) => [title, createdDate, fileSize || 0, id]));
    pageToken = res.nextPageToken; // undefined after the last page, which ends the loop
  } while (pageToken);

  // 2. Put the file list into the Spreadsheet with a single setValues call.
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName("Imported");
  sheet.clear();
  sheet.getRange(1, 1, ar.length, ar[0].length).setValues(ar);
}
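Since you mentioned that files are uploaded automatically every day, one way to keep the sheet current is a time-driven trigger. This is a hypothetical setup (the function name createDailyTrigger and the hour are assumptions):

function createDailyTrigger() {
  // Runs listFilesInFolder once per day around 6 AM in the script's time zone.
  ScriptApp.newTrigger("listFilesInFolder")
    .timeBased()
    .everyDays(1)
    .atHour(6)
    .create();
}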
Upvotes: 1