Optimizing Google Sheets .appendRow()

2019-08-19 02:46发布

I have been creating a script to iterate through a folder structure and copy specific data to rows in a Google Sheet. The folder structure is similar to this

  • Root folder
    1. Street Name 1
      • 102
      • 104
      • etc
    2. Street Name 2
      • 98-110

After some work, I came up with this code

        // GetFolders function goes through first and second level subfolder 
using while loop
// Requires folderID string, an array for storage, and an active sheet to be 
passed as arguments
    function GetFolders(folderID, dataArray, sheet)
    {
      var rootFolder = DriveApp.getFolderById(folderID).getFolders();

      while (rootFolder.hasNext()) 
  {
var subFolder = rootFolder.next();
var subFolderName = subFolder.getName();
var subFolderID = subFolder.getId();

//append first subfolder name to array
dataArray[0] = subFolderName;

var subFolderIterator = DriveApp.getFolderById(subFolderID).getFolders();

while (subFolderIterator.hasNext())
{
  var subFolder2 = subFolderIterator.next();
  var subFolder2Name = subFolder2.getName();

  //append second subfolder name to array
  dataArray[1] = subFolder2Name;

  //append second subfolder hyperlink to array
  dataArray[2] = subFolder2.getUrl();

  //default flag column to blank string
  dataArray[3] = '';

  //check subFolder2 for hyphen in name
  if (subFolder2Name.indexOf('-') > -1) //if a hyphen is present in foldername
  {
    //append flag to dataArray
    dataArray[3] = 'X'; 
  } //end if      

  //write array contents to next empty row in spreadsheet
  sheet.appendRow(dataArray);        

    } //end while
  } //end while
} //end GetFolders()

At first, this appeared to work wonderfully, writing about 3,500 rows before hitting the timeout limit. However, I have over 12,000 rows that need to be written (preferably with one single execution). Looking at Google's "Best Practices" page tells me that this can be more efficient, but I'm at a loss with how to go about doing this. From my research, it seems that batching would be my best option. Does anyone know a good way to implement this in my script?

1条回答
甜甜的少女心
2楼-- · 2019-08-19 03:43

You can persist states by saving continuation token of the FileIterator using PropertiesService. The script below checks if the script is nearing the 5-minute time quota limit, saves the continuation token and creates a trigger for the function to run again after set time:

// processFiles iterates over all files visible to DriveApp, resuming across
// executions to work around the Apps Script execution-time quota.
//
// Mechanism: shortly before the ~6-minute limit (checked at 290 s), the
// FileIterator's continuation token is saved via PropertiesService and a
// one-shot time-based trigger re-invokes this function after 5 minutes.
// On re-entry the saved token is passed to DriveApp.continueFileIterator()
// so iteration picks up where it left off.
//
// NOTE(review): the trigger-cleanup loop deletes ALL project triggers, not
// just the one created here — confirm that is acceptable for this project.
function processFiles() {

  var files, continuationToken;

  var sheet = SpreadsheetApp.openById(SHEET_ID)
                            .getSheets()[0];

  var startTimeInMillisec = new Date().getTime();

  var triggers = ScriptApp.getProjectTriggers();

  // Delete previously created triggers for this project to avoid clutter.
  for (var i = 0; i < triggers.length; i++) {
    ScriptApp.deleteTrigger(triggers[i]);
  }

  var props = PropertiesService.getScriptProperties();
  continuationToken = props.getProperty("continuationToken");

  if (!continuationToken) {
    // No token saved: start from the beginning.
    files = DriveApp.getFiles();
  } else {
    // Pass the saved token back to DriveApp to continue where we left off.
    files = DriveApp.continueFileIterator(continuationToken);
  }

  while (files.hasNext()) {

    // BUG FIX: was `var file = file.next()`, which referenced the not-yet-
    // assigned `file` instead of the iterator `files` and failed on the
    // first pass through the loop.
    var file = files.next();

    // process file here

    var elapsedTimeInMillisec = new Date().getTime() - startTimeInMillisec;

    // Check if we are nearing the execution-time limit (290 s budget).
    if (elapsedTimeInMillisec > 290000) {

      continuationToken = files.getContinuationToken();

      props.setProperty("continuationToken", continuationToken);

      // Schedule this function to execute again after 5 minutes.
      ScriptApp.newTrigger("processFiles")
               .timeBased()
               .after(300000)
               .create();
      return;
    }
  }

  // Iteration finished: clear the saved token so a future run starts fresh
  // instead of resuming from an exhausted iterator.
  props.deleteProperty("continuationToken");
}
查看更多
登录 后发表回答