Reputation: 3
I'm fairly new to Google Apps Script and I am trying to get SQL data imported into a Google spreadsheet as quickly as possible.
I have been using this script:
function readItems() {
  var conn = Jdbc.getConnection("jdbc:sqlserver://server", "UN", "PW");
  var stmt = conn.createStatement();
  var rs = stmt.executeQuery("SELECT * FROM [dbo].[PriceList] WHERE [Product Line]<>'AL' AND [Product Line]<>'EL' AND [Product Line]<>'Accessories' ORDER BY [Product Line],Family,[Item Code]");
  var doc = SpreadsheetApp.getActive().getSheetByName('Data');
  var cell = doc.getRange('a1');
  var row = 0;
  while (rs.next()) {
    cell.offset(row, 0).setValue(rs.getString(1));
    cell.offset(row, 1).setValue(rs.getString(2));
    cell.offset(row, 2).setValue(rs.getString(3));
    cell.offset(row, 3).setValue(rs.getString(4));
    cell.offset(row, 4).setValue(rs.getString(5));
    row++;
  }
  rs.close();
  stmt.close();
  conn.close();
}
For this particular query it reads and writes ~1200 rows. The initial run takes about 1.5 minutes to complete, but there seems to be some sort of caching, because re-running the script afterwards only takes about 30 seconds.
That's a decent run time, but it is still considerably longer than connecting to the same data from Excel.
So, with my limited knowledge, I have been trying to make it go faster.
After some brief googling I came across this article, which indicated it would be more efficient to use an array to hold the data.
With that knowledge and some additional googling I came across this script:
var address = "server"; // ex. '10.1.1.1:1433'
var user = "UN";
var userPwd = "PW";
var db = "DB";
var dbUrl = 'jdbc:sqlserver://' + address + ';databaseName=' + db;

function UpdatePriceList() {
  var conn = Jdbc.getConnection(dbUrl, user, userPwd);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery("SELECT * FROM [dbo].[PriceList] WHERE [Product Line]='AL' OR [Product Line]='EL' OR [Product Line]='Accessories' ORDER BY [Product Line],Family,[Item Code]");
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var sheet = SpreadsheetApp.getActive().getSheetByName('Data');
  sheet.clearContents();
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  sheet.appendRow(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    sheet.appendRow(arr);
  }
  results.close();
  stmt.close();
  conn.close();
  sheet.autoResizeColumns(1, numCols + 1);
}
To my surprise, the same query with the new script takes 5-7 minutes to complete, and I am at a loss as to why. There also doesn't seem to be any caching taking place as there is with the other script: it consistently takes about the same amount of time on every run, whereas the former gains considerably after the initial run.
I like the more dynamic nature of the latter, since I don't have to adjust the loop contents to match the query results when I reuse the script, but the difference in runtime kills that novelty.
Can anyone advise on how to adapt the dynamic nature of the latter to the former, and/or offer any advice on how to improve this script to achieve a quicker runtime?
Upvotes: 0
Views: 861
Reputation: 1245
This code reads rows out of the results object one by one and appends each to the sheet. Replace this -
while (results.next()) {
  arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(results.getString(col + 1));
  }
  sheet.appendRow(arr);
}
with this -
var rows = [];
while (results.next()) {
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(results.getString(col + 1));
  }
  rows.push(arr);
}
sheet.getRange(2, 1, rows.length, numCols).setValues(rows);
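The slowdown comes from calling appendRow once per row: each call is a separate write to the spreadsheet, so ~1200 rows means ~1200 round trips. Collecting everything in a 2D array and writing it with a single setValues call replaces those with one batch write, while keeping the dynamic column handling you liked. For reference, a complete version with this change applied might look like the following - a sketch only, reusing the dbUrl, user and userPwd variables from the question and also folding the header row into the same batch:
function UpdatePriceList() {
  var conn = Jdbc.getConnection(dbUrl, user, userPwd);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery("SELECT * FROM [dbo].[PriceList] WHERE [Product Line]='AL' OR [Product Line]='EL' OR [Product Line]='Accessories' ORDER BY [Product Line],Family,[Item Code]");
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();

  // Build the header row and all data rows in memory first
  var rows = [];
  var header = [];
  for (var col = 0; col < numCols; col++) {
    header.push(metaData.getColumnName(col + 1));
  }
  rows.push(header);
  while (results.next()) {
    var arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    rows.push(arr);
  }
  results.close();
  stmt.close();
  conn.close();

  // Write everything to the sheet in a single batch call
  var sheet = SpreadsheetApp.getActive().getSheetByName('Data');
  sheet.clearContents();
  sheet.getRange(1, 1, rows.length, numCols).setValues(rows);
  sheet.autoResizeColumns(1, numCols);
}
Note that the JDBC read itself is unchanged; only the number of spreadsheet writes is reduced.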
Upvotes: 1