aharnly-amplify opened this issue 4 years ago
This is happening to me as well!
Also experiencing this on a handful of tables. Anyone have any luck diagnosing?
I abandoned this tap and wrote my own simple extraction in Google Apps Script, which I'll paste below. I then use a GSheets importer to load the data.
// ************* Main ***************
// Each entry maps a Quickbase table/report pair to a destination sheet tab.
const EXAMPLE_SYNCS = [
  {
    "sheet": "Sheet1",
    "table": "xxxxxxx",
    "report": 1
  },
  {
    "sheet": "Sheet2",
    "table": "xxxxxxx",
    "report": 1
  }
];

// Quickbase REST API headers; fill in your own user token and realm hostname.
const QB_HEADERS = {
  "Authorization": "QB-USER-TOKEN xxxxxxyyyyyzzzz",
  "QB-Realm-Hostname": "example.quickbase.com",
  "Content-Type": "application/json;charset=UTF-8"
};
// ************** Controls ********************

/**
 * Given an array of objects with {sheet, table, report}, run syncs for each.
 */
function syncQuickbaseReportsToSheets(list) {
  list.forEach(s => syncQuickbaseReportToSheet(s.table, s.report, s.sheet));
}

function syncQuickbaseReportToSheet(tableId, reportId, sheetName) {
  console.log(sheetName, ': running report: ', tableId, ' ...');
  const data = runReport(tableId, reportId);
  const rowCount = data.rows.length;
  console.log(sheetName, ': Report complete. Fetched ', rowCount, ' rows.');
  const sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName(sheetName);
  if (rowCount > 0) {
    console.log(sheetName, ': Writing sheet...');
    replaceSheetContent(sheet, data);
    console.log(sheetName, ': Sheet complete.');
  }
}
// *************** GSheet ****************

// Clear the sheet, resize it if needed, then write headers plus data rows.
function replaceSheetContent(sheet, data) {
  sheet.getDataRange().clear();
  ensureWidth(sheet, data.headers.length);
  ensureHeight(sheet, data.rows.length + 1); // extra row for headers
  console.log(sheet.getName(), ': Setting headers');
  sheet.getRange(1, 1, 1, data.headers.length).setValues([data.headers]);
  console.log(sheet.getName(), ': Setting rows - ', data.rows.length, 'rows to set.');
  sheet.getRange(2, 1, data.rows.length, data.headers.length).setValues(data.rows);
}

// Grow the sheet if it has fewer rows than the data needs.
function ensureHeight(sheet, height) {
  const sheetHeight = sheet.getMaxRows();
  const difference = height - sheetHeight;
  if (difference > 0) {
    console.log(sheet.getName(), ': Adding', difference, 'rows');
    sheet.insertRows(sheetHeight, difference);
  }
}

// Grow the sheet if it has fewer columns than the data needs.
function ensureWidth(sheet, width) {
  const sheetColumns = sheet.getMaxColumns();
  if (width > sheetColumns) {
    sheet.insertColumns(sheetColumns, width - sheetColumns);
  }
}
// ************* Quickbase ****************

// Multi-value fields come back as arrays; flatten them to a delimited string.
function valueToString(value) {
  if (Array.isArray(value)) {
    return value.join(";");
  } else {
    return value;
  }
}

// Run a report and page through it until all records have been fetched.
function runReport(table, reportId) {
  const first = runReportRaw(table, reportId);
  const fields = first.fields;
  const headers = fields.map(f => f.label);
  const recordCount = first.metadata.totalRecords;
  const toRow = r => fields.map(f => valueToString(r[f.id.toString()].value));
  let rows = first.data.map(toRow);
  while (rows.length < recordCount) {
    const page = runReportRaw(table, reportId, rows.length);
    const newRows = page.data.map(toRow);
    if (newRows.length === 0) {
      break; // guard: if the API stops returning rows, stop rather than loop forever
    }
    rows = rows.concat(newRows);
  }
  return {
    headers: headers,
    rows: rows
  };
}
// Fetch a single page of the Quickbase "run report" endpoint.
function runReportRaw(table, reportId, skip = 0) {
  const url = `https://api.quickbase.com/v1/reports/${reportId}/run?tableId=${table}&top=10&skip=${skip}`;
  const options = {
    method: "POST",
    headers: QB_HEADERS
  };
  const response = UrlFetchApp.fetch(url, options);
  return JSON.parse(response.getContentText());
}
In my first attempt to use this connector, two of the tables in my QB database got caught in infinite loops – I saw them extracting and loading 100 records at a time, over and over and over, loading thousands of rows until I stopped them. (The tables only have 100 or 200 rows.)
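My working guess is a pagination cursor that never advances: if the loop's offset isn't bumped by the number of records actually returned, or the exit check never compares it against the record total, the same page gets re-requested forever. Roughly the shape I'd expect the loop to need (illustrative JavaScript, `fetchPage` is a made-up helper, not the tap's actual code):

// Illustrative only, not the tap's code. A paging loop needs the offset to
// advance and an exit condition tied to it; otherwise it refetches the same
// page indefinitely. `fetchPage` stands in for one API call.
function fetchAllRows(fetchPage) {
  let rows = [];
  let offset = 0;
  while (true) {
    const page = fetchPage(offset);          // e.g. ?skip=<offset>&top=100
    rows = rows.concat(page.rows);
    offset += page.rows.length;              // advance by what actually came back
    if (page.rows.length === 0) break;       // empty page: done, or the API stalled
    if (offset >= page.totalRecords) break;  // reached the reported total
  }
  return rows;
}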
I don't see anything obvious about these tables that would make them different than others that loaded fine – but I will try to whittle them down to a minimal example. In the meantime, has anyone else encountered this behavior?
The extraction logs look like this: