We solved it by adding batching to the script, which reduced the traffic drastically. Airtable's updateRecordsAsync accepts at most 50 records per call, so the script queues the updates in batches of 25 and writes them one batch at a time.
// Fetch records from TABLE1 (only the lookup key field we need)
let table1 = base.getTable('Supplier summary');
let result1 = await table1.selectRecordsAsync({fields: ['Supplier_id']});
// Fetch records from TABLE2 (only the key formula field we need)
let table2 = base.getTable('ERP_suppliers_hub');
let result2 = await table2.selectRecordsAsync({fields: ['supplier_id_key_formula']});
// Build a map for quick lookups in TABLE1
let keyMap = new Map(result1.records.map(record1 => [record1.getCellValueAsString('Supplier_id'), record1]));
// Split updates into smaller batches
const batchSize = 25; // Adjust as needed; updateRecordsAsync allows at most 50 records per call
const batches = [];
let currentBatch = [];
// Loop through TABLE2 records
for (let record2 of result2.records) {
    let key = record2.getCellValueAsString('supplier_id_key_formula');
    // Look up the matching TABLE1 record by key
    let keyRecord = keyMap.get(key);
    // Build an update object for the batch; clear the link when there is no match
    currentBatch.push({
        id: record2.id,
        fields: {
            'supplier_id_key1_linked': keyRecord ? [{ id: keyRecord.id }] : []
        }
    });
    // When the batch is full, queue it and start a new one
    if (currentBatch.length === batchSize) {
        batches.push(currentBatch);
        currentBatch = [];
    }
}
// Queue the last batch (which may be smaller than batchSize)
if (currentBatch.length > 0) {
    batches.push(currentBatch);
}
// Apply the batches sequentially; awaiting each call keeps the request traffic low
for (let batch of batches) {
    await table2.updateRecordsAsync(batch);
}