diff --git a/src/pg/brc20/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts
index b7eba051..dd0ee33a 100644
--- a/src/pg/brc20/brc20-pg-store.ts
+++ b/src/pg/brc20/brc20-pg-store.ts
@@ -49,7 +49,7 @@ export class Brc20PgStore extends BasePgStoreModule {
       if (blockHeight < BRC20_GENESIS_BLOCK) continue;
       logger.info(`Brc20PgStore scanning block ${blockHeight}`);
       await this.sqlWriteTransaction(async sql => {
-        const limit = 5000;
+        const limit = 100_000;
         let offset = 0;
         do {
           const block = await sql`
diff --git a/src/pg/helpers.ts b/src/pg/helpers.ts
index c9f0f3be..5506bedc 100644
--- a/src/pg/helpers.ts
+++ b/src/pg/helpers.ts
@@ -61,18 +61,6 @@ export function throwOnFirstRejected(settles: {
   return values;
 }
 
-/**
- * Divides array into equal chunks
- * @param arr - Array
- * @param chunkSize - Chunk size
- * @returns Array of arrays
- */
-export function chunkArray<T>(arr: T[], chunkSize: number): T[][] {
-  const result: T[][] = [];
-  for (let i = 0; i < arr.length; i += chunkSize) result.push(arr.slice(i, i + chunkSize));
-  return result;
-}
-
 export function objRemoveUndefinedValues(obj: object) {
   Object.keys(obj).forEach(key => (obj as any)[key] === undefined && delete (obj as any)[key]);
 }
diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts
index d2efad4a..484cbb90 100644
--- a/src/pg/pg-store.ts
+++ b/src/pg/pg-store.ts
@@ -18,7 +18,7 @@ import { ENV } from '../env';
 import { Brc20PgStore } from './brc20/brc20-pg-store';
 import { CountsPgStore } from './counts/counts-pg-store';
 import { getIndexResultCountType } from './counts/helpers';
-import { assertNoBlockInscriptionGap, chunkArray, getInscriptionRecursion } from './helpers';
+import { assertNoBlockInscriptionGap, getInscriptionRecursion } from './helpers';
 import {
   DbFullyLocatedInscriptionResult,
   DbInscription,
@@ -254,9 +254,9 @@ export class PgStore extends BasePgStore {
             currentBlockHeight: currentBlockHeight,
             newBlockHeight: block_height,
           });
-          // Divide insertion array into chunks of 2000 in order to avoid the postgres limit of 65534
+          // Divide insertion array into chunks of 4000 in order to avoid the postgres limit of 65534
           // query params.
-          for (const writeChunk of batchIterate(writes, 2000))
+          for (const writeChunk of batchIterate(writes, 4000))
             await this.insertInscriptions(writeChunk);
           updatedBlockHeightMin = Math.min(updatedBlockHeightMin, event.block_identifier.index);
           if (ENV.BRC20_BLOCK_SCAN_ENABLED)
@@ -903,7 +903,7 @@ export class PgStore extends BasePgStore {
     }
     if (inserts.length === 0) return;
     await this.sqlWriteTransaction(async sql => {
-      for (const chunk of chunkArray(inserts, 500))
+      for (const chunk of batchIterate(inserts, 500))
         await sql`
           INSERT INTO inscription_recursions ${sql(chunk)}
           ON CONFLICT ON CONSTRAINT inscription_recursions_unique DO NOTHING