From e16294650b9012549739ce8686e3bb03d5e1eb06 Mon Sep 17 00:00:00 2001 From: Ryan Waits Date: Mon, 12 Feb 2024 16:52:18 -0600 Subject: [PATCH 01/10] docs: update openapi description update openapi description Update src/api/schemas.ts Co-authored-by: max-crawford <102705427+max-crawford@users.noreply.github.com> Update src/api/schemas.ts Co-authored-by: max-crawford <102705427+max-crawford@users.noreply.github.com> Update src/api/schemas.ts Co-authored-by: max-crawford <102705427+max-crawford@users.noreply.github.com> Update src/api/schemas.ts Co-authored-by: max-crawford <102705427+max-crawford@users.noreply.github.com> --- src/api/schemas.ts | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/src/api/schemas.ts b/src/api/schemas.ts index 7c91789a..973ba9f6 100644 --- a/src/api/schemas.ts +++ b/src/api/schemas.ts @@ -8,8 +8,20 @@ export const OpenApiSchemaOptions: SwaggerOptions = { openapi: { info: { title: 'Ordinals API', - description: - 'A service that indexes Bitcoin Ordinals data and exposes it via REST API endpoints.', + description: ` +The [Ordinals API](https://docs.hiro.so/ordinals-api) is a service that indexes Bitcoin Ordinals data and exposes it via REST API endpoints. + +Here are the key features of the Ordinals API: + +- **Ordinal Inscription Ingestion**: The Ordinals API helps with the complete ingestion of ordinal inscriptions. Using our endpoints, you can retrieve the metadata for a particular inscription, all inscriptions held by a particular address, trading activity for inscriptions, and more. + +- **BRC-20 Support**: The Ordinals API also offers support for BRC-20 tokens, a fungible token standard built on top of ordinal theory. Retrieve data for a particular BRC-20 token, a user's BRC-20 holdings, marketplace activity, and more. + +- **REST JSON Endpoints with ETag Caching**: The Ordinals API provides easy-to-use REST endpoints that return responses in JSON format. It also supports ETag caching, which allows you to cache responses based on inscriptions. This helps optimize performance and reduce unnecessary requests. + + +The source code for this project is available in our [GitHub repository](https://github.com/hirosystems/ordinals-api). You can explore the codebase, contribute, and raise issues or pull requests. + `, version: SERVER_VERSION.tag, }, externalDocs: { From 02aa62883a676a767e2c2ae47f9d90a4cb28ee12 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Feb 2024 11:50:30 -0600 Subject: [PATCH 02/10] ci: allow workflow_dispatch for vercel build --- .github/workflows/vercel.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/vercel.yml b/.github/workflows/vercel.yml index 14543fd9..6eaae6bb 100644 --- a/.github/workflows/vercel.yml +++ b/.github/workflows/vercel.yml @@ -13,6 +13,7 @@ on: release: types: - published + workflow_dispatch: jobs: vercel: From 36950861db01dbad49823191c6622ad5920e581d Mon Sep 17 00:00:00 2001 From: Scott Wild Date: Tue, 23 Apr 2024 00:16:46 -0500 Subject: [PATCH 03/10] docs: Add database requirements to README (#341) --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 4fee10ea..ebb746d9 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,8 @@ Before you start, you'll need to have access to: 1. An [Ordhook node](https://github.com/hirosystems/ordhook) with a fully indexed Ordinals database. -1. A local writeable Postgres database for data storage +1. A local writeable Postgres database for data storage. 
+ * We recommended a 1TB volume size here. ## Running the API From 56a88518b1ffe549524941e4d94d6347d11c98f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Tue, 23 Apr 2024 10:02:06 -0600 Subject: [PATCH 04/10] feat!: ingest BRC20 data from ordhook (#347) * chore: progress * feat: new apply * fix: minted supply * fix: track tx counts * feat: rollbacks * fix: transfer rollbacks * fix: activity addresses * fix: multiple transfer test * fix: holders * fix: operation indexes * fix: style * chore: upgrade chainhook client * fix: add brc20 to default predicate --- migrations/1684175792528_brc20-mints.ts | 51 - migrations/1684175795592_brc20-transfers.ts | 55 - migrations/1684175810998_brc20-balances.ts | 63 - migrations/1684344022290_brc20-events.ts | 60 - migrations/1692132685000_brc20-supply-view.ts | 17 - ...692188000000_brc20-deploys-ticker-index.ts | 15 - ...692853050488_brc20-mint-transfer-unique.ts | 18 - .../1692891772000_brc20-events-types.ts | 38 - ...693428793416_brc20-minted-supply-column.ts | 36 - .../1694081119000_brc20-counts-by-tx-count.ts | 13 - ...797181616_brc20-counts-by-address-event.ts | 69 - .../1695243716885_brc20-events-addresses.ts | 35 - ...c20-total-balances-address-deploy-index.ts | 14 - .../1708471015438_remove-unused-indexes.ts | 18 - .../1711465842961_brc20-deploy-self-mint.ts | 19 - ...ploys.ts => 1711575178681_brc20-tokens.ts} | 42 +- migrations/1711575178682_brc20-operations.ts | 68 + ... => 1711575178683_brc20-total-balances.ts} | 25 +- ...711575178684_brc20-counts-by-operation.ts} | 4 +- ...8686_brc20-counts-by-address-operation.ts} | 14 +- package-lock.json | 43 +- package.json | 2 +- src/api/util/helpers.ts | 17 +- src/env.ts | 3 - src/ordhook/server.ts | 1 + src/pg/brc20/brc20-pg-store.ts | 979 ++-- src/pg/brc20/helpers.ts | 166 +- src/pg/brc20/types.ts | 112 +- src/pg/pg-store.ts | 10 +- tests/brc-20/api.test.ts | 1394 ++++++ tests/brc-20/brc20.test.ts | 4073 ++--------------- tests/helpers.ts | 172 +- 32 files changed, 2612 insertions(+), 5034 deletions(-) delete mode 100644 migrations/1684175792528_brc20-mints.ts delete mode 100644 migrations/1684175795592_brc20-transfers.ts delete mode 100644 migrations/1684175810998_brc20-balances.ts delete mode 100644 migrations/1684344022290_brc20-events.ts delete mode 100644 migrations/1692132685000_brc20-supply-view.ts delete mode 100644 migrations/1692188000000_brc20-deploys-ticker-index.ts delete mode 100644 migrations/1692853050488_brc20-mint-transfer-unique.ts delete mode 100644 migrations/1692891772000_brc20-events-types.ts delete mode 100644 migrations/1693428793416_brc20-minted-supply-column.ts delete mode 100644 migrations/1694081119000_brc20-counts-by-tx-count.ts delete mode 100644 migrations/1694797181616_brc20-counts-by-address-event.ts delete mode 100644 migrations/1695243716885_brc20-events-addresses.ts delete mode 100644 migrations/1706894983174_brc20-total-balances-address-deploy-index.ts delete mode 100644 migrations/1711465842961_brc20-deploy-self-mint.ts rename migrations/{1684174644336_brc20-deploys.ts => 1711575178681_brc20-tokens.ts} (60%) create mode 100644 migrations/1711575178682_brc20-operations.ts rename migrations/{1694021174916_brc20-total-balances.ts => 1711575178683_brc20-total-balances.ts} (59%) rename migrations/{1694295793981_brc20-event-counts.ts => 1711575178684_brc20-counts-by-operation.ts} (86%) rename migrations/{1694299763914_brc20-token-count.ts => 1711575178686_brc20-counts-by-address-operation.ts} (55%) create mode 100644 tests/brc-20/api.test.ts 
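Before the diff body, a brief orientation on what this commit changes: the API stops computing BRC-20 state from raw inscriptions and instead consumes per-transaction BRC-20 metadata emitted by ordhook (`tx.metadata.brc20_operation`, handled in `src/pg/brc20/brc20-pg-store.ts` further down in this patch). The sketch below is an illustrative reconstruction of that payload shape inferred only from the fields the handler reads; the exact types are assumptions, and the authoritative definitions ship with `@hirosystems/chainhook-client` (bumped to ^1.8.0 by this patch).

```ts
// Illustrative sketch only — not the library's exported types. Field names mirror what
// updateBrc20Operations() reads from `tx.metadata.brc20_operation` below; the string/boolean
// types are assumptions. See @hirosystems/chainhook-client ^1.8.0 for the real definitions.
type Brc20OperationMetadata =
  | {
      deploy: {
        tick: string;
        max: string;
        lim: string;
        dec: string;
        address: string;
        inscription_id: string;
        self_mint: boolean;
      };
    }
  | { mint: { tick: string; amt: string; address: string; inscription_id: string } }
  | { transfer: { tick: string; amt: string; address: string; inscription_id: string } }
  | {
      transfer_send: {
        tick: string;
        amt: string;
        sender_address: string;
        receiver_address: string;
        inscription_id: string;
      };
    };
```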
diff --git a/migrations/1684175792528_brc20-mints.ts b/migrations/1684175792528_brc20-mints.ts deleted file mode 100644 index 9435bf73..00000000 --- a/migrations/1684175792528_brc20-mints.ts +++ /dev/null @@ -1,51 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createTable('brc20_mints', { - id: { - type: 'bigserial', - primaryKey: true, - }, - inscription_id: { - type: 'bigint', - notNull: true, - }, - brc20_deploy_id: { - type: 'bigint', - notNull: true, - }, - block_height: { - type: 'bigint', - notNull: true, - }, - tx_id: { - type: 'text', - notNull: true, - }, - address: { - type: 'text', - notNull: true, - }, - amount: { - type: 'numeric', - notNull: true, - }, - }); - pgm.createConstraint( - 'brc20_mints', - 'brc20_mints_inscription_id_fk', - 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_mints', - 'brc20_mints_brc20_deploy_id_fk', - 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' - ); - pgm.createIndex('brc20_mints', ['inscription_id']); - pgm.createIndex('brc20_mints', ['brc20_deploy_id']); - pgm.createIndex('brc20_mints', ['block_height']); - pgm.createIndex('brc20_mints', ['address']); -} diff --git a/migrations/1684175795592_brc20-transfers.ts b/migrations/1684175795592_brc20-transfers.ts deleted file mode 100644 index 30f08071..00000000 --- a/migrations/1684175795592_brc20-transfers.ts +++ /dev/null @@ -1,55 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createTable('brc20_transfers', { - id: { - type: 'bigserial', - primaryKey: true, - }, - inscription_id: { - type: 'bigint', - notNull: true, - }, - brc20_deploy_id: { - type: 'bigint', - notNull: true, - }, - block_height: { - type: 'bigint', - notNull: true, - }, - tx_id: { - type: 'text', - notNull: true, - }, - from_address: { - type: 'text', - notNull: true, - }, - to_address: { - type: 'text', - }, - amount: { - type: 'numeric', - notNull: true, - }, - }); - pgm.createConstraint( - 'brc20_transfers', - 'brc20_transfers_inscription_id_fk', - 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_transfers', - 'brc20_transfers_brc20_deploy_id_fk', - 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' - ); - pgm.createIndex('brc20_transfers', ['inscription_id']); - pgm.createIndex('brc20_transfers', ['brc20_deploy_id']); - pgm.createIndex('brc20_transfers', ['block_height']); - pgm.createIndex('brc20_transfers', ['from_address']); - pgm.createIndex('brc20_transfers', ['to_address']); -} diff --git a/migrations/1684175810998_brc20-balances.ts b/migrations/1684175810998_brc20-balances.ts deleted file mode 100644 index 4f918dff..00000000 --- a/migrations/1684175810998_brc20-balances.ts +++ /dev/null @@ -1,63 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createTable('brc20_balances', { - id: { - 
type: 'bigserial', - primaryKey: true, - }, - inscription_id: { - type: 'bigint', - notNull: true, - }, - location_id: { - type: 'bigint', - notNull: true, - }, - brc20_deploy_id: { - type: 'bigint', - notNull: true, - }, - address: { - type: 'text', - }, - avail_balance: { - type: 'numeric', - notNull: true, - }, - trans_balance: { - type: 'numeric', - notNull: true, - }, - type: { - type: 'smallint', - notNull: true, - }, - }); - pgm.createConstraint( - 'brc20_balances', - 'brc20_balances_inscription_id_fk', - 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_balances', - 'brc20_balances_location_id_fk', - 'FOREIGN KEY(location_id) REFERENCES locations(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_balances', - 'brc20_balances_brc20_deploy_id_fk', - 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_balances', - 'brc20_balances_inscription_id_type_unique', - 'UNIQUE(inscription_id, type)' - ); - pgm.createIndex('brc20_balances', ['location_id']); - pgm.createIndex('brc20_balances', ['brc20_deploy_id']); - pgm.createIndex('brc20_balances', ['address']); -} diff --git a/migrations/1684344022290_brc20-events.ts b/migrations/1684344022290_brc20-events.ts deleted file mode 100644 index dfc0befc..00000000 --- a/migrations/1684344022290_brc20-events.ts +++ /dev/null @@ -1,60 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createTable('brc20_events', { - id: { - type: 'bigserial', - primaryKey: true, - }, - inscription_id: { - type: 'bigint', - notNull: true, - }, - brc20_deploy_id: { - type: 'bigint', - notNull: true, - }, - deploy_id: { - type: 'bigint', - }, - mint_id: { - type: 'bigint', - }, - transfer_id: { - type: 'bigint', - }, - }); - pgm.createConstraint( - 'brc20_events', - 'brc20_events_inscription_id_fk', - 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_events', - 'brc20_events_brc20_deploy_id_fk', - 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_events', - 'brc20_events_deploy_id_fk', - 'FOREIGN KEY(deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_events', - 'brc20_events_mint_id_fk', - 'FOREIGN KEY(mint_id) REFERENCES brc20_mints(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_events', - 'brc20_events_transfer_id_fk', - 'FOREIGN KEY(transfer_id) REFERENCES brc20_transfers(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_events', - 'brc20_valid_event', - 'CHECK(NUM_NONNULLS(deploy_id, mint_id, transfer_id) = 1)' - ); -} diff --git a/migrations/1692132685000_brc20-supply-view.ts b/migrations/1692132685000_brc20-supply-view.ts deleted file mode 100644 index 8ae5cb2f..00000000 --- a/migrations/1692132685000_brc20-supply-view.ts +++ /dev/null @@ -1,17 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createMaterializedView( - 'brc20_supplies', - { data: true }, - ` - SELECT brc20_deploy_id, SUM(amount) as 
minted_supply, MAX(block_height) as block_height - FROM brc20_mints - GROUP BY brc20_deploy_id - ` - ); - pgm.createIndex('brc20_supplies', ['brc20_deploy_id'], { unique: true }); -} diff --git a/migrations/1692188000000_brc20-deploys-ticker-index.ts b/migrations/1692188000000_brc20-deploys-ticker-index.ts deleted file mode 100644 index 4fd40203..00000000 --- a/migrations/1692188000000_brc20-deploys-ticker-index.ts +++ /dev/null @@ -1,15 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.addColumns('brc20_deploys', { - ticker_lower: { - type: 'text', - notNull: true, - expressionGenerated: '(LOWER(ticker))', - }, - }); - pgm.createIndex('brc20_deploys', ['ticker_lower']); -} diff --git a/migrations/1692853050488_brc20-mint-transfer-unique.ts b/migrations/1692853050488_brc20-mint-transfer-unique.ts deleted file mode 100644 index 2ad987e1..00000000 --- a/migrations/1692853050488_brc20-mint-transfer-unique.ts +++ /dev/null @@ -1,18 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.dropIndex('brc20_transfers', ['inscription_id']); - pgm.createIndex('brc20_transfers', ['inscription_id'], { unique: true }); - pgm.dropIndex('brc20_mints', ['inscription_id']); - pgm.createIndex('brc20_mints', ['inscription_id'], { unique: true }); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropIndex('brc20_transfers', ['inscription_id'], { unique: true }); - pgm.createIndex('brc20_transfers', ['inscription_id']); - pgm.dropIndex('brc20_mints', ['inscription_id'], { unique: true }); - pgm.createIndex('brc20_mints', ['inscription_id']); -} diff --git a/migrations/1692891772000_brc20-events-types.ts b/migrations/1692891772000_brc20-events-types.ts deleted file mode 100644 index 4c575b94..00000000 --- a/migrations/1692891772000_brc20-events-types.ts +++ /dev/null @@ -1,38 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createType('brc20_operation', ['deploy', 'mint', 'transfer', 'transfer_send']); - pgm.addColumns('brc20_events', { - genesis_location_id: { - type: 'bigint', - references: '"locations"', - onDelete: 'CASCADE', - notNull: true, - unique: true, // only one event exists per location - }, - operation: { - type: 'brc20_operation', - notNull: true, - }, - }); - - pgm.createIndex('brc20_events', ['genesis_location_id']); - pgm.createIndex('brc20_events', ['operation']); - - pgm.createIndex('brc20_events', ['brc20_deploy_id']); - pgm.createIndex('brc20_events', ['transfer_id']); - pgm.createIndex('brc20_events', ['mint_id']); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropIndex('brc20_events', ['genesis_location_id']); - pgm.dropIndex('brc20_events', ['operation']); - pgm.dropColumns('brc20_events', ['genesis_location_id', 'operation']); - pgm.dropIndex('brc20_events', ['brc20_deploy_id']); - pgm.dropIndex('brc20_events', ['transfer_id']); - pgm.dropIndex('brc20_events', ['mint_id']); - pgm.dropType('brc20_operation'); -} 
diff --git a/migrations/1693428793416_brc20-minted-supply-column.ts b/migrations/1693428793416_brc20-minted-supply-column.ts deleted file mode 100644 index 55513825..00000000 --- a/migrations/1693428793416_brc20-minted-supply-column.ts +++ /dev/null @@ -1,36 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.addColumn('brc20_deploys', { - minted_supply: { - type: 'numeric', - default: 0, - }, - }); - pgm.sql(` - UPDATE brc20_deploys AS d - SET minted_supply = ( - SELECT COALESCE(SUM(amount), 0) AS minted_supply - FROM brc20_mints - WHERE brc20_deploy_id = d.id - ) - `); - pgm.dropMaterializedView('brc20_supplies'); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropColumn('brc20_deploys', ['minted_supply']); - pgm.createMaterializedView( - 'brc20_supplies', - { data: true }, - ` - SELECT brc20_deploy_id, SUM(amount) as minted_supply, MAX(block_height) as block_height - FROM brc20_mints - GROUP BY brc20_deploy_id - ` - ); - pgm.createIndex('brc20_supplies', ['brc20_deploy_id'], { unique: true }); -} diff --git a/migrations/1694081119000_brc20-counts-by-tx-count.ts b/migrations/1694081119000_brc20-counts-by-tx-count.ts deleted file mode 100644 index 12bb89d8..00000000 --- a/migrations/1694081119000_brc20-counts-by-tx-count.ts +++ /dev/null @@ -1,13 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.addColumn('brc20_deploys', { - tx_count: { - type: 'bigint', - default: 1, - }, - }); -} diff --git a/migrations/1694797181616_brc20-counts-by-address-event.ts b/migrations/1694797181616_brc20-counts-by-address-event.ts deleted file mode 100644 index b77c2895..00000000 --- a/migrations/1694797181616_brc20-counts-by-address-event.ts +++ /dev/null @@ -1,69 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createTable('brc20_counts_by_address_event_type', { - address: { - type: 'text', - notNull: true, - primaryKey: true, - }, - deploy: { - type: 'bigint', - notNull: true, - default: 0, - }, - mint: { - type: 'bigint', - notNull: true, - default: 0, - }, - transfer: { - type: 'bigint', - notNull: true, - default: 0, - }, - transfer_send: { - type: 'bigint', - notNull: true, - default: 0, - }, - }); - pgm.sql(` - INSERT INTO brc20_counts_by_address_event_type (address, deploy) ( - SELECT address, COUNT(*) AS deploy FROM brc20_deploys GROUP BY address - ) ON CONFLICT (address) DO UPDATE SET deploy = EXCLUDED.deploy - `); - pgm.sql(` - INSERT INTO brc20_counts_by_address_event_type (address, mint) ( - SELECT address, COUNT(*) AS mint FROM brc20_mints GROUP BY address - ) ON CONFLICT (address) DO UPDATE SET mint = EXCLUDED.mint - `); - pgm.sql(` - INSERT INTO brc20_counts_by_address_event_type (address, transfer) ( - SELECT from_address AS address, COUNT(*) AS transfer FROM brc20_transfers GROUP BY from_address - ) ON CONFLICT (address) DO UPDATE SET transfer = EXCLUDED.transfer - `); - pgm.sql(` - INSERT INTO brc20_counts_by_address_event_type 
(address, transfer_send) ( - SELECT from_address AS address, COUNT(*) AS transfer_send - FROM brc20_transfers - WHERE to_address IS NOT NULL - GROUP BY from_address - ) ON CONFLICT (address) DO UPDATE SET transfer_send = EXCLUDED.transfer_send - `); - pgm.sql(` - INSERT INTO brc20_counts_by_address_event_type (address, transfer_send) ( - SELECT to_address AS address, COUNT(*) AS transfer_send - FROM brc20_transfers - WHERE to_address <> from_address - GROUP BY to_address - ) ON CONFLICT (address) DO UPDATE SET transfer_send = brc20_counts_by_address_event_type.transfer_send + EXCLUDED.transfer_send - `); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropTable('brc20_counts_by_address_event_type'); -} diff --git a/migrations/1695243716885_brc20-events-addresses.ts b/migrations/1695243716885_brc20-events-addresses.ts deleted file mode 100644 index a91732e1..00000000 --- a/migrations/1695243716885_brc20-events-addresses.ts +++ /dev/null @@ -1,35 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.addColumns('brc20_events', { - address: { - type: 'text', - }, - from_address: { - type: 'text', - }, - }); - pgm.createIndex('brc20_events', ['address']); - pgm.createIndex('brc20_events', ['from_address']); - pgm.sql(` - UPDATE brc20_events - SET address = (SELECT address FROM locations WHERE id = brc20_events.genesis_location_id) - `); - pgm.sql(` - UPDATE brc20_events - SET from_address = (SELECT from_address FROM brc20_transfers WHERE id = brc20_events.transfer_id) - WHERE operation = 'transfer_send' - `); - pgm.alterColumn('brc20_events', 'address', { notNull: true }); - pgm.dropIndex('brc20_events', ['genesis_location_id']); // Covered by the unique index. 
-} - -export function down(pgm: MigrationBuilder): void { - pgm.dropIndex('brc20_events', ['address']); - pgm.dropIndex('brc20_events', ['from_address']); - pgm.dropColumns('brc20_events', ['address', 'from_address']); - pgm.createIndex('brc20_events', ['genesis_location_id']); -} diff --git a/migrations/1706894983174_brc20-total-balances-address-deploy-index.ts b/migrations/1706894983174_brc20-total-balances-address-deploy-index.ts deleted file mode 100644 index 25e79706..00000000 --- a/migrations/1706894983174_brc20-total-balances-address-deploy-index.ts +++ /dev/null @@ -1,14 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.dropIndex('brc20_total_balances', ['address']); - pgm.createIndex('brc20_total_balances', ['address', 'brc20_deploy_id']); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropIndex('brc20_total_balances', ['address', 'brc20_deploy_id']); - pgm.createIndex('brc20_total_balances', ['address']); -} diff --git a/migrations/1708471015438_remove-unused-indexes.ts b/migrations/1708471015438_remove-unused-indexes.ts index 2ba978b7..1d94c6f7 100644 --- a/migrations/1708471015438_remove-unused-indexes.ts +++ b/migrations/1708471015438_remove-unused-indexes.ts @@ -7,15 +7,6 @@ export function up(pgm: MigrationBuilder): void { pgm.dropIndex('locations', ['prev_output']); pgm.dropIndex('locations', ['address']); pgm.dropIndex('current_locations', ['block_height']); - pgm.dropIndex('brc20_mints', ['address']); - pgm.dropIndex('brc20_mints', ['block_height']); - pgm.dropIndex('brc20_mints', ['brc20_deploy_id']); - pgm.dropIndex('brc20_transfers', ['to_address']); - pgm.dropIndex('brc20_transfers', ['from_address']); - pgm.dropIndex('brc20_transfers', ['brc20_deploy_id']); - pgm.dropIndex('brc20_transfers', ['block_height']); - pgm.dropIndex('brc20_deploys', ['address']); - pgm.dropIndex('brc20_deploys', ['block_height']); pgm.dropIndex('inscription_recursions', ['ref_inscription_genesis_id']); } @@ -23,14 +14,5 @@ export function down(pgm: MigrationBuilder): void { pgm.createIndex('locations', ['prev_output']); pgm.createIndex('locations', ['address']); pgm.createIndex('current_locations', ['block_height']); - pgm.createIndex('brc20_mints', ['address']); - pgm.createIndex('brc20_mints', ['block_height']); - pgm.createIndex('brc20_mints', ['brc20_deploy_id']); - pgm.createIndex('brc20_transfers', ['to_address']); - pgm.createIndex('brc20_transfers', ['from_address']); - pgm.createIndex('brc20_transfers', ['brc20_deploy_id']); - pgm.createIndex('brc20_transfers', ['block_height']); - pgm.createIndex('brc20_deploys', ['address']); - pgm.createIndex('brc20_deploys', ['block_height']); pgm.createIndex('inscription_recursions', ['ref_inscription_genesis_id']); } diff --git a/migrations/1711465842961_brc20-deploy-self-mint.ts b/migrations/1711465842961_brc20-deploy-self-mint.ts deleted file mode 100644 index 8cacf691..00000000 --- a/migrations/1711465842961_brc20-deploy-self-mint.ts +++ /dev/null @@ -1,19 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.addColumn('brc20_deploys', { - self_mint: { - type: 'boolean', - default: 'false', - }, - 
}); - pgm.sql(`UPDATE brc20_deploys SET self_mint = false`); - pgm.alterColumn('brc20_deploys', 'self_mint', { notNull: true }); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropColumn('brc20_deploys', ['self_mint']); -} diff --git a/migrations/1684174644336_brc20-deploys.ts b/migrations/1711575178681_brc20-tokens.ts similarity index 60% rename from migrations/1684174644336_brc20-deploys.ts rename to migrations/1711575178681_brc20-tokens.ts index 3604fa03..48cd3670 100644 --- a/migrations/1684174644336_brc20-deploys.ts +++ b/migrations/1711575178681_brc20-tokens.ts @@ -4,13 +4,13 @@ import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { - pgm.createTable('brc20_deploys', { - id: { - type: 'bigserial', + pgm.createTable('brc20_tokens', { + ticker: { + type: 'text', primaryKey: true, }, - inscription_id: { - type: 'bigint', + genesis_id: { + type: 'string', notNull: true, }, block_height: { @@ -25,10 +25,6 @@ export function up(pgm: MigrationBuilder): void { type: 'text', notNull: true, }, - ticker: { - type: 'text', - notNull: true, - }, max: { type: 'numeric', notNull: true, @@ -40,14 +36,24 @@ export function up(pgm: MigrationBuilder): void { type: 'int', notNull: true, }, + self_mint: { + type: 'boolean', + default: 'false', + notNull: true, + }, + minted_supply: { + type: 'numeric', + default: 0, + }, + burned_supply: { + type: 'numeric', + default: 0, + }, + tx_count: { + type: 'bigint', + default: 0, + }, }); - pgm.createConstraint( - 'brc20_deploys', - 'brc20_deploys_inscription_id_fk', - 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' - ); - pgm.createIndex('brc20_deploys', ['inscription_id']); - pgm.createIndex('brc20_deploys', 'LOWER(ticker)', { unique: true }); - pgm.createIndex('brc20_deploys', ['block_height']); - pgm.createIndex('brc20_deploys', ['address']); + pgm.createIndex('brc20_tokens', ['genesis_id']); + pgm.createIndex('brc20_tokens', ['block_height']); } diff --git a/migrations/1711575178682_brc20-operations.ts b/migrations/1711575178682_brc20-operations.ts new file mode 100644 index 00000000..42582577 --- /dev/null +++ b/migrations/1711575178682_brc20-operations.ts @@ -0,0 +1,68 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createType('brc20_operation', [ + 'deploy', + 'mint', + 'transfer', + 'transfer_send', + 'transfer_receive', + ]); + pgm.createTable('brc20_operations', { + genesis_id: { + type: 'string', + notNull: true, + }, + ticker: { + type: 'string', + notNull: true, + }, + operation: { + type: 'brc20_operation', + notNull: true, + }, + block_height: { + type: 'bigint', + notNull: true, + }, + tx_index: { + type: 'bigint', + notNull: true, + }, + address: { + type: 'text', + notNull: true, + }, + // Only used when operation is `transfer_send`; used to optimize activity lookup for + // receiving addresses. 
+ to_address: { + type: 'text', + }, + avail_balance: { + type: 'numeric', + notNull: true, + }, + trans_balance: { + type: 'numeric', + notNull: true, + }, + }); + pgm.createConstraint('brc20_operations', 'brc20_operations_pkey', { + primaryKey: ['genesis_id', 'operation'], + }); + pgm.createConstraint( + 'brc20_operations', + 'brc20_operations_ticker_fk', + 'FOREIGN KEY(ticker) REFERENCES brc20_tokens(ticker) ON DELETE CASCADE' + ); + pgm.createIndex('brc20_operations', ['operation']); + pgm.createIndex('brc20_operations', ['ticker', 'address']); + pgm.createIndex('brc20_operations', [ + { name: 'block_height', sort: 'DESC' }, + { name: 'tx_index', sort: 'DESC' }, + ]); + pgm.createIndex('brc20_operations', ['address', 'to_address']); +} diff --git a/migrations/1694021174916_brc20-total-balances.ts b/migrations/1711575178683_brc20-total-balances.ts similarity index 59% rename from migrations/1694021174916_brc20-total-balances.ts rename to migrations/1711575178683_brc20-total-balances.ts index c2d66828..6af59f2a 100644 --- a/migrations/1694021174916_brc20-total-balances.ts +++ b/migrations/1711575178683_brc20-total-balances.ts @@ -5,12 +5,8 @@ export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { pgm.createTable('brc20_total_balances', { - id: { - type: 'bigserial', - primaryKey: true, - }, - brc20_deploy_id: { - type: 'bigint', + ticker: { + type: 'string', notNull: true, }, address: { @@ -32,17 +28,12 @@ export function up(pgm: MigrationBuilder): void { }); pgm.createConstraint( 'brc20_total_balances', - 'brc20_total_balances_brc20_deploy_id_fk', - 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'brc20_total_balances', - 'brc20_total_balances_unique', - 'UNIQUE(brc20_deploy_id, address)' + 'brc20_total_balances_ticker_fk', + 'FOREIGN KEY(ticker) REFERENCES brc20_tokens(ticker) ON DELETE CASCADE' ); + pgm.createConstraint('brc20_total_balances', 'brc20_total_balances_pkey', { + primaryKey: ['ticker', 'address'], + }); pgm.createIndex('brc20_total_balances', ['address']); - pgm.createIndex('brc20_total_balances', [ - 'brc20_deploy_id', - { name: 'total_balance', sort: 'DESC' }, - ]); + pgm.createIndex('brc20_total_balances', ['ticker', { name: 'total_balance', sort: 'DESC' }]); } diff --git a/migrations/1694295793981_brc20-event-counts.ts b/migrations/1711575178684_brc20-counts-by-operation.ts similarity index 86% rename from migrations/1694295793981_brc20-event-counts.ts rename to migrations/1711575178684_brc20-counts-by-operation.ts index 9bd0a6eb..52c7b754 100644 --- a/migrations/1694295793981_brc20-event-counts.ts +++ b/migrations/1711575178684_brc20-counts-by-operation.ts @@ -4,8 +4,8 @@ import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { - pgm.createTable('brc20_counts_by_event_type', { - event_type: { + pgm.createTable('brc20_counts_by_operation', { + operation: { type: 'brc20_operation', notNull: true, primaryKey: true, diff --git a/migrations/1694299763914_brc20-token-count.ts b/migrations/1711575178686_brc20-counts-by-address-operation.ts similarity index 55% rename from migrations/1694299763914_brc20-token-count.ts rename to migrations/1711575178686_brc20-counts-by-address-operation.ts index de2ee291..9b1ff7a3 100644 --- a/migrations/1694299763914_brc20-token-count.ts +++ 
b/migrations/1711575178686_brc20-counts-by-address-operation.ts @@ -4,11 +4,14 @@ import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { - pgm.createTable('brc20_counts_by_tokens', { - token_type: { + pgm.createTable('brc20_counts_by_address_operation', { + address: { type: 'text', notNull: true, - primaryKey: true, + }, + operation: { + type: 'brc20_operation', + notNull: true, }, count: { type: 'bigint', @@ -16,4 +19,9 @@ export function up(pgm: MigrationBuilder): void { default: 1, }, }); + pgm.createConstraint( + 'brc20_counts_by_address_operation', + 'brc20_counts_by_address_operation_pkey', + { primaryKey: ['address', 'operation'] } + ); } diff --git a/package-lock.json b/package-lock.json index 28f91c25..19c72ef7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,7 +15,7 @@ "@fastify/swagger": "^8.3.1", "@fastify/type-provider-typebox": "^3.2.0", "@hirosystems/api-toolkit": "^1.4.0", - "@hirosystems/chainhook-client": "^1.7.0", + "@hirosystems/chainhook-client": "^1.8.0", "@semantic-release/changelog": "^6.0.3", "@semantic-release/commit-analyzer": "^10.0.4", "@semantic-release/git": "^10.0.1", @@ -95,35 +95,6 @@ "node": ">=18" } }, - "../chainhook/components/client/typescript": { - "name": "@hirosystems/chainhook-client", - "version": "1.4.2", - "extraneous": true, - "license": "Apache 2.0", - "dependencies": { - "@fastify/type-provider-typebox": "^3.2.0", - "fastify": "^4.15.0", - "pino": "^8.11.0", - "undici": "^5.21.2" - }, - "devDependencies": { - "@stacks/eslint-config": "^1.2.0", - "@types/jest": "^29.5.0", - "@types/node": "^18.15.7", - "@typescript-eslint/eslint-plugin": "^5.56.0", - "@typescript-eslint/parser": "^5.56.0", - "babel-jest": "^29.5.0", - "eslint": "^8.36.0", - "eslint-plugin-prettier": "^4.2.1", - "eslint-plugin-tsdoc": "^0.2.17", - "jest": "^29.5.0", - "prettier": "^2.8.7", - "rimraf": "^4.4.1", - "ts-jest": "^29.0.5", - "ts-node": "^10.9.1", - "typescript": "^5.0.2" - } - }, "node_modules/@ampproject/remapping": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", @@ -1299,9 +1270,9 @@ } }, "node_modules/@hirosystems/chainhook-client": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.7.0.tgz", - "integrity": "sha512-XRSbpu+Bxwvd8qqQTNcomfO8RYu+Dpnl9ZnB8EJE+tvJ4y3lUZD6Uk65368Us0Hbw+VNWnU2ibej7iqB6mGsOA==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.8.0.tgz", + "integrity": "sha512-BpYwrbxWuH0KGRyKq1T8nIiZUGaapOxz6yFZ653m6CJi7DS7kqOm2+v5X/DR0hbeZUmqriGMUJnROJ1tW08aEg==", "dependencies": { "@fastify/type-provider-typebox": "^3.2.0", "fastify": "^4.15.0", @@ -19743,9 +19714,9 @@ } }, "@hirosystems/chainhook-client": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.7.0.tgz", - "integrity": "sha512-XRSbpu+Bxwvd8qqQTNcomfO8RYu+Dpnl9ZnB8EJE+tvJ4y3lUZD6Uk65368Us0Hbw+VNWnU2ibej7iqB6mGsOA==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.8.0.tgz", + "integrity": "sha512-BpYwrbxWuH0KGRyKq1T8nIiZUGaapOxz6yFZ653m6CJi7DS7kqOm2+v5X/DR0hbeZUmqriGMUJnROJ1tW08aEg==", "requires": { "@fastify/type-provider-typebox": "^3.2.0", "fastify": "^4.15.0", diff --git a/package.json b/package.json index 
08d55d38..132e5ff8 100644 --- a/package.json +++ b/package.json @@ -55,7 +55,7 @@ "@fastify/swagger": "^8.3.1", "@fastify/type-provider-typebox": "^3.2.0", "@hirosystems/api-toolkit": "^1.4.0", - "@hirosystems/chainhook-client": "^1.7.0", + "@hirosystems/chainhook-client": "^1.8.0", "@semantic-release/changelog": "^6.0.3", "@semantic-release/commit-analyzer": "^10.0.4", "@semantic-release/git": "^10.0.1", diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index 9b7815eb..9ff8c520 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -146,7 +146,7 @@ export function parseBrc20Activities(items: DbBrc20Activity[]): Brc20ActivityRes const activity = { operation: i.operation, ticker: i.ticker, - address: i.address, + address: i.to_address ?? i.address, tx_id: i.tx_id, inscription_id: i.inscription_id, location: `${i.output}:${i.offset}`, @@ -169,22 +169,27 @@ export function parseBrc20Activities(items: DbBrc20Activity[]): Brc20ActivityRes return { ...activity, mint: { - amount: decimals(i.mint_amount, i.deploy_decimals), + amount: decimals(i.avail_balance, i.deploy_decimals), }, }; } case DbBrc20EventOperation.transfer: { - const [amount, from_address] = i.transfer_data.split(';'); return { ...activity, - transfer: { amount: decimals(amount, i.deploy_decimals), from_address }, + transfer: { + amount: decimals(i.trans_balance, i.deploy_decimals), + from_address: i.address, + }, }; } case DbBrc20EventOperation.transferSend: { - const [amount, from_address, to_address] = i.transfer_data.split(';'); return { ...activity, - transfer_send: { amount: decimals(amount, i.deploy_decimals), from_address, to_address }, + transfer_send: { + amount: decimals(BigNumber(i.trans_balance).abs().toString(), i.deploy_decimals), + from_address: i.address, + to_address: i.to_address ?? 
i.address, + }, }; } } diff --git a/src/env.ts b/src/env.ts index 956d42f3..fc8f0389 100644 --- a/src/env.ts +++ b/src/env.ts @@ -64,9 +64,6 @@ const schema = Type.Object({ PG_IDLE_TIMEOUT: Type.Number({ default: 30 }), PG_MAX_LIFETIME: Type.Number({ default: 60 }), PG_STATEMENT_TIMEOUT: Type.Number({ default: 60_000 }), - - /** Enables BRC-20 processing in write mode APIs */ - BRC20_BLOCK_SCAN_ENABLED: Type.Boolean({ default: true }), }); type Env = Static; diff --git a/src/ordhook/server.ts b/src/ordhook/server.ts index a414da32..7f604584 100644 --- a/src/ordhook/server.ts +++ b/src/ordhook/server.ts @@ -34,6 +34,7 @@ export async function startOrdhookServer(args: { db: PgStore }): Promise[] | undefined) { - return partials?.reduce((acc, curr) => this.sql`${acc} OR ${curr}`); - } - - async insertOperations(args: { - reveals: InscriptionEventData[]; - pointers: DbLocationPointerInsert[]; - }): Promise { - for (const [i, reveal] of args.reveals.entries()) { - const pointer = args.pointers[i]; - if (parseInt(pointer.block_height) < BRC20_GENESIS_BLOCK) continue; - if ('inscription' in reveal) { - const brc20 = brc20FromInscription(reveal); - if (brc20) { - switch (brc20.op) { - case 'deploy': - await this.insertDeploy({ brc20, reveal, pointer }); - break; - case 'mint': - await this.insertMint({ brc20, reveal, pointer }); - break; - case 'transfer': - await this.insertTransfer({ brc20, reveal, pointer }); - break; + async updateBrc20Operations(event: BitcoinEvent, direction: 'apply' | 'rollback'): Promise { + await this.sqlWriteTransaction(async sql => { + const block_height = event.block_identifier.index.toString(); + const cache = new Brc20BlockCache(); + for (const tx of event.transactions) { + const tx_index = tx.metadata.index.toString(); + if (tx.metadata.brc20_operation) { + const operation = tx.metadata.brc20_operation; + if ('deploy' in operation) { + cache.tokens.push({ + block_height, + genesis_id: operation.deploy.inscription_id, + tx_id: tx.transaction_identifier.hash, + address: operation.deploy.address, + ticker: operation.deploy.tick, + max: operation.deploy.max, + limit: operation.deploy.lim, + decimals: operation.deploy.dec, + self_mint: operation.deploy.self_mint, + }); + cache.operations.push({ + block_height, + tx_index, + genesis_id: operation.deploy.inscription_id, + ticker: operation.deploy.tick, + address: operation.deploy.address, + avail_balance: '0', + trans_balance: '0', + operation: DbBrc20Operation.deploy, + }); + cache.increaseOperationCount(DbBrc20Operation.deploy); + cache.increaseAddressOperationCount(operation.deploy.address, DbBrc20Operation.deploy); + cache.increaseTokenTxCount(operation.deploy.tick); + logger.info( + `Brc20PgStore ${direction} deploy ${operation.deploy.tick} by ${operation.deploy.address} at height ${block_height}` + ); + } else if ('mint' in operation) { + cache.operations.push({ + block_height, + tx_index, + genesis_id: operation.mint.inscription_id, + ticker: operation.mint.tick, + address: operation.mint.address, + avail_balance: operation.mint.amt, + trans_balance: '0', + operation: DbBrc20Operation.mint, + }); + const amt = BigNumber(operation.mint.amt); + cache.increaseTokenMintedSupply(operation.mint.tick, amt); + cache.increaseTokenTxCount(operation.mint.tick); + cache.increaseOperationCount(DbBrc20Operation.mint); + cache.increaseAddressOperationCount(operation.mint.address, DbBrc20Operation.mint); + cache.updateAddressBalance( + operation.mint.tick, + operation.mint.address, + amt, + BigNumber(0), + amt + ); + logger.info( + 
`Brc20PgStore ${direction} mint ${operation.mint.tick} ${operation.mint.amt} by ${operation.mint.address} at height ${block_height}` + ); + } else if ('transfer' in operation) { + cache.operations.push({ + block_height, + tx_index, + genesis_id: operation.transfer.inscription_id, + ticker: operation.transfer.tick, + address: operation.transfer.address, + avail_balance: BigNumber(operation.transfer.amt).negated().toString(), + trans_balance: operation.transfer.amt, + operation: DbBrc20Operation.transfer, + }); + const amt = BigNumber(operation.transfer.amt); + cache.increaseOperationCount(DbBrc20Operation.transfer); + cache.increaseTokenTxCount(operation.transfer.tick); + cache.increaseAddressOperationCount( + operation.transfer.address, + DbBrc20Operation.transfer + ); + cache.updateAddressBalance( + operation.transfer.tick, + operation.transfer.address, + amt.negated(), + amt, + BigNumber(0) + ); + logger.info( + `Brc20PgStore ${direction} transfer ${operation.transfer.tick} ${operation.transfer.amt} by ${operation.transfer.address} at height ${block_height}` + ); + } else if ('transfer_send' in operation) { + cache.operations.push({ + block_height, + tx_index, + genesis_id: operation.transfer_send.inscription_id, + ticker: operation.transfer_send.tick, + address: operation.transfer_send.sender_address, + avail_balance: '0', + trans_balance: BigNumber(operation.transfer_send.amt).negated().toString(), + operation: DbBrc20Operation.transferSend, + }); + cache.transferReceivers.set( + operation.transfer_send.inscription_id, + operation.transfer_send.receiver_address + ); + cache.operations.push({ + block_height, + tx_index, + genesis_id: operation.transfer_send.inscription_id, + ticker: operation.transfer_send.tick, + address: operation.transfer_send.receiver_address, + avail_balance: operation.transfer_send.amt, + trans_balance: '0', + operation: DbBrc20Operation.transferReceive, + }); + const amt = BigNumber(operation.transfer_send.amt); + cache.increaseOperationCount(DbBrc20Operation.transferSend); + cache.increaseTokenTxCount(operation.transfer_send.tick); + cache.increaseAddressOperationCount( + operation.transfer_send.sender_address, + DbBrc20Operation.transferSend + ); + if ( + operation.transfer_send.sender_address != operation.transfer_send.receiver_address + ) { + cache.increaseAddressOperationCount( + operation.transfer_send.receiver_address, + DbBrc20Operation.transferSend + ); + } + cache.updateAddressBalance( + operation.transfer_send.tick, + operation.transfer_send.sender_address, + BigNumber('0'), + amt.negated(), + amt.negated() + ); + cache.updateAddressBalance( + operation.transfer_send.tick, + operation.transfer_send.receiver_address, + amt, + BigNumber(0), + amt + ); + logger.info( + `Brc20PgStore ${direction} transfer_send ${operation.transfer_send.tick} ${operation.transfer_send.amt} from ${operation.transfer_send.sender_address} to ${operation.transfer_send.receiver_address} at height ${block_height}` + ); } } - } else { - await this.applyTransfer({ reveal, pointer }); } - } + if (direction === 'apply') await this.applyOperations(sql, cache); + else await this.rollBackOperations(sql, cache); + }); } - async applyTransfer(args: { - reveal: InscriptionEventData; - pointer: DbLocationPointerInsert; - }): Promise { - await this.sqlWriteTransaction(async sql => { - // Get the sender address for this transfer. We need to get this in a separate query to know - // if we should alter the write query to accomodate a "return to sender" scenario. 
- const fromAddressRes = await sql<{ from_address: string }[]>` - SELECT from_address FROM brc20_transfers WHERE inscription_id = ${args.pointer.inscription_id} + private async applyOperations(sql: PgSqlClient, cache: Brc20BlockCache) { + if (cache.tokens.length) + for await (const batch of batchIterate(cache.tokens, INSERT_BATCH_SIZE)) + await sql` + INSERT INTO brc20_tokens ${sql(batch)} + ON CONFLICT (ticker) DO NOTHING + `; + if (cache.operations.length) + for await (const batch of batchIterate(cache.operations, INSERT_BATCH_SIZE)) + await sql` + INSERT INTO brc20_operations ${sql(batch)} + ON CONFLICT (genesis_id, operation) DO NOTHING + `; + for (const [inscription_id, to_address] of cache.transferReceivers) + await sql` + UPDATE brc20_operations SET to_address = ${to_address} + WHERE genesis_id = ${inscription_id} AND operation = 'transfer_send' `; - if (fromAddressRes.count === 0) return; - const fromAddress = fromAddressRes[0].from_address; - // Is this transfer sent as fee or from the same sender? If so, we'll return the balance. - // Is it burnt? Mark as empty owner. - const returnToSender = - args.reveal.location.transfer_type == DbLocationTransferType.spentInFees || - fromAddress == args.pointer.address; - const toAddress = returnToSender - ? fromAddress - : args.reveal.location.transfer_type == DbLocationTransferType.burnt - ? '' - : args.pointer.address; - // Check if we have a valid transfer inscription emitted by this address that hasn't been sent - // to another address before. Use `LIMIT 3` as a quick way of checking if we have just inserted - // the first transfer for this inscription (genesis + transfer). - const sendRes = await sql` - WITH transfer_data AS ( - SELECT t.id, t.amount, t.brc20_deploy_id, t.from_address, ROW_NUMBER() OVER() - FROM locations AS l - INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id - WHERE l.inscription_id = ${args.pointer.inscription_id} - AND ( - l.block_height < ${args.pointer.block_height} - OR (l.block_height = ${args.pointer.block_height} - AND l.tx_index <= ${args.pointer.tx_index}) - ) - LIMIT 3 - ), - validated_transfer AS ( - SELECT * FROM transfer_data - WHERE NOT EXISTS(SELECT id FROM transfer_data WHERE row_number = 3) - LIMIT 1 - ), - updated_transfer AS ( - UPDATE brc20_transfers - SET to_address = ${toAddress} - WHERE id = (SELECT id FROM validated_transfer) - ), - balance_insert_from AS ( - INSERT INTO brc20_balances (inscription_id, location_id, brc20_deploy_id, address, avail_balance, trans_balance, type) ( - SELECT ${args.pointer.inscription_id}, ${args.pointer.location_id}, brc20_deploy_id, - from_address, 0, -1 * amount, ${DbBrc20BalanceTypeId.transferFrom} - FROM validated_transfer - ) - ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO NOTHING - ), - balance_insert_to AS ( - INSERT INTO brc20_balances (inscription_id, location_id, brc20_deploy_id, address, avail_balance, trans_balance, type) ( - SELECT ${args.pointer.inscription_id}, ${args.pointer.location_id}, brc20_deploy_id, - ${toAddress}, amount, 0, ${DbBrc20BalanceTypeId.transferTo} - FROM validated_transfer - ) - ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO NOTHING - ), - ${ - returnToSender - ? 
sql` - total_balance_revert AS ( - UPDATE brc20_total_balances SET - avail_balance = avail_balance + (SELECT amount FROM validated_transfer), - trans_balance = trans_balance - (SELECT amount FROM validated_transfer) - WHERE brc20_deploy_id = (SELECT brc20_deploy_id FROM validated_transfer) - AND address = (SELECT from_address FROM validated_transfer) - ), - address_event_type_count_increase AS ( - INSERT INTO brc20_counts_by_address_event_type (address, transfer_send) - (SELECT from_address, 1 FROM validated_transfer) - ON CONFLICT (address) DO UPDATE SET transfer_send = brc20_counts_by_address_event_type.transfer_send + EXCLUDED.transfer_send - ) - ` - : sql` - total_balance_insert_from AS ( - UPDATE brc20_total_balances SET - trans_balance = trans_balance - (SELECT amount FROM validated_transfer), - total_balance = total_balance - (SELECT amount FROM validated_transfer) - WHERE brc20_deploy_id = (SELECT brc20_deploy_id FROM validated_transfer) - AND address = (SELECT from_address FROM validated_transfer) - ), - total_balance_insert_to AS ( - INSERT INTO brc20_total_balances (brc20_deploy_id, address, avail_balance, trans_balance, total_balance) ( - SELECT brc20_deploy_id, ${toAddress}, amount, 0, amount - FROM validated_transfer - ) - ON CONFLICT ON CONSTRAINT brc20_total_balances_unique DO UPDATE SET - avail_balance = brc20_total_balances.avail_balance + EXCLUDED.avail_balance, - total_balance = brc20_total_balances.total_balance + EXCLUDED.total_balance - ), - address_event_type_count_increase_from AS ( - INSERT INTO brc20_counts_by_address_event_type (address, transfer_send) - (SELECT from_address, 1 FROM validated_transfer) - ON CONFLICT (address) DO UPDATE SET transfer_send = brc20_counts_by_address_event_type.transfer_send + EXCLUDED.transfer_send - ), - address_event_type_count_increase_to AS ( - INSERT INTO brc20_counts_by_address_event_type (address, transfer_send) - (SELECT ${toAddress}, 1 FROM validated_transfer) - ON CONFLICT (address) DO UPDATE SET transfer_send = brc20_counts_by_address_event_type.transfer_send + EXCLUDED.transfer_send - ) - ` - }, deploy_update AS ( - UPDATE brc20_deploys - SET tx_count = tx_count + 1 - WHERE id = (SELECT brc20_deploy_id FROM validated_transfer) - ), - event_type_count_increase AS ( - INSERT INTO brc20_counts_by_event_type (event_type, count) - (SELECT 'transfer_send', COALESCE(COUNT(*), 0) FROM validated_transfer) - ON CONFLICT (event_type) DO UPDATE SET count = brc20_counts_by_event_type.count + EXCLUDED.count - ) - INSERT INTO brc20_events (operation, inscription_id, genesis_location_id, brc20_deploy_id, transfer_id, address, from_address) ( - SELECT 'transfer_send', ${args.pointer.inscription_id}, ${args.pointer.location_id}, - brc20_deploy_id, id, ${toAddress}, from_address - FROM validated_transfer - ) + for (const [ticker, amount] of cache.tokenMintSupplies) + await sql` + UPDATE brc20_tokens SET minted_supply = minted_supply + ${amount.toString()} + WHERE ticker = ${ticker} `; - if (sendRes.count) - logger.info( - `Brc20PgStore send transfer to ${toAddress} at block ${args.pointer.block_height}` - ); - }); - } - - private async insertDeploy(deploy: { - brc20: Brc20Deploy; - reveal: InscriptionRevealData; - pointer: DbLocationPointerInsert; - }): Promise { - if (deploy.reveal.location.transfer_type != DbLocationTransferType.transferred) return; - const insert: DbBrc20DeployInsert = { - inscription_id: deploy.pointer.inscription_id, - block_height: deploy.pointer.block_height, - tx_id: deploy.reveal.location.tx_id, - address: 
deploy.pointer.address as string, - ticker: deploy.brc20.tick, - max: deploy.brc20.max === '0' ? UINT64_MAX.toString() : deploy.brc20.max, - limit: deploy.brc20.lim ?? null, - decimals: deploy.brc20.dec ?? '18', - tx_count: 1, - self_mint: deploy.brc20.self_mint === 'true', - }; - const deployRes = await this.sql` - WITH deploy_insert AS ( - INSERT INTO brc20_deploys ${this.sql(insert)} - ON CONFLICT (LOWER(ticker)) DO NOTHING - RETURNING id - ), - event_type_count_increase AS ( - INSERT INTO brc20_counts_by_event_type (event_type, count) - (SELECT 'deploy', COALESCE(COUNT(*), 0) FROM deploy_insert) - ON CONFLICT (event_type) DO UPDATE SET count = brc20_counts_by_event_type.count + EXCLUDED.count - ), - address_event_type_count_increase AS ( - INSERT INTO brc20_counts_by_address_event_type (address, deploy) - (SELECT ${deploy.pointer.address}, COALESCE(COUNT(*), 0) FROM deploy_insert) - ON CONFLICT (address) DO UPDATE SET deploy = brc20_counts_by_address_event_type.deploy + EXCLUDED.deploy - ), - token_count_increase AS ( - INSERT INTO brc20_counts_by_tokens (token_type, count) - (SELECT 'token', COALESCE(COUNT(*), 0) FROM deploy_insert) - ON CONFLICT (token_type) DO UPDATE SET count = brc20_counts_by_tokens.count + EXCLUDED.count - ) - INSERT INTO brc20_events (operation, inscription_id, genesis_location_id, brc20_deploy_id, deploy_id, address) ( - SELECT 'deploy', ${deploy.pointer.inscription_id}, ${deploy.pointer.location_id}, id, id, - ${deploy.pointer.address} - FROM deploy_insert - ) - `; - if (deployRes.count) - logger.info( - `Brc20PgStore deploy ${deploy.brc20.tick} by ${deploy.pointer.address} at block ${deploy.pointer.block_height}` - ); - } - - private async insertMint(mint: { - brc20: Brc20Mint; - reveal: InscriptionRevealData; - pointer: DbLocationPointerInsert; - }): Promise { - if (mint.reveal.location.transfer_type != DbLocationTransferType.transferred) return; - // Check the following conditions: - // * Is the mint amount within the allowed token limits? - // * Is this a self_mint with the correct parent inscription? - // * Is the number of decimals correct? - // * Does the mint amount exceed remaining supply? 
- const mintRes = await this.sql` - WITH mint_data AS ( - SELECT d.id, d.decimals, d."limit", d.max, d.minted_supply, d.self_mint, i.genesis_id - FROM brc20_deploys d - INNER JOIN inscriptions i ON i.id = d.inscription_id - WHERE d.ticker_lower = LOWER(${mint.brc20.tick}) AND d.minted_supply < d.max - ), - validated_mint AS ( - SELECT - id AS brc20_deploy_id, - LEAST(${mint.brc20.amt}::numeric, max - minted_supply) AS real_mint_amount - FROM mint_data - WHERE ("limit" IS NULL OR ${mint.brc20.amt}::numeric <= "limit") - AND (SCALE(${mint.brc20.amt}::numeric) <= decimals) - AND ( - self_mint = FALSE OR - (self_mint = TRUE AND genesis_id = ${mint.reveal.inscription.parent}) - ) - ), - mint_insert AS ( - INSERT INTO brc20_mints (inscription_id, brc20_deploy_id, block_height, tx_id, address, amount) ( - SELECT ${mint.pointer.inscription_id}, brc20_deploy_id, ${mint.pointer.block_height}, - ${mint.reveal.location.tx_id}, ${mint.pointer.address}, ${mint.brc20.amt}::numeric - FROM validated_mint - ) - ON CONFLICT (inscription_id) DO NOTHING - RETURNING id, brc20_deploy_id - ), - deploy_update AS ( - UPDATE brc20_deploys - SET - minted_supply = minted_supply + (SELECT real_mint_amount FROM validated_mint), - tx_count = tx_count + 1 - WHERE id = (SELECT brc20_deploy_id FROM validated_mint) - ), - balance_insert AS ( - INSERT INTO brc20_balances (inscription_id, location_id, brc20_deploy_id, address, avail_balance, trans_balance, type) ( - SELECT ${mint.pointer.inscription_id}, ${mint.pointer.location_id}, brc20_deploy_id, - ${mint.pointer.address}, real_mint_amount, 0, ${DbBrc20BalanceTypeId.mint} - FROM validated_mint - ) - ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO NOTHING - ), - total_balance_insert AS ( - INSERT INTO brc20_total_balances (brc20_deploy_id, address, avail_balance, trans_balance, total_balance) ( - SELECT brc20_deploy_id, ${mint.pointer.address}, real_mint_amount, 0, real_mint_amount - FROM validated_mint - ) - ON CONFLICT ON CONSTRAINT brc20_total_balances_unique DO UPDATE SET - avail_balance = brc20_total_balances.avail_balance + EXCLUDED.avail_balance, - total_balance = brc20_total_balances.total_balance + EXCLUDED.total_balance - ), - event_type_count_increase AS ( - INSERT INTO brc20_counts_by_event_type (event_type, count) - (SELECT 'mint', COALESCE(COUNT(*), 0) FROM validated_mint) - ON CONFLICT (event_type) DO UPDATE SET count = brc20_counts_by_event_type.count + EXCLUDED.count - ), - address_event_type_count_increase AS ( - INSERT INTO brc20_counts_by_address_event_type (address, mint) - (SELECT ${mint.pointer.address}, COALESCE(COUNT(*), 0) FROM validated_mint) - ON CONFLICT (address) DO UPDATE SET mint = brc20_counts_by_address_event_type.mint + EXCLUDED.mint - ) - INSERT INTO brc20_events (operation, inscription_id, genesis_location_id, brc20_deploy_id, mint_id, address) ( - SELECT 'mint', ${mint.pointer.inscription_id}, ${mint.pointer.location_id}, brc20_deploy_id, id, ${mint.pointer.address} - FROM mint_insert - ) - `; - if (mintRes.count) - logger.info( - `Brc20PgStore mint ${mint.brc20.tick} (${mint.brc20.amt}) by ${mint.pointer.address} at block ${mint.pointer.block_height}` - ); - } - - private async insertTransfer(transfer: { - brc20: Brc20Transfer; - reveal: InscriptionEventData; - pointer: DbLocationPointerInsert; - }): Promise { - if (transfer.reveal.location.transfer_type != DbLocationTransferType.transferred) return; - const transferRes = await this.sql` - WITH validated_transfer AS ( - SELECT brc20_deploy_id, avail_balance - FROM 
brc20_total_balances - WHERE brc20_deploy_id = (SELECT id FROM brc20_deploys WHERE ticker_lower = LOWER(${transfer.brc20.tick})) - AND address = ${transfer.pointer.address} - AND avail_balance >= ${transfer.brc20.amt}::numeric - ), - transfer_insert AS ( - INSERT INTO brc20_transfers (inscription_id, brc20_deploy_id, block_height, tx_id, from_address, to_address, amount) ( - SELECT ${transfer.pointer.inscription_id}, brc20_deploy_id, - ${transfer.pointer.block_height}, ${transfer.reveal.location.tx_id}, - ${transfer.pointer.address}, NULL, ${transfer.brc20.amt}::numeric - FROM validated_transfer - ) - ON CONFLICT (inscription_id) DO NOTHING - RETURNING id, brc20_deploy_id - ), - balance_insert AS ( - INSERT INTO brc20_balances (inscription_id, location_id, brc20_deploy_id, address, avail_balance, trans_balance, type) ( - SELECT ${transfer.pointer.inscription_id}, ${transfer.pointer.location_id}, brc20_deploy_id, - ${transfer.pointer.address}, -1 * ${transfer.brc20.amt}::numeric, - ${transfer.brc20.amt}::numeric, ${DbBrc20BalanceTypeId.transferIntent} - FROM validated_transfer - ) - ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO NOTHING - ), - total_balance_update AS ( - UPDATE brc20_total_balances SET - avail_balance = avail_balance - ${transfer.brc20.amt}::numeric, - trans_balance = trans_balance + ${transfer.brc20.amt}::numeric - WHERE brc20_deploy_id = (SELECT brc20_deploy_id FROM validated_transfer) - AND address = ${transfer.pointer.address} - ), - deploy_update AS ( - UPDATE brc20_deploys - SET tx_count = tx_count + 1 - WHERE id = (SELECT brc20_deploy_id FROM validated_transfer) - ), - event_type_count_increase AS ( - INSERT INTO brc20_counts_by_event_type (event_type, count) - (SELECT 'transfer', COALESCE(COUNT(*), 0) FROM validated_transfer) - ON CONFLICT (event_type) DO UPDATE SET count = brc20_counts_by_event_type.count + EXCLUDED.count - ), - address_event_type_count_increase AS ( - INSERT INTO brc20_counts_by_address_event_type (address, transfer) - (SELECT ${transfer.pointer.address}, COALESCE(COUNT(*), 0) FROM validated_transfer) - ON CONFLICT (address) DO UPDATE SET transfer = brc20_counts_by_address_event_type.transfer + EXCLUDED.transfer - ) - INSERT INTO brc20_events (operation, inscription_id, genesis_location_id, brc20_deploy_id, transfer_id, address) ( - SELECT 'transfer', ${transfer.pointer.inscription_id}, ${transfer.pointer.location_id}, brc20_deploy_id, id, ${transfer.pointer.address} - FROM transfer_insert - ) - `; - if (transferRes.count) - logger.info( - `Brc20PgStore transfer ${transfer.brc20.tick} (${transfer.brc20.amt}) by ${transfer.pointer.address} at block ${transfer.pointer.block_height}` - ); - } - - async rollBackInscription(args: { inscription: InscriptionData }): Promise { - const events = await this.sql` - SELECT e.* FROM brc20_events AS e - INNER JOIN inscriptions AS i ON i.id = e.inscription_id - WHERE i.genesis_id = ${args.inscription.genesis_id} - `; - if (events.count === 0) return; - // Traverse all activities generated by this inscription and roll back actions that are NOT - // otherwise handled by the ON DELETE CASCADE postgres constraint. 
- for (const event of events) { - switch (event.operation) { - case 'deploy': - await this.rollBackDeploy(event); - break; - case 'mint': - await this.rollBackMint(event); - break; - case 'transfer': - await this.rollBackTransfer(event); - break; - } + for (const [ticker, num] of cache.tokenTxCounts) + await sql` + UPDATE brc20_tokens SET tx_count = tx_count + ${num} WHERE ticker = ${ticker} + `; + if (cache.operationCounts.size) { + const entries = []; + for (const [operation, count] of cache.operationCounts) entries.push({ operation, count }); + for await (const batch of batchIterate(entries, INSERT_BATCH_SIZE)) + await sql` + INSERT INTO brc20_counts_by_operation ${sql(batch)} + ON CONFLICT (operation) DO UPDATE SET + count = brc20_counts_by_operation.count + EXCLUDED.count + `; } - } - - async rollBackLocation(args: { location: LocationData }): Promise { - const events = await this.sql` - SELECT e.* FROM brc20_events AS e - INNER JOIN locations AS l ON l.id = e.genesis_location_id - WHERE output = ${args.location.output} AND "offset" = ${args.location.offset} - `; - if (events.count === 0) return; - // Traverse all activities generated by this location and roll back actions that are NOT - // otherwise handled by the ON DELETE CASCADE postgres constraint. - for (const event of events) { - switch (event.operation) { - case 'transfer_send': - await this.rollBackTransferSend(event); - break; - } + if (cache.addressOperationCounts.size) { + const entries = []; + for (const [address, map] of cache.addressOperationCounts) + for (const [operation, count] of map) entries.push({ address, operation, count }); + for await (const batch of batchIterate(entries, INSERT_BATCH_SIZE)) + await sql` + INSERT INTO brc20_counts_by_address_operation ${sql(batch)} + ON CONFLICT (address, operation) DO UPDATE SET + count = brc20_counts_by_address_operation.count + EXCLUDED.count + `; + } + if (cache.totalBalanceChanges.size) { + const entries = []; + for (const [address, map] of cache.totalBalanceChanges) + for (const [ticker, values] of map) + entries.push({ + ticker, + address, + avail_balance: values.avail.toString(), + trans_balance: values.trans.toString(), + total_balance: values.total.toString(), + }); + for await (const batch of batchIterate(entries, INSERT_BATCH_SIZE)) + await sql` + INSERT INTO brc20_total_balances ${sql(batch)} + ON CONFLICT (ticker, address) DO UPDATE SET + avail_balance = brc20_total_balances.avail_balance + EXCLUDED.avail_balance, + trans_balance = brc20_total_balances.trans_balance + EXCLUDED.trans_balance, + total_balance = brc20_total_balances.total_balance + EXCLUDED.total_balance + `; } } - private async rollBackDeploy(activity: DbBrc20DeployEvent): Promise { - // - tx_count is lost successfully, since the deploy will be deleted. - await this.sql` - WITH decrease_event_count AS ( - UPDATE brc20_counts_by_event_type - SET count = count - 1 - WHERE event_type = 'deploy' - ), - decrease_address_event_count AS ( - UPDATE brc20_counts_by_address_event_type - SET deploy = deploy - 1 - WHERE address = (SELECT address FROM locations WHERE id = ${activity.genesis_location_id}) - ) - UPDATE brc20_counts_by_tokens - SET count = count - 1 - `; - } - - private async rollBackMint(activity: DbBrc20MintEvent): Promise { - // Get real minted amount and substract from places. 
- await this.sql` - WITH minted_balance AS ( - SELECT address, avail_balance - FROM brc20_balances - WHERE inscription_id = ${activity.inscription_id} AND type = ${DbBrc20BalanceTypeId.mint} - ), - deploy_update AS ( - UPDATE brc20_deploys - SET - minted_supply = minted_supply - (SELECT avail_balance FROM minted_balance), - tx_count = tx_count - 1 - WHERE id = ${activity.brc20_deploy_id} - ), - decrease_event_count AS ( - UPDATE brc20_counts_by_event_type - SET count = count - 1 - WHERE event_type = 'mint' - ), - decrease_address_event_count AS ( - UPDATE brc20_counts_by_address_event_type - SET mint = mint - 1 - WHERE address = (SELECT address FROM locations WHERE id = ${activity.genesis_location_id}) - ) - UPDATE brc20_total_balances SET - avail_balance = avail_balance - (SELECT avail_balance FROM minted_balance), - total_balance = total_balance - (SELECT avail_balance FROM minted_balance) - WHERE address = (SELECT address FROM minted_balance) - AND brc20_deploy_id = ${activity.brc20_deploy_id} - `; - } - - private async rollBackTransfer(activity: DbBrc20TransferEvent): Promise { - // Subtract tx_count per transfer event (transfer and transfer_send are - // separate events, so they will both be counted). - await this.sql` - WITH transferrable_balance AS ( - SELECT address, trans_balance - FROM brc20_balances - WHERE inscription_id = ${activity.inscription_id} AND type = ${DbBrc20BalanceTypeId.transferIntent} - ), - decrease_event_count AS ( - UPDATE brc20_counts_by_event_type - SET count = count - 1 - WHERE event_type = 'transfer' - ), - decrease_address_event_count AS ( - UPDATE brc20_counts_by_address_event_type - SET transfer = transfer - 1 - WHERE address = (SELECT address FROM locations WHERE id = ${activity.genesis_location_id}) - ), - decrease_tx_count AS ( - UPDATE brc20_deploys - SET tx_count = tx_count - 1 - WHERE id = ${activity.brc20_deploy_id} - ) - UPDATE brc20_total_balances SET - trans_balance = trans_balance - (SELECT trans_balance FROM transferrable_balance), - avail_balance = avail_balance + (SELECT trans_balance FROM transferrable_balance) - WHERE address = (SELECT address FROM transferrable_balance) - AND brc20_deploy_id = ${activity.brc20_deploy_id} - `; - } - - private async rollBackTransferSend(activity: DbBrc20TransferEvent): Promise { - await this.sqlWriteTransaction(async sql => { - // Get the sender/receiver address for this transfer. We need to get this in a separate query - // to know if we should alter the write query to accomodate a "return to sender" scenario. 
- const addressRes = await sql<{ returned_to_sender: boolean }[]>` - SELECT from_address = to_address AS returned_to_sender - FROM brc20_transfers - WHERE inscription_id = ${activity.inscription_id} + private async rollBackOperations(sql: PgSqlClient, cache: Brc20BlockCache) { + if (cache.totalBalanceChanges.size) { + for (const [address, map] of cache.totalBalanceChanges) + for (const [ticker, values] of map) + await sql` + UPDATE brc20_total_balances SET + avail_balance = avail_balance - ${values.avail}, + trans_balance = trans_balance - ${values.trans}, + total_balance = total_balance - ${values.total} + WHERE address = ${address} AND ticker = ${ticker} + `; + } + if (cache.addressOperationCounts.size) { + for (const [address, map] of cache.addressOperationCounts) + for (const [operation, count] of map) + await sql` + UPDATE brc20_counts_by_address_operation + SET count = count - ${count} + WHERE address = ${address} AND operation = ${operation} + `; + } + if (cache.operationCounts.size) { + for (const [operation, count] of cache.operationCounts) + await sql` + UPDATE brc20_counts_by_operation + SET count = count - ${count} + WHERE operation = ${operation} + `; + } + for (const [ticker, amount] of cache.tokenMintSupplies) + await sql` + UPDATE brc20_tokens SET minted_supply = minted_supply - ${amount.toString()} + WHERE ticker = ${ticker} `; - if (addressRes.count === 0) return; - const returnedToSender = addressRes[0].returned_to_sender; + for (const [ticker, num] of cache.tokenTxCounts) await sql` - WITH sent_balance_from AS ( - SELECT address, trans_balance - FROM brc20_balances - WHERE inscription_id = ${activity.inscription_id} - AND type = ${DbBrc20BalanceTypeId.transferFrom} - ), - sent_balance_to AS ( - SELECT address, avail_balance - FROM brc20_balances - WHERE inscription_id = ${activity.inscription_id} - AND type = ${DbBrc20BalanceTypeId.transferTo} - ), - decrease_event_count AS ( - UPDATE brc20_counts_by_event_type - SET count = count - 1 - WHERE event_type = 'transfer_send' - ), - ${ - returnedToSender - ? 
sql` - decrease_address_event_count AS ( - UPDATE brc20_counts_by_address_event_type - SET transfer_send = transfer_send - 1 - WHERE address = (SELECT address FROM sent_balance_from) - ), - undo_sent_balance AS ( - UPDATE brc20_total_balances SET - trans_balance = trans_balance - (SELECT trans_balance FROM sent_balance_from), - avail_balance = avail_balance + (SELECT trans_balance FROM sent_balance_from) - WHERE address = (SELECT address FROM sent_balance_from) - AND brc20_deploy_id = ${activity.brc20_deploy_id} - ) - ` - : sql` - decrease_address_event_count_from AS ( - UPDATE brc20_counts_by_address_event_type - SET transfer_send = transfer_send - 1 - WHERE address = (SELECT address FROM sent_balance_from) - ), - decrease_address_event_count_to AS ( - UPDATE brc20_counts_by_address_event_type - SET transfer_send = transfer_send - 1 - WHERE address = (SELECT address FROM sent_balance_to) - ), - undo_sent_balance_from AS ( - UPDATE brc20_total_balances SET - trans_balance = trans_balance - (SELECT trans_balance FROM sent_balance_from), - total_balance = total_balance - (SELECT trans_balance FROM sent_balance_from) - WHERE address = (SELECT address FROM sent_balance_from) - AND brc20_deploy_id = ${activity.brc20_deploy_id} - ), - undo_sent_balance_to AS ( - UPDATE brc20_total_balances SET - avail_balance = avail_balance - (SELECT avail_balance FROM sent_balance_to), - total_balance = total_balance - (SELECT avail_balance FROM sent_balance_to) - WHERE address = (SELECT address FROM sent_balance_to) - AND brc20_deploy_id = ${activity.brc20_deploy_id} - ) - ` - } - UPDATE brc20_deploys - SET tx_count = tx_count - 1 - WHERE id = ${activity.brc20_deploy_id} + UPDATE brc20_tokens SET tx_count = tx_count - ${num} WHERE ticker = ${ticker} `; - }); + for (const [inscription_id, _] of cache.transferReceivers) + await sql` + UPDATE brc20_operations SET to_address = NULL + WHERE genesis_id = ${inscription_id} AND operation = 'transfer_send' + `; + if (cache.operations.length) { + const blockHeights = cache.operations.map(o => o.block_height); + for await (const batch of batchIterate(blockHeights, INSERT_BATCH_SIZE)) + await sql` + DELETE FROM brc20_operations WHERE block_height IN ${sql(batch)} + `; + } + if (cache.tokens.length) { + const tickers = cache.tokens.map(t => t.ticker); + for await (const batch of batchIterate(tickers, INSERT_BATCH_SIZE)) + await sql` + DELETE FROM brc20_tokens WHERE ticker IN ${sql(batch)} + `; + } } async getTokens( args: { ticker?: string[]; order_by?: Brc20TokenOrderBy } & DbInscriptionIndexPaging ): Promise> { - const tickerPrefixCondition = this.sqlOr( - args.ticker?.map(t => this.sql`d.ticker_lower LIKE LOWER(${t}) || '%'`) + const tickerPrefixCondition = sqlOr( + this.sql, + args.ticker?.map(t => this.sql`d.ticker LIKE LOWER(${t}) || '%'`) ); const orderBy = args.order_by === Brc20TokenOrderBy.tx_count - ? this.sql`tx_count DESC` // tx_count + ? this.sql`d.tx_count DESC` // tx_count : this.sql`l.block_height DESC, l.tx_index DESC`; // default: `index` const results = await this.sql<(DbBrc20Token & { total: number })[]>` ${ args.ticker === undefined ? this.sql`WITH global_count AS ( - SELECT COALESCE(count, 0) AS count FROM brc20_counts_by_tokens + SELECT COALESCE(count, 0) AS count + FROM brc20_counts_by_operation + WHERE operation = 'deploy' )` : this.sql`` } SELECT - ${this.sql(BRC20_DEPLOYS_COLUMNS.map(c => `d.${c}`))}, - i.number, i.genesis_id, l.timestamp, + d.*, i.number, l.timestamp, ${ args.ticker ? 
this.sql`COUNT(*) OVER()` : this.sql`(SELECT count FROM global_count)` } AS total - FROM brc20_deploys AS d - INNER JOIN inscriptions AS i ON i.id = d.inscription_id - INNER JOIN genesis_locations AS g ON g.inscription_id = d.inscription_id + FROM brc20_tokens AS d + INNER JOIN inscriptions AS i ON i.genesis_id = d.genesis_id + INNER JOIN genesis_locations AS g ON g.inscription_id = i.id INNER JOIN locations AS l ON l.id = g.location_id ${tickerPrefixCondition ? this.sql`WHERE ${tickerPrefixCondition}` : this.sql``} ORDER BY ${orderBy} @@ -648,15 +348,12 @@ export class Brc20PgStore extends BasePgStoreModule { block_height?: number; } & DbInscriptionIndexPaging ): Promise> { - const ticker = this.sqlOr( - args.ticker?.map(t => this.sql`d.ticker_lower LIKE LOWER(${t}) || '%'`) + const ticker = sqlOr( + this.sql, + args.ticker?.map(t => this.sql`d.ticker LIKE LOWER(${t}) || '%'`) ); // Change selection table depending if we're filtering by block height or not. const results = await this.sql<(DbBrc20Balance & { total: number })[]>` - WITH token_ids AS ( - SELECT id FROM brc20_deploys AS d - WHERE ${ticker ? ticker : this.sql`FALSE`} - ) ${ args.block_height ? this.sql` @@ -666,24 +363,23 @@ export class Brc20PgStore extends BasePgStoreModule { SUM(b.trans_balance) AS trans_balance, SUM(b.avail_balance + b.trans_balance) AS total_balance, COUNT(*) OVER() as total - FROM brc20_balances AS b - INNER JOIN brc20_deploys AS d ON d.id = b.brc20_deploy_id - INNER JOIN locations AS l ON l.id = b.location_id + FROM brc20_operations AS b + INNER JOIN brc20_tokens AS d ON d.ticker = b.ticker WHERE b.address = ${args.address} - AND l.block_height <= ${args.block_height} - ${ticker ? this.sql`AND brc20_deploy_id IN (SELECT id FROM token_ids)` : this.sql``} + AND b.block_height <= ${args.block_height} + ${ticker ? this.sql`AND ${ticker}` : this.sql``} GROUP BY d.ticker, d.decimals HAVING SUM(b.avail_balance + b.trans_balance) > 0 ` : this.sql` SELECT d.ticker, d.decimals, b.avail_balance, b.trans_balance, b.total_balance, COUNT(*) OVER() as total FROM brc20_total_balances AS b - INNER JOIN brc20_deploys AS d ON d.id = b.brc20_deploy_id + INNER JOIN brc20_tokens AS d ON d.ticker = b.ticker WHERE b.total_balance > 0 AND b.address = ${args.address} - ${ticker ? this.sql`AND brc20_deploy_id IN (SELECT id FROM token_ids)` : this.sql``} + ${ticker ? 
this.sql`AND ${ticker}` : this.sql``} ` } LIMIT ${args.limit} @@ -699,18 +395,17 @@ export class Brc20PgStore extends BasePgStoreModule { const result = await this.sql` WITH token AS ( SELECT - ${this.sql(BRC20_DEPLOYS_COLUMNS.map(c => `d.${c}`))}, - i.number, i.genesis_id, l.timestamp - FROM brc20_deploys AS d - INNER JOIN inscriptions AS i ON i.id = d.inscription_id - INNER JOIN genesis_locations AS g ON g.inscription_id = d.inscription_id + d.*, i.number, i.genesis_id, l.timestamp + FROM brc20_tokens AS d + INNER JOIN inscriptions AS i ON i.genesis_id = d.genesis_id + INNER JOIN genesis_locations AS g ON g.inscription_id = i.id INNER JOIN locations AS l ON l.id = g.location_id - WHERE ticker_lower = LOWER(${args.ticker}) + WHERE d.ticker = LOWER(${args.ticker}) ), holders AS ( SELECT COUNT(*) AS count FROM brc20_total_balances - WHERE brc20_deploy_id = (SELECT id FROM token) AND total_balance > 0 + WHERE ticker = (SELECT ticker FROM token) AND total_balance > 0 ) SELECT *, COALESCE((SELECT count FROM holders), 0) AS holders FROM token @@ -725,15 +420,16 @@ export class Brc20PgStore extends BasePgStoreModule { ): Promise | undefined> { return await this.sqlTransaction(async sql => { const token = await sql<{ id: string; decimals: number }[]>` - SELECT id, decimals FROM brc20_deploys WHERE ticker_lower = LOWER(${args.ticker}) + SELECT ticker FROM brc20_tokens WHERE ticker = LOWER(${args.ticker}) `; if (token.count === 0) return; const results = await sql<(DbBrc20Holder & { total: number })[]>` SELECT - address, ${token[0].decimals}::int AS decimals, total_balance, COUNT(*) OVER() AS total - FROM brc20_total_balances - WHERE brc20_deploy_id = ${token[0].id} - ORDER BY total_balance DESC + b.address, d.decimals, b.total_balance, COUNT(*) OVER() AS total + FROM brc20_total_balances AS b + INNER JOIN brc20_tokens AS d USING (ticker) + WHERE b.ticker = LOWER(${args.ticker}) + ORDER BY b.total_balance DESC LIMIT ${args.limit} OFFSET ${args.offset} `; @@ -767,84 +463,71 @@ export class Brc20PgStore extends BasePgStoreModule { filters.address != undefined && filters.address != ''); const needsTickerCount = filterLength === 1 && filters.ticker && filters.ticker.length > 0; - - // Which operations do we need if we're filtering by address? - const sanitizedOperations: DbBrc20EventOperation[] = []; - for (const i of filters.operation ?? BRC20_OPERATIONS) - if (BRC20_OPERATIONS.includes(i)) sanitizedOperations?.push(i as DbBrc20EventOperation); - - // Which tickers are we filtering for? - const tickerConditions = this.sqlOr( - filters.ticker?.map(t => this.sql`ticker_lower = LOWER(${t})`) - ); + const operationsFilter = filters.operation?.filter(i => i !== 'transfer_receive'); return this.sqlTransaction(async sql => { - // The postgres query planner has trouble selecting an optimal plan when the WHERE condition - // checks any column from the `brc20_deploys` table. If the user is filtering by ticker, we - // should get the token IDs first and use those to filter directly in the `brc20_events` - // table. - const tickerIds = tickerConditions - ? (await sql<{ id: string }[]>`SELECT id FROM brc20_deploys WHERE ${tickerConditions}`).map( - i => i.id - ) - : undefined; const results = await sql<(DbBrc20Activity & { total: number })[]>` WITH event_count AS (${ - // Select count from the correct count cache table. needsGlobalEventCount ? sql` SELECT COALESCE(SUM(count), 0) AS count - FROM brc20_counts_by_event_type - ${filters.operation ? 
sql`WHERE event_type IN ${sql(filters.operation)}` : sql``} + FROM brc20_counts_by_operation + ${operationsFilter ? sql`WHERE operation IN ${sql(operationsFilter)}` : sql``} ` : needsAddressEventCount ? sql` - SELECT COALESCE(${sql.unsafe(sanitizedOperations.join('+'))}, 0) AS count - FROM brc20_counts_by_address_event_type + SELECT SUM(count) AS count + FROM brc20_counts_by_address_operation WHERE address = ${filters.address} + ${operationsFilter ? sql`AND operation IN ${sql(operationsFilter)}` : sql``} ` - : needsTickerCount && tickerIds !== undefined + : needsTickerCount && filters.ticker !== undefined ? sql` SELECT COALESCE(SUM(tx_count), 0) AS count - FROM brc20_deploys AS d - WHERE id IN ${sql(tickerIds)} + FROM brc20_tokens AS d + WHERE ticker IN ${sql(filters.ticker)} ` : sql`SELECT NULL AS count` }) SELECT e.operation, + e.avail_balance, + e.trans_balance, + e.address, + e.to_address, d.ticker, l.genesis_id AS inscription_id, l.block_height, l.block_hash, l.tx_id, - l.address, l.timestamp, l.output, l.offset, d.max AS deploy_max, d.limit AS deploy_limit, d.decimals AS deploy_decimals, - (SELECT amount FROM brc20_mints WHERE id = e.mint_id) AS mint_amount, - (SELECT amount || ';' || from_address || ';' || COALESCE(to_address, '') FROM brc20_transfers WHERE id = e.transfer_id) AS transfer_data, ${ needsGlobalEventCount || needsAddressEventCount || needsTickerCount ? sql`(SELECT count FROM event_count)` : sql`COUNT(*) OVER()` } AS total - FROM brc20_events AS e - INNER JOIN brc20_deploys AS d ON e.brc20_deploy_id = d.id - INNER JOIN locations AS l ON e.genesis_location_id = l.id + FROM brc20_operations AS e + INNER JOIN brc20_tokens AS d ON d.ticker = e.ticker + INNER JOIN locations AS l ON e.genesis_id = l.genesis_id AND e.block_height = l.block_height AND e.tx_index = l.tx_index WHERE TRUE - ${filters.operation ? sql`AND e.operation IN ${sql(filters.operation)}` : sql``} - ${tickerIds ? sql`AND e.brc20_deploy_id IN ${sql(tickerIds)}` : sql``} + ${ + operationsFilter + ? sql`AND e.operation IN ${sql(operationsFilter)}` + : sql`AND e.operation <> 'transfer_receive'` + } + ${filters.ticker ? sql`AND e.ticker IN ${sql(filters.ticker)}` : sql``} ${filters.block_height ? sql`AND l.block_height = ${filters.block_height}` : sql``} ${ filters.address - ? sql`AND (e.address = ${filters.address} OR e.from_address = ${filters.address})` + ? 
sql`AND (e.address = ${filters.address} OR e.to_address = ${filters.address})` : sql`` } - ORDER BY l.block_height DESC, l.tx_index DESC + ORDER BY e.block_height DESC, e.tx_index DESC LIMIT ${page.limit} OFFSET ${page.offset} `; diff --git a/src/pg/brc20/helpers.ts b/src/pg/brc20/helpers.ts index 6aec1697..1d04e767 100644 --- a/src/pg/brc20/helpers.ts +++ b/src/pg/brc20/helpers.ts @@ -1,96 +1,98 @@ -import { Static, Type } from '@fastify/type-provider-typebox'; -import { TypeCompiler } from '@sinclair/typebox/compiler'; import BigNumber from 'bignumber.js'; -import { hexToBuffer } from '../../api/util/helpers'; -import { DbLocationTransferType, InscriptionRevealData } from '../types'; +import { DbBrc20Operation, DbBrc20OperationInsert, DbBrc20TokenInsert } from './types'; +import * as postgres from 'postgres'; +import { PgSqlClient } from '@hirosystems/api-toolkit'; -const Brc20TickerSchema = Type.String({ minLength: 1 }); -const Brc20NumberSchema = Type.RegEx(/^((\d+)|(\d*\.?\d+))$/); +export function sqlOr( + sql: PgSqlClient, + partials: postgres.PendingQuery[] | undefined +) { + return partials?.reduce((acc, curr) => sql`${acc} OR ${curr}`); +} -const Brc20DeploySchema = Type.Object( - { - p: Type.Literal('brc-20'), - op: Type.Literal('deploy'), - tick: Brc20TickerSchema, - max: Brc20NumberSchema, - lim: Type.Optional(Brc20NumberSchema), - dec: Type.Optional(Type.RegEx(/^\d+$/)), - self_mint: Type.Optional(Type.Literal('true')), - }, - { additionalProperties: true } -); -export type Brc20Deploy = Static; +export interface AddressBalanceData { + avail: BigNumber; + trans: BigNumber; + total: BigNumber; +} -const Brc20MintSchema = Type.Object( - { - p: Type.Literal('brc-20'), - op: Type.Literal('mint'), - tick: Brc20TickerSchema, - amt: Brc20NumberSchema, - }, - { additionalProperties: true } -); -export type Brc20Mint = Static; +export class Brc20BlockCache { + tokens: DbBrc20TokenInsert[] = []; + operations: DbBrc20OperationInsert[] = []; + tokenMintSupplies = new Map(); + tokenTxCounts = new Map(); + operationCounts = new Map(); + addressOperationCounts = new Map>(); + totalBalanceChanges = new Map>(); + transferReceivers = new Map(); -const Brc20TransferSchema = Type.Object( - { - p: Type.Literal('brc-20'), - op: Type.Literal('transfer'), - tick: Brc20TickerSchema, - amt: Brc20NumberSchema, - }, - { additionalProperties: true } -); -export type Brc20Transfer = Static; + increaseOperationCount(operation: DbBrc20Operation) { + this.increaseOperationCountInternal(this.operationCounts, operation); + } + private increaseOperationCountInternal( + map: Map, + operation: DbBrc20Operation + ) { + const current = map.get(operation); + if (current == undefined) { + map.set(operation, 1); + } else { + map.set(operation, current + 1); + } + } -const Brc20Schema = Type.Union([Brc20DeploySchema, Brc20MintSchema, Brc20TransferSchema]); -const Brc20C = TypeCompiler.Compile(Brc20Schema); -export type Brc20 = Static; + increaseTokenMintedSupply(ticker: string, amount: BigNumber) { + const current = this.tokenMintSupplies.get(ticker); + if (current == undefined) { + this.tokenMintSupplies.set(ticker, amount); + } else { + this.tokenMintSupplies.set(ticker, current.plus(amount)); + } + } -export const UINT64_MAX = BigNumber('18446744073709551615'); // 20 digits -// Only compare against `UINT64_MAX` if the number is at least the same number of digits. 
-const numExceedsMax = (num: string) => num.length >= 20 && UINT64_MAX.isLessThan(num); + increaseTokenTxCount(ticker: string) { + const current = this.tokenTxCounts.get(ticker); + if (current == undefined) { + this.tokenTxCounts.set(ticker, 1); + } else { + this.tokenTxCounts.set(ticker, current + 1); + } + } -/** - * Activation block height for - * https://l1f.discourse.group/t/brc-20-proposal-for-issuance-and-burn-enhancements-brc20-ip-1/621/1 - */ -export const BRC20_SELF_MINT_ACTIVATION_BLOCK = 837090; + increaseAddressOperationCount(address: string, operation: DbBrc20Operation) { + const current = this.addressOperationCounts.get(address); + if (current == undefined) { + const opMap = new Map(); + this.increaseOperationCountInternal(opMap, operation); + this.addressOperationCounts.set(address, opMap); + } else { + this.increaseOperationCountInternal(current, operation); + } + } -export function brc20FromInscription(reveal: InscriptionRevealData): Brc20 | undefined { - if ( - reveal.inscription.classic_number < 0 || - reveal.inscription.number < 0 || - reveal.location.transfer_type != DbLocationTransferType.transferred || - !['text/plain', 'application/json'].includes(reveal.inscription.mime_type) - ) - return; - try { - const json = JSON.parse(hexToBuffer(reveal.inscription.content as string).toString('utf-8')); - if (Brc20C.Check(json)) { - // Check ticker byte length - const tick = Buffer.from(json.tick); - if (json.op === 'deploy') { - if ( - tick.length === 5 && - (reveal.location.block_height < BRC20_SELF_MINT_ACTIVATION_BLOCK || - json.self_mint !== 'true') - ) - return; - } - if (tick.length < 4 || tick.length > 5) return; - // Check numeric values. - if (json.op === 'deploy') { - if ((parseFloat(json.max) == 0 && json.self_mint !== 'true') || numExceedsMax(json.max)) - return; - if (json.lim && (parseFloat(json.lim) == 0 || numExceedsMax(json.lim))) return; - if (json.dec && parseFloat(json.dec) > 18) return; + updateAddressBalance( + ticker: string, + address: string, + availBalance: BigNumber, + transBalance: BigNumber, + totalBalance: BigNumber + ) { + const current = this.totalBalanceChanges.get(address); + if (current === undefined) { + const opMap = new Map(); + opMap.set(ticker, { avail: availBalance, trans: transBalance, total: totalBalance }); + this.totalBalanceChanges.set(address, opMap); + } else { + const currentTick = current.get(ticker); + if (currentTick === undefined) { + current.set(ticker, { avail: availBalance, trans: transBalance, total: totalBalance }); } else { - if (parseFloat(json.amt) == 0 || numExceedsMax(json.amt)) return; + current.set(ticker, { + avail: availBalance.plus(currentTick.avail), + trans: transBalance.plus(currentTick.trans), + total: totalBalance.plus(currentTick.total), + }); } - return json; } - } catch (error) { - // Not a BRC-20 inscription. 
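For reference, a minimal illustrative sketch of how the Brc20BlockCache introduced above accumulates per-block BRC-20 state before it is flushed to Postgres. The ticker, address, and amounts are hypothetical, and the import paths assume a file sitting next to src/pg/brc20/helpers.ts; only the class and enum shown in this patch are used:

    import BigNumber from 'bignumber.js';
    import { Brc20BlockCache } from './helpers'; // src/pg/brc20/helpers.ts above
    import { DbBrc20Operation } from './types'; // src/pg/brc20/types.ts below

    const cache = new Brc20BlockCache();

    // A mint of 10000 'pepe' credits the minter's available and total balance
    // and bumps the per-token and per-address counters.
    cache.updateAddressBalance('pepe', 'bc1qminter', new BigNumber(10000), new BigNumber(0), new BigNumber(10000));
    cache.increaseTokenMintedSupply('pepe', new BigNumber(10000));
    cache.increaseTokenTxCount('pepe');
    cache.increaseOperationCount(DbBrc20Operation.mint);
    cache.increaseAddressOperationCount('bc1qminter', DbBrc20Operation.mint);

    // A later transfer inscription moves 9000 from the available to the transferable
    // balance; the total is unchanged, so its delta is zero.
    cache.updateAddressBalance('pepe', 'bc1qminter', new BigNumber(-9000), new BigNumber(9000), new BigNumber(0));

    // Both balance updates collapse into a single (address, ticker) entry,
    // { avail: 1000, trans: 9000, total: 10000 }, which the apply path writes with one batched upsert.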
} } diff --git a/src/pg/brc20/types.ts b/src/pg/brc20/types.ts index 5b28258a..9737dfb6 100644 --- a/src/pg/brc20/types.ts +++ b/src/pg/brc20/types.ts @@ -1,68 +1,40 @@ -import { DbLocationTransferType } from '../types'; +import { PgNumeric } from '@hirosystems/api-toolkit'; -export type DbBrc20Location = { - id: string; - inscription_id: string | null; - block_height: string; - tx_id: string; - tx_index: number; - address: string | null; - transfer_type: DbLocationTransferType; -}; - -export type DbBrc20DeployInsert = { - inscription_id: string; +export type DbBrc20TokenInsert = { + ticker: string; + genesis_id: string; block_height: string; tx_id: string; address: string; - ticker: string; - max: string; - decimals: string; - limit: string | null; - tx_count: number; + max: PgNumeric; + limit: PgNumeric; + decimals: PgNumeric; self_mint: boolean; }; -export type DbBrc20MintInsert = { - inscription_id: string; - brc20_deploy_id: string; - block_height: string; - tx_id: string; - address: string; - amount: string; -}; +export enum DbBrc20Operation { + deploy = 'deploy', + mint = 'mint', + transfer = 'transfer', + transferSend = 'transfer_send', + transferReceive = 'transfer_receive', +} -export type DbBrc20Deploy = { - id: string; - inscription_id: string; - block_height: string; - tx_id: string; - address: string; +export type DbBrc20OperationInsert = { + genesis_id: string; ticker: string; - max: string; - decimals: string; - limit?: string; -}; - -export type DbBrc20TransferInsert = { - inscription_id: string; - brc20_deploy_id: string; - block_height: string; - tx_id: string; - from_address: string; - to_address: string | null; - amount: string; + block_height: PgNumeric; + tx_index: PgNumeric; + address: string; + avail_balance: PgNumeric; + trans_balance: PgNumeric; + operation: DbBrc20Operation; }; -export type DbBrc20Transfer = { - id: string; - inscription_id: string; - brc20_deploy_id: string; - block_height: string; - tx_id: string; - from_address: string; - to_address?: string; - amount: string; +export type DbBrc20CountsByAddressInsert = { + address: string; + operation: DbBrc20Operation; + count: number; }; export type DbBrc20Token = { @@ -145,8 +117,10 @@ export type DbBrc20TransferEvent = BaseEvent & { export type DbBrc20Event = DbBrc20DeployEvent | DbBrc20MintEvent | DbBrc20TransferEvent; -type BaseActivity = { +export type DbBrc20Activity = { ticker: string; + avail_balance: string; + trans_balance: string; deploy_decimals: number; deploy_max: string; deploy_limit: string | null; @@ -159,25 +133,10 @@ type BaseActivity = { block_hash: string; tx_id: string; address: string; + to_address: string | null; timestamp: number; }; -export type DbBrc20DeployActivity = BaseActivity & { - operation: DbBrc20EventOperation.deploy; -}; - -export type DbBrc20MintActivity = BaseActivity & { - operation: DbBrc20EventOperation.mint; - mint_amount: string; -}; - -export type DbBrc20TransferActivity = BaseActivity & { - operation: DbBrc20EventOperation.transfer | DbBrc20EventOperation.transferSend; - transfer_data: string; -}; - -export type DbBrc20Activity = DbBrc20DeployActivity | DbBrc20MintActivity | DbBrc20TransferActivity; - export const BRC20_DEPLOYS_COLUMNS = [ 'id', 'inscription_id', @@ -192,14 +151,3 @@ export const BRC20_DEPLOYS_COLUMNS = [ 'tx_count', 'self_mint', ]; - -export const BRC20_TRANSFERS_COLUMNS = [ - 'id', - 'inscription_id', - 'brc20_deploy_id', - 'block_height', - 'tx_id', - 'from_address', - 'to_address', - 'amount', -]; diff --git a/src/pg/pg-store.ts 
b/src/pg/pg-store.ts index f08df0c2..433cf3b3 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -40,7 +40,7 @@ import { export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); export const ORDINALS_GENESIS_BLOCK = 767430; -const INSERT_BATCH_SIZE = 4000; +export const INSERT_BATCH_SIZE = 4000; type InscriptionIdentifier = { genesis_id: string } | { number: number }; @@ -92,6 +92,7 @@ export class PgStore extends BasePgStore { logger.info(`PgStore rolling back block ${event.block_identifier.index}`); const time = stopwatch(); const rollbacks = revealInsertsFromOrdhookEvent(event); + await this.brc20.updateBrc20Operations(event, 'rollback'); for (const writeChunk of batchIterate(rollbacks, 1000)) await this.rollBackInscriptions(writeChunk); updatedBlockHeightMin = Math.min(updatedBlockHeightMin, event.block_identifier.index); @@ -125,6 +126,7 @@ export class PgStore extends BasePgStore { for (const writeChunk of batchIterate(writes, INSERT_BATCH_SIZE)) await this.insertInscriptions(writeChunk, payload.chainhook.is_streaming_blocks); updatedBlockHeightMin = Math.min(updatedBlockHeightMin, event.block_identifier.index); + await this.brc20.updateBrc20Operations(event, 'apply'); logger.info( `PgStore ingested block ${event.block_identifier.index} in ${time.getElapsedSeconds()}s` ); @@ -574,11 +576,9 @@ export class PgStore extends BasePgStore { logger.info(`PgStore ${action} at block ${reveal.location.block_height}`); } - // 3. Recursions, Counts and BRC-20 + // 3. Recursions and counts await this.updateInscriptionRecursions(reveals); await this.counts.applyInscriptions(inscriptionInserts); - if (ENV.BRC20_BLOCK_SCAN_ENABLED) - await this.brc20.insertOperations({ reveals: revealOutputs, pointers }); }); } @@ -628,7 +628,6 @@ export class PgStore extends BasePgStore { // Roll back events in reverse so BRC-20 keeps a sane order. 
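     // (i.e. later activity, such as a transfer_send, is undone before the earlier operation it depends on)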
for (const rollback of rollbacks.reverse()) { if ('inscription' in rollback) { - await this.brc20.rollBackInscription({ inscription: rollback.inscription }); await this.counts.rollBackInscription({ inscription: rollback.inscription, location: rollback.location, @@ -638,7 +637,6 @@ export class PgStore extends BasePgStore { `PgStore rollback reveal #${rollback.inscription.number} (${rollback.inscription.genesis_id}) at block ${rollback.location.block_height}` ); } else { - await this.brc20.rollBackLocation({ location: rollback.location }); await this.recalculateCurrentLocationPointerFromLocationRollBack({ location: rollback.location, }); diff --git a/tests/brc-20/api.test.ts b/tests/brc-20/api.test.ts new file mode 100644 index 00000000..642609eb --- /dev/null +++ b/tests/brc-20/api.test.ts @@ -0,0 +1,1394 @@ +import { runMigrations } from '@hirosystems/api-toolkit'; +import { buildApiServer } from '../../src/api/init'; +import { Brc20ActivityResponse, Brc20TokenResponse } from '../../src/api/schemas'; +import { MIGRATIONS_DIR, PgStore } from '../../src/pg/pg-store'; +import { + BRC20_GENESIS_BLOCK, + TestChainhookPayloadBuilder, + TestFastifyServer, + deployAndMintPEPE, + incrementing, + randomHash, +} from '../helpers'; + +describe('BRC-20 API', () => { + let db: PgStore; + let fastify: TestFastifyServer; + + beforeEach(async () => { + await runMigrations(MIGRATIONS_DIR, 'up'); + db = await PgStore.connect({ skipMigrations: true }); + fastify = await buildApiServer({ db }); + }); + + afterEach(async () => { + await fastify.close(); + await db.close(); + await runMigrations(MIGRATIONS_DIR, 'down'); + }); + + describe('/brc-20/tokens', () => { + test('tokens endpoint', async () => { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: BRC20_GENESIS_BLOCK }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .brc20( + { + deploy: { + inscription_id: + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + tick: 'pepe', + max: '21000000', + lim: '21000000', + dec: '18', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + self_mint: false, + }, + }, + { inscription_number: 0 } + ) + .build() + ); + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens/pepe`, + }); + expect(response.statusCode).toBe(200); + expect(response.json()).toStrictEqual({ + token: { + id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + number: 0, + block_height: BRC20_GENESIS_BLOCK, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + ticker: 'pepe', + max_supply: '21000000.000000000000000000', + mint_limit: '21000000.000000000000000000', + decimals: 18, + deploy_timestamp: 1677803510000, + minted_supply: '0.000000000000000000', + tx_count: 1, + self_mint: false, + }, + supply: { + max_supply: '21000000.000000000000000000', + minted_supply: '0.000000000000000000', + holders: 0, + }, + }); + }); + + test('tokens filter by ticker prefix', async () => { + const inscriptionNumbers = incrementing(0); + const blockHeights = incrementing(BRC20_GENESIS_BLOCK); + + let transferHash = randomHash(); + let number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: transferHash }) + .brc20( + { + deploy: { + 
inscription_id: `${transferHash}i0`, + tick: 'pepe', + max: '21000000', + lim: '21000000', + dec: '18', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + self_mint: false, + }, + }, + { inscription_number: 0 } + ) + .build() + ); + + transferHash = randomHash(); + number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: transferHash }) + .brc20( + { + deploy: { + inscription_id: `${transferHash}i0`, + tick: 'peer', + max: '21000000', + lim: '21000000', + dec: '18', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + self_mint: false, + }, + }, + { inscription_number: 1 } + ) + .build() + ); + + transferHash = randomHash(); + number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: transferHash }) + .brc20( + { + deploy: { + inscription_id: `${transferHash}i0`, + tick: 'abcd', + max: '21000000', + lim: '21000000', + dec: '18', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + self_mint: false, + }, + }, + { inscription_number: 2 } + ) + .build() + ); + + transferHash = randomHash(); + number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: transferHash }) + .brc20( + { + deploy: { + inscription_id: `${transferHash}i0`, + tick: 'dcba', + max: '21000000', + lim: '21000000', + dec: '18', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + self_mint: false, + }, + }, + { inscription_number: 3 } + ) + .build() + ); + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?ticker=PE&ticker=AB`, + }); + expect(response.statusCode).toBe(200); + const responseJson = response.json(); + expect(responseJson.total).toBe(3); + expect(responseJson.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ ticker: 'pepe' }), + expect.objectContaining({ ticker: 'peer' }), + expect.objectContaining({ ticker: 'abcd' }), + ]) + ); + }); + + test('tokens using order_by tx_count', async () => { + // Setup + const inscriptionNumbers = incrementing(0); + const blockHeights = incrementing(BRC20_GENESIS_BLOCK); + const addressA = 'bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz'; + const addressB = 'bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4'; + + // A deploys pepe + let number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: randomHash() }) + .brc20( + { + deploy: { + inscription_id: `${randomHash()}i0`, + tick: 'pepe', + max: '21000000', + lim: '21000000', + dec: '18', + address: addressA, + self_mint: false, + }, + }, + { inscription_number: 0 } + ) + .build() + ); + + // A mints 10000 pepe 10 times (will later be rolled back) + const pepeMints = []; + for (let i = 0; i < 10; i++) { + const txHash = randomHash(); + number = inscriptionNumbers.next().value; + const payload = new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: txHash }) + .brc20( + { + mint: { + inscription_id: `${txHash}i0`, + tick: 'pepe', + address: addressA, + amt: '10000', + }, + }, + { 
inscription_number: i + 1 } + ) + .build(); + pepeMints.push(payload); + await db.updateInscriptions(payload); + } + + // B deploys abcd + number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: randomHash() }) + .brc20( + { + deploy: { + inscription_id: `${randomHash()}i0`, + tick: 'abcd', + max: '21000000', + lim: '21000000', + dec: '18', + address: addressB, + self_mint: false, + }, + }, + { inscription_number: 11 } + ) + .build() + ); + + // B mints 10000 abcd + number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: randomHash() }) + .brc20( + { + mint: { + inscription_id: `${randomHash()}i0`, + tick: 'abcd', + address: addressA, + amt: '10000', + }, + }, + { inscription_number: 12 } + ) + .build() + ); + + // B send 1000 abcd to A + // (create inscription, transfer) + const txHashTransfer = randomHash(); + number = inscriptionNumbers.next().value; + const payloadTransfer = new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: txHashTransfer }) + .brc20( + { + transfer: { + inscription_id: `${txHashTransfer}i0`, + tick: 'abcd', + address: addressB, + amt: '1000', + }, + }, + { inscription_number: 13 } + ) + .build(); + await db.updateInscriptions(payloadTransfer); + // (send inscription, transfer_send) + const txHashTransferSend = randomHash(); + const payloadTransferSend = new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: txHashTransferSend }) + .brc20( + { + transfer_send: { + tick: 'abcd', + inscription_id: `${txHashTransfer}i0`, + amt: '1000', + sender_address: addressB, + receiver_address: addressA, + }, + }, + { inscription_number: 13 } + ) + .build(); + await db.updateInscriptions(payloadTransferSend); + + let response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens`, + }); + expect(response.statusCode).toBe(200); + let json = response.json(); + expect(json.total).toBe(2); + expect(json.results).toHaveLength(2); + + // WITHOUT tx_count sort: + expect(json.results).toEqual([ + // The first result is the token with the latest activity (abcd) + expect.objectContaining({ + ticker: 'abcd', + tx_count: 4, + } as Brc20TokenResponse), + expect.objectContaining({ + ticker: 'pepe', + tx_count: 11, + } as Brc20TokenResponse), + ]); + + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?order_by=tx_count`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(2); + expect(json.results).toHaveLength(2); + + // WITH tx_count sort: The first result is the most active token (pepe) + expect(json.results).toEqual([ + expect.objectContaining({ + ticker: 'pepe', + tx_count: 11, + } as Brc20TokenResponse), + expect.objectContaining({ + ticker: 'abcd', + tx_count: 4, + } as Brc20TokenResponse), + ]); + + // Rollback pepe mints + for (const payload of pepeMints) { + const payloadRollback = { ...payload, apply: [], rollback: payload.apply }; + await db.updateInscriptions(payloadRollback); + } + + // WITH tx_count sort: The first result is the most active token (now abcd) + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?order_by=tx_count`, + }); + 
expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(2); + expect(json.results).toHaveLength(2); + expect(json.results).toEqual([ + expect.objectContaining({ + ticker: 'abcd', + tx_count: 4, + } as Brc20TokenResponse), + expect.objectContaining({ + ticker: 'pepe', + tx_count: 1, // only the deploy remains + } as Brc20TokenResponse), + ]); + + // Rollback abcd transfer + await db.updateInscriptions({ + ...payloadTransferSend, + apply: [], + rollback: payloadTransferSend.apply, + }); + await db.updateInscriptions({ + ...payloadTransfer, + apply: [], + rollback: payloadTransfer.apply, + }); + + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?order_by=tx_count`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(2); + expect(json.results).toHaveLength(2); + expect(json.results).toEqual([ + expect.objectContaining({ + ticker: 'abcd', + tx_count: 2, // only the deploy and mint remain + } as Brc20TokenResponse), + expect.objectContaining({ + ticker: 'pepe', + tx_count: 1, + } as Brc20TokenResponse), + ]); + }); + }); + + describe('/brc-20/activity', () => { + test('activity for token transfers', async () => { + // Setup + const inscriptionNumbers = incrementing(0); + const blockHeights = incrementing(BRC20_GENESIS_BLOCK); + const addressA = 'bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz'; + const addressB = 'bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4'; + + // A deploys pepe + let number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: randomHash() }) + .brc20( + { + deploy: { + inscription_id: `${randomHash()}i0`, + tick: 'pepe', + max: '21000000', + lim: '21000000', + dec: '18', + address: addressA, + self_mint: false, + }, + }, + { inscription_number: 0 } + ) + .build() + ); + + // Verify that the pepe deploy is in the activity feed + let response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + let json = response.json(); + expect(json.total).toBe(1); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'deploy', + ticker: 'pepe', + address: addressA, + deploy: expect.objectContaining({ + max_supply: '21000000.000000000000000000', + }), + } as Brc20ActivityResponse), + ]) + ); + + // A mints 10000 pepe + number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: randomHash() }) + .brc20( + { + mint: { + inscription_id: `${randomHash()}i0`, + tick: 'pepe', + address: addressA, + amt: '10000', + }, + }, + { inscription_number: 1 } + ) + .build() + ); + + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(2); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'deploy', + ticker: 'pepe', + } as Brc20ActivityResponse), + expect.objectContaining({ + operation: 'mint', + ticker: 'pepe', + address: addressA, + mint: { + amount: '10000.000000000000000000', + }, + } as Brc20ActivityResponse), + ]) + ); + + // B mints 10000 pepe + number = inscriptionNumbers.next().value; + await 
db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: randomHash() }) + .brc20( + { + mint: { + inscription_id: `${randomHash()}i0`, + tick: 'pepe', + address: addressB, + amt: '10000', + }, + }, + { inscription_number: 2 } + ) + .build() + ); + + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(3); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'mint', + ticker: 'pepe', + address: addressB, + mint: { + amount: '10000.000000000000000000', + }, + } as Brc20ActivityResponse), + ]) + ); + + // A creates transfer of 9000 pepe + const transferHash = randomHash(); + number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: transferHash }) + .brc20( + { + transfer: { + inscription_id: `${transferHash}i0`, + tick: 'pepe', + address: addressA, + amt: '9000', + }, + }, + { inscription_number: 3 } + ) + .build() + ); + + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(4); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer', + ticker: 'pepe', + address: addressA, + tx_id: transferHash, + transfer: { + amount: '9000.000000000000000000', + from_address: addressA, + }, + } as Brc20ActivityResponse), + ]) + ); + + // A sends transfer inscription to B (aka transfer/sale) + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: randomHash() }) + .brc20( + { + transfer_send: { + tick: 'pepe', + inscription_id: `${transferHash}i0`, + amt: '9000', + sender_address: addressA, + receiver_address: addressB, + }, + }, + { inscription_number: 3 } + ) + .build() + ); + + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(5); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer_send', + ticker: 'pepe', + tx_id: expect.not.stringMatching(transferHash), + address: addressB, + transfer_send: { + amount: '9000.000000000000000000', + from_address: addressA, + to_address: addressB, + }, + } as Brc20ActivityResponse), + ]) + ); + + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe&operation=transfer_send`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(1); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer_send', + ticker: 'pepe', + tx_id: expect.not.stringMatching(transferHash), + address: addressB, + transfer_send: { + amount: '9000.000000000000000000', + from_address: addressA, + to_address: addressB, + }, + } as Brc20ActivityResponse), + ]) + ); + }); + + test('activity for multiple token transfers among three participants', async () => { + // Step 1: A deploys a token + // Step 2: A mints 1000 of the token + // Step 3: B mints 2000 of the token + // Step 
4: A creates a transfer to B + // Step 5: B creates a transfer to C + // Step 6: A transfer_send the transfer to B + // Step 7: B transfer_send the transfer to C + + // Setup + const inscriptionNumbers = incrementing(0); + const blockHeights = incrementing(BRC20_GENESIS_BLOCK); + const addressA = 'bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz'; + const addressB = 'bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4'; + const addressC = 'bc1q9d80h0q5d3f54w7w8c3l2sguf9uset4ydw9xj2'; + + // Step 1: A deploys a token + let number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: randomHash() }) + .brc20( + { + deploy: { + inscription_id: `${randomHash()}i0`, + tick: 'pepe', + max: '21000000', + lim: '21000000', + dec: '18', + address: addressA, + self_mint: false, + }, + }, + { inscription_number: number } + ) + .build() + ); + + // Verify that the pepe deploy is in the activity feed + let response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + let json = response.json(); + expect(json.total).toBe(1); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'deploy', + ticker: 'pepe', + address: addressA, + deploy: expect.objectContaining({ + max_supply: '21000000.000000000000000000', + }), + } as Brc20ActivityResponse), + ]) + ); + + // Step 2: A mints 1000 of the token + number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: randomHash() }) + .brc20( + { + mint: { + inscription_id: `${randomHash()}i0`, + tick: 'pepe', + address: addressA, + amt: '1000', + }, + }, + { inscription_number: number } + ) + .build() + ); + + // Verify that the pepe mint is in the activity feed + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(2); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'mint', + ticker: 'pepe', + address: addressA, + mint: { + amount: '1000.000000000000000000', + }, + } as Brc20ActivityResponse), + ]) + ); + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe&address=${addressA}`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(2); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'deploy', + ticker: 'pepe', + address: addressA, + deploy: expect.objectContaining({ + max_supply: '21000000.000000000000000000', + }), + } as Brc20ActivityResponse), + expect.objectContaining({ + operation: 'mint', + ticker: 'pepe', + address: addressA, + mint: { + amount: '1000.000000000000000000', + }, + } as Brc20ActivityResponse), + ]) + ); + + // Step 3: B mints 2000 of the token + number = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: randomHash() }) + .brc20( + { + mint: { + inscription_id: `${randomHash()}i0`, + tick: 'pepe', + address: addressB, + amt: '2000', + }, + }, + { inscription_number: number } + ) + .build() + ); + + // Verify that 
the pepe mint is in the activity feed + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(3); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'mint', + ticker: 'pepe', + address: addressB, + mint: { + amount: '2000.000000000000000000', + }, + } as Brc20ActivityResponse), + ]) + ); + + // Step 4: A creates a transfer to B + const transferHashAB = randomHash(); + const numberAB = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: transferHashAB }) + .brc20( + { + transfer: { + inscription_id: `${transferHashAB}i0`, + tick: 'pepe', + address: addressA, + amt: '1000', + }, + }, + { inscription_number: numberAB } + ) + .build() + ); + + // Verify that the pepe transfer is in the activity feed + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(4); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer', + ticker: 'pepe', + address: addressA, + tx_id: transferHashAB, + transfer: { + amount: '1000.000000000000000000', + from_address: addressA, + }, + } as Brc20ActivityResponse), + ]) + ); + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe&address=${addressA}`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(3); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer', + ticker: 'pepe', + address: addressA, + tx_id: transferHashAB, + transfer: { + amount: '1000.000000000000000000', + from_address: addressA, + }, + } as Brc20ActivityResponse), + ]) + ); + + // Step 5: B creates a transfer to C + const transferHashBC = randomHash(); + const numberBC = inscriptionNumbers.next().value; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: transferHashBC }) + .brc20( + { + transfer: { + inscription_id: `${transferHashBC}i0`, + tick: 'pepe', + address: addressB, + amt: '2000', + }, + }, + { inscription_number: numberBC } + ) + .build() + ); + + // Verify that the pepe transfer is in the activity feed + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(5); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer', + ticker: 'pepe', + address: addressB, + tx_id: transferHashBC, + transfer: { + amount: '2000.000000000000000000', + from_address: addressB, + }, + } as Brc20ActivityResponse), + ]) + ); + + // Step 6: A transfer_send the transfer to B + const transferHashABSend = randomHash(); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: transferHashABSend }) + .brc20( + { + transfer_send: { + tick: 'pepe', + inscription_id: `${transferHashAB}i0`, + amt: '1000', + sender_address: addressA, + receiver_address: addressB, + }, + }, + { 
inscription_number: numberAB } + ) + .build() + ); + // A gets the transfer send in its feed + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe&address=${addressA}`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(4); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer_send', + ticker: 'pepe', + tx_id: expect.not.stringMatching(transferHashAB), + address: addressB, + transfer_send: { + amount: '1000.000000000000000000', + from_address: addressA, + to_address: addressB, + }, + } as Brc20ActivityResponse), + ]) + ); + // B gets the transfer send in its feed + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe&address=${addressB}`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(3); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer_send', + ticker: 'pepe', + tx_id: expect.not.stringMatching(transferHashAB), + address: addressB, + transfer_send: { + amount: '1000.000000000000000000', + from_address: addressA, + to_address: addressB, + }, + } as Brc20ActivityResponse), + ]) + ); + + // Verify that the pepe transfer_send is in the activity feed + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(6); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer_send', + ticker: 'pepe', + tx_id: expect.not.stringMatching(transferHashAB), + address: addressB, + transfer_send: { + amount: '1000.000000000000000000', + from_address: addressA, + to_address: addressB, + }, + } as Brc20ActivityResponse), + ]) + ); + + // Step 7: B transfer_send the transfer to C + const transferHashBCSend = randomHash(); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: blockHeights.next().value }) + .transaction({ hash: transferHashBCSend }) + .brc20( + { + transfer_send: { + tick: 'pepe', + inscription_id: `${transferHashBC}i0`, + amt: '2000', + sender_address: addressB, + receiver_address: addressC, + }, + }, + { inscription_number: numberBC } + ) + .build() + ); + + // Verify that the pepe transfer_send is in the activity feed + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(7); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer_send', + ticker: 'pepe', + tx_id: expect.not.stringMatching(transferHashBC), + address: addressC, + transfer_send: { + amount: '2000.000000000000000000', + from_address: addressB, + to_address: addressC, + }, + } as Brc20ActivityResponse), + ]) + ); + // B gets the transfer send in its feed + response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/activity?ticker=pepe&address=${addressB}`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(4); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'transfer_send', + ticker: 'pepe', + tx_id: expect.not.stringMatching(transferHashBC), + address: addressC, + transfer_send: { + 
amount: '2000.000000000000000000',
+ from_address: addressB,
+ to_address: addressC,
+ },
+ } as Brc20ActivityResponse),
+ ])
+ );
+ // C gets the transfer send in its feed
+ response = await fastify.inject({
+ method: 'GET',
+ url: `/ordinals/brc-20/activity?ticker=pepe&address=${addressC}`,
+ });
+ expect(response.statusCode).toBe(200);
+ json = response.json();
+ expect(json.total).toBe(1);
+ expect(json.results).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ operation: 'transfer_send',
+ ticker: 'pepe',
+ tx_id: expect.not.stringMatching(transferHashBC),
+ address: addressC,
+ transfer_send: {
+ amount: '2000.000000000000000000',
+ from_address: addressB,
+ to_address: addressC,
+ },
+ } as Brc20ActivityResponse),
+ ])
+ );
+ });
+
+ test('activity for multiple token creation', async () => {
+ const inscriptionNumbers = incrementing(0);
+ const blockHeights = incrementing(BRC20_GENESIS_BLOCK);
+ const addressA = 'bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz';
+
+ // Step 1: Create a token pepe
+ let number = inscriptionNumbers.next().value;
+ await db.updateInscriptions(
+ new TestChainhookPayloadBuilder()
+ .apply()
+ .block({ height: blockHeights.next().value })
+ .transaction({ hash: randomHash() })
+ .brc20(
+ {
+ deploy: {
+ inscription_id: `${randomHash()}i0`,
+ tick: 'pepe',
+ max: '21000000',
+ lim: '21000000',
+ dec: '18',
+ address: addressA,
+ self_mint: false,
+ },
+ },
+ { inscription_number: number }
+ )
+ .build()
+ );
+
+ // Verify that the pepe deploy is in the activity feed
+ let response = await fastify.inject({
+ method: 'GET',
+ url: `/ordinals/brc-20/activity`,
+ });
+ expect(response.statusCode).toBe(200);
+ let json = response.json();
+ expect(json.total).toBe(1);
+ expect(json.results).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ operation: 'deploy',
+ ticker: 'pepe',
+ address: addressA,
+ deploy: expect.objectContaining({
+ max_supply: '21000000.000000000000000000',
+ }),
+ } as Brc20ActivityResponse),
+ ])
+ );
+
+ // Step 2: Create a token peer
+ number = inscriptionNumbers.next().value;
+ await db.updateInscriptions(
+ new TestChainhookPayloadBuilder()
+ .apply()
+ .block({ height: blockHeights.next().value })
+ .transaction({ hash: randomHash() })
+ .brc20(
+ {
+ deploy: {
+ inscription_id: `${randomHash()}i0`,
+ tick: 'peer',
+ max: '21000000',
+ lim: '21000000',
+ dec: '18',
+ address: addressA,
+ self_mint: false,
+ },
+ },
+ { inscription_number: number }
+ )
+ .build()
+ );
+
+ // Verify that the peer deploy is in the activity feed
+ response = await fastify.inject({
+ method: 'GET',
+ url: `/ordinals/brc-20/activity`,
+ });
+ expect(response.statusCode).toBe(200);
+ json = response.json();
+ expect(json.total).toBe(2);
+ expect(json.results).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ operation: 'deploy',
+ ticker: 'peer',
+ address: addressA,
+ deploy: expect.objectContaining({
+ max_supply: '21000000.000000000000000000',
+ }),
+ } as Brc20ActivityResponse),
+ ])
+ );
+
+ // Verify that no peer events are visible at the genesis block height (peer was deployed one block later)
+ response = await fastify.inject({
+ method: 'GET',
+ url: `/ordinals/brc-20/activity?ticker=peer&block_height=${BRC20_GENESIS_BLOCK}`,
+ });
+ expect(response.statusCode).toBe(200);
+ json = response.json();
+ expect(json.total).toBe(0);
+ expect(json.results).toEqual([]);
+
+ // Verify that the peer deploy is not in the activity feed when using block_height parameter
+ response = await fastify.inject({
+ method: 'GET',
+ url: 
`/ordinals/brc-20/activity?block_height=${BRC20_GENESIS_BLOCK}`, + }); + expect(response.statusCode).toBe(200); + json = response.json(); + expect(json.total).toBe(1); + expect(json.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + operation: 'deploy', + ticker: 'pepe', + address: addressA, + deploy: expect.objectContaining({ + max_supply: '21000000.000000000000000000', + }), + } as Brc20ActivityResponse), + ]) + ); + // Should NOT include peer at this block height + expect(json.results).not.toEqual( + expect.arrayContaining([ + expect.objectContaining({ + ticker: 'peer', + } as Brc20ActivityResponse), + ]) + ); + }); + }); + + describe('/brc-20/token/holders', () => { + test('displays holders for token', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await deployAndMintPEPE(db, address); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: BRC20_GENESIS_BLOCK + 2, + hash: '0000000000000000000034dd2daec375371800da441b17651459b2220cbc1a6e', + }) + .transaction({ + hash: '633648e0e1ddcab8dea0496a561f2b08c486ae619b5634d7bb55d7f0cd32ef16', + }) + .brc20( + { + mint: { + inscription_id: + '633648e0e1ddcab8dea0496a561f2b08c486ae619b5634d7bb55d7f0cd32ef16i0', + tick: 'pepe', + address: 'bc1qp9jgp9qtlhgvwjnxclj6kav6nr2fq09c206pyl', + amt: '2000', + }, + }, + { inscription_number: 2 } + ) + .build() + ); + + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens/pepe/holders`, + }); + expect(response.statusCode).toBe(200); + const json = response.json(); + expect(json.total).toBe(2); + expect(json.results).toStrictEqual([ + { + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + overall_balance: '10000.000000000000000000', + }, + { + address: 'bc1qp9jgp9qtlhgvwjnxclj6kav6nr2fq09c206pyl', + overall_balance: '2000.000000000000000000', + }, + ]); + }); + + test('shows empty list on token with no holders', async () => { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: BRC20_GENESIS_BLOCK, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .brc20( + { + deploy: { + inscription_id: + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + tick: 'pepe', + max: '250000', + lim: '250000', + dec: '18', + address: 'bc1qp9jgp9qtlhgvwjnxclj6kav6nr2fq09c206pyl', + self_mint: false, + }, + }, + { inscription_number: 0 } + ) + .build() + ); + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens/pepe/holders`, + }); + expect(response.statusCode).toBe(200); + const json = response.json(); + expect(json.total).toBe(0); + expect(json.results).toStrictEqual([]); + }); + + test('shows 404 on token not found', async () => { + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens/pepe/holders`, + }); + expect(response.statusCode).toBe(404); + }); + }); +}); diff --git a/tests/brc-20/brc20.test.ts b/tests/brc-20/brc20.test.ts index 8f2132f3..4bd7fef3 100644 --- a/tests/brc-20/brc20.test.ts +++ b/tests/brc-20/brc20.test.ts @@ -1,78 +1,19 @@ import { runMigrations } from '@hirosystems/api-toolkit'; import { buildApiServer } from '../../src/api/init'; -import { Brc20ActivityResponse, Brc20TokenResponse } from '../../src/api/schemas'; -import { BRC20_SELF_MINT_ACTIVATION_BLOCK, 
brc20FromInscription } from '../../src/pg/brc20/helpers'; import { MIGRATIONS_DIR, PgStore } from '../../src/pg/pg-store'; -import { DbLocationTransferType, InscriptionRevealData } from '../../src/pg/types'; import { + BRC20_GENESIS_BLOCK, + BRC20_SELF_MINT_ACTIVATION_BLOCK, TestChainhookPayloadBuilder, TestFastifyServer, - brc20Reveal, - incrementing, - randomHash, + deployAndMintPEPE, rollBack, } from '../helpers'; -import { BRC20_GENESIS_BLOCK } from '../../src/pg/brc20/brc20-pg-store'; describe('BRC-20', () => { let db: PgStore; let fastify: TestFastifyServer; - const deployAndMintPEPE = async (address: string) => { - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '250000', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 1, - hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', - }) - .transaction({ - hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '10000', - }, - number: 1, - ordinal_number: 1, - tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - address: address, - }) - ) - .build() - ); - }; - beforeEach(async () => { await runMigrations(MIGRATIONS_DIR, 'up'); db = await PgStore.connect({ skipMigrations: true }); @@ -85,600 +26,6 @@ describe('BRC-20', () => { await runMigrations(MIGRATIONS_DIR, 'down'); }); - describe('token standard validation', () => { - const testInsert = (json: any, block_height: number = 830000): InscriptionRevealData => { - const content = Buffer.from(JSON.stringify(json), 'utf-8'); - return { - inscription: { - genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - number: 0, - classic_number: 0, - mime_type: 'application/json', - content_type: 'application/json', - content_length: content.length, - content: `0x${content.toString('hex')}`, - fee: '200', - curse_type: null, - sat_ordinal: '2000000', - sat_rarity: 'common', - sat_coinbase_height: 110, - recursive: false, - metadata: null, - parent: null, - }, - recursive_refs: [], - location: { - genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - block_height, - block_hash: '00000000000000000002c5c0aba96f981642a6dca109e6b3564925c21a98aa3e', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - tx_index: 0, - address: 'bc1pdjd6q33l0ca9nuudu2hr5qrs9u5dt6nl0z7fvu8kv4y8w4fzdpysc80028', - output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0', - offset: '0', - prev_output: null, - prev_offset: null, - value: '9999', - transfer_type: DbLocationTransferType.transferred, - block_transfer_index: null, - timestamp: 1091091019, - }, - }; - }; - - test('ignores incorrect MIME type', () => { - const content = Buffer.from( - JSON.stringify({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }), - 'utf-8' - ); - const insert: InscriptionRevealData 
= { - inscription: { - genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - number: 0, - classic_number: 0, - mime_type: 'foo/bar', - content_type: 'foo/bar;x=1', - content_length: content.length, - content: `0x${content.toString('hex')}`, - fee: '200', - curse_type: null, - sat_ordinal: '2000000', - sat_rarity: 'common', - sat_coinbase_height: 110, - recursive: false, - metadata: null, - parent: null, - }, - recursive_refs: [], - location: { - genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - block_height: 830000, - block_hash: '00000000000000000002c5c0aba96f981642a6dca109e6b3564925c21a98aa3e', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - tx_index: 0, - address: 'bc1pdjd6q33l0ca9nuudu2hr5qrs9u5dt6nl0z7fvu8kv4y8w4fzdpysc80028', - output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0', - offset: '0', - prev_output: null, - prev_offset: null, - value: '9999', - transfer_type: DbLocationTransferType.transferred, - block_transfer_index: null, - timestamp: 1091091019, - }, - }; - expect(brc20FromInscription(insert)).toBeUndefined(); - insert.inscription.content_type = 'application/json'; - insert.inscription.mime_type = 'application/json'; - expect(brc20FromInscription(insert)).not.toBeUndefined(); - insert.inscription.content_type = 'text/plain;charset=utf-8'; - insert.inscription.mime_type = 'text/plain'; - expect(brc20FromInscription(insert)).not.toBeUndefined(); - }); - - test('ignores invalid JSON', () => { - const content = Buffer.from( - '{"p": "brc-20", "op": "deploy", "tick": "PEPE", "max": "21000000"', - 'utf-8' - ); - const insert: InscriptionRevealData = { - inscription: { - genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - number: 0, - classic_number: 0, - mime_type: 'application/json', - content_type: 'application/json', - content_length: content.length, - content: `0x${content.toString('hex')}`, - fee: '200', - curse_type: null, - sat_ordinal: '2000000', - sat_rarity: 'common', - sat_coinbase_height: 110, - recursive: false, - metadata: null, - parent: null, - }, - recursive_refs: [], - location: { - genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - block_height: 830000, - block_hash: '00000000000000000002c5c0aba96f981642a6dca109e6b3564925c21a98aa3e', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - tx_index: 0, - address: 'bc1pdjd6q33l0ca9nuudu2hr5qrs9u5dt6nl0z7fvu8kv4y8w4fzdpysc80028', - output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0', - offset: '0', - prev_output: null, - prev_offset: null, - value: '9999', - transfer_type: DbLocationTransferType.transferred, - block_transfer_index: null, - timestamp: 1091091019, - }, - }; - expect(brc20FromInscription(insert)).toBeUndefined(); - }); - - test('ignores inscriptions spent as fees', () => { - const content = Buffer.from( - JSON.stringify({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }), - 'utf-8' - ); - const insert: InscriptionRevealData = { - inscription: { - genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - number: 0, - classic_number: 0, - mime_type: 'application/json', - content_type: 'application/json', - content_length: content.length, - content: `0x${content.toString('hex')}`, - fee: '200', - curse_type: null, - sat_ordinal: '2000000', - sat_rarity: 'common', - sat_coinbase_height: 110, - recursive: false, - metadata: 
null, - parent: null, - }, - recursive_refs: [], - location: { - genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - block_height: 830000, - block_hash: '00000000000000000002c5c0aba96f981642a6dca109e6b3564925c21a98aa3e', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - tx_index: 0, - address: '', - output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0', - offset: '0', - prev_output: null, - prev_offset: null, - value: '0', - transfer_type: DbLocationTransferType.spentInFees, - block_transfer_index: null, - timestamp: 1091091019, - }, - }; - expect(brc20FromInscription(insert)).toBeUndefined(); - }); - - test('ignores burnt inscriptions', () => { - const content = Buffer.from( - JSON.stringify({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }), - 'utf-8' - ); - const insert: InscriptionRevealData = { - inscription: { - genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - number: 0, - classic_number: 0, - mime_type: 'application/json', - content_type: 'application/json', - content_length: content.length, - content: `0x${content.toString('hex')}`, - fee: '200', - curse_type: null, - sat_ordinal: '2000000', - sat_rarity: 'common', - sat_coinbase_height: 110, - recursive: false, - metadata: null, - parent: null, - }, - recursive_refs: [], - location: { - genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - block_height: 830000, - block_hash: '00000000000000000002c5c0aba96f981642a6dca109e6b3564925c21a98aa3e', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - tx_index: 0, - address: '', - output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0', - offset: '0', - prev_output: null, - prev_offset: null, - value: '1000', - transfer_type: DbLocationTransferType.burnt, - block_transfer_index: null, - timestamp: 1091091019, - }, - }; - expect(brc20FromInscription(insert)).toBeUndefined(); - }); - - test('ignores incorrect p field', () => { - const insert = testInsert({ - p: 'brc20', // incorrect - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }); - expect(brc20FromInscription(insert)).toBeUndefined(); - }); - - test('ignores incorrect op field', () => { - const insert = testInsert({ - p: 'brc-20', - op: 'deploi', // incorrect - tick: 'PEPE', - max: '21000000', - }); - expect(brc20FromInscription(insert)).toBeUndefined(); - }); - - test('tick must be 4 or 5 bytes wide', () => { - const insert = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPETESTER', // more than 4 bytes - max: '21000000', - }); - expect(brc20FromInscription(insert)).toBeUndefined(); - const insert2 = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'Pe P', // valid - max: '21000000', - }); - expect(brc20FromInscription(insert2)).not.toBeUndefined(); - const insert3 = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: '🤬😉', // more than 4 bytes - max: '21000000', - }); - expect(brc20FromInscription(insert3)).toBeUndefined(); - const insert4 = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'X', // less than 4 bytes - max: '21000000', - }); - expect(brc20FromInscription(insert4)).toBeUndefined(); - }); - - test('deploy self_mint tick must be 5 bytes wide', () => { - const insert = testInsert( - { - p: 'brc-20', - op: 'deploy', - tick: '$PEPE', // 5 bytes - max: '21000000', - self_mint: 'true', - }, - 840000 - ); - expect(brc20FromInscription(insert)).not.toBeUndefined(); - const insert2 = testInsert( - 
{ - p: 'brc-20', - op: 'deploy', - tick: '$PEPE', // 5 bytes but no self_mint - max: '21000000', - }, - 840000 - ); - expect(brc20FromInscription(insert2)).toBeUndefined(); - const insert4 = testInsert( - { - p: 'brc-20', - op: 'deploy', - tick: '$PEPE', // Correct but earlier than activation - max: '21000000', - self_mint: 'true', - }, - 820000 - ); - expect(brc20FromInscription(insert4)).toBeUndefined(); - }); - - test('all fields must be strings', () => { - const insert1 = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: 21000000, - }); - expect(brc20FromInscription(insert1)).toBeUndefined(); - const insert1a = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - lim: 300, - }); - expect(brc20FromInscription(insert1a)).toBeUndefined(); - const insert1b = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - lim: '300', - dec: 2, - }); - expect(brc20FromInscription(insert1b)).toBeUndefined(); - const insert2 = testInsert({ - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: 2, - }); - expect(brc20FromInscription(insert2)).toBeUndefined(); - const insert3 = testInsert({ - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: 2, - }); - expect(brc20FromInscription(insert3)).toBeUndefined(); - }); - - test('ignores empty strings', () => { - const insert1 = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: '', - max: '21000000', - }); - expect(brc20FromInscription(insert1)).toBeUndefined(); - const insert1a = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '', - }); - expect(brc20FromInscription(insert1a)).toBeUndefined(); - const insert1b = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - lim: '', - }); - expect(brc20FromInscription(insert1b)).toBeUndefined(); - const insert1c = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - lim: '200', - dec: '', - }); - expect(brc20FromInscription(insert1c)).toBeUndefined(); - const insert2 = testInsert({ - p: 'brc-20', - op: 'mint', - tick: '', - }); - expect(brc20FromInscription(insert2)).toBeUndefined(); - const insert2a = testInsert({ - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '', - }); - expect(brc20FromInscription(insert2a)).toBeUndefined(); - const insert3 = testInsert({ - p: 'brc-20', - op: 'transfer', - tick: '', - }); - expect(brc20FromInscription(insert3)).toBeUndefined(); - const insert3a = testInsert({ - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '', - }); - expect(brc20FromInscription(insert3a)).toBeUndefined(); - }); - - test('numeric strings must not be zero', () => { - const insert1 = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '0', - }); - expect(brc20FromInscription(insert1)).toBeUndefined(); - const insert1b = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - lim: '0.0', - }); - expect(brc20FromInscription(insert1b)).toBeUndefined(); - const insert1c = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - lim: '200', - dec: '0', - }); - // `dec` can have a value of 0 - expect(brc20FromInscription(insert1c)).not.toBeUndefined(); - const insert1d = testInsert( - { - p: 'brc-20', - op: 'deploy', - tick: '$PEPE', - max: '0', // self mints can be max 0 - self_mint: 'true', - }, - 840000 - ); - expect(brc20FromInscription(insert1d)).not.toBeUndefined(); - const insert2a = testInsert({ - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '0', - }); - 
expect(brc20FromInscription(insert2a)).toBeUndefined(); - const insert3a = testInsert({ - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '.0000', - }); - expect(brc20FromInscription(insert3a)).toBeUndefined(); - }); - - test('numeric fields are not stripped/trimmed', () => { - const insert1 = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: ' 200 ', - }); - expect(brc20FromInscription(insert1)).toBeUndefined(); - const insert1b = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - lim: '+10000', - }); - expect(brc20FromInscription(insert1b)).toBeUndefined(); - const insert1c = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - lim: '200', - dec: ' 0 ', - }); - expect(brc20FromInscription(insert1c)).toBeUndefined(); - const insert2a = testInsert({ - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '.05 ', - }); - expect(brc20FromInscription(insert2a)).toBeUndefined(); - const insert3a = testInsert({ - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '-25.00', - }); - expect(brc20FromInscription(insert3a)).toBeUndefined(); - }); - - test('max value of dec is 18', () => { - const insert1c = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - lim: '200', - dec: '20', - }); - expect(brc20FromInscription(insert1c)).toBeUndefined(); - }); - - test('max value of any numeric field is uint64_max', () => { - const insert1 = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '18446744073709551999', - }); - expect(brc20FromInscription(insert1)).toBeUndefined(); - const insert1b = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - lim: '18446744073709551999', - }); - expect(brc20FromInscription(insert1b)).toBeUndefined(); - const insert2a = testInsert({ - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '18446744073709551999', - }); - expect(brc20FromInscription(insert2a)).toBeUndefined(); - const insert3a = testInsert({ - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '18446744073709551999', - }); - expect(brc20FromInscription(insert3a)).toBeUndefined(); - }); - - test('valid JSONs can have additional properties', () => { - const insert1 = testInsert({ - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '200', - foo: 'bar', - test: 1, - }); - expect(brc20FromInscription(insert1)).not.toBeUndefined(); - const insert2a = testInsert({ - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '5', - foo: 'bar', - test: 1, - }); - expect(brc20FromInscription(insert2a)).not.toBeUndefined(); - const insert3a = testInsert({ - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '25', - foo: 'bar', - test: 1, - }); - expect(brc20FromInscription(insert3a)).not.toBeUndefined(); - }); - }); - describe('deploy', () => { test('deploy is saved', async () => { await db.updateInscriptions( @@ -692,25 +39,26 @@ describe('BRC-20', () => { .transaction({ hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', + .brc20( + { + deploy: { + inscription_id: + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + tick: 'pepe', max: '21000000', + lim: '1000', + dec: '18', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + self_mint: false, }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: 
'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) + }, + { inscription_number: 0 } ) .build() ); const response1 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=PEPE`, + url: `/ordinals/brc-20/tokens?ticker=pepe`, }); expect(response1.statusCode).toBe(200); const responseJson1 = response1.json(); @@ -722,9 +70,9 @@ describe('BRC-20', () => { decimals: 18, id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', number: 0, - mint_limit: null, + mint_limit: '1000.000000000000000000', max_supply: '21000000.000000000000000000', - ticker: 'PEPE', + ticker: 'pepe', tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', deploy_timestamp: 1677811111000, minted_supply: '0.000000000000000000', @@ -734,44 +82,6 @@ describe('BRC-20', () => { ]); }); - test('deploy with self_mint is ignored before activation height', async () => { - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - timestamp: 1677811111, - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: '$PEPE', - max: '21000000', - self_mint: 'true', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) - ) - .build() - ); - const response1 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=$PEPE`, - }); - expect(response1.statusCode).toBe(200); - const responseJson1 = response1.json(); - expect(responseJson1.total).toBe(0); - }); - test('deploy with self_mint is saved', async () => { await db.updateInscriptions( new TestChainhookPayloadBuilder() @@ -784,26 +94,26 @@ describe('BRC-20', () => { .transaction({ hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: '$PEPE', + .brc20( + { + deploy: { + inscription_id: + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + tick: '$pepe', max: '21000000', - self_mint: 'true', + lim: '1000', + dec: '18', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + self_mint: true, }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) + }, + { inscription_number: 0 } ) .build() ); const response1 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=$PEPE`, + url: `/ordinals/brc-20/tokens?ticker=$pepe`, }); expect(response1.statusCode).toBe(200); const responseJson1 = response1.json(); @@ -815,17 +125,20 @@ describe('BRC-20', () => { deploy_timestamp: 1677811111000, id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', max_supply: '21000000.000000000000000000', - mint_limit: null, + mint_limit: '1000.000000000000000000', self_mint: true, minted_supply: '0.000000000000000000', number: 0, - ticker: '$PEPE', + ticker: '$pepe', tx_count: 1, tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }); }); + }); - test('ignores deploys for existing token', async () => { + describe('mint', () => { + test('valid mints are saved 
and balance reflected', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -836,19 +149,20 @@ describe('BRC-20', () => { .transaction({ hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', + .brc20( + { + deploy: { + inscription_id: + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + tick: 'pepe', max: '21000000', + lim: '250000', + dec: '18', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + self_mint: false, }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) + }, + { inscription_number: 0 } ) .build() ); @@ -857,351 +171,124 @@ describe('BRC-20', () => { .apply() .block({ height: BRC20_GENESIS_BLOCK + 1, - hash: '000000000000000000021a0207fa97024506baaa74396822fb0a07ac20e70148', + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', }) .transaction({ - hash: '3f8067a6e9b45308b5a090c2987feeb2d08cbaf814ef2ffabad7c381b62f5f7e', + hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '19000000', + .brc20( + { + mint: { + tick: 'pepe', + amt: '250000', + inscription_id: + '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99fi0', + address, }, - number: 1, - ordinal_number: 1, - tx_id: '3f8067a6e9b45308b5a090c2987feeb2d08cbaf814ef2ffabad7c381b62f5f7e', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) + }, + { inscription_number: 1 } ) .build() ); + const response1 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=PEPE`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response1.statusCode).toBe(200); const responseJson1 = response1.json(); expect(responseJson1.total).toBe(1); expect(responseJson1.results).toStrictEqual([ { - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: BRC20_GENESIS_BLOCK, - decimals: 18, - id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - max_supply: '21000000.000000000000000000', - mint_limit: null, - number: 0, - ticker: 'PEPE', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - deploy_timestamp: 1677803510000, - minted_supply: '0.000000000000000000', - tx_count: 1, - self_mint: false, + ticker: 'pepe', + available_balance: '250000.000000000000000000', + overall_balance: '250000.000000000000000000', + transferrable_balance: '0.000000000000000000', }, ]); - }); - test('ignores case insensitive deploy for existing token', async () => { - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: 
'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) - ) - .build() - ); + // New mint await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() .block({ - height: BRC20_GENESIS_BLOCK + 1, - hash: '000000000000000000021a0207fa97024506baaa74396822fb0a07ac20e70148', + height: BRC20_GENESIS_BLOCK + 2, + hash: '0000000000000000000077163227125e51d838787d6af031bc9b55a3a1cc1b2c', }) .transaction({ - hash: '3f8067a6e9b45308b5a090c2987feeb2d08cbaf814ef2ffabad7c381b62f5f7e', + hash: '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8bec', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', + .brc20( + { + mint: { tick: 'pepe', - max: '19000000', + amt: '100000', + inscription_id: + '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8beci0', + address, }, - number: 1, - ordinal_number: 1, - tx_id: '3f8067a6e9b45308b5a090c2987feeb2d08cbaf814ef2ffabad7c381b62f5f7e', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) + }, + { inscription_number: 2 } ) .build() ); - const response1 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=PEPE`, - }); - expect(response1.statusCode).toBe(200); - const responseJson1 = response1.json(); - expect(responseJson1.total).toBe(1); - expect(responseJson1.results).toStrictEqual([ - { - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: BRC20_GENESIS_BLOCK, - decimals: 18, - id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - max_supply: '21000000.000000000000000000', - mint_limit: null, - number: 0, - ticker: 'PEPE', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - deploy_timestamp: 1677803510000, - minted_supply: '0.000000000000000000', - tx_count: 1, - self_mint: false, - }, - ]); + const response2 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=pepe`, // Lowercase + url: `/ordinals/brc-20/balances/${address}`, }); expect(response2.statusCode).toBe(200); const responseJson2 = response2.json(); expect(responseJson2.total).toBe(1); expect(responseJson2.results).toStrictEqual([ { - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: BRC20_GENESIS_BLOCK, - decimals: 18, - id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - max_supply: '21000000.000000000000000000', - mint_limit: null, - number: 0, - ticker: 'PEPE', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - deploy_timestamp: 1677803510000, - minted_supply: '0.000000000000000000', - tx_count: 1, - self_mint: false, + ticker: 'pepe', + available_balance: '350000.000000000000000000', + overall_balance: '350000.000000000000000000', + transferrable_balance: '0.000000000000000000', }, ]); + + const response3 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?ticker=pepe`, + }); + expect(response3.statusCode).toBe(200); + const responseJson3 = response3.json(); + expect(responseJson3.total).toBe(1); + expect(responseJson3.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ ticker: 'pepe', minted_supply: '350000.000000000000000000' }), + ]) + ); }); - test('ignores deploy from classic cursed inscription', async () => { + test('valid self mints are saved and balance reflected', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; await db.updateInscriptions( new 
TestChainhookPayloadBuilder() .apply() .block({ - height: BRC20_GENESIS_BLOCK, + height: BRC20_SELF_MINT_ACTIVATION_BLOCK, hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', }) .transaction({ hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', + .brc20( + { + deploy: { + inscription_id: + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + tick: '$pepe', max: '21000000', + lim: '21000000', + dec: '18', + address, + self_mint: true, }, - number: 0, - ordinal_number: 0, - classic_number: -1, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) - ) - .build() - ); - const response1 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=PEPE`, - }); - expect(response1.statusCode).toBe(200); - const responseJson1 = response1.json(); - expect(responseJson1.total).toBe(0); - expect(responseJson1.results).toHaveLength(0); - }); - }); - - describe('mint', () => { - test('valid mints are saved and balance reflected', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 1, - hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', - }) - .transaction({ - hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '250000', - }, - number: 1, - ordinal_number: 1, - tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - address: address, - }) - ) - .build() - ); - - const response1 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response1.statusCode).toBe(200); - const responseJson1 = response1.json(); - expect(responseJson1.total).toBe(1); - expect(responseJson1.results).toStrictEqual([ - { - ticker: 'PEPE', - available_balance: '250000.000000000000000000', - overall_balance: '250000.000000000000000000', - transferrable_balance: '0.000000000000000000', - }, - ]); - - // New mint - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 2, - hash: '0000000000000000000077163227125e51d838787d6af031bc9b55a3a1cc1b2c', - }) - .transaction({ - hash: '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8bec', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'pepe', - amt: '100000', - }, - number: 2, - ordinal_number: 2, - tx_id: '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8bec', - address: 
'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) - ) - .build() - ); - - const response2 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response2.statusCode).toBe(200); - const responseJson2 = response2.json(); - expect(responseJson2.total).toBe(1); - expect(responseJson2.results).toStrictEqual([ - { - ticker: 'PEPE', - available_balance: '350000.000000000000000000', - overall_balance: '350000.000000000000000000', - transferrable_balance: '0.000000000000000000', - }, - ]); - - const response3 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=PEPE`, - }); - expect(response3.statusCode).toBe(200); - const responseJson3 = response3.json(); - expect(responseJson3.total).toBe(1); - expect(responseJson3.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ ticker: 'PEPE', minted_supply: '350000.000000000000000000' }), - ]) - ); - }); - - test('valid self mints are saved and balance reflected', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_SELF_MINT_ACTIVATION_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: '$PEPE', - max: '21000000', - self_mint: 'true', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) + }, + { inscription_number: 0 } ) .build() ); @@ -1215,20 +302,17 @@ describe('BRC-20', () => { .transaction({ hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: '$PEPE', + .brc20( + { + mint: { + inscription_id: + '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99fi0', + tick: '$pepe', + address, amt: '250000', }, - number: 1, - ordinal_number: 1, - tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - address: address, - parent: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - }) + }, + { inscription_number: 1 } ) .build() ); @@ -1242,7 +326,7 @@ describe('BRC-20', () => { expect(responseJson1.total).toBe(1); expect(responseJson1.results).toStrictEqual([ { - ticker: '$PEPE', + ticker: '$pepe', available_balance: '250000.000000000000000000', overall_balance: '250000.000000000000000000', transferrable_balance: '0.000000000000000000', @@ -1260,20 +344,17 @@ describe('BRC-20', () => { .transaction({ hash: '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8bec', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', + .brc20( + { + mint: { + inscription_id: + '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8beci0', tick: '$pepe', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', amt: '100000', }, - number: 2, - ordinal_number: 2, - tx_id: '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8bec', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - parent: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - }) + }, + { inscription_number: 2 } ) .build() ); @@ -1287,7 +368,7 @@ describe('BRC-20', () => { 
expect(responseJson2.total).toBe(1); expect(responseJson2.results).toStrictEqual([ { - ticker: '$PEPE', + ticker: '$pepe', available_balance: '350000.000000000000000000', overall_balance: '350000.000000000000000000', transferrable_balance: '0.000000000000000000', @@ -1296,93 +377,14 @@ describe('BRC-20', () => { const response3 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=$PEPE`, - }); - expect(response3.statusCode).toBe(200); - const responseJson3 = response3.json(); - expect(responseJson3.total).toBe(1); - expect(responseJson3.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ ticker: '$PEPE', minted_supply: '350000.000000000000000000' }), - ]) - ); - }); - - test('self mints with invalid parent inscription are ignored', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_SELF_MINT_ACTIVATION_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: '$PEPE', - max: '21000000', - self_mint: 'true', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_SELF_MINT_ACTIVATION_BLOCK + 1, - hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', - }) - .transaction({ - hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: '$PEPE', - amt: '250000', - }, - number: 1, - ordinal_number: 1, - tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - address: address, - // no parent - }) - ) - .build() - ); - - const response1 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response1.statusCode).toBe(200); - const responseJson1 = response1.json(); - expect(responseJson1.total).toBe(0); - - const response3 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=$PEPE`, + url: `/ordinals/brc-20/tokens?ticker=$pepe`, }); expect(response3.statusCode).toBe(200); const responseJson3 = response3.json(); expect(responseJson3.total).toBe(1); expect(responseJson3.results).toEqual( expect.arrayContaining([ - expect.objectContaining({ ticker: '$PEPE', minted_supply: '0.000000000000000000' }), + expect.objectContaining({ ticker: '$pepe', minted_supply: '350000.000000000000000000' }), ]) ); }); @@ -1399,20 +401,20 @@ describe('BRC-20', () => { .transaction({ hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: '$PEPE', + .brc20( + { + deploy: { + inscription_id: + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + tick: '$pepe', max: '0', - self_mint: 'true', + lim: '250000', + dec: '18', + address, + self_mint: true, }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) + }, + { inscription_number: 0 } ) .build() ); @@ -1426,20 +428,17 @@ 
describe('BRC-20', () => { .transaction({ hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: '$PEPE', + .brc20( + { + mint: { + inscription_id: + '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99fi0', + tick: '$pepe', + address, amt: '250000', }, - number: 1, - ordinal_number: 1, - tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - address: address, - parent: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - }) + }, + { inscription_number: 1 } ) .build() ); @@ -1453,7 +452,7 @@ describe('BRC-20', () => { expect(responseJson1.total).toBe(1); expect(responseJson1.results).toStrictEqual([ { - ticker: '$PEPE', + ticker: '$pepe', available_balance: '250000.000000000000000000', overall_balance: '250000.000000000000000000', transferrable_balance: '0.000000000000000000', @@ -1471,20 +470,17 @@ describe('BRC-20', () => { .transaction({ hash: '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8bec', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', + .brc20( + { + mint: { + inscription_id: + '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8beci0', tick: '$pepe', + address, amt: '100000', }, - number: 2, - ordinal_number: 2, - tx_id: '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8bec', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - parent: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - }) + }, + { inscription_number: 2 } ) .build() ); @@ -1498,7 +494,7 @@ describe('BRC-20', () => { expect(responseJson2.total).toBe(1); expect(responseJson2.results).toStrictEqual([ { - ticker: '$PEPE', + ticker: '$pepe', available_balance: '350000.000000000000000000', overall_balance: '350000.000000000000000000', transferrable_balance: '0.000000000000000000', @@ -1507,14 +503,14 @@ describe('BRC-20', () => { const response3 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=$PEPE`, + url: `/ordinals/brc-20/tokens?ticker=$pepe`, }); expect(response3.statusCode).toBe(200); const responseJson3 = response3.json(); expect(responseJson3.total).toBe(1); expect(responseJson3.results).toEqual( expect.arrayContaining([ - expect.objectContaining({ ticker: '$PEPE', minted_supply: '350000.000000000000000000' }), + expect.objectContaining({ ticker: '$pepe', minted_supply: '350000.000000000000000000' }), ]) ); }); @@ -1531,19 +527,20 @@ describe('BRC-20', () => { .transaction({ hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', + .brc20( + { + deploy: { + inscription_id: + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + tick: 'pepe', max: '21000000', + lim: '21000000', + dec: '18', + address, + self_mint: false, }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) + }, + { inscription_number: 0 } ) .build() ); @@ -1557,19 +554,17 @@ describe('BRC-20', () => { .transaction({ hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', + .brc20( + { + mint: { + inscription_id: + '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99fi0', + tick: 'pepe', + address, 
amt: '250000', }, - number: 1, - ordinal_number: 1, - tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - address: address, - }) + }, + { inscription_number: 1 } ) .build() ); @@ -1584,19 +579,17 @@ describe('BRC-20', () => { .transaction({ hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', + .brc20( + { + mint: { + inscription_id: + '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99fi0', + tick: 'pepe', + address, amt: '250000', }, - number: 1, - ordinal_number: 1, - tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - address: address, - }) + }, + { inscription_number: 1 } ) .build() ); @@ -1612,520 +605,105 @@ describe('BRC-20', () => { const response3 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/tokens/PEPE`, + url: `/ordinals/brc-20/tokens/pepe`, }); expect(response3.json().token.minted_supply).toBe('0.000000000000000000'); }); + }); - test('numbers should not have more decimal digits than "dec" of ticker', async () => { + describe('transfer', () => { + test('available balance decreases on transfer inscription', async () => { const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await deployAndMintPEPE(db, address); await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() .block({ - height: BRC20_GENESIS_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - dec: '1', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 1, - hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + height: BRC20_GENESIS_BLOCK + 2, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', }) .transaction({ - hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '250000.000', // Invalid decimal count + .brc20( + { + transfer: { + inscription_id: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + tick: 'pepe', + address, + amt: '2000', }, - number: 1, - ordinal_number: 1, - tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - address: address, - }) + }, + { inscription_number: 2 } ) .build() ); - const response2 = await fastify.inject({ + const response = await fastify.inject({ method: 'GET', url: `/ordinals/brc-20/balances/${address}`, }); - expect(response2.statusCode).toBe(200); - const responseJson2 = response2.json(); - expect(responseJson2.total).toBe(0); - expect(responseJson2.results).toStrictEqual([]); + expect(response.statusCode).toBe(200); + const json = response.json(); + expect(json.total).toBe(1); + expect(json.results).toStrictEqual([ + { + available_balance: '8000.000000000000000000', + overall_balance: '10000.000000000000000000', + ticker: 'pepe', + transferrable_balance: 
'2000.000000000000000000', + }, + ]); + + // Balance at previous block + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address}?block_height=779833`, + }); + const json2 = response2.json(); + expect(json2.results[0].available_balance).toBe('10000.000000000000000000'); }); - test('mint exceeds token supply', async () => { + test('multiple transfers in block', async () => { const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await deployAndMintPEPE(db, address); await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() .block({ - height: BRC20_GENESIS_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + height: BRC20_GENESIS_BLOCK + 2, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', }) .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '2500', - dec: '1', + .brc20( + { + transfer: { + inscription_id: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + tick: 'pepe', + address, + amt: '9000', }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 1, - hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', - }) - .transaction({ - hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '1000', - }, - number: 1, - ordinal_number: 1, - tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - address: address, - }) - ) - .transaction({ - hash: '7e09bda2cba34bca648cca6d79a074940d39b6137150d3a3edcf80c0e01419a5', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '1000', - }, - number: 2, - ordinal_number: 2, - tx_id: '7e09bda2cba34bca648cca6d79a074940d39b6137150d3a3edcf80c0e01419a5', - address: address, - }) - ) - .transaction({ - hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '5000000000', // Exceeds supply - }, - number: 3, - ordinal_number: 3, - tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - address: address, - }) - ) - .build() - ); - - const response2 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}?ticker=PEPE`, - }); - expect(response2.statusCode).toBe(200); - const responseJson2 = response2.json(); - expect(responseJson2.total).toBe(1); - expect(responseJson2.results).toStrictEqual([ - { - available_balance: '2500.0', // Max capacity - overall_balance: '2500.0', - ticker: 'PEPE', - transferrable_balance: '0.0', - }, - ]); - - // No more mints allowed - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 2, - hash: '000000000000000000001f14513d722146fddab04a1855665a5eca22df288c3c', - }) - .transaction({ - hash: 'bf7a3e1a0647ca88f6539119b2defaec302683704ea270b3302e709597643548', - }) - 
.inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '1000', - }, - number: 4, - ordinal_number: 4, - tx_id: 'bf7a3e1a0647ca88f6539119b2defaec302683704ea270b3302e709597643548', - address: address, - }) - ) - .build() - ); - - const response3 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response3.statusCode).toBe(200); - const responseJson3 = response3.json(); - expect(responseJson3).toStrictEqual(responseJson2); - }); - - test('ignores mint for non-existent token', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 1, - hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', - }) - .transaction({ - hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '1000', - }, - number: 0, - ordinal_number: 0, - tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - address: address, - }) - ) - .build() - ); - - const response2 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response2.statusCode).toBe(200); - const responseJson2 = response2.json(); - expect(responseJson2.total).toBe(0); - expect(responseJson2.results).toStrictEqual([]); - }); - - test('mint exceeds token mint limit', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '2500', - dec: '1', - lim: '100', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 1, - hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', - }) - .transaction({ - hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '1000', // Greater than limit - }, - number: 1, - ordinal_number: 1, - tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - address: address, - }) - ) - .build() - ); - - const response2 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response2.statusCode).toBe(200); - const responseJson2 = response2.json(); - expect(responseJson2.total).toBe(0); - expect(responseJson2.results).toStrictEqual([]); - }); - }); - - describe('transfer', () => { - test('available balance decreases on transfer inscription', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await deployAndMintPEPE(address); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 2, - hash: 
'00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', - }) - .transaction({ - hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '2000', - }, - number: 2, - ordinal_number: 2, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) - ) - .build() - ); - - const response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response.statusCode).toBe(200); - const json = response.json(); - expect(json.total).toBe(1); - expect(json.results).toStrictEqual([ - { - available_balance: '8000.000000000000000000', - overall_balance: '10000.000000000000000000', - ticker: 'PEPE', - transferrable_balance: '2000.000000000000000000', - }, - ]); - - // Balance at previous block - const response2 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}?block_height=779833`, - }); - const json2 = response2.json(); - expect(json2.results[0].available_balance).toBe('10000.000000000000000000'); - }); - - test('transfer ignored if token not found', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await deployAndMintPEPE(address); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 2, - hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', - }) - .transaction({ - hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'TEST', // Not found - amt: '2000', - }, - number: 2, - ordinal_number: 2, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) - ) - .build() - ); - - const response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response.statusCode).toBe(200); - const json = response.json(); - expect(json.total).toBe(1); - expect(json.results).toStrictEqual([ - { - available_balance: '10000.000000000000000000', - overall_balance: '10000.000000000000000000', - ticker: 'PEPE', - transferrable_balance: '0.000000000000000000', - }, - ]); - }); - - test('cannot transfer more than available balance', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await deployAndMintPEPE(address); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 2, - hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', - }) - .transaction({ - hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '5000000000', // More than was minted - }, - number: 2, - ordinal_number: 2, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) - ) - .build() - ); - - const response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response.statusCode).toBe(200); - const json = response.json(); - expect(json.total).toBe(1); - expect(json.results).toStrictEqual([ - { - available_balance: '10000.000000000000000000', - overall_balance: '10000.000000000000000000', - ticker: 'PEPE', - 
transferrable_balance: '0.000000000000000000', - }, - ]); - }); - - test('multiple transfers in block', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await deployAndMintPEPE(address); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 2, - hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', - }) - .transaction({ - hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '9000', - }, - number: 2, - ordinal_number: 2, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) + }, + { inscription_number: 2 } ) .transaction({ hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '2000', // Will exceed available balance + .brc20( + { + transfer: { + inscription_id: + '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21baci0', + tick: 'pepe', + address, + amt: '1000', }, - number: 3, - ordinal_number: 3, - tx_id: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', - address: address, - }) + }, + { inscription_number: 3 } ) .build() ); @@ -2139,325 +717,18 @@ describe('BRC-20', () => { expect(json.total).toBe(1); expect(json.results).toStrictEqual([ { - available_balance: '1000.000000000000000000', + available_balance: '0.000000000000000000', overall_balance: '10000.000000000000000000', - ticker: 'PEPE', - transferrable_balance: '9000.000000000000000000', + ticker: 'pepe', + transferrable_balance: '10000.000000000000000000', }, ]); }); test('send balance to address', async () => { const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - const address2 = '3QNjwPDRafjBm9XxJpshgk3ksMJh3TFxTU'; - await deployAndMintPEPE(address); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 2, - hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', - }) - .transaction({ - hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '9000', - }, - number: 2, - ordinal_number: 2, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 3, - hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', - }) - .transaction({ - hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', - }) - .inscriptionTransferred({ - ordinal_number: 2, - destination: { type: 'transferred', value: address2 }, - satpoint_pre_transfer: - 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', - satpoint_post_transfer: - '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', - post_transfer_output_value: null, - tx_index: 0, - }) - .build() - ); - - const response1 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response1.statusCode).toBe(200); - const json1 = response1.json(); - expect(json1.total).toBe(1); - 
expect(json1.results).toStrictEqual([ - { - available_balance: '1000.000000000000000000', - overall_balance: '1000.000000000000000000', - ticker: 'PEPE', - transferrable_balance: '0.000000000000000000', - }, - ]); - - const response2 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address2}`, - }); - expect(response2.statusCode).toBe(200); - const json2 = response2.json(); - expect(json2.total).toBe(1); - expect(json2.results).toStrictEqual([ - { - available_balance: '9000.000000000000000000', - overall_balance: '9000.000000000000000000', - ticker: 'PEPE', - transferrable_balance: '0.000000000000000000', - }, - ]); - - // Balance at previous block - const prevBlock1 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}?block_height=779833`, - }); - const prevBlockJson1 = prevBlock1.json(); - expect(prevBlockJson1.results[0].available_balance).toBe('10000.000000000000000000'); - const prevBlock2 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address2}?block_height=779833`, - }); - const prevBlockJson2 = prevBlock2.json(); - expect(prevBlockJson2.results[0]).toBeUndefined(); - }); - - test('send balance for self_mint token to address', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - const address2 = '3QNjwPDRafjBm9XxJpshgk3ksMJh3TFxTU'; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_SELF_MINT_ACTIVATION_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: '$PEPE', - max: '0', - self_mint: 'true', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_SELF_MINT_ACTIVATION_BLOCK + 1, - hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', - }) - .transaction({ - hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: '$PEPE', - amt: '10000', - }, - number: 1, - ordinal_number: 1, - tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - address: address, - parent: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_SELF_MINT_ACTIVATION_BLOCK + 2, - hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', - }) - .transaction({ - hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: '$PEPE', - amt: '9000', - }, - number: 2, - ordinal_number: 2, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_SELF_MINT_ACTIVATION_BLOCK + 3, - hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', - }) - .transaction({ - hash: 
'7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', - }) - .inscriptionTransferred({ - ordinal_number: 2, - destination: { type: 'transferred', value: address2 }, - satpoint_pre_transfer: - 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', - satpoint_post_transfer: - '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', - post_transfer_output_value: null, - tx_index: 0, - }) - .build() - ); - - const response1 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response1.statusCode).toBe(200); - const json1 = response1.json(); - expect(json1.total).toBe(1); - expect(json1.results).toStrictEqual([ - { - available_balance: '1000.000000000000000000', - overall_balance: '1000.000000000000000000', - ticker: '$PEPE', - transferrable_balance: '0.000000000000000000', - }, - ]); - - const response2 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address2}`, - }); - expect(response2.statusCode).toBe(200); - const json2 = response2.json(); - expect(json2.total).toBe(1); - expect(json2.results).toStrictEqual([ - { - available_balance: '9000.000000000000000000', - overall_balance: '9000.000000000000000000', - ticker: '$PEPE', - transferrable_balance: '0.000000000000000000', - }, - ]); - }); - - test('sending transfer as fee returns amount to sender', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await deployAndMintPEPE(address); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 2, - hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', - }) - .transaction({ - hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '9000', - }, - number: 2, - ordinal_number: 2, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 3, - hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', - }) - .transaction({ - hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', - }) - .inscriptionTransferred({ - ordinal_number: 2, - destination: { type: 'spent_in_fees', value: '' }, - satpoint_pre_transfer: - 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', - satpoint_post_transfer: - '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', - post_transfer_output_value: null, - tx_index: 0, - }) - .build() - ); - - const response1 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response1.statusCode).toBe(200); - const json1 = response1.json(); - expect(json1.total).toBe(1); - expect(json1.results).toStrictEqual([ - { - available_balance: '10000.000000000000000000', - overall_balance: '10000.000000000000000000', - ticker: 'PEPE', - transferrable_balance: '0.000000000000000000', - }, - ]); - }); - - test('sending transfer to unspendable output does not return to sender', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await deployAndMintPEPE(address); + const address2 = '3QNjwPDRafjBm9XxJpshgk3ksMJh3TFxTU'; + await deployAndMintPEPE(db, address); await 
db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -2468,19 +739,17 @@ describe('BRC-20', () => { .transaction({ hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', + .brc20( + { + transfer: { + inscription_id: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + tick: 'pepe', + address, amt: '9000', }, - number: 2, - ordinal_number: 2, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) + }, + { inscription_number: 2 } ) .build() ); @@ -2494,16 +763,19 @@ describe('BRC-20', () => { .transaction({ hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', }) - .inscriptionTransferred({ - ordinal_number: 2, - destination: { type: 'burnt' }, - satpoint_pre_transfer: - 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', - satpoint_post_transfer: - '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', - post_transfer_output_value: null, - tx_index: 0, - }) + .brc20( + { + transfer_send: { + tick: 'pepe', + inscription_id: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + amt: '9000', + sender_address: address, + receiver_address: address2, + }, + }, + { inscription_number: 2 } + ) .build() ); @@ -2518,39 +790,69 @@ describe('BRC-20', () => { { available_balance: '1000.000000000000000000', overall_balance: '1000.000000000000000000', - ticker: 'PEPE', + ticker: 'pepe', + transferrable_balance: '0.000000000000000000', + }, + ]); + + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address2}`, + }); + expect(response2.statusCode).toBe(200); + const json2 = response2.json(); + expect(json2.total).toBe(1); + expect(json2.results).toStrictEqual([ + { + available_balance: '9000.000000000000000000', + overall_balance: '9000.000000000000000000', + ticker: 'pepe', transferrable_balance: '0.000000000000000000', }, ]); + + // Balance at previous block + const prevBlock1 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address}?block_height=779833`, + }); + const prevBlockJson1 = prevBlock1.json(); + expect(prevBlockJson1.results[0].available_balance).toBe('10000.000000000000000000'); + const prevBlock2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address2}?block_height=779833`, + }); + const prevBlockJson2 = prevBlock2.json(); + expect(prevBlockJson2.results[0]).toBeUndefined(); }); - test('cannot spend valid transfer twice', async () => { + test('send balance for self_mint token to address', async () => { const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; const address2 = '3QNjwPDRafjBm9XxJpshgk3ksMJh3TFxTU'; - await deployAndMintPEPE(address); await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() .block({ - height: BRC20_GENESIS_BLOCK + 2, - hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', + height: BRC20_SELF_MINT_ACTIVATION_BLOCK, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', }) .transaction({ - hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '9000', + .brc20( + { + deploy: { + inscription_id: + 
'38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + tick: '$pepe', + max: '0', + lim: '21000000', + dec: '18', + address, + self_mint: true, }, - number: 2, - ordinal_number: 2, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) + }, + { inscription_number: 0 } ) .build() ); @@ -2558,50 +860,76 @@ describe('BRC-20', () => { new TestChainhookPayloadBuilder() .apply() .block({ - height: BRC20_GENESIS_BLOCK + 3, - hash: '000000000000000000016ddf56d0fe72476165acee9500d48d3e2aaf8412f489', + height: BRC20_SELF_MINT_ACTIVATION_BLOCK + 1, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', }) .transaction({ - hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', + hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + }) + .brc20( + { + mint: { + inscription_id: + '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0fi0', + tick: '$pepe', + address, + amt: '10000', + }, + }, + { inscription_number: 1 } + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: BRC20_SELF_MINT_ACTIVATION_BLOCK + 2, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', }) - .inscriptionTransferred({ - ordinal_number: 2, - destination: { type: 'transferred', value: address2 }, - satpoint_pre_transfer: - 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', - satpoint_post_transfer: - '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', - post_transfer_output_value: null, - tx_index: 0, + .transaction({ + hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', }) + .brc20( + { + transfer: { + inscription_id: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + tick: '$pepe', + address, + amt: '9000', + }, + }, + { inscription_number: 2 } + ) .build() ); - // Attempt to transfer the same inscription back to the original address to change its - // balance. await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() .block({ - height: BRC20_GENESIS_BLOCK + 4, + height: BRC20_SELF_MINT_ACTIVATION_BLOCK + 3, hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', }) .transaction({ - hash: '55bec906eadc9f5c120cc39555ba46e85e562eacd6217e4dd0b8552783286d0e', - }) - .inscriptionTransferred({ - ordinal_number: 2, - destination: { type: 'transferred', value: address }, - satpoint_pre_transfer: - '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', - satpoint_post_transfer: - '55bec906eadc9f5c120cc39555ba46e85e562eacd6217e4dd0b8552783286d0e:0:0', - post_transfer_output_value: null, - tx_index: 0, + hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', }) + .brc20( + { + transfer_send: { + inscription_id: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + tick: '$pepe', + amt: '9000', + sender_address: address, + receiver_address: address2, + }, + }, + { inscription_number: 2 } + ) .build() ); - // Balances only reflect the first transfer. 
const response1 = await fastify.inject({ method: 'GET', url: `/ordinals/brc-20/balances/${address}`, @@ -2613,7 +941,7 @@ describe('BRC-20', () => { { available_balance: '1000.000000000000000000', overall_balance: '1000.000000000000000000', - ticker: 'PEPE', + ticker: '$pepe', transferrable_balance: '0.000000000000000000', }, ]); @@ -2629,7 +957,7 @@ describe('BRC-20', () => { { available_balance: '9000.000000000000000000', overall_balance: '9000.000000000000000000', - ticker: 'PEPE', + ticker: '$pepe', transferrable_balance: '0.000000000000000000', }, ]); @@ -2637,7 +965,7 @@ describe('BRC-20', () => { test('explicit transfer to self restores balance correctly', async () => { const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await deployAndMintPEPE(address); + await deployAndMintPEPE(db, address); const address2 = 'bc1ph8dp3lqhzpjphqcc3ucgsm7k3w4d74uwfpv8sv893kn3kpkqrdxqqy3cv6'; await db.updateInscriptions( new TestChainhookPayloadBuilder() @@ -2649,19 +977,17 @@ describe('BRC-20', () => { .transaction({ hash: '825a25b64b5d99ca30e04e53cc9a3020412e1054eb2a7523eb075ddd6d983205', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', + .brc20( + { + transfer: { + inscription_id: + '825a25b64b5d99ca30e04e53cc9a3020412e1054eb2a7523eb075ddd6d983205i0', + tick: 'pepe', + address, amt: '20', }, - number: 2, - ordinal_number: 2, - tx_id: '825a25b64b5d99ca30e04e53cc9a3020412e1054eb2a7523eb075ddd6d983205', - address: address, - }) + }, + { inscription_number: 2 } ) .build() ); @@ -2675,16 +1001,19 @@ describe('BRC-20', () => { .transaction({ hash: '486815e61723d03af344e1256d7e0c028a8e9e71eb38157f4bf069eb94292ee1', }) - .inscriptionTransferred({ - ordinal_number: 2, - destination: { type: 'transferred', value: address2 }, - satpoint_pre_transfer: - '825a25b64b5d99ca30e04e53cc9a3020412e1054eb2a7523eb075ddd6d983205:0:0', - satpoint_post_transfer: - '486815e61723d03af344e1256d7e0c028a8e9e71eb38157f4bf069eb94292ee1:0:0', - post_transfer_output_value: null, - tx_index: 0, - }) + .brc20( + { + transfer_send: { + inscription_id: + '825a25b64b5d99ca30e04e53cc9a3020412e1054eb2a7523eb075ddd6d983205i0', + tick: 'pepe', + amt: '20', + sender_address: address, + receiver_address: address2, + }, + }, + { inscription_number: 2 } + ) .build() ); let response = await fastify.inject({ @@ -2695,7 +1024,7 @@ describe('BRC-20', () => { { available_balance: '20.000000000000000000', overall_balance: '20.000000000000000000', - ticker: 'PEPE', + ticker: 'pepe', transferrable_balance: '0.000000000000000000', }, ]); @@ -2709,19 +1038,17 @@ describe('BRC-20', () => { .transaction({ hash: '09a812f72275892b4858880cf3821004a6e8885817159b340639afe9952ac053', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', + .brc20( + { + transfer: { + inscription_id: + '09a812f72275892b4858880cf3821004a6e8885817159b340639afe9952ac053i0', + tick: 'pepe', + address: address2, amt: '20', }, - number: 3, - ordinal_number: 3, - tx_id: '09a812f72275892b4858880cf3821004a6e8885817159b340639afe9952ac053', - address: address2, - }) + }, + { inscription_number: 3 } ) .build() ); @@ -2733,7 +1060,7 @@ describe('BRC-20', () => { { available_balance: '0.000000000000000000', overall_balance: '20.000000000000000000', - ticker: 'PEPE', + ticker: 'pepe', transferrable_balance: '20.000000000000000000', }, ]); @@ -2747,16 +1074,19 @@ describe('BRC-20', () => { .transaction({ hash: 
'26c0c3acbb1c87e682ade86220ba06e649d7599ecfc49a71495f1bdd04efbbb4', }) - .inscriptionTransferred({ - ordinal_number: 3, - destination: { type: 'transferred', value: address2 }, - satpoint_pre_transfer: - '486815e61723d03af344e1256d7e0c028a8e9e71eb38157f4bf069eb94292ee1:0:0', - satpoint_post_transfer: - '26c0c3acbb1c87e682ade86220ba06e649d7599ecfc49a71495f1bdd04efbbb4:0:0', - post_transfer_output_value: null, - tx_index: 0, - }) + .brc20( + { + transfer_send: { + inscription_id: + '09a812f72275892b4858880cf3821004a6e8885817159b340639afe9952ac053i0', + tick: 'pepe', + amt: '20', + sender_address: address2, + receiver_address: address2, + }, + }, + { inscription_number: 3 } + ) .build() ); response = await fastify.inject({ @@ -2767,1402 +1097,20 @@ describe('BRC-20', () => { { available_balance: '20.000000000000000000', overall_balance: '20.000000000000000000', - ticker: 'PEPE', + ticker: 'pepe', transferrable_balance: '0.000000000000000000', }, ]); }); }); - describe('routes', () => { - describe('/brc-20/tokens', () => { - test('tokens endpoint', async () => { - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: BRC20_GENESIS_BLOCK }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) - ) - .build() - ); - const response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens/PEPE`, - }); - expect(response.statusCode).toBe(200); - expect(response.json()).toStrictEqual({ - token: { - id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - number: 0, - block_height: BRC20_GENESIS_BLOCK, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - ticker: 'PEPE', - max_supply: '21000000.000000000000000000', - mint_limit: null, - decimals: 18, - deploy_timestamp: 1677803510000, - minted_supply: '0.000000000000000000', - tx_count: 1, - self_mint: false, - }, - supply: { - max_supply: '21000000.000000000000000000', - minted_supply: '0.000000000000000000', - holders: 0, - }, - }); - }); - - test('tokens filter by ticker prefix', async () => { - const inscriptionNumbers = incrementing(0); - const blockHeights = incrementing(BRC20_GENESIS_BLOCK); - - let transferHash = randomHash(); - let number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: transferHash }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }, - number: number, - ordinal_number: number, - tx_id: transferHash, - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) - ) - .build() - ); - - transferHash = randomHash(); - number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: transferHash }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEER', - max: '21000000', - }, - number: number, - ordinal_number: 
number, - tx_id: transferHash, - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) - ) - .build() - ); - - transferHash = randomHash(); - number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: transferHash }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'ABCD', - max: '21000000', - }, - number: number, - ordinal_number: number, - tx_id: transferHash, - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) - ) - .build() - ); - - transferHash = randomHash(); - number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: transferHash }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'DCBA', - max: '21000000', - }, - number: number, - ordinal_number: number, - tx_id: transferHash, - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }) - ) - .build() - ); - const response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens?ticker=PE&ticker=AB`, - }); - expect(response.statusCode).toBe(200); - const responseJson = response.json(); - expect(responseJson.total).toBe(3); - expect(responseJson.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ ticker: 'PEPE' }), - expect.objectContaining({ ticker: 'PEER' }), - expect.objectContaining({ ticker: 'ABCD' }), - ]) - ); - }); - - test('tokens using order_by tx_count', async () => { - // Setup - const inscriptionNumbers = incrementing(0); - const blockHeights = incrementing(BRC20_GENESIS_BLOCK); - const addressA = 'bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz'; - const addressB = 'bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4'; - - // A deploys PEPE - let number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }, - number: number, - ordinal_number: number, - tx_id: randomHash(), - address: addressA, - }) - ) - .build() - ); - - // A mints 10000 PEPE 10 times (will later be rolled back) - const pepeMints = []; - for (let i = 0; i < 10; i++) { - const txHash = randomHash(); - number = inscriptionNumbers.next().value; - const payload = new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: txHash }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '10000', - }, - number: number, - ordinal_number: number, - tx_id: txHash, - address: addressA, - }) - ) - .build(); - pepeMints.push(payload); - await db.updateInscriptions(payload); - } - - // B deploys ABCD - number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'ABCD', - max: '21000000', - }, - number: number, - ordinal_number: number, - tx_id: randomHash(), - address: addressB, - }) - ) - .build() - ); - - // B mints 10000 ABCD - 
number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'ABCD', - amt: '10000', - }, - number, - ordinal_number: number, - tx_id: randomHash(), - address: addressB, - }) - ) - .build() - ); - - // B send 1000 ABCD to A - // (create inscription, transfer) - const txHashTransfer = randomHash(); - number = inscriptionNumbers.next().value; - const payloadTransfer = new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: txHashTransfer }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'ABCD', - amt: '1000', - }, - number, - ordinal_number: number, - tx_id: txHashTransfer, - address: addressB, - }) - ) - .build(); - await db.updateInscriptions(payloadTransfer); - // (send inscription, transfer_send) - const txHashTransferSend = randomHash(); - const payloadTransferSend = new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: txHashTransferSend }) - .inscriptionTransferred({ - ordinal_number: number, - destination: { type: 'transferred', value: addressA }, - satpoint_pre_transfer: `${txHashTransfer}:0:0`, - satpoint_post_transfer: `${txHashTransferSend}:0:0`, - post_transfer_output_value: null, - tx_index: 0, - }) - .build(); - await db.updateInscriptions(payloadTransferSend); - - let response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens`, - }); - expect(response.statusCode).toBe(200); - let json = response.json(); - expect(json.total).toBe(2); - expect(json.results).toHaveLength(2); - - // WITHOUT tx_count sort: - expect(json.results).toEqual([ - // The first result is the token with the latest activity (ABCD) - expect.objectContaining({ - ticker: 'ABCD', - tx_count: 4, - } as Brc20TokenResponse), - expect.objectContaining({ - ticker: 'PEPE', - tx_count: 11, - } as Brc20TokenResponse), - ]); - - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens?order_by=tx_count`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(2); - expect(json.results).toHaveLength(2); - - // WITH tx_count sort: The first result is the most active token (PEPE) - expect(json.results).toEqual([ - expect.objectContaining({ - ticker: 'PEPE', - tx_count: 11, - } as Brc20TokenResponse), - expect.objectContaining({ - ticker: 'ABCD', - tx_count: 4, - } as Brc20TokenResponse), - ]); - - // Rollback PEPE mints - for (const payload of pepeMints) { - const payloadRollback = { ...payload, apply: [], rollback: payload.apply }; - await db.updateInscriptions(payloadRollback); - } - - // WITH tx_count sort: The first result is the most active token (now ABCD) - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens?order_by=tx_count`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(2); - expect(json.results).toHaveLength(2); - expect(json.results).toEqual([ - expect.objectContaining({ - ticker: 'ABCD', - tx_count: 4, - } as Brc20TokenResponse), - expect.objectContaining({ - ticker: 'PEPE', - tx_count: 1, // only the deploy remains - } as Brc20TokenResponse), - ]); - - // Rollback ABCD transfer - await db.updateInscriptions({ - ...payloadTransferSend, - apply: 
[], - rollback: payloadTransferSend.apply, - }); - await db.updateInscriptions({ - ...payloadTransfer, - apply: [], - rollback: payloadTransfer.apply, - }); - - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens?order_by=tx_count`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(2); - expect(json.results).toHaveLength(2); - expect(json.results).toEqual([ - expect.objectContaining({ - ticker: 'ABCD', - tx_count: 2, // only the deploy and mint remain - } as Brc20TokenResponse), - expect.objectContaining({ - ticker: 'PEPE', - tx_count: 1, - } as Brc20TokenResponse), - ]); - }); - }); - - describe('/brc-20/activity', () => { - test('activity for token transfers', async () => { - // Setup - const inscriptionNumbers = incrementing(0); - const blockHeights = incrementing(BRC20_GENESIS_BLOCK); - const addressA = 'bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz'; - const addressB = 'bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4'; - - // A deploys PEPE - let number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }, - number, - ordinal_number: number, - tx_id: randomHash(), - address: addressA, - }) - ) - .build() - ); - - // Verify that the PEPE deploy is in the activity feed - let response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - let json = response.json(); - expect(json.total).toBe(1); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'deploy', - ticker: 'PEPE', - address: addressA, - deploy: expect.objectContaining({ - max_supply: '21000000.000000000000000000', - }), - } as Brc20ActivityResponse), - ]) - ); - - // A mints 10000 PEPE - number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '10000', - }, - number, - ordinal_number: number, - tx_id: randomHash(), - address: addressA, - }) - ) - .build() - ); - - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(2); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'deploy', - ticker: 'PEPE', - } as Brc20ActivityResponse), - expect.objectContaining({ - operation: 'mint', - ticker: 'PEPE', - address: addressA, - mint: { - amount: '10000.000000000000000000', - }, - } as Brc20ActivityResponse), - ]) - ); - - // B mints 10000 PEPE - number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '10000', - }, - number, - ordinal_number: number, - tx_id: randomHash(), - address: addressB, - }) - ) - .build() - ); - - response = await fastify.inject({ - method: 'GET', - url: 
`/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(3); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'mint', - ticker: 'PEPE', - address: addressB, - mint: { - amount: '10000.000000000000000000', - }, - } as Brc20ActivityResponse), - ]) - ); - - // A creates transfer of 9000 PEPE - const transferHash = randomHash(); - number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: transferHash }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '9000', - }, - number, - ordinal_number: number, - tx_id: transferHash, - address: addressA, - }) - ) - .build() - ); - - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(4); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer', - ticker: 'PEPE', - address: addressA, - tx_id: transferHash, - transfer: { - amount: '9000.000000000000000000', - from_address: addressA, - }, - } as Brc20ActivityResponse), - ]) - ); - - // A sends transfer inscription to B (aka transfer/sale) - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionTransferred({ - destination: { type: 'transferred', value: addressB }, - tx_index: 0, - ordinal_number: number, - post_transfer_output_value: null, - satpoint_pre_transfer: `${transferHash}:0:0`, - satpoint_post_transfer: - '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', - }) - .build() - ); - - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(5); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer_send', - ticker: 'PEPE', - tx_id: expect.not.stringMatching(transferHash), - address: addressB, - transfer_send: { - amount: '9000.000000000000000000', - from_address: addressA, - to_address: addressB, - }, - } as Brc20ActivityResponse), - ]) - ); - - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE&operation=transfer_send`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(1); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer_send', - ticker: 'PEPE', - tx_id: expect.not.stringMatching(transferHash), - address: addressB, - transfer_send: { - amount: '9000.000000000000000000', - from_address: addressA, - to_address: addressB, - }, - } as Brc20ActivityResponse), - ]) - ); - }); - - test('activity for multiple token transfers among three participants', async () => { - // Step 1: A deploys a token - // Step 2: A mints 1000 of the token - // Step 3: B mints 2000 of the token - // Step 4: A creates a transfer to B - // Step 5: B creates a transfer to C - // Step 6: A transfer_send the transfer to B - // Step 7: B transfer_send the transfer to C - - // Setup - const inscriptionNumbers = incrementing(0); - const 
blockHeights = incrementing(BRC20_GENESIS_BLOCK); - const addressA = 'bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz'; - const addressB = 'bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4'; - const addressC = 'bc1q9d80h0q5d3f54w7w8c3l2sguf9uset4ydw9xj2'; - - // Step 1: A deploys a token - let number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }, - number, - ordinal_number: number, - tx_id: randomHash(), - address: addressA, - }) - ) - .build() - ); - - // Verify that the PEPE deploy is in the activity feed - let response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - let json = response.json(); - expect(json.total).toBe(1); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'deploy', - ticker: 'PEPE', - address: addressA, - deploy: expect.objectContaining({ - max_supply: '21000000.000000000000000000', - }), - } as Brc20ActivityResponse), - ]) - ); - - // Step 2: A mints 1000 of the token - number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '1000', - }, - number, - ordinal_number: number, - tx_id: randomHash(), - address: addressA, - }) - ) - .build() - ); - - // Verify that the PEPE mint is in the activity feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(2); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'mint', - ticker: 'PEPE', - address: addressA, - mint: { - amount: '1000.000000000000000000', - }, - } as Brc20ActivityResponse), - ]) - ); - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE&address=${addressA}`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(2); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'deploy', - ticker: 'PEPE', - address: addressA, - deploy: expect.objectContaining({ - max_supply: '21000000.000000000000000000', - }), - } as Brc20ActivityResponse), - expect.objectContaining({ - operation: 'mint', - ticker: 'PEPE', - address: addressA, - mint: { - amount: '1000.000000000000000000', - }, - } as Brc20ActivityResponse), - ]) - ); - - // Step 3: B mints 2000 of the token - number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '2000', - }, - number, - ordinal_number: number, - tx_id: randomHash(), - address: addressB, - }) - ) - .build() - ); - - // Verify that the PEPE mint is in the activity feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - 
expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(3); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'mint', - ticker: 'PEPE', - address: addressB, - mint: { - amount: '2000.000000000000000000', - }, - } as Brc20ActivityResponse), - ]) - ); - - // Step 4: A creates a transfer to B - const transferHashAB = randomHash(); - const numberAB = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: transferHashAB }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '1000', - }, - number: numberAB, - ordinal_number: numberAB, - tx_id: transferHashAB, - address: addressA, - }) - ) - .build() - ); - - // Verify that the PEPE transfer is in the activity feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(4); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer', - ticker: 'PEPE', - address: addressA, - tx_id: transferHashAB, - transfer: { - amount: '1000.000000000000000000', - from_address: addressA, - }, - } as Brc20ActivityResponse), - ]) - ); - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE&address=${addressA}`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(3); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer', - ticker: 'PEPE', - address: addressA, - tx_id: transferHashAB, - transfer: { - amount: '1000.000000000000000000', - from_address: addressA, - }, - } as Brc20ActivityResponse), - ]) - ); - - // Step 5: B creates a transfer to C - const transferHashBC = randomHash(); - const numberBC = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: transferHashBC }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '2000', - }, - number: numberBC, - ordinal_number: numberBC, - tx_id: transferHashBC, - address: addressB, - }) - ) - .build() - ); - - // Verify that the PEPE transfer is in the activity feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(5); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer', - ticker: 'PEPE', - address: addressB, - tx_id: transferHashBC, - transfer: { - amount: '2000.000000000000000000', - from_address: addressB, - }, - } as Brc20ActivityResponse), - ]) - ); - - // Step 6: A transfer_send the transfer to B - const transferHashABSend = randomHash(); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: transferHashABSend }) - .inscriptionTransferred({ - destination: { type: 'transferred', value: addressB }, - tx_index: 0, - ordinal_number: numberAB, - post_transfer_output_value: null, - satpoint_pre_transfer: `${transferHashAB}:0:0`, - 
satpoint_post_transfer: `${transferHashABSend}:0:0`, - }) - .build() - ); - // A gets the transfer send in its feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE&address=${addressA}`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(4); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer_send', - ticker: 'PEPE', - tx_id: expect.not.stringMatching(transferHashAB), - address: addressB, - transfer_send: { - amount: '1000.000000000000000000', - from_address: addressA, - to_address: addressB, - }, - } as Brc20ActivityResponse), - ]) - ); - // B gets the transfer send in its feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE&address=${addressB}`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(3); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer_send', - ticker: 'PEPE', - tx_id: expect.not.stringMatching(transferHashAB), - address: addressB, - transfer_send: { - amount: '1000.000000000000000000', - from_address: addressA, - to_address: addressB, - }, - } as Brc20ActivityResponse), - ]) - ); - - // Verify that the PEPE transfer_send is in the activity feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(6); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer_send', - ticker: 'PEPE', - tx_id: expect.not.stringMatching(transferHashAB), - address: addressB, - transfer_send: { - amount: '1000.000000000000000000', - from_address: addressA, - to_address: addressB, - }, - } as Brc20ActivityResponse), - ]) - ); - - // Step 7: B transfer_send the transfer to C - const transferHashBCSend = randomHash(); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: transferHashBCSend }) - .inscriptionTransferred({ - destination: { type: 'transferred', value: addressC }, - tx_index: 0, - ordinal_number: numberBC, - post_transfer_output_value: null, - satpoint_pre_transfer: `${transferHashBC}:0:0`, - satpoint_post_transfer: `${transferHashBCSend}:0:0`, - }) - .build() - ); - - // Verify that the PEPE transfer_send is in the activity feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(7); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer_send', - ticker: 'PEPE', - tx_id: expect.not.stringMatching(transferHashBC), - address: addressC, - transfer_send: { - amount: '2000.000000000000000000', - from_address: addressB, - to_address: addressC, - }, - } as Brc20ActivityResponse), - ]) - ); - // B gets the transfer send in its feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE&address=${addressB}`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(4); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer_send', - ticker: 'PEPE', - tx_id: 
expect.not.stringMatching(transferHashBC), - address: addressC, - transfer_send: { - amount: '2000.000000000000000000', - from_address: addressB, - to_address: addressC, - }, - } as Brc20ActivityResponse), - ]) - ); - // C gets the transfer send in its feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEPE&address=${addressC}`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(1); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'transfer_send', - ticker: 'PEPE', - tx_id: expect.not.stringMatching(transferHashBC), - address: addressC, - transfer_send: { - amount: '2000.000000000000000000', - from_address: addressB, - to_address: addressC, - }, - } as Brc20ActivityResponse), - ]) - ); - }); - - test('activity for multiple token creation', async () => { - const inscriptionNumbers = incrementing(0); - const blockHeights = incrementing(BRC20_GENESIS_BLOCK); - const addressA = 'bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz'; - - // Step 1: Create a token PEPE - let number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '21000000', - }, - number, - ordinal_number: number, - tx_id: randomHash(), - address: addressA, - }) - ) - .build() - ); - - // Verify that the PEPE deploy is in the activity feed - let response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity`, - }); - expect(response.statusCode).toBe(200); - let json = response.json(); - expect(json.total).toBe(1); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'deploy', - ticker: 'PEPE', - address: addressA, - deploy: expect.objectContaining({ - max_supply: '21000000.000000000000000000', - }), - } as Brc20ActivityResponse), - ]) - ); - - // Step 2: Create a token PEER - number = inscriptionNumbers.next().value; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: blockHeights.next().value }) - .transaction({ hash: randomHash() }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEER', - max: '21000000', - }, - number, - ordinal_number: number, - tx_id: randomHash(), - address: addressA, - }) - ) - .build() - ); - - // Verify that the PEER deploy is in the activity feed - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(2); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'deploy', - ticker: 'PEER', - address: addressA, - deploy: expect.objectContaining({ - max_supply: '21000000.000000000000000000', - }), - } as Brc20ActivityResponse), - ]) - ); - - // Verify that no events are available before the first block height - response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?ticker=PEER&block_height=${BRC20_GENESIS_BLOCK}`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(0); - expect(json.results).toEqual([]); - - // Verify that the PEER deploy is not in the activity feed when using block_height parameter - response = await 
fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/activity?block_height=${BRC20_GENESIS_BLOCK}`, - }); - expect(response.statusCode).toBe(200); - json = response.json(); - expect(json.total).toBe(1); - expect(json.results).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - operation: 'deploy', - ticker: 'PEPE', - address: addressA, - deploy: expect.objectContaining({ - max_supply: '21000000.000000000000000000', - }), - } as Brc20ActivityResponse), - ]) - ); - // Should NOT include PEER at this block height - expect(json.results).not.toEqual( - expect.arrayContaining([ - expect.objectContaining({ - ticker: 'PEER', - } as Brc20ActivityResponse), - ]) - ); - }); - }); - - describe('/brc-20/token/holders', () => { - test('displays holders for token', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await deployAndMintPEPE(address); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK + 2, - hash: '0000000000000000000034dd2daec375371800da441b17651459b2220cbc1a6e', - }) - .transaction({ - hash: '633648e0e1ddcab8dea0496a561f2b08c486ae619b5634d7bb55d7f0cd32ef16', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '2000', - }, - number: 2, - ordinal_number: 2, - tx_id: '633648e0e1ddcab8dea0496a561f2b08c486ae619b5634d7bb55d7f0cd32ef16', - address: 'bc1qp9jgp9qtlhgvwjnxclj6kav6nr2fq09c206pyl', - }) - ) - .build() - ); - - const response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens/PEPE/holders`, - }); - expect(response.statusCode).toBe(200); - const json = response.json(); - expect(json.total).toBe(2); - expect(json.results).toStrictEqual([ - { - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - overall_balance: '10000.000000000000000000', - }, - { - address: 'bc1qp9jgp9qtlhgvwjnxclj6kav6nr2fq09c206pyl', - overall_balance: '2000.000000000000000000', - }, - ]); - }); - - test('shows empty list on token with no holders', async () => { - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: BRC20_GENESIS_BLOCK, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '250000', - }, - number: 0, - ordinal_number: 0, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: 'bc1qp9jgp9qtlhgvwjnxclj6kav6nr2fq09c206pyl', - }) - ) - .build() - ); - const response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens/PEPE/holders`, - }); - expect(response.statusCode).toBe(200); - const json = response.json(); - expect(json.total).toBe(0); - expect(json.results).toStrictEqual([]); - }); - - test('shows 404 on token not found', async () => { - const response = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/tokens/PEPE/holders`, - }); - expect(response.statusCode).toBe(404); - }); - }); - }); - describe('rollbacks', () => { test('reflects rollbacks on balances and counts correctly', async () => { const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; const address2 = '3QNjwPDRafjBm9XxJpshgk3ksMJh3TFxTU'; - await deployAndMintPEPE(address); + await deployAndMintPEPE(db, address); - // Transfer and send PEPE 
+ // Transfer and send pepe const transferPEPE = new TestChainhookPayloadBuilder() .apply() .block({ @@ -4172,19 +1120,16 @@ describe('BRC-20', () => { .transaction({ hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', + .brc20( + { + transfer: { + inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + tick: 'pepe', + address, amt: '9000', }, - number: 2, - ordinal_number: 2, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) + }, + { inscription_number: 2 } ) .build(); await db.updateInscriptions(transferPEPE); @@ -4197,16 +1142,18 @@ describe('BRC-20', () => { .transaction({ hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', }) - .inscriptionTransferred({ - ordinal_number: 2, - destination: { type: 'transferred', value: address2 }, - satpoint_pre_transfer: - 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', - satpoint_post_transfer: - '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', - post_transfer_output_value: null, - tx_index: 0, - }) + .brc20( + { + transfer_send: { + inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + tick: 'pepe', + amt: '9000', + sender_address: address, + receiver_address: address2, + }, + }, + { inscription_number: 2 } + ) .build(); await db.updateInscriptions(sendPEPE); // Deploy and mint 🔥 token @@ -4219,19 +1166,19 @@ describe('BRC-20', () => { .transaction({ hash: '8354e85e87fa2df8b3a06ec0b9d395559b95174530cb19447fc4df5f6d4ca84d', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', + .brc20( + { + deploy: { + inscription_id: '8354e85e87fa2df8b3a06ec0b9d395559b95174530cb19447fc4df5f6d4ca84di0', tick: '🔥', max: '1000', + lim: '1000', + dec: '18', + address, + self_mint: false, }, - number: 3, - ordinal_number: 3, - tx_id: '8354e85e87fa2df8b3a06ec0b9d395559b95174530cb19447fc4df5f6d4ca84d', - address: address, - }) + }, + { inscription_number: 3 } ) .build(); await db.updateInscriptions(deployFIRE); @@ -4244,19 +1191,16 @@ describe('BRC-20', () => { .transaction({ hash: '81f4ee2c247c5f5c0d3a6753fef706df410ea61c2aa6d370003b98beb041b887', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', + .brc20( + { + mint: { + inscription_id: '81f4ee2c247c5f5c0d3a6753fef706df410ea61c2aa6d370003b98beb041b887i0', tick: '🔥', + address, amt: '500', }, - number: 4, - ordinal_number: 4, - tx_id: '81f4ee2c247c5f5c0d3a6753fef706df410ea61c2aa6d370003b98beb041b887', - address: address, - }) + }, + { inscription_number: 4 } ) .build(); await db.updateInscriptions(mintFIRE); @@ -4270,19 +1214,16 @@ describe('BRC-20', () => { .transaction({ hash: 'c1c7f1d5c10a30605a8a5285ca3465a4f75758ed9b7f201e5ef62727e179966f', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', + .brc20( + { + transfer: { + inscription_id: 'c1c7f1d5c10a30605a8a5285ca3465a4f75758ed9b7f201e5ef62727e179966fi0', tick: '🔥', + address, amt: '100', }, - number: 5, - ordinal_number: 5, - tx_id: 'c1c7f1d5c10a30605a8a5285ca3465a4f75758ed9b7f201e5ef62727e179966f', - address: address, - }) + }, + { inscription_number: 5 } ) .build(); await db.updateInscriptions(transferFIRE); @@ -4295,16 +1236,18 @@ describe('BRC-20', () => { .transaction({ hash: 'a00d01a3e772ce2219ddf3fe2fe4053be071262d9594f11f018fdada7179ae2d', }) - 
.inscriptionTransferred({ - ordinal_number: 5, - destination: { type: 'transferred', value: address }, // To self - satpoint_pre_transfer: - 'c1c7f1d5c10a30605a8a5285ca3465a4f75758ed9b7f201e5ef62727e179966f:0:0', - satpoint_post_transfer: - 'a00d01a3e772ce2219ddf3fe2fe4053be071262d9594f11f018fdada7179ae2d:0:0', - post_transfer_output_value: null, - tx_index: 0, - }) + .brc20( + { + transfer_send: { + tick: '🔥', + inscription_id: 'c1c7f1d5c10a30605a8a5285ca3465a4f75758ed9b7f201e5ef62727e179966fi0', + amt: '100', + sender_address: address, + receiver_address: address, + }, + }, + { inscription_number: 5 } + ) .build(); await db.updateInscriptions(sendFIRE); @@ -4319,7 +1262,7 @@ describe('BRC-20', () => { expect(json.results[1].minted_supply).toBe('10000.000000000000000000'); request = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/tokens/PEPE`, + url: `/ordinals/brc-20/tokens/pepe`, }); json = request.json(); expect(json.supply.holders).toBe(2); @@ -4337,7 +1280,7 @@ describe('BRC-20', () => { expect(json.total).toBe(2); expect(json.results).toHaveLength(2); expect(json.results[0]).toStrictEqual({ - ticker: 'PEPE', + ticker: 'pepe', available_balance: '1000.000000000000000000', transferrable_balance: '0.000000000000000000', overall_balance: '1000.000000000000000000', @@ -4356,7 +1299,7 @@ describe('BRC-20', () => { expect(json.total).toBe(1); expect(json.results).toHaveLength(1); expect(json.results[0]).toStrictEqual({ - ticker: 'PEPE', + ticker: 'pepe', available_balance: '9000.000000000000000000', transferrable_balance: '0.000000000000000000', overall_balance: '9000.000000000000000000', @@ -4451,7 +1394,7 @@ describe('BRC-20', () => { expect(json.total).toBe(1); expect(json.results).toHaveLength(1); expect(json.results[0]).toStrictEqual({ - ticker: 'PEPE', + ticker: 'pepe', available_balance: '1000.000000000000000000', transferrable_balance: '0.000000000000000000', overall_balance: '1000.000000000000000000', @@ -4494,7 +1437,7 @@ describe('BRC-20', () => { expect(json.total).toBe(1); expect(json.results).toHaveLength(1); expect(json.results[0]).toStrictEqual({ - ticker: 'PEPE', + ticker: 'pepe', available_balance: '1000.000000000000000000', transferrable_balance: '0.000000000000000000', overall_balance: '1000.000000000000000000', @@ -4508,7 +1451,7 @@ describe('BRC-20', () => { expect(json.results).toHaveLength(4); expect(json.results[0].operation).toBe('transfer_send'); - // Rollback 3: PEPE is un-sent + // Rollback 3: pepe is un-sent await db.updateInscriptions(rollBack(sendPEPE)); request = await fastify.inject({ method: 'GET', @@ -4518,14 +1461,14 @@ describe('BRC-20', () => { expect(json.total).toBe(1); expect(json.results).toHaveLength(1); expect(json.results[0]).toStrictEqual({ - ticker: 'PEPE', + ticker: 'pepe', available_balance: '1000.000000000000000000', transferrable_balance: '9000.000000000000000000', overall_balance: '10000.000000000000000000', }); request = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/tokens/PEPE`, + url: `/ordinals/brc-20/tokens/pepe`, }); json = request.json(); expect(json.supply.holders).toBe(1); @@ -4545,7 +1488,7 @@ describe('BRC-20', () => { expect(json.results).toHaveLength(3); expect(json.results[0].operation).toBe('transfer'); - // Rollback 4: PEPE is un-transferred + // Rollback 4: pepe is un-transferred await db.updateInscriptions(rollBack(transferPEPE)); request = await fastify.inject({ method: 'GET', @@ -4555,14 +1498,14 @@ describe('BRC-20', () => { expect(json.total).toBe(1); 
expect(json.results).toHaveLength(1); expect(json.results[0]).toStrictEqual({ - ticker: 'PEPE', + ticker: 'pepe', available_balance: '10000.000000000000000000', transferrable_balance: '0.000000000000000000', overall_balance: '10000.000000000000000000', }); request = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/tokens/PEPE`, + url: `/ordinals/brc-20/tokens/pepe`, }); json = request.json(); expect(json.supply.holders).toBe(1); diff --git a/tests/helpers.ts b/tests/helpers.ts index f2c70674..84750cf4 100644 --- a/tests/helpers.ts +++ b/tests/helpers.ts @@ -1,14 +1,15 @@ import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox'; import { + BitcoinBrc20Operation, BitcoinEvent, BitcoinInscriptionRevealed, BitcoinInscriptionTransferred, + BitcoinPayload, BitcoinTransaction, - Payload, } from '@hirosystems/chainhook-client'; import { FastifyBaseLogger, FastifyInstance } from 'fastify'; import { IncomingMessage, Server, ServerResponse } from 'http'; -import { Brc20 } from '../src/pg/brc20/helpers'; +import { PgStore } from '../src/pg/pg-store'; export type TestFastifyServer = FastifyInstance< Server, @@ -19,7 +20,7 @@ export type TestFastifyServer = FastifyInstance< >; export class TestChainhookPayloadBuilder { - private payload: Payload = { + private payload: BitcoinPayload = { apply: [], rollback: [], chainhook: { @@ -27,6 +28,7 @@ export class TestChainhookPayloadBuilder { predicate: { scope: 'ordinals_protocol', operation: 'inscription_feed', + meta_protocols: ['brc-20'], }, is_streaming_blocks: true, }, @@ -38,6 +40,7 @@ export class TestChainhookPayloadBuilder { private get lastBlockTx(): BitcoinTransaction { return this.lastBlock.transactions[this.lastBlock.transactions.length - 1]; } + private txIndex = 0; streamingBlocks(streaming: boolean): this { this.payload.chainhook.is_streaming_blocks = streaming; @@ -80,6 +83,7 @@ export class TestChainhookPayloadBuilder { metadata: { ordinal_operations: [], proof: null, + index: this.txIndex++, }, }); return this; @@ -95,12 +99,76 @@ export class TestChainhookPayloadBuilder { return this; } - build(): Payload { + brc20( + args: BitcoinBrc20Operation, + opts: { inscription_number: number; ordinal_number?: number } + ): this { + this.lastBlockTx.metadata.brc20_operation = args; + if ('transfer_send' in args) { + this.lastBlockTx.metadata.ordinal_operations.push({ + inscription_transferred: { + ordinal_number: opts.ordinal_number ?? 
opts.inscription_number, + destination: { + type: 'transferred', + value: args.transfer_send.receiver_address, + }, + satpoint_pre_transfer: `${args.transfer_send.inscription_id.split('i')[0]}:0:0`, + satpoint_post_transfer: `${this.lastBlockTx.transaction_identifier.hash}:0:0`, + post_transfer_output_value: null, + tx_index: 0, + }, + }); + } else { + let inscription_id = ''; + let inscriber_address = ''; + if ('deploy' in args) { + inscription_id = args.deploy.inscription_id; + inscriber_address = args.deploy.address; + } else if ('mint' in args) { + inscription_id = args.mint.inscription_id; + inscriber_address = args.mint.address; + } else { + inscription_id = args.transfer.inscription_id; + inscriber_address = args.transfer.address; + } + this.lastBlockTx.metadata.ordinal_operations.push({ + inscription_revealed: { + content_bytes: `0x101010`, + content_type: 'text/plain;charset=utf-8', + content_length: 3, + inscription_number: { + jubilee: opts.inscription_number, + classic: opts.inscription_number, + }, + inscription_fee: 2000, + inscription_id, + inscription_output_value: 10000, + inscriber_address, + ordinal_number: opts.ordinal_number ?? opts.inscription_number, + ordinal_block_height: 0, + ordinal_offset: 0, + satpoint_post_inscription: `${inscription_id.split('i')[0]}:0:0`, + inscription_input_index: 0, + transfers_pre_inscription: 0, + tx_index: 0, + curse_type: null, + inscription_pointer: null, + delegate: null, + metaprotocol: null, + metadata: undefined, + parent: null, + }, + }); + } + return this; + } + + build(): BitcoinPayload { return this.payload; } } -export function rollBack(payload: Payload) { +export function rollBack(payload: BitcoinPayload) { return { ...payload, apply: [], @@ -108,45 +176,6 @@ export function rollBack(payload: Payload) { }; } -export function brc20Reveal(args: { - json: Brc20; - number: number; - classic_number?: number; - address: string; - tx_id: string; - ordinal_number: number; - parent?: string; -}): BitcoinInscriptionRevealed { - const content = Buffer.from(JSON.stringify(args.json), 'utf-8'); - const reveal: BitcoinInscriptionRevealed = { - content_bytes: `0x${content.toString('hex')}`, - content_type: 'text/plain;charset=utf-8', - content_length: content.length, - inscription_number: { - jubilee: args.number, - classic: args.classic_number ?? args.number, - }, - inscription_fee: 2000, - inscription_id: `${args.tx_id}i0`, - inscription_output_value: 10000, - inscriber_address: args.address, - ordinal_number: args.ordinal_number, - ordinal_block_height: 0, - ordinal_offset: 0, - satpoint_post_inscription: `${args.tx_id}:0:0`, - inscription_input_index: 0, - transfers_pre_inscription: 0, - tx_index: 0, - curse_type: null, - inscription_pointer: null, - delegate: null, - metaprotocol: null, - metadata: undefined, - parent: args.parent ?? 
null, - }; - return reveal; -} - /** Generate a random hash like string for testing */ export const randomHash = () => [...Array(64)].map(() => Math.floor(Math.random() * 16).toString(16)).join(''); @@ -163,3 +192,58 @@ export function* incrementing( current += step; } } + +export const BRC20_GENESIS_BLOCK = 779832; +export const BRC20_SELF_MINT_ACTIVATION_BLOCK = 837090; + +export async function deployAndMintPEPE(db: PgStore, address: string) { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: BRC20_GENESIS_BLOCK, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .brc20( + { + deploy: { + tick: 'pepe', + max: '250000', + dec: '18', + lim: '250000', + inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + address, + self_mint: false, + }, + }, + { inscription_number: 0 } + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: BRC20_GENESIS_BLOCK + 1, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + }) + .transaction({ + hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + }) + .brc20( + { + mint: { + tick: 'pepe', + amt: '10000', + inscription_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0fi0', + address, + }, + }, + { inscription_number: 1 } + ) + .build() + ); +} From 5422156e9919f0c5870c9571ea9f591852c98b69 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Fri, 26 Apr 2024 10:33:35 -0600 Subject: [PATCH 05/10] feat!: support reinscription transfers (#348) * chore: progress * feat: new apply * fix: minted supply * fix: track tx counts * feat: rollbacks * fix: transfer rollbacks * fix: activity addresses * fix: multiple transfer test * fix: holders * fix: operation indexes * chore: draft * fix: style * chore: optimize migrations * fix: initial inserts * fix: get inscription endpoint * fix: rollback * fix: ordhook tests passing * chore: logging * fix: recursion refs * fix: current location comparison * fix: transfers table * test: sat reinscription transfers * fix: start applying counts * fix: tests * fix: all api tests passing * test: transfers per block reinscription * test: address transfer counts * fix: brc20 tests * chore: refactor block caches * chore: clean unused exports --- .github/workflows/ci.yml | 3 + migrations/1676395230925_satoshis.ts | 22 + migrations/1676395230930_inscriptions.ts | 50 +- migrations/1677284495299_locations.ts | 41 +- ....ts => 1677284495500_current-locations.ts} | 20 +- .../1677284495501_inscription-transfers.ts | 52 ++ ...ip-table.ts => 1677284495900_chain-tip.ts} | 14 +- .../1677284495992_inscription-recursions.ts | 25 + ...ck.ts => 1677284495995_counts-by-block.ts} | 3 +- migrations/1677360299810_chain-tip.ts | 14 - ...s => 1683047918926_counts-by-mime-type.ts} | 16 +- migrations/1683047918926_mime-type-counts.ts | 13 - ... 
=> 1683061444855_counts-by-sat-rarity.ts} | 12 +- migrations/1683061444855_sat-rarity-counts.ts | 17 - ...ent.ts => 1683130423352_counts-by-type.ts} | 11 +- migrations/1688925112931_genesis-locations.ts | 36 - migrations/1689264599745_address-counts.ts | 13 - ....ts => 1689264599745_counts-by-address.ts} | 12 +- ...689264599745_counts-by-genesis-address.ts} | 17 +- .../1690229956705_inscription-recursions.ts | 46 - .../1690476164909_count-views-to-tables.ts | 149 --- ...0832271103_location-pointer-constraints.ts | 55 -- migrations/1692980393413_locations-unique.ts | 34 - ...572099_locations-remove-duplicate-index.ts | 12 - .../1693235147508_recursion-backfills.ts | 51 -- .../1695655140203_counts-by-recursive.ts | 11 +- .../1698897577725_locations-location-index.ts | 14 - ...63472553_locations-block-height-indexes.ts | 22 - .../1708471015438_remove-unused-indexes.ts | 18 - package-lock.json | 35 + package.json | 2 + src/api/routes/stats.ts | 2 +- src/api/schemas.ts | 12 +- src/api/util/cache.ts | 7 +- src/api/util/helpers.ts | 2 +- src/ordhook/server.ts | 3 +- src/pg/block-cache.ts | 193 ++++ src/pg/brc20/brc20-block-cache.ts | 228 +++++ src/pg/brc20/brc20-pg-store.ts | 216 ++--- src/pg/brc20/helpers.ts | 89 -- src/pg/brc20/types.ts | 64 +- src/pg/counts/counts-pg-store.ts | 249 +++-- src/pg/helpers.ts | 166 +--- src/pg/pg-store.ts | 853 ++++++++---------- src/pg/types.ts | 136 +-- tests/api/inscriptions.test.ts | 17 +- tests/api/sats.test.ts | 60 +- tests/ordhook/server.test.ts | 26 + 48 files changed, 1355 insertions(+), 1808 deletions(-) create mode 100644 migrations/1676395230925_satoshis.ts rename migrations/{1689006001522_current-locations.ts => 1677284495500_current-locations.ts} (56%) create mode 100644 migrations/1677284495501_inscription-transfers.ts rename migrations/{1701486147464_chain-tip-table.ts => 1677284495900_chain-tip.ts} (59%) create mode 100644 migrations/1677284495992_inscription-recursions.ts rename migrations/{1687785552000_inscriptions-per-block.ts => 1677284495995_counts-by-block.ts} (87%) delete mode 100644 migrations/1677360299810_chain-tip.ts rename migrations/{1704341578275_jubilee-numbers.ts => 1683047918926_counts-by-mime-type.ts} (58%) delete mode 100644 migrations/1683047918926_mime-type-counts.ts rename migrations/{1693234845450_locations-null-inscription-id-index.ts => 1683061444855_counts-by-sat-rarity.ts} (56%) delete mode 100644 migrations/1683061444855_sat-rarity-counts.ts rename migrations/{1707770109739_metadata-parent.ts => 1683130423352_counts-by-type.ts} (66%) delete mode 100644 migrations/1688925112931_genesis-locations.ts delete mode 100644 migrations/1689264599745_address-counts.ts rename migrations/{1698856424356_locations-transfer-type.ts => 1689264599745_counts-by-address.ts} (64%) rename migrations/{1683130423352_inscription-count.ts => 1689264599745_counts-by-genesis-address.ts} (55%) delete mode 100644 migrations/1690229956705_inscription-recursions.ts delete mode 100644 migrations/1690476164909_count-views-to-tables.ts delete mode 100644 migrations/1690832271103_location-pointer-constraints.ts delete mode 100644 migrations/1692980393413_locations-unique.ts delete mode 100644 migrations/1693234572099_locations-remove-duplicate-index.ts delete mode 100644 migrations/1693235147508_recursion-backfills.ts delete mode 100644 migrations/1698897577725_locations-location-index.ts delete mode 100644 migrations/1705363472553_locations-block-height-indexes.ts delete mode 100644 migrations/1708471015438_remove-unused-indexes.ts create mode 100644 
src/pg/block-cache.ts create mode 100644 src/pg/brc20/brc20-block-cache.ts diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 72828daa..d8d328a1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -48,6 +48,9 @@ jobs: - name: Lint Prettier run: npm run lint:prettier + - name: Lint Unused Exports + run: npm run lint:unused-exports + test: strategy: fail-fast: false diff --git a/migrations/1676395230925_satoshis.ts b/migrations/1676395230925_satoshis.ts new file mode 100644 index 00000000..62c1d0b7 --- /dev/null +++ b/migrations/1676395230925_satoshis.ts @@ -0,0 +1,22 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createTable('satoshis', { + ordinal_number: { + type: 'numeric', + primaryKey: true, + }, + rarity: { + type: 'text', + notNull: true, + }, + coinbase_height: { + type: 'bigint', + notNull: true, + }, + }); + pgm.createIndex('satoshis', ['rarity']); +} diff --git a/migrations/1676395230930_inscriptions.ts b/migrations/1676395230930_inscriptions.ts index 8b4bd09c..3205872b 100644 --- a/migrations/1676395230930_inscriptions.ts +++ b/migrations/1676395230930_inscriptions.ts @@ -5,30 +5,33 @@ export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { pgm.createTable('inscriptions', { - id: { - type: 'bigserial', - primaryKey: true, - }, genesis_id: { type: 'text', + primaryKey: true, + }, + ordinal_number: { + type: 'numeric', notNull: true, }, number: { type: 'bigint', notNull: true, }, - sat_ordinal: { - type: 'numeric', + classic_number: { + type: 'bigint', notNull: true, }, - sat_rarity: { - type: 'text', + block_height: { + type: 'bigint', notNull: true, }, - sat_coinbase_height: { + tx_index: { type: 'bigint', notNull: true, }, + address: { + type: 'text', + }, mime_type: { type: 'text', notNull: true, @@ -52,6 +55,20 @@ export function up(pgm: MigrationBuilder): void { curse_type: { type: 'text', }, + recursive: { + type: 'boolean', + default: false, + }, + metadata: { + type: 'text', + }, + parent: { + type: 'text', + }, + timestamp: { + type: 'timestamptz', + notNull: true, + }, updated_at: { type: 'timestamptz', default: pgm.func('(NOW())'), @@ -59,10 +76,17 @@ export function up(pgm: MigrationBuilder): void { }, }); pgm.createConstraint('inscriptions', 'inscriptions_number_unique', 'UNIQUE(number)'); - pgm.createIndex('inscriptions', ['genesis_id']); + pgm.createConstraint( + 'inscriptions', + 'inscriptions_ordinal_number_fk', + 'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE' + ); pgm.createIndex('inscriptions', ['mime_type']); - pgm.createIndex('inscriptions', ['sat_ordinal']); - pgm.createIndex('inscriptions', ['sat_rarity']); - pgm.createIndex('inscriptions', ['sat_coinbase_height']); + pgm.createIndex('inscriptions', ['recursive']); + pgm.createIndex('inscriptions', [ + { name: 'block_height', sort: 'DESC' }, + { name: 'tx_index', sort: 'DESC' }, + ]); + pgm.createIndex('inscriptions', ['address']); pgm.createIndex('inscriptions', [{ name: 'updated_at', sort: 'DESC' }]); } diff --git a/migrations/1677284495299_locations.ts b/migrations/1677284495299_locations.ts index b9cc76b1..30894492 100644 --- a/migrations/1677284495299_locations.ts +++ b/migrations/1677284495299_locations.ts @@ -4,32 +4,26 @@ import { MigrationBuilder, 
ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { + pgm.createType('transfer_type', ['transferred', 'spent_in_fees', 'burnt']); pgm.createTable('locations', { - id: { - type: 'bigserial', - primaryKey: true, - }, - inscription_id: { - type: 'bigint', - }, - genesis_id: { - type: 'text', + ordinal_number: { + type: 'numeric', notNull: true, }, block_height: { type: 'bigint', notNull: true, }, - block_hash: { - type: 'text', + tx_index: { + type: 'bigint', notNull: true, }, tx_id: { type: 'text', notNull: true, }, - tx_index: { - type: 'bigint', + block_hash: { + type: 'text', notNull: true, }, address: { @@ -51,26 +45,27 @@ export function up(pgm: MigrationBuilder): void { value: { type: 'numeric', }, + transfer_type: { + type: 'transfer_type', + notNull: true, + }, timestamp: { type: 'timestamptz', notNull: true, }, }); + pgm.createConstraint('locations', 'locations_pkey', { + primaryKey: ['ordinal_number', 'block_height', 'tx_index'], + }); pgm.createConstraint( 'locations', - 'locations_inscription_id_fk', - 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' + 'locations_ordinal_number_fk', + 'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE' ); - pgm.createConstraint('locations', 'locations_output_offset_unique', 'UNIQUE(output, "offset")'); - pgm.createIndex('locations', ['inscription_id']); + pgm.createIndex('locations', ['output', 'offset']); + pgm.createIndex('locations', ['timestamp']); pgm.createIndex('locations', [ - 'genesis_id', { name: 'block_height', sort: 'DESC' }, { name: 'tx_index', sort: 'DESC' }, ]); - pgm.createIndex('locations', ['block_height']); - pgm.createIndex('locations', ['block_hash']); - pgm.createIndex('locations', ['address']); - pgm.createIndex('locations', ['timestamp']); - pgm.createIndex('locations', ['prev_output']); } diff --git a/migrations/1689006001522_current-locations.ts b/migrations/1677284495500_current-locations.ts similarity index 56% rename from migrations/1689006001522_current-locations.ts rename to migrations/1677284495500_current-locations.ts index 3a469202..51f4b8a3 100644 --- a/migrations/1689006001522_current-locations.ts +++ b/migrations/1677284495500_current-locations.ts @@ -5,12 +5,8 @@ export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { pgm.createTable('current_locations', { - inscription_id: { - type: 'bigint', - notNull: true, - }, - location_id: { - type: 'bigint', + ordinal_number: { + type: 'numeric', notNull: true, }, block_height: { @@ -27,10 +23,14 @@ export function up(pgm: MigrationBuilder): void { }); pgm.createConstraint( 'current_locations', - 'current_locations_inscription_id_unique', - 'UNIQUE(inscription_id)' + 'current_locations_locations_fk', + 'FOREIGN KEY(ordinal_number, block_height, tx_index) REFERENCES locations(ordinal_number, block_height, tx_index) ON DELETE CASCADE' + ); + pgm.createConstraint( + 'locations', + 'locations_satoshis_fk', + 'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE' ); - pgm.createIndex('current_locations', ['location_id']); - pgm.createIndex('current_locations', ['block_height']); + pgm.createIndex('current_locations', ['ordinal_number'], { unique: true }); pgm.createIndex('current_locations', ['address']); } diff --git a/migrations/1677284495501_inscription-transfers.ts 
b/migrations/1677284495501_inscription-transfers.ts new file mode 100644 index 00000000..90b72717 --- /dev/null +++ b/migrations/1677284495501_inscription-transfers.ts @@ -0,0 +1,52 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createTable('inscription_transfers', { + genesis_id: { + type: 'text', + notNull: true, + }, + number: { + type: 'bigint', + notNull: true, + }, + ordinal_number: { + type: 'numeric', + notNull: true, + }, + block_height: { + type: 'bigint', + notNull: true, + }, + tx_index: { + type: 'bigint', + notNull: true, + }, + block_hash: { + type: 'text', + notNull: true, + }, + block_transfer_index: { + type: 'int', + notNull: true, + }, + }); + pgm.createConstraint('inscription_transfers', 'inscription_transfers_pkey', { + primaryKey: ['block_height', 'block_transfer_index'], + }); + pgm.createConstraint( + 'inscription_transfers', + 'inscription_transfers_locations_fk', + 'FOREIGN KEY(ordinal_number, block_height, tx_index) REFERENCES locations(ordinal_number, block_height, tx_index) ON DELETE CASCADE' + ); + pgm.createConstraint( + 'inscription_transfers', + 'inscription_transfers_satoshis_fk', + 'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE' + ); + pgm.createIndex('inscription_transfers', ['genesis_id']); + pgm.createIndex('inscription_transfers', ['number']); +} diff --git a/migrations/1701486147464_chain-tip-table.ts b/migrations/1677284495900_chain-tip.ts similarity index 59% rename from migrations/1701486147464_chain-tip-table.ts rename to migrations/1677284495900_chain-tip.ts index 1f9b30b2..2b897d40 100644 --- a/migrations/1701486147464_chain-tip-table.ts +++ b/migrations/1677284495900_chain-tip.ts @@ -4,7 +4,6 @@ import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { - pgm.dropMaterializedView('chain_tip'); pgm.createTable('chain_tip', { id: { type: 'bool', @@ -19,20 +18,9 @@ export function up(pgm: MigrationBuilder): void { }, }); pgm.addConstraint('chain_tip', 'chain_tip_one_row', 'CHECK(id)'); - pgm.sql(` - INSERT INTO chain_tip (block_height) ( - SELECT GREATEST(MAX(block_height), 767430) AS block_height FROM locations - ) - `); + pgm.sql(`INSERT INTO chain_tip DEFAULT VALUES`); } export function down(pgm: MigrationBuilder): void { pgm.dropTable('chain_tip'); - pgm.createMaterializedView( - 'chain_tip', - { data: true }, - // Set block height 767430 (inscription #0 genesis) as default. 
- `SELECT GREATEST(MAX(block_height), 767430) AS block_height FROM locations` - ); - pgm.createIndex('chain_tip', ['block_height'], { unique: true }); } diff --git a/migrations/1677284495992_inscription-recursions.ts b/migrations/1677284495992_inscription-recursions.ts new file mode 100644 index 00000000..d75fb405 --- /dev/null +++ b/migrations/1677284495992_inscription-recursions.ts @@ -0,0 +1,25 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createTable('inscription_recursions', { + genesis_id: { + type: 'text', + notNull: true, + }, + ref_genesis_id: { + type: 'text', + notNull: true, + }, + }); + pgm.createConstraint('inscription_recursions', 'inscription_recursions_pkey', { + primaryKey: ['genesis_id', 'ref_genesis_id'], + }); + pgm.createConstraint( + 'inscription_recursions', + 'inscription_recursions_genesis_id_fk', + 'FOREIGN KEY(genesis_id) REFERENCES inscriptions(genesis_id) ON DELETE CASCADE' + ); +} diff --git a/migrations/1687785552000_inscriptions-per-block.ts b/migrations/1677284495995_counts-by-block.ts similarity index 87% rename from migrations/1687785552000_inscriptions-per-block.ts rename to migrations/1677284495995_counts-by-block.ts index 7aef97a8..2c33335c 100644 --- a/migrations/1687785552000_inscriptions-per-block.ts +++ b/migrations/1677284495995_counts-by-block.ts @@ -4,7 +4,7 @@ import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { - pgm.createTable('inscriptions_per_block', { + pgm.createTable('counts_by_block', { block_height: { type: 'bigint', primaryKey: true, @@ -26,4 +26,5 @@ export function up(pgm: MigrationBuilder): void { notNull: true, }, }); + pgm.createIndex('counts_by_block', ['block_hash']); } diff --git a/migrations/1677360299810_chain-tip.ts b/migrations/1677360299810_chain-tip.ts deleted file mode 100644 index 7dbab3d3..00000000 --- a/migrations/1677360299810_chain-tip.ts +++ /dev/null @@ -1,14 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createMaterializedView( - 'chain_tip', - { data: true }, - // Set block height 767430 (inscription #0 genesis) as default. 
- `SELECT GREATEST(MAX(block_height), 767430) AS block_height FROM locations` - ); - pgm.createIndex('chain_tip', ['block_height'], { unique: true }); -} diff --git a/migrations/1704341578275_jubilee-numbers.ts b/migrations/1683047918926_counts-by-mime-type.ts similarity index 58% rename from migrations/1704341578275_jubilee-numbers.ts rename to migrations/1683047918926_counts-by-mime-type.ts index bd92ae06..8b0de2ba 100644 --- a/migrations/1704341578275_jubilee-numbers.ts +++ b/migrations/1683047918926_counts-by-mime-type.ts @@ -4,13 +4,15 @@ import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { - pgm.addColumn('inscriptions', { - classic_number: { - type: 'bigint', + pgm.createTable('counts_by_mime_type', { + mime_type: { + type: 'text', + primaryKey: true, + }, + count: { + type: 'int', + notNull: true, + default: 0, }, }); } - -export function down(pgm: MigrationBuilder): void { - pgm.dropColumn('inscriptions', 'classic_number'); -} diff --git a/migrations/1683047918926_mime-type-counts.ts b/migrations/1683047918926_mime-type-counts.ts deleted file mode 100644 index be4d52d1..00000000 --- a/migrations/1683047918926_mime-type-counts.ts +++ /dev/null @@ -1,13 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createMaterializedView( - 'mime_type_counts', - { data: true }, - `SELECT mime_type, COUNT(*) AS count FROM inscriptions GROUP BY mime_type` - ); - pgm.createIndex('mime_type_counts', ['mime_type'], { unique: true }); -} diff --git a/migrations/1693234845450_locations-null-inscription-id-index.ts b/migrations/1683061444855_counts-by-sat-rarity.ts similarity index 56% rename from migrations/1693234845450_locations-null-inscription-id-index.ts rename to migrations/1683061444855_counts-by-sat-rarity.ts index c522d1c3..5fc074ce 100644 --- a/migrations/1693234845450_locations-null-inscription-id-index.ts +++ b/migrations/1683061444855_counts-by-sat-rarity.ts @@ -4,5 +4,15 @@ import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { - pgm.createIndex('locations', ['inscription_id'], { where: 'inscription_id IS NULL' }); + pgm.createTable('counts_by_sat_rarity', { + sat_rarity: { + type: 'text', + primaryKey: true, + }, + count: { + type: 'int', + notNull: true, + default: 0, + }, + }); } diff --git a/migrations/1683061444855_sat-rarity-counts.ts b/migrations/1683061444855_sat-rarity-counts.ts deleted file mode 100644 index 921d59d0..00000000 --- a/migrations/1683061444855_sat-rarity-counts.ts +++ /dev/null @@ -1,17 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createMaterializedView( - 'sat_rarity_counts', - { data: true }, - ` - SELECT sat_rarity, COUNT(*) AS count - FROM inscriptions AS i - GROUP BY sat_rarity - ` - ); - pgm.createIndex('sat_rarity_counts', ['sat_rarity'], { unique: true }); -} diff --git a/migrations/1707770109739_metadata-parent.ts 
b/migrations/1683130423352_counts-by-type.ts similarity index 66% rename from migrations/1707770109739_metadata-parent.ts rename to migrations/1683130423352_counts-by-type.ts index 0c33c976..f7de9393 100644 --- a/migrations/1707770109739_metadata-parent.ts +++ b/migrations/1683130423352_counts-by-type.ts @@ -4,12 +4,15 @@ import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { - pgm.addColumns('inscriptions', { - metadata: { + pgm.createTable('counts_by_type', { + type: { type: 'text', + primaryKey: true, }, - parent: { - type: 'text', + count: { + type: 'int', + notNull: true, + default: 0, }, }); } diff --git a/migrations/1688925112931_genesis-locations.ts b/migrations/1688925112931_genesis-locations.ts deleted file mode 100644 index 543c61e1..00000000 --- a/migrations/1688925112931_genesis-locations.ts +++ /dev/null @@ -1,36 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createTable('genesis_locations', { - inscription_id: { - type: 'bigint', - notNull: true, - }, - location_id: { - type: 'bigint', - notNull: true, - }, - block_height: { - type: 'bigint', - notNull: true, - }, - tx_index: { - type: 'bigint', - notNull: true, - }, - address: { - type: 'text', - }, - }); - pgm.createConstraint( - 'genesis_locations', - 'genesis_locations_inscription_id_unique', - 'UNIQUE(inscription_id)' - ); - pgm.createIndex('genesis_locations', ['location_id']); - pgm.createIndex('genesis_locations', ['block_height']); - pgm.createIndex('genesis_locations', ['address']); -} diff --git a/migrations/1689264599745_address-counts.ts b/migrations/1689264599745_address-counts.ts deleted file mode 100644 index 4a21827e..00000000 --- a/migrations/1689264599745_address-counts.ts +++ /dev/null @@ -1,13 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createMaterializedView( - 'address_counts', - { data: true }, - `SELECT address, COUNT(*) AS count FROM current_locations GROUP BY address` - ); - pgm.createIndex('address_counts', ['address'], { unique: true }); -} diff --git a/migrations/1698856424356_locations-transfer-type.ts b/migrations/1689264599745_counts-by-address.ts similarity index 64% rename from migrations/1698856424356_locations-transfer-type.ts rename to migrations/1689264599745_counts-by-address.ts index c3ba335f..1e0bd9e3 100644 --- a/migrations/1698856424356_locations-transfer-type.ts +++ b/migrations/1689264599745_counts-by-address.ts @@ -4,11 +4,15 @@ import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { - pgm.createType('transfer_type', ['transferred', 'spent_in_fees', 'burnt']); - pgm.addColumn('locations', { - transfer_type: { - type: 'transfer_type', + pgm.createTable('counts_by_address', { + address: { + type: 'text', + primaryKey: true, + }, + count: { + type: 'int', notNull: true, + default: 0, }, }); } diff --git a/migrations/1683130423352_inscription-count.ts 
b/migrations/1689264599745_counts-by-genesis-address.ts similarity index 55% rename from migrations/1683130423352_inscription-count.ts rename to migrations/1689264599745_counts-by-genesis-address.ts index 30c5b0fa..95039cdf 100644 --- a/migrations/1683130423352_inscription-count.ts +++ b/migrations/1689264599745_counts-by-genesis-address.ts @@ -4,10 +4,15 @@ import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; export const shorthands: ColumnDefinitions | undefined = undefined; export function up(pgm: MigrationBuilder): void { - pgm.createMaterializedView( - 'inscription_count', - { data: true }, - `SELECT COUNT(*) AS count FROM inscriptions` - ); - pgm.createIndex('inscription_count', ['count'], { unique: true }); + pgm.createTable('counts_by_genesis_address', { + address: { + type: 'text', + primaryKey: true, + }, + count: { + type: 'int', + notNull: true, + default: 0, + }, + }); } diff --git a/migrations/1690229956705_inscription-recursions.ts b/migrations/1690229956705_inscription-recursions.ts deleted file mode 100644 index 83f97e65..00000000 --- a/migrations/1690229956705_inscription-recursions.ts +++ /dev/null @@ -1,46 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createTable('inscription_recursions', { - id: { - type: 'bigserial', - primaryKey: true, - }, - inscription_id: { - type: 'bigint', - notNull: true, - }, - ref_inscription_id: { - type: 'bigint', - notNull: true, - }, - }); - pgm.createConstraint( - 'inscription_recursions', - 'locations_inscription_id_fk', - 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'inscription_recursions', - 'locations_ref_inscription_id_fk', - 'FOREIGN KEY(ref_inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'inscription_recursions', - 'inscriptions_inscription_id_ref_inscription_id_unique', - 'UNIQUE(inscription_id, ref_inscription_id)' - ); - pgm.createIndex('inscription_recursions', ['ref_inscription_id']); - - // Add columns to `inscriptions` table. 
- pgm.addColumn('inscriptions', { - recursive: { - type: 'boolean', - default: false, - }, - }); - pgm.createIndex('inscriptions', ['recursive'], { where: 'recursive = TRUE' }); -} diff --git a/migrations/1690476164909_count-views-to-tables.ts b/migrations/1690476164909_count-views-to-tables.ts deleted file mode 100644 index b6a167ca..00000000 --- a/migrations/1690476164909_count-views-to-tables.ts +++ /dev/null @@ -1,149 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.dropMaterializedView('mime_type_counts'); - pgm.createTable('counts_by_mime_type', { - mime_type: { - type: 'text', - notNull: true, - primaryKey: true, - }, - count: { - type: 'bigint', - notNull: true, - default: 1, - }, - }); - pgm.sql(` - INSERT INTO counts_by_mime_type ( - SELECT mime_type, COUNT(*) AS count FROM inscriptions GROUP BY mime_type - ) - `); - - pgm.dropMaterializedView('sat_rarity_counts'); - pgm.createTable('counts_by_sat_rarity', { - sat_rarity: { - type: 'text', - notNull: true, - primaryKey: true, - }, - count: { - type: 'bigint', - notNull: true, - default: 1, - }, - }); - pgm.sql(` - INSERT INTO counts_by_sat_rarity ( - SELECT sat_rarity, COUNT(*) AS count FROM inscriptions GROUP BY sat_rarity - ) - `); - - pgm.dropMaterializedView('address_counts'); - pgm.createTable('counts_by_address', { - address: { - type: 'text', - notNull: true, - primaryKey: true, - }, - count: { - type: 'bigint', - notNull: true, - default: 1, - }, - }); - pgm.sql(` - INSERT INTO counts_by_address ( - SELECT address, COUNT(*) AS count FROM current_locations GROUP BY address - ) - `); - - pgm.createTable('counts_by_genesis_address', { - address: { - type: 'text', - notNull: true, - primaryKey: true, - }, - count: { - type: 'bigint', - notNull: true, - default: 1, - }, - }); - pgm.sql(` - INSERT INTO counts_by_genesis_address ( - SELECT address, COUNT(*) AS count FROM genesis_locations GROUP BY address - ) - `); - - pgm.dropMaterializedView('inscription_count'); - pgm.createTable('counts_by_type', { - type: { - type: 'text', - notNull: true, - primaryKey: true, - }, - count: { - type: 'bigint', - notNull: true, - default: 1, - }, - }); - pgm.sql(` - INSERT INTO counts_by_type ( - SELECT 'blessed' AS type, COUNT(*) AS count FROM inscriptions WHERE number >= 0 - ) - `); - pgm.sql(` - INSERT INTO counts_by_type ( - SELECT 'cursed' AS type, COUNT(*) AS count FROM inscriptions WHERE number < 0 - ) - `); - - pgm.createIndex('inscriptions_per_block', ['block_hash']); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropTable('counts_by_mime_type'); - pgm.createMaterializedView( - 'mime_type_counts', - { data: true }, - `SELECT mime_type, COUNT(*) AS count FROM inscriptions GROUP BY mime_type` - ); - pgm.createIndex('mime_type_counts', ['mime_type'], { unique: true }); - - pgm.dropTable('counts_by_sat_rarity'); - pgm.createMaterializedView( - 'sat_rarity_counts', - { data: true }, - ` - SELECT sat_rarity, COUNT(*) AS count - FROM inscriptions AS i - GROUP BY sat_rarity - ` - ); - pgm.createIndex('sat_rarity_counts', ['sat_rarity'], { unique: true }); - - pgm.dropTable('counts_by_address'); - pgm.createMaterializedView( - 'address_counts', - { data: true }, - `SELECT address, COUNT(*) AS count FROM current_locations GROUP BY address` - ); - pgm.createIndex('address_counts', ['address'], { unique: true 
}); - - pgm.dropTable('counts_by_type'); - pgm.createMaterializedView( - 'inscription_count', - { data: true }, - `SELECT COUNT(*) AS count FROM inscriptions` - ); - pgm.createIndex('inscription_count', ['count'], { unique: true }); - - pgm.dropIndex('inscriptions_per_block', ['block_hash']); - - pgm.dropTable('counts_by_genesis_address'); -} diff --git a/migrations/1690832271103_location-pointer-constraints.ts b/migrations/1690832271103_location-pointer-constraints.ts deleted file mode 100644 index 07184b77..00000000 --- a/migrations/1690832271103_location-pointer-constraints.ts +++ /dev/null @@ -1,55 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.dropConstraint('genesis_locations', 'genesis_locations_inscription_id_unique'); - pgm.createConstraint('genesis_locations', 'genesis_locations_inscription_id_pk', { - primaryKey: 'inscription_id', - }); - pgm.createConstraint( - 'genesis_locations', - 'genesis_locations_inscription_id_fk', - 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'genesis_locations', - 'genesis_locations_location_id_fk', - 'FOREIGN KEY(location_id) REFERENCES locations(id) ON DELETE CASCADE' - ); - - pgm.dropConstraint('current_locations', 'current_locations_inscription_id_unique'); - pgm.createConstraint('current_locations', 'current_locations_inscription_id_pk', { - primaryKey: 'inscription_id', - }); - pgm.createConstraint( - 'current_locations', - 'current_locations_inscription_id_fk', - 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'current_locations', - 'current_locations_location_id_fk', - 'FOREIGN KEY(location_id) REFERENCES locations(id) ON DELETE CASCADE' - ); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropConstraint('genesis_locations', 'genesis_locations_inscription_id_pk'); - pgm.dropConstraint('genesis_locations', 'genesis_locations_inscription_id_fk'); - pgm.dropConstraint('genesis_locations', 'genesis_locations_location_id_fk'); - pgm.createConstraint( - 'genesis_locations', - 'genesis_locations_inscription_id_unique', - 'UNIQUE(inscription_id)' - ); - pgm.dropConstraint('current_locations', 'current_locations_inscription_id_pk'); - pgm.dropConstraint('current_locations', 'current_locations_inscription_id_fk'); - pgm.dropConstraint('current_locations', 'current_locations_location_id_fk'); - pgm.createConstraint( - 'current_locations', - 'current_locations_inscription_id_unique', - 'UNIQUE(inscription_id)' - ); -} diff --git a/migrations/1692980393413_locations-unique.ts b/migrations/1692980393413_locations-unique.ts deleted file mode 100644 index 56491527..00000000 --- a/migrations/1692980393413_locations-unique.ts +++ /dev/null @@ -1,34 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.dropConstraint('locations', 'locations_output_offset_unique'); - pgm.createIndex('locations', ['output', 'offset']); - pgm.createConstraint( - 'locations', - 'locations_inscription_id_block_height_tx_index_unique', - 'UNIQUE(inscription_id, block_height, tx_index)' - ); -} - -export function down(pgm: 
MigrationBuilder): void { - pgm.dropConstraint('locations', 'locations_inscription_id_block_height_tx_index_unique'); - pgm.dropIndex('locations', ['output', 'offset']); - // Modify any repeated offsets slightly so we can re-add the unique constraint. This is mostly for - // unit testing purposes. - pgm.sql(` - WITH duplicates AS ( - SELECT - id, output, "offset", ROW_NUMBER() OVER (PARTITION BY output, "offset" ORDER BY id) as rn - FROM locations - ) - UPDATE locations - SET "offset" = duplicates."offset" + rn - 1 - FROM duplicates - WHERE locations.id = duplicates.id - AND rn > 1 - `); - pgm.createConstraint('locations', 'locations_output_offset_unique', 'UNIQUE(output, "offset")'); -} diff --git a/migrations/1693234572099_locations-remove-duplicate-index.ts b/migrations/1693234572099_locations-remove-duplicate-index.ts deleted file mode 100644 index e8103544..00000000 --- a/migrations/1693234572099_locations-remove-duplicate-index.ts +++ /dev/null @@ -1,12 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.dropIndex('locations', ['inscription_id'], { ifExists: true }); -} - -export function down(pgm: MigrationBuilder): void { - pgm.createIndex('locations', ['inscription_id'], { ifNotExists: true }); -} diff --git a/migrations/1693235147508_recursion-backfills.ts b/migrations/1693235147508_recursion-backfills.ts deleted file mode 100644 index d4fea843..00000000 --- a/migrations/1693235147508_recursion-backfills.ts +++ /dev/null @@ -1,51 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.addColumn('inscription_recursions', { - ref_inscription_genesis_id: { - type: 'text', - }, - }); - pgm.sql(` - UPDATE inscription_recursions AS ir - SET ref_inscription_genesis_id = ( - SELECT genesis_id FROM inscriptions WHERE id = ir.ref_inscription_id - ) - `); - pgm.alterColumn('inscription_recursions', 'ref_inscription_genesis_id', { notNull: true }); - pgm.alterColumn('inscription_recursions', 'ref_inscription_id', { allowNull: true }); - - pgm.createIndex('inscription_recursions', ['ref_inscription_genesis_id']); - pgm.createIndex('inscription_recursions', ['ref_inscription_id'], { - where: 'ref_inscription_id IS NULL', - name: 'inscription_recursions_ref_inscription_id_null_index', - }); - pgm.dropConstraint( - 'inscription_recursions', - 'inscriptions_inscription_id_ref_inscription_id_unique' - ); - pgm.createConstraint( - 'inscription_recursions', - 'inscription_recursions_unique', - 'UNIQUE(inscription_id, ref_inscription_genesis_id)' - ); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropConstraint('inscription_recursions', 'inscription_recursions_unique'); - pgm.dropIndex('inscription_recursions', ['ref_inscription_genesis_id']); - pgm.dropColumn('inscription_recursions', 'ref_inscription_genesis_id'); - pgm.dropIndex('inscription_recursions', ['ref_inscription_id'], { - name: 'inscription_recursions_ref_inscription_id_null_index', - }); - pgm.sql(`DELETE FROM inscription_recursions WHERE ref_inscription_id IS NULL`); - pgm.alterColumn('inscription_recursions', 'ref_inscription_id', { notNull: true }); - pgm.createConstraint( - 
'inscription_recursions', - 'inscriptions_inscription_id_ref_inscription_id_unique', - 'UNIQUE(inscription_id, ref_inscription_id)' - ); -} diff --git a/migrations/1695655140203_counts-by-recursive.ts b/migrations/1695655140203_counts-by-recursive.ts index f19322af..b0fe6bc2 100644 --- a/migrations/1695655140203_counts-by-recursive.ts +++ b/migrations/1695655140203_counts-by-recursive.ts @@ -7,21 +7,12 @@ export function up(pgm: MigrationBuilder): void { pgm.createTable('counts_by_recursive', { recursive: { type: 'boolean', - notNull: true, primaryKey: true, }, count: { type: 'bigint', notNull: true, - default: 1, + default: 0, }, }); - pgm.sql(` - INSERT INTO counts_by_recursive (recursive, count) - (SELECT recursive, COUNT(*) AS count FROM inscriptions GROUP BY recursive) - `); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropTable('counts_by_recursive'); } diff --git a/migrations/1698897577725_locations-location-index.ts b/migrations/1698897577725_locations-location-index.ts deleted file mode 100644 index bb7461db..00000000 --- a/migrations/1698897577725_locations-location-index.ts +++ /dev/null @@ -1,14 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.addColumn('locations', { - block_transfer_index: { - type: 'int', - }, - }); - pgm.addIndex('locations', ['block_height', { name: 'block_transfer_index', sort: 'DESC' }]); - pgm.addIndex('locations', ['block_hash', { name: 'block_transfer_index', sort: 'DESC' }]); -} diff --git a/migrations/1705363472553_locations-block-height-indexes.ts b/migrations/1705363472553_locations-block-height-indexes.ts deleted file mode 100644 index 304f3cac..00000000 --- a/migrations/1705363472553_locations-block-height-indexes.ts +++ /dev/null @@ -1,22 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.dropIndex('locations', ['block_hash']); - pgm.dropIndex('locations', ['block_height']); - pgm.createIndex('locations', [ - { name: 'block_height', sort: 'DESC' }, - { name: 'tx_index', sort: 'DESC' }, - ]); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropIndex('locations', [ - { name: 'block_height', sort: 'DESC' }, - { name: 'tx_index', sort: 'DESC' }, - ]); - pgm.createIndex('locations', ['block_hash']); - pgm.createIndex('locations', ['block_height']); -} diff --git a/migrations/1708471015438_remove-unused-indexes.ts b/migrations/1708471015438_remove-unused-indexes.ts deleted file mode 100644 index 1d94c6f7..00000000 --- a/migrations/1708471015438_remove-unused-indexes.ts +++ /dev/null @@ -1,18 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.dropIndex('locations', ['prev_output']); - pgm.dropIndex('locations', ['address']); - pgm.dropIndex('current_locations', ['block_height']); - pgm.dropIndex('inscription_recursions', ['ref_inscription_genesis_id']); -} - -export function down(pgm: MigrationBuilder): void { - pgm.createIndex('locations', ['prev_output']); - 
pgm.createIndex('locations', ['address']); - pgm.createIndex('current_locations', ['block_height']); - pgm.createIndex('inscription_recursions', ['ref_inscription_genesis_id']); -} diff --git a/package-lock.json b/package-lock.json index 19c72ef7..d715c605 100644 --- a/package-lock.json +++ b/package-lock.json @@ -50,6 +50,7 @@ "rimraf": "^3.0.2", "ts-jest": "^29.0.3", "ts-node": "^10.8.2", + "ts-unused-exports": "^10.0.1", "typescript": "^4.7.4" } }, @@ -18343,6 +18344,30 @@ } } }, + "node_modules/ts-unused-exports": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/ts-unused-exports/-/ts-unused-exports-10.0.1.tgz", + "integrity": "sha512-nWG8Y96pKem01Hw4j4+Mwuy+L0/9sKT7D61Q+OS3cii9ocQACuV6lu00B9qpiPhF4ReVWw3QYHDqV8+to2wbsg==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "tsconfig-paths": "^3.9.0" + }, + "bin": { + "ts-unused-exports": "bin/ts-unused-exports" + }, + "funding": { + "url": "https://github.com/pzavolinsky/ts-unused-exports?sponsor=1" + }, + "peerDependencies": { + "typescript": ">=3.8.3" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": false + } + } + }, "node_modules/tsconfig-paths": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", @@ -32335,6 +32360,16 @@ "yn": "3.1.1" } }, + "ts-unused-exports": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/ts-unused-exports/-/ts-unused-exports-10.0.1.tgz", + "integrity": "sha512-nWG8Y96pKem01Hw4j4+Mwuy+L0/9sKT7D61Q+OS3cii9ocQACuV6lu00B9qpiPhF4ReVWw3QYHDqV8+to2wbsg==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "tsconfig-paths": "^3.9.0" + } + }, "tsconfig-paths": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", diff --git a/package.json b/package.json index 132e5ff8..889db34b 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,7 @@ "migrate": "ts-node node_modules/.bin/node-pg-migrate -j ts", "lint:eslint": "eslint . 
--ext .js,.jsx,.ts,.tsx -f unix", "lint:prettier": "prettier --check src/**/*.ts tests/**/*.ts migrations/**/*.ts", + "lint:unused-exports": "ts-unused-exports tsconfig.json --showLineNumber --excludePathsFromReport=migrations/* --excludePathsFromReport=util/*", "generate:openapi": "rimraf ./tmp && node -r ts-node/register ./util/openapi-generator.ts", "generate:docs": "redoc-cli build --output ./tmp/index.html ./tmp/openapi.yaml", "generate:git-info": "rimraf .git-info && node_modules/.bin/api-toolkit-git-info", @@ -46,6 +47,7 @@ "rimraf": "^3.0.2", "ts-jest": "^29.0.3", "ts-node": "^10.8.2", + "ts-unused-exports": "^10.0.1", "typescript": "^4.7.4" }, "dependencies": { diff --git a/src/api/routes/stats.ts b/src/api/routes/stats.ts index f5539bb6..94c64a3d 100644 --- a/src/api/routes/stats.ts +++ b/src/api/routes/stats.ts @@ -31,7 +31,7 @@ const IndexRoutes: FastifyPluginCallback, Server, TypeBoxTy }, }, async (request, reply) => { - const inscriptions = await fastify.db.getInscriptionCountPerBlock({ + const inscriptions = await fastify.db.counts.getInscriptionCountPerBlock({ ...blockParam(request.query.from_block_height, 'from_block'), ...blockParam(request.query.to_block_height, 'to_block'), }); diff --git a/src/api/schemas.ts b/src/api/schemas.ts index 053f38d3..21483605 100644 --- a/src/api/schemas.ts +++ b/src/api/schemas.ts @@ -82,7 +82,7 @@ export const Brc20TickerParam = Type.String(); export const Brc20TickersParam = Type.Array(Brc20TickerParam); -export const InscriptionIdParam = Type.RegEx(/^[a-fA-F0-9]{64}i[0-9]+$/, { +const InscriptionIdParam = Type.RegEx(/^[a-fA-F0-9]{64}i[0-9]+$/, { title: 'Inscription ID', description: 'Inscription ID', examples: ['38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0'], @@ -134,7 +134,7 @@ export const BlockHeightParam = Type.RegEx(/^[0-9]+$/, { }); export const BlockHeightParamCType = TypeCompiler.Compile(BlockHeightParam); -export const BlockHashParam = Type.RegEx(/^[0]{8}[a-fA-F0-9]{56}$/, { +const BlockHashParam = Type.RegEx(/^[0]{8}[a-fA-F0-9]{56}$/, { title: 'Block Hash', description: 'Bitcoin block hash', examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'], @@ -210,7 +210,7 @@ export const LimitParam = Type.Integer({ description: 'Results per page', }); -export const Brc20OperationParam = Type.Union( +const Brc20OperationParam = Type.Union( [ Type.Literal('deploy'), Type.Literal('mint'), @@ -494,7 +494,7 @@ export const Brc20TokenResponseSchema = Type.Object( ); export type Brc20TokenResponse = Static; -export const Brc20SupplySchema = Type.Object({ +const Brc20SupplySchema = Type.Object({ max_supply: Type.String({ examples: ['21000000'] }), minted_supply: Type.String({ examples: ['1000000'] }), holders: Type.Integer({ examples: [240] }), @@ -516,7 +516,7 @@ export const Brc20TokenDetailsSchema = Type.Object( }, { title: 'BRC-20 Token Details Response' } ); -export type Brc20TokenDetails = Static; +type Brc20TokenDetails = Static; export const NotFoundResponse = Type.Object( { @@ -532,7 +532,7 @@ export const InvalidSatoshiNumberResponse = Type.Object( { title: 'Invalid Satoshi Number Response' } ); -export const InscriptionsPerBlock = Type.Object({ +const InscriptionsPerBlock = Type.Object({ block_height: Type.String({ examples: ['778921'] }), block_hash: Type.String({ examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'], diff --git a/src/api/util/cache.ts b/src/api/util/cache.ts index cc08159e..c69e0e9d 100644 --- a/src/api/util/cache.ts +++ b/src/api/util/cache.ts 
@@ -2,7 +2,7 @@ import { FastifyReply, FastifyRequest } from 'fastify'; import { InscriptionIdParamCType, InscriptionNumberParamCType } from '../schemas'; import { logger } from '@hirosystems/api-toolkit'; -export enum ETagType { +enum ETagType { inscriptionsIndex, inscription, inscriptionsPerBlock, @@ -57,11 +57,6 @@ async function handleCache(type: ETagType, request: FastifyRequest, reply: Fasti } } -export function setReplyNonCacheable(reply: FastifyReply) { - reply.removeHeader('Cache-Control'); - reply.removeHeader('Etag'); -} - /** * Retrieve the inscriptions's location timestamp as a UNIX epoch so we can use it as the response * ETag. diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index 9ff8c520..0b51af1c 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -233,7 +233,7 @@ export function hexToBuffer(hex: string): Buffer { return Buffer.from(hex.substring(2), 'hex'); } -export const has0xPrefix = (id: string) => id.substr(0, 2).toLowerCase() === '0x'; +const has0xPrefix = (id: string) => id.substr(0, 2).toLowerCase() === '0x'; export function normalizedHexString(hex: string): string { return has0xPrefix(hex) ? hex.substring(2) : hex; diff --git a/src/ordhook/server.ts b/src/ordhook/server.ts index 7f604584..19d1decc 100644 --- a/src/ordhook/server.ts +++ b/src/ordhook/server.ts @@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'; import { ENV } from '../env'; import { PgStore } from '../pg/pg-store'; import { + BitcoinPayload, ChainhookEventObserver, ChainhookNodeOptions, Payload, @@ -64,7 +65,7 @@ export async function startOrdhookServer(args: { db: PgStore }): Promise(); + recursiveRefs = new Map(); + + mimeTypeCounts = new Map(); + satRarityCounts = new Map(); + inscriptionTypeCounts = new Map(); + genesisAddressCounts = new Map(); + recursiveCounts = new Map(); + + constructor(blockHeight: number, blockHash: string, timestamp: number) { + this.blockHeight = blockHeight; + this.blockHash = blockHash; + this.timestamp = timestamp; + } + + reveal(reveal: BitcoinInscriptionRevealed, tx_id: string) { + const satoshi = new OrdinalSatoshi(reveal.ordinal_number); + const ordinal_number = reveal.ordinal_number.toString(); + this.satoshis.push({ + ordinal_number, + rarity: satoshi.rarity, + coinbase_height: satoshi.blockHeight, + }); + const satpoint = parseSatPoint(reveal.satpoint_post_inscription); + const recursive_refs = getInscriptionRecursion(reveal.content_bytes); + const content_type = removeNullBytes(reveal.content_type); + const mime_type = content_type.split(';')[0]; + this.inscriptions.push({ + genesis_id: reveal.inscription_id, + mime_type, + content_type, + content_length: reveal.content_length, + block_height: this.blockHeight, + tx_index: reveal.tx_index, + address: reveal.inscriber_address, + number: reveal.inscription_number.jubilee, + classic_number: reveal.inscription_number.classic, + content: removeNullBytes(reveal.content_bytes), + fee: reveal.inscription_fee.toString(), + curse_type: reveal.curse_type ? JSON.stringify(reveal.curse_type) : null, + ordinal_number, + recursive: recursive_refs.length > 0, + metadata: reveal.metadata ? JSON.stringify(reveal.metadata) : null, + parent: reveal.parent, + timestamp: this.timestamp, + }); + this.increaseMimeTypeCount(mime_type); + this.increaseSatRarityCount(satoshi.rarity); + this.increaseInscriptionTypeCount(reveal.inscription_number.classic < 0 ? 
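+      // a negative classic inscription number marks a cursed inscription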
'cursed' : 'blessed'); + this.increaseGenesisAddressCount(reveal.inscriber_address); + this.increaseRecursiveCount(recursive_refs.length > 0); + this.locations.push({ + block_hash: this.blockHash, + block_height: this.blockHeight, + tx_id, + tx_index: reveal.tx_index, + ordinal_number, + address: reveal.inscriber_address, + output: `${satpoint.tx_id}:${satpoint.vout}`, + offset: satpoint.offset ?? null, + prev_output: null, + prev_offset: null, + value: reveal.inscription_output_value.toString(), + timestamp: this.timestamp, + transfer_type: getTransferType(reveal), + }); + this.updateCurrentLocation(ordinal_number, { + ordinal_number, + block_height: this.blockHeight, + tx_index: reveal.tx_index, + address: reveal.inscriber_address, + }); + if (recursive_refs.length > 0) this.recursiveRefs.set(reveal.inscription_id, recursive_refs); + } + + transfer(transfer: BitcoinInscriptionTransferred, tx_id: string) { + const satpoint = parseSatPoint(transfer.satpoint_post_transfer); + const prevSatpoint = parseSatPoint(transfer.satpoint_pre_transfer); + const ordinal_number = transfer.ordinal_number.toString(); + const address = transfer.destination.value ?? null; + this.locations.push({ + block_hash: this.blockHash, + block_height: this.blockHeight, + tx_id, + tx_index: transfer.tx_index, + ordinal_number, + address, + output: `${satpoint.tx_id}:${satpoint.vout}`, + offset: satpoint.offset ?? null, + prev_output: `${prevSatpoint.tx_id}:${prevSatpoint.vout}`, + prev_offset: prevSatpoint.offset ?? null, + value: transfer.post_transfer_output_value + ? transfer.post_transfer_output_value.toString() + : null, + timestamp: this.timestamp, + transfer_type: + toEnumValue(DbLocationTransferType, transfer.destination.type) ?? + DbLocationTransferType.transferred, + }); + this.updateCurrentLocation(ordinal_number, { + ordinal_number, + block_height: this.blockHeight, + tx_index: transfer.tx_index, + address, + }); + } + + private updateCurrentLocation(ordinal_number: string, data: DbCurrentLocationInsert) { + const current = this.currentLocations.get(ordinal_number); + if ( + current === undefined || + (current && + (data.block_height > current.block_height || + (data.block_height === current.block_height && data.tx_index > current.tx_index))) + ) { + this.currentLocations.set(ordinal_number, data); + } + } + + private increaseMimeTypeCount(mime_type: string) { + const current = this.mimeTypeCounts.get(mime_type); + if (current == undefined) { + this.mimeTypeCounts.set(mime_type, 1); + } else { + this.mimeTypeCounts.set(mime_type, current + 1); + } + } + + private increaseSatRarityCount(rarity: string) { + const current = this.satRarityCounts.get(rarity); + if (current == undefined) { + this.satRarityCounts.set(rarity, 1); + } else { + this.satRarityCounts.set(rarity, current + 1); + } + } + + private increaseInscriptionTypeCount(type: string) { + const current = this.inscriptionTypeCounts.get(type); + if (current == undefined) { + this.inscriptionTypeCounts.set(type, 1); + } else { + this.inscriptionTypeCounts.set(type, current + 1); + } + } + + private increaseGenesisAddressCount(address: string | null) { + if (!address) return; + const current = this.genesisAddressCounts.get(address); + if (current == undefined) { + this.genesisAddressCounts.set(address, 1); + } else { + this.genesisAddressCounts.set(address, current + 1); + } + } + + private increaseRecursiveCount(recursive: boolean) { + const current = this.recursiveCounts.get(recursive); + if (current == undefined) { + 
this.recursiveCounts.set(recursive, 1);
+    } else {
+      this.recursiveCounts.set(recursive, current + 1);
+    }
+  }
+}
diff --git a/src/pg/brc20/brc20-block-cache.ts b/src/pg/brc20/brc20-block-cache.ts
new file mode 100644
index 00000000..3bd35659
--- /dev/null
+++ b/src/pg/brc20/brc20-block-cache.ts
@@ -0,0 +1,228 @@
+import BigNumber from 'bignumber.js';
+import { DbBrc20TokenInsert, DbBrc20OperationInsert, DbBrc20Operation } from './types';
+import {
+  BitcoinBrc20DeployOperation,
+  BitcoinBrc20MintOperation,
+  BitcoinBrc20TransferOperation,
+  BitcoinBrc20TransferSendOperation,
+} from '@hirosystems/chainhook-client';
+
+interface AddressBalanceData {
+  avail: BigNumber;
+  trans: BigNumber;
+  total: BigNumber;
+}
+
+/**
+ * In-memory cache for an Ordhook block's BRC-20 activities.
+ */
+export class Brc20BlockCache {
+  blockHeight: number;
+
+  tokens: DbBrc20TokenInsert[] = [];
+  operations: DbBrc20OperationInsert[] = [];
+  tokenMintSupplies = new Map<string, BigNumber>();
+  tokenTxCounts = new Map<string, number>();
+  operationCounts = new Map<DbBrc20Operation, number>();
+  addressOperationCounts = new Map<string, Map<DbBrc20Operation, number>>();
+  totalBalanceChanges = new Map<string, Map<string, AddressBalanceData>>();
+  transferReceivers = new Map<string, string>();
+
+  constructor(blockHeight: number) {
+    this.blockHeight = blockHeight;
+  }
+
+  deploy(operation: BitcoinBrc20DeployOperation, tx_id: string, tx_index: number) {
+    this.tokens.push({
+      block_height: this.blockHeight,
+      genesis_id: operation.deploy.inscription_id,
+      tx_id,
+      address: operation.deploy.address,
+      ticker: operation.deploy.tick,
+      max: operation.deploy.max,
+      limit: operation.deploy.lim,
+      decimals: operation.deploy.dec,
+      self_mint: operation.deploy.self_mint,
+    });
+    this.operations.push({
+      block_height: this.blockHeight,
+      tx_index,
+      genesis_id: operation.deploy.inscription_id,
+      ticker: operation.deploy.tick,
+      address: operation.deploy.address,
+      avail_balance: '0',
+      trans_balance: '0',
+      operation: DbBrc20Operation.deploy,
+    });
+    this.increaseOperationCount(DbBrc20Operation.deploy);
+    this.increaseAddressOperationCount(operation.deploy.address, DbBrc20Operation.deploy);
+    this.increaseTokenTxCount(operation.deploy.tick);
+  }
+
+  mint(operation: BitcoinBrc20MintOperation, tx_index: number) {
+    this.operations.push({
+      block_height: this.blockHeight,
+      tx_index,
+      genesis_id: operation.mint.inscription_id,
+      ticker: operation.mint.tick,
+      address: operation.mint.address,
+      avail_balance: operation.mint.amt,
+      trans_balance: '0',
+      operation: DbBrc20Operation.mint,
+    });
+    const amt = BigNumber(operation.mint.amt);
+    this.increaseTokenMintedSupply(operation.mint.tick, amt);
+    this.increaseTokenTxCount(operation.mint.tick);
+    this.increaseOperationCount(DbBrc20Operation.mint);
+    this.increaseAddressOperationCount(operation.mint.address, DbBrc20Operation.mint);
+    this.updateAddressBalance(operation.mint.tick, operation.mint.address, amt, BigNumber(0), amt);
+  }
+
+  transfer(operation: BitcoinBrc20TransferOperation, tx_index: number) {
+    this.operations.push({
+      block_height: this.blockHeight,
+      tx_index,
+      genesis_id: operation.transfer.inscription_id,
+      ticker: operation.transfer.tick,
+      address: operation.transfer.address,
+      avail_balance: BigNumber(operation.transfer.amt).negated().toString(),
+      trans_balance: operation.transfer.amt,
+      operation: DbBrc20Operation.transfer,
+    });
+    const amt = BigNumber(operation.transfer.amt);
+    this.increaseOperationCount(DbBrc20Operation.transfer);
+    this.increaseTokenTxCount(operation.transfer.tick);
+    this.increaseAddressOperationCount(operation.transfer.address, DbBrc20Operation.transfer);
+    this.updateAddressBalance(
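+      // a transfer moves `amt` from the address's available balance to its transferable balance;
+      // the total balance stays the same until the transfer is sent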
+ operation.transfer.tick, + operation.transfer.address, + amt.negated(), + amt, + BigNumber(0) + ); + } + + transferSend(operation: BitcoinBrc20TransferSendOperation, tx_index: number) { + this.operations.push({ + block_height: this.blockHeight, + tx_index, + genesis_id: operation.transfer_send.inscription_id, + ticker: operation.transfer_send.tick, + address: operation.transfer_send.sender_address, + avail_balance: '0', + trans_balance: BigNumber(operation.transfer_send.amt).negated().toString(), + operation: DbBrc20Operation.transferSend, + }); + this.transferReceivers.set( + operation.transfer_send.inscription_id, + operation.transfer_send.receiver_address + ); + this.operations.push({ + block_height: this.blockHeight, + tx_index, + genesis_id: operation.transfer_send.inscription_id, + ticker: operation.transfer_send.tick, + address: operation.transfer_send.receiver_address, + avail_balance: operation.transfer_send.amt, + trans_balance: '0', + operation: DbBrc20Operation.transferReceive, + }); + const amt = BigNumber(operation.transfer_send.amt); + this.increaseOperationCount(DbBrc20Operation.transferSend); + this.increaseTokenTxCount(operation.transfer_send.tick); + this.increaseAddressOperationCount( + operation.transfer_send.sender_address, + DbBrc20Operation.transferSend + ); + if (operation.transfer_send.sender_address != operation.transfer_send.receiver_address) { + this.increaseAddressOperationCount( + operation.transfer_send.receiver_address, + DbBrc20Operation.transferSend + ); + } + this.updateAddressBalance( + operation.transfer_send.tick, + operation.transfer_send.sender_address, + BigNumber('0'), + amt.negated(), + amt.negated() + ); + this.updateAddressBalance( + operation.transfer_send.tick, + operation.transfer_send.receiver_address, + amt, + BigNumber(0), + amt + ); + } + + private increaseOperationCount(operation: DbBrc20Operation) { + this.increaseOperationCountInternal(this.operationCounts, operation); + } + private increaseOperationCountInternal( + map: Map, + operation: DbBrc20Operation + ) { + const current = map.get(operation); + if (current == undefined) { + map.set(operation, 1); + } else { + map.set(operation, current + 1); + } + } + + private increaseTokenMintedSupply(ticker: string, amount: BigNumber) { + const current = this.tokenMintSupplies.get(ticker); + if (current == undefined) { + this.tokenMintSupplies.set(ticker, amount); + } else { + this.tokenMintSupplies.set(ticker, current.plus(amount)); + } + } + + private increaseTokenTxCount(ticker: string) { + const current = this.tokenTxCounts.get(ticker); + if (current == undefined) { + this.tokenTxCounts.set(ticker, 1); + } else { + this.tokenTxCounts.set(ticker, current + 1); + } + } + + private increaseAddressOperationCount(address: string, operation: DbBrc20Operation) { + const current = this.addressOperationCounts.get(address); + if (current == undefined) { + const opMap = new Map(); + this.increaseOperationCountInternal(opMap, operation); + this.addressOperationCounts.set(address, opMap); + } else { + this.increaseOperationCountInternal(current, operation); + } + } + + private updateAddressBalance( + ticker: string, + address: string, + availBalance: BigNumber, + transBalance: BigNumber, + totalBalance: BigNumber + ) { + const current = this.totalBalanceChanges.get(address); + if (current === undefined) { + const opMap = new Map(); + opMap.set(ticker, { avail: availBalance, trans: transBalance, total: totalBalance }); + this.totalBalanceChanges.set(address, opMap); + } else { + const currentTick = 
current.get(ticker); + if (currentTick === undefined) { + current.set(ticker, { avail: availBalance, trans: transBalance, total: totalBalance }); + } else { + current.set(ticker, { + avail: availBalance.plus(currentTick.avail), + trans: transBalance.plus(currentTick.trans), + total: totalBalance.plus(currentTick.total), + }); + } + } + } +} diff --git a/src/pg/brc20/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts index 1699e041..5f727507 100644 --- a/src/pg/brc20/brc20-pg-store.ts +++ b/src/pg/brc20/brc20-pg-store.ts @@ -6,169 +6,58 @@ import { DbBrc20Holder, DbBrc20Token, DbBrc20TokenWithSupply, - DbBrc20Operation, } from './types'; import { Brc20TokenOrderBy } from '../../api/schemas'; import { objRemoveUndefinedValues } from '../helpers'; import { BitcoinEvent } from '@hirosystems/chainhook-client'; -import BigNumber from 'bignumber.js'; -import { Brc20BlockCache, sqlOr } from './helpers'; +import { sqlOr } from './helpers'; import { INSERT_BATCH_SIZE } from '../pg-store'; +import { Brc20BlockCache } from './brc20-block-cache'; export class Brc20PgStore extends BasePgStoreModule { - async updateBrc20Operations(event: BitcoinEvent, direction: 'apply' | 'rollback'): Promise { - await this.sqlWriteTransaction(async sql => { - const block_height = event.block_identifier.index.toString(); - const cache = new Brc20BlockCache(); - for (const tx of event.transactions) { - const tx_index = tx.metadata.index.toString(); - if (tx.metadata.brc20_operation) { - const operation = tx.metadata.brc20_operation; - if ('deploy' in operation) { - cache.tokens.push({ - block_height, - genesis_id: operation.deploy.inscription_id, - tx_id: tx.transaction_identifier.hash, - address: operation.deploy.address, - ticker: operation.deploy.tick, - max: operation.deploy.max, - limit: operation.deploy.lim, - decimals: operation.deploy.dec, - self_mint: operation.deploy.self_mint, - }); - cache.operations.push({ - block_height, - tx_index, - genesis_id: operation.deploy.inscription_id, - ticker: operation.deploy.tick, - address: operation.deploy.address, - avail_balance: '0', - trans_balance: '0', - operation: DbBrc20Operation.deploy, - }); - cache.increaseOperationCount(DbBrc20Operation.deploy); - cache.increaseAddressOperationCount(operation.deploy.address, DbBrc20Operation.deploy); - cache.increaseTokenTxCount(operation.deploy.tick); - logger.info( - `Brc20PgStore ${direction} deploy ${operation.deploy.tick} by ${operation.deploy.address} at height ${block_height}` - ); - } else if ('mint' in operation) { - cache.operations.push({ - block_height, - tx_index, - genesis_id: operation.mint.inscription_id, - ticker: operation.mint.tick, - address: operation.mint.address, - avail_balance: operation.mint.amt, - trans_balance: '0', - operation: DbBrc20Operation.mint, - }); - const amt = BigNumber(operation.mint.amt); - cache.increaseTokenMintedSupply(operation.mint.tick, amt); - cache.increaseTokenTxCount(operation.mint.tick); - cache.increaseOperationCount(DbBrc20Operation.mint); - cache.increaseAddressOperationCount(operation.mint.address, DbBrc20Operation.mint); - cache.updateAddressBalance( - operation.mint.tick, - operation.mint.address, - amt, - BigNumber(0), - amt - ); - logger.info( - `Brc20PgStore ${direction} mint ${operation.mint.tick} ${operation.mint.amt} by ${operation.mint.address} at height ${block_height}` - ); - } else if ('transfer' in operation) { - cache.operations.push({ - block_height, - tx_index, - genesis_id: operation.transfer.inscription_id, - ticker: operation.transfer.tick, - address: 
operation.transfer.address, - avail_balance: BigNumber(operation.transfer.amt).negated().toString(), - trans_balance: operation.transfer.amt, - operation: DbBrc20Operation.transfer, - }); - const amt = BigNumber(operation.transfer.amt); - cache.increaseOperationCount(DbBrc20Operation.transfer); - cache.increaseTokenTxCount(operation.transfer.tick); - cache.increaseAddressOperationCount( - operation.transfer.address, - DbBrc20Operation.transfer - ); - cache.updateAddressBalance( - operation.transfer.tick, - operation.transfer.address, - amt.negated(), - amt, - BigNumber(0) - ); - logger.info( - `Brc20PgStore ${direction} transfer ${operation.transfer.tick} ${operation.transfer.amt} by ${operation.transfer.address} at height ${block_height}` - ); - } else if ('transfer_send' in operation) { - cache.operations.push({ - block_height, - tx_index, - genesis_id: operation.transfer_send.inscription_id, - ticker: operation.transfer_send.tick, - address: operation.transfer_send.sender_address, - avail_balance: '0', - trans_balance: BigNumber(operation.transfer_send.amt).negated().toString(), - operation: DbBrc20Operation.transferSend, - }); - cache.transferReceivers.set( - operation.transfer_send.inscription_id, - operation.transfer_send.receiver_address - ); - cache.operations.push({ - block_height, - tx_index, - genesis_id: operation.transfer_send.inscription_id, - ticker: operation.transfer_send.tick, - address: operation.transfer_send.receiver_address, - avail_balance: operation.transfer_send.amt, - trans_balance: '0', - operation: DbBrc20Operation.transferReceive, - }); - const amt = BigNumber(operation.transfer_send.amt); - cache.increaseOperationCount(DbBrc20Operation.transferSend); - cache.increaseTokenTxCount(operation.transfer_send.tick); - cache.increaseAddressOperationCount( - operation.transfer_send.sender_address, - DbBrc20Operation.transferSend - ); - if ( - operation.transfer_send.sender_address != operation.transfer_send.receiver_address - ) { - cache.increaseAddressOperationCount( - operation.transfer_send.receiver_address, - DbBrc20Operation.transferSend - ); - } - cache.updateAddressBalance( - operation.transfer_send.tick, - operation.transfer_send.sender_address, - BigNumber('0'), - amt.negated(), - amt.negated() - ); - cache.updateAddressBalance( - operation.transfer_send.tick, - operation.transfer_send.receiver_address, - amt, - BigNumber(0), - amt - ); - logger.info( - `Brc20PgStore ${direction} transfer_send ${operation.transfer_send.tick} ${operation.transfer_send.amt} from ${operation.transfer_send.sender_address} to ${operation.transfer_send.receiver_address} at height ${block_height}` - ); - } + async updateBrc20Operations( + sql: PgSqlClient, + event: BitcoinEvent, + direction: 'apply' | 'rollback' + ): Promise { + const block_height = event.block_identifier.index; + const cache = new Brc20BlockCache(block_height); + for (const tx of event.transactions) { + const tx_id = tx.transaction_identifier.hash; + const tx_index = tx.metadata.index; + if (tx.metadata.brc20_operation) { + const operation = tx.metadata.brc20_operation; + if ('deploy' in operation) { + cache.deploy(operation, tx_id, tx_index); + logger.info( + `Brc20PgStore ${direction} deploy ${operation.deploy.tick} by ${operation.deploy.address} at height ${block_height}` + ); + } else if ('mint' in operation) { + cache.mint(operation, tx_index); + logger.info( + `Brc20PgStore ${direction} mint ${operation.mint.tick} ${operation.mint.amt} by ${operation.mint.address} at height ${block_height}` + ); + } else if 
('transfer' in operation) { + cache.transfer(operation, tx_index); + logger.info( + `Brc20PgStore ${direction} transfer ${operation.transfer.tick} ${operation.transfer.amt} by ${operation.transfer.address} at height ${block_height}` + ); + } else if ('transfer_send' in operation) { + cache.transferSend(operation, tx_index); + logger.info( + `Brc20PgStore ${direction} transfer_send ${operation.transfer_send.tick} ${operation.transfer_send.amt} from ${operation.transfer_send.sender_address} to ${operation.transfer_send.receiver_address} at height ${block_height}` + ); } } - if (direction === 'apply') await this.applyOperations(sql, cache); - else await this.rollBackOperations(sql, cache); - }); + } + switch (direction) { + case 'apply': + await this.applyOperations(sql, cache); + break; + case 'rollback': + await this.rollBackOperations(sql, cache); + break; + } } private async applyOperations(sql: PgSqlClient, cache: Brc20BlockCache) { @@ -310,7 +199,7 @@ export class Brc20PgStore extends BasePgStoreModule { const orderBy = args.order_by === Brc20TokenOrderBy.tx_count ? this.sql`d.tx_count DESC` // tx_count - : this.sql`l.block_height DESC, l.tx_index DESC`; // default: `index` + : this.sql`i.block_height DESC, i.tx_index DESC`; // default: `index` const results = await this.sql<(DbBrc20Token & { total: number })[]>` ${ args.ticker === undefined @@ -322,14 +211,12 @@ export class Brc20PgStore extends BasePgStoreModule { : this.sql`` } SELECT - d.*, i.number, l.timestamp, + d.*, i.number, i.timestamp, ${ args.ticker ? this.sql`COUNT(*) OVER()` : this.sql`(SELECT count FROM global_count)` } AS total FROM brc20_tokens AS d INNER JOIN inscriptions AS i ON i.genesis_id = d.genesis_id - INNER JOIN genesis_locations AS g ON g.inscription_id = i.id - INNER JOIN locations AS l ON l.id = g.location_id ${tickerPrefixCondition ? this.sql`WHERE ${tickerPrefixCondition}` : this.sql``} ORDER BY ${orderBy} OFFSET ${args.offset} @@ -395,11 +282,9 @@ export class Brc20PgStore extends BasePgStoreModule { const result = await this.sql` WITH token AS ( SELECT - d.*, i.number, i.genesis_id, l.timestamp + d.*, i.number, i.genesis_id, i.timestamp FROM brc20_tokens AS d INNER JOIN inscriptions AS i ON i.genesis_id = d.genesis_id - INNER JOIN genesis_locations AS g ON g.inscription_id = i.id - INNER JOIN locations AS l ON l.id = g.location_id WHERE d.ticker = LOWER(${args.ticker}) ), holders AS ( @@ -496,8 +381,8 @@ export class Brc20PgStore extends BasePgStoreModule { e.address, e.to_address, d.ticker, - l.genesis_id AS inscription_id, - l.block_height, + e.genesis_id AS inscription_id, + i.block_height, l.block_hash, l.tx_id, l.timestamp, @@ -513,7 +398,8 @@ export class Brc20PgStore extends BasePgStoreModule { } AS total FROM brc20_operations AS e INNER JOIN brc20_tokens AS d ON d.ticker = e.ticker - INNER JOIN locations AS l ON e.genesis_id = l.genesis_id AND e.block_height = l.block_height AND e.tx_index = l.tx_index + INNER JOIN inscriptions AS i ON i.genesis_id = e.genesis_id + INNER JOIN locations AS l ON i.ordinal_number = l.ordinal_number AND e.block_height = l.block_height AND e.tx_index = l.tx_index WHERE TRUE ${ operationsFilter @@ -521,7 +407,7 @@ export class Brc20PgStore extends BasePgStoreModule { : sql`AND e.operation <> 'transfer_receive'` } ${filters.ticker ? sql`AND e.ticker IN ${sql(filters.ticker)}` : sql``} - ${filters.block_height ? sql`AND l.block_height = ${filters.block_height}` : sql``} + ${filters.block_height ? 
sql`AND e.block_height = ${filters.block_height}` : sql``} ${ filters.address ? sql`AND (e.address = ${filters.address} OR e.to_address = ${filters.address})` diff --git a/src/pg/brc20/helpers.ts b/src/pg/brc20/helpers.ts index 1d04e767..b0e9e6c1 100644 --- a/src/pg/brc20/helpers.ts +++ b/src/pg/brc20/helpers.ts @@ -1,5 +1,3 @@ -import BigNumber from 'bignumber.js'; -import { DbBrc20Operation, DbBrc20OperationInsert, DbBrc20TokenInsert } from './types'; import * as postgres from 'postgres'; import { PgSqlClient } from '@hirosystems/api-toolkit'; @@ -9,90 +7,3 @@ export function sqlOr( ) { return partials?.reduce((acc, curr) => sql`${acc} OR ${curr}`); } - -export interface AddressBalanceData { - avail: BigNumber; - trans: BigNumber; - total: BigNumber; -} - -export class Brc20BlockCache { - tokens: DbBrc20TokenInsert[] = []; - operations: DbBrc20OperationInsert[] = []; - tokenMintSupplies = new Map(); - tokenTxCounts = new Map(); - operationCounts = new Map(); - addressOperationCounts = new Map>(); - totalBalanceChanges = new Map>(); - transferReceivers = new Map(); - - increaseOperationCount(operation: DbBrc20Operation) { - this.increaseOperationCountInternal(this.operationCounts, operation); - } - private increaseOperationCountInternal( - map: Map, - operation: DbBrc20Operation - ) { - const current = map.get(operation); - if (current == undefined) { - map.set(operation, 1); - } else { - map.set(operation, current + 1); - } - } - - increaseTokenMintedSupply(ticker: string, amount: BigNumber) { - const current = this.tokenMintSupplies.get(ticker); - if (current == undefined) { - this.tokenMintSupplies.set(ticker, amount); - } else { - this.tokenMintSupplies.set(ticker, current.plus(amount)); - } - } - - increaseTokenTxCount(ticker: string) { - const current = this.tokenTxCounts.get(ticker); - if (current == undefined) { - this.tokenTxCounts.set(ticker, 1); - } else { - this.tokenTxCounts.set(ticker, current + 1); - } - } - - increaseAddressOperationCount(address: string, operation: DbBrc20Operation) { - const current = this.addressOperationCounts.get(address); - if (current == undefined) { - const opMap = new Map(); - this.increaseOperationCountInternal(opMap, operation); - this.addressOperationCounts.set(address, opMap); - } else { - this.increaseOperationCountInternal(current, operation); - } - } - - updateAddressBalance( - ticker: string, - address: string, - availBalance: BigNumber, - transBalance: BigNumber, - totalBalance: BigNumber - ) { - const current = this.totalBalanceChanges.get(address); - if (current === undefined) { - const opMap = new Map(); - opMap.set(ticker, { avail: availBalance, trans: transBalance, total: totalBalance }); - this.totalBalanceChanges.set(address, opMap); - } else { - const currentTick = current.get(ticker); - if (currentTick === undefined) { - current.set(ticker, { avail: availBalance, trans: transBalance, total: totalBalance }); - } else { - current.set(ticker, { - avail: availBalance.plus(currentTick.avail), - trans: transBalance.plus(currentTick.trans), - total: totalBalance.plus(currentTick.total), - }); - } - } - } -} diff --git a/src/pg/brc20/types.ts b/src/pg/brc20/types.ts index 9737dfb6..c3bb8a32 100644 --- a/src/pg/brc20/types.ts +++ b/src/pg/brc20/types.ts @@ -3,7 +3,7 @@ import { PgNumeric } from '@hirosystems/api-toolkit'; export type DbBrc20TokenInsert = { ticker: string; genesis_id: string; - block_height: string; + block_height: number; tx_id: string; address: string; max: PgNumeric; @@ -23,20 +23,14 @@ export enum DbBrc20Operation { 
export type DbBrc20OperationInsert = { genesis_id: string; ticker: string; - block_height: PgNumeric; - tx_index: PgNumeric; + block_height: number; + tx_index: number; address: string; avail_balance: PgNumeric; trans_balance: PgNumeric; operation: DbBrc20Operation; }; -export type DbBrc20CountsByAddressInsert = { - address: string; - operation: DbBrc20Operation; - count: number; -}; - export type DbBrc20Token = { id: string; genesis_id: string; @@ -73,49 +67,12 @@ export type DbBrc20Balance = { total_balance: string; }; -export enum DbBrc20BalanceTypeId { - mint = 0, - transferIntent = 1, - transferFrom = 2, - transferTo = 3, -} - export enum DbBrc20EventOperation { deploy = 'deploy', mint = 'mint', transfer = 'transfer', transferSend = 'transfer_send', } -export const BRC20_OPERATIONS = ['deploy', 'mint', 'transfer', 'transfer_send']; - -type BaseEvent = { - inscription_id: string; - genesis_location_id: string; - brc20_deploy_id: string; -}; - -export type DbBrc20DeployEvent = BaseEvent & { - operation: 'deploy'; - deploy_id: string; - mint_id: null; - transfer_id: null; -}; - -export type DbBrc20MintEvent = BaseEvent & { - operation: 'mint'; - deploy_id: null; - mint_id: string; - transfer_id: null; -}; - -export type DbBrc20TransferEvent = BaseEvent & { - operation: 'transfer' | 'transfer_send'; - deploy_id: null; - mint_id: null; - transfer_id: string; -}; - -export type DbBrc20Event = DbBrc20DeployEvent | DbBrc20MintEvent | DbBrc20TransferEvent; export type DbBrc20Activity = { ticker: string; @@ -136,18 +93,3 @@ export type DbBrc20Activity = { to_address: string | null; timestamp: number; }; - -export const BRC20_DEPLOYS_COLUMNS = [ - 'id', - 'inscription_id', - 'block_height', - 'tx_id', - 'address', - 'ticker', - 'max', - 'decimals', - 'limit', - 'minted_supply', - 'tx_count', - 'self_mint', -]; diff --git a/src/pg/counts/counts-pg-store.ts b/src/pg/counts/counts-pg-store.ts index 6994959c..7e56e54c 100644 --- a/src/pg/counts/counts-pg-store.ts +++ b/src/pg/counts/counts-pg-store.ts @@ -1,14 +1,13 @@ -import { BasePgStoreModule } from '@hirosystems/api-toolkit'; +import { BasePgStoreModule, PgSqlClient } from '@hirosystems/api-toolkit'; import { SatoshiRarity } from '../../api/util/ordinal-satoshi'; import { - DbInscription, + DbInscriptionCountPerBlock, + DbInscriptionCountPerBlockFilters, DbInscriptionIndexFilters, - InscriptionData, DbInscriptionType, - RevealLocationData, - DbLocationPointer, } from '../types'; import { DbInscriptionIndexResultCountType } from './types'; +import { BlockCache } from '../block-cache'; /** * This class affects all the different tables that track inscription counts according to different @@ -55,142 +54,128 @@ export class CountsPgStore extends BasePgStoreModule { } } - async applyInscriptions(writes: InscriptionData[]): Promise { - if (writes.length === 0) return; - const mimeType = new Map(); - const rarity = new Map(); - const recursion = new Map(); - const typeMap = new Map(); - for (const i of writes) { - mimeType.set(i.mime_type, (mimeType.get(i.mime_type) ?? 0) + 1); - rarity.set(i.sat_rarity, (rarity.get(i.sat_rarity) ?? 0) + 1); - recursion.set(i.recursive, (recursion.get(i.recursive) ?? 0) + 1); - const inscrType = i.number < 0 ? 'cursed' : 'blessed'; - typeMap.set(inscrType, (typeMap.get(inscrType) ?? 
0) + 1); - } - const mimeTypeInsert = Array.from(mimeType.entries()).map(k => ({ - mime_type: k[0], - count: k[1], - })); - const rarityInsert = Array.from(rarity.entries()).map(k => ({ - sat_rarity: k[0], - count: k[1], - })); - const recursionInsert = Array.from(recursion.entries()).map(k => ({ - recursive: k[0], - count: k[1], - })); - const typeInsert = Array.from(typeMap.entries()).map(k => ({ - type: k[0], - count: k[1], - })); - // `counts_by_address` and `counts_by_genesis_address` count increases are handled in - // `applyLocations`. - await this.sql` - WITH increase_mime_type AS ( - INSERT INTO counts_by_mime_type ${this.sql(mimeTypeInsert)} + async applyCounts(sql: PgSqlClient, cache: BlockCache) { + if (cache.mimeTypeCounts.size) { + const entries = []; + for (const [mime_type, count] of cache.mimeTypeCounts) entries.push({ mime_type, count }); + await sql` + INSERT INTO counts_by_mime_type ${sql(entries)} ON CONFLICT (mime_type) DO UPDATE SET count = counts_by_mime_type.count + EXCLUDED.count - ), - increase_rarity AS ( - INSERT INTO counts_by_sat_rarity ${this.sql(rarityInsert)} + `; + } + if (cache.satRarityCounts.size) { + const entries = []; + for (const [sat_rarity, count] of cache.satRarityCounts) entries.push({ sat_rarity, count }); + await sql` + INSERT INTO counts_by_sat_rarity ${sql(entries)} ON CONFLICT (sat_rarity) DO UPDATE SET count = counts_by_sat_rarity.count + EXCLUDED.count - ), - increase_recursive AS ( - INSERT INTO counts_by_recursive ${this.sql(recursionInsert)} + `; + } + if (cache.inscriptionTypeCounts.size) { + const entries = []; + for (const [type, count] of cache.inscriptionTypeCounts) entries.push({ type, count }); + await sql` + INSERT INTO counts_by_type ${sql(entries)} + ON CONFLICT (type) DO UPDATE SET count = counts_by_type.count + EXCLUDED.count + `; + } + if (cache.recursiveCounts.size) { + const entries = []; + for (const [recursive, count] of cache.recursiveCounts) entries.push({ recursive, count }); + await sql` + INSERT INTO counts_by_recursive ${sql(entries)} ON CONFLICT (recursive) DO UPDATE SET count = counts_by_recursive.count + EXCLUDED.count - ) - INSERT INTO counts_by_type ${this.sql(typeInsert)} - ON CONFLICT (type) DO UPDATE SET count = counts_by_type.count + EXCLUDED.count - `; - } - - async rollBackInscription(args: { - inscription: InscriptionData; - location: RevealLocationData; - }): Promise { - await this.sql` - WITH decrease_mime_type AS ( - UPDATE counts_by_mime_type SET count = count - 1 - WHERE mime_type = ${args.inscription.mime_type} - ), - decrease_rarity AS ( - UPDATE counts_by_sat_rarity SET count = count - 1 - WHERE sat_rarity = ${args.inscription.sat_rarity} - ), - decrease_recursive AS ( - UPDATE counts_by_recursive SET count = count - 1 - WHERE recursive = ${args.inscription.recursive} - ), - decrease_type AS ( - UPDATE counts_by_type SET count = count - 1 WHERE type = ${ - args.inscription.number < 0 ? 
DbInscriptionType.cursed : DbInscriptionType.blessed - } - ), - decrease_genesis AS ( - UPDATE counts_by_genesis_address SET count = count - 1 - WHERE address = ${args.location.address} - ) - UPDATE counts_by_address SET count = count - 1 WHERE address = ${args.location.address} - `; + `; + } + if (cache.genesisAddressCounts.size) { + const entries = []; + for (const [address, count] of cache.genesisAddressCounts) entries.push({ address, count }); + await sql` + INSERT INTO counts_by_genesis_address ${sql(entries)} + ON CONFLICT (address) DO UPDATE SET count = counts_by_genesis_address.count + EXCLUDED.count + `; + } + if (cache.inscriptions.length) + await sql` + WITH prev_entry AS ( + SELECT inscription_count_accum + FROM counts_by_block + WHERE block_height < ${cache.blockHeight} + ORDER BY block_height DESC + LIMIT 1 + ) + INSERT INTO counts_by_block + (block_height, block_hash, inscription_count, inscription_count_accum, timestamp) + VALUES ( + ${cache.blockHeight}, ${cache.blockHash}, ${cache.inscriptions.length}, + COALESCE((SELECT inscription_count_accum FROM prev_entry), 0) + ${cache.inscriptions.length}, + TO_TIMESTAMP(${cache.timestamp}) + ) + `; + // Address ownership count is handled in `PgStore`. } - async applyLocations( - writes: { old_address: string | null; new_address: string | null }[], - genesis: boolean = true - ): Promise { - if (writes.length === 0) return; - await this.sqlWriteTransaction(async sql => { - const table = genesis ? sql`counts_by_genesis_address` : sql`counts_by_address`; - const oldAddr = new Map(); - const newAddr = new Map(); - for (const i of writes) { - if (i.old_address) oldAddr.set(i.old_address, (oldAddr.get(i.old_address) ?? 0) + 1); - if (i.new_address) newAddr.set(i.new_address, (newAddr.get(i.new_address) ?? 
0) + 1); - } - const oldAddrInsert = Array.from(oldAddr.entries()).map(k => ({ - address: k[0], - count: k[1], - })); - const newAddrInsert = Array.from(newAddr.entries()).map(k => ({ - address: k[0], - count: k[1], - })); - if (oldAddrInsert.length > 0) + async rollBackCounts(sql: PgSqlClient, cache: BlockCache) { + if (cache.inscriptions.length) + await sql`DELETE FROM counts_by_block WHERE block_height = ${cache.blockHeight}`; + if (cache.genesisAddressCounts.size) + for (const [address, count] of cache.genesisAddressCounts) await sql` - INSERT INTO ${table} ${sql(oldAddrInsert)} - ON CONFLICT (address) DO UPDATE SET count = ${table}.count - EXCLUDED.count + UPDATE counts_by_genesis_address SET count = count - ${count} WHERE address = ${address} `; - if (newAddrInsert.length > 0) + if (cache.recursiveCounts.size) + for (const [recursive, count] of cache.recursiveCounts) await sql` - INSERT INTO ${table} ${sql(newAddrInsert)} - ON CONFLICT (address) DO UPDATE SET count = ${table}.count + EXCLUDED.count + UPDATE counts_by_recursive SET count = count - ${count} WHERE recursive = ${recursive} `; - }); - } - - async rollBackCurrentLocation(args: { - curr: DbLocationPointer; - prev: DbLocationPointer; - }): Promise { - await this.sqlWriteTransaction(async sql => { - if (args.curr.address) { + if (cache.inscriptionTypeCounts.size) + for (const [type, count] of cache.inscriptionTypeCounts) await sql` - UPDATE counts_by_address SET count = count - 1 WHERE address = ${args.curr.address} + UPDATE counts_by_type SET count = count - ${count} WHERE type = ${type} `; - } - if (args.prev.address) { + if (cache.satRarityCounts.size) + for (const [sat_rarity, count] of cache.satRarityCounts) await sql` - UPDATE counts_by_address SET count = count + 1 WHERE address = ${args.prev.address} + UPDATE counts_by_sat_rarity SET count = count - ${count} WHERE sat_rarity = ${sat_rarity} `; - } - }); + if (cache.mimeTypeCounts.size) + for (const [mime_type, count] of cache.mimeTypeCounts) + await sql` + UPDATE counts_by_mime_type SET count = count - ${count} WHERE mime_type = ${mime_type} + `; + // Address ownership count is handled in `PgStore`. + } + + async getInscriptionCountPerBlock( + filters: DbInscriptionCountPerBlockFilters + ): Promise { + const fromCondition = filters.from_block_height + ? this.sql`block_height >= ${filters.from_block_height}` + : this.sql``; + + const toCondition = filters.to_block_height + ? this.sql`block_height <= ${filters.to_block_height}` + : this.sql``; + + const where = + filters.from_block_height && filters.to_block_height + ? this.sql`WHERE ${fromCondition} AND ${toCondition}` + : this.sql`WHERE ${fromCondition}${toCondition}`; + + return await this.sql` + SELECT * + FROM counts_by_block + ${filters.from_block_height || filters.to_block_height ? where : this.sql``} + ORDER BY block_height DESC + LIMIT 5000 + `; // roughly 35 days of blocks, assuming 10 minute block times on a full database } private async getBlockCount(from?: number, to?: number): Promise { if (from === undefined && to === undefined) return 0; const result = await this.sql<{ count: number }[]>` - SELECT COALESCE(SUM(inscription_count), 0) AS count - FROM inscriptions_per_block + SELECT COALESCE(SUM(inscription_count), 0)::int AS count + FROM counts_by_block WHERE TRUE ${from !== undefined ? this.sql`AND block_height >= ${from}` : this.sql``} ${to !== undefined ? 
this.sql`AND block_height <= ${to}` : this.sql``} @@ -201,8 +186,8 @@ export class CountsPgStore extends BasePgStoreModule { private async getBlockHashCount(hash?: string): Promise { if (!hash) return 0; const result = await this.sql<{ count: number }[]>` - SELECT COALESCE(SUM(inscription_count), 0) AS count - FROM inscriptions_per_block + SELECT COALESCE(SUM(inscription_count), 0)::int AS count + FROM counts_by_block WHERE block_hash = ${hash} `; return result[0].count; @@ -212,7 +197,7 @@ export class CountsPgStore extends BasePgStoreModule { const types = type !== undefined ? [type] : [DbInscriptionType.blessed, DbInscriptionType.cursed]; const result = await this.sql<{ count: number }[]>` - SELECT COALESCE(SUM(count), 0) AS count + SELECT COALESCE(SUM(count), 0)::int AS count FROM counts_by_type WHERE type IN ${this.sql(types)} `; @@ -222,7 +207,7 @@ export class CountsPgStore extends BasePgStoreModule { private async getMimeTypeCount(mimeType?: string[]): Promise { if (!mimeType) return 0; const result = await this.sql<{ count: number }[]>` - SELECT COALESCE(SUM(count), 0) AS count + SELECT COALESCE(SUM(count), 0)::int AS count FROM counts_by_mime_type WHERE mime_type IN ${this.sql(mimeType)} `; @@ -232,7 +217,7 @@ export class CountsPgStore extends BasePgStoreModule { private async getSatRarityCount(satRarity?: SatoshiRarity[]): Promise { if (!satRarity) return 0; const result = await this.sql<{ count: number }[]>` - SELECT COALESCE(SUM(count), 0) AS count + SELECT COALESCE(SUM(count), 0)::int AS count FROM counts_by_sat_rarity WHERE sat_rarity IN ${this.sql(satRarity)} `; @@ -242,17 +227,17 @@ export class CountsPgStore extends BasePgStoreModule { private async getRecursiveCount(recursive?: boolean): Promise { const rec = recursive !== undefined ? 
[recursive] : [true, false]; const result = await this.sql<{ count: number }[]>` - SELECT COALESCE(SUM(count), 0) AS count + SELECT COALESCE(SUM(count), 0)::int AS count FROM counts_by_recursive WHERE recursive IN ${this.sql(rec)} `; return result[0].count; } - private async getAddressCount(address?: string[]): Promise { + async getAddressCount(address?: string[]): Promise { if (!address) return 0; const result = await this.sql<{ count: number }[]>` - SELECT COALESCE(SUM(count), 0) AS count + SELECT COALESCE(SUM(count), 0)::int AS count FROM counts_by_address WHERE address IN ${this.sql(address)} `; @@ -262,7 +247,7 @@ export class CountsPgStore extends BasePgStoreModule { private async getGenesisAddressCount(genesisAddress?: string[]): Promise { if (!genesisAddress) return 0; const result = await this.sql<{ count: number }[]>` - SELECT COALESCE(SUM(count), 0) AS count + SELECT COALESCE(SUM(count), 0)::int AS count FROM counts_by_genesis_address WHERE address IN ${this.sql(genesisAddress)} `; diff --git a/src/pg/helpers.ts b/src/pg/helpers.ts index 033a1ece..9b399934 100644 --- a/src/pg/helpers.ts +++ b/src/pg/helpers.ts @@ -1,17 +1,7 @@ -import { PgBytea, logger, toEnumValue } from '@hirosystems/api-toolkit'; -import { hexToBuffer, normalizedHexString, parseSatPoint } from '../api/util/helpers'; -import { - BitcoinEvent, - BitcoinInscriptionRevealed, - BitcoinInscriptionTransferred, -} from '@hirosystems/chainhook-client'; -import { - DbLocationTransferType, - InscriptionEventData, - InscriptionTransferData, - InscriptionRevealData, -} from './types'; -import { OrdinalSatoshi } from '../api/util/ordinal-satoshi'; +import { PgBytea, logger } from '@hirosystems/api-toolkit'; +import { hexToBuffer } from '../api/util/helpers'; +import { BitcoinInscriptionRevealed } from '@hirosystems/chainhook-client'; +import { DbLocationTransferType } from './types'; /** * Returns a list of referenced inscription ids from inscription content. @@ -28,28 +18,6 @@ export function getInscriptionRecursion(content: PgBytea): string[] { return result; } -/** - * Returns the values from settled Promise results. - * Throws if any Promise is rejected. - * This can be used with Promise.allSettled to get the values from all promises, - * instead of Promise.all which will swallow following unhandled rejections. - * @param settles - Array of `Promise.allSettled()` results - * @returns Array of Promise result values - */ -export function throwOnFirstRejected(settles: { - [K in keyof T]: PromiseSettledResult; -}): T { - const values: T = [] as any; - for (const promise of settles) { - if (promise.status === 'rejected') throw promise.reason; - - // Note: Pushing to result `values` array is required for type inference - // Compared to e.g. 
`settles.map(s => s.value)` - values.push(promise.value); - } - return values; -} - export function objRemoveUndefinedValues(obj: object) { Object.keys(obj).forEach(key => (obj as any)[key] === undefined && delete (obj as any)[key]); } @@ -63,23 +31,13 @@ export function removeNullBytes(input: string): string { return input.replace(/\x00/g, ''); } -function updateFromOrdhookInscriptionRevealed(args: { - block_height: number; - block_hash: string; - tx_id: string; - timestamp: number; - reveal: BitcoinInscriptionRevealed; -}): InscriptionRevealData { - const satoshi = new OrdinalSatoshi(args.reveal.ordinal_number); - const satpoint = parseSatPoint(args.reveal.satpoint_post_inscription); - const recursive_refs = getInscriptionRecursion(args.reveal.content_bytes); - const content_type = removeNullBytes(args.reveal.content_type); +export function getTransferType(reveal: BitcoinInscriptionRevealed) { let transfer_type = DbLocationTransferType.transferred; - if (args.reveal.inscriber_address == null || args.reveal.inscriber_address == '') { - if (args.reveal.inscription_output_value == 0) { - if (args.reveal.inscription_pointer !== 0 && args.reveal.inscription_pointer !== null) { + if (reveal.inscriber_address == null || reveal.inscriber_address == '') { + if (reveal.inscription_output_value == 0) { + if (reveal.inscription_pointer !== 0 && reveal.inscription_pointer !== null) { logger.warn( - `Detected inscription reveal with no address and no output value but a valid pointer ${args.reveal.inscription_id}` + `Detected inscription reveal with no address and no output value but a valid pointer ${reveal.inscription_id}` ); } transfer_type = DbLocationTransferType.spentInFees; @@ -87,109 +45,5 @@ function updateFromOrdhookInscriptionRevealed(args: { transfer_type = DbLocationTransferType.burnt; } } - return { - inscription: { - genesis_id: args.reveal.inscription_id, - mime_type: content_type.split(';')[0], - content_type, - content_length: args.reveal.content_length, - number: args.reveal.inscription_number.jubilee, - classic_number: args.reveal.inscription_number.classic, - content: removeNullBytes(args.reveal.content_bytes), - fee: args.reveal.inscription_fee.toString(), - curse_type: args.reveal.curse_type ? JSON.stringify(args.reveal.curse_type) : null, - sat_ordinal: args.reveal.ordinal_number.toString(), - sat_rarity: satoshi.rarity, - sat_coinbase_height: satoshi.blockHeight, - recursive: recursive_refs.length > 0, - metadata: args.reveal.metadata ? JSON.stringify(args.reveal.metadata) : null, - parent: args.reveal.parent, - }, - location: { - block_hash: args.block_hash, - block_height: args.block_height, - tx_id: args.tx_id, - tx_index: args.reveal.tx_index, - block_transfer_index: null, - genesis_id: args.reveal.inscription_id, - address: args.reveal.inscriber_address, - output: `${satpoint.tx_id}:${satpoint.vout}`, - offset: satpoint.offset ?? 
null, - prev_output: null, - prev_offset: null, - value: args.reveal.inscription_output_value.toString(), - timestamp: args.timestamp, - transfer_type, - }, - recursive_refs, - }; -} - -function updateFromOrdhookInscriptionTransferred(args: { - block_height: number; - block_hash: string; - tx_id: string; - timestamp: number; - blockTransferIndex: number; - transfer: BitcoinInscriptionTransferred; -}): InscriptionTransferData { - const satpoint = parseSatPoint(args.transfer.satpoint_post_transfer); - const prevSatpoint = parseSatPoint(args.transfer.satpoint_pre_transfer); - return { - location: { - block_hash: args.block_hash, - block_height: args.block_height, - tx_id: args.tx_id, - tx_index: args.transfer.tx_index, - block_transfer_index: args.blockTransferIndex, - ordinal_number: args.transfer.ordinal_number.toString(), - address: args.transfer.destination.value ?? null, - output: `${satpoint.tx_id}:${satpoint.vout}`, - offset: satpoint.offset ?? null, - prev_output: `${prevSatpoint.tx_id}:${prevSatpoint.vout}`, - prev_offset: prevSatpoint.offset ?? null, - value: args.transfer.post_transfer_output_value - ? args.transfer.post_transfer_output_value.toString() - : null, - timestamp: args.timestamp, - transfer_type: - toEnumValue(DbLocationTransferType, args.transfer.destination.type) ?? - DbLocationTransferType.transferred, - }, - }; -} - -export function revealInsertsFromOrdhookEvent(event: BitcoinEvent): InscriptionEventData[] { - // Keep the relative ordering of a transfer within a block for faster future reads. - let blockTransferIndex = 0; - const block_height = event.block_identifier.index; - const block_hash = normalizedHexString(event.block_identifier.hash); - const writes: InscriptionEventData[] = []; - for (const tx of event.transactions) { - const tx_id = normalizedHexString(tx.transaction_identifier.hash); - for (const operation of tx.metadata.ordinal_operations) { - if (operation.inscription_revealed) - writes.push( - updateFromOrdhookInscriptionRevealed({ - block_hash, - block_height, - tx_id, - timestamp: event.timestamp, - reveal: operation.inscription_revealed, - }) - ); - if (operation.inscription_transferred) - writes.push( - updateFromOrdhookInscriptionTransferred({ - block_hash, - block_height, - tx_id, - timestamp: event.timestamp, - blockTransferIndex: blockTransferIndex++, - transfer: operation.inscription_transferred, - }) - ); - } - } - return writes; + return transfer_type; } diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 433cf3b3..0c9267b4 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -2,14 +2,13 @@ import { BasePgStore, PgConnectionVars, PgSqlClient, - PgSqlQuery, batchIterate, connectPostgres, logger, runMigrations, stopwatch, } from '@hirosystems/api-toolkit'; -import { BitcoinEvent, Payload } from '@hirosystems/chainhook-client'; +import { BitcoinEvent, BitcoinPayload } from '@hirosystems/chainhook-client'; import * as path from 'path'; import * as postgres from 'postgres'; import { Order, OrderBy } from '../api/schemas'; @@ -17,29 +16,21 @@ import { ENV } from '../env'; import { Brc20PgStore } from './brc20/brc20-pg-store'; import { CountsPgStore } from './counts/counts-pg-store'; import { getIndexResultCountType } from './counts/helpers'; -import { revealInsertsFromOrdhookEvent } from './helpers'; import { DbFullyLocatedInscriptionResult, DbInscriptionContent, - DbInscriptionCountPerBlock, - DbInscriptionCountPerBlockFilters, DbInscriptionIndexFilters, DbInscriptionIndexOrder, DbInscriptionIndexPaging, 
DbInscriptionLocationChange, DbLocation, - DbLocationPointer, - DbLocationPointerInsert, DbPaginatedResult, - InscriptionEventData, - LOCATIONS_COLUMNS, - InscriptionInsert, - LocationInsert, - LocationData, } from './types'; +import { normalizedHexString } from '../api/util/helpers'; +import { BlockCache } from './block-cache'; export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); -export const ORDINALS_GENESIS_BLOCK = 767430; +const ORDINALS_GENESIS_BLOCK = 767430; export const INSERT_BATCH_SIZE = 4000; type InscriptionIdentifier = { genesis_id: string } | { number: number }; @@ -83,62 +74,293 @@ export class PgStore extends BasePgStore { * chain re-orgs. * @param args - Apply/Rollback Ordhook events */ - async updateInscriptions(payload: Payload): Promise { - let updatedBlockHeightMin = Infinity; + async updateInscriptions(payload: BitcoinPayload): Promise { await this.sqlWriteTransaction(async sql => { - // ROLLBACK - for (const rollbackEvent of payload.rollback) { - const event = rollbackEvent as BitcoinEvent; - logger.info(`PgStore rolling back block ${event.block_identifier.index}`); + const streamed = payload.chainhook.is_streaming_blocks; + for (const event of payload.rollback) { + logger.info(`PgStore rollback block ${event.block_identifier.index}`); const time = stopwatch(); - const rollbacks = revealInsertsFromOrdhookEvent(event); - await this.brc20.updateBrc20Operations(event, 'rollback'); - for (const writeChunk of batchIterate(rollbacks, 1000)) - await this.rollBackInscriptions(writeChunk); - updatedBlockHeightMin = Math.min(updatedBlockHeightMin, event.block_identifier.index); + await this.updateInscriptionsEvent(sql, event, 'rollback', streamed); + await this.brc20.updateBrc20Operations(sql, event, 'rollback'); + await this.updateChainTipBlockHeight(sql, event.block_identifier.index - 1); logger.info( - `PgStore rolled back block ${ + `PgStore rollback block ${ event.block_identifier.index - } in ${time.getElapsedSeconds()}s` + } finished in ${time.getElapsedSeconds()}s` ); - await this.updateChainTipBlockHeight(event.block_identifier.index - 1); } - - // APPLY - for (const applyEvent of payload.apply) { - // Check where we're at in terms of ingestion, e.g. block height and max blessed inscription - // number. This will let us determine if we should skip ingesting this block or throw an - // error if a gap is detected. 
- const currentBlockHeight = await this.getChainTipBlockHeight(); - const event = applyEvent as BitcoinEvent; - if ( - event.block_identifier.index <= currentBlockHeight && - event.block_identifier.index !== ORDINALS_GENESIS_BLOCK - ) { - logger.info( - `PgStore skipping ingestion for previously seen block ${event.block_identifier.index}, current chain tip is at ${currentBlockHeight}` - ); + for (const event of payload.apply) { + if (await this.isBlockIngested(event)) { + logger.warn(`PgStore skipping previously seen block ${event.block_identifier.index}`); continue; } - logger.info(`PgStore ingesting block ${event.block_identifier.index}`); + logger.info(`PgStore apply block ${event.block_identifier.index}`); const time = stopwatch(); - const writes = revealInsertsFromOrdhookEvent(event); - for (const writeChunk of batchIterate(writes, INSERT_BATCH_SIZE)) - await this.insertInscriptions(writeChunk, payload.chainhook.is_streaming_blocks); - updatedBlockHeightMin = Math.min(updatedBlockHeightMin, event.block_identifier.index); - await this.brc20.updateBrc20Operations(event, 'apply'); + await this.updateInscriptionsEvent(sql, event, 'apply', streamed); + await this.brc20.updateBrc20Operations(sql, event, 'apply'); + await this.updateChainTipBlockHeight(sql, event.block_identifier.index); logger.info( - `PgStore ingested block ${event.block_identifier.index} in ${time.getElapsedSeconds()}s` + `PgStore apply block ${ + event.block_identifier.index + } finished in ${time.getElapsedSeconds()}s` ); - await this.updateChainTipBlockHeight(event.block_identifier.index); } }); - if (updatedBlockHeightMin !== Infinity) - await this.normalizeInscriptionCount({ min_block_height: updatedBlockHeightMin }); } - private async updateChainTipBlockHeight(block_height: number): Promise { - await this.sql`UPDATE chain_tip SET block_height = ${block_height}`; + private async updateInscriptionsEvent( + sql: PgSqlClient, + event: BitcoinEvent, + direction: 'apply' | 'rollback', + streamed: boolean = false + ) { + const cache = new BlockCache( + event.block_identifier.index, + normalizedHexString(event.block_identifier.hash), + event.timestamp + ); + for (const tx of event.transactions) { + const tx_id = normalizedHexString(tx.transaction_identifier.hash); + for (const operation of tx.metadata.ordinal_operations) { + if (operation.inscription_revealed) { + cache.reveal(operation.inscription_revealed, tx_id); + logger.info( + `PgStore ${direction} reveal inscription #${operation.inscription_revealed.inscription_number.jubilee} (${operation.inscription_revealed.inscription_id}) at block ${cache.blockHeight}` + ); + } + if (operation.inscription_transferred) { + cache.transfer(operation.inscription_transferred, tx_id); + logger.info( + `PgStore ${direction} transfer satoshi ${operation.inscription_transferred.ordinal_number} to ${operation.inscription_transferred.destination.value} at block ${cache.blockHeight}` + ); + } + } + } + switch (direction) { + case 'apply': + await this.applyInscriptions(sql, cache, streamed); + break; + case 'rollback': + await this.rollBackInscriptions(sql, cache, streamed); + break; + } + } + + private async applyInscriptions( + sql: PgSqlClient, + cache: BlockCache, + streamed: boolean + ): Promise { + if (cache.satoshis.length) + for await (const batch of batchIterate(cache.satoshis, INSERT_BATCH_SIZE)) + await sql` + INSERT INTO satoshis ${sql(batch)} + ON CONFLICT (ordinal_number) DO NOTHING + `; + if (cache.inscriptions.length) { + const entries = cache.inscriptions.map(i => ({ + 
...i, + timestamp: sql`TO_TIMESTAMP(${i.timestamp})`, + })); + for await (const batch of batchIterate(entries, INSERT_BATCH_SIZE)) + await sql` + INSERT INTO inscriptions ${sql(batch)} + ON CONFLICT (genesis_id) DO NOTHING + `; + } + if (cache.locations.length) { + const entries = cache.locations.map(l => ({ + ...l, + timestamp: sql`TO_TIMESTAMP(${l.timestamp})`, + })); + for await (const batch of batchIterate(entries, INSERT_BATCH_SIZE)) + await sql` + INSERT INTO locations ${sql(batch)} + ON CONFLICT (ordinal_number, block_height, tx_index) DO NOTHING + `; + // Insert block transfers. + let block_transfer_index = 0; + const transferEntries = []; + for (const transfer of cache.locations) { + const transferred = await sql<{ genesis_id: string; number: string }[]>` + SELECT genesis_id, number FROM inscriptions + WHERE ordinal_number = ${transfer.ordinal_number} AND ( + block_height < ${transfer.block_height} + OR (block_height = ${transfer.block_height} AND tx_index < ${transfer.tx_index}) + ) + `; + for (const inscription of transferred) + transferEntries.push({ + genesis_id: inscription.genesis_id, + number: inscription.number, + ordinal_number: transfer.ordinal_number, + block_height: transfer.block_height, + block_hash: transfer.block_hash, + tx_index: transfer.tx_index, + block_transfer_index: block_transfer_index++, + }); + } + for await (const batch of batchIterate(transferEntries, INSERT_BATCH_SIZE)) + await sql` + INSERT INTO inscription_transfers ${sql(batch)} + ON CONFLICT (block_height, block_transfer_index) DO NOTHING + `; + } + if (cache.recursiveRefs.size) + for (const [genesis_id, refs] of cache.recursiveRefs) { + const entries = refs.map(r => ({ genesis_id, ref_genesis_id: r })); + await sql` + INSERT INTO inscription_recursions ${sql(entries)} + ON CONFLICT (genesis_id, ref_genesis_id) DO NOTHING + `; + } + if (cache.currentLocations.size) { + // Deduct counts from previous owners + const moved_sats = [...cache.currentLocations.keys()]; + const prevOwners = await sql<{ address: string; count: number }[]>` + SELECT address, COUNT(*) AS count + FROM current_locations + WHERE ordinal_number IN ${sql(moved_sats)} + GROUP BY address + `; + for (const owner of prevOwners) + await sql` + UPDATE counts_by_address + SET count = count - ${owner.count} + WHERE address = ${owner.address} + `; + // Insert locations + const entries = [...cache.currentLocations.values()]; + for await (const batch of batchIterate(entries, INSERT_BATCH_SIZE)) + await sql` + INSERT INTO current_locations ${sql(batch)} + ON CONFLICT (ordinal_number) DO UPDATE SET + block_height = EXCLUDED.block_height, + tx_index = EXCLUDED.tx_index, + address = EXCLUDED.address + WHERE + EXCLUDED.block_height > current_locations.block_height OR + (EXCLUDED.block_height = current_locations.block_height AND + EXCLUDED.tx_index > current_locations.tx_index) + `; + // Update owner counts + await sql` + WITH new_owners AS ( + SELECT address, COUNT(*) AS count + FROM current_locations + WHERE ordinal_number IN ${sql(moved_sats)} + GROUP BY address + ) + INSERT INTO counts_by_address (address, count) + (SELECT address, count FROM new_owners) + ON CONFLICT (address) DO UPDATE SET count = counts_by_address.count + EXCLUDED.count + `; + if (streamed) + for await (const batch of batchIterate(moved_sats, INSERT_BATCH_SIZE)) + await sql` + UPDATE inscriptions + SET updated_at = NOW() + WHERE ordinal_number IN ${sql(batch)} + `; + } + await this.counts.applyCounts(sql, cache); + } + + private async rollBackInscriptions( + sql: 
PgSqlClient,
+    cache: BlockCache,
+    streamed: boolean
+  ): Promise<void> {
+    await this.counts.rollBackCounts(sql, cache);
+    const moved_sats = [...cache.currentLocations.keys()];
+    // Delete old current owners first.
+    if (cache.currentLocations.size) {
+      const prevOwners = await sql<{ address: string; count: number }[]>`
+        SELECT address, COUNT(*) AS count
+        FROM current_locations
+        WHERE ordinal_number IN ${sql(moved_sats)}
+        GROUP BY address
+      `;
+      for (const owner of prevOwners)
+        await sql`
+          UPDATE counts_by_address
+          SET count = count - ${owner.count}
+          WHERE address = ${owner.address}
+        `;
+      await sql`
+        DELETE FROM current_locations WHERE ordinal_number IN ${sql(moved_sats)}
+      `;
+    }
+    if (cache.locations.length)
+      for (const location of cache.locations)
+        await sql`
+          DELETE FROM locations
+          WHERE ordinal_number = ${location.ordinal_number}
+          AND block_height = ${location.block_height}
+          AND tx_index = ${location.tx_index}
+        `;
+    if (cache.inscriptions.length)
+      // This will also delete recursive refs.
+      for (const inscription of cache.inscriptions)
+        await sql`
+          DELETE FROM inscriptions WHERE genesis_id = ${inscription.genesis_id}
+        `;
+    if (cache.satoshis.length)
+      for (const satoshi of cache.satoshis)
+        await sql`
+          DELETE FROM satoshis
+          WHERE ordinal_number = ${satoshi.ordinal_number} AND NOT EXISTS (
+            SELECT genesis_id FROM inscriptions WHERE ordinal_number = ${satoshi.ordinal_number}
+          )
+        `;
+    // Recalculate current locations for affected inscriptions.
+    if (cache.currentLocations.size) {
+      for (const ordinal_number of moved_sats) {
+        await sql`
+          INSERT INTO current_locations (ordinal_number, block_height, tx_index, address)
+          (
+            SELECT ordinal_number, block_height, tx_index, address
+            FROM locations
+            WHERE ordinal_number = ${ordinal_number}
+            ORDER BY block_height DESC, tx_index DESC
+            LIMIT 1
+          )
+        `;
+      }
+      await sql`
+        WITH new_owners AS (
+          SELECT address, COUNT(*) AS count
+          FROM current_locations
+          WHERE ordinal_number IN ${sql(moved_sats)}
+          GROUP BY address
+        )
+        INSERT INTO counts_by_address (address, count)
+        (SELECT address, count FROM new_owners)
+        ON CONFLICT (address) DO UPDATE SET count = counts_by_address.count + EXCLUDED.count
+      `;
+      if (streamed)
+        for await (const batch of batchIterate(moved_sats, INSERT_BATCH_SIZE))
+          await sql`
+            UPDATE inscriptions
+            SET updated_at = NOW()
+            WHERE ordinal_number IN ${sql(batch)}
+          `;
+    }
+  }
+
+  private async isBlockIngested(event: BitcoinEvent): Promise<boolean> {
+    const currentBlockHeight = await this.getChainTipBlockHeight();
+    if (
+      event.block_identifier.index <= currentBlockHeight &&
+      event.block_identifier.index !== ORDINALS_GENESIS_BLOCK
+    ) {
+      return true;
+    }
+    return false;
+  }
+
+  private async updateChainTipBlockHeight(sql: PgSqlClient, block_height: number): Promise<void> {
+    await sql`UPDATE chain_tip SET block_height = ${block_height}`;
   }
 
   async getChainTipBlockHeight(): Promise<number> {
@@ -174,7 +396,7 @@ export class PgStore extends BasePgStore {
   async getInscriptionsPerBlockETag(): Promise<string> {
     const result = await this.sql<{ block_hash: string; inscription_count: string }[]>`
       SELECT block_hash, inscription_count
-      FROM inscriptions_per_block
+      FROM counts_by_block
      ORDER BY block_height DESC
       LIMIT 1
     `;
@@ -223,13 +445,13 @@ export class PgStore extends BasePgStore {
     let orderBy = sql`i.number ${order}`;
     switch (sort?.order_by) {
       case OrderBy.genesis_block_height:
-        orderBy = sql`gen.block_height ${order}, gen.tx_index ${order}`;
+        orderBy = sql`i.block_height ${order}, i.tx_index ${order}`;
         break;
      case OrderBy.ordinal:
- orderBy = sql`i.sat_ordinal ${order}`; + orderBy = sql`i.ordinal_number ${order}`; break; case OrderBy.rarity: - orderBy = sql`ARRAY_POSITION(ARRAY['common','uncommon','rare','epic','legendary','mythic'], i.sat_rarity) ${order}, i.number DESC`; + orderBy = sql`ARRAY_POSITION(ARRAY['common','uncommon','rare','epic','legendary','mythic'], s.rarity) ${order}, i.number DESC`; break; } // This function will generate a query to be used for getting results or total counts. @@ -239,10 +461,10 @@ export class PgStore extends BasePgStore { ) => sql` SELECT ${columns} FROM inscriptions AS i - INNER JOIN current_locations AS cur ON cur.inscription_id = i.id - INNER JOIN locations AS cur_l ON cur_l.id = cur.location_id - INNER JOIN genesis_locations AS gen ON gen.inscription_id = i.id - INNER JOIN locations AS gen_l ON gen_l.id = gen.location_id + INNER JOIN current_locations AS cur ON cur.ordinal_number = i.ordinal_number + INNER JOIN locations AS cur_l ON cur_l.ordinal_number = cur.ordinal_number AND cur_l.block_height = cur.block_height AND cur_l.tx_index = cur.tx_index + INNER JOIN locations AS gen_l ON gen_l.ordinal_number = i.ordinal_number AND gen_l.block_height = i.block_height AND gen_l.tx_index = i.tx_index + INNER JOIN satoshis AS s ON s.ordinal_number = i.ordinal_number WHERE TRUE ${ filters?.genesis_id?.length @@ -251,7 +473,7 @@ export class PgStore extends BasePgStore { } ${ filters?.genesis_block_height - ? sql`AND gen.block_height = ${filters.genesis_block_height}` + ? sql`AND i.block_height = ${filters.genesis_block_height}` : sql`` } ${ @@ -261,40 +483,42 @@ export class PgStore extends BasePgStore { } ${ filters?.from_genesis_block_height - ? sql`AND gen.block_height >= ${filters.from_genesis_block_height}` + ? sql`AND i.block_height >= ${filters.from_genesis_block_height}` : sql`` } ${ filters?.to_genesis_block_height - ? sql`AND gen.block_height <= ${filters.to_genesis_block_height}` + ? sql`AND i.block_height <= ${filters.to_genesis_block_height}` : sql`` } ${ filters?.from_sat_coinbase_height - ? sql`AND i.sat_coinbase_height >= ${filters.from_sat_coinbase_height}` + ? sql`AND s.coinbase_height >= ${filters.from_sat_coinbase_height}` : sql`` } ${ filters?.to_sat_coinbase_height - ? sql`AND i.sat_coinbase_height <= ${filters.to_sat_coinbase_height}` + ? sql`AND s.coinbase_height <= ${filters.to_sat_coinbase_height}` : sql`` } ${ filters?.from_genesis_timestamp - ? sql`AND gen_l.timestamp >= to_timestamp(${filters.from_genesis_timestamp})` + ? sql`AND i.timestamp >= to_timestamp(${filters.from_genesis_timestamp})` : sql`` } ${ filters?.to_genesis_timestamp - ? sql`AND gen_l.timestamp <= to_timestamp(${filters.to_genesis_timestamp})` + ? sql`AND i.timestamp <= to_timestamp(${filters.to_genesis_timestamp})` : sql`` } ${ filters?.from_sat_ordinal - ? sql`AND i.sat_ordinal >= ${filters.from_sat_ordinal}` + ? sql`AND i.ordinal_number >= ${filters.from_sat_ordinal}` : sql`` } - ${filters?.to_sat_ordinal ? sql`AND i.sat_ordinal <= ${filters.to_sat_ordinal}` : sql``} + ${ + filters?.to_sat_ordinal ? sql`AND i.ordinal_number <= ${filters.to_sat_ordinal}` : sql`` + } ${filters?.number?.length ? sql`AND i.number IN ${sql(filters.number)}` : sql``} ${ filters?.from_number !== undefined ? sql`AND i.number >= ${filters.from_number}` : sql`` @@ -303,18 +527,14 @@ export class PgStore extends BasePgStore { ${filters?.address?.length ? sql`AND cur.address IN ${sql(filters.address)}` : sql``} ${filters?.mime_type?.length ? 
sql`AND i.mime_type IN ${sql(filters.mime_type)}` : sql``} ${filters?.output ? sql`AND cur_l.output = ${filters.output}` : sql``} - ${ - filters?.sat_rarity?.length - ? sql`AND i.sat_rarity IN ${sql(filters.sat_rarity)}` - : sql`` - } - ${filters?.sat_ordinal ? sql`AND i.sat_ordinal = ${filters.sat_ordinal}` : sql``} + ${filters?.sat_rarity?.length ? sql`AND s.rarity IN ${sql(filters.sat_rarity)}` : sql``} + ${filters?.sat_ordinal ? sql`AND i.ordinal_number = ${filters.sat_ordinal}` : sql``} ${filters?.recursive !== undefined ? sql`AND i.recursive = ${filters.recursive}` : sql``} ${filters?.cursed === true ? sql`AND i.number < 0` : sql``} ${filters?.cursed === false ? sql`AND i.number >= 0` : sql``} ${ filters?.genesis_address?.length - ? sql`AND gen.address IN ${sql(filters.genesis_address)}` + ? sql`AND i.address IN ${sql(filters.genesis_address)}` : sql`` } ${sorting} @@ -328,21 +548,20 @@ export class PgStore extends BasePgStore { i.content_length, i.fee AS genesis_fee, i.curse_type, - i.sat_ordinal, - i.sat_rarity, - i.sat_coinbase_height, + i.ordinal_number AS sat_ordinal, + s.rarity AS sat_rarity, + s.coinbase_height AS sat_coinbase_height, i.recursive, ( - SELECT STRING_AGG(ii.genesis_id, ',') + SELECT STRING_AGG(ir.ref_genesis_id, ',') FROM inscription_recursions AS ir - INNER JOIN inscriptions AS ii ON ii.id = ir.ref_inscription_id - WHERE ir.inscription_id = i.id + WHERE ir.genesis_id = i.genesis_id ) AS recursion_refs, - gen.block_height AS genesis_block_height, + i.block_height AS genesis_block_height, gen_l.block_hash AS genesis_block_hash, gen_l.tx_id AS genesis_tx_id, - gen_l.timestamp AS genesis_timestamp, - gen.address AS genesis_address, + i.timestamp AS genesis_timestamp, + i.address AS genesis_address, cur_l.tx_id, cur.address, cur_l.output, @@ -372,18 +591,19 @@ export class PgStore extends BasePgStore { args: InscriptionIdentifier & { limit: number; offset: number } ): Promise> { const results = await this.sql<({ total: number } & DbLocation)[]>` - SELECT ${this.sql(LOCATIONS_COLUMNS)}, COUNT(*) OVER() as total - FROM locations - WHERE genesis_id = ( - SELECT genesis_id FROM inscriptions - WHERE ${ - 'number' in args - ? this.sql`number = ${args.number}` - : this.sql`genesis_id = ${args.genesis_id}` - } - LIMIT 1 - ) - ORDER BY block_height DESC, tx_index DESC + SELECT l.*, COUNT(*) OVER() as total + FROM locations AS l + INNER JOIN inscriptions AS i ON i.ordinal_number = l.ordinal_number + WHERE ${ + 'number' in args + ? this.sql`i.number = ${args.number}` + : this.sql`i.genesis_id = ${args.genesis_id}` + } + AND ( + (l.block_height > i.block_height) + OR (l.block_height = i.block_height AND l.tx_index >= i.tx_index) + ) + ORDER BY l.block_height DESC, l.tx_index DESC LIMIT ${args.limit} OFFSET ${args.offset} `; @@ -397,411 +617,70 @@ export class PgStore extends BasePgStore { args: { block_height?: number; block_hash?: string } & DbInscriptionIndexPaging ): Promise> { const results = await this.sql<({ total: number } & DbInscriptionLocationChange)[]>` - WITH max_transfer_index AS ( - SELECT MAX(block_transfer_index) FROM locations WHERE ${ + WITH transfer_total AS ( + SELECT MAX(block_transfer_index) AS total FROM inscription_transfers WHERE ${ 'block_height' in args ? 
this.sql`block_height = ${args.block_height}` : this.sql`block_hash = ${args.block_hash}` - } AND block_transfer_index IS NOT NULL + } ), - transfers AS ( + transfer_data AS ( SELECT - i.id AS inscription_id, - i.genesis_id, - i.number, - l.id AS to_id, + t.number, + t.genesis_id, + t.ordinal_number, + t.block_height, + t.tx_index, + t.block_transfer_index, ( - SELECT id - FROM locations AS ll - WHERE - ll.inscription_id = i.id - AND ( - ll.block_height < l.block_height OR - (ll.block_height = l.block_height AND ll.tx_index < l.tx_index) - ) - ORDER BY ll.block_height DESC + SELECT l.block_height || ',' || l.tx_index + FROM locations AS l + WHERE l.ordinal_number = t.ordinal_number AND ( + l.block_height < t.block_height OR + (l.block_height = t.block_height AND l.tx_index < t.tx_index) + ) + ORDER BY l.block_height DESC, l.tx_index DESC LIMIT 1 - ) AS from_id - FROM locations AS l - INNER JOIN inscriptions AS i ON l.inscription_id = i.id + ) AS from_data + FROM inscription_transfers AS t WHERE ${ 'block_height' in args - ? this.sql`l.block_height = ${args.block_height}` - : this.sql`l.block_hash = ${args.block_hash}` + ? this.sql`t.block_height = ${args.block_height}` + : this.sql`t.block_hash = ${args.block_hash}` } - AND l.block_transfer_index IS NOT NULL - AND l.block_transfer_index <= ((SELECT max FROM max_transfer_index) - ${args.offset}::int) - AND l.block_transfer_index > - ((SELECT max FROM max_transfer_index) - (${args.offset}::int + ${args.limit}::int)) + AND t.block_transfer_index <= ((SELECT total FROM transfer_total) - ${args.offset}::int) + AND t.block_transfer_index > + ((SELECT total FROM transfer_total) - (${args.offset}::int + ${args.limit}::int)) ) SELECT - t.genesis_id, - t.number, - (SELECT max FROM max_transfer_index) + 1 AS total, - ${this.sql.unsafe(LOCATIONS_COLUMNS.map(c => `lf.${c} AS from_${c}`).join(','))}, - ${this.sql.unsafe(LOCATIONS_COLUMNS.map(c => `lt.${c} AS to_${c}`).join(','))} - FROM transfers AS t - INNER JOIN locations AS lf ON t.from_id = lf.id - INNER JOIN locations AS lt ON t.to_id = lt.id - ORDER BY lt.block_transfer_index DESC + td.genesis_id, + td.number, + lf.block_height AS from_block_height, + lf.block_hash AS from_block_hash, + lf.tx_id AS from_tx_id, + lf.address AS from_address, + lf.output AS from_output, + lf.offset AS from_offset, + lf.value AS from_value, + lf.timestamp AS from_timestamp, + lt.block_height AS to_block_height, + lt.block_hash AS to_block_hash, + lt.tx_id AS to_tx_id, + lt.address AS to_address, + lt.output AS to_output, + lt.offset AS to_offset, + lt.value AS to_value, + lt.timestamp AS to_timestamp, + (SELECT total FROM transfer_total) + 1 AS total + FROM transfer_data AS td + INNER JOIN locations AS lf ON td.ordinal_number = lf.ordinal_number AND lf.block_height = SPLIT_PART(td.from_data, ',', 1)::int AND lf.tx_index = SPLIT_PART(td.from_data, ',', 2)::int + INNER JOIN locations AS lt ON td.ordinal_number = lt.ordinal_number AND td.block_height = lt.block_height AND td.tx_index = lt.tx_index + ORDER BY td.block_height DESC, td.block_transfer_index DESC `; return { total: results[0]?.total ?? 0, results: results ?? [], }; } - - async getInscriptionCountPerBlock( - filters: DbInscriptionCountPerBlockFilters - ): Promise { - const fromCondition = filters.from_block_height - ? this.sql`block_height >= ${filters.from_block_height}` - : this.sql``; - - const toCondition = filters.to_block_height - ? 
this.sql`block_height <= ${filters.to_block_height}` - : this.sql``; - - const where = - filters.from_block_height && filters.to_block_height - ? this.sql`WHERE ${fromCondition} AND ${toCondition}` - : this.sql`WHERE ${fromCondition}${toCondition}`; - - return await this.sql` - SELECT * - FROM inscriptions_per_block - ${filters.from_block_height || filters.to_block_height ? where : this.sql``} - ORDER BY block_height DESC - LIMIT 5000 - `; // roughly 35 days of blocks, assuming 10 minute block times on a full database - } - - private async insertInscriptions( - reveals: InscriptionEventData[], - streamed: boolean - ): Promise { - if (reveals.length === 0) return; - await this.sqlWriteTransaction(async sql => { - // 1. Write inscription reveals - const inscriptionInserts: InscriptionInsert[] = []; - for (const r of reveals) if ('inscription' in r) inscriptionInserts.push(r.inscription); - if (inscriptionInserts.length) - await sql` - INSERT INTO inscriptions ${sql(inscriptionInserts)} - ON CONFLICT ON CONSTRAINT inscriptions_number_unique DO UPDATE SET - genesis_id = EXCLUDED.genesis_id, - mime_type = EXCLUDED.mime_type, - content_type = EXCLUDED.content_type, - content_length = EXCLUDED.content_length, - content = EXCLUDED.content, - fee = EXCLUDED.fee, - sat_ordinal = EXCLUDED.sat_ordinal, - sat_rarity = EXCLUDED.sat_rarity, - sat_coinbase_height = EXCLUDED.sat_coinbase_height, - updated_at = NOW() - `; - - // 2. Write locations and transfers - const locationInserts: LocationInsert[] = []; - const revealOutputs: InscriptionEventData[] = []; - const transferredOrdinalNumbersSet = new Set(); - for (const r of reveals) - if ('inscription' in r) { - revealOutputs.push(r); - locationInserts.push({ - ...r.location, - inscription_id: sql`(SELECT id FROM inscriptions WHERE genesis_id = ${r.location.genesis_id})`, - timestamp: sql`TO_TIMESTAMP(${r.location.timestamp})`, - }); - } else { - transferredOrdinalNumbersSet.add(r.location.ordinal_number); - // Transfers can move multiple inscriptions in the same sat, we must expand all of them so - // we can update their respective locations. - // TODO: This could probably be optimized to use fewer queries. 
- const inscriptionIds = await sql<{ id: string; genesis_id: string }[]>` - SELECT id, genesis_id FROM inscriptions WHERE sat_ordinal = ${r.location.ordinal_number} - `; - for (const row of inscriptionIds) { - revealOutputs.push(r); - locationInserts.push({ - genesis_id: row.genesis_id, - inscription_id: row.id, - block_height: r.location.block_height, - block_hash: r.location.block_hash, - tx_id: r.location.tx_id, - tx_index: r.location.tx_index, - address: r.location.address, - output: r.location.output, - offset: r.location.offset, - prev_output: r.location.prev_output, - prev_offset: r.location.prev_offset, - value: r.location.value, - transfer_type: r.location.transfer_type, - block_transfer_index: r.location.block_transfer_index, - timestamp: sql`TO_TIMESTAMP(${r.location.timestamp})`, - }); - } - } - const pointers: DbLocationPointerInsert[] = []; - for (const batch of batchIterate(locationInserts, INSERT_BATCH_SIZE)) { - const pointerBatch = await sql` - INSERT INTO locations ${sql(batch)} - ON CONFLICT ON CONSTRAINT locations_inscription_id_block_height_tx_index_unique DO UPDATE SET - genesis_id = EXCLUDED.genesis_id, - block_hash = EXCLUDED.block_hash, - tx_id = EXCLUDED.tx_id, - address = EXCLUDED.address, - value = EXCLUDED.value, - output = EXCLUDED.output, - "offset" = EXCLUDED.offset, - timestamp = EXCLUDED.timestamp - RETURNING inscription_id, id AS location_id, block_height, tx_index, address - `; - await this.updateInscriptionLocationPointers(pointerBatch); - pointers.push(...pointerBatch); - } - if (streamed && transferredOrdinalNumbersSet.size) - await sql` - UPDATE inscriptions - SET updated_at = NOW() - WHERE sat_ordinal IN ${sql([...transferredOrdinalNumbersSet])} - `; - - for (const reveal of reveals) { - const action = - 'inscription' in reveal - ? `reveal #${reveal.inscription.number} (${reveal.location.genesis_id})` - : `transfer sat ${reveal.location.ordinal_number}`; - logger.info(`PgStore ${action} at block ${reveal.location.block_height}`); - } - - // 3. 
Recursions and counts - await this.updateInscriptionRecursions(reveals); - await this.counts.applyInscriptions(inscriptionInserts); - }); - } - - private async normalizeInscriptionCount(args: { min_block_height: number }): Promise { - await this.sqlWriteTransaction(async sql => { - await sql` - DELETE FROM inscriptions_per_block - WHERE block_height >= ${args.min_block_height} - `; - // - gets highest total for a block < min_block_height - // - calculates new totals for all blocks >= min_block_height - // - inserts new totals - await sql` - WITH previous AS ( - SELECT * - FROM inscriptions_per_block - WHERE block_height < ${args.min_block_height} - ORDER BY block_height DESC - LIMIT 1 - ), updated_blocks AS ( - SELECT - l.block_height, - MIN(l.block_hash), - COUNT(*) AS inscription_count, - COALESCE((SELECT previous.inscription_count_accum FROM previous), 0) + (SUM(COUNT(*)) OVER (ORDER BY l.block_height ASC)) AS inscription_count_accum, - MIN(l.timestamp) - FROM locations AS l - INNER JOIN genesis_locations AS g ON g.location_id = l.id - WHERE l.block_height >= ${args.min_block_height} - GROUP BY l.block_height - ORDER BY l.block_height ASC - ) - INSERT INTO inscriptions_per_block - SELECT * FROM updated_blocks - ON CONFLICT (block_height) DO UPDATE SET - block_hash = EXCLUDED.block_hash, - inscription_count = EXCLUDED.inscription_count, - inscription_count_accum = EXCLUDED.inscription_count_accum, - timestamp = EXCLUDED.timestamp; - `; - }); - } - - private async rollBackInscriptions(rollbacks: InscriptionEventData[]): Promise { - if (rollbacks.length === 0) return; - await this.sqlWriteTransaction(async sql => { - // Roll back events in reverse so BRC-20 keeps a sane order. - for (const rollback of rollbacks.reverse()) { - if ('inscription' in rollback) { - await this.counts.rollBackInscription({ - inscription: rollback.inscription, - location: rollback.location, - }); - await sql`DELETE FROM inscriptions WHERE genesis_id = ${rollback.inscription.genesis_id}`; - logger.info( - `PgStore rollback reveal #${rollback.inscription.number} (${rollback.inscription.genesis_id}) at block ${rollback.location.block_height}` - ); - } else { - await this.recalculateCurrentLocationPointerFromLocationRollBack({ - location: rollback.location, - }); - await sql` - DELETE FROM locations - WHERE output = ${rollback.location.output} AND "offset" = ${rollback.location.offset} - `; - logger.info( - `PgStore rollback transfer for sat ${rollback.location.ordinal_number} at block ${rollback.location.block_height}` - ); - } - } - }); - } - - private async updateInscriptionLocationPointers( - pointers: DbLocationPointerInsert[] - ): Promise { - if (pointers.length === 0) return; - - // Filters pointer args so we enter only one new pointer per inscription. - const distinctPointers = ( - cond: (a: DbLocationPointerInsert, b: DbLocationPointerInsert) => boolean - ): DbLocationPointerInsert[] => { - const out = new Map(); - for (const ptr of pointers) { - if (ptr.inscription_id === null) continue; - const current = out.get(ptr.inscription_id); - out.set(ptr.inscription_id, current ? (cond(current, ptr) ? 
current : ptr) : ptr); - } - return [...out.values()]; - }; - - await this.sqlWriteTransaction(async sql => { - const distinctIds = [ - ...new Set(pointers.map(i => i.inscription_id).filter(v => v !== null)), - ]; - const genesisPtrs = distinctPointers( - (a, b) => - parseInt(a.block_height) < parseInt(b.block_height) || - (parseInt(a.block_height) === parseInt(b.block_height) && - parseInt(a.tx_index) < parseInt(b.tx_index)) - ); - if (genesisPtrs.length) { - const genesis = await sql<{ old_address: string | null; new_address: string | null }[]>` - WITH old_pointers AS ( - SELECT inscription_id, address - FROM genesis_locations - WHERE inscription_id IN ${sql(distinctIds)} - ), - new_pointers AS ( - INSERT INTO genesis_locations ${sql(genesisPtrs)} - ON CONFLICT (inscription_id) DO UPDATE SET - location_id = EXCLUDED.location_id, - block_height = EXCLUDED.block_height, - tx_index = EXCLUDED.tx_index, - address = EXCLUDED.address - WHERE - EXCLUDED.block_height < genesis_locations.block_height OR - (EXCLUDED.block_height = genesis_locations.block_height AND - EXCLUDED.tx_index < genesis_locations.tx_index) - RETURNING inscription_id, address - ) - SELECT n.address AS new_address, o.address AS old_address - FROM new_pointers AS n - LEFT JOIN old_pointers AS o USING (inscription_id) - `; - await this.counts.applyLocations(genesis, true); - } - - const currentPtrs = distinctPointers( - (a, b) => - parseInt(a.block_height) > parseInt(b.block_height) || - (parseInt(a.block_height) === parseInt(b.block_height) && - parseInt(a.tx_index) > parseInt(b.tx_index)) - ); - if (currentPtrs.length) { - const current = await sql<{ old_address: string | null; new_address: string | null }[]>` - WITH old_pointers AS ( - SELECT inscription_id, address - FROM current_locations - WHERE inscription_id IN ${sql(distinctIds)} - ), - new_pointers AS ( - INSERT INTO current_locations ${sql(currentPtrs)} - ON CONFLICT (inscription_id) DO UPDATE SET - location_id = EXCLUDED.location_id, - block_height = EXCLUDED.block_height, - tx_index = EXCLUDED.tx_index, - address = EXCLUDED.address - WHERE - EXCLUDED.block_height > current_locations.block_height OR - (EXCLUDED.block_height = current_locations.block_height AND - EXCLUDED.tx_index > current_locations.tx_index) - RETURNING inscription_id, address - ) - SELECT n.address AS new_address, o.address AS old_address - FROM new_pointers AS n - LEFT JOIN old_pointers AS o USING (inscription_id) - `; - await this.counts.applyLocations(current, false); - } - }); - } - - private async recalculateCurrentLocationPointerFromLocationRollBack(args: { - location: LocationData; - }): Promise { - await this.sqlWriteTransaction(async sql => { - // Is the location we're rolling back *the* current location? 
- const current = await sql` - SELECT * - FROM current_locations AS c - INNER JOIN locations AS l ON l.id = c.location_id - WHERE l.output = ${args.location.output} AND l."offset" = ${args.location.offset} - `; - if (current.count > 0) { - const update = await sql` - WITH prev AS ( - SELECT id, block_height, tx_index, address - FROM locations - WHERE inscription_id = ${current[0].inscription_id} AND id <> ${current[0].location_id} - ORDER BY block_height DESC, tx_index DESC - LIMIT 1 - ) - UPDATE current_locations AS c SET - location_id = prev.id, - block_height = prev.block_height, - tx_index = prev.tx_index, - address = prev.address - FROM prev - WHERE c.inscription_id = ${current[0].inscription_id} - RETURNING * - `; - await this.counts.rollBackCurrentLocation({ curr: current[0], prev: update[0] }); - } - }); - } - - private async updateInscriptionRecursions(reveals: InscriptionEventData[]): Promise { - if (reveals.length === 0) return; - const inserts: { - inscription_id: PgSqlQuery; - ref_inscription_id: PgSqlQuery; - ref_inscription_genesis_id: string; - }[] = []; - for (const i of reveals) - if ('inscription' in i && i.recursive_refs?.length) { - const refSet = new Set(i.recursive_refs); - for (const ref of refSet) - inserts.push({ - inscription_id: this - .sql`(SELECT id FROM inscriptions WHERE genesis_id = ${i.inscription.genesis_id} LIMIT 1)`, - ref_inscription_id: this - .sql`(SELECT id FROM inscriptions WHERE genesis_id = ${ref} LIMIT 1)`, - ref_inscription_genesis_id: ref, - }); - } - if (inserts.length === 0) return; - await this.sqlWriteTransaction(async sql => { - for (const chunk of batchIterate(inserts, 500)) - await sql` - INSERT INTO inscription_recursions ${sql(chunk)} - ON CONFLICT ON CONSTRAINT inscription_recursions_unique DO NOTHING - `; - }); - } } diff --git a/src/pg/types.ts b/src/pg/types.ts index 46680b91..3a4e86de 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -2,35 +2,38 @@ import { PgNumeric, PgBytea, PgSqlQuery } from '@hirosystems/api-toolkit'; import { Order, OrderBy } from '../api/schemas'; import { SatoshiRarity } from '../api/util/ordinal-satoshi'; -/** - * Updates and inserts - */ +export type DbSatoshiInsert = { + ordinal_number: PgNumeric; + rarity: string; + coinbase_height: number; +}; -export type InscriptionData = { +export type DbInscriptionInsert = { genesis_id: string; + ordinal_number: PgNumeric; number: number; classic_number: number; + block_height: number; + tx_index: number; + address: string | null; mime_type: string; content_type: string; content_length: number; content: PgBytea; fee: PgNumeric; curse_type: string | null; - sat_ordinal: PgNumeric; - sat_rarity: string; - sat_coinbase_height: number; recursive: boolean; metadata: string | null; parent: string | null; + timestamp: number; }; -export type InscriptionInsert = InscriptionData; - -type AbstractLocationData = { +export type DbLocationInsert = { + ordinal_number: PgNumeric; block_height: number; block_hash: string; - tx_id: string; tx_index: number; + tx_id: string; address: string | null; output: string; offset: PgNumeric | null; @@ -38,36 +41,31 @@ type AbstractLocationData = { prev_offset: PgNumeric | null; value: PgNumeric | null; transfer_type: DbLocationTransferType; - block_transfer_index: number | null; -}; - -export type RevealLocationData = AbstractLocationData & { genesis_id: string; timestamp: number }; - -export type TransferLocationData = AbstractLocationData & { - ordinal_number: PgNumeric; timestamp: number; }; -export type LocationData = 
RevealLocationData | TransferLocationData; - -export type LocationInsert = AbstractLocationData & { - timestamp: PgSqlQuery; - genesis_id: string; - inscription_id: PgSqlQuery | string; -}; - -export type InscriptionRevealData = { - inscription: InscriptionData; - recursive_refs: string[]; - location: RevealLocationData; +export type DbCurrentLocationInsert = { + ordinal_number: PgNumeric; + block_height: number; + tx_index: number; + address: string | null; }; -export type InscriptionTransferData = { - location: TransferLocationData; +type AbstractLocationData = { + block_height: number; + block_hash: string; + tx_id: string; + tx_index: number; + address: string | null; + output: string; + offset: PgNumeric | null; + prev_output: string | null; + prev_offset: PgNumeric | null; + value: PgNumeric | null; + transfer_type: DbLocationTransferType; + block_transfer_index: number | null; }; -export type InscriptionEventData = InscriptionRevealData | InscriptionTransferData; - /** * Selects */ @@ -111,8 +109,6 @@ export enum DbLocationTransferType { } export type DbLocation = { - id: string; - inscription_id: string | null; genesis_id: string; block_height: string; block_hash: string; @@ -127,27 +123,9 @@ export type DbLocation = { timestamp: Date; }; -export type DbLocationPointer = { - inscription_id: number; - location_id: number; - block_height: number; - tx_index: number; - address: string | null; -}; - -export type DbLocationPointerInsert = { - inscription_id: string; - location_id: string; - block_height: string; - tx_index: string; - address: string | null; -}; - export type DbInscriptionLocationChange = { genesis_id: string; number: string; - from_id: string; - from_inscription_id: string; from_block_height: string; from_block_hash: string; from_tx_id: string; @@ -156,10 +134,6 @@ export type DbInscriptionLocationChange = { from_offset: string | null; from_value: string | null; from_timestamp: Date; - from_genesis: boolean; - from_current: boolean; - to_id: string; - to_inscription_id: string; to_block_height: string; to_block_hash: string; to_tx_id: string; @@ -168,37 +142,6 @@ export type DbInscriptionLocationChange = { to_offset: string | null; to_value: string | null; to_timestamp: Date; - to_genesis: boolean; - to_current: boolean; -}; - -export const LOCATIONS_COLUMNS = [ - 'id', - 'inscription_id', - 'genesis_id', - 'block_height', - 'block_hash', - 'tx_id', - 'tx_index', - 'address', - 'output', - 'offset', - 'value', - 'timestamp', -]; - -export type DbInscription = { - id: string; - genesis_id: string; - number: string; - mime_type: string; - content_type: string; - content_length: string; - fee: string; - sat_ordinal: string; - sat_rarity: string; - sat_coinbase_height: string; - recursive: boolean; }; export type DbInscriptionContent = { @@ -207,21 +150,6 @@ export type DbInscriptionContent = { content: string; }; -export const INSCRIPTIONS_COLUMNS = [ - 'id', - 'genesis_id', - 'number', - 'mime_type', - 'content_type', - 'content_length', - 'fee', - 'curse_type', - 'sat_ordinal', - 'sat_rarity', - 'sat_coinbase_height', - 'recursive', -]; - export type DbInscriptionIndexPaging = { limit: number; offset: number; diff --git a/tests/api/inscriptions.test.ts b/tests/api/inscriptions.test.ts index a9693510..0ae354e8 100644 --- a/tests/api/inscriptions.test.ts +++ b/tests/api/inscriptions.test.ts @@ -278,6 +278,7 @@ describe('/inscriptions', () => { recursion_refs: [ '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0', 
'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0', + 'b4b27b9a15f928b95a8ce4b418946553b7b313a345254cd9b23d79489175fa5ai0', ], }; @@ -298,7 +299,7 @@ describe('/inscriptions', () => { expect(response2.json()).toStrictEqual(expected); }); - test('shows inscription with null genesis address', async () => { + test('shows inscription with empty genesis address', async () => { await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -318,7 +319,7 @@ describe('/inscriptions', () => { inscription_fee: 2805, inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', inscription_output_value: 10000, - inscriber_address: null, + inscriber_address: '', ordinal_number: 257418248345364, ordinal_block_height: 51483, ordinal_offset: 0, @@ -337,8 +338,8 @@ describe('/inscriptions', () => { .build() ); const expected = { - address: null, - genesis_address: null, + address: '', + genesis_address: '', genesis_block_hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', genesis_block_height: 775617, content_length: 5, @@ -1463,7 +1464,7 @@ describe('/inscriptions', () => { inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', inscription_output_value: 10000, inscriber_address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - ordinal_number: 257418248345364, + ordinal_number: 257418248345365, ordinal_block_height: 650000, ordinal_offset: 0, satpoint_post_inscription: @@ -1502,7 +1503,7 @@ describe('/inscriptions', () => { offset: '0', number: 1, value: '10000', - sat_ordinal: '257418248345364', + sat_ordinal: '257418248345365', tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', sat_coinbase_height: 51483, output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0', @@ -1603,7 +1604,7 @@ describe('/inscriptions', () => { inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', inscription_output_value: 10000, inscriber_address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - ordinal_number: 257418248345364, + ordinal_number: 257418248345365, ordinal_block_height: 650000, ordinal_offset: 0, satpoint_post_inscription: @@ -1676,7 +1677,7 @@ describe('/inscriptions', () => { offset: '0', number: 1, value: '10000', - sat_ordinal: '257418248345364', + sat_ordinal: '257418248345365', tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', sat_coinbase_height: 51483, output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0', diff --git a/tests/api/sats.test.ts b/tests/api/sats.test.ts index 4747237a..7e036772 100644 --- a/tests/api/sats.test.ts +++ b/tests/api/sats.test.ts @@ -81,7 +81,7 @@ describe('/sats', () => { ); }); - test('returns sat with more than 1 cursed inscription', async () => { + test('returns sat with more than 1 inscription', async () => { await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -133,6 +133,7 @@ describe('/sats', () => { inscription_id: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993i0', inscription_output_value: 10000, inscriber_address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + // Same sat. This will also create a transfer for the previous inscription. 
ordinal_number: 257418248345364, ordinal_block_height: 650000, ordinal_offset: 0, @@ -195,22 +196,71 @@ describe('/sats', () => { genesis_timestamp: 1677803510000, genesis_tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - location: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0:0', + // Re-inscribed sat is moved to the latest inscription's location. + location: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0:0', mime_type: 'image/png', number: -7, offset: '0', - output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0', + output: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0', sat_coinbase_height: 51483, sat_ordinal: '257418248345364', sat_rarity: 'common', - timestamp: 1677803510000, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + timestamp: 1676913207000, + tx_id: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993', value: '10000', curse_type: '"p2wsh"', recursive: false, recursion_refs: null, }, ]); + + // Inscription -7 should have 2 locations, -1 should only have 1. + let transfersResponse = await fastify.inject({ + method: 'GET', + url: '/ordinals/v1/inscriptions/-7/transfers', + }); + expect(transfersResponse.statusCode).toBe(200); + let transferJson = transfersResponse.json(); + expect(transferJson.total).toBe(2); + expect(transferJson.results).toHaveLength(2); + expect(transferJson.results[0].location).toBe( + 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0:0' + ); + expect(transferJson.results[1].location).toBe( + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0:0' + ); + + transfersResponse = await fastify.inject({ + method: 'GET', + url: '/ordinals/v1/inscriptions/-1/transfers', + }); + expect(transfersResponse.statusCode).toBe(200); + transferJson = transfersResponse.json(); + expect(transferJson.total).toBe(1); + expect(transferJson.results).toHaveLength(1); + expect(transferJson.results[0].location).toBe( + 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0:0' + ); + + // Block transfer activity should reflect all true transfers. 
+ transfersResponse = await fastify.inject({ + method: 'GET', + url: '/ordinals/v1/inscriptions/transfers?block=775617', + }); + expect(transfersResponse.statusCode).toBe(200); + transferJson = transfersResponse.json(); + expect(transferJson.total).toBe(0); + expect(transferJson.results).toHaveLength(0); + + transfersResponse = await fastify.inject({ + method: 'GET', + url: '/ordinals/v1/inscriptions/transfers?block=775618', + }); + expect(transfersResponse.statusCode).toBe(200); + transferJson = transfersResponse.json(); + expect(transferJson.total).toBe(1); + expect(transferJson.results).toHaveLength(1); + expect(transferJson.results[0].number).toBe(-7); }); test('returns not found on invalid sats', async () => { diff --git a/tests/ordhook/server.test.ts b/tests/ordhook/server.test.ts index e8035854..a09a217c 100644 --- a/tests/ordhook/server.test.ts +++ b/tests/ordhook/server.test.ts @@ -117,6 +117,10 @@ describe('EventServer', () => { expect(inscr.sat_rarity).toBe('common'); expect(inscr.timestamp.toISOString()).toBe('2023-02-20T17:13:27.000Z'); expect(inscr.value).toBe('10000'); + let count = await db.counts.getAddressCount([ + 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + ]); + expect(count).toBe(1); // Rollback const payload2 = new TestChainhookPayloadBuilder() @@ -142,6 +146,10 @@ describe('EventServer', () => { expect(c1[0].count).toBe(0); const c2 = await db.sql<{ count: number }[]>`SELECT COUNT(*)::int FROM locations`; expect(c2[0].count).toBe(0); + count = await db.counts.getAddressCount([ + 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + ]); + expect(count).toBe(0); }); test('parses inscription_transferred apply and rollback', async () => { @@ -257,6 +265,14 @@ describe('EventServer', () => { expect(inscr.sat_rarity).toBe('common'); expect(inscr.timestamp.toISOString()).toBe('2023-02-20T17:13:27.000Z'); expect(inscr.value).toBe('10000'); + let count = await db.counts.getAddressCount([ + 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + ]); + expect(count).toBe(0); + count = await db.counts.getAddressCount([ + 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf00000', + ]); + expect(count).toBe(1); // Rollback const payload2 = new TestChainhookPayloadBuilder() @@ -283,6 +299,14 @@ describe('EventServer', () => { const c2 = await db.sql<{ count: number }[]>`SELECT COUNT(*)::int FROM locations`; expect(c2[0].count).toBe(1); await expect(db.getChainTipBlockHeight()).resolves.toBe(775617); + count = await db.counts.getAddressCount([ + 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + ]); + expect(count).toBe(1); + count = await db.counts.getAddressCount([ + 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf00000', + ]); + expect(count).toBe(0); }); test('multiple inscription pointers on the same block are compared correctly', async () => { @@ -485,6 +509,7 @@ describe('EventServer', () => { const status = await db.sql<{ transfer_type: string }[]>` SELECT transfer_type FROM locations + INNER JOIN inscriptions USING (ordinal_number) WHERE genesis_id = '53957f47697096cef4ad24dae6357b3d7ffe1e3eb9216ce0bb01d6b6a2c8cf4ai0' `; expect(status[0].transfer_type).toBe('spent_in_fees'); @@ -539,6 +564,7 @@ describe('EventServer', () => { const status = await db.sql<{ transfer_type: string }[]>` SELECT transfer_type FROM locations + INNER JOIN inscriptions USING (ordinal_number) WHERE genesis_id = '53957f47697096cef4ad24dae6357b3d7ffe1e3eb9216ce0bb01d6b6a2c8cf4ai0' `; 
expect(status[0].transfer_type).toBe('burnt'); From 3c1480f5bfb8bec4993fffd50245c345d71cdf08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Fri, 26 Apr 2024 12:25:27 -0600 Subject: [PATCH 06/10] fix: detect block gaps when streaming from ordhook (#349) * fix: detect gaps when streaming * fix: tests --- src/pg/block-cache.ts | 2 + src/pg/pg-store.ts | 64 ++++++++++++++---- tests/api/cache.test.ts | 10 +++ tests/helpers.ts | 2 +- tests/ordhook/replay.test.ts | 1 + tests/ordhook/server.test.ts | 126 +++++++++++++++++++++++++++++++++++ 6 files changed, 192 insertions(+), 13 deletions(-) diff --git a/src/pg/block-cache.ts b/src/pg/block-cache.ts index 2dc9f249..128dcba7 100644 --- a/src/pg/block-cache.ts +++ b/src/pg/block-cache.ts @@ -28,6 +28,7 @@ export class BlockCache { locations: DbLocationInsert[] = []; currentLocations = new Map(); recursiveRefs = new Map(); + revealedNumbers: number[] = []; mimeTypeCounts = new Map(); satRarityCounts = new Map(); @@ -72,6 +73,7 @@ export class BlockCache { parent: reveal.parent, timestamp: this.timestamp, }); + this.revealedNumbers.push(reveal.inscription_number.jubilee); this.increaseMimeTypeCount(mime_type); this.increaseSatRarityCount(satoshi.rarity); this.increaseInscriptionTypeCount(reveal.inscription_number.classic < 0 ? 'cursed' : 'blessed'); diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 0c9267b4..0dbf25a2 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -8,7 +8,11 @@ import { runMigrations, stopwatch, } from '@hirosystems/api-toolkit'; -import { BitcoinEvent, BitcoinPayload } from '@hirosystems/chainhook-client'; +import { + BadPayloadRequestError, + BitcoinEvent, + BitcoinPayload, +} from '@hirosystems/chainhook-client'; import * as path from 'path'; import * as postgres from 'postgres'; import { Order, OrderBy } from '../api/schemas'; @@ -35,6 +39,8 @@ export const INSERT_BATCH_SIZE = 4000; type InscriptionIdentifier = { genesis_id: string } | { number: number }; +class BlockAlreadyIngestedError extends Error {} + export class PgStore extends BasePgStore { readonly brc20: Brc20PgStore; readonly counts: CountsPgStore; @@ -90,14 +96,17 @@ export class PgStore extends BasePgStore { ); } for (const event of payload.apply) { - if (await this.isBlockIngested(event)) { - logger.warn(`PgStore skipping previously seen block ${event.block_identifier.index}`); - continue; - } logger.info(`PgStore apply block ${event.block_identifier.index}`); const time = stopwatch(); - await this.updateInscriptionsEvent(sql, event, 'apply', streamed); - await this.brc20.updateBrc20Operations(sql, event, 'apply'); + try { + await this.updateInscriptionsEvent(sql, event, 'apply', streamed); + await this.brc20.updateBrc20Operations(sql, event, 'apply'); + } catch (error) { + if (error instanceof BlockAlreadyIngestedError) { + logger.warn(error); + continue; + } else throw error; + } await this.updateChainTipBlockHeight(sql, event.block_identifier.index); logger.info( `PgStore apply block ${ @@ -119,6 +128,7 @@ export class PgStore extends BasePgStore { normalizedHexString(event.block_identifier.hash), event.timestamp ); + if (direction === 'apply') await this.assertNextBlockIsNotIngested(sql, event); for (const tx of event.transactions) { const tx_id = normalizedHexString(tx.transaction_identifier.hash); for (const operation of tx.metadata.ordinal_operations) { @@ -138,6 +148,7 @@ export class PgStore extends BasePgStore { } switch (direction) { case 'apply': + if (streamed) await this.assertNextBlockIsContiguous(sql, 
event, cache); await this.applyInscriptions(sql, cache, streamed); break; case 'rollback': @@ -348,15 +359,44 @@ export class PgStore extends BasePgStore { } } - private async isBlockIngested(event: BitcoinEvent): Promise { - const currentBlockHeight = await this.getChainTipBlockHeight(); + private async assertNextBlockIsNotIngested(sql: PgSqlClient, event: BitcoinEvent) { + const result = await sql<{ block_height: number }[]>` + SELECT block_height::int FROM chain_tip + `; + if (!result.count) return false; + const currentHeight = result[0].block_height; if ( - event.block_identifier.index <= currentBlockHeight && + event.block_identifier.index <= currentHeight && event.block_identifier.index !== ORDINALS_GENESIS_BLOCK ) { - return true; + throw new BlockAlreadyIngestedError( + `Block ${event.block_identifier.index} is already ingested, chain tip is at ${currentHeight}` + ); + } + } + + private async assertNextBlockIsContiguous( + sql: PgSqlClient, + event: BitcoinEvent, + cache: BlockCache + ) { + if (!cache.revealedNumbers.length) { + // TODO: How do we check blocks with only transfers? + return; } - return false; + const result = await sql<{ max: number | null; block_height: number }[]>` + WITH tip AS (SELECT block_height::int FROM chain_tip) + SELECT MAX(number)::int AS max, (SELECT block_height FROM tip) + FROM inscriptions WHERE number >= 0 + `; + if (!result.count) return; + const data = result[0]; + const firstReveal = cache.revealedNumbers.sort()[0]; + if (data.max === null && firstReveal === 0) return; + if ((data.max ?? 0) + 1 != firstReveal) + throw new BadPayloadRequestError( + `Streamed block ${event.block_identifier.index} is non-contiguous, attempting to reveal #${firstReveal} when current max is #${data.max} at block height ${data.block_height}` + ); } private async updateChainTipBlockHeight(sql: PgSqlClient, block_height: number): Promise { diff --git a/tests/api/cache.test.ts b/tests/api/cache.test.ts index 9eca102e..9c2fc564 100644 --- a/tests/api/cache.test.ts +++ b/tests/api/cache.test.ts @@ -21,6 +21,7 @@ describe('ETag cache', () => { test('inscription cache control', async () => { const block = new TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 775617 }) .transaction({ hash: '0x38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc' }) @@ -88,6 +89,7 @@ describe('ETag cache', () => { // Perform transfer and check cache await db.updateInscriptions( new TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 775618, timestamp: 1678122360 }) .transaction({ @@ -125,6 +127,7 @@ describe('ETag cache', () => { // Perform transfer GAP FILL and check cache await db.updateInscriptions( new TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 775619, timestamp: 1678122360 }) .transaction({ @@ -161,6 +164,7 @@ describe('ETag cache', () => { test('inscriptions index cache control', async () => { const block1 = new TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 778575 }) .transaction({ hash: '0x9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201' }) @@ -194,6 +198,7 @@ describe('ETag cache', () => { .build(); await db.updateInscriptions(block1); const block2 = new TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 778576 }) .transaction({ hash: '0x00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d' }) @@ -246,6 +251,7 @@ describe('ETag cache', () => { // New location const block3 = new 
TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 778577 }) .transaction({ hash: 'ae9d273a10e899f0d2cad47ee2b0e77ab8a9addd9dd5bb5e4b03d6971c060d52' }) @@ -274,6 +280,7 @@ describe('ETag cache', () => { test('inscriptions stats per block cache control', async () => { const block1 = new TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 778575, hash: randomHash() }) .transaction({ hash: '0x9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201' }) @@ -326,6 +333,7 @@ describe('ETag cache', () => { // New block const block2 = new TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 778576, hash: randomHash() }) .transaction({ hash: '0x00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d' }) @@ -370,6 +378,7 @@ describe('ETag cache', () => { test('status etag changes with new block', async () => { const block1 = new TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 778575, hash: randomHash() }) .transaction({ hash: '0x9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201' }) @@ -422,6 +431,7 @@ describe('ETag cache', () => { // New block const block2 = new TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 778576, hash: randomHash() }) .transaction({ hash: '0x00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d' }) diff --git a/tests/helpers.ts b/tests/helpers.ts index 84750cf4..9450f56c 100644 --- a/tests/helpers.ts +++ b/tests/helpers.ts @@ -30,7 +30,7 @@ export class TestChainhookPayloadBuilder { operation: 'inscription_feed', meta_protocols: ['brc-20'], }, - is_streaming_blocks: true, + is_streaming_blocks: false, }, }; private action: 'apply' | 'rollback' = 'apply'; diff --git a/tests/ordhook/replay.test.ts b/tests/ordhook/replay.test.ts index 1e07d625..e6eef06e 100644 --- a/tests/ordhook/replay.test.ts +++ b/tests/ordhook/replay.test.ts @@ -22,6 +22,7 @@ describe('Replay', () => { test('shuts down when streaming on replay mode', async () => { const payload1 = new TestChainhookPayloadBuilder() + .streamingBlocks(true) .apply() .block({ height: 767430, diff --git a/tests/ordhook/server.test.ts b/tests/ordhook/server.test.ts index a09a217c..4f8a4d23 100644 --- a/tests/ordhook/server.test.ts +++ b/tests/ordhook/server.test.ts @@ -572,6 +572,132 @@ describe('EventServer', () => { }); describe('gap detection', () => { + test('server rejects payload with first inscription gap when streaming', async () => { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .streamingBlocks(false) + .apply() + .block({ + height: 778575, + hash: '0x00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + timestamp: 1676913207, + }) + .transaction({ + hash: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201', + }) + .inscriptionRevealed({ + content_bytes: '0x48656C6C6F', + content_type: 'text/plain;charset=utf-8', + content_length: 5, + inscription_number: { classic: 0, jubilee: 0 }, + inscription_fee: 705, + inscription_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0', + inscription_output_value: 10000, + inscriber_address: 'bc1pscktlmn99gyzlvymvrezh6vwd0l4kg06tg5rvssw0czg8873gz5sdkteqj', + ordinal_number: 257418248345364, + ordinal_block_height: 650000, + ordinal_offset: 0, + satpoint_post_inscription: + '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0:0', + inscription_input_index: 0, + transfers_pre_inscription: 0, + tx_index: 0, + 
curse_type: null, + inscription_pointer: null, + delegate: null, + metaprotocol: null, + metadata: null, + parent: null, + }) + .build() + ); + const errorPayload1 = new TestChainhookPayloadBuilder() + .streamingBlocks(false) + .apply() + .block({ + height: 778576, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + timestamp: 1676913207, + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed({ + content_bytes: '0x48656C6C6F', + content_type: 'text/plain;charset=utf-8', + content_length: 5, + inscription_number: { classic: 5, jubilee: 5 }, // Gap at 5 but block is not streamed + inscription_fee: 705, + inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + inscription_output_value: 10000, + inscriber_address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + ordinal_number: 1050000000000000, + ordinal_block_height: 650000, + ordinal_offset: 0, + satpoint_post_inscription: + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0:0', + inscription_input_index: 0, + transfers_pre_inscription: 0, + tx_index: 0, + curse_type: null, + inscription_pointer: null, + delegate: null, + metaprotocol: null, + metadata: null, + parent: null, + }) + .build(); + // Not streamed, accepts block. + await expect(db.updateInscriptions(errorPayload1)).resolves.not.toThrow( + BadPayloadRequestError + ); + + const errorPayload2 = new TestChainhookPayloadBuilder() + .streamingBlocks(true) + .apply() + .block({ + height: 778579, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + timestamp: 1676913207, + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed({ + content_bytes: '0x48656C6C6F', + content_type: 'text/plain;charset=utf-8', + content_length: 5, + inscription_number: { classic: 10, jubilee: 10 }, // Gap at 10 + inscription_fee: 705, + inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + inscription_output_value: 10000, + inscriber_address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + ordinal_number: 1050000000000000, + ordinal_block_height: 650000, + ordinal_offset: 0, + satpoint_post_inscription: + '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0:0', + inscription_input_index: 0, + transfers_pre_inscription: 0, + tx_index: 0, + curse_type: null, + inscription_pointer: null, + delegate: null, + metaprotocol: null, + metadata: null, + parent: null, + }) + .build(); + await expect(db.updateInscriptions(errorPayload2)).rejects.toThrow(BadPayloadRequestError); + const response = await server['fastify'].inject({ + method: 'POST', + url: `/payload`, + headers: { authorization: `Bearer ${ENV.ORDHOOK_NODE_AUTH_TOKEN}` }, + payload: errorPayload2, + }); + expect(response.statusCode).toBe(400); + }); + test('server ignores past blocks', async () => { const payload = new TestChainhookPayloadBuilder() .apply() From 939286e3a036063835206c533ce7f9a66cee0ba7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Fri, 26 Apr 2024 15:30:10 -0600 Subject: [PATCH 07/10] feat: return `parent` and `metadata` in inscription responses (#350) * fix: show parent * fix: metadata * fix: tests --- src/api/schemas.ts | 6 ++ src/api/util/helpers.ts | 2 + src/pg/pg-store.ts | 2 + src/pg/types.ts | 2 + tests/api/inscriptions.test.ts | 147 +++++++++++++++++++++++++++++++++ 
tests/api/sats.test.ts | 4 + 6 files changed, 163 insertions(+) diff --git a/src/api/schemas.ts b/src/api/schemas.ts index 21483605..d8b6f88c 100644 --- a/src/api/schemas.ts +++ b/src/api/schemas.ts @@ -326,6 +326,12 @@ export const InscriptionResponse = Type.Object( }) ) ), + parent: Nullable( + Type.String({ + examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218i0'], + }) + ), + metadata: Nullable(Type.Any()), }, { title: 'Inscription Response' } ); diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index 0b51af1c..5d3d38da 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -55,6 +55,8 @@ export function parseDbInscriptions( curse_type: i.curse_type, recursive: i.recursive, recursion_refs: i.recursion_refs?.split(',') ?? null, + parent: i.parent, + metadata: i.metadata ? JSON.parse(i.metadata) : null, })); } export function parseDbInscription(item: DbFullyLocatedInscriptionResult): InscriptionResponseType { diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 0dbf25a2..494a0998 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -589,6 +589,8 @@ export class PgStore extends BasePgStore { i.fee AS genesis_fee, i.curse_type, i.ordinal_number AS sat_ordinal, + i.parent, + i.metadata, s.rarity AS sat_rarity, s.coinbase_height AS sat_coinbase_height, i.recursive, diff --git a/src/pg/types.ts b/src/pg/types.ts index 3a4e86de..46e66e1e 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -100,6 +100,8 @@ export type DbFullyLocatedInscriptionResult = { curse_type: string | null; recursive: boolean; recursion_refs: string | null; + parent: string | null; + metadata: string | null; }; export enum DbLocationTransferType { diff --git a/tests/api/inscriptions.test.ts b/tests/api/inscriptions.test.ts index 0ae354e8..dc541cc3 100644 --- a/tests/api/inscriptions.test.ts +++ b/tests/api/inscriptions.test.ts @@ -124,6 +124,8 @@ describe('/inscriptions', () => { curse_type: null, recursive: false, recursion_refs: null, + parent: null, + metadata: null, }; // By inscription id @@ -280,6 +282,8 @@ describe('/inscriptions', () => { 'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0', 'b4b27b9a15f928b95a8ce4b418946553b7b313a345254cd9b23d79489175fa5ai0', ], + parent: null, + metadata: null, }; // By inscription id @@ -299,6 +303,127 @@ describe('/inscriptions', () => { expect(response2.json()).toStrictEqual(expected); }); + test('shows inscription with parent', async () => { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 778575, + hash: '0x00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + timestamp: 1676913207, + }) + .transaction({ + hash: '0x9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201', + }) + .inscriptionRevealed({ + content_bytes: `0x010101`, + content_type: 'text/plain;charset=utf-8', + content_length: 5, + inscription_number: { classic: 0, jubilee: 0 }, + inscription_fee: 705, + inscription_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0', + inscription_output_value: 10000, + inscriber_address: 'bc1pscktlmn99gyzlvymvrezh6vwd0l4kg06tg5rvssw0czg8873gz5sdkteqj', + ordinal_number: 257418248345364, + ordinal_block_height: 650000, + ordinal_offset: 0, + satpoint_post_inscription: + '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0:0', + tx_index: 0, + inscription_input_index: 0, + transfers_pre_inscription: 0, + curse_type: null, + inscription_pointer: null, + delegate: null, + metaprotocol: 
null, + metadata: null, + parent: null, + }) + .transaction({ + hash: '0xf351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421', + }) + .inscriptionRevealed({ + content_bytes: '0x48656C6C6F', + content_type: 'image/png', + content_length: 5, + inscription_number: { classic: 1, jubilee: 1 }, + inscription_fee: 2805, + inscription_id: 'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0', + inscription_output_value: 10000, + inscriber_address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + ordinal_number: 257418248345364, + ordinal_block_height: 51483, + ordinal_offset: 0, + satpoint_post_inscription: + 'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421:0:0', + inscription_input_index: 0, + transfers_pre_inscription: 0, + tx_index: 0, + curse_type: null, + inscription_pointer: null, + delegate: null, + metaprotocol: null, + metadata: null, + parent: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0', + }) + .build() + ); + const response = await fastify.inject({ + method: 'GET', + url: '/ordinals/v1/inscriptions/f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0', + }); + expect(response.statusCode).toBe(200); + expect(response.json().parent).toBe( + '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0' + ); + }); + + test('shows inscription with metadata', async () => { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 778575, + hash: '0x00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + timestamp: 1676913207, + }) + .transaction({ + hash: '0x9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201', + }) + .inscriptionRevealed({ + content_bytes: `0x010101`, + content_type: 'text/plain;charset=utf-8', + content_length: 5, + inscription_number: { classic: 0, jubilee: 0 }, + inscription_fee: 705, + inscription_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0', + inscription_output_value: 10000, + inscriber_address: 'bc1pscktlmn99gyzlvymvrezh6vwd0l4kg06tg5rvssw0czg8873gz5sdkteqj', + ordinal_number: 257418248345364, + ordinal_block_height: 650000, + ordinal_offset: 0, + satpoint_post_inscription: + '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0:0', + tx_index: 0, + inscription_input_index: 0, + transfers_pre_inscription: 0, + curse_type: null, + inscription_pointer: null, + delegate: null, + metaprotocol: null, + metadata: { foo: 'bar', test: 1337 }, + parent: null, + }) + .build() + ); + const response = await fastify.inject({ + method: 'GET', + url: '/ordinals/v1/inscriptions/9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0', + }); + expect(response.statusCode).toBe(200); + expect(response.json().metadata).toStrictEqual({ foo: 'bar', test: 1337 }); + }); + test('shows inscription with empty genesis address', async () => { await db.updateInscriptions( new TestChainhookPayloadBuilder() @@ -362,6 +487,8 @@ describe('/inscriptions', () => { curse_type: null, recursive: false, recursion_refs: null, + parent: null, + metadata: null, }; // By inscription id @@ -444,6 +571,8 @@ describe('/inscriptions', () => { curse_type: '88', recursive: false, recursion_refs: null, + parent: null, + metadata: null, }; // By inscription id @@ -555,6 +684,8 @@ describe('/inscriptions', () => { curse_type: null, recursive: false, recursion_refs: null, + parent: null, + metadata: null, }); // Transfer 2 @@ -610,6 +741,8 @@ describe('/inscriptions', () => { curse_type: null, 
recursive: false, recursion_refs: null, + parent: null, + metadata: null, }); }); @@ -720,6 +853,8 @@ describe('/inscriptions', () => { curse_type: null, recursive: false, recursion_refs: null, + parent: null, + metadata: null, }); }); @@ -815,6 +950,8 @@ describe('/inscriptions', () => { curse_type: '{"tag":66}', recursive: false, recursion_refs: null, + parent: null, + metadata: null, }); // Transfer 2 @@ -870,6 +1007,8 @@ describe('/inscriptions', () => { curse_type: '{"tag":66}', recursive: false, recursion_refs: null, + parent: null, + metadata: null, }); }); }); @@ -1515,6 +1654,8 @@ describe('/inscriptions', () => { curse_type: null, recursive: false, recursion_refs: null, + parent: null, + metadata: null, }, { address: 'bc1pscktlmn99gyzlvymvrezh6vwd0l4kg06tg5rvssw0czg8873gz5sdkteqj', @@ -1541,6 +1682,8 @@ describe('/inscriptions', () => { curse_type: null, recursive: false, recursion_refs: null, + parent: null, + metadata: null, }, ]); }); @@ -1654,6 +1797,8 @@ describe('/inscriptions', () => { curse_type: null, recursive: false, recursion_refs: null, + parent: null, + metadata: null, }; expect(responseJson1.results[0]).toStrictEqual(result1); @@ -1689,6 +1834,8 @@ describe('/inscriptions', () => { curse_type: null, recursive: false, recursion_refs: null, + parent: null, + metadata: null, }; expect(responseJson2.results[0]).toStrictEqual(result2); diff --git a/tests/api/sats.test.ts b/tests/api/sats.test.ts index 7e036772..5b12bb2e 100644 --- a/tests/api/sats.test.ts +++ b/tests/api/sats.test.ts @@ -184,6 +184,8 @@ describe('/sats', () => { curse_type: '"p2wsh"', recursive: false, recursion_refs: null, + parent: null, + metadata: null, }, { address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', @@ -211,6 +213,8 @@ describe('/sats', () => { curse_type: '"p2wsh"', recursive: false, recursion_refs: null, + parent: null, + metadata: null, }, ]); From 1515b5d1caf46cc137c611088b9a9c98dc55cb41 Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Fri, 26 Apr 2024 21:38:03 +0000 Subject: [PATCH 08/10] chore(release): 4.0.0-beta.1 [skip ci] MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## [4.0.0-beta.1](https://github.com/hirosystems/ordinals-api/compare/v3.1.0...v4.0.0-beta.1) (2024-04-26) ### ⚠ BREAKING CHANGES * support reinscription transfers (#348) * ingest BRC20 data from ordhook (#347) ### Features * ingest BRC20 data from ordhook ([#347](https://github.com/hirosystems/ordinals-api/issues/347)) ([56a8851](https://github.com/hirosystems/ordinals-api/commit/56a88518b1ffe549524941e4d94d6347d11c98f3)) * return `parent` and `metadata` in inscription responses ([#350](https://github.com/hirosystems/ordinals-api/issues/350)) ([939286e](https://github.com/hirosystems/ordinals-api/commit/939286e3a036063835206c533ce7f9a66cee0ba7)) * support reinscription transfers ([#348](https://github.com/hirosystems/ordinals-api/issues/348)) ([5422156](https://github.com/hirosystems/ordinals-api/commit/5422156e9919f0c5870c9571ea9f591852c98b69)) ### Bug Fixes * detect block gaps when streaming from ordhook ([#349](https://github.com/hirosystems/ordinals-api/issues/349)) ([3c1480f](https://github.com/hirosystems/ordinals-api/commit/3c1480f5bfb8bec4993fffd50245c345d71cdf08)) --- CHANGELOG.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index cc0219a7..214fadc6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,22 @@ +## 
[4.0.0-beta.1](https://github.com/hirosystems/ordinals-api/compare/v3.1.0...v4.0.0-beta.1) (2024-04-26) + + +### ⚠ BREAKING CHANGES + +* support reinscription transfers (#348) +* ingest BRC20 data from ordhook (#347) + +### Features + +* ingest BRC20 data from ordhook ([#347](https://github.com/hirosystems/ordinals-api/issues/347)) ([56a8851](https://github.com/hirosystems/ordinals-api/commit/56a88518b1ffe549524941e4d94d6347d11c98f3)) +* return `parent` and `metadata` in inscription responses ([#350](https://github.com/hirosystems/ordinals-api/issues/350)) ([939286e](https://github.com/hirosystems/ordinals-api/commit/939286e3a036063835206c533ce7f9a66cee0ba7)) +* support reinscription transfers ([#348](https://github.com/hirosystems/ordinals-api/issues/348)) ([5422156](https://github.com/hirosystems/ordinals-api/commit/5422156e9919f0c5870c9571ea9f591852c98b69)) + + +### Bug Fixes + +* detect block gaps when streaming from ordhook ([#349](https://github.com/hirosystems/ordinals-api/issues/349)) ([3c1480f](https://github.com/hirosystems/ordinals-api/commit/3c1480f5bfb8bec4993fffd50245c345d71cdf08)) + ## [3.1.0](https://github.com/hirosystems/ordinals-api/compare/v3.0.1...v3.1.0) (2024-04-23) From dfc003ee65198a35bac1e0fc723068bde90c63c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Tue, 11 Jun 2024 08:49:33 -0600 Subject: [PATCH 09/10] fix: ordhook ingestion (#356) * chore: launch configs * fix: migrations * fix: debug server * fix: optimize address handling * chore: upgrade client * fix: debug server * fix: disable gap check during replay mode * fix: adjust insert batch size * fix: debug server name * fix: batch insert owners * fix: optimize fks * fix: remove directly * fix: fk name * chore: drop unnecessary fks * fix: batch inscription transfers * fix: use full index for block tx index --- .vscode/launch.json | 22 ++++ migrations/1676395230930_inscriptions.ts | 7 +- migrations/1677284495299_locations.ts | 6 +- migrations/1677284495500_current-locations.ts | 12 +- .../1677284495501_inscription-transfers.ts | 10 -- package-lock.json | 14 +-- package.json | 4 +- src/env.ts | 6 +- src/ordhook/server.ts | 13 ++- src/pg/block-cache.ts | 11 +- src/pg/pg-store.ts | 108 +++++++++--------- src/pg/types.ts | 27 +---- tests/api/inscriptions.test.ts | 2 +- tests/ordhook/server.test.ts | 2 +- util/debug-server.ts | 7 +- 15 files changed, 123 insertions(+), 128 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 0ad80e0d..2860f61e 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -14,6 +14,7 @@ ], "outputCapture": "std", "internalConsoleOptions": "openOnSessionStart", + "envFile": "${workspaceFolder}/.env", "env": { "NODE_ENV": "development", "TS_NODE_SKIP_IGNORE": "true" @@ -33,6 +34,7 @@ ], "outputCapture": "std", "internalConsoleOptions": "openOnSessionStart", + "envFile": "${workspaceFolder}/.env", "env": { "NODE_ENV": "development", "TS_NODE_SKIP_IGNORE": "true", @@ -53,6 +55,7 @@ ], "outputCapture": "std", "internalConsoleOptions": "openOnSessionStart", + "envFile": "${workspaceFolder}/.env", "env": { "NODE_ENV": "development", "TS_NODE_SKIP_IGNORE": "true", @@ -60,6 +63,25 @@ }, "killBehavior": "polite", }, + { + "type": "node", + "request": "launch", + "name": "Run: debug server", + "runtimeArgs": [ + "-r", + "ts-node/register" + ], + "args": [ + "${workspaceFolder}/util/debug-server.ts" + ], + "outputCapture": "std", + "internalConsoleOptions": "openOnSessionStart", + "env": { + "NODE_ENV": 
"development", + "TS_NODE_SKIP_IGNORE": "true", + }, + "killBehavior": "polite", + }, { "type": "node", "request": "launch", diff --git a/migrations/1676395230930_inscriptions.ts b/migrations/1676395230930_inscriptions.ts index 3205872b..17951b52 100644 --- a/migrations/1676395230930_inscriptions.ts +++ b/migrations/1676395230930_inscriptions.ts @@ -31,6 +31,7 @@ export function up(pgm: MigrationBuilder): void { }, address: { type: 'text', + notNull: true, }, mime_type: { type: 'text', @@ -76,11 +77,6 @@ export function up(pgm: MigrationBuilder): void { }, }); pgm.createConstraint('inscriptions', 'inscriptions_number_unique', 'UNIQUE(number)'); - pgm.createConstraint( - 'inscriptions', - 'inscriptions_ordinal_number_fk', - 'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE' - ); pgm.createIndex('inscriptions', ['mime_type']); pgm.createIndex('inscriptions', ['recursive']); pgm.createIndex('inscriptions', [ @@ -89,4 +85,5 @@ export function up(pgm: MigrationBuilder): void { ]); pgm.createIndex('inscriptions', ['address']); pgm.createIndex('inscriptions', [{ name: 'updated_at', sort: 'DESC' }]); + pgm.createIndex('inscriptions', ['ordinal_number']); } diff --git a/migrations/1677284495299_locations.ts b/migrations/1677284495299_locations.ts index 30894492..3cdcc48d 100644 --- a/migrations/1677284495299_locations.ts +++ b/migrations/1677284495299_locations.ts @@ -28,6 +28,7 @@ export function up(pgm: MigrationBuilder): void { }, address: { type: 'text', + notNull: true, }, output: { type: 'text', @@ -57,11 +58,6 @@ export function up(pgm: MigrationBuilder): void { pgm.createConstraint('locations', 'locations_pkey', { primaryKey: ['ordinal_number', 'block_height', 'tx_index'], }); - pgm.createConstraint( - 'locations', - 'locations_ordinal_number_fk', - 'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE' - ); pgm.createIndex('locations', ['output', 'offset']); pgm.createIndex('locations', ['timestamp']); pgm.createIndex('locations', [ diff --git a/migrations/1677284495500_current-locations.ts b/migrations/1677284495500_current-locations.ts index 51f4b8a3..8da71549 100644 --- a/migrations/1677284495500_current-locations.ts +++ b/migrations/1677284495500_current-locations.ts @@ -19,18 +19,10 @@ export function up(pgm: MigrationBuilder): void { }, address: { type: 'text', + notNull: true, }, }); - pgm.createConstraint( - 'current_locations', - 'current_locations_locations_fk', - 'FOREIGN KEY(ordinal_number, block_height, tx_index) REFERENCES locations(ordinal_number, block_height, tx_index) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'locations', - 'locations_satoshis_fk', - 'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE' - ); pgm.createIndex('current_locations', ['ordinal_number'], { unique: true }); pgm.createIndex('current_locations', ['address']); + pgm.createIndex('current_locations', ['block_height', 'tx_index']); } diff --git a/migrations/1677284495501_inscription-transfers.ts b/migrations/1677284495501_inscription-transfers.ts index 90b72717..648ef662 100644 --- a/migrations/1677284495501_inscription-transfers.ts +++ b/migrations/1677284495501_inscription-transfers.ts @@ -37,16 +37,6 @@ export function up(pgm: MigrationBuilder): void { pgm.createConstraint('inscription_transfers', 'inscription_transfers_pkey', { primaryKey: ['block_height', 'block_transfer_index'], }); - pgm.createConstraint( - 'inscription_transfers', - 'inscription_transfers_locations_fk', - 'FOREIGN 
KEY(ordinal_number, block_height, tx_index) REFERENCES locations(ordinal_number, block_height, tx_index) ON DELETE CASCADE' - ); - pgm.createConstraint( - 'inscription_transfers', - 'inscription_transfers_satoshis_fk', - 'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE' - ); pgm.createIndex('inscription_transfers', ['genesis_id']); pgm.createIndex('inscription_transfers', ['number']); } diff --git a/package-lock.json b/package-lock.json index d715c605..a6e43e84 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,7 +15,7 @@ "@fastify/swagger": "^8.3.1", "@fastify/type-provider-typebox": "^3.2.0", "@hirosystems/api-toolkit": "^1.4.0", - "@hirosystems/chainhook-client": "^1.8.0", + "@hirosystems/chainhook-client": "^1.10.0", "@semantic-release/changelog": "^6.0.3", "@semantic-release/commit-analyzer": "^10.0.4", "@semantic-release/git": "^10.0.1", @@ -1271,9 +1271,9 @@ } }, "node_modules/@hirosystems/chainhook-client": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.8.0.tgz", - "integrity": "sha512-BpYwrbxWuH0KGRyKq1T8nIiZUGaapOxz6yFZ653m6CJi7DS7kqOm2+v5X/DR0hbeZUmqriGMUJnROJ1tW08aEg==", + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.10.0.tgz", + "integrity": "sha512-Akp3+sZxys/n2iC5NjfnjEHtMfZmP89wSjZfvxU31pvXjz5PtOvL2LFZtkU3+y2EKjEI9msKemMEvQqSAdKO3g==", "dependencies": { "@fastify/type-provider-typebox": "^3.2.0", "fastify": "^4.15.0", @@ -19739,9 +19739,9 @@ } }, "@hirosystems/chainhook-client": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.8.0.tgz", - "integrity": "sha512-BpYwrbxWuH0KGRyKq1T8nIiZUGaapOxz6yFZ653m6CJi7DS7kqOm2+v5X/DR0hbeZUmqriGMUJnROJ1tW08aEg==", + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.10.0.tgz", + "integrity": "sha512-Akp3+sZxys/n2iC5NjfnjEHtMfZmP89wSjZfvxU31pvXjz5PtOvL2LFZtkU3+y2EKjEI9msKemMEvQqSAdKO3g==", "requires": { "@fastify/type-provider-typebox": "^3.2.0", "fastify": "^4.15.0", diff --git a/package.json b/package.json index 889db34b..08fd8b84 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,7 @@ "build": "rimraf ./dist && tsc --project tsconfig.build.json", "start": "node dist/src/index.js", "start-ts": "ts-node ./src/index.ts", - "start:debug-server": "node dist/util/debug-server.js", + "start:debug-server": "ts-node ./util/debug-server.ts", "test": "jest --runInBand", "test:brc-20": "npm run test -- ./tests/brc-20/", "test:api": "npm run test -- ./tests/api/", @@ -57,7 +57,7 @@ "@fastify/swagger": "^8.3.1", "@fastify/type-provider-typebox": "^3.2.0", "@hirosystems/api-toolkit": "^1.4.0", - "@hirosystems/chainhook-client": "^1.8.0", + "@hirosystems/chainhook-client": "^1.10.0", "@semantic-release/changelog": "^6.0.3", "@semantic-release/commit-analyzer": "^10.0.4", "@semantic-release/git": "^10.0.1", diff --git a/src/env.ts b/src/env.ts index fc8f0389..bc7aca06 100644 --- a/src/env.ts +++ b/src/env.ts @@ -33,9 +33,9 @@ const schema = Type.Object({ ORDHOOK_NODE_RPC_PORT: Type.Number({ default: 20456, minimum: 0, maximum: 65535 }), /** * Authorization token that the ordhook node must send with every event to make sure it's - * coming from the valid instance + * coming from the valid instance. Leave it undefined if you wish to avoid header validation. 
*/ - ORDHOOK_NODE_AUTH_TOKEN: Type.String(), + ORDHOOK_NODE_AUTH_TOKEN: Type.Optional(Type.String()), /** * Register ordhook predicates automatically when the API is first launched. Set this to `false` * if you're configuring your predicates manually for any reason. @@ -53,6 +53,8 @@ const schema = Type.Object({ { default: 'default', replay: 'replay' }, { default: 'default' } ), + /** If the API should automatically shut down when Ordhook ingestion mode is `replay` */ + ORDHOOK_REPLAY_INGESTION_MODE_AUTO_SHUTDOWN: Type.Boolean({ default: true }), PGHOST: Type.String(), PGPORT: Type.Number({ default: 5432, minimum: 0, maximum: 65535 }), diff --git a/src/ordhook/server.ts b/src/ordhook/server.ts index 19d1decc..cc383229 100644 --- a/src/ordhook/server.ts +++ b/src/ordhook/server.ts @@ -45,10 +45,11 @@ export async function startOrdhookServer(args: { db: PgStore }): Promise { const streamed = payload.chainhook.is_streaming_blocks; - if (ENV.ORDHOOK_INGESTION_MODE === 'replay' && streamed) { + if ( + ENV.ORDHOOK_INGESTION_MODE === 'replay' && + ENV.ORDHOOK_REPLAY_INGESTION_MODE_AUTO_SHUTDOWN && + streamed + ) { logger.info(`OrdhookServer finished replaying blocks, shutting down`); return shutdown(); } @@ -67,5 +72,7 @@ export async function startOrdhookServer(args: { db: PgStore }): Promise 0) + this.revealedNumbers.push(reveal.inscription_number.jubilee); this.increaseMimeTypeCount(mime_type); this.increaseSatRarityCount(satoshi.rarity); this.increaseInscriptionTypeCount(reveal.inscription_number.classic < 0 ? 'cursed' : 'blessed'); @@ -85,7 +86,7 @@ export class BlockCache { tx_id, tx_index: reveal.tx_index, ordinal_number, - address: reveal.inscriber_address, + address: reveal.inscriber_address ?? '', output: `${satpoint.tx_id}:${satpoint.vout}`, offset: satpoint.offset ?? null, prev_output: null, @@ -98,7 +99,7 @@ export class BlockCache { ordinal_number, block_height: this.blockHeight, tx_index: reveal.tx_index, - address: reveal.inscriber_address, + address: reveal.inscriber_address ?? '', }); if (recursive_refs.length > 0) this.recursiveRefs.set(reveal.inscription_id, recursive_refs); } @@ -107,7 +108,7 @@ export class BlockCache { const satpoint = parseSatPoint(transfer.satpoint_post_transfer); const prevSatpoint = parseSatPoint(transfer.satpoint_pre_transfer); const ordinal_number = transfer.ordinal_number.toString(); - const address = transfer.destination.value ?? null; + const address = transfer.destination.value ?? 
''; this.locations.push({ block_hash: this.blockHash, block_height: this.blockHeight, diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 494a0998..708e4f58 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -35,7 +35,7 @@ import { BlockCache } from './block-cache'; export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); const ORDINALS_GENESIS_BLOCK = 767430; -export const INSERT_BATCH_SIZE = 4000; +export const INSERT_BATCH_SIZE = 3500; type InscriptionIdentifier = { genesis_id: string } | { number: number }; @@ -82,7 +82,8 @@ export class PgStore extends BasePgStore { */ async updateInscriptions(payload: BitcoinPayload): Promise { await this.sqlWriteTransaction(async sql => { - const streamed = payload.chainhook.is_streaming_blocks; + const streamed = + ENV.ORDHOOK_INGESTION_MODE === 'default' && payload.chainhook.is_streaming_blocks; for (const event of payload.rollback) { logger.info(`PgStore rollback block ${event.block_identifier.index}`); const time = stopwatch(); @@ -184,36 +185,34 @@ export class PgStore extends BasePgStore { ...l, timestamp: sql`TO_TIMESTAMP(${l.timestamp})`, })); + // Insert locations, figure out moved inscriptions, insert inscription transfers. for await (const batch of batchIterate(entries, INSERT_BATCH_SIZE)) await sql` - INSERT INTO locations ${sql(batch)} - ON CONFLICT (ordinal_number, block_height, tx_index) DO NOTHING - `; - // Insert block transfers. - let block_transfer_index = 0; - const transferEntries = []; - for (const transfer of cache.locations) { - const transferred = await sql<{ genesis_id: string; number: string }[]>` - SELECT genesis_id, number FROM inscriptions - WHERE ordinal_number = ${transfer.ordinal_number} AND ( - block_height < ${transfer.block_height} - OR (block_height = ${transfer.block_height} AND tx_index < ${transfer.tx_index}) + WITH location_inserts AS ( + INSERT INTO locations ${sql(batch)} + ON CONFLICT (ordinal_number, block_height, tx_index) DO NOTHING + RETURNING ordinal_number, block_height, block_hash, tx_index + ), + prev_transfer_index AS ( + SELECT MAX(block_transfer_index) AS max + FROM inscription_transfers + WHERE block_height = (SELECT block_height FROM location_inserts LIMIT 1) + ), + moved_inscriptions AS ( + SELECT + i.genesis_id, i.number, i.ordinal_number, li.block_height, li.block_hash, li.tx_index, + ( + ROW_NUMBER() OVER (ORDER BY li.block_height ASC, li.tx_index ASC) + (SELECT COALESCE(max, -1) FROM prev_transfer_index) + ) AS block_transfer_index + FROM inscriptions AS i + INNER JOIN location_inserts AS li ON li.ordinal_number = i.ordinal_number + WHERE + i.block_height < li.block_height + OR (i.block_height = li.block_height AND i.tx_index < li.tx_index) ) - `; - for (const inscription of transferred) - transferEntries.push({ - genesis_id: inscription.genesis_id, - number: inscription.number, - ordinal_number: transfer.ordinal_number, - block_height: transfer.block_height, - block_hash: transfer.block_hash, - tx_index: transfer.tx_index, - block_transfer_index: block_transfer_index++, - }); - } - for await (const batch of batchIterate(transferEntries, INSERT_BATCH_SIZE)) - await sql` - INSERT INTO inscription_transfers ${sql(batch)} + INSERT INTO inscription_transfers + (genesis_id, number, ordinal_number, block_height, block_hash, tx_index, block_transfer_index) + (SELECT * FROM moved_inscriptions) ON CONFLICT (block_height, block_transfer_index) DO NOTHING `; } @@ -228,18 +227,20 @@ export class PgStore extends BasePgStore { if (cache.currentLocations.size) { // Deduct 
counts from previous owners const moved_sats = [...cache.currentLocations.keys()]; - const prevOwners = await sql<{ address: string; count: number }[]>` - SELECT address, COUNT(*) AS count - FROM current_locations - WHERE ordinal_number IN ${sql(moved_sats)} - GROUP BY address - `; - for (const owner of prevOwners) - await sql` - UPDATE counts_by_address - SET count = count - ${owner.count} - WHERE address = ${owner.address} + for await (const batch of batchIterate(moved_sats, INSERT_BATCH_SIZE)) { + const prevOwners = await sql<{ address: string; count: number }[]>` + SELECT address, COUNT(*) AS count + FROM current_locations + WHERE ordinal_number IN ${sql(batch)} + GROUP BY address `; + for (const owner of prevOwners) + await sql` + UPDATE counts_by_address + SET count = count - ${owner.count} + WHERE address = ${owner.address} + `; + } // Insert locations const entries = [...cache.currentLocations.values()]; for await (const batch of batchIterate(entries, INSERT_BATCH_SIZE)) @@ -255,24 +256,25 @@ export class PgStore extends BasePgStore { EXCLUDED.tx_index > current_locations.tx_index) `; // Update owner counts - await sql` - WITH new_owners AS ( - SELECT address, COUNT(*) AS count - FROM current_locations - WHERE ordinal_number IN ${sql(moved_sats)} - GROUP BY address - ) - INSERT INTO counts_by_address (address, count) - (SELECT address, count FROM new_owners) - ON CONFLICT (address) DO UPDATE SET count = counts_by_address.count + EXCLUDED.count - `; - if (streamed) - for await (const batch of batchIterate(moved_sats, INSERT_BATCH_SIZE)) + for await (const batch of batchIterate(moved_sats, INSERT_BATCH_SIZE)) { + await sql` + WITH new_owners AS ( + SELECT address, COUNT(*) AS count + FROM current_locations + WHERE ordinal_number IN ${sql(batch)} + GROUP BY address + ) + INSERT INTO counts_by_address (address, count) + (SELECT address, count FROM new_owners) + ON CONFLICT (address) DO UPDATE SET count = counts_by_address.count + EXCLUDED.count + `; + if (streamed) await sql` UPDATE inscriptions SET updated_at = NOW() WHERE ordinal_number IN ${sql(batch)} `; + } } await this.counts.applyCounts(sql, cache); } diff --git a/src/pg/types.ts b/src/pg/types.ts index 46e66e1e..82eed73c 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -15,7 +15,7 @@ export type DbInscriptionInsert = { classic_number: number; block_height: number; tx_index: number; - address: string | null; + address: string; mime_type: string; content_type: string; content_length: number; @@ -34,7 +34,7 @@ export type DbLocationInsert = { block_hash: string; tx_index: number; tx_id: string; - address: string | null; + address: string; output: string; offset: PgNumeric | null; prev_output: string | null; @@ -48,22 +48,7 @@ export type DbCurrentLocationInsert = { ordinal_number: PgNumeric; block_height: number; tx_index: number; - address: string | null; -}; - -type AbstractLocationData = { - block_height: number; - block_hash: string; - tx_id: string; - tx_index: number; - address: string | null; - output: string; - offset: PgNumeric | null; - prev_output: string | null; - prev_offset: PgNumeric | null; - value: PgNumeric | null; - transfer_type: DbLocationTransferType; - block_transfer_index: number | null; + address: string; }; /** @@ -116,7 +101,7 @@ export type DbLocation = { block_hash: string; tx_id: string; tx_index: number; - address: string | null; + address: string; output: string; offset: string | null; prev_output: string | null; @@ -131,7 +116,7 @@ export type DbInscriptionLocationChange = { from_block_height: 
string; from_block_hash: string; from_tx_id: string; - from_address: string | null; + from_address: string; from_output: string; from_offset: string | null; from_value: string | null; @@ -139,7 +124,7 @@ export type DbInscriptionLocationChange = { to_block_height: string; to_block_hash: string; to_tx_id: string; - to_address: string | null; + to_address: string; to_output: string; to_offset: string | null; to_value: string | null; diff --git a/tests/api/inscriptions.test.ts b/tests/api/inscriptions.test.ts index dc541cc3..d52d4ed3 100644 --- a/tests/api/inscriptions.test.ts +++ b/tests/api/inscriptions.test.ts @@ -3031,7 +3031,7 @@ describe('/inscriptions', () => { new TestChainhookPayloadBuilder() .rollback() .block({ - height: 775618, + height: 778575, hash: '000000000000000000032ef6c45a69c0496456b3cae84ee9f2899f636d03c5ac', timestamp: 1675312161, }) diff --git a/tests/ordhook/server.test.ts b/tests/ordhook/server.test.ts index 4f8a4d23..17cf8a70 100644 --- a/tests/ordhook/server.test.ts +++ b/tests/ordhook/server.test.ts @@ -126,7 +126,7 @@ describe('EventServer', () => { const payload2 = new TestChainhookPayloadBuilder() .rollback() .block({ - height: 107, + height: 767430, hash: '0x163de66dc9c0949905bfe8e148bde04600223cf88d19f26fdbeba1d6e6fa0f88', timestamp: 1676913207, }) diff --git a/util/debug-server.ts b/util/debug-server.ts index 3537408a..2ea49d17 100644 --- a/util/debug-server.ts +++ b/util/debug-server.ts @@ -23,17 +23,18 @@ import * as path from 'path'; const serverOpts: ServerOptions = { hostname: ENV.API_HOST, port: ENV.EVENT_PORT, - auth_token: ENV.ORDHOOK_NODE_AUTH_TOKEN, + auth_token: ENV.ORDHOOK_NODE_AUTH_TOKEN ?? '', external_base_url: `http://${ENV.EXTERNAL_HOSTNAME}`, wait_for_chainhook_node: false, validate_chainhook_payloads: false, + validate_token_authorization: false, body_limit: ENV.EVENT_SERVER_BODY_LIMIT, node_type: 'ordhook', }; const ordhookOpts: ChainhookNodeOptions = { base_url: ORDHOOK_BASE_PATH, }; -const dirPath = path.join(__dirname, '../../tmp/debug-server/'); +const dirPath = path.join(__dirname, '../tmp/debug-server/'); fs.mkdirSync(dirPath, { recursive: true }); logger.info(`DebugServer saving outputs to ${dirPath}`); @@ -41,7 +42,7 @@ const server = new ChainhookEventObserver(serverOpts, ordhookOpts); server .start([], async (uuid: string, payload: Payload) => { logger.info(`DebugServer received payload from predicate ${uuid}`); - const filePath = path.join(dirPath, `${new Date().getTime()}.txt`); + const filePath = path.join(dirPath, `${payload.apply[0].block_identifier.index}.txt`); fs.writeFileSync(filePath, JSON.stringify(payload, null, 2)); return Promise.resolve(); }) From d6acea7457d55dc83f5631a194e8b82c2ca5ce02 Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Tue, 11 Jun 2024 14:51:35 +0000 Subject: [PATCH 10/10] chore(release): 4.0.0-beta.2 [skip ci] ## [4.0.0-beta.2](https://github.com/hirosystems/ordinals-api/compare/v4.0.0-beta.1...v4.0.0-beta.2) (2024-06-11) ### Bug Fixes * ordhook ingestion ([#356](https://github.com/hirosystems/ordinals-api/issues/356)) ([dfc003e](https://github.com/hirosystems/ordinals-api/commit/dfc003ee65198a35bac1e0fc723068bde90c63c4)) --- CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 214fadc6..5e9480ad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,10 @@ +## [4.0.0-beta.2](https://github.com/hirosystems/ordinals-api/compare/v4.0.0-beta.1...v4.0.0-beta.2) (2024-06-11) + + +### Bug Fixes + +* ordhook ingestion 
([#356](https://github.com/hirosystems/ordinals-api/issues/356)) ([dfc003e](https://github.com/hirosystems/ordinals-api/commit/dfc003ee65198a35bac1e0fc723068bde90c63c4)) + ## [4.0.0-beta.1](https://github.com/hirosystems/ordinals-api/compare/v3.1.0...v4.0.0-beta.1) (2024-04-26)
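
---

For readers consuming the API surface touched by the patches above, a minimal client-side sketch follows. It is not part of any patch in this series: the base URL, the trimmed `InscriptionResponse` shape, and the use of Node 18+'s global `fetch` are assumptions. The endpoint path and the new `parent`/`metadata` fields are taken from the tests and schema changes in this series, and standard ETag semantics (a `304` on a matching `If-None-Match`) are assumed for the caching behavior the ETag tests exercise.

```typescript
// Consumer-side sketch only; assumes an Ordinals API instance at API_BASE
// and Node 18+ (global fetch). Field names mirror the schema changes above;
// everything else is illustrative.
const API_BASE = 'http://localhost:3000'; // assumption: locally running API

interface InscriptionResponse {
  id: string;
  number: number;
  parent: string | null; // nullable parent inscription id (added in this series)
  metadata: unknown;     // decoded JSON metadata, or null when absent
  [key: string]: unknown; // remaining response fields omitted for brevity
}

async function getInscription(
  id: string,
  etag?: string
): Promise<InscriptionResponse | null> {
  const res = await fetch(`${API_BASE}/ordinals/v1/inscriptions/${id}`, {
    // Reuse a previously seen ETag so an unchanged response can come back as 304.
    headers: etag ? { 'If-None-Match': etag } : {},
  });
  if (res.status === 304) return null; // cached copy is still valid
  if (!res.ok) throw new Error(`Ordinals API returned ${res.status}`);
  return (await res.json()) as InscriptionResponse;
}

// Example usage (inscription id taken from the tests above):
// const inscription = await getInscription(
//   'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0'
// );
// console.log(inscription?.parent, inscription?.metadata);
```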