From 5d1a0c774bbd0a10c4851f9609b3f39a928bd026 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Thu, 6 Jun 2024 12:19:24 +0200
Subject: [PATCH 01/62] add read scheduled rewards

---
 README.md                               |  4 ++++
 migrations/002.do.scheduled-rewards.sql |  6 ++++++
 stats/lib/handler.js                    | 12 ++++++++++++
 stats/lib/stats-fetchers.js             | 18 ++++++++++++++++++
 stats/lib/typings.d.ts                  |  1 +
 5 files changed, 41 insertions(+)
 create mode 100644 migrations/002.do.scheduled-rewards.sql

diff --git a/README.md b/README.md
index 9c56722..db37a58 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,10 @@ Base URL: http://stats.filspark.com/
 
   http://stats.filspark.com/participants/change-rates
 
+- `GET /participants/scheduled-rewards/?address=<address>&from=<day>&to=<day>`
+
+  http://stats.filspark.com/participants/scheduled-rewards/
+
 - `GET /miners/retrieval-success-rate/summary?from=<day>&to=<day>`
 
   http://stats.filspark.com/miners/retrieval-success-rate/summary
diff --git a/migrations/002.do.scheduled-rewards.sql b/migrations/002.do.scheduled-rewards.sql
new file mode 100644
index 0000000..be740f9
--- /dev/null
+++ b/migrations/002.do.scheduled-rewards.sql
@@ -0,0 +1,6 @@
+CREATE TABLE daily_scheduled_rewards (
+  day DATE NOT NULL,
+  address TEXT NOT NULL,
+  scheduled_rewards NUMERIC NOT NULL,
+  PRIMARY KEY (day, address)
+);
diff --git a/stats/lib/handler.js b/stats/lib/handler.js
index 16a1515..e99a130 100644
--- a/stats/lib/handler.js
+++ b/stats/lib/handler.js
@@ -7,6 +7,7 @@ import {
   fetchMinersRSRSummary,
   fetchMonthlyParticipants,
   fetchParticipantChangeRates,
+  fetchParticipantScheduledRewards,
   fetchRetrievalSuccessRate
 } from './stats-fetchers.js'
 
@@ -70,6 +71,17 @@ const handler = async (req, res, pgPool) => {
       res,
       pgPool,
       fetchParticipantChangeRates)
+  } else if (
+    req.method === 'GET'
+    && segs[0] === 'participants'
+    && segs[1] === 'scheduled-rewards'
+  ) {
+    await getStatsWithFilterAndCaching(
+      pathname,
+      searchParams,
+      res,
+      pgPool,
+      fetchParticipantScheduledRewards)
   } else if (req.method === 'GET' && segs.join('/') === 'miners/retrieval-success-rate/summary') {
     await getStatsWithFilterAndCaching(
       pathname,
diff --git a/stats/lib/stats-fetchers.js b/stats/lib/stats-fetchers.js
index 5cbd976..90b3eda 100644
--- a/stats/lib/stats-fetchers.js
+++ b/stats/lib/stats-fetchers.js
@@ -112,6 +112,24 @@ export const fetchParticipantChangeRates = async (pgPool, filter) => {
   return stats
 }
 
+/**
+ *
+ * @param  {import('pg').Pool} pgPool
+ * @param  {import('./typings').Filter} filter
+ */
+export const fetchParticipantScheduledRewards = async (pgPool, filter) => {
+  const { rows } = await pgPool.query(`
+    SELECT scheduled_rewards
+    FROM daily_scheduled_rewards
+    WHERE address = $1 AND day >= $2 AND day <= $3
+  `, [
+    filter.address,
+    filter.from,
+    filter.to
+  ])
+  return rows
+}
+
 /**
  * @param {import('pg').Pool} pgPool
  * @param {import('./typings').Filter} filter
diff --git a/stats/lib/typings.d.ts b/stats/lib/typings.d.ts
index a0091b8..beb82ad 100644
--- a/stats/lib/typings.d.ts
+++ b/stats/lib/typings.d.ts
@@ -7,4 +7,5 @@ export interface Logger {
 export interface Filter {
   from: string;
   to: string;
+  address?: string;
 }

From ea4681943191bc09f2fffffa3445b05fe464dd95 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Thu, 6 Jun 2024 12:21:12 +0200
Subject: [PATCH 02/62] fix lint

---
 stats/lib/handler.js        | 6 +++---
 stats/lib/stats-fetchers.js | 5 ++---
 2 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/stats/lib/handler.js b/stats/lib/handler.js
index e99a130..5ecb64c 100644
--- a/stats/lib/handler.js
+++ b/stats/lib/handler.js
@@ -72,9 +72,9 @@ const handler = async (req, res, pgPool) => {
       pgPool,
       fetchParticipantChangeRates)
   } else if (
-    req.method === 'GET'
-    && segs[0] === 'participants'
-    && segs[1] === 'scheduled-rewards'
+    req.method === 'GET' &&
+    segs[0] === 'participants' &&
+    segs[1] === 'scheduled-rewards'
   ) {
     await getStatsWithFilterAndCaching(
       pathname,
diff --git a/stats/lib/stats-fetchers.js b/stats/lib/stats-fetchers.js
index 90b3eda..5c01d4e 100644
--- a/stats/lib/stats-fetchers.js
+++ b/stats/lib/stats-fetchers.js
@@ -113,9 +113,8 @@ export const fetchParticipantChangeRates = async (pgPool, filter) => {
 }
 
 /**
- *
- * @param  {import('pg').Pool} pgPool
- * @param  {import('./typings').Filter} filter
+ * @param {import('pg').Pool} pgPool
+ * @param {import('./typings').Filter} filter
  */
 export const fetchParticipantScheduledRewards = async (pgPool, filter) => {
   const { rows } = await pgPool.query(`
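Patches 01 and 02 together expose the new table through `GET /participants/scheduled-rewards/`. A minimal sketch of how a client could exercise the endpoint once both patches are deployed; the address and date range below are made-up placeholders, and the response shape follows `fetchParticipantScheduledRewards` above (an array of rows, each carrying a `scheduled_rewards` column):

    // sketch: query the endpoint added in patch 01 (Node 18+, global fetch)
    const address = '0x0000000000000000000000000000000000000000' // placeholder
    const url = 'http://stats.filspark.com/participants/scheduled-rewards/' +
      `?address=${address}&from=2024-06-01&to=2024-06-07`
    const res = await fetch(url)
    if (!res.ok) throw new Error(`unexpected status: ${res.status}`)
    console.log(await res.json()) // e.g. [{ scheduled_rewards: '100' }, ...]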
From 5baa7dcd2b24cd7cc0f91bfb5018368aa5e77e5f Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Fri, 7 Jun 2024 15:30:20 +0200
Subject: [PATCH 03/62] fix lint

---
 stats/lib/handler.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/stats/lib/handler.js b/stats/lib/handler.js
index a72e5f0..724543a 100644
--- a/stats/lib/handler.js
+++ b/stats/lib/handler.js
@@ -53,7 +53,7 @@ const handler = async (req, res, pgPoolEvaluateDb, pgPoolStatsDb) => {
     'participants/monthly': fetchMonthlyParticipants,
     'participants/change-rates': fetchParticipantChangeRates,
     'participants/scheduled-rewards': fetchParticipantScheduledRewards,
-    'miners/retrieval-success-rate/summary': fetchMinersRSRSummary,
+    'miners/retrieval-success-rate/summary': fetchMinersRSRSummary
   }
 
   const fetchStatsFn = fetchFunctionMap[segs.join('/')]

From cfd9b8ce130eddcc92c378d1c0811780c5c2c7a7 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Fri, 7 Jun 2024 15:31:38 +0200
Subject: [PATCH 04/62] fix migration number

---
 ...{002.do.scheduled-rewards.sql => 003.do.scheduled-rewards.sql} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename migrations/{002.do.scheduled-rewards.sql => 003.do.scheduled-rewards.sql} (100%)

diff --git a/migrations/002.do.scheduled-rewards.sql b/migrations/003.do.scheduled-rewards.sql
similarity index 100%
rename from migrations/002.do.scheduled-rewards.sql
rename to migrations/003.do.scheduled-rewards.sql

From 6647e52d9c254a3690baab8fe2080f886c88a32a Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Fri, 7 Jun 2024 15:35:03 +0200
Subject: [PATCH 05/62] wrap observation functions

---
 observer/bin/dry-run.js        |  4 ++--
 observer/bin/spark-observer.js |  6 +++---
 observer/lib/observer.js       | 12 +++++++++++-
 3 files changed, 16 insertions(+), 6 deletions(-)

diff --git a/observer/bin/dry-run.js b/observer/bin/dry-run.js
index 2617603..14704bc 100644
--- a/observer/bin/dry-run.js
+++ b/observer/bin/dry-run.js
@@ -2,7 +2,7 @@ import * as SparkImpactEvaluator from '@filecoin-station/spark-impact-evaluator'
 import { ethers } from 'ethers'
 
 import { RPC_URL, rpcHeaders } from '../lib/config.js'
-import { observeTransferEvents } from '../lib/observer.js'
+import { observe } from '../lib/observer.js'
 import { getPgPool } from '../lib/db.js'
 
 /** @type {pg.Pool} */
@@ -16,6 +16,6 @@ const ieContract = new ethers.Contract(SparkImpactEvaluator.ADDRESS, SparkImpactEvaluator.ABI, provider)
 
 await pgPool.query('DELETE FROM daily_reward_transfers')
 
-await observeTransferEvents(pgPool, ieContract, provider)
+await observe(pgPool, ieContract, provider)
 
 await pgPool.end()
diff --git a/observer/bin/spark-observer.js b/observer/bin/spark-observer.js
index 66beec1..925f158 100644
--- a/observer/bin/spark-observer.js
+++ b/observer/bin/spark-observer.js
@@ -5,7 +5,7 @@ import timers from 'node:timers/promises'
 
 import { RPC_URL, rpcHeaders, OBSERVATION_INTERVAL_MS } from '../lib/config.js'
 import { getPgPool } from '../lib/db.js'
-import { observeTransferEvents } from '../lib/observer.js'
+import { observe } from '../lib/observer.js'
 
 const pgPool = await getPgPool()
 
@@ -15,10 +15,10 @@ const provider = new ethers.JsonRpcProvider(fetchRequest, null, { polling: true })
 
 const ieContract = new ethers.Contract(SparkImpactEvaluator.ADDRESS, SparkImpactEvaluator.ABI, provider)
 
-// Listen for Transfer events from the IE contract
+// Listen for events from the IE contract
 while (true) {
   try {
-    await observeTransferEvents(pgPool, ieContract, provider)
+    await observe(pgPool, ieContract, provider)
   } catch (e) {
     console.error(e)
     Sentry.captureException(e)
diff --git a/observer/lib/observer.js b/observer/lib/observer.js
index c1ef935..2de2d77 100644
--- a/observer/lib/observer.js
+++ b/observer/lib/observer.js
@@ -1,12 +1,22 @@
 import { updateDailyTransferStats } from './platform-stats.js'
 
+/**
+ * Observe all events
+ * @param {import('pg').Pool} pgPool
+ * @param {import('ethers').Contract} ieContract
+ * @param {import('ethers').Provider} provider
+ */
+export const observe = async (pgPool, ieContract, provider) => {
+  await observeTransferEvents(pgPool, ieContract, provider)
+}
+
 /**
  * Observe the transfer events on the Filecoin blockchain
  * @param {import('pg').Pool} pgPool
  * @param {import('ethers').Contract} ieContract
 * @param {import('ethers').Provider} provider
 */
-export const observeTransferEvents = async (pgPool, ieContract, provider) => {
+const observeTransferEvents = async (pgPool, ieContract, provider) => {
   const { rows } = await pgPool.query(
     'SELECT MAX(last_checked_block) FROM daily_reward_transfers'
   )
From 8e047e8d23f62c6a7c119bef453ff5d27a28dce7 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Fri, 7 Jun 2024 15:47:06 +0200
Subject: [PATCH 06/62] wip

---
 observer/bin/spark-observer.js    |  6 ++++--
 observer/lib/observer.js          | 26 +++++++++++++++++++++++++-
 observer/lib/scheduled-rewards.js | 14 ++++++++++++++
 3 files changed, 43 insertions(+), 3 deletions(-)
 create mode 100644 observer/lib/scheduled-rewards.js

diff --git a/observer/bin/spark-observer.js b/observer/bin/spark-observer.js
index 925f158..6f5ae77 100644
--- a/observer/bin/spark-observer.js
+++ b/observer/bin/spark-observer.js
@@ -15,13 +15,15 @@ const provider = new ethers.JsonRpcProvider(fetchRequest, null, { polling: true })
 
 const ieContract = new ethers.Contract(SparkImpactEvaluator.ADDRESS, SparkImpactEvaluator.ABI, provider)
 
-// Listen for events from the IE contract
 while (true) {
+  const start = new Date()
   try {
     await observe(pgPool, ieContract, provider)
   } catch (e) {
     console.error(e)
     Sentry.captureException(e)
   }
-  await timers.setTimeout(OBSERVATION_INTERVAL_MS)
+  const dt = new Date() - start
+  console.log(`Observation took ${dt}ms`)
+  await timers.setTimeout(OBSERVATION_INTERVAL_MS - dt)
 }
diff --git a/observer/lib/observer.js b/observer/lib/observer.js
index 2de2d77..8d063b2 100644
--- a/observer/lib/observer.js
+++ b/observer/lib/observer.js
@@ -1,4 +1,5 @@
 import { updateDailyTransferStats } from './platform-stats.js'
+import { updateDailyScheduledRewardsStats } from './scheduled-rewards.js'
 
 /**
  * Observe all events
@@ -7,7 +8,10 @@ import { updateDailyScheduledRewardsStats } from './scheduled-rewards.js'
  * @param {import('ethers').Provider} provider
  */
 export const observe = async (pgPool, ieContract, provider) => {
-  await observeTransferEvents(pgPool, ieContract, provider)
+  await Promise.all([
+    observeTransferEvents(pgPool, ieContract, provider),
+    observeScheduledRewards(pgPool, ieContract, provider)
+  ])
 }
 
 /**
@@ -47,3 +51,23 @@ const observeTransferEvents = async (pgPool, ieContract, provider) => {
     await updateDailyTransferStats(pgPool, transferEvent, currentBlockNumber)
   }
 }
+
+/**
+ * Observe scheduled rewards on the Filecoin blockchain
+ * @param {import('pg').Pool} pgPool
+ * @param {import('ethers').Contract} ieContract
+ * @param {import('ethers').Provider} provider
+ */
+const observeScheduledRewards = async (pgPool, ieContract, provider) => {
+  console.log('Querying scheduled rewards from impact evaluator')
+  const participants = [] // TODO
+  for (const address of participants) {
+    const amount = await ieContract.scheduledRewards(address)
+    console.log('Scheduled rewards for', address, amount)
+    await updateDailyScheduledRewardsStats(
+      pgPool,
+      { address, amount }
+    )
+  }
+}
+
diff --git a/observer/lib/scheduled-rewards.js b/observer/lib/scheduled-rewards.js
new file mode 100644
index 0000000..7a089ab
--- /dev/null
+++ b/observer/lib/scheduled-rewards.js
@@ -0,0 +1,14 @@
+/**
+ * @param {import('pg').Client} pgClient
+ * @param {Object} participant
+ * @param {string} participant.address
+ * @param {number} participant.amount
+ */
+export const updateDailyScheduledRewardsStats = async (pgClient, participant) => {
+  await pgClient.query(`
+    INSERT INTO daily_scheduled_rewards (day, address, amount)
+    VALUES (now(), $1, $2)
+    ON CONFLICT (day, address) DO UPDATE SET
+      amount = EXCLUDED.amount
+  `, [participant.address, participant.amount])
+}

From c7b25b3b6979dc57815d9ce923e94e27a0f35eeb Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Fri, 7 Jun 2024 15:53:15 +0200
Subject: [PATCH 07/62] simplify, dry run works

---
 observer/lib/observer.js          | 36 ++++++++++++++++++-----------
 observer/lib/scheduled-rewards.js | 14 ------------
 2 files changed, 23 insertions(+), 27 deletions(-)
 delete mode 100644 observer/lib/scheduled-rewards.js

diff --git a/observer/lib/observer.js b/observer/lib/observer.js
index 8d063b2..d9557f8 100644
--- a/observer/lib/observer.js
+++ b/observer/lib/observer.js
@@ -1,8 +1,6 @@
 import { updateDailyTransferStats } from './platform-stats.js'
-import { updateDailyScheduledRewardsStats } from './scheduled-rewards.js'
 
 /**
- * Observe all events
  * @param {import('pg').Pool} pgPool
  * @param {import('ethers').Contract} ieContract
  * @param {import('ethers').Provider} provider
@@ -10,7 +8,7 @@ import { updateDailyScheduledRewardsStats } from './scheduled-rewards.js'
 export const observe = async (pgPool, ieContract, provider) => {
   await Promise.all([
     observeTransferEvents(pgPool, ieContract, provider),
-    observeScheduledRewards(pgPool, ieContract, provider)
+    observeScheduledRewards(pgPool, ieContract)
   ])
 }
 
@@ -54,18 +52,30 @@ const observeTransferEvents = async (pgPool, ieContract, provider) => {
  * Observe scheduled rewards on the Filecoin blockchain
  * @param {import('pg').Pool} pgPool
  * @param {import('ethers').Contract} ieContract
- * @param {import('ethers').Provider} provider
  */
-const observeScheduledRewards = async (pgPool, ieContract, provider) => {
+const observeScheduledRewards = async (pgPool, ieContract) => {
   console.log('Querying scheduled rewards from impact evaluator')
-  const participants = [] // TODO
-  for (const address of participants) {
-    const amount = await ieContract.scheduledRewards(address)
-    console.log('Scheduled rewards for', address, amount)
-    await updateDailyScheduledRewardsStats(
-      pgPool,
-      { address, amount }
-    )
+  for (let i = 0; ; i++) {
+    let address
+    try {
+      address = await ieContract.readyForTransfer(i)
+    } catch (err) {
+      break
+    }
+    let scheduledRewards
+    try {
+      scheduledRewards = await ieContract.rewardsScheduledFor(address)
+    } catch (err) {
+      console.error('Error querying scheduled rewards for', address, { cause: err })
+      continue
+    }
+    console.log('Scheduled rewards for', address, scheduledRewards)
+    await pgPool.query(`
+      INSERT INTO daily_scheduled_rewards (day, address, scheduled_rewards)
+      VALUES (now(), $1, $2)
+      ON CONFLICT (day, address) DO UPDATE SET
+        scheduled_rewards = EXCLUDED.scheduled_rewards
+    `, [address, scheduledRewards])
   }
 }
 
diff --git a/observer/lib/scheduled-rewards.js b/observer/lib/scheduled-rewards.js
deleted file mode 100644
index 7a089ab..0000000
--- a/observer/lib/scheduled-rewards.js
+++ /dev/null
@@ -1,14 +0,0 @@
-/**
- * @param {import('pg').Client} pgClient
- * @param {Object} participant
- * @param {string} participant.address
- * @param {number} participant.amount
- */
-export const updateDailyScheduledRewardsStats = async (pgClient, participant) => {
-  await pgClient.query(`
-    INSERT INTO daily_scheduled_rewards (day, address, amount)
-    VALUES (now(), $1, $2)
-    ON CONFLICT (day, address) DO UPDATE SET
-      amount = EXCLUDED.amount
-  `, [participant.address, participant.amount])
-}

From a5c0187b6d7180ba3f515343653f2a95aac9a303 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Fri, 7 Jun 2024 17:30:01 +0200
Subject: [PATCH 08/62] fix lint

---
 observer/lib/observer.js | 1 -
 1 file changed, 1 deletion(-)

diff --git a/observer/lib/observer.js b/observer/lib/observer.js
index d9557f8..072ead4 100644
--- a/observer/lib/observer.js
+++ b/observer/lib/observer.js
@@ -80,4 +80,3 @@ const observeScheduledRewards = async (pgPool, ieContract) => {
     `, [address, scheduledRewards])
   }
 }
-
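The upsert introduced in patch 07 is what keeps `daily_scheduled_rewards` at one row per participant and day: the first observation of a day inserts the row, and every later observation that day overwrites `scheduled_rewards`, because the table's `(day, address)` primary key from the migration is the conflict target. A standalone sketch of the same statement, with made-up parameter values:

    // sketch: last-write-wins upsert keyed on (day, address)
    await pgPool.query(`
      INSERT INTO daily_scheduled_rewards (day, address, scheduled_rewards)
      VALUES (now(), $1, $2)
      ON CONFLICT (day, address) DO UPDATE SET
        scheduled_rewards = EXCLUDED.scheduled_rewards
    `, ['0xexample', '100'])
    // running this twice on the same day leaves a single row holding the latest value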
From 8e047e8d23f62c6a7c119bef453ff5d27a28dce7 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 15:11:55 +0200
Subject: [PATCH 09/62] split up loops

---
 observer/bin/spark-observer.js | 51 ++++++++++++++++++++++++----------
 observer/lib/config.js         |  7 ++---
 2 files changed, 39 insertions(+), 19 deletions(-)

diff --git a/observer/bin/spark-observer.js b/observer/bin/spark-observer.js
index 6f5ae77..a854c7b 100644
--- a/observer/bin/spark-observer.js
+++ b/observer/bin/spark-observer.js
@@ -3,9 +3,12 @@ import { ethers } from 'ethers'
 import * as Sentry from '@sentry/node'
 import timers from 'node:timers/promises'
 
-import { RPC_URL, rpcHeaders, OBSERVATION_INTERVAL_MS } from '../lib/config.js'
+import { RPC_URL, rpcHeaders } from '../lib/config.js'
 import { getPgPool } from '../lib/db.js'
-import { observe } from '../lib/observer.js'
+import {
+  observeTransferEvents,
+  observeScheduledRewards
+} from '../lib/observer.js'
 
 const pgPool = await getPgPool()
 
@@ -15,15 +18,35 @@ const provider = new ethers.JsonRpcProvider(fetchRequest, null, { polling: true })
 
 const ieContract = new ethers.Contract(SparkImpactEvaluator.ADDRESS, SparkImpactEvaluator.ABI, provider)
 
-while (true) {
-  const start = new Date()
-  try {
-    await observe(pgPool, ieContract, provider)
-  } catch (e) {
-    console.error(e)
-    Sentry.captureException(e)
-  }
-  const dt = new Date() - start
-  console.log(`Observation took ${dt}ms`)
-  await timers.setTimeout(OBSERVATION_INTERVAL_MS - dt)
-}
+const ONE_HOUR = 60 * 60 * 1000
+
+await Promise.all([
+  (async () => {
+    while (true) {
+      const start = new Date()
+      try {
+        await observeTransferEvents(pgPool, ieContract, provider)
+      } catch (e) {
+        console.error(e)
+        Sentry.captureException(e)
+      }
+      const dt = new Date() - start
+      console.log(`Observing Transfer events took ${dt}ms`)
+      await timers.setTimeout(ONE_HOUR - dt)
+    }
+  })(),
+  (async () => {
+    while (true) {
+      const start = new Date()
+      try {
+        await observeScheduledRewards(pgPool, ieContract, provider)
+      } catch (e) {
+        console.error(e)
+        Sentry.captureException(e)
+      }
+      const dt = new Date() - start
+      console.log(`Observing scheduled rewards took ${dt}ms`)
+      await timers.setTimeout((24 * ONE_HOUR) - dt)
+    }
+  })()
+])
diff --git a/observer/lib/config.js b/observer/lib/config.js
index 4bfb0f0..89c1fde 100644
--- a/observer/lib/config.js
+++ b/observer/lib/config.js
@@ -5,9 +5,7 @@ const {
   RPC_URLS = 'https://api.node.glif.io/rpc/v0',
   GLIF_TOKEN,
   // DATABASE_URL points to `spark_stats` database managed by this monorepo
-  DATABASE_URL = 'postgres://localhost:5432/spark_stats',
-  // Sleep one hour between observations
-  OBSERVATION_INTERVAL_MS = 1000 * 60 * 60
+  DATABASE_URL = 'postgres://localhost:5432/spark_stats'
 } = process.env
 
 const rpcUrls = RPC_URLS.split(',')
@@ -22,6 +20,5 @@ if (RPC_URL.includes('glif')) {
 export {
   RPC_URL,
   DATABASE_URL,
-  rpcHeaders,
-  OBSERVATION_INTERVAL_MS
+  rpcHeaders
 }

From 716e5891f202ddc9036c6db8ed6b60e5561b8d7d Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 15:12:14 +0200
Subject: [PATCH 10/62] get participants from db

---
 observer/lib/observer.js | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/observer/lib/observer.js b/observer/lib/observer.js
index 072ead4..b832c0e 100644
--- a/observer/lib/observer.js
+++ b/observer/lib/observer.js
@@ -1,4 +1,5 @@
 import { updateDailyTransferStats } from './platform-stats.js'
+import * as Sentry from '@sentry/node'
 
 /**
  * @param {import('pg').Pool} pgPool
@@ -57,13 +58,10 @@ const observeTransferEvents = async (pgPool, ieContract, provider) => {
  */
 const observeScheduledRewards = async (pgPool, ieContract) => {
   console.log('Querying scheduled rewards from impact evaluator')
-  for (let i = 0; ; i++) {
-    let address
-    try {
-      address = await ieContract.readyForTransfer(i)
-    } catch (err) {
-      break
-    }
+  const rows = await pgPool.query(`
+    SELECT participant_address FROM participants
+  `)
+  for (const { participant_address: address } of rows) {
     let scheduledRewards
     try {
       scheduledRewards = await ieContract.rewardsScheduledFor(address)

From fa466149b244e159e424c71f9d21719d27b9f2b9 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 15:12:20 +0200
Subject: [PATCH 11/62] sentry

---
 observer/lib/observer.js | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/observer/lib/observer.js b/observer/lib/observer.js
index b832c0e..41d432a 100644
--- a/observer/lib/observer.js
+++ b/observer/lib/observer.js
@@ -66,7 +66,12 @@ const observeScheduledRewards = async (pgPool, ieContract) => {
     try {
       scheduledRewards = await ieContract.rewardsScheduledFor(address)
     } catch (err) {
-      console.error('Error querying scheduled rewards for', address, { cause: err })
+      Sentry.captureException(err)
+      console.error(
+        'Error querying scheduled rewards for',
+        address,
+        { cause: err }
+      )
       continue
     }
     console.log('Scheduled rewards for', address, scheduledRewards)

From 109c4f904fc2094de6357f35d15c1ee6cee5e508 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 15:16:44 +0200
Subject: [PATCH 12/62] read from last 3 days of participants

---
 observer/lib/observer.js | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/observer/lib/observer.js b/observer/lib/observer.js
index 41d432a..3b215df 100644
--- a/observer/lib/observer.js
+++ b/observer/lib/observer.js
@@ -59,7 +59,10 @@ const observeScheduledRewards = async (pgPool, ieContract) => {
   console.log('Querying scheduled rewards from impact evaluator')
   const rows = await pgPool.query(`
-    SELECT participant_address FROM participants
+    SELECT participant_address
+    FROM participants
+    JOIN daily_participants USING (participant_address)
+    WHERE day >= now() - interval '3 days'
   `)
   for (const { participant_address: address } of rows) {
     let scheduledRewards
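Patch 09 above splits the single observation loop in two so that a slow Transfer scan cannot delay the daily scheduled-rewards pass; each loop now sleeps on its own schedule. A condensed sketch of the pattern the two IIFEs implement. The `Math.max(0, ...)` clamp is my addition: as written, an iteration that overruns its interval passes a negative delay to `timers.setTimeout`, which Node coerces to an immediate timeout, so the loop would re-run right away rather than wait:

    // sketch: independent observation loops with per-loop cadence
    const loop = async (name, fn, intervalMs) => {
      while (true) {
        const start = Date.now()
        try {
          await fn()
        } catch (e) {
          console.error(e)
        }
        const dt = Date.now() - start
        console.log(`${name} took ${dt}ms`)
        await timers.setTimeout(Math.max(0, intervalMs - dt))
      }
    }
    await Promise.all([
      loop('Transfer events', () => observeTransferEvents(pgPool, ieContract, provider), ONE_HOUR),
      loop('scheduled rewards', () => observeScheduledRewards(pgPool, ieContract), 24 * ONE_HOUR)
    ])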
From 043af01ec009211a9f264b8db8275a3065c57563 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 16:20:16 +0200
Subject: [PATCH 13/62] big refactor

---
 common/db.js                            | 65 ++++++++++++++++++++++++
 common/typings.d.ts                     |  6 +++
 migrations/003.do.scheduled-rewards.sql |  4 +-
 observer/bin/dry-run.js                 | 11 ++--
 observer/bin/spark-observer.js          |  8 +--
 observer/lib/config.js                  |  5 +-
 observer/lib/db.js                      | 34 -------------
 observer/lib/observer.js                | 31 ++++++------
 observer/test/platform-stats.test.js    |  5 +-
 stats/bin/migrate.js                    | 17 ++++---
 stats/bin/spark-stats.js                | 47 ++---------------
 stats/lib/config.js                     |  9 ----
 stats/lib/handler.js                    | 16 +++---
 stats/lib/platform-routes.js            | 10 ++--
 stats/test/handler.test.js              | 13 ++---
 stats/test/platform-routes.test.js      | 67 +++++++++++--------------
 16 files changed, 162 insertions(+), 186 deletions(-)
 create mode 100644 common/db.js
 create mode 100644 common/typings.d.ts
 delete mode 100644 observer/lib/db.js
 delete mode 100644 stats/lib/config.js

diff --git a/common/db.js b/common/db.js
new file mode 100644
index 0000000..5d34767
--- /dev/null
+++ b/common/db.js
@@ -0,0 +1,65 @@
+import { migrateWithPgClient } from '@filecoin-station/spark-stats-db-migrations'
+import pg from 'pg'
+
+const {
+  // DATABASE_URL points to `spark_stats` database managed by this monorepo
+  DATABASE_URL = 'postgres://localhost:5432/spark_stats',
+
+  // EVALUATE_DB_URL points to `spark_evaluate` database managed by spark-evaluate repo.
+  // Eventually, we should move the code updating stats from spark-evaluate to this repo
+  // and then we won't need two connection strings.
+  EVALUATE_DB_URL = 'postgres://localhost:5432/spark_evaluate'
+} = process.env
+
+const poolConfig = {
+  // allow the pool to close all connections and become empty
+  min: 0,
+  // these values should correlate with service concurrency hard_limit configured in fly.toml
+  // and must take into account the connection limit of our PG server, see
+  // https://fly.io/docs/postgres/managing/configuration-tuning/
+  max: 100,
+  // close connections that haven't been used for one second
+  idleTimeoutMillis: 1000,
+  // automatically close connections older than 60 seconds
+  maxLifetimeSeconds: 60
+}
+
+const onError = err => {
+  // Prevent crashing the process on idle client errors, the pool will recover
+  // itself. If all connections are lost, the process will still crash.
+  // https://github.com/brianc/node-postgres/issues/1324#issuecomment-308778405
+  console.error('An idle client has experienced an error', err.stack)
+}
+
+export const getStats = async () => {
+  const stats = new pg.Pool({
+    ...poolConfig,
+    connectionString: DATABASE_URL
+  })
+  stats.on('error', onError)
+  await migrateWithPgClient(stats)
+  return stats
+}
+
+export const getEvaluate = async () => {
+  const evaluate = new pg.Pool({
+    ...poolConfig,
+    connectionString: EVALUATE_DB_URL
+  })
+  evaluate.on('error', onError)
+
+  // Check that we can talk to the database
+  await evaluate.query('SELECT 1')
+  return evaluate
+}
+
+/**
+ * @returns {Promise<pgPools>}
+ */
+export const getPgPools = async () => {
+  const stats = await getStats()
+  const evaluate = await getEvaluate()
+  const end = () => Promise.all([stats.end(), evaluate.end()])
+
+  return { stats, evaluate, end }
+}
diff --git a/common/typings.d.ts b/common/typings.d.ts
new file mode 100644
index 0000000..1cf8a0b
--- /dev/null
+++ b/common/typings.d.ts
@@ -0,0 +1,6 @@
+import type { Pool } from 'pg'
+
+export interface pgPools {
+  stats: Pool;
+  evaluate: Pool;
+}
diff --git a/migrations/003.do.scheduled-rewards.sql b/migrations/003.do.scheduled-rewards.sql
index be740f9..b30d488 100644
--- a/migrations/003.do.scheduled-rewards.sql
+++ b/migrations/003.do.scheduled-rewards.sql
@@ -1,6 +1,6 @@
 CREATE TABLE daily_scheduled_rewards (
   day DATE NOT NULL,
-  address TEXT NOT NULL,
+  participant_address TEXT NOT NULL,
   scheduled_rewards NUMERIC NOT NULL,
-  PRIMARY KEY (day, address)
+  PRIMARY KEY (day, participant_address)
 );
diff --git a/observer/bin/dry-run.js b/observer/bin/dry-run.js
index 14704bc..b0022e6 100644
--- a/observer/bin/dry-run.js
+++ b/observer/bin/dry-run.js
@@ -3,10 +3,9 @@ import { ethers } from 'ethers'
 
 import { RPC_URL, rpcHeaders } from '../lib/config.js'
 import { observe } from '../lib/observer.js'
-import { getPgPool } from '../lib/db.js'
+import { getPgPools } from '../../common/db.js'
 
-/** @type {pg.Pool} */
-const pgPool = await getPgPool()
+const pgPools = await getPgPools()
 
 const fetchRequest = new ethers.FetchRequest(RPC_URL)
 fetchRequest.setHeader('Authorization', rpcHeaders.Authorization || '')
@@ -14,8 +13,8 @@ const provider = new ethers.JsonRpcProvider(fetchRequest, null, { polling: true })
 
 const ieContract = new ethers.Contract(SparkImpactEvaluator.ADDRESS, SparkImpactEvaluator.ABI, provider)
 
-await pgPool.query('DELETE FROM daily_reward_transfers')
+await pgPools.stats.query('DELETE FROM daily_reward_transfers')
 
-await observe(pgPool, ieContract, provider)
+await observe(pgPools, ieContract, provider)
 
-await pgPool.end()
+await pgPools.end()
diff --git a/observer/bin/spark-observer.js b/observer/bin/spark-observer.js
index a854c7b..a3f153c 100644
--- a/observer/bin/spark-observer.js
+++ b/observer/bin/spark-observer.js
@@ -4,13 +4,13 @@ import * as Sentry from '@sentry/node'
 import timers from 'node:timers/promises'
 
 import { RPC_URL, rpcHeaders } from '../lib/config.js'
-import { getPgPool } from '../lib/db.js'
+import { getPgPools } from '../../common/db.js'
 import {
   observeTransferEvents,
   observeScheduledRewards
 } from '../lib/observer.js'
 
-const pgPool = await getPgPool()
+const pgPools = await getPgPools()
 
 const fetchRequest = new ethers.FetchRequest(RPC_URL)
 fetchRequest.setHeader('Authorization', rpcHeaders.Authorization || '')
@@ -25,7 +25,7 @@ await Promise.all([
     while (true) {
       const start = new Date()
       try {
-        await observeTransferEvents(pgPool, ieContract, provider)
+        await observeTransferEvents(pgPools, ieContract, provider)
       } catch (e) {
         console.error(e)
         Sentry.captureException(e)
@@ -39,7 +39,7 @@ await Promise.all([
     while (true) {
       const start = new Date()
      try {
-        await observeScheduledRewards(pgPool, ieContract, provider)
+        await observeScheduledRewards(pgPools, ieContract, provider)
       } catch (e) {
         console.error(e)
         Sentry.captureException(e)
diff --git a/observer/lib/config.js b/observer/lib/config.js
index 89c1fde..a6340b5 100644
--- a/observer/lib/config.js
+++ b/observer/lib/config.js
@@ -3,9 +3,7 @@ const {
   // supports rpc failover
   // RPC_URLS = 'https://api.node.glif.io/rpc/v0,https://api.chain.love/rpc/v1',
   RPC_URLS = 'https://api.node.glif.io/rpc/v0',
-  GLIF_TOKEN,
-  // DATABASE_URL points to `spark_stats` database managed by this monorepo
-  DATABASE_URL = 'postgres://localhost:5432/spark_stats'
+  GLIF_TOKEN
 } = process.env
 
 const rpcUrls = RPC_URLS.split(',')
@@ -19,6 +17,5 @@ if (RPC_URL.includes('glif')) {
 
 export {
   RPC_URL,
-  DATABASE_URL,
   rpcHeaders
 }
diff --git a/observer/lib/db.js b/observer/lib/db.js
deleted file mode 100644
index 1a74dd1..0000000
--- a/observer/lib/db.js
+++ /dev/null
@@ -1,34 +0,0 @@
-import { migrateWithPgClient } from '@filecoin-station/spark-stats-db-migrations'
-import pg from 'pg'
-
-import { DATABASE_URL } from '../lib/config.js'
-
-export const getPgPool = async () => {
-  const pgPool = new pg.Pool({
-    connectionString: DATABASE_URL,
-    // allow the pool to close all connections and become empty
-    min: 0,
-    // this values should correlate with service concurrency hard_limit configured in fly.toml
-    // and must take into account the connection limit of our PG server, see
-    // https://fly.io/docs/postgres/managing/configuration-tuning/
-    max: 100,
-    // close connections that haven't been used for one second
-    idleTimeoutMillis: 1000,
-    // automatically close connections older than 60 seconds
-    maxLifetimeSeconds: 60
-  })
-
-  pgPool.on('error', err => {
-    // Prevent crashing the process on idle client errors, the pool will recover
-    // itself. If all connections are lost, the process will still crash.
-    // https://github.com/brianc/node-postgres/issues/1324#issuecomment-308778405
-    console.error('An idle client has experienced an error', err.stack)
-  })
-
-  await migrateWithPgClient(pgPool)
-
-  // Check that we can talk to the database
-  await pgPool.query('SELECT 1')
-
-  return pgPool
-}
diff --git a/observer/lib/observer.js b/observer/lib/observer.js
index 3b215df..d0a7db4 100644
--- a/observer/lib/observer.js
+++ b/observer/lib/observer.js
@@ -2,25 +2,25 @@ import { updateDailyTransferStats } from './platform-stats.js'
 import * as Sentry from '@sentry/node'
 
 /**
- * @param {import('pg').Pool} pgPool
+ * @param {import('../../common/typings').pgPools} pgPools
  * @param {import('ethers').Contract} ieContract
  * @param {import('ethers').Provider} provider
 */
-export const observe = async (pgPool, ieContract, provider) => {
+export const observe = async (pgPools, ieContract, provider) => {
   await Promise.all([
-    observeTransferEvents(pgPool, ieContract, provider),
-    observeScheduledRewards(pgPool, ieContract)
+    observeTransferEvents(pgPools.stats, ieContract, provider),
+    observeScheduledRewards(pgPools, ieContract)
   ])
 }
 
 /**
  * Observe the transfer events on the Filecoin blockchain
- * @param {import('pg').Pool} pgPool
+ * @param {import('pg').Pool} pgPoolStats
  * @param {import('ethers').Contract} ieContract
  * @param {import('ethers').Provider} provider
 */
-const observeTransferEvents = async (pgPool, ieContract, provider) => {
-  const { rows } = await pgPool.query(
+const observeTransferEvents = async (pgPoolStats, ieContract, provider) => {
+  const { rows } = await pgPoolStats.query(
     'SELECT MAX(last_checked_block) FROM daily_reward_transfers'
   )
   const lastCheckedBlock = rows[0].last_checked_block
@@ -44,21 +44,21 @@ const observeTransferEvents = async (pgPool, ieContract, provider) => {
       amount: event.args.amount
     }
     console.log('Transfer event:', transferEvent)
-    await updateDailyTransferStats(pgPool, transferEvent, currentBlockNumber)
+    await updateDailyTransferStats(pgPoolStats, transferEvent, currentBlockNumber)
   }
 }
 
 /**
  * Observe scheduled rewards on the Filecoin blockchain
- * @param {import('pg').Pool} pgPool
+ * @param {import('../../common/typings').pgPools} pgPools
  * @param {import('ethers').Contract} ieContract
  */
-const observeScheduledRewards = async (pgPool, ieContract) => {
+const observeScheduledRewards = async (pgPools, ieContract) => {
   console.log('Querying scheduled rewards from impact evaluator')
-  const rows = await pgPool.query(`
+  const rows = await pgPools.evaluate.query(`
     SELECT participant_address
     FROM participants
-    JOIN daily_participants USING (participant_address)
+    JOIN daily_participants USING (participant_id)
     WHERE day >= now() - interval '3 days'
   `)
   for (const { participant_address: address } of rows) {
@@ -75,9 +75,10 @@ const observeScheduledRewards = async (pgPool, ieContract) => {
     console.log('Scheduled rewards for', address, scheduledRewards)
-    await pgPool.query(`
-      INSERT INTO daily_scheduled_rewards (day, address, scheduled_rewards)
+    await pgPools.stats.query(`
+      INSERT INTO daily_scheduled_rewards
+      (day, participant_address, scheduled_rewards)
       VALUES (now(), $1, $2)
-      ON CONFLICT (day, address) DO UPDATE SET
+      ON CONFLICT (day, id) DO UPDATE SET
        scheduled_rewards = EXCLUDED.scheduled_rewards
      `, [address, scheduledRewards])
   }
diff --git a/observer/test/platform-stats.test.js b/observer/test/platform-stats.test.js
index d6c12db..c6f087e 100644
--- a/observer/test/platform-stats.test.js
+++ b/observer/test/platform-stats.test.js
@@ -1,8 +1,7 @@
 import assert from 'node:assert'
-import pg from 'pg'
 import { beforeEach, describe, it } from 'mocha'
 
-import { DATABASE_URL } from '../lib/config.js'
+import { getStats } from '../../common/db.js'
 import { updateDailyTransferStats } from '../lib/platform-stats.js'
 import { migrateWithPgClient } from '@filecoin-station/spark-stats-db-migrations'
 
@@ -11,7 +10,7 @@ describe('platform-stats-generator', () => {
   let pgPool
 
   before(async () => {
-    pgPool = new pg.Pool({ connectionString: DATABASE_URL })
+    pgPool = await getStats()
     await migrateWithPgClient(pgPool)
   })
diff --git a/stats/bin/migrate.js b/stats/bin/migrate.js
index f033825..01924ed 100644
--- a/stats/bin/migrate.js
+++ b/stats/bin/migrate.js
@@ -1,12 +1,13 @@
-import {
-  DATABASE_URL,
-  EVALUATE_DB_URL
-} from '../lib/config.js'
-import { migrateWithPgConfig as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js'
-import { migrateWithPgConfig as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations'
+import { getPgPools } from '../../common/db.js'
+import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js'
+import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations'
+
+const pgPools = await getPgPools()
 
 console.log('Migrating spark_evaluate database')
-await migrateEvaluateDB({ connectionString: EVALUATE_DB_URL })
+await migrateEvaluateDB(pgPools.evaluate)
 
 console.log('Migrating spark_stats database')
-await migrateStatsDB({ connectionString: DATABASE_URL })
+await migrateStatsDB(pgPools.stats)
+
+await pgPools.end()
diff --git a/stats/bin/spark-stats.js b/stats/bin/spark-stats.js
index f7f9b7e..0058edc 100644
--- a/stats/bin/spark-stats.js
+++ b/stats/bin/spark-stats.js
@@ -1,9 +1,8 @@
 import '../lib/instrument.js'
 import http from 'node:http'
 import { once } from 'node:events'
-import pg from 'pg'
 import { createHandler } from '../lib/handler.js'
-import { DATABASE_URL, EVALUATE_DB_URL } from '../lib/config.js'
+import { getPgPools } from '../../common/db.js'
 
 const {
   PORT = 8080,
@@ -11,54 +10,14 @@ const {
   REQUEST_LOGGING = 'true'
 } = process.env
 
-const pgPoolConfig = {
-  // allow the pool to close all connections and become empty
-  min: 0,
-  // this values should correlate with service concurrency hard_limit configured in fly.toml
-  // and must take into account the connection limit of our PG server, see
-  // https://fly.io/docs/postgres/managing/configuration-tuning/
-  max: 100,
-  // close connections that haven't been used for one second
-  idleTimeoutMillis: 1000,
-  // automatically close connections older than 60 seconds
-  maxLifetimeSeconds: 60
-}
-const pgPoolErrFn = err => {
-  // Prevent crashing the process on idle client errors, the pool will recover
-  // itself. If all connections are lost, the process will still crash.
-  // https://github.com/brianc/node-postgres/issues/1324#issuecomment-308778405
-  console.error('An idle client has experienced an error', err.stack)
-}
-
-// Connect and set up the Evaluate DB
-const pgPoolEvaluateDb = new pg.Pool({
-  connectionString: EVALUATE_DB_URL,
-  ...pgPoolConfig
-})
-pgPoolEvaluateDb.on('error', pgPoolErrFn)
-// Check that we can talk to the database
-await pgPoolEvaluateDb.query('SELECT 1')
-
-// Connect and set up the Stats DB
-const pgPoolStatsDb = new pg.Pool({
-  connectionString: DATABASE_URL,
-  ...pgPoolConfig
-})
-pgPoolStatsDb.on('error', pgPoolErrFn)
-// Check that we can talk to the database
-await pgPoolStatsDb.query('SELECT 1')
-
+const pgPools = await getPgPools()
 const logger = {
   error: console.error,
   info: console.info,
   request: ['1', 'true'].includes(REQUEST_LOGGING) ? console.info : () => {}
 }
 
-const handler = createHandler({
-  pgPoolEvaluateDb,
-  pgPoolStatsDb,
-  logger
-})
+const handler = createHandler({ pgPools, logger })
 const server = http.createServer(handler)
 console.log('Starting the http server on host %j port %s', HOST, PORT)
 server.listen(PORT, HOST)
diff --git a/stats/lib/config.js b/stats/lib/config.js
deleted file mode 100644
index ea99e8a..0000000
--- a/stats/lib/config.js
+++ /dev/null
@@ -1,9 +0,0 @@
-export const {
-  // DATABASE_URL points to `spark_stats` database managed by this monorepo
-  DATABASE_URL = 'postgres://localhost:5432/spark_stats',
-
-  // EVALUATE_DB_URL points to `spark_evaluate` database managed by spark-evaluate repo.
-  // Eventually, we should move the code updating stats from spark-evaluate to this repo
-  // and then we won't need two connection strings.
-  EVALUATE_DB_URL = 'postgres://localhost:5432/spark_evaluate'
-} = process.env
diff --git a/stats/lib/handler.js b/stats/lib/handler.js
index 724543a..cc7c01f 100644
--- a/stats/lib/handler.js
+++ b/stats/lib/handler.js
@@ -15,20 +15,19 @@ import { handlePlatformRoutes } from './platform-routes.js'
 
 /**
  * @param {object} args
- * @param {import('pg').Pool} args.pgPoolEvaluateDb
+ * @param {import('../../common/typings').pgPools} args.pgPools
  * @param {import('pg').Pool} args.pgPoolStatsDb
  * @param {import('./typings').Logger} args.logger
  * @returns
 */
 export const createHandler = ({
-  pgPoolEvaluateDb,
-  pgPoolStatsDb,
+  pgPools,
   logger
 }) => {
   return (req, res) => {
     const start = new Date()
     logger.request(`${req.method} ${req.url} ...`)
-    handler(req, res, pgPoolEvaluateDb, pgPoolStatsDb)
+    handler(req, res, pgPools)
       .catch(err => errorHandler(res, err, logger))
       .then(() => {
         logger.request(`${req.method} ${req.url} ${res.statusCode} (${new Date() - start}ms)`)
@@ -39,10 +38,9 @@ export const createHandler = ({
 /**
  * @param {import('node:http').IncomingMessage} req
  * @param {import('node:http').ServerResponse} res
- * @param {import('pg').Pool} pgPoolEvaluateDb
- * @param {import('pg').Pool} pgPoolStatsDb
+ * @param {import('../../common/typings').pgPools} pgPools
 */
-const handler = async (req, res, pgPoolEvaluateDb, pgPoolStatsDb) => {
+const handler = async (req, res, pgPools) => {
   // Caveat! `new URL('//foo', 'http://127.0.0.1')` would produce "http://foo/" - not what we want!
   const { pathname, searchParams } = new URL(`http://127.0.0.1${req.url}`)
   const segs = pathname.split('/').filter(Boolean)
@@ -62,10 +60,10 @@ const handler = async (req, res, pgPoolEvaluateDb, pgPoolStatsDb) => {
       pathname,
       searchParams,
       res,
-      pgPoolEvaluateDb,
+      pgPools.evaluate,
       fetchStatsFn
     )
-  } else if (await handlePlatformRoutes(req, res, pgPoolEvaluateDb, pgPoolStatsDb)) {
+  } else if (await handlePlatformRoutes(req, res, pgPools)) {
     // no-op, request was handled by handlePlatformRoute
   } else {
     notFound(res)
diff --git a/stats/lib/platform-routes.js b/stats/lib/platform-routes.js
index e2fcf43..db7ad2e 100644
--- a/stats/lib/platform-routes.js
+++ b/stats/lib/platform-routes.js
@@ -6,7 +6,7 @@ import {
   fetchDailyStationAcceptedMeasurementCount
 } from './platform-stats-fetchers.js'
 
-export const handlePlatformRoutes = async (req, res, pgPoolEvaluateDb, pgPoolStatsDb) => {
+export const handlePlatformRoutes = async (req, res, pgPools) => {
   // Caveat! `new URL('//foo', 'http://127.0.0.1')` would produce "http://foo/" - not what we want!
   const { pathname, searchParams } = new URL(`http://127.0.0.1${req.url}`)
   const segs = pathname.split('/').filter(Boolean)
@@ -14,19 +14,19 @@ export const handlePlatformRoutes = async (req, res, pgPoolEvaluateDb, pgPoolSta
   const routeHandlerInfoMap = {
     'stations/daily': {
       fetchFunction: fetchDailyStationCount,
-      pgPool: pgPoolEvaluateDb
+      pgPool: pgPools.evaluate
     },
     'stations/monthly': {
       fetchFunction: fetchMonthlyStationCount,
-      pgPool: pgPoolEvaluateDb
+      pgPool: pgPools.evaluate
     },
     'measurements/daily': {
       fetchFunction: fetchDailyStationAcceptedMeasurementCount,
-      pgPool: pgPoolEvaluateDb
+      pgPool: pgPools.evaluate
     },
     'transfers/daily': {
       fetchFunction: fetchDailyRewardTransfers,
-      pgPool: pgPoolStatsDb
+      pgPool: pgPools.stats
     }
   }
diff --git a/stats/test/handler.test.js b/stats/test/handler.test.js
index 2bf0fd4..7e552cb 100644
--- a/stats/test/handler.test.js
+++ b/stats/test/handler.test.js
@@ -1,19 +1,18 @@
 import http from 'node:http'
 import { once } from 'node:events'
 import assert from 'node:assert'
-import pg from 'pg'
 import createDebug from 'debug'
 import { mapParticipantsToIds } from 'spark-evaluate/lib/platform-stats.js'
 
+import { getEvaluate } from '../../common/db.js'
 import { assertResponseStatus } from './test-helpers.js'
 import { createHandler } from '../lib/handler.js'
 import { today } from '../lib/request-helpers.js'
-import { EVALUATE_DB_URL } from '../lib/config.js'
 
 const debug = createDebug('test')
 
 describe('HTTP request handler', () => {
-  /** @type {pg.Pool} */
+  /** @type {import('pg').Pool} */
   let pgPool
   /** @type {http.Server} */
   let server
@@ -22,11 +21,13 @@ describe('HTTP request handler', () => {
 
   before(async () => {
     // handler doesn't use Stats DB
-    pgPool = new pg.Pool({ connectionString: EVALUATE_DB_URL })
+    pgPool = await getEvaluate()
 
     const handler = createHandler({
-      pgPoolEvaluateDb: pgPool,
-      pgPoolStatsDb: undefined,
+      pgPools: {
+        stats: null,
+        evaluate: pgPool
+      },
       logger: {
         info: debug,
         error: console.error,
diff --git a/stats/test/platform-routes.test.js b/stats/test/platform-routes.test.js
index a276a83..a704587 100644
--- a/stats/test/platform-routes.test.js
+++ b/stats/test/platform-routes.test.js
@@ -1,32 +1,26 @@
 import http from 'node:http'
 import { once } from 'node:events'
 import assert from 'node:assert'
-import pg from 'pg'
 import createDebug from 'debug'
 
+import { getPgPools } from '../../common/db.js'
 import { assertResponseStatus } from './test-helpers.js'
 import { createHandler } from '../lib/handler.js'
-import { DATABASE_URL, EVALUATE_DB_URL } from '../lib/config.js'
 
 const debug = createDebug('test')
 
 describe('Platform Routes HTTP request handler', () => {
-  /** @type {pg.Pool} */
-  let pgPoolEvaluateDb
-  /** @type {pg.Pool} */
-  let pgPoolStatsDb
-  /** @type {http.Server} */
+  /** @type {import('../../common/typings').pgPools} */
+  let pgPools
   let server
   /** @type {string} */
   let baseUrl
 
   before(async () => {
-    pgPoolEvaluateDb = new pg.Pool({ connectionString: EVALUATE_DB_URL })
-    pgPoolStatsDb = new pg.Pool({ connectionString: DATABASE_URL })
+    pgPools = await getPgPools()
 
     const handler = createHandler({
-      pgPoolEvaluateDb,
-      pgPoolStatsDb,
+      pgPools,
       logger: {
         info: debug,
         error: console.error,
@@ -43,28 +37,27 @@ describe('Platform Routes HTTP request handler', () => {
   after(async () => {
     server.closeAllConnections()
     server.close()
-    await pgPoolEvaluateDb.end()
-    await pgPoolStatsDb.end()
+    await pgPools.end()
   })
 
   beforeEach(async () => {
-    await pgPoolEvaluateDb.query('DELETE FROM daily_stations')
-    await pgPoolStatsDb.query('DELETE FROM daily_reward_transfers')
+    await pgPools.evaluate.query('DELETE FROM daily_stations')
+    await pgPools.stats.query('DELETE FROM daily_reward_transfers')
   })
 
   describe('GET /stations/daily', () => {
     it('returns daily station metrics for the given date range', async () => {
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-10', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-10', [
         { station_id: 'station1', accepted_measurement_count: 1 }
       ])
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-11', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-11', [
        { station_id: 'station2', accepted_measurement_count: 1 }
       ])
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-12', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-12', [
         { station_id: 'station2', accepted_measurement_count: 2 },
         { station_id: 'station3', accepted_measurement_count: 1 }
       ])
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-13', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-13', [
         { station_id: 'station1', accepted_measurement_count: 1 }
       ])
@@ -86,24 +79,24 @@ describe('Platform Routes HTTP request handler', () => {
   describe('GET /stations/monthly', () => {
     it('returns monthly station metrics for the given date range ignoring the day number', async () => {
       // before the date range
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2023-12-31', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2023-12-31', [
         { station_id: 'station1', accepted_measurement_count: 1 }
       ])
       // in the date range
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-10', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-10', [
         { station_id: 'station1', accepted_measurement_count: 1 }
       ])
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-11', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-11', [
         { station_id: 'station2', accepted_measurement_count: 1 }
       ])
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-12', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-12', [
         { station_id: 'station2', accepted_measurement_count: 2 },
         { station_id: 'station3', accepted_measurement_count: 1 }
       ])
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-02-13', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-02-13', [
         { station_id: 'station1', accepted_measurement_count: 1 }
       ])
       // after the date range
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-03-01', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-03-01', [
         { station_id: 'station1', accepted_measurement_count: 1 }
       ])
@@ -127,15 +120,15 @@ describe('Platform Routes HTTP request handler', () => {
   describe('GET /measurements/daily', () => {
     it('returns daily total accepted measurement count for the given date range', async () => {
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-10', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-10', [
         { station_id: 'station1', accepted_measurement_count: 1 }
       ])
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-11', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-11', [
         { station_id: 'station2', accepted_measurement_count: 1 }
      ])
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-12', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-12', [
         { station_id: 'station2', accepted_measurement_count: 2 },
         { station_id: 'station3', accepted_measurement_count: 1 }
       ])
-      await givenDailyStationMetrics(pgPoolEvaluateDb, '2024-01-13', [
+      await givenDailyStationMetrics(pgPools.evaluate, '2024-01-13', [
         { station_id: 'station1', accepted_measurement_count: 1 }
       ])
@@ -160,15 +153,15 @@ describe('Platform Routes HTTP request handler', () => {
   describe('GET /transfers/daily', () => {
     it('returns daily total Rewards sent for the given date range', async () => {
-      await givenDailyRewardTransferMetrics(pgPoolStatsDb, '2024-01-10', [
+      await givenDailyRewardTransferMetrics(pgPools.stats, '2024-01-10', [
         { to_address: 'to1', amount: 100, last_checked_block: 1 }
       ])
-      await givenDailyRewardTransferMetrics(pgPoolStatsDb, '2024-01-11', [
+      await givenDailyRewardTransferMetrics(pgPools.stats, '2024-01-11', [
         { to_address: 'to2', amount: 150, last_checked_block: 1 }
       ])
-      await givenDailyRewardTransferMetrics(pgPoolStatsDb, '2024-01-12', [
+      await givenDailyRewardTransferMetrics(pgPools.stats, '2024-01-12', [
         { to_address: 'to2', amount: 300, last_checked_block: 1 },
         { to_address: 'to3', amount: 250, last_checked_block: 1 }
       ])
-      await givenDailyRewardTransferMetrics(pgPoolStatsDb, '2024-01-13', [
+      await givenDailyRewardTransferMetrics(pgPools.stats, '2024-01-13', [
         { to_address: 'to1', amount: 100, last_checked_block: 1 }
       ])
@@ -192,20 +185,20 @@ describe('Platform Routes HTTP request handler', () => {
   })
 })
 
-const givenDailyStationMetrics = async (pgPoolEvaluateDb, day, stationStats) => {
-  await pgPoolEvaluateDb.query(`
+const givenDailyStationMetrics = async (pgPoolEvaluate, day, stationStats) => {
+  await pgPoolEvaluate.query(`
     INSERT INTO daily_stations (day, station_id, accepted_measurement_count)
     SELECT $1 AS day, UNNEST($2::text[]) AS station_id, UNNEST($3::int[]) AS accepted_measurement_count
     ON CONFLICT DO NOTHING
   `, [
     day,
     stationStats.map(s => s.station_id),
     stationStats.map(s => s.accepted_measurement_count)
   ])
 }
 
-const givenDailyRewardTransferMetrics = async (pgPoolStatsDb, day, transferStats) => {
-  await pgPoolStatsDb.query(`
+const givenDailyRewardTransferMetrics = async (pgPoolStats, day, transferStats) => {
+  await pgPoolStats.query(`
     INSERT INTO daily_reward_transfers (day, to_address, amount, last_checked_block)
     SELECT $1 AS day, UNNEST($2::text[]) AS to_address, UNNEST($3::int[]) AS amount, UNNEST($4::int[]) AS last_checked_block
     ON CONFLICT DO NOTHING
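After the refactor in patch 13, every process obtains both database handles through a single `getPgPools()` call and releases them through the returned `end()`. A minimal consumer sketch, assuming the `common/db.js` module introduced above:

    import { getPgPools } from './common/db.js'

    const pgPools = await getPgPools()
    try {
      // spark_evaluate holds participant data, spark_stats the derived stats
      const { rows } = await pgPools.evaluate.query('SELECT 1 AS ok')
      console.log(rows[0].ok)
      await pgPools.stats.query('SELECT 1')
    } finally {
      await pgPools.end() // closes both pools
    }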
From 158bf599c96bae08d445b825650d11f85d4569cd Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 23:30:54 +0200
Subject: [PATCH 14/62] add tests

---
 observer/test/observer.test.js | 64 ++++++++++++++++++++++++++++++++++
 1 file changed, 64 insertions(+)
 create mode 100644 observer/test/observer.test.js

diff --git a/observer/test/observer.test.js b/observer/test/observer.test.js
new file mode 100644
index 0000000..aad65ec
--- /dev/null
+++ b/observer/test/observer.test.js
@@ -0,0 +1,64 @@
+import assert from 'node:assert'
+import { observeScheduledRewards } from '../lib/observer.js'
+import { getPgPools } from '../../common/db.js'
+
+describe('observer', () => {
+  describe('observeScheduledRewards', () => {
+    let pgPools
+
+    before(async () => {
+      pgPools = await getPgPools()
+    })
+
+    it('observes scheduled rewards', async () => {
+      await pgPools.evaluate.query(`DELETE FROM daily_participants`)
+      await pgPools.evaluate.query(`DELETE FROM participants`)
+      const { rows: insertRows } = await pgPools.evaluate.query(`
+        INSERT INTO participants
+        (participant_address)
+        VALUES
+        ('0xCURRENT'),
+        ('0xOLD')
+        RETURNING id
+      `)
+      await pgPools.evaluate.query(`
+        INSERT INTO daily_participants
+        (participant_id, day)
+        VALUES
+        ($1, now()),
+        ($2, now() - interval '4 days')
+      `, [insertRows[0].id, insertRows[1].id])
+
+      const ieContract = {
+        rewardsScheduledFor: async (address) => {
+          if (address === '0xCURRENT') {
+            return 100n
+          } else {
+            throw new Error('Should never be called')
+          }
+        }
+      }
+      await observeScheduledRewards(pgPools, ieContract)
+      const { rows } = await pgPools.stats.query(`
+        SELECT *
+        FROM daily_scheduled_rewards
+      `)
+      assert.strictEqual(rows.length, 1)
+      assert.strictEqual(rows[0].participant_address, '0xCURRENT')
+      assert.strictEqual(rows[0].scheduled_rewards, '100')
+    })
+    it('updates scheduled rewards', async () => {
+      const ieContract = {
+        rewardsScheduledFor: async () => 200n
+      }
+      await observeScheduledRewards(pgPools, ieContract)
+      const { rows } = await pgPools.stats.query(`
+        SELECT *
+        FROM daily_scheduled_rewards
+      `)
+      assert.strictEqual(rows.length, 1)
+      assert.strictEqual(rows[0].participant_address, '0xCURRENT')
+      assert.strictEqual(rows[0].scheduled_rewards, '200')
+    })
+  })
+})
\ No newline at end of file

From cb2cead8698abe05f7cb75fa3c2c7ee35d6cb590 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 23:31:14 +0200
Subject: [PATCH 15/62] tests pass

---
 observer/lib/observer.js | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/observer/lib/observer.js b/observer/lib/observer.js
index d0a7db4..cb94e72 100644
--- a/observer/lib/observer.js
+++ b/observer/lib/observer.js
@@ -56,13 +56,13 @@ const observeTransferEvents = async (pgPoolStats, ieContract, provider) => {
  * @param {import('../../common/typings').pgPools} pgPools
  * @param {import('ethers').Contract} ieContract
 */
-const observeScheduledRewards = async (pgPools, ieContract) => {
+export const observeScheduledRewards = async (pgPools, ieContract) => {
   console.log('Querying scheduled rewards from impact evaluator')
-  const rows = await pgPools.evaluate.query(`
+  const { rows } = await pgPools.evaluate.query(`
     SELECT participant_address
-    FROM participants
-    JOIN daily_participants USING (participant_id)
-    WHERE day >= now() - interval '3 days'
+    FROM participants p
+    JOIN daily_participants d ON p.id = d.participant_id
+    WHERE d.day >= now() - interval '3 days'
   `)
   for (const { participant_address: address } of rows) {
     let scheduledRewards
@@ -82,7 +82,7 @@ const observeScheduledRewards = async (pgPools, ieContract) => {
       INSERT INTO daily_scheduled_rewards
       (day, participant_address, scheduled_rewards)
       VALUES (now(), $1, $2)
-      ON CONFLICT (day, id) DO UPDATE SET
+      ON CONFLICT (day, participant_address) DO UPDATE SET
        scheduled_rewards = EXCLUDED.scheduled_rewards
      `, [address, scheduledRewards])
   }

From 2b46dfaae2daeb1b2bf81e07a45107b8c54eb45b Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 23:39:01 +0200
Subject: [PATCH 16/62] fix lint

---
 observer/test/observer.test.js | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/observer/test/observer.test.js b/observer/test/observer.test.js
index aad65ec..408ede9 100644
--- a/observer/test/observer.test.js
+++ b/observer/test/observer.test.js
@@ -9,10 +9,10 @@ describe('observer', () => {
     before(async () => {
       pgPools = await getPgPools()
     })
-    
+
     it('observes scheduled rewards', async () => {
-      await pgPools.evaluate.query(`DELETE FROM daily_participants`)
-      await pgPools.evaluate.query(`DELETE FROM participants`)
+      await pgPools.evaluate.query('DELETE FROM daily_participants')
+      await pgPools.evaluate.query('DELETE FROM participants')
       const { rows: insertRows } = await pgPools.evaluate.query(`
         INSERT INTO participants
         (participant_address)
@@ -61,4 +61,4 @@ describe('observer', () => {
       assert.strictEqual(rows[0].scheduled_rewards, '200')
     })
   })
-})
\ No newline at end of file
+})

From 0577ddca77ada1bdd0de3d323de8ee7a48403206 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 23:44:03 +0200
Subject: [PATCH 17/62] add missing env

---
 .github/workflows/ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 922c89a..a321794 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -54,6 +54,7 @@ jobs:
           --health-retries 5
     env:
       DATABASE_URL: postgres://postgres:postgres@localhost:5432/spark_stats
+      EVALUATE_DB_URL: postgres://postgres:postgres@localhost:5432/spark_evaluate
      NPM_CONFIG_WORKSPACE: observer
    steps:
      - uses: actions/checkout@v4

From f146179042ec2900b0c02fed2e83175ca86c2fdb Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 23:45:36 +0200
Subject: [PATCH 18/62] fix missing ci db

---
 .github/workflows/ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a321794..aa123bd 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -57,6 +57,7 @@ jobs:
       EVALUATE_DB_URL: postgres://postgres:postgres@localhost:5432/spark_evaluate
       NPM_CONFIG_WORKSPACE: observer
     steps:
+      - run: psql "${DATABASE_URL}" -c "CREATE DATABASE spark_evaluate"
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:

From f12a51e407b3c3144b1a14051f3ed04985bad113 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 23:47:08 +0200
Subject: [PATCH 19/62] fix missing migrate call

---
 .github/workflows/ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index aa123bd..4913744 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -63,6 +63,7 @@ jobs:
         with:
           node-version: 20
       - run: npm ci
+      - run: npm run migrate
       - run: npm test
 
   lint-all:

From db3ecb17fd1545498263b911206267973a7dc544 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Tue, 11 Jun 2024 23:49:20 +0200
Subject: [PATCH 20/62] fix migration bin

---
 observer/bin/migrate.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/observer/bin/migrate.js b/observer/bin/migrate.js
index 13654cb..3785028 100644
--- a/observer/bin/migrate.js
+++ b/observer/bin/migrate.js
@@ -1,5 +1,5 @@
-import { DATABASE_URL } from '../lib/config.js'
-import { migrateWithPgConfig as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations'
+import { getStats } from '../../common/db.js'
+import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations'
 
 console.log('Migrating spark_stats database')
-await migrateStatsDB({ connectionString: DATABASE_URL })
+await migrateStatsDB(await getStats())
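Patch 20 settles the migration entry point on `migrateWithPgClient`, which takes an already-connected pool or client rather than a connection-string config. A sketch of invoking it directly, assuming the `getStats` helper from `common/db.js`; note that patch 13's `getStats()` already runs the migrations once on connect, so the explicit call below is redundant but harmless, since the runner skips migrations it has already applied:

    import { getStats } from './common/db.js'
    import { migrateWithPgClient } from '@filecoin-station/spark-stats-db-migrations'

    const pgPool = await getStats()
    await migrateWithPgClient(pgPool) // idempotent re-run
    await pgPool.end()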
from '../lib/config.js' -import { migrateWithPgConfig as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' +import { getStats } from '../../common/db.js' +import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' console.log('Migrating spark_stats database') -await migrateStatsDB({ connectionString: DATABASE_URL }) +await migrateStatsDB(await getStats()) From fdb37fac0ff2c2873f93a2665dbdf4846af0d18a Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Tue, 11 Jun 2024 23:49:52 +0200 Subject: [PATCH 21/62] clean up --- migrations/index.js | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/migrations/index.js b/migrations/index.js index e6e417b..c582e81 100644 --- a/migrations/index.js +++ b/migrations/index.js @@ -9,19 +9,6 @@ const migrationsDirectory = join( 'migrations' ) -/** - *@param {pg.ClientConfig} pgConfig - */ -export const migrateWithPgConfig = async (pgConfig) => { - const client = new pg.Client(pgConfig) - await client.connect() - try { - await migrateWithPgClient(client) - } finally { - await client.end() - } -} - /** * @param {pg.Client} client */ From a0866e6c60f27d09c916c987cb947c23cd47fe05 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Tue, 11 Jun 2024 23:51:35 +0200 Subject: [PATCH 22/62] fix lint --- migrations/index.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/migrations/index.js b/migrations/index.js index c582e81..1c2709e 100644 --- a/migrations/index.js +++ b/migrations/index.js @@ -1,6 +1,5 @@ import { dirname, join } from 'node:path' import { fileURLToPath } from 'node:url' -import pg from 'pg' import Postgrator from 'postgrator' const migrationsDirectory = join( @@ -10,7 +9,7 @@ const migrationsDirectory = join( ) /** - * @param {pg.Client} client + * @param {import('pg').Client} client */ export const migrateWithPgClient = async (client) => { const postgrator = new Postgrator({ From 5b8a082937d8597716b096b4655a4ba9bd5746c6 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Tue, 11 Jun 2024 23:53:06 +0200 Subject: [PATCH 23/62] fix migrate bin --- observer/bin/migrate.js | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/observer/bin/migrate.js b/observer/bin/migrate.js index 3785028..01924ed 100644 --- a/observer/bin/migrate.js +++ b/observer/bin/migrate.js @@ -1,5 +1,13 @@ -import { getStats } from '../../common/db.js' +import { getPgPools } from '../../common/db.js' +import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' +const pgPools = await getPgPools() + +console.log('Migrating spark_evaluate database') +await migrateEvaluateDB(pgPools.evaluate) + console.log('Migrating spark_stats database') -await migrateStatsDB(await getStats()) +await migrateStatsDB(pgPools.stats) + +await pgPools.end() From 6f574acd9c06194dd1615494e4a3ffb9fb732094 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Tue, 11 Jun 2024 23:55:34 +0200 Subject: [PATCH 24/62] clean up --- .github/workflows/ci.yml | 2 -- README.md | 6 ------ common/db.js | 21 ++++++++++++++++----- observer/bin/migrate.js | 13 ------------- observer/package.json | 1 - package.json | 1 - stats/bin/migrate.js | 13 ------------- stats/package.json | 1 - 8 files changed, 16 insertions(+), 42 deletions(-) delete mode 100644 observer/bin/migrate.js delete mode 100644 stats/bin/migrate.js diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 
4913744..3c36b2c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -33,7 +33,6 @@ jobs: with: node-version: 20 - run: npm ci - - run: npm run migrate - run: npm test build-observer: @@ -63,7 +62,6 @@ jobs: with: node-version: 20 - run: npm ci - - run: npm run migrate - run: npm test lint-all: diff --git a/README.md b/README.md index db37a58..1662c9d 100644 --- a/README.md +++ b/README.md @@ -75,12 +75,6 @@ Next, you need to create `spark_evaluate` database. psql postgres://localhost:5432/ -c "CREATE DATABASE spark_evaluate" ``` -Finally, run database schema migration scripts. - -```bash -npm run migrate -``` - ### Run the test suite ```sh diff --git a/common/db.js b/common/db.js index 5d34767..b49c7a7 100644 --- a/common/db.js +++ b/common/db.js @@ -1,5 +1,6 @@ -import { migrateWithPgClient } from '@filecoin-station/spark-stats-db-migrations' import pg from 'pg' +import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' +import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' const { // DATABASE_URL points to `spark_stats` database managed by this monorepo @@ -37,7 +38,7 @@ export const getStats = async () => { connectionString: DATABASE_URL }) stats.on('error', onError) - await migrateWithPgClient(stats) + await migrateStatsDB(stats) return stats } @@ -47,9 +48,7 @@ export const getEvaluate = async () => { connectionString: EVALUATE_DB_URL }) evaluate.on('error', onError) - - // Check that we can talk to the database - await evaluate.query('SELECT 1') + await migrateEvaluateDB(evaluate) return evaluate } @@ -63,3 +62,15 @@ export const getPgPools = async () => { return { stats, evaluate, end } } + +export const migrate = async () => { + const pgPools = await getPgPools() + + console.log('Migrating spark_evaluate database') + await migrateEvaluateDB(pgPools.evaluate) + + console.log('Migrating spark_stats database') + await migrateStatsDB(pgPools.stats) + + await pgPools.end() +} diff --git a/observer/bin/migrate.js b/observer/bin/migrate.js deleted file mode 100644 index 01924ed..0000000 --- a/observer/bin/migrate.js +++ /dev/null @@ -1,13 +0,0 @@ -import { getPgPools } from '../../common/db.js' -import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' -import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' - -const pgPools = await getPgPools() - -console.log('Migrating spark_evaluate database') -await migrateEvaluateDB(pgPools.evaluate) - -console.log('Migrating spark_stats database') -await migrateStatsDB(pgPools.stats) - -await pgPools.end() diff --git a/observer/package.json b/observer/package.json index bb84490..a183c07 100644 --- a/observer/package.json +++ b/observer/package.json @@ -3,7 +3,6 @@ "type": "module", "private": true, "scripts": { - "migrate": "node bin/migrate.js", "start": "node bin/spark-observer.js", "lint": "standard", "test": "mocha" diff --git a/package.json b/package.json index a7ac74f..fa27ee6 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,6 @@ "stats" ], "scripts": { - "migrate": "npm run migrate --workspaces --if-present", "lint": "standard", "test:observer": "npm t -w observer", "test:stats": "npm t -w stats", diff --git a/stats/bin/migrate.js b/stats/bin/migrate.js deleted file mode 100644 index 01924ed..0000000 --- a/stats/bin/migrate.js +++ /dev/null @@ -1,13 +0,0 @@ -import { getPgPools } from '../../common/db.js' -import { migrateWithPgClient as migrateEvaluateDB } from 
'spark-evaluate/lib/migrate.js' -import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' - -const pgPools = await getPgPools() - -console.log('Migrating spark_evaluate database') -await migrateEvaluateDB(pgPools.evaluate) - -console.log('Migrating spark_stats database') -await migrateStatsDB(pgPools.stats) - -await pgPools.end() diff --git a/stats/package.json b/stats/package.json index c744938..f71a09a 100644 --- a/stats/package.json +++ b/stats/package.json @@ -3,7 +3,6 @@ "type": "module", "private": true, "scripts": { - "migrate": "node bin/migrate.js", "start": "node bin/spark-stats.js", "lint": "standard", "test": "mocha" From f9920970f7ee65cd01c173c5a2c0a240815de95b Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 10:08:02 +0200 Subject: [PATCH 25/62] Revert "clean up" This reverts commit 6f574acd9c06194dd1615494e4a3ffb9fb732094. --- .github/workflows/ci.yml | 2 ++ README.md | 6 ++++++ common/db.js | 21 +++++---------------- observer/bin/migrate.js | 13 +++++++++++++ observer/package.json | 1 + package.json | 1 + stats/bin/migrate.js | 13 +++++++++++++ stats/package.json | 1 + 8 files changed, 42 insertions(+), 16 deletions(-) create mode 100644 observer/bin/migrate.js create mode 100644 stats/bin/migrate.js diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3c36b2c..4913744 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -33,6 +33,7 @@ jobs: with: node-version: 20 - run: npm ci + - run: npm run migrate - run: npm test build-observer: @@ -62,6 +63,7 @@ jobs: with: node-version: 20 - run: npm ci + - run: npm run migrate - run: npm test lint-all: diff --git a/README.md b/README.md index 1662c9d..db37a58 100644 --- a/README.md +++ b/README.md @@ -75,6 +75,12 @@ Next, you need to create `spark_evaluate` database. psql postgres://localhost:5432/ -c "CREATE DATABASE spark_evaluate" ``` +Finally, run database schema migration scripts. 
+ +```bash +npm run migrate +``` + ### Run the test suite ```sh diff --git a/common/db.js b/common/db.js index b49c7a7..5d34767 100644 --- a/common/db.js +++ b/common/db.js @@ -1,6 +1,5 @@ +import { migrateWithPgClient } from '@filecoin-station/spark-stats-db-migrations' import pg from 'pg' -import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' -import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' const { // DATABASE_URL points to `spark_stats` database managed by this monorepo @@ -38,7 +37,7 @@ export const getStats = async () => { connectionString: DATABASE_URL }) stats.on('error', onError) - await migrateStatsDB(stats) + await migrateWithPgClient(stats) return stats } @@ -48,7 +47,9 @@ export const getEvaluate = async () => { connectionString: EVALUATE_DB_URL }) evaluate.on('error', onError) - await migrateEvaluateDB(evaluate) + + // Check that we can talk to the database + await evaluate.query('SELECT 1') return evaluate } @@ -62,15 +63,3 @@ export const getPgPools = async () => { return { stats, evaluate, end } } - -export const migrate = async () => { - const pgPools = await getPgPools() - - console.log('Migrating spark_evaluate database') - await migrateEvaluateDB(pgPools.evaluate) - - console.log('Migrating spark_stats database') - await migrateStatsDB(pgPools.stats) - - await pgPools.end() -} diff --git a/observer/bin/migrate.js b/observer/bin/migrate.js new file mode 100644 index 0000000..01924ed --- /dev/null +++ b/observer/bin/migrate.js @@ -0,0 +1,13 @@ +import { getPgPools } from '../../common/db.js' +import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' +import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' + +const pgPools = await getPgPools() + +console.log('Migrating spark_evaluate database') +await migrateEvaluateDB(pgPools.evaluate) + +console.log('Migrating spark_stats database') +await migrateStatsDB(pgPools.stats) + +await pgPools.end() diff --git a/observer/package.json b/observer/package.json index a183c07..bb84490 100644 --- a/observer/package.json +++ b/observer/package.json @@ -3,6 +3,7 @@ "type": "module", "private": true, "scripts": { + "migrate": "node bin/migrate.js", "start": "node bin/spark-observer.js", "lint": "standard", "test": "mocha" diff --git a/package.json b/package.json index fa27ee6..a7ac74f 100644 --- a/package.json +++ b/package.json @@ -8,6 +8,7 @@ "stats" ], "scripts": { + "migrate": "npm run migrate --workspaces --if-present", "lint": "standard", "test:observer": "npm t -w observer", "test:stats": "npm t -w stats", diff --git a/stats/bin/migrate.js b/stats/bin/migrate.js new file mode 100644 index 0000000..01924ed --- /dev/null +++ b/stats/bin/migrate.js @@ -0,0 +1,13 @@ +import { getPgPools } from '../../common/db.js' +import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' +import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' + +const pgPools = await getPgPools() + +console.log('Migrating spark_evaluate database') +await migrateEvaluateDB(pgPools.evaluate) + +console.log('Migrating spark_stats database') +await migrateStatsDB(pgPools.stats) + +await pgPools.end() diff --git a/stats/package.json b/stats/package.json index f71a09a..c744938 100644 --- a/stats/package.json +++ b/stats/package.json @@ -3,6 +3,7 @@ "type": "module", "private": true, "scripts": { + "migrate": "node bin/migrate.js", "start": 
"node bin/spark-stats.js", "lint": "standard", "test": "mocha" From c83d40a27be799677934c9a533f64eeaf23a0ff2 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 10:10:14 +0200 Subject: [PATCH 26/62] refactor `migrate()` --- common/db.js | 16 ++++++++++++++-- observer/bin/migrate.js | 14 ++------------ stats/bin/migrate.js | 14 ++------------ 3 files changed, 18 insertions(+), 26 deletions(-) diff --git a/common/db.js b/common/db.js index 5d34767..cc6354d 100644 --- a/common/db.js +++ b/common/db.js @@ -1,4 +1,5 @@ -import { migrateWithPgClient } from '@filecoin-station/spark-stats-db-migrations' +import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' +import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' import pg from 'pg' const { @@ -37,7 +38,7 @@ export const getStats = async () => { connectionString: DATABASE_URL }) stats.on('error', onError) - await migrateWithPgClient(stats) + await migrateStatsDB(stats) return stats } @@ -63,3 +64,14 @@ export const getPgPools = async () => { return { stats, evaluate, end } } + +export const migrate = async () => { + const pgPools = await getPgPools() + + console.log('Migrating spark_evaluate database') + await migrateEvaluateDB(pgPools.evaluate) + console.log('Migrating spark_stats database') + await migrateStatsDB(pgPools.stats) + + await pgPools.end() +} diff --git a/observer/bin/migrate.js b/observer/bin/migrate.js index 01924ed..cc3cbef 100644 --- a/observer/bin/migrate.js +++ b/observer/bin/migrate.js @@ -1,13 +1,3 @@ -import { getPgPools } from '../../common/db.js' -import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' -import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' +import { migrate } from '../../common/db.js' -const pgPools = await getPgPools() - -console.log('Migrating spark_evaluate database') -await migrateEvaluateDB(pgPools.evaluate) - -console.log('Migrating spark_stats database') -await migrateStatsDB(pgPools.stats) - -await pgPools.end() +await migrate() diff --git a/stats/bin/migrate.js b/stats/bin/migrate.js index 01924ed..cc3cbef 100644 --- a/stats/bin/migrate.js +++ b/stats/bin/migrate.js @@ -1,13 +1,3 @@ -import { getPgPools } from '../../common/db.js' -import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' -import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' +import { migrate } from '../../common/db.js' -const pgPools = await getPgPools() - -console.log('Migrating spark_evaluate database') -await migrateEvaluateDB(pgPools.evaluate) - -console.log('Migrating spark_stats database') -await migrateStatsDB(pgPools.stats) - -await pgPools.end() +await migrate() From a3ea5f1e3a55240f18bc01d156b354fca3c191b1 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 10:11:05 +0200 Subject: [PATCH 27/62] remove implicit migrate --- common/db.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/common/db.js b/common/db.js index cc6354d..ac0cf9b 100644 --- a/common/db.js +++ b/common/db.js @@ -38,7 +38,7 @@ export const getStats = async () => { connectionString: DATABASE_URL }) stats.on('error', onError) - await migrateStatsDB(stats) + await stats.query('SELECT 1') return stats } @@ -48,8 +48,6 @@ export const getEvaluate = async () => { connectionString: EVALUATE_DB_URL }) evaluate.on('error', onError) - - // Check that we can talk to the database 
await evaluate.query('SELECT 1') return evaluate } From 82a8d2686a58f3801531d4b85b46d888256aef61 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 10:24:20 +0200 Subject: [PATCH 28/62] create `db` package --- common/db.js => db/index.js | 0 db/package.json | 21 +++++++++++++++++++++ {common => db}/typings.d.ts | 0 observer/bin/dry-run.js | 2 +- observer/bin/migrate.js | 2 +- observer/bin/spark-observer.js | 2 +- observer/lib/observer.js | 4 ++-- observer/test/observer.test.js | 2 +- observer/test/platform-stats.test.js | 2 +- package-lock.json | 15 +++++++++++++++ package.json | 1 + stats/bin/migrate.js | 2 +- stats/bin/spark-stats.js | 2 +- stats/lib/handler.js | 4 ++-- stats/package.json | 1 + stats/test/handler.test.js | 2 +- stats/test/platform-routes.test.js | 4 ++-- 17 files changed, 52 insertions(+), 14 deletions(-) rename common/db.js => db/index.js (100%) create mode 100644 db/package.json rename {common => db}/typings.d.ts (100%) diff --git a/common/db.js b/db/index.js similarity index 100% rename from common/db.js rename to db/index.js diff --git a/db/package.json b/db/package.json new file mode 100644 index 0000000..a132d16 --- /dev/null +++ b/db/package.json @@ -0,0 +1,21 @@ +{ + "name": "@filecoin-station/spark-stats-db", + "version": "1.0.0", + "type": "module", + "main": "index.js", + "private": true, + "scripts": { + "lint": "standard" + }, + "devDependencies": { + "standard": "^17.1.0" + }, + "dependencies": { + "pg": "^8.12.0" + }, + "standard": { + "env": [ + "mocha" + ] + } +} diff --git a/common/typings.d.ts b/db/typings.d.ts similarity index 100% rename from common/typings.d.ts rename to db/typings.d.ts diff --git a/observer/bin/dry-run.js b/observer/bin/dry-run.js index b0022e6..0f56675 100644 --- a/observer/bin/dry-run.js +++ b/observer/bin/dry-run.js @@ -3,7 +3,7 @@ import { ethers } from 'ethers' import { RPC_URL, rpcHeaders } from '../lib/config.js' import { observe } from '../lib/observer.js' -import { getPgPools } from '../../common/db.js' +import { getPgPools } from '../../db/index.js' const pgPools = await getPgPools() diff --git a/observer/bin/migrate.js b/observer/bin/migrate.js index cc3cbef..0e4d5ef 100644 --- a/observer/bin/migrate.js +++ b/observer/bin/migrate.js @@ -1,3 +1,3 @@ -import { migrate } from '../../common/db.js' +import { migrate } from '@filecoin-station/spark-stats-db' await migrate() diff --git a/observer/bin/spark-observer.js b/observer/bin/spark-observer.js index a3f153c..956eacc 100644 --- a/observer/bin/spark-observer.js +++ b/observer/bin/spark-observer.js @@ -4,7 +4,7 @@ import * as Sentry from '@sentry/node' import timers from 'node:timers/promises' import { RPC_URL, rpcHeaders } from '../lib/config.js' -import { getPgPools } from '../../common/db.js' +import { getPgPools } from '@filecoin-station/spark-stats-db' import { observeTransferEvents, observeScheduledRewards diff --git a/observer/lib/observer.js b/observer/lib/observer.js index cc5d56e..75dd38a 100644 --- a/observer/lib/observer.js +++ b/observer/lib/observer.js @@ -2,7 +2,7 @@ import { updateDailyTransferStats } from './platform-stats.js' import * as Sentry from '@sentry/node' /** - * @param {import('../../common/typings').pgPools} pgPools + * @param {import('@filecoin-station/spark-stats-db').pgPools} pgPools * @param {import('ethers').Contract} ieContract * @param {import('ethers').Provider} provider */ @@ -47,7 +47,7 @@ const observeTransferEvents = async (pgPoolStats, ieContract, provider) => { /** * Observe scheduled rewards on the Filecoin 
blockchain - * @param {import('../../common/typings').pgPools} pgPools + * @param {import('@filecoin-station/spark-stats-db').pgPools} pgPools * @param {import('ethers').Contract} ieContract */ export const observeScheduledRewards = async (pgPools, ieContract) => { diff --git a/observer/test/observer.test.js b/observer/test/observer.test.js index 408ede9..a4a4b01 100644 --- a/observer/test/observer.test.js +++ b/observer/test/observer.test.js @@ -1,6 +1,6 @@ import assert from 'node:assert' import { observeScheduledRewards } from '../lib/observer.js' -import { getPgPools } from '../../common/db.js' +import { getPgPools } from '@filecoin-station/spark-stats-db' describe('observer', () => { describe('observeScheduledRewards', () => { diff --git a/observer/test/platform-stats.test.js b/observer/test/platform-stats.test.js index 5c4ca13..763de5c 100644 --- a/observer/test/platform-stats.test.js +++ b/observer/test/platform-stats.test.js @@ -1,7 +1,7 @@ import assert from 'node:assert' import { beforeEach, describe, it } from 'mocha' -import { getStats } from '../../common/db.js' +import { getStats } from '@filecoin-station/spark-stats-db' import { updateDailyTransferStats } from '../lib/platform-stats.js' import { migrateWithPgClient } from '@filecoin-station/spark-stats-db-migrations' diff --git a/package-lock.json b/package-lock.json index fd80e26..ac18409 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6,6 +6,7 @@ "": { "name": "@filecoin-station/spark-stats-monorepo", "workspaces": [ + "db", "migrations", "observer", "stats" @@ -14,6 +15,15 @@ "standard": "^17.1.0" } }, + "db": { + "version": "1.0.0", + "dependencies": { + "pg": "^8.12.0" + }, + "devDependencies": { + "standard": "^17.1.0" + } + }, "migrations": { "name": "@filecoin-station/spark-stats-db-migrations", "version": "1.0.0", @@ -99,6 +109,10 @@ "resolved": "stats", "link": true }, + "node_modules/@filecoin-station/spark-stats-db": { + "resolved": "db", + "link": true + }, "node_modules/@filecoin-station/spark-stats-db-migrations": { "resolved": "migrations", "link": true @@ -6124,6 +6138,7 @@ "stats": { "name": "@filecoin-station/spark-stats", "dependencies": { + "@filecoin-station/spark-stats-db": "^1.0.0", "@sentry/node": "^8.8.0", "debug": "^4.3.5", "http-assert": "^1.5.0", diff --git a/package.json b/package.json index a7ac74f..9758eac 100644 --- a/package.json +++ b/package.json @@ -3,6 +3,7 @@ "private": true, "type": "module", "workspaces": [ + "db", "migrations", "observer", "stats" diff --git a/stats/bin/migrate.js b/stats/bin/migrate.js index cc3cbef..0e4d5ef 100644 --- a/stats/bin/migrate.js +++ b/stats/bin/migrate.js @@ -1,3 +1,3 @@ -import { migrate } from '../../common/db.js' +import { migrate } from '@filecoin-station/spark-stats-db' await migrate() diff --git a/stats/bin/spark-stats.js b/stats/bin/spark-stats.js index 0058edc..1401d07 100644 --- a/stats/bin/spark-stats.js +++ b/stats/bin/spark-stats.js @@ -2,7 +2,7 @@ import '../lib/instrument.js' import http from 'node:http' import { once } from 'node:events' import { createHandler } from '../lib/handler.js' -import { getPgPools } from '../../common/db.js' +import { getPgPools } from '@filecoin-station/spark-stats-db' const { PORT = 8080, diff --git a/stats/lib/handler.js b/stats/lib/handler.js index cc7c01f..3dacd66 100644 --- a/stats/lib/handler.js +++ b/stats/lib/handler.js @@ -15,7 +15,7 @@ import { handlePlatformRoutes } from './platform-routes.js' /** * @param {object} args - * @param {import('../../common/typings')} args.pgPools + * @param 
{import('@filecoin-station/spark-stats-db')} args.pgPools * @param {import('pg').Pool} args.pgPoolStatsDb * @param {import('./typings').Logger} args.logger * @returns @@ -38,7 +38,7 @@ export const createHandler = ({ /** * @param {import('node:http').IncomingMessage} req * @param {import('node:http').ServerResponse} res - * @param {import('../../common/typings')} args.pgPools + * @param {import('@filecoin-station/spark-stats-db')} args.pgPools */ const handler = async (req, res, pgPools) => { // Caveat! `new URL('//foo', 'http://127.0.0.1')` would produce "http://foo/" - not what we want! diff --git a/stats/package.json b/stats/package.json index c744938..a641e3e 100644 --- a/stats/package.json +++ b/stats/package.json @@ -15,6 +15,7 @@ "standard": "^17.1.0" }, "dependencies": { + "@filecoin-station/spark-stats-db": "^1.0.0", "@sentry/node": "^8.8.0", "debug": "^4.3.5", "http-assert": "^1.5.0", diff --git a/stats/test/handler.test.js b/stats/test/handler.test.js index 1ee2282..b1c624e 100644 --- a/stats/test/handler.test.js +++ b/stats/test/handler.test.js @@ -3,7 +3,7 @@ import { once } from 'node:events' import assert from 'node:assert' import createDebug from 'debug' import { mapParticipantsToIds } from 'spark-evaluate/lib/platform-stats.js' -import { getEvaluate } from '../../common/db.js' +import { getEvaluate } from '@filecoin-station/spark-stats-db' import { assertResponseStatus } from './test-helpers.js' import { createHandler } from '../lib/handler.js' diff --git a/stats/test/platform-routes.test.js b/stats/test/platform-routes.test.js index 3755473..898bdfc 100644 --- a/stats/test/platform-routes.test.js +++ b/stats/test/platform-routes.test.js @@ -2,7 +2,7 @@ import http from 'node:http' import { once } from 'node:events' import assert from 'node:assert' import createDebug from 'debug' -import { getPgPools } from '../../common/db.js' +import { getPgPools } from '@filecoin-station/spark-stats-db' import { assertResponseStatus } from './test-helpers.js' import { createHandler } from '../lib/handler.js' @@ -10,7 +10,7 @@ import { createHandler } from '../lib/handler.js' const debug = createDebug('test') describe('Platform Routes HTTP request handler', () => { - /** @type {import('../../common/typings').pgPools} */ + /** @type {import('@filecoin-station/spark-stats-db').pgPools} */ let pgPools let server /** @type {string} */ From cc9ad5d2a2da57c59be39b761ead4f056ca9f0f4 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 10:31:48 +0200 Subject: [PATCH 29/62] add missing dep --- observer/package.json | 1 + package-lock.json | 1 + 2 files changed, 2 insertions(+) diff --git a/observer/package.json b/observer/package.json index 0fc4b7d..8c4143c 100644 --- a/observer/package.json +++ b/observer/package.json @@ -14,6 +14,7 @@ }, "dependencies": { "@filecoin-station/spark-impact-evaluator": "^1.1.1", + "@filecoin-station/spark-stats-db": "^1.0.0", "@filecoin-station/spark-stats-db-migrations": "^1.0.0", "@sentry/node": "^8.9.1", "debug": "^4.3.5", diff --git a/package-lock.json b/package-lock.json index 90defb5..166ed2e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6142,6 +6142,7 @@ "name": "@filecoin-station/spark-observer", "dependencies": { "@filecoin-station/spark-impact-evaluator": "^1.1.1", + "@filecoin-station/spark-stats-db": "^1.0.0", "@filecoin-station/spark-stats-db-migrations": "^1.0.0", "@sentry/node": "^8.9.1", "debug": "^4.3.5", From 883a9ce2448fe53fd18b1266a2f7aea637d6936f Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 
13:01:36 +0200 Subject: [PATCH 30/62] add missing dependency --- db/package.json | 3 ++- package-lock.json | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/db/package.json b/db/package.json index a132d16..2c83223 100644 --- a/db/package.json +++ b/db/package.json @@ -8,7 +8,8 @@ "lint": "standard" }, "devDependencies": { - "standard": "^17.1.0" + "standard": "^17.1.0", + "spark-evaluate": "filecoin-station/spark-evaluate#main" }, "dependencies": { "pg": "^8.12.0" diff --git a/package-lock.json b/package-lock.json index 166ed2e..04c80c3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -22,6 +22,7 @@ "pg": "^8.12.0" }, "devDependencies": { + "spark-evaluate": "filecoin-station/spark-evaluate#main", "standard": "^17.1.0" } }, From 2aca0de8a5d09cc97a3cef1bdfdb761a70986c47 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 13:07:59 +0200 Subject: [PATCH 31/62] try run `npm ci` first --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 5ccccb1..452ac93 100644 --- a/Dockerfile +++ b/Dockerfile @@ -33,6 +33,7 @@ COPY --link migrations/package.json ./migrations/ COPY --link stats/package.json ./stats/ COPY --link observer/package.json ./observer/ +RUN npm ci RUN npm ci --workspaces # Copy application code From 19f0f5ec16915c6b5190d0fa289fdd476a37c724 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 13:09:53 +0200 Subject: [PATCH 32/62] try run `npm install` first --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 452ac93..d1da6a3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -33,7 +33,7 @@ COPY --link migrations/package.json ./migrations/ COPY --link stats/package.json ./stats/ COPY --link observer/package.json ./observer/ -RUN npm ci +RUN npm install RUN npm ci --workspaces # Copy application code From 8323e13a629fee3134dfcbf1a1b6f263e7cf0c84 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 13:13:42 +0200 Subject: [PATCH 33/62] clean up --- Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index d1da6a3..5ccccb1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -33,7 +33,6 @@ COPY --link migrations/package.json ./migrations/ COPY --link stats/package.json ./stats/ COPY --link observer/package.json ./observer/ -RUN npm install RUN npm ci --workspaces # Copy application code From b217338b1bb6fef16612c1852c4fafe8a95ac35b Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 13:14:20 +0200 Subject: [PATCH 34/62] try update node --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 5ccccb1..890f6ff 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ # syntax = docker/dockerfile:1 # Adjust NODE_VERSION as desired -ARG NODE_VERSION=20.8.1 +ARG NODE_VERSION=22.3.0 FROM node:${NODE_VERSION}-slim as base LABEL fly_launch_runtime="nodejs" From f588f029e8a8f8a317830747a7a9c6a3a226bd5b Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 13:17:48 +0200 Subject: [PATCH 35/62] fix Dockerfile --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 890f6ff..e56270d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,6 +29,7 @@ COPY --link package-lock.json package.json ./ # We cannot use a wildcard until `COPY --parents` is stabilised # See https://docs.docker.com/reference/dockerfile/#copy---parents +COPY --link db/package.json ./db/ COPY --link migrations/package.json ./migrations/ 
COPY --link stats/package.json ./stats/ COPY --link observer/package.json ./observer/ From ba1fdec2b59830b83a6aba3a7f8c88901b04abb5 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 13:21:04 +0200 Subject: [PATCH 36/62] downgrade again to keep diff small --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index e56270d..43045e9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ # syntax = docker/dockerfile:1 # Adjust NODE_VERSION as desired -ARG NODE_VERSION=22.3.0 +ARG NODE_VERSION=20.8.1 FROM node:${NODE_VERSION}-slim as base LABEL fly_launch_runtime="nodejs" From 5da6faa8ba44af23c431fde0c1a8b94f7845fbbb Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 13:24:32 +0200 Subject: [PATCH 37/62] add back implicit migrate --- db/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/db/index.js b/db/index.js index ac0cf9b..c75c750 100644 --- a/db/index.js +++ b/db/index.js @@ -38,7 +38,7 @@ export const getStats = async () => { connectionString: DATABASE_URL }) stats.on('error', onError) - await stats.query('SELECT 1') + await migrateStatsDB(stats) return stats } From c09f608679103bdda711ea3206699ed142e9c5eb Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 04:57:50 -0700 Subject: [PATCH 38/62] Update db/index.js MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Miroslav Bajtoš --- db/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/db/index.js b/db/index.js index c75c750..36dab4b 100644 --- a/db/index.js +++ b/db/index.js @@ -3,7 +3,7 @@ import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-s import pg from 'pg' const { -// DATABASE_URL points to `spark_stats` database managed by this monorepo + // DATABASE_URL points to `spark_stats` database managed by this monorepo DATABASE_URL = 'postgres://localhost:5432/spark_stats', // EVALUATE_DB_URL points to `spark_evaluate` database managed by spark-evaluate repo. 
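At this point the public surface of the `db` package (introduced in PATCH 28/62) has settled: one `pg.Pool` per database plus a combined `end()`. A minimal consumer sketch of that API follows — the `SELECT now()` probe and the script itself are illustrative and not part of the repo, and they assume `DATABASE_URL`/`EVALUATE_DB_URL` are reachable at their `db/index.js` defaults (`spark_stats` and `spark_evaluate` on localhost):

```js
// Sketch only: exercises getPgPools() from '@filecoin-station/spark-stats-db'.
// The probe query is illustrative; it is not part of the repo.
import { getPgPools } from '@filecoin-station/spark-stats-db'

const pgPools = await getPgPools()
try {
  // One round trip per pool confirms both connection strings work.
  const { rows: [stats] } = await pgPools.stats.query('SELECT now() AS now')
  const { rows: [evaluate] } = await pgPools.evaluate.query('SELECT now() AS now')
  console.log('spark_stats reachable at %s', stats.now)
  console.log('spark_evaluate reachable at %s', evaluate.now)
} finally {
  // end() closes both pools at once — the reason getPgPools() hands back a
  // combined handle instead of two loose pools.
  await pgPools.end()
}
```

The next patch only renames the per-pool getters (`getStats`/`getEvaluate` become `getStatsPgPool`/`getEvaluatePgPool`); the combined `getPgPools()` shape used above is unchanged.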
From 4e7484521900a541ed46f0a0e5805f0d2686ac8c Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 13:59:49 +0200 Subject: [PATCH 39/62] rename pg pools --- db/index.js | 8 ++++---- observer/test/platform-stats.test.js | 4 ++-- stats/test/handler.test.js | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/db/index.js b/db/index.js index 36dab4b..4590743 100644 --- a/db/index.js +++ b/db/index.js @@ -32,7 +32,7 @@ const onError = err => { console.error('An idle client has experienced an error', err.stack) } -export const getStats = async () => { +export const getStatsPgPool = async () => { const stats = new pg.Pool({ ...poolConfig, connectionString: DATABASE_URL @@ -42,7 +42,7 @@ export const getStats = async () => { return stats } -export const getEvaluate = async () => { +export const getEvaluatePgPool = async () => { const evaluate = new pg.Pool({ ...poolConfig, connectionString: EVALUATE_DB_URL @@ -56,8 +56,8 @@ export const getEvaluate = async () => { * @returns {Promise} */ export const getPgPools = async () => { - const stats = await getStats() - const evaluate = await getEvaluate() + const stats = await getStatsPgPool() + const evaluate = await getEvaluatePgPool() const end = () => Promise.all([stats.end(), evaluate.end()]) return { stats, evaluate, end } diff --git a/observer/test/platform-stats.test.js b/observer/test/platform-stats.test.js index 763de5c..035f258 100644 --- a/observer/test/platform-stats.test.js +++ b/observer/test/platform-stats.test.js @@ -1,7 +1,7 @@ import assert from 'node:assert' import { beforeEach, describe, it } from 'mocha' -import { getStats } from '@filecoin-station/spark-stats-db' +import { getStatsPgPool } from '@filecoin-station/spark-stats-db' import { updateDailyTransferStats } from '../lib/platform-stats.js' import { migrateWithPgClient } from '@filecoin-station/spark-stats-db-migrations' @@ -10,7 +10,7 @@ describe('platform-stats-generator', () => { let pgClient before(async () => { - const pgPool = await getStats() + const pgPool = await getStatsPgPool() pgClient = await pgPool.connect() await migrateWithPgClient(pgClient) }) diff --git a/stats/test/handler.test.js b/stats/test/handler.test.js index b1c624e..c4f722d 100644 --- a/stats/test/handler.test.js +++ b/stats/test/handler.test.js @@ -3,7 +3,7 @@ import { once } from 'node:events' import assert from 'node:assert' import createDebug from 'debug' import { mapParticipantsToIds } from 'spark-evaluate/lib/platform-stats.js' -import { getEvaluate } from '@filecoin-station/spark-stats-db' +import { getEvaluatePgPool } from '@filecoin-station/spark-stats-db' import { assertResponseStatus } from './test-helpers.js' import { createHandler } from '../lib/handler.js' @@ -21,7 +21,7 @@ describe('HTTP request handler', () => { before(async () => { // handler doesn't use Stats DB - pgPool = await getEvaluate() + pgPool = await getEvaluatePgPool() const handler = createHandler({ pgPools: { From fb79b4e37ce8aba42c75cf727ba2cc125205bfcc Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 05:00:44 -0700 Subject: [PATCH 40/62] Update db/typings.d.ts MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Miroslav Bajtoš --- db/typings.d.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/db/typings.d.ts b/db/typings.d.ts index 1cf8a0b..56419bd 100644 --- a/db/typings.d.ts +++ b/db/typings.d.ts @@ -1,6 +1,6 @@ import type { Pool } from 'pg' export interface pgPools { - pgPool: Pool; - 
pgPoolEvaluate: Pool; + stats: Pool; + evaluate: Pool; } From 7455209778067ecf29c303ae2f8b49f26504bf96 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 14:02:01 +0200 Subject: [PATCH 41/62] harden tests using hooks --- observer/test/observer.test.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/observer/test/observer.test.js b/observer/test/observer.test.js index a4a4b01..0eaffeb 100644 --- a/observer/test/observer.test.js +++ b/observer/test/observer.test.js @@ -10,7 +10,7 @@ describe('observer', () => { pgPools = await getPgPools() }) - it('observes scheduled rewards', async () => { + beforeEach(async () => { await pgPools.evaluate.query('DELETE FROM daily_participants') await pgPools.evaluate.query('DELETE FROM participants') const { rows: insertRows } = await pgPools.evaluate.query(` @@ -28,7 +28,9 @@ describe('observer', () => { ($1, now()), ($2, now() - interval '4 days') `, [insertRows[0].id, insertRows[1].id]) + }) + it('observes scheduled rewards', async () => { const ieContract = { rewardsScheduledFor: async (address) => { if (address === '0xCURRENT') { From d57e0425801303c77d1cf07ba5d629b0f0e6d5d7 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 14:03:57 +0200 Subject: [PATCH 42/62] improve test assertions --- observer/test/observer.test.js | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/observer/test/observer.test.js b/observer/test/observer.test.js index 0eaffeb..314c725 100644 --- a/observer/test/observer.test.js +++ b/observer/test/observer.test.js @@ -42,12 +42,13 @@ describe('observer', () => { } await observeScheduledRewards(pgPools, ieContract) const { rows } = await pgPools.stats.query(` - SELECT * + SELECT participant_address, scheduled_rewards FROM daily_scheduled_rewards `) - assert.strictEqual(rows.length, 1) - assert.strictEqual(rows[0].participant_address, '0xCURRENT') - assert.strictEqual(rows[0].scheduled_rewards, '100') + assert.deepStrictEqual(rows, [{ + participant_address: '0xCURRENT', + scheduled_rewards: '100' + }]) }) it('updates scheduled rewards', async () => { const ieContract = { @@ -55,12 +56,13 @@ describe('observer', () => { } await observeScheduledRewards(pgPools, ieContract) const { rows } = await pgPools.stats.query(` - SELECT * + SELECT participant_address, scheduled_rewards FROM daily_scheduled_rewards `) - assert.strictEqual(rows.length, 1) - assert.strictEqual(rows[0].participant_address, '0xCURRENT') - assert.strictEqual(rows[0].scheduled_rewards, '200') + assert.deepStrictEqual(rows, [{ + participant_address: '0xCURRENT', + scheduled_rewards: '200' + }]) }) }) }) From d162516096093c7e14d01280fbc94f7a6ac5ff2a Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 14:12:43 +0200 Subject: [PATCH 43/62] refactor loop --- observer/bin/dry-run.js | 5 ++- observer/bin/spark-observer.js | 58 ++++++++++++++++++---------------- 2 files changed, 35 insertions(+), 28 deletions(-) diff --git a/observer/bin/dry-run.js b/observer/bin/dry-run.js index 0f56675..992f8e1 100644 --- a/observer/bin/dry-run.js +++ b/observer/bin/dry-run.js @@ -15,6 +15,9 @@ const ieContract = new ethers.Contract(SparkImpactEvaluator.ADDRESS, SparkImpact await pgPools.stats.query('DELETE FROM daily_reward_transfers') -await observe(pgPools, ieContract, provider) +await Promise.all([ + observeTransferEvents(pgPools.stats, ieContract, provider), + observeScheduledRewards(pgPools, ieContract) +]) await pgPools.end() diff --git a/observer/bin/spark-observer.js 
b/observer/bin/spark-observer.js index 956eacc..990d542 100644 --- a/observer/bin/spark-observer.js +++ b/observer/bin/spark-observer.js @@ -20,33 +20,37 @@ const ieContract = new ethers.Contract(SparkImpactEvaluator.ADDRESS, SparkImpact const ONE_HOUR = 60 * 60 * 1000 -await Promise.all([ - (async () => { - while (true) { - const start = new Date() - try { - await observeTransferEvents(pgPools, ieContract, provider) - } catch (e) { - console.error(e) - Sentry.captureException(e) - } - const dt = new Date() - start - console.log(`Observing Transfer events took ${dt}ms`) - await timers.setTimeout(ONE_HOUR - dt) +const loopObserveTransferEvents = async () => { + while (true) { + const start = new Date() + try { + await observeTransferEvents(pgPools, ieContract, provider) + } catch (e) { + console.error(e) + Sentry.captureException(e) } - })(), - (async () => { - while (true) { - const start = new Date() - try { - await observeScheduledRewards(pgPools, ieContract, provider) - } catch (e) { - console.error(e) - Sentry.captureException(e) - } - const dt = new Date() - start - console.log(`Observing scheduled rewards took ${dt}ms`) - await timers.setTimeout((24 * ONE_HOUR) - dt) + const dt = new Date() - start + console.log(`Observing Transfer events took ${dt}ms`) + await timers.setTimeout(ONE_HOUR - dt) + } +} + +const loopObserveScheduledRewards = async () => { + while (true) { + const start = new Date() + try { + await observeScheduledRewards(pgPools, ieContract, provider) + } catch (e) { + console.error(e) + Sentry.captureException(e) } - })() + const dt = new Date() - start + console.log(`Observing scheduled rewards took ${dt}ms`) + await timers.setTimeout((24 * ONE_HOUR) - dt) + } +} + +await Promise.all([ + loopObserveTransferEvents(), + loopObserveScheduledRewards() ]) From 7c98e1c2be0843a3fe4549510776fcd0d11d4ecb Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 15:04:10 +0200 Subject: [PATCH 44/62] move `migrations` into `db` --- Dockerfile | 1 - db/index.js | 30 ++++++++++++++++++- {migrations => db/migrations}/001.do.sql | 0 .../002.do.daily-reward-transfers.sql | 0 .../migrations}/003.do.scheduled-rewards.sql | 0 db/package.json | 3 +- migrations/index.js | 29 ------------------ migrations/package.json | 23 -------------- observer/bin/dry-run.js | 4 +-- observer/package.json | 1 - observer/test/platform-stats.test.js | 3 +- package.json | 1 - stats/package.json | 2 +- 13 files changed, 35 insertions(+), 62 deletions(-) rename {migrations => db/migrations}/001.do.sql (100%) rename {migrations => db/migrations}/002.do.daily-reward-transfers.sql (100%) rename {migrations => db/migrations}/003.do.scheduled-rewards.sql (100%) delete mode 100644 migrations/index.js delete mode 100644 migrations/package.json diff --git a/Dockerfile b/Dockerfile index 43045e9..0cfd66d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -30,7 +30,6 @@ COPY --link package-lock.json package.json ./ # We cannot use a wildcard until `COPY --parents` is stabilised # See https://docs.docker.com/reference/dockerfile/#copy---parents COPY --link db/package.json ./db/ -COPY --link migrations/package.json ./migrations/ COPY --link stats/package.json ./stats/ COPY --link observer/package.json ./observer/ diff --git a/db/index.js b/db/index.js index 4590743..4aac7f7 100644 --- a/db/index.js +++ b/db/index.js @@ -1,6 +1,9 @@ import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' -import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db-migrations' 
+import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db' import pg from 'pg' +import { dirname, join } from 'node:path' +import { fileURLToPath } from 'node:url' +import Postgrator from 'postgrator' const { // DATABASE_URL points to `spark_stats` database managed by this monorepo @@ -12,6 +15,11 @@ const { EVALUATE_DB_URL = 'postgres://localhost:5432/spark_evaluate' } = process.env +const migrationsDirectory = join( + dirname(fileURLToPath(import.meta.url)), + 'migrations' +) + const poolConfig = { // allow the pool to close all connections and become empty min: 0, @@ -73,3 +81,23 @@ export const migrate = async () => { await pgPools.end() } + +/** + * @param {pg.Client} client + */ +export const migrateWithPgClient = async (client) => { + const postgrator = new Postgrator({ + migrationPattern: join(migrationsDirectory, '*'), + driver: 'pg', + execQuery: (query) => client.query(query) + }) + console.log( + 'Migrating DB schema from version %s to version %s', + await postgrator.getDatabaseVersion(), + await postgrator.getMaxVersion() + ) + + await postgrator.migrate() + + console.log('Migrated DB schema to version', await postgrator.getDatabaseVersion()) +} diff --git a/migrations/001.do.sql b/db/migrations/001.do.sql similarity index 100% rename from migrations/001.do.sql rename to db/migrations/001.do.sql diff --git a/migrations/002.do.daily-reward-transfers.sql b/db/migrations/002.do.daily-reward-transfers.sql similarity index 100% rename from migrations/002.do.daily-reward-transfers.sql rename to db/migrations/002.do.daily-reward-transfers.sql diff --git a/migrations/003.do.scheduled-rewards.sql b/db/migrations/003.do.scheduled-rewards.sql similarity index 100% rename from migrations/003.do.scheduled-rewards.sql rename to db/migrations/003.do.scheduled-rewards.sql diff --git a/db/package.json b/db/package.json index 2c83223..a88b67e 100644 --- a/db/package.json +++ b/db/package.json @@ -12,7 +12,8 @@ "spark-evaluate": "filecoin-station/spark-evaluate#main" }, "dependencies": { - "pg": "^8.12.0" + "pg": "^8.12.0", + "postgrator": "^7.2.0" }, "standard": { "env": [ diff --git a/migrations/index.js b/migrations/index.js deleted file mode 100644 index 1c2709e..0000000 --- a/migrations/index.js +++ /dev/null @@ -1,29 +0,0 @@ -import { dirname, join } from 'node:path' -import { fileURLToPath } from 'node:url' -import Postgrator from 'postgrator' - -const migrationsDirectory = join( - dirname(fileURLToPath(import.meta.url)), - '..', - 'migrations' -) - -/** - * @param {import('pg').Client} client - */ -export const migrateWithPgClient = async (client) => { - const postgrator = new Postgrator({ - migrationPattern: join(migrationsDirectory, '*'), - driver: 'pg', - execQuery: (query) => client.query(query) - }) - console.log( - 'Migrating DB schema from version %s to version %s', - await postgrator.getDatabaseVersion(), - await postgrator.getMaxVersion() - ) - - await postgrator.migrate() - - console.log('Migrated DB schema to version', await postgrator.getDatabaseVersion()) -} diff --git a/migrations/package.json b/migrations/package.json deleted file mode 100644 index eb0149b..0000000 --- a/migrations/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "@filecoin-station/spark-stats-db-migrations", - "version": "1.0.0", - "type": "module", - "main": "index.js", - "private": true, - "scripts": { - "lint": "standard", - "test": "mocha" - }, - "devDependencies": { - "standard": "^17.1.0" - }, - "dependencies": { - "pg": "^8.12.0", - "postgrator": "^7.2.0" - }, 
- "standard": { - "env": [ - "mocha" - ] - } -} diff --git a/observer/bin/dry-run.js b/observer/bin/dry-run.js index 992f8e1..c54ba36 100644 --- a/observer/bin/dry-run.js +++ b/observer/bin/dry-run.js @@ -2,8 +2,8 @@ import * as SparkImpactEvaluator from '@filecoin-station/spark-impact-evaluator' import { ethers } from 'ethers' import { RPC_URL, rpcHeaders } from '../lib/config.js' -import { observe } from '../lib/observer.js' -import { getPgPools } from '../../db/index.js' +import { observeTransferEvents, observeScheduledRewards } from '../lib/observer.js' +import { getPgPools } from '../../db-clients/index.js' const pgPools = await getPgPools() diff --git a/observer/package.json b/observer/package.json index 8c4143c..fb8df4d 100644 --- a/observer/package.json +++ b/observer/package.json @@ -15,7 +15,6 @@ "dependencies": { "@filecoin-station/spark-impact-evaluator": "^1.1.1", "@filecoin-station/spark-stats-db": "^1.0.0", - "@filecoin-station/spark-stats-db-migrations": "^1.0.0", "@sentry/node": "^8.9.1", "debug": "^4.3.5", "ethers": "^6.13.0", diff --git a/observer/test/platform-stats.test.js b/observer/test/platform-stats.test.js index 035f258..2dc58d8 100644 --- a/observer/test/platform-stats.test.js +++ b/observer/test/platform-stats.test.js @@ -1,9 +1,8 @@ import assert from 'node:assert' import { beforeEach, describe, it } from 'mocha' -import { getStatsPgPool } from '@filecoin-station/spark-stats-db' +import { getStatsPgPool, migrateWithPgClient } from '@filecoin-station/spark-stats-db' import { updateDailyTransferStats } from '../lib/platform-stats.js' -import { migrateWithPgClient } from '@filecoin-station/spark-stats-db-migrations' describe('platform-stats-generator', () => { /** @type {pg.Client} */ diff --git a/package.json b/package.json index 9758eac..dc19535 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,6 @@ "type": "module", "workspaces": [ "db", - "migrations", "observer", "stats" ], diff --git a/stats/package.json b/stats/package.json index 2c83b0e..90f3b30 100644 --- a/stats/package.json +++ b/stats/package.json @@ -9,7 +9,7 @@ "test": "mocha" }, "devDependencies": { - "@filecoin-station/spark-stats-db-migrations": "^1.0.0", + "@filecoin-station/spark-stats-db": "^1.0.0", "mocha": "^10.4.0", "spark-evaluate": "filecoin-station/spark-evaluate#main", "standard": "^17.1.0" From cae8594ef5f977df753172da0ed1b5482507c0df Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 15:07:05 +0200 Subject: [PATCH 45/62] inline `observer()` in `dry-run.js` --- observer/lib/observer.js | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/observer/lib/observer.js b/observer/lib/observer.js index 75dd38a..26bbf69 100644 --- a/observer/lib/observer.js +++ b/observer/lib/observer.js @@ -1,25 +1,13 @@ import { updateDailyTransferStats } from './platform-stats.js' import * as Sentry from '@sentry/node' -/** - * @param {import('@filecoin-station/spark-stats-db').pgPools} pgPools - * @param {import('ethers').Contract} ieContract - * @param {import('ethers').Provider} provider - */ -export const observe = async (pgPools, ieContract, provider) => { - await Promise.all([ - observeTransferEvents(pgPools.stats, ieContract, provider), - observeScheduledRewards(pgPools, ieContract) - ]) -} - /** * Observe the transfer events on the Filecoin blockchain * @param {import('pg').Pool} pgPoolStats * @param {import('ethers').Contract} ieContract * @param {import('ethers').Provider} provider */ -const observeTransferEvents = async (pgPoolStats, ieContract, 
provider) => { +export const observeTransferEvents = async (pgPoolStats, ieContract, provider) => { const { rows } = await pgPoolStats.query( 'SELECT MAX(last_checked_block) FROM daily_reward_transfers' ) From 0a0f417b98ced251800d62f575a837b9e5bf9512 Mon Sep 17 00:00:00 2001 From: Julian Gruber Date: Wed, 12 Jun 2024 15:24:44 +0200 Subject: [PATCH 46/62] use helper from spark-evaluate --- observer/package.json | 1 + observer/test/observer.test.js | 27 ++++++++++++--------------- package-lock.json | 33 ++++++++++++++++----------------- stats/test/handler.test.js | 14 +------------- 4 files changed, 30 insertions(+), 45 deletions(-) diff --git a/observer/package.json b/observer/package.json index fb8df4d..3933a75 100644 --- a/observer/package.json +++ b/observer/package.json @@ -10,6 +10,7 @@ }, "devDependencies": { "mocha": "^10.4.0", + "spark-evaluate": "github:filecoin-station/spark-evaluate#534454c9b2323b67d50b159a3504de8a06eedcb2", "standard": "^17.1.0" }, "dependencies": { diff --git a/observer/test/observer.test.js b/observer/test/observer.test.js index 314c725..926d568 100644 --- a/observer/test/observer.test.js +++ b/observer/test/observer.test.js @@ -1,6 +1,7 @@ import assert from 'node:assert' import { observeScheduledRewards } from '../lib/observer.js' import { getPgPools } from '@filecoin-station/spark-stats-db' +import { givenDailyParticipants } from 'spark-evaluate/test/helpers/queries.js' describe('observer', () => { describe('observeScheduledRewards', () => { @@ -13,21 +14,17 @@ describe('observer', () => { beforeEach(async () => { await pgPools.evaluate.query('DELETE FROM daily_participants') await pgPools.evaluate.query('DELETE FROM participants') - const { rows: insertRows } = await pgPools.evaluate.query(` - INSERT INTO participants - (participant_address) - VALUES - ('0xCURRENT'), - ('0xOLD') - RETURNING id - `) - await pgPools.evaluate.query(` - INSERT INTO daily_participants - (participant_id, day) - VALUES - ($1, now()), - ($2, now() - interval '4 days') - `, [insertRows[0].id, insertRows[1].id]) + const today = new Date() + await givenDailyParticipants( + pgPools.evaluate, + `${today.getFullYear()}-${today.getMonth() + 1}-${today.getDate()}`, + ['0xCURRENT'] + ) + await givenDailyParticipants( + pgPools.evaluate, + `2000-01-01`, + ['0xOLD'] + ) }) it('observes scheduled rewards', async () => { diff --git a/package-lock.json b/package-lock.json index 04c80c3..34fb6d6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7,7 +7,6 @@ "name": "@filecoin-station/spark-stats-monorepo", "workspaces": [ "db", - "migrations", "observer", "stats" ], @@ -19,7 +18,8 @@ "name": "@filecoin-station/spark-stats-db", "version": "1.0.0", "dependencies": { - "pg": "^8.12.0" + "pg": "^8.12.0", + "postgrator": "^7.2.0" }, "devDependencies": { "spark-evaluate": "filecoin-station/spark-evaluate#main", @@ -29,6 +29,7 @@ "migrations": { "name": "@filecoin-station/spark-stats-db-migrations", "version": "1.0.0", + "extraneous": true, "dependencies": { "pg": "^8.12.0", "postgrator": "^7.2.0" @@ -115,10 +116,6 @@ "resolved": "db", "link": true }, - "node_modules/@filecoin-station/spark-stats-db-migrations": { - "resolved": "migrations", - "link": true - }, "node_modules/@glif/filecoin-address": { "version": "3.0.5", "resolved": "https://registry.npmjs.org/@glif/filecoin-address/-/filecoin-address-3.0.5.tgz", @@ -212,10 +209,11 @@ "dev": true }, "node_modules/@ipld/car": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/@ipld/car/-/car-5.3.0.tgz", - "integrity": 
"sha512-OB8LVvJeVAFFGluNIkZeDZ/aGeoekFKsuIvNT9I5sJIb5WekQuW5+lekjQ7Z7mZ7DBKuke/kI4jBT1j0/akU1w==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/@ipld/car/-/car-5.3.1.tgz", + "integrity": "sha512-8fNkYAZvL9yX2zesF32k7tYqUDGG41felmmBnwjCZJto06QXCb0NOMPJc/mhNgnVa5gkKqxPO1ZdSoHuaYcVSw==", "dev": true, + "license": "Apache-2.0 OR MIT", "dependencies": { "@ipld/dag-cbor": "^9.0.7", "cborg": "^4.0.5", @@ -5404,23 +5402,24 @@ } }, "node_modules/spark-evaluate": { - "resolved": "git+ssh://git@github.com/filecoin-station/spark-evaluate.git#6d3d547e00ceed6fdc90d59578a3d3fdfa6fdc78", + "resolved": "git+ssh://git@github.com/filecoin-station/spark-evaluate.git#534454c9b2323b67d50b159a3504de8a06eedcb2", + "integrity": "sha512-KFIA5F8Lw/+I+2MzPKT6qsgtt5LNSqdbuk9/LUTUyduGSUpQj++TCI+PlvSs2mTRQz7dZCQcXhhY5cchj7n1vA==", "dev": true, "dependencies": { - "@filecoin-station/spark-impact-evaluator": "^1.1.0", + "@filecoin-station/spark-impact-evaluator": "^1.1.1", "@glif/filecoin-address": "^3.0.5", "@influxdata/influxdb-client": "^1.33.2", - "@ipld/car": "^5.3.0", - "@sentry/node": "^8.7.0", + "@ipld/car": "^5.3.1", + "@sentry/node": "^8.9.1", "@web3-storage/car-block-validator": "^1.2.0", - "debug": "^4.3.4", + "debug": "^4.3.5", "ethers": "^6.10.0", "ipfs-car": "^1.2.0", "ipfs-unixfs-exporter": "^13.5.0", "just-percentile": "^4.2.0", "p-map": "^7.0.2", "p-retry": "^6.2.0", - "pg": "^8.11.5", + "pg": "^8.12.0", "postgrator": "^7.2.0" } }, @@ -6144,7 +6143,6 @@ "dependencies": { "@filecoin-station/spark-impact-evaluator": "^1.1.1", "@filecoin-station/spark-stats-db": "^1.0.0", - "@filecoin-station/spark-stats-db-migrations": "^1.0.0", "@sentry/node": "^8.9.1", "debug": "^4.3.5", "ethers": "^6.13.0", @@ -6152,6 +6150,7 @@ }, "devDependencies": { "mocha": "^10.4.0", + "spark-evaluate": "github:filecoin-station/spark-evaluate#534454c9b2323b67d50b159a3504de8a06eedcb2", "standard": "^17.1.0" } }, @@ -6166,7 +6165,7 @@ "pg": "^8.12.0" }, "devDependencies": { - "@filecoin-station/spark-stats-db-migrations": "^1.0.0", + "@filecoin-station/spark-stats-db": "^1.0.0", "mocha": "^10.4.0", "spark-evaluate": "filecoin-station/spark-evaluate#main", "standard": "^17.1.0" diff --git a/stats/test/handler.test.js b/stats/test/handler.test.js index c4f722d..5cf363e 100644 --- a/stats/test/handler.test.js +++ b/stats/test/handler.test.js @@ -2,7 +2,7 @@ import http from 'node:http' import { once } from 'node:events' import assert from 'node:assert' import createDebug from 'debug' -import { mapParticipantsToIds } from 'spark-evaluate/lib/platform-stats.js' +import { givenDailyParticipants } from 'spark-evaluate/test/helpers/queries.js' import { getEvaluatePgPool } from '@filecoin-station/spark-stats-db' import { assertResponseStatus } from './test-helpers.js' @@ -357,15 +357,3 @@ const givenRetrievalStats = async (pgPool, { day, minerId, total, successful }) [day, minerId ?? 
'f1test', total, successful]
   )
 }
-
-const givenDailyParticipants = async (pgPool, day, participantAddresses) => {
-  const ids = await mapParticipantsToIds(pgPool, new Set(participantAddresses))
-  await pgPool.query(`
-    INSERT INTO daily_participants (day, participant_id)
-    SELECT $1 as day, UNNEST($2::INT[]) AS participant_id
-    ON CONFLICT DO NOTHING
-  `, [
-    day,
-    ids
-  ])
-}

From 99a6ba9415868bc04cdc6a0dacd3976b3f2b20d5 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:25:41 +0200
Subject: [PATCH 47/62] fix test

---
 observer/test/observer.test.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/observer/test/observer.test.js b/observer/test/observer.test.js
index 926d568..29d5b47 100644
--- a/observer/test/observer.test.js
+++ b/observer/test/observer.test.js
@@ -22,7 +22,7 @@ describe('observer', () => {
     )
     await givenDailyParticipants(
       pgPools.evaluate,
-      `2000-01-01`,
+      '2000-01-01',
       ['0xOLD']
     )
   })

From b97e9accd565175b40d0dc5c28d08c55fe31ad7a Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:31:46 +0200
Subject: [PATCH 48/62] ci: add dry-run

---
 .github/workflows/ci.yml | 30 ++++++++++++++++++++++++++++++
 observer/package.json    |  1 +
 2 files changed, 31 insertions(+)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 4913744..b80101e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -75,6 +75,36 @@ jobs:
           node-version: 20
       - run: npm ci
       - run: npm run lint
+
+  dry-run:
+    runs-on: ubuntu-latest
+    services:
+      postgres:
+        image: postgres:latest
+        env:
+          POSTGRES_DB: spark_stats
+          POSTGRES_PASSWORD: postgres
+          POSTGRES_USER: postgres
+        ports:
+          - 5432:5432
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+    env:
+      DATABASE_URL: postgres://postgres:postgres@localhost:5432/spark_stats
+      EVALUATE_DB_URL: postgres://postgres:postgres@localhost:5432/spark_evaluate
+      NPM_CONFIG_WORKSPACE: observer
+    steps:
+      - run: psql "${DATABASE_URL}" -c "CREATE DATABASE spark_evaluate"
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 20
+      - run: npm ci
+      - run: npm run migrate
+      - run: npm run dry-run
 
   docker-build:
     runs-on: ubuntu-latest
diff --git a/observer/package.json b/observer/package.json
index 3933a75..624b3ae 100644
--- a/observer/package.json
+++ b/observer/package.json
@@ -3,6 +3,7 @@
   "type": "module",
   "private": true,
   "scripts": {
+    "dry-run": "node bin/dry-run.js",
     "migrate": "node bin/migrate.js",
     "start": "node bin/spark-observer.js",
     "lint": "standard",

From 63379580d4a579dee935040199bd73f91fc58ccf Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:34:02 +0200
Subject: [PATCH 49/62] add glif token to dry-run

---
 .github/workflows/ci.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b80101e..f94eea9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -105,6 +105,8 @@ jobs:
       - run: npm ci
       - run: npm run migrate
       - run: npm run dry-run
+        env:
+          GLIF_TOKEN: ${{ secrets.GLIF_TOKEN }}
 
   docker-build:
     runs-on: ubuntu-latest

From 46db5b9c265951e0c0cb44c556c054d6c490025d Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:34:48 +0200
Subject: [PATCH 50/62] fix dry-run import

---
 observer/bin/dry-run.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/observer/bin/dry-run.js b/observer/bin/dry-run.js
index c54ba36..a1a15fc 100644
--- a/observer/bin/dry-run.js
+++ b/observer/bin/dry-run.js
@@ -3,7 +3,7 @@ import { ethers } from 'ethers'
 
 import { RPC_URL, rpcHeaders } from '../lib/config.js'
 import { observeTransferEvents, observeScheduledRewards } from '../lib/observer.js'
-import { getPgPools } from '../../db-clients/index.js'
+import { getPgPools } from '@filecoin-station/spark-stats-db'
 
 const pgPools = await getPgPools()

From fb5ac664eca43ca4d82a7554ff4571e105a3b257 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:38:33 +0200
Subject: [PATCH 51/62] refine migration

---
 db/index.js             | 17 ++++-------------
 observer/bin/migrate.js |  6 ++++--
 stats/bin/migrate.js    |  6 ++++--
 3 files changed, 12 insertions(+), 17 deletions(-)

diff --git a/db/index.js b/db/index.js
index 4aac7f7..b18129b 100644
--- a/db/index.js
+++ b/db/index.js
@@ -1,10 +1,12 @@
-import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js'
-import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db'
+export { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js'
+export { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db'
 import pg from 'pg'
 import { dirname, join } from 'node:path'
 import { fileURLToPath } from 'node:url'
 import Postgrator from 'postgrator'
 
+export { migrateEvaluateDB, migrateStatsDB }
+
 const {
   // DATABASE_URL points to `spark_stats` database managed by this monorepo
   DATABASE_URL = 'postgres://localhost:5432/spark_stats',
@@ -71,17 +73,6 @@ export const getPgPools = async () => {
   return { stats, evaluate, end }
 }
 
-export const migrate = async () => {
-  const pgPools = await getPgPools()
-
-  console.log('Migrating spark_evaluate database')
-  await migrateEvaluateDB(pgPools.evaluate)
-  console.log('Migrating spark_stats database')
-  await migrateStatsDB(pgPools.stats)
-
-  await pgPools.end()
-}
-
 /**
  * @param {pg.Client} client
  */
diff --git a/observer/bin/migrate.js b/observer/bin/migrate.js
index 0e4d5ef..71a14b1 100644
--- a/observer/bin/migrate.js
+++ b/observer/bin/migrate.js
@@ -1,3 +1,5 @@
-import { migrate } from '@filecoin-station/spark-stats-db'
+import { migrateStatsDB, migrateEvaluateDB, getPgPools } from '@filecoin-station/spark-stats-db'
 
-await migrate()
+const pgPools = await getPgPools()
+await migrateStatsDB(pgPools.stats)
+await migrateEvaluateDB(pgPools.evaluate)
diff --git a/stats/bin/migrate.js b/stats/bin/migrate.js
index 0e4d5ef..71a14b1 100644
--- a/stats/bin/migrate.js
+++ b/stats/bin/migrate.js
@@ -1,3 +1,5 @@
-import { migrate } from '@filecoin-station/spark-stats-db'
+import { migrateStatsDB, migrateEvaluateDB, getPgPools } from '@filecoin-station/spark-stats-db'
 
-await migrate()
+const pgPools = await getPgPools()
+await migrateStatsDB(pgPools.stats)
+await migrateEvaluateDB(pgPools.evaluate)

From ad79c95af3eb009b0923181b8139022d59153a20 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:39:52 +0200
Subject: [PATCH 52/62] fix import

---
 db/index.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/db/index.js b/db/index.js
index b18129b..c273ed2 100644
--- a/db/index.js
+++ b/db/index.js
@@ -1,5 +1,5 @@
-export { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js'
-export { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db'
+import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js'
+import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db'
 import pg from 'pg'
 import { dirname, join } from 'node:path'
 import { fileURLToPath } from 'node:url'

From 3a2090b3af2a0ece7886b4ae0b10876c1999773a Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:50:53 +0200
Subject: [PATCH 53/62] docs

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index db37a58..78e01eb 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,7 @@ Base URL: http://stats.filspark.com/
 
 - `GET /participants/scheduled-rewards/?address=<address>&from=<day>&to=<day>`
 
-  http://stats.filspark.com/participants/scheduled-rewards/
+  http://stats.filspark.com/participants/scheduled-rewards
 
 - `GET /miners/retrieval-success-rate/summary?from=<day>&to=<day>`

From 8f0d740138ae66fecd3e4664d77ef8b5b70f5f01 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:51:30 +0200
Subject: [PATCH 54/62] fix version

---
 observer/package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/observer/package.json b/observer/package.json
index 624b3ae..df9d5d9 100644
--- a/observer/package.json
+++ b/observer/package.json
@@ -11,7 +11,7 @@
   },
   "devDependencies": {
     "mocha": "^10.4.0",
-    "spark-evaluate": "github:filecoin-station/spark-evaluate#534454c9b2323b67d50b159a3504de8a06eedcb2",
+    "spark-evaluate": "github:filecoin-station/spark-evaluate#main",
     "standard": "^17.1.0"
   },
   "dependencies": {

From 83719b52d233347c07d11ebf535368963bcfb097 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:52:01 +0200
Subject: [PATCH 55/62] revert

---
 stats/lib/typings.d.ts | 1 -
 1 file changed, 1 deletion(-)

diff --git a/stats/lib/typings.d.ts b/stats/lib/typings.d.ts
index eb94aa0..274f919 100644
--- a/stats/lib/typings.d.ts
+++ b/stats/lib/typings.d.ts
@@ -7,5 +7,4 @@ export interface Logger {
 export interface DateRangeFilter {
   from: string;
   to: string;
-  address?: string;
 }

From db09bc8094a9f486d99003492d735eae77048e33 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:55:24 +0200
Subject: [PATCH 56/62] log

---
 db/index.js | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/db/index.js b/db/index.js
index c273ed2..34905df 100644
--- a/db/index.js
+++ b/db/index.js
@@ -1,5 +1,4 @@
 import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js'
-import { migrateWithPgClient as migrateStatsDB } from '@filecoin-station/spark-stats-db'
 import pg from 'pg'
 import { dirname, join } from 'node:path'
 import { fileURLToPath } from 'node:url'
@@ -76,19 +75,19 @@ export const getPgPools = async () => {
 /**
  * @param {pg.Client} client
  */
-export const migrateWithPgClient = async (client) => {
+export const migrateStatsDB = async (client) => {
   const postgrator = new Postgrator({
     migrationPattern: join(migrationsDirectory, '*'),
     driver: 'pg',
     execQuery: (query) => client.query(query)
   })
   console.log(
-    'Migrating DB schema from version %s to version %s',
+    'Migrating `spark-stats` DB schema from version %s to version %s',
     await postgrator.getDatabaseVersion(),
     await postgrator.getMaxVersion()
   )
 
   await postgrator.migrate()
-  console.log('Migrated DB schema to version', await postgrator.getDatabaseVersion())
+  console.log('Migrated `spark-stats` DB schema to version', await postgrator.getDatabaseVersion())
 }

From 201fc76f7bf0f76ccff80ed26aca11cf3f61a96e Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 15:59:59 +0200
Subject: [PATCH 57/62] refactor

---
 observer/test/observer.test.js | 16 +++++-----------
 1 file changed, 5 insertions(+), 11 deletions(-)

diff --git a/observer/test/observer.test.js b/observer/test/observer.test.js
index 29d5b47..80c6c7e 100644
--- a/observer/test/observer.test.js
+++ b/observer/test/observer.test.js
@@ -3,6 +3,9 @@ import { observeScheduledRewards } from '../lib/observer.js'
 import { getPgPools } from '@filecoin-station/spark-stats-db'
 import { givenDailyParticipants } from 'spark-evaluate/test/helpers/queries.js'
 
+const getDayAsISOString = d => d.toISOString().split('T')[0]
+const today = () => getDayAsISOString(new Date())
+
 describe('observer', () => {
   describe('observeScheduledRewards', () => {
     let pgPools
@@ -14,17 +17,8 @@
     beforeEach(async () => {
       await pgPools.evaluate.query('DELETE FROM daily_participants')
       await pgPools.evaluate.query('DELETE FROM participants')
-      const today = new Date()
-      await givenDailyParticipants(
-        pgPools.evaluate,
-        `${today.getFullYear()}-${today.getMonth() + 1}-${today.getDate()}`,
-        ['0xCURRENT']
-      )
-      await givenDailyParticipants(
-        pgPools.evaluate,
-        '2000-01-01',
-        ['0xOLD']
-      )
+      await givenDailyParticipants(pgPools.evaluate, today(), ['0xCURRENT'])
+      await givenDailyParticipants(pgPools.evaluate, '2000-01-01', ['0xOLD'])
     })
 
     it('observes scheduled rewards', async () => {

From a9bf99811d459807654ef9d7ace6f022361f17cd Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 16:05:57 +0200
Subject: [PATCH 58/62] observer: add missing Sentry init

---
 observer/bin/spark-observer.js |  1 +
 observer/lib/instrument.js     | 24 ++++++++++++++++++++++++
 2 files changed, 25 insertions(+)
 create mode 100644 observer/lib/instrument.js

diff --git a/observer/bin/spark-observer.js b/observer/bin/spark-observer.js
index 990d542..e4addb3 100644
--- a/observer/bin/spark-observer.js
+++ b/observer/bin/spark-observer.js
@@ -1,3 +1,4 @@
+import '../lib/instrument.js'
 import * as SparkImpactEvaluator from '@filecoin-station/spark-impact-evaluator'
 import { ethers } from 'ethers'
 import * as Sentry from '@sentry/node'
diff --git a/observer/lib/instrument.js b/observer/lib/instrument.js
new file mode 100644
index 0000000..e804d6f
--- /dev/null
+++ b/observer/lib/instrument.js
@@ -0,0 +1,24 @@
+import * as Sentry from '@sentry/node'
+import fs from 'node:fs/promises'
+import { join, dirname } from 'node:path'
+import { fileURLToPath } from 'node:url'
+
+const { SENTRY_ENVIRONMENT = 'development' } = process.env
+
+const pkg = JSON.parse(
+  await fs.readFile(
+    join(
+      dirname(fileURLToPath(import.meta.url)),
+      '..',
+      'package.json'
+    ),
+    'utf8'
+  )
+)
+
+Sentry.init({
+  dsn: 'https://47b65848a6171ecd8bf9f5395a782b3f@o1408530.ingest.sentry.io/4506576125427712',
+  release: pkg.version,
+  environment: SENTRY_ENVIRONMENT,
+  tracesSampleRate: 0.1
+})

From ec5e592b9bfa38ed7ec8dda880e1cb1d35b0049c Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 07:07:12 -0700
Subject: [PATCH 59/62] Update README.md
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Miroslav Bajtoš
---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 78e01eb..46658c2 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@ Base URL: http://stats.filspark.com/
 
   http://stats.filspark.com/participants/change-rates
 
-- `GET /participants/scheduled-rewards/?address=<address>&from=<day>&to=<day>`
+- `GET /participants/scheduled-rewards?address=<address>&from=<day>&to=<day>`
 
   http://stats.filspark.com/participants/scheduled-rewards
 
From f2811b5dd5fa121069816e43e5eadcd2188689fe Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 16:07:33 +0200
Subject: [PATCH 60/62] fix export

---
 db/index.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/db/index.js b/db/index.js
index 34905df..2bac676 100644
--- a/db/index.js
+++ b/db/index.js
@@ -4,7 +4,7 @@ import { dirname, join } from 'node:path'
 import { fileURLToPath } from 'node:url'
 import Postgrator from 'postgrator'
 
-export { migrateEvaluateDB, migrateStatsDB }
+export { migrateEvaluateDB }
 
 const {
   // DATABASE_URL points to `spark_stats` database managed by this monorepo

From 1cf47b19ba7fa21b9b1a27128bf16e34a18eddc0 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 16:09:25 +0200
Subject: [PATCH 61/62] fix test

---
 observer/test/platform-stats.test.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/observer/test/platform-stats.test.js b/observer/test/platform-stats.test.js
index 2dc58d8..9b3edbe 100644
--- a/observer/test/platform-stats.test.js
+++ b/observer/test/platform-stats.test.js
@@ -1,7 +1,7 @@
 import assert from 'node:assert'
 import { beforeEach, describe, it } from 'mocha'
 
-import { getStatsPgPool, migrateWithPgClient } from '@filecoin-station/spark-stats-db'
+import { getStatsPgPool, migrateStatsDB } from '@filecoin-station/spark-stats-db'
 import { updateDailyTransferStats } from '../lib/platform-stats.js'
 
 describe('platform-stats-generator', () => {

From 7d39cb5dfa4ded89f6171962aeb15b6b5af61564 Mon Sep 17 00:00:00 2001
From: Julian Gruber
Date: Wed, 12 Jun 2024 16:15:02 +0200
Subject: [PATCH 62/62] fix

---
 observer/test/platform-stats.test.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/observer/test/platform-stats.test.js b/observer/test/platform-stats.test.js
index 9b3edbe..074e4ef 100644
--- a/observer/test/platform-stats.test.js
+++ b/observer/test/platform-stats.test.js
@@ -11,7 +11,7 @@ describe('platform-stats-generator', () => {
   before(async () => {
     const pgPool = await getStatsPgPool()
     pgClient = await pgPool.connect()
-    await migrateWithPgClient(pgClient)
+    await migrateStatsDB(pgClient)
   })
 
   let today