-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* add read scheduled rewards * fix lint * fix lint * fix migration number * wrap observation functions * wip * simplify, dry run works * fix lint * split up loops * get participants from db * sentry * read from last 3 days of participants * big refactor * add tests * tests pass * fix lint * add missing env * fix missing ci db * fix missing migrate call * fix migration bin * clean up * fix lint * fix migrate bin * clean up * Revert "clean up" This reverts commit 6f574ac. * refactor `migrate()` * remove implicit migrate * create `db` package * add missing dep * add missing dependency * try run `npm ci` first * try run `npm install` first * clean up * try update node * fix Dockerfile * downgrade again to keep diff small * add back implicit migrate * Update db/index.js Co-authored-by: Miroslav Bajtoš <oss@bajtos.net> * rename pg pools * Update db/typings.d.ts Co-authored-by: Miroslav Bajtoš <oss@bajtos.net> * harden tests using hooks * improve test assertions * refactor loop * move `migrations` into `db` * inline `observer()` in `dry-run.js` * use helper from spark-evaluate * fix test * ci: add dry-run * add glif token to dry-run * fix dry-run import * refine migration * fix import * docs * fix version * revert * log * refactor * observer: add missing Sentry init * Update README.md Co-authored-by: Miroslav Bajtoš <oss@bajtos.net> * fix export * fix test * fix --------- Co-authored-by: Miroslav Bajtoš <oss@bajtos.net>
- Loading branch information
1 parent
d3d12fe
commit e03bcf0
Showing
31 changed files
with
449 additions
and
280 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,93 @@ | ||
import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js' | ||
import pg from 'pg' | ||
import { dirname, join } from 'node:path' | ||
import { fileURLToPath } from 'node:url' | ||
import Postgrator from 'postgrator' | ||
|
||
export { migrateEvaluateDB } | ||
|
||
// Connection strings, overridable through the environment.

// DATABASE_URL points to the `spark_stats` database managed by this monorepo.
const DATABASE_URL =
  process.env.DATABASE_URL ?? 'postgres://localhost:5432/spark_stats'

// EVALUATE_DB_URL points to the `spark_evaluate` database managed by the
// spark-evaluate repo. Eventually, we should move the code updating stats from
// spark-evaluate to this repo and then we won't need two connection strings.
const EVALUATE_DB_URL =
  process.env.EVALUATE_DB_URL ?? 'postgres://localhost:5432/spark_evaluate'
|
||
// Absolute path of the `migrations` directory that sits next to this module.
const migrationsDirectory =
  join(dirname(fileURLToPath(import.meta.url)), 'migrations')
|
||
// Settings shared by both the stats and the evaluate connection pool.
const poolConfig = {
  // Let the pool drain down to zero connections when it is idle.
  min: 0,
  // These values should correlate with the service concurrency hard_limit
  // configured in fly.toml and must take into account the connection limit
  // of our PG server, see
  // https://fly.io/docs/postgres/managing/configuration-tuning/
  max: 100,
  // Drop connections that have been unused for one second.
  idleTimeoutMillis: 1000,
  // Recycle every connection after at most 60 seconds.
  maxLifetimeSeconds: 60
}
|
||
// Error handler attached to every pool. Idle clients can error out (e.g. when
// the server closes the connection); logging instead of throwing prevents the
// process from crashing — the pool recovers by itself. If all connections are
// lost, the process will still crash.
// https://github.com/brianc/node-postgres/issues/1324#issuecomment-308778405
function onError (err) {
  console.error('An idle client has experienced an error', err.stack)
}
|
||
/**
 * Open a connection pool to the `spark_stats` database and bring its schema
 * up to date before handing the pool to the caller.
 *
 * @returns {Promise<pg.Pool>}
 */
export const getStatsPgPool = async () => {
  const pool = new pg.Pool({
    connectionString: DATABASE_URL,
    ...poolConfig
  })
  pool.on('error', onError)
  await migrateStatsDB(pool)
  return pool
}
|
||
/**
 * Open a connection pool to the `spark_evaluate` database. Schema migrations
 * for that database are owned by the spark-evaluate repo, so this only runs a
 * trivial query to verify the connection works.
 *
 * @returns {Promise<pg.Pool>}
 */
export const getEvaluatePgPool = async () => {
  const pool = new pg.Pool({
    connectionString: EVALUATE_DB_URL,
    ...poolConfig
  })
  pool.on('error', onError)
  await pool.query('SELECT 1')
  return pool
}
|
||
/**
 * Create both database pools and return them together with a helper that
 * closes both at once.
 *
 * The two pools are independent, so they are created in parallel instead of
 * awaiting one after the other.
 *
 * @returns {Promise<import('./typings').pgPools>}
 */
export const getPgPools = async () => {
  const [stats, evaluate] = await Promise.all([
    getStatsPgPool(),
    getEvaluatePgPool()
  ])
  // Convenience for callers (e.g. test teardown) to release all connections.
  const end = () => Promise.all([stats.end(), evaluate.end()])
  return { stats, evaluate, end }
}
|
||
/**
 * Apply pending `spark-stats` schema migrations from the `migrations`
 * directory, logging the before/after schema versions.
 *
 * @param {pg.Client | pg.Pool} client - any object exposing a pg-compatible
 *   `query()` method; note that `getStatsPgPool()` passes a `pg.Pool` here,
 *   so the previous `pg.Client`-only annotation was too narrow.
 */
export const migrateStatsDB = async (client) => {
  const postgrator = new Postgrator({
    migrationPattern: join(migrationsDirectory, '*'),
    driver: 'pg',
    execQuery: (query) => client.query(query)
  })
  console.log(
    'Migrating `spark-stats` DB schema from version %s to version %s',
    await postgrator.getDatabaseVersion(),
    await postgrator.getMaxVersion()
  )

  await postgrator.migrate()

  console.log('Migrated `spark-stats` DB schema to version', await postgrator.getDatabaseVersion())
}
File renamed without changes.
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
-- One row per (day, participant): the participant's scheduled rewards as
-- observed on that day. NOTE(review): presumably written by the
-- observeScheduledRewards() observer loop — confirm against lib/observer.js.
-- NUMERIC keeps arbitrary precision for the reward amount.
CREATE TABLE daily_scheduled_rewards (
  day DATE NOT NULL,
  participant_address TEXT NOT NULL,
  scheduled_rewards NUMERIC NOT NULL,
  PRIMARY KEY (day, participant_address)
);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
import type { Pool } from 'pg' | ||
|
||
export interface pgPools { | ||
stats: Pool; | ||
evaluate: Pool; | ||
} |
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,5 +1,5 @@ | ||
import { migrateStatsDB, migrateEvaluateDB, getPgPools } from '@filecoin-station/spark-stats-db'

const pgPools = await getPgPools()
// Note: getPgPools() already migrates the stats DB; the explicit call below
// is idempotent and kept so this script works even if that implicit
// migration is ever removed.
await migrateStatsDB(pgPools.stats)
await migrateEvaluateDB(pgPools.evaluate)

// Release all connections so this one-shot script exits promptly instead of
// waiting for the pools' idle timeouts.
await pgPools.end()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,27 +1,57 @@ | ||
import '../lib/instrument.js'
import * as SparkImpactEvaluator from '@filecoin-station/spark-impact-evaluator'
import { ethers } from 'ethers'
import * as Sentry from '@sentry/node'
import timers from 'node:timers/promises'

import { RPC_URL, rpcHeaders } from '../lib/config.js'
import { getPgPools } from '@filecoin-station/spark-stats-db'
import {
  observeTransferEvents,
  observeScheduledRewards
} from '../lib/observer.js'

const pgPools = await getPgPools()

const fetchRequest = new ethers.FetchRequest(RPC_URL)
fetchRequest.setHeader('Authorization', rpcHeaders.Authorization || '')
const provider = new ethers.JsonRpcProvider(fetchRequest, null, { polling: true })

const ieContract = new ethers.Contract(SparkImpactEvaluator.ADDRESS, SparkImpactEvaluator.ABI, provider)

const ONE_HOUR = 60 * 60 * 1000

/**
 * Run one observation forever, once per `intervalMs`. Errors are reported to
 * Sentry and logged but never stop the loop.
 *
 * This replaces two copy-pasted loop functions with a single helper and fixes
 * a bug: when an observation took longer than its interval, `intervalMs - dt`
 * went negative, which made the loop re-run immediately with no delay.
 *
 * @param {string} name - used in the "Observing <name> took Xms" log line
 * @param {number} intervalMs - target period between observation starts
 * @param {() => Promise<void>} observe - the observation to run
 */
const runObservationLoop = async (name, intervalMs, observe) => {
  while (true) {
    const start = Date.now()
    try {
      await observe()
    } catch (e) {
      console.error(e)
      Sentry.captureException(e)
    }
    const dt = Date.now() - start
    console.log(`Observing ${name} took ${dt}ms`)
    // Clamp to zero so an overrunning observation doesn't busy-loop.
    await timers.setTimeout(Math.max(0, intervalMs - dt))
  }
}

await Promise.all([
  // Listen for Transfer events from the IE contract
  runObservationLoop(
    'Transfer events',
    ONE_HOUR,
    () => observeTransferEvents(pgPools, ieContract, provider)
  ),
  runObservationLoop(
    'scheduled rewards',
    24 * ONE_HOUR,
    () => observeScheduledRewards(pgPools, ieContract, provider)
  )
])
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.