From a9e60ed6360e75d883aeb6b36a6a8756b4fd5d16 Mon Sep 17 00:00:00 2001
From: Patrick Nercessian
Date: Wed, 10 Apr 2024 23:21:47 -0400
Subject: [PATCH] Included API handling for fetching from daily_node_metrics

---
 lib/handler.js                 |  9 +++++++++
 lib/platform-stats-fetchers.js | 17 +++++++++++++++++
 test/handler.test.js           | 32 ++++++++++++++++++++++++++++++++
 3 files changed, 58 insertions(+)
 create mode 100644 lib/platform-stats-fetchers.js

diff --git a/lib/handler.js b/lib/handler.js
index f38017e..f32176a 100644
--- a/lib/handler.js
+++ b/lib/handler.js
@@ -10,6 +10,8 @@ import {
   fetchRetrievalSuccessRate
 } from './stats-fetchers.js'
 
+import { fetchDailyNodeMetrics } from './platform-stats-fetchers.js'
+
 /**
  * @param {object} args
  * @param {import('pg').Pool} args.pgPool
@@ -68,6 +70,13 @@ const handler = async (req, res, pgPool) => {
       res,
       pgPool,
       fetchMinersRSRSummary)
+  } else if (req.method === 'GET' && segs[0] === 'nodes' && segs[1] === 'daily' && segs.length === 2) {
+    await getStatsWithFilterAndCaching(
+      pathname,
+      searchParams,
+      res,
+      pgPool,
+      fetchDailyNodeMetrics)
   } else if (req.method === 'GET' && segs.length === 0) {
     // health check - required by Grafana datasources
     res.end('OK')
diff --git a/lib/platform-stats-fetchers.js b/lib/platform-stats-fetchers.js
new file mode 100644
index 0000000..7b7a41d
--- /dev/null
+++ b/lib/platform-stats-fetchers.js
@@ -0,0 +1,17 @@
+/**
+ * @param {import('pg').Pool} pgPool
+ * @param {import('./typings').Filter} filter
+ */
+export const fetchDailyNodeMetrics = async (pgPool, filter) => {
+  const { rows } = await pgPool.query(`
+    SELECT metric_date::TEXT, station_id
+    FROM daily_node_metrics
+    WHERE metric_date >= $1 AND metric_date <= $2
+    GROUP BY metric_date, station_id
+    ORDER BY metric_date, station_id
+  `, [
+    filter.from,
+    filter.to
+  ])
+  return rows
+}
diff --git a/test/handler.test.js b/test/handler.test.js
index dc29863..b633b12 100644
--- a/test/handler.test.js
+++ b/test/handler.test.js
@@ -45,6 +45,7 @@ describe('HTTP request handler', () => {
   beforeEach(async () => {
     await pgPool.query('DELETE FROM retrieval_stats')
     await pgPool.query('DELETE FROM daily_participants')
+    await pgPool.query('DELETE FROM daily_node_metrics')
   })
 
   it('returns 200 for GET /', async () => {
@@ -244,6 +245,30 @@ describe('HTTP request handler', () => {
       ])
     })
   })
+
+  describe('GET /nodes/daily', () => {
+    it('returns daily node metrics for the given date range', async () => {
+      await givenDailyNodeMetrics(pgPool, '2024-01-10', 'station1')
+      await givenDailyNodeMetrics(pgPool, '2024-01-11', 'station2')
+      await givenDailyNodeMetrics(pgPool, '2024-01-12', 'station3')
+      await givenDailyNodeMetrics(pgPool, '2024-01-13', 'station1')
+
+      const res = await fetch(
+        new URL(
+          '/nodes/daily?from=2024-01-11&to=2024-01-12',
+          baseUrl
+        ), {
+          redirect: 'manual'
+        }
+      )
+      await assertResponseStatus(res, 200)
+      const metrics = await res.json()
+      assert.deepStrictEqual(metrics, [
+        { metric_date: '2024-01-11', station_id: 'station2' },
+        { metric_date: '2024-01-12', station_id: 'station3' }
+      ])
+    })
+  })
 })
 
 const assertResponseStatus = async (res, status) => {
@@ -274,3 +299,10 @@ const givenDailyParticipants = async (pgPool, day, participantAddresses) => {
     ids
   ])
 }
+
+const givenDailyNodeMetrics = async (pgPool, day, stationId) => {
+  await pgPool.query(
+    'INSERT INTO daily_node_metrics (metric_date, station_id) VALUES ($1, $2)',
+    [day, stationId]
+  )
+}