Add unit tests
andrewvc committed Jun 4, 2020
1 parent d4cb7f5 commit e34c12c
Showing 4 changed files with 122 additions and 62 deletions.
@@ -0,0 +1,112 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import { fullyMatchingIds } from '../refine_potential_matches';
import { MonitorLocCheckGroup } from '..';

// Builds a minimal Elasticsearch response in the shape consumed by fullyMatchingIds: one
// monitor bucket ('my-monitor') with one location bucket ('my-location'), holding the
// given latest summary document and the latest document that matched the current filters.
const mockQueryResult = (opts: { latestSummary: any; latestMatching: any }) => {
return {
aggregations: {
monitor: {
buckets: [
{
key: 'my-monitor',
location: {
buckets: [
{
key: 'my-location',
summaries: {
latest: {
hits: {
hits: [
{
_source: opts.latestSummary,
},
],
},
},
},
latest_matching: {
top: {
hits: {
hits: [
{
_source: opts.latestMatching,
},
],
},
},
},
},
],
},
},
],
},
},
};
};

describe('fully matching IDs', () => {
it('should exclude items whose latest result does not match', () => {
const queryRes = mockQueryResult({
latestSummary: {
'@timestamp': '2020-06-04T12:39:54.698-0500',
monitor: {
check_group: 'latest-summary-check-group',
},
summary: {
up: 1,
down: 0,
},
},
latestMatching: {
'@timestamp': '2019-06-04T12:39:54.698-0500',
summary: {
up: 1,
down: 0,
},
},
});
const res = fullyMatchingIds(queryRes, undefined);
const expected = new Map<string, MonitorLocCheckGroup[]>();
expect(res).toEqual(expected);
});

it('should include items whose latest result does match', () => {
const queryRes = mockQueryResult({
latestSummary: {
'@timestamp': '2020-06-04T12:39:54.698-0500',
monitor: {
check_group: 'latest-summary-check-group',
},
summary: {
up: 1,
down: 0,
},
},
latestMatching: {
'@timestamp': '2020-06-04T12:39:54.698-0500',
summary: {
up: 1,
down: 0,
},
},
});
const res = fullyMatchingIds(queryRes, undefined);
const expected = new Map<string, MonitorLocCheckGroup[]>();
expected.set('my-monitor', [
{
checkGroup: 'latest-summary-check-group',
location: 'my-location',
monitorId: 'my-monitor',
status: 'up',
summaryTimestamp: '2020-06-04T12:39:54.698-0500',
},
]);
expect(res).toEqual(expected);
});
});
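
A natural follow-on case, not part of this commit, would exercise the statusFilter branch changed in refine_potential_matches.ts further down. This is only a sketch: it assumes the status derived from the summary is 'down' whenever summary.down > 0, so an 'up' filter should exclude the monitor, and it would live inside the describe block above.

// Sketch only (not in this commit): exercises the statusFilter branch, assuming the
// status derived from the summary is 'down' whenever summary.down > 0.
it('should exclude items that are down when the status filter is up', () => {
  const queryRes = mockQueryResult({
    latestSummary: {
      '@timestamp': '2020-06-04T12:39:54.698-0500',
      monitor: {
        check_group: 'latest-summary-check-group',
      },
      summary: {
        up: 0,
        down: 1,
      },
    },
    latestMatching: {
      '@timestamp': '2020-06-04T12:39:54.698-0500',
      summary: {
        up: 0,
        down: 1,
      },
    },
  });
  const res = fullyMatchingIds(queryRes, 'up');
  expect(res).toEqual(new Map<string, MonitorLocCheckGroup[]>());
});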
@@ -26,12 +26,12 @@ export const fetchChunk: ChunkFetcher = async (
   searchAfter: any,
   size: number
 ): Promise<ChunkResult> => {
-  const { monitorIds, checkGroups, searchAfter: foundSearchAfter } = await findPotentialMatches(
+  const { monitorIds, searchAfter: foundSearchAfter } = await findPotentialMatches(
     queryContext,
     searchAfter,
     size
   );
-  const matching = await refinePotentialMatches(queryContext, monitorIds, checkGroups);
+  const matching = await refinePotentialMatches(queryContext, monitorIds);

   return {
     monitorGroups: matching,
@@ -8,12 +8,8 @@ import { get, set } from 'lodash';
 import { CursorDirection } from '../../../../common/runtime_types';
 import { QueryContext } from './query_context';

-// This is the first phase of the query. In it, we find the most recent check groups that matched the given query.
-// Note that these check groups may not be the most recent groups for the matching monitor ID! We'll filter those
 /**
- * This is the first phase of the query. In it, we find the most recent check groups that matched the given query.
- * Note that these check groups may not be the most recent groups for the matching monitor ID. They'll be filtered
- * out in the next phase.
+ * This is the first phase of the query. In it, we find all monitor IDs that have ever matched the given filters.
  * @param queryContext the data and resources needed to perform the query
  * @param searchAfter indicates where Elasticsearch should continue querying on subsequent requests, if at all
  * @param size the minimum size of the matches to chunk
@@ -24,29 +20,14 @@ export const findPotentialMatches = async (
   size: number
 ) => {
   const queryResult = await query(queryContext, searchAfter, size);
-  const checkGroups = new Set<string>();
   const monitorIds: string[] = [];
   get<any>(queryResult, 'aggregations.monitors.buckets', []).forEach((b: any) => {
     const monitorId = b.key.monitor_id;
     monitorIds.push(monitorId);
-
-    // Doc count can be zero if status filter optimization does not match
-    if (b.doc_count > 0) {
-      // Here we grab the most recent 2 check groups per location and add them to the list.
-      // Why 2? Because the most recent one may be a partial result from mode: all, and hence not match a summary doc.
-      b.locations.buckets.forEach((lb: any) => {
-        lb.ips.buckets.forEach((ib: any) => {
-          ib.top.hits.hits.forEach((h: any) => {
-            checkGroups.add(h._source.monitor.check_group);
-          });
-        });
-      });
-    }
   });

   return {
     monitorIds,
-    checkGroups,
     searchAfter: queryResult.aggregations?.monitors?.after_key,
   };
 };
@@ -89,29 +70,6 @@ const queryBody = async (queryContext: QueryContext, searchAfter: any, size: number
           },
         ],
       },
-      aggs: {
-        // Here we grab the most recent 2 check groups per location.
-        // Why 2? Because the most recent one may not be for a summary, it may be incomplete.
-        locations: {
-          terms: { field: 'observer.geo.name', missing: '__missing__' },
-          aggs: {
-            ips: {
-              terms: { field: 'monitor.ip', missing: '0.0.0.0' },
-              aggs: {
-                top: {
-                  top_hits: {
-                    sort: [{ '@timestamp': 'desc' }],
-                    _source: {
-                      includes: ['monitor.check_group', '@timestamp'],
-                    },
-                    size: 2,
-                  },
-                },
-              },
-            },
-          },
-        },
-      },
     },
   },
 };
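
The portion of queryBody that survives this change is collapsed above. Judging from the fields the new parsing code reads (b.key.monitor_id and aggregations.monitors.after_key), the remaining aggregation is presumably a composite aggregation keyed only on monitor.id. A hypothetical sketch, not the literal source:

// Hypothetical sketch of the aggregation shape implied by the parsing code above; the
// monitors/monitor_id names and the `after` cursor are inferred from `b.key.monitor_id`
// and `aggregations.monitors.after_key`, not copied from the collapsed source.
const potentialMatchesAggs = (size: number, searchAfter?: any) => ({
  monitors: {
    composite: {
      size,
      sources: [{ monitor_id: { terms: { field: 'monitor.id' } } }],
      // only supplied when paging past the first chunk
      ...(searchAfter ? { after: searchAfter } : {}),
    },
  },
});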
@@ -18,18 +18,14 @@ import { MonitorGroups, MonitorLocCheckGroup } from './fetch_page';
 // check groups for their associated monitor IDs. If not, it discards the result.
 export const refinePotentialMatches = async (
   queryContext: QueryContext,
-  potentialMatchMonitorIDs: string[],
-  potentialMatchCheckGroups: Set<string>
+  potentialMatchMonitorIDs: string[]
 ): Promise<MonitorGroups[]> => {
   if (potentialMatchMonitorIDs.length === 0) {
     return [];
   }

-  const recentGroupsMatchingStatus = await fullyMatchingIds(
-    queryContext,
-    potentialMatchMonitorIDs,
-    potentialMatchCheckGroups
-  );
+  const queryResult = await query(queryContext, potentialMatchMonitorIDs);
+  const recentGroupsMatchingStatus = await fullyMatchingIds(queryResult, queryContext.statusFilter);

   // Return the monitor groups filtering out potential matches that weren't current
   const matches: MonitorGroups[] = potentialMatchMonitorIDs
@@ -49,15 +45,9 @@
   return matches;
 };

-const fullyMatchingIds = async (
-  queryContext: QueryContext,
-  potentialMatchMonitorIDs: string[],
-  potentialMatchCheckGroups: Set<string>
-) => {
-  const mostRecentQueryResult = await mostRecentCheckGroups(queryContext, potentialMatchMonitorIDs);
-
+export const fullyMatchingIds = (queryResult: any, statusFilter?: string) => {
   const matching = new Map<string, MonitorLocCheckGroup[]>();
-  MonitorLoop: for (const monBucket of mostRecentQueryResult.aggregations.monitor.buckets) {
+  MonitorLoop: for (const monBucket of queryResult.aggregations.monitor.buckets) {
     const monitorId: string = monBucket.key;
     const groups: MonitorLocCheckGroup[] = [];

@@ -80,7 +70,7 @@

       // This monitor doesn't match, so just skip ahead and don't add it to the output
       // Only skip in case of up statusFilter, for a monitor to be up, all checks should be up
-      if (queryContext?.statusFilter === 'up' && queryContext.statusFilter !== status) {
+      if (statusFilter === 'up' && statusFilter !== status) {
         continue MonitorLoop;
       }

@@ -102,7 +92,7 @@
   return matching;
 };

-export const mostRecentCheckGroups = async (
+export const query = async (
   queryContext: QueryContext,
   potentialMatchMonitorIDs: string[]
 ): Promise<any> => {
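
Taken together, these changes split the Elasticsearch round trip (query, formerly mostRecentCheckGroups) from the pure filtering step (fullyMatchingIds), which is what lets the new test file above drive fullyMatchingIds with a hand-built response. A minimal sketch of the resulting wiring, mirroring the updated refinePotentialMatches:

// Sketch only, assuming query, fullyMatchingIds, and QueryContext are in scope as in
// the diff above; this mirrors the new two-step flow rather than adding anything to it.
const refineSketch = async (queryContext: QueryContext, monitorIds: string[]) => {
  // async Elasticsearch round trip (formerly mostRecentCheckGroups, now exported as query)
  const queryResult = await query(queryContext, monitorIds);
  // pure, synchronous filtering: the part covered by the new unit tests
  return fullyMatchingIds(queryResult, queryContext.statusFilter);
};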
