From 067757edc54aa24fd2a491399738486c7791bc2d Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Wed, 30 May 2018 22:30:24 +0200
Subject: [PATCH] test: check that benchmark tests are minimal

Check that benchmark tests do not run longer than necessary by
confirming that they produce only one set of configs to report per
benchmark file.
---
 test/common/benchmark.js | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/test/common/benchmark.js b/test/common/benchmark.js
index 1fd4476ba55a35..f8952a8b9ad647 100644
--- a/test/common/benchmark.js
+++ b/test/common/benchmark.js
@@ -20,10 +20,27 @@ function runBenchmark(name, args, env) {
 
   const mergedEnv = Object.assign({}, process.env, env);
 
-  const child = fork(runjs, argv, { env: mergedEnv });
+  const child = fork(runjs, argv, { env: mergedEnv, stdio: 'pipe' });
+  child.stdout.setEncoding('utf8');
+
+  let stdout = '';
+  child.stdout.on('data', (line) => {
+    stdout += line;
+  });
+
   child.on('exit', (code, signal) => {
     assert.strictEqual(code, 0);
     assert.strictEqual(signal, null);
+    // Make sure that the benchmark file is being sent settings that cause it
+    // to run only one set of options. This keeps the benchmark tests from
+    // taking a long time to run. As a result, each benchmark file should
+    // produce exactly three lines of output: a blank line, a line with the
+    // name of the benchmark file, and a line with the single result obtained
+    // from running the benchmark file.
+    assert.ok(
+      /^(?:\n.+?\n.+?\n)+$/.test(stdout),
+      `benchmark file not running exactly one configuration in test: ${stdout}`
+    );
   });
 }
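
For reference, a minimal sketch of how a benchmark test file might invoke this
helper so that only one configuration runs per benchmark file, which is the
output shape the new regex asserts on. The category name 'buffers', the
key=value option pairs, the '../common' paths, and the
NODEJS_BENCHMARK_ZERO_ALLOWED variable are illustrative assumptions and not
part of this patch.

'use strict';
require('../common');
const runBenchmark = require('../common/benchmark');

// Pin every benchmark parameter to a single value so that each benchmark file
// in the category prints only a blank line, its file name, and one result
// line: the three lines per file that the regex in runBenchmark() expects.
runBenchmark('buffers',
             ['len=256', 'n=1'],
             { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 });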