mirror of https://github.com/nodejs/node.git
benchmark: adds groups to better separate benchmarks
Fixes: https://github.com/nodejs/node/issues/26425
Co-Authored-By: Yaman Kassir <mestery@protonmail.com>
PR-URL: https://github.com/nodejs/node/pull/54393
Reviewed-By: Vinícius Lourenço Claro Cardoso <contact@viniciusl.com.br>
parent b7b96282b2
commit 03fe00e3da
@@ -22,27 +22,36 @@ class Benchmark {
     this.name = require.main.filename.slice(__dirname.length + 1);

     // Execution arguments i.e. flags used to run the jobs
-    this.flags = process.env.NODE_BENCHMARK_FLAGS ?
-      process.env.NODE_BENCHMARK_FLAGS.split(/\s+/) :
-      [];
+    this.flags = process.env.NODE_BENCHMARK_FLAGS?.split(/\s+/) ?? [];

     // Parse job-specific configuration from the command line arguments
     const argv = process.argv.slice(2);
     const parsed_args = this._parseArgs(argv, configs, options);

     this.options = parsed_args.cli;
     this.extra_options = parsed_args.extra;
+    this.combinationFilter = typeof options.combinationFilter === 'function' ? options.combinationFilter : allow;
+
+    if (options.byGroups) {
+      this.queue = [];
+      const groupNames = process.env.NODE_RUN_BENCHMARK_GROUPS?.split(',') ?? Object.keys(configs);
+
+      for (const groupName of groupNames) {
+        const config = { ...configs[groupName], group: groupName };
+        const parsed_args = this._parseArgs(argv, config, options);
+
+        this.options = parsed_args.cli;
+        this.extra_options = parsed_args.extra;
+        this.queue = this.queue.concat(this._queue(this.options));
+      }
+    } else {
+      this.queue = this._queue(this.options);
+    }

     if (options.flags) {
       this.flags = this.flags.concat(options.flags);
     }

-    if (typeof options.combinationFilter === 'function')
-      this.combinationFilter = options.combinationFilter;
-    else
-      this.combinationFilter = allow;
-
-    // The configuration list as a queue of jobs
-    this.queue = this._queue(this.options);
-
     if (this.queue.length === 0)
       return;
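For readers skimming the constructor change above: when `byGroups` is set, the list of groups to run comes from the comma-separated `NODE_RUN_BENCHMARK_GROUPS` environment variable, falling back to every key of `configs`, and each group's configuration is parsed and queued separately, tagged with its `group` name. Below is a minimal standalone sketch of just that selection-and-tagging step, outside the `Benchmark` class; the `configs` object is illustrative.

```js
// Standalone sketch of the group selection used in the constructor above.
// Run as-is, or with e.g. NODE_RUN_BENCHMARK_GROUPS=manyHeaders in the environment.
const configs = {
  fewHeaders: { n: [10], len: [1, 5], duration: 5 },
  manyHeaders: { n: [600], len: [1, 100], duration: 5 },
};

// Same expression as in common.js: explicit list from the env var, else all groups.
const groupNames =
  process.env.NODE_RUN_BENCHMARK_GROUPS?.split(',') ?? Object.keys(configs);

for (const groupName of groupNames) {
  // Each group's parameters are expanded on their own and tagged with the group
  // name, so combinations from different groups are never mixed together.
  const config = { ...configs[groupName], group: groupName };
  console.log(config);
}
```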
@@ -4,10 +4,22 @@ const common = require('../common.js');
 const http = require('http');

 const bench = common.createBenchmark(main, {
-  n: [10, 600],
-  len: [1, 100],
-  duration: 5,
-});
+  fewHeaders: {
+    n: [10],
+    len: [1, 5],
+    duration: 5,
+  },
+  mediumHeaders: {
+    n: [50],
+    len: [1, 10],
+    duration: 5,
+  },
+  manyHeaders: {
+    n: [600],
+    len: [1, 100],
+    duration: 5,
+  },
+}, { byGroups: true });

 function main({ len, n, duration }) {
   const headers = {
@@ -15,10 +27,9 @@ function main({ len, n, duration }) {
     'Transfer-Encoding': 'chunked',
   };

-  // TODO(BridgeAR): Change this benchmark to use grouped arguments when
-  // implemented. https://github.com/nodejs/node/issues/26425
-  const Is = [ ...Array(Math.max(n / len, 1)).keys() ];
-  const Js = [ ...Array(len).keys() ];
+  const Is = [...Array(n / len).keys()];
+  const Js = [...Array(len).keys()];

   for (const i of Is) {
     headers[`foo${i}`] = Js.map(() => `some header value ${i}`);
   }
@@ -27,6 +38,7 @@ function main({ len, n, duration }) {
     res.writeHead(200, headers);
     res.end();
   });

   server.listen(0, () => {
     bench.http({
       path: '/',
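In the benchmark body above, `n` header values are spread across `n / len` header names, which is presumably why the old `Math.max(n / len, 1)` guard could be dropped: in every new group, `n` is a whole multiple of `len`. A small sketch of what the loop produces, using values from the `fewHeaders` group (the `buildHeaders` helper is only for illustration):

```js
// Illustrative helper mirroring the Is/Js loop from http/headers.js.
function buildHeaders(n, len) {
  const headers = { 'Transfer-Encoding': 'chunked' };
  const Is = [...Array(n / len).keys()]; // n / len distinct header names
  const Js = [...Array(len).keys()];     // len values per header name
  for (const i of Is) {
    headers[`foo${i}`] = Js.map(() => `some header value ${i}`);
  }
  return headers;
}

// fewHeaders group, n=10, len=5: two names (foo0, foo1), five values each.
console.log(buildHeaders(10, 5));
```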
@@ -272,6 +272,19 @@ process/bench-env.js operation="query" n=1000000: 3,625,787.2150573144
 process/bench-env.js operation="delete" n=1000000: 1,521,131.5742806569
 ```

+#### Grouping benchmarks
+
+Benchmarks can also have groups, giving the developer greater flexibility in differentiating between test cases
+and also helping reduce the time to run the combination of benchmark parameters.
+
+By default, all groups are executed when running the benchmark.
+However, it is possible to specify individual groups by setting the
+`NODE_RUN_BENCHMARK_GROUPS` environment variable when running `compare.js`:
+
+```bash
+NODE_RUN_BENCHMARK_GROUPS=fewHeaders,manyHeaders node http/headers.js
+```
+
 ### Comparing Node.js versions

 To compare the effect of a new Node.js version use the `compare.js` tool. This
@@ -492,6 +505,23 @@ The arguments of `createBenchmark` are:
 * `options` {Object} The benchmark options. Supported options:
   * `flags` {Array} Contains node-specific command line flags to pass to
     the child process.
+
+  * `byGroups` {Boolean} option for processing `configs` by groups:
+    ```js
+    const bench = common.createBenchmark(main, {
+      groupA: {
+        source: ['array'],
+        len: [10, 2048],
+        n: [50],
+      },
+      groupB: {
+        source: ['buffer', 'string'],
+        len: [2048],
+        n: [50, 2048],
+      }
+    }, { byGroups: true });
+    ```
+
   * `combinationFilter` {Function} Has a single parameter which is an object
     containing a combination of benchmark parameters. It should return `true`
     or `false` to indicate whether the combination should be included or not.
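The `combinationFilter` option documented above (and now normalized to a ternary in `common.js`) receives one parameter combination at a time and returns whether to keep it. A hedged sketch of the idea, detached from `createBenchmark`; the predicate and sample combinations are invented for illustration:

```js
// Hypothetical filter: drop combinations whose workload would be redundant or too large.
const combinationFilter = ({ source, len, n }) =>
  !(source === 'string' && len * n > 1e5);

// Roughly what the runner does with it: keep only combinations the filter accepts.
const combinations = [
  { source: 'array', len: 10, n: 50 },
  { source: 'buffer', len: 2048, n: 50 },
  { source: 'string', len: 2048, n: 2048 }, // 2048 * 2048 > 1e5, filtered out
];
console.log(combinations.filter(combinationFilter));
```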
@@ -27,16 +27,27 @@ function runBenchmark(name, env) {
   child.on('exit', (code, signal) => {
     assert.strictEqual(code, 0);
     assert.strictEqual(signal, null);

     // This bit makes sure that each benchmark file is being sent settings such
     // that the benchmark file runs just one set of options. This helps keep the
-    // benchmark tests from taking a long time to run. Therefore, each benchmark
-    // file should result in three lines of output: a blank line, a line with
-    // the name of the benchmark file, and a line with the only results that we
-    // get from testing the benchmark file.
-    assert.ok(
-      /^(?:\n.+?\n.+?\n)+$/.test(stdout),
-      `benchmark file not running exactly one configuration in test: ${stdout}`,
-    );
+    // benchmark tests from taking a long time to run. Therefore, stdout should be composed as follows:
+    // The first and last lines should be empty.
+    // Each test should be separated by a blank line.
+    // The first line of each test should contain the test's name.
+    // The second line of each test should contain the configuration for the test.
+    // If the test configuration is not a group, there should be exactly two lines.
+    // Otherwise, it is possible to have more than two lines.
+
+    const splitTests = stdout.split(/\n\s*\n/);
+
+    for (let testIdx = 1; testIdx < splitTests.length - 1; testIdx++) {
+      const lines = splitTests[testIdx].split('\n');
+      assert.ok(/.+/.test(lines[0]));
+
+      if (!lines[1].includes('group="')) {
+        assert.strictEqual(lines.length, 2, `benchmark file not running exactly one configuration in test: ${stdout}`);
+      }
+    }
   });
 }
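The reworked assertion above no longer matches the whole stdout with a single regular expression; it splits stdout on blank lines and checks the shape of each inner chunk, which allows grouped benchmarks to print more than two lines per run. A standalone sketch of that check against a fabricated stdout sample (the sample lines are invented for illustration):

```js
// Standalone illustration of the stdout-shape check in the updated test.
const assert = require('assert');

// Fabricated sample: empty first/last lines, tests separated by blank lines,
// each test starting with the benchmark file name.
const stdout = [
  '',
  'http/headers.js',
  'n=10 len=1 duration=5 group="fewHeaders": 1000',
  '',
  'buffers/buffer-compare.js',
  'n=1000 size=16: 500000',
  '',
  'http/headers.js',
  'n=600 len=100 duration=5 group="manyHeaders": 900',
  '',
].join('\n');

const splitTests = stdout.split(/\n\s*\n/);

// Skip the leading and trailing chunks, mirroring the loop bounds in the test.
for (let testIdx = 1; testIdx < splitTests.length - 1; testIdx++) {
  const lines = splitTests[testIdx].split('\n');
  assert.ok(/.+/.test(lines[0]));        // a benchmark name is present
  if (!lines[1].includes('group="')) {
    assert.strictEqual(lines.length, 2); // non-grouped runs print exactly two lines
  }
}
console.log('sample stdout has the expected shape');
```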