test: ensure global options for benchmark tests can be set in bazel (#34753)

Previously, when the benchmark tests ran outside of Bazel, developers
could control how the tests run through command-line options such as
`--dryrun`. This no longer works reliably in Bazel, where command-line
arguments are not passed to the test executable.

To keep the global options usable (as they can still be useful in some
cases), we now pass them through the Bazel `--test_env` flag. This removes
the code needed to read the command line, but preserves the same
flexibility in a Bazel-idiomatic way.
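
For example, the sample size can now be configured as follows (the target
label is illustrative; `PERF_SAMPLE_SIZE` is one of the variables documented
in the benchmarks README):

```bash
# Override the Benchpress sample size for a single benchmark run.
yarn bazel test modules/benchmarks/src/tree/baseline:perf --test_env=PERF_SAMPLE_SIZE=40
```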

PR Close #34753
Paul Gschwendtner 2020-01-16 14:34:03 +01:00 committed by Andrew Kushnir
parent 9c55fc0f13
commit 53c4c8de8f
6 changed files with 40 additions and 54 deletions

View File

@@ -23,3 +23,23 @@ yarn bazel test modules/benchmarks/...
The `*_aot.ts` files are used as entry-points within Google to run the benchmark
tests. These are still built as part of the corresponding `ng_module` rule.
## Specifying benchmark options
The following options control how a given benchmark target runs. They can be set
through [test environment variables](https://docs.bazel.build/versions/master/command-line-reference.html#flag--test_env):
* `PERF_SAMPLE_SIZE`: Benchpress performs measurements until a simple linear regression over
  the specified number of samples predicts that `scriptTime` no longer decreases.
  Defaults to `20` samples.
* `PERF_FORCE_GC`: If set to `true`, `@angular/benchpress` will run the garbage collector
  before and after performing measurements. Benchpress will measure and report the garbage
  collection time.
* `PERF_DRYRUN`: If set to `true`, no results are printed or stored in a `json` file. Also,
  Benchpress performs only a single measurement (skipping the regression-based sampling).
Here is an example command that sets the `PERF_DRYRUN` option:
```bash
yarn bazel test modules/benchmarks/src/tree/baseline:perf --test_env=PERF_DRYRUN=true
```
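Multiple options can be combined by passing `--test_env` once per variable, for example:

```bash
# Force GC measurement and raise the sample size in the same run.
yarn bazel test modules/benchmarks/src/tree/baseline:perf \
  --test_env=PERF_FORCE_GC=true \
  --test_env=PERF_SAMPLE_SIZE=40
```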

View File

@@ -7,7 +7,7 @@ load("//tools:defaults.bzl", "protractor_web_test_suite")
unless explicitly requested.
"""
def benchmark_test(name, server, deps, tags = []):
def benchmark_test(name, server, tags = [], **kwargs):
protractor_web_test_suite(
name = name,
configuration = "//:protractor-perf.conf.js",
@@ -19,7 +19,5 @@ def benchmark_test(name, server, deps, tags = []):
# Benchmark targets should not run on CI by default.
tags = tags + ["manual"],
test_suite_tags = ["manual"],
deps = [
"@npm//yargs",
] + deps,
**kwargs
)

View File

@@ -6,12 +6,10 @@ load("//tools:defaults.bzl", "protractor_web_test_suite")
with `@angular/benchpress`.
"""
def e2e_test(name, server, deps, **kwargs):
def e2e_test(name, server, **kwargs):
protractor_web_test_suite(
name = name,
on_prepare = "//modules/benchmarks:start-server.js",
server = server,
# `yargs` is needed as runtime dependency for the e2e utils.
deps = ["@npm//yargs"] + deps,
**kwargs
)

View File

@@ -8,28 +8,10 @@
/* tslint:disable:no-console */
import {browser} from 'protractor';
const yargs = require('yargs');
import * as webdriver from 'selenium-webdriver';
let cmdArgs: {'bundles': boolean}|undefined;
declare var expect: any;
export function readCommandLine(extraOptions?: {[key: string]: any}) {
const options: {[key: string]: any} = {
'bundles': {describe: 'Whether to use the angular bundles or not.', default: false}
};
if (extraOptions) {
for (const key in extraOptions) {
options[key] = extraOptions[key];
}
}
cmdArgs = yargs.usage('Angular e2e test options.').options(options).help('ng-help').wrap(40).argv;
return cmdArgs;
}
export function openBrowser(config: {
url: string,
params?: {name: string, value: any}[],
@@ -38,13 +20,10 @@ export function openBrowser(config: {
if (config.ignoreBrowserSynchronization) {
browser.ignoreSynchronization = true;
}
let params = config.params || [];
if (cmdArgs !== undefined && !params.some((param) => param.name === 'bundles')) {
params = params.concat([{name: 'bundles', value: cmdArgs.bundles}]);
}
const urlParams: string[] = [];
params.forEach((param) => { urlParams.push(param.name + '=' + param.value); });
if (config.params) {
config.params.forEach((param) => urlParams.push(param.name + '=' + param.value));
}
const url = encodeURI(config.url + '?' + urlParams.join('&'));
browser.get(url);
if (config.ignoreBrowserSynchronization) {

View File

@@ -11,20 +11,16 @@ const nodeUuid = require('node-uuid');
import * as fs from 'fs-extra';
import {SeleniumWebDriverAdapter, Options, JsonFileReporter, Validator, RegressionSlopeValidator, ConsoleReporter, SizeValidator, MultiReporter, MultiMetric, Runner, StaticProvider} from '@angular/benchpress';
import {readCommandLine as readE2eCommandLine, openBrowser} from './e2e_util';
import {openBrowser} from './e2e_util';
let cmdArgs: {'sample-size': number, 'force-gc': boolean, 'dryrun': boolean, 'bundles': boolean};
let runner: Runner;
// Note: Keep the `modules/benchmarks/README.md` file in sync with the supported options.
const globalOptions = {
sampleSize: process.env.PERF_SAMPLE_SIZE || 20,
forceGc: process.env.PERF_FORCE_GC === 'true',
dryRun: process.env.PERF_DRYRUN === 'true',
};
export function readCommandLine() {
cmdArgs = <any>readE2eCommandLine({
'sample-size': {describe: 'Used for perf: sample size.', default: 20},
'force-gc': {describe: 'Used for perf: force gc.', default: false, type: 'boolean'},
'dryrun': {describe: 'If true, only run performance benchmarks once.', default: false},
'bundles': {describe: 'Whether to use the angular bundles or not.', default: false}
});
runner = createBenchpressRunner();
}
const runner = createBenchpressRunner();
export function runBenchmark(config: {
id: string,
@@ -40,15 +36,14 @@ export function runBenchmark(config: {
if (config.setup) {
config.setup();
}
if (!cmdArgs) readCommandLine();
const description: {[key: string]: any} = {'bundles': cmdArgs.bundles};
config.params.forEach((param) => { description[param.name] = param.value; });
const description: {[key: string]: any} = {};
config.params.forEach((param) => description[param.name] = param.value);
return runner.sample({
id: config.id,
execute: config.work,
prepare: config.prepare,
microMetrics: config.microMetrics,
providers: [{provide: Options.SAMPLE_DESCRIPTION, useValue: description}]
providers: [{provide: Options.SAMPLE_DESCRIPTION, useValue: {}}]
});
}
@@ -61,14 +56,14 @@ function createBenchpressRunner(): Runner {
fs.ensureDirSync(resultsFolder);
const providers: StaticProvider[] = [
SeleniumWebDriverAdapter.PROTRACTOR_PROVIDERS,
{provide: Options.FORCE_GC, useValue: cmdArgs['force-gc']},
{provide: Options.FORCE_GC, useValue: globalOptions.forceGc},
{provide: Options.DEFAULT_DESCRIPTION, useValue: {'runId': runId}}, JsonFileReporter.PROVIDERS,
{provide: JsonFileReporter.PATH, useValue: resultsFolder}
];
if (!cmdArgs['dryrun']) {
if (!globalOptions.dryRun) {
providers.push({provide: Validator, useExisting: RegressionSlopeValidator});
providers.push(
{provide: RegressionSlopeValidator.SAMPLE_SIZE, useValue: cmdArgs['sample-size']});
{provide: RegressionSlopeValidator.SAMPLE_SIZE, useValue: globalOptions.sampleSize});
providers.push(MultiReporter.provideWith([ConsoleReporter, JsonFileReporter]));
} else {
providers.push({provide: Validator, useExisting: SizeValidator});

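Because the options are now read from plain environment variables (see
`globalOptions` above), Bazel can also inherit them from the invoking shell:
passing `--test_env=NAME` without a value forwards the client's value of
`NAME` to the test. A sketch, reusing the example target from the README:

```bash
# Export the variable locally, then let Bazel forward it to the test runner.
export PERF_DRYRUN=true
yarn bazel test modules/benchmarks/src/tree/baseline:perf --test_env=PERF_DRYRUN
```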
View File

@@ -6,10 +6,6 @@
* found in the LICENSE file at https://angular.io/license
*/
// Make sure that the command line is read as the first thing
// as this could exit node if the help script should be printed.
require('angular/modules/e2e_util/perf_util').readCommandLine();
const CHROME_OPTIONS = {
'args': ['--js-flags=--expose-gc', '--no-sandbox', '--headless', '--disable-dev-shm-usage'],
'perfLoggingPrefs': {