feat(benchpress): add custom user metric to benchpress
This is a continuation of #7440 (@jeffbcross). Closes #9229
This commit is contained in:
parent
1eaa193c51
commit
6686bc62f6
|
@ -0,0 +1,3 @@
|
|||
// Dart entry-point stub for the page-load e2e test.
// The benchmark is driven from the JS/TS side; this empty main() only
// exists so the Dart build/transformer has a valid entry point.
library benchmarks.e2e_test.page_load_perf;

main() {}
|
|
@ -0,0 +1,37 @@
|
|||
import {verifyNoBrowserErrors} from 'angular2/src/testing/perf_util';
|
||||
|
||||
describe('ng2 largetable benchmark', function() {
|
||||
|
||||
var URL = 'benchmarks/src/page_load/page_load.html';
|
||||
var runner = global['benchpressRunner'];
|
||||
|
||||
afterEach(verifyNoBrowserErrors);
|
||||
|
||||
|
||||
it('should log the load time', function(done) {
|
||||
runner.sample({
|
||||
id: 'loadTime',
|
||||
prepare: null,
|
||||
microMetrics: null,
|
||||
userMetrics:
|
||||
{loadTime: 'The time in milliseconds to bootstrap', someConstant: 'Some constant'},
|
||||
bindings: [
|
||||
benchpress.bind(benchpress.SizeValidator.SAMPLE_SIZE)
|
||||
.toValue(2),
|
||||
benchpress.bind(benchpress.RegressionSlopeValidator.SAMPLE_SIZE).toValue(2),
|
||||
benchpress.bind(benchpress.RegressionSlopeValidator.METRIC).toValue('someConstant')
|
||||
],
|
||||
execute: () => { browser.get(URL); }
|
||||
})
|
||||
.then(report => {
|
||||
expect(report.completeSample.map(val => val.values.someConstant)
|
||||
.every(v => v === 1234567890))
|
||||
.toBe(true);
|
||||
expect(report.completeSample.map(val => val.values.loadTime)
|
||||
.filter(t => typeof t === 'number' && t > 0)
|
||||
.length)
|
||||
.toBeGreaterThan(1);
|
||||
})
|
||||
.then(done);
|
||||
});
|
||||
});
|
|
@ -26,6 +26,7 @@ transformers:
|
|||
- web/src/naive_infinite_scroll/index.dart
|
||||
- web/src/static_tree/tree_benchmark.dart
|
||||
- web/src/tree/tree_benchmark.dart
|
||||
- web/src/page_load/page_load.dart
|
||||
- $dart2js:
|
||||
$include: web/src/**
|
||||
minify: false
|
||||
|
|
|
@ -29,6 +29,9 @@
|
|||
<li>
|
||||
<a href="costs/index.html">Benchmarks measuring costs of things</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="page_load/page_load.html">Benchmark measuring time to bootstrap</a>
|
||||
</li>
|
||||
</ul>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
<!doctype html>
<html>
<body>

<h2>Angular2 page load benchmark</h2>

<div>
  <app></app>
</div>

<!-- $SCRIPTS$ is a placeholder substituted by the benchmark build/server
     with the script tags that load the app (page_load.ts bundle). -->
$SCRIPTS$
</body>
</html>
|
|
@ -0,0 +1,11 @@
|
|||
import {Component} from 'angular2/core';
|
||||
import {bootstrap} from 'angular2/platform/browser';
|
||||
|
||||
@Component({selector: 'app', template: '<h1>Page Load Time</h1>'})
|
||||
class App {
|
||||
}
|
||||
|
||||
bootstrap(App).then(() => {
|
||||
(<any>window).loadTime = Date.now() - performance.timing.navigationStart;
|
||||
(<any>window).someConstant = 1234567890;
|
||||
});
|
|
@ -17,6 +17,7 @@ export {Runner} from './src/runner';
|
|||
export {Options} from './src/common_options';
|
||||
export {MeasureValues} from './src/measure_values';
|
||||
export {MultiMetric} from './src/metric/multi_metric';
|
||||
export {UserMetric} from './src/metric/user_metric';
|
||||
export {MultiReporter} from './src/reporter/multi_reporter';
|
||||
|
||||
export {bind, provide, Injector, ReflectiveInjector, OpaqueToken} from '@angular/core/src/di';
|
||||
|
|
|
@ -160,6 +160,43 @@ runner.sample({
|
|||
When looking into the DevTools Timeline, we see a marker as well:
|
||||
![Marked Timeline](marked_timeline.png)
|
||||
|
||||
### Custom Metrics Without Using `console.time`
|
||||
|
||||
It's also possible to measure any "user metric" within the browser
|
||||
by setting a numeric value on the `window` object. For example:
|
||||
|
||||
```js
|
||||
bootstrap(App)
|
||||
.then(() => {
|
||||
window.timeToBootstrap = Date.now() - performance.timing.navigationStart;
|
||||
});
|
||||
```
|
||||
|
||||
A test driver for this user metric could be written as follows:
|
||||
|
||||
```js
|
||||
|
||||
describe('home page load', function() {
|
||||
it('should log load time for a 2G connection', done => {
|
||||
runner.sample({
|
||||
execute: () => {
|
||||
browser.get(`http://localhost:8080`);
|
||||
},
|
||||
userMetrics: {
|
||||
timeToBootstrap: 'The time in milliseconds to bootstrap'
|
||||
},
|
||||
bindings: [
|
||||
bind(RegressionSlopeValidator.METRIC).toValue('timeToBootstrap')
|
||||
]
|
||||
}).then(done);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
Using this strategy, benchpress will wait until the specified property name,
|
||||
`timeToBootstrap` in this case, is defined as a number on the `window` object
|
||||
inside the application under test.
|
||||
|
||||
# Smoothness Metrics
|
||||
|
||||
Benchpress can also measure the "smoothness" of scrolling and animations. In order to do that, the following set of metrics can be collected by benchpress:
|
||||
|
|
|
@ -26,6 +26,8 @@ export class Options {
|
|||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get MICRO_METRICS() { return _MICRO_METRICS; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get USER_METRICS() { return _USER_METRICS; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get RECEIVED_DATA() { return _RECEIVED_DATA; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get REQUEST_COUNT() { return _REQUEST_COUNT; }
|
||||
|
@ -42,6 +44,7 @@ var _EXECUTE = new OpaqueToken('Options.execute');
|
|||
var _CAPABILITIES = new OpaqueToken('Options.capabilities');
|
||||
var _USER_AGENT = new OpaqueToken('Options.userAgent');
|
||||
var _MICRO_METRICS = new OpaqueToken('Options.microMetrics');
|
||||
var _USER_METRICS = new OpaqueToken('Options.userMetrics');
|
||||
var _NOW = new OpaqueToken('Options.now');
|
||||
var _WRITE_FILE = new OpaqueToken('Options.writeFile');
|
||||
var _RECEIVED_DATA = new OpaqueToken('Options.receivedData');
|
||||
|
@ -54,6 +57,7 @@ var _DEFAULT_PROVIDERS = [
|
|||
{provide: _FORCE_GC, useValue: false},
|
||||
{provide: _PREPARE, useValue: false},
|
||||
{provide: _MICRO_METRICS, useValue: {}},
|
||||
{provide: _USER_METRICS, useValue: {}},
|
||||
{provide: _NOW, useValue: () => DateWrapper.now()},
|
||||
{provide: _RECEIVED_DATA, useValue: false},
|
||||
{provide: _REQUEST_COUNT, useValue: false},
|
||||
|
|
|
@ -0,0 +1,63 @@
|
|||
import {bind, Provider, OpaqueToken} from 'angular2/src/core/di';
|
||||
import {PromiseWrapper, TimerWrapper} from 'angular2/src/facade/async';
|
||||
import {StringMapWrapper} from 'angular2/src/facade/collection';
|
||||
import {isNumber} from 'angular2/src/facade/lang';
|
||||
|
||||
import {Metric} from '../metric';
|
||||
import {Options} from '../common_options';
|
||||
import {WebDriverAdapter} from '../web_driver_adapter';
|
||||
|
||||
/**
 * A benchpress metric that reads "user metrics" from the page under test:
 * the application signals a measurement by assigning a numeric value to a
 * `window` property whose name is a key of the configured metrics map.
 */
export class UserMetric extends Metric {
  // TODO(tbosch): use static values when our transpiler supports them
  // NOTE(review): other code in this commit references `UserMetric.BINDINGS`;
  // only `PROVIDERS` is defined here — confirm `BINDINGS` resolves (e.g. via
  // the `Metric` base class) or align the names.
  static get PROVIDERS(): Provider[] { return _PROVIDERS; }

  // _userMetrics: metric name -> human-readable description (see describe()).
  // _wdAdapter: used to evaluate scripts inside the browser under test.
  constructor(private _userMetrics: {[key: string]: string}, private _wdAdapter: WebDriverAdapter) {
    super();
  }

  /**
   * Starts measuring. No browser-side setup is needed for user metrics,
   * so this resolves immediately.
   */
  beginMeasure(): Promise<any> { return PromiseWrapper.resolve(true); }

  /**
   * Ends measuring: polls the browser every 100ms until every configured
   * metric name holds a numeric value on `window`, then deletes those
   * properties (so the next sample starts clean) and resolves with a
   * name -> value map.
   *
   * NOTE(review): polls indefinitely if a metric never appears — presumably
   * an outer sampler timeout bounds this; confirm. The `restart` flag is
   * currently unused.
   */
  endMeasure(restart: boolean): Promise<{[key: string]: any}> {
    let completer = PromiseWrapper.completer<{[key: string]: any}>();
    let adapter = this._wdAdapter;
    let names = StringMapWrapper.keys(this._userMetrics);

    function getAndClearValues() {
      // Read all metric properties in parallel; `name` comes from the
      // benchmark configuration, and is interpolated into the script verbatim.
      PromiseWrapper.all(names.map(name => adapter.executeScript(`return window.${name}`)))
          .then((values: any[]) => {
            if (values.every(isNumber)) {
              // All metrics present: clear them, then resolve with the map.
              PromiseWrapper.all(names.map(name => adapter.executeScript(`delete window.${name}`)))
                  .then((_: any[]) => {
                    let map = StringMapWrapper.create();
                    for (let i = 0, n = names.length; i < n; i++) {
                      StringMapWrapper.set(map, names[i], values[i]);
                    }
                    completer.resolve(map);
                  }, completer.reject);
            } else {
              // Not all metrics set yet — retry after 100ms.
              TimerWrapper.setTimeout(getAndClearValues, 100);
            }
          }, completer.reject);
    }
    getAndClearValues();
    return completer.promise;
  }

  /**
   * Describes the metrics provided by this metric implementation.
   * (e.g. units, ...)
   */
  describe(): {[key: string]: any} { return this._userMetrics; }
}
|
||||
|
||||
// DI providers: build a UserMetric from the configured user-metric map
// (Options.USER_METRICS) and the active WebDriverAdapter.
var _PROVIDERS = [
  bind(UserMetric)
      .toFactory((userMetrics, wdAdapter) => new UserMetric(userMetrics, wdAdapter),
                 [Options.USER_METRICS, WebDriverAdapter])
];
|
|
@ -10,6 +10,7 @@ import {SizeValidator} from './validator/size_validator';
|
|||
import {Validator} from './validator';
|
||||
import {PerflogMetric} from './metric/perflog_metric';
|
||||
import {MultiMetric} from './metric/multi_metric';
|
||||
import {UserMetric} from './metric/user_metric';
|
||||
import {ChromeDriverExtension} from './webdriver/chrome_driver_extension';
|
||||
import {FirefoxDriverExtension} from './webdriver/firefox_driver_extension';
|
||||
import {IOsDriverExtension} from './webdriver/ios_driver_extension';
|
||||
|
@ -33,8 +34,8 @@ export class Runner {
|
|||
this._defaultProviders = defaultProviders;
|
||||
}
|
||||
|
||||
sample({id, execute, prepare, microMetrics, providers}:
|
||||
{id: string, execute?: any, prepare?: any, microMetrics?: any, providers?: any}):
|
||||
sample({id, execute, prepare, microMetrics, providers, userMetrics}:
|
||||
{id: string, execute?: any, prepare?: any, microMetrics?: any, providers?: any, userMetrics?: any}):
|
||||
Promise<SampleState> {
|
||||
var sampleProviders = [
|
||||
_DEFAULT_PROVIDERS,
|
||||
|
@ -48,6 +49,9 @@ export class Runner {
|
|||
if (isPresent(microMetrics)) {
|
||||
sampleProviders.push({provide: Options.MICRO_METRICS, useValue: microMetrics});
|
||||
}
|
||||
if (isPresent(userMetrics)) {
|
||||
sampleProviders.push({provide: Options.USER_METRICS, useValue: userMetrics});
|
||||
}
|
||||
if (isPresent(providers)) {
|
||||
sampleProviders.push(providers);
|
||||
}
|
||||
|
@ -89,10 +93,10 @@ var _DEFAULT_PROVIDERS = [
|
|||
FirefoxDriverExtension.PROVIDERS,
|
||||
IOsDriverExtension.PROVIDERS,
|
||||
PerflogMetric.PROVIDERS,
|
||||
UserMetric.BINDINGS,
|
||||
SampleDescription.PROVIDERS,
|
||||
MultiReporter.createBindings([ConsoleReporter]),
|
||||
MultiMetric.createBindings([PerflogMetric]),
|
||||
|
||||
MultiMetric.createBindings([PerflogMetric, UserMetric]),
|
||||
Reporter.bindTo(MultiReporter),
|
||||
Validator.bindTo(RegressionSlopeValidator),
|
||||
WebDriverExtension.bindTo([ChromeDriverExtension, FirefoxDriverExtension, IOsDriverExtension]),
|
||||
|
|
|
@ -0,0 +1,101 @@
|
|||
import {ReflectiveInjector} from "angular2/core";
|
||||
import {
|
||||
afterEach,
|
||||
AsyncTestCompleter,
|
||||
beforeEach,
|
||||
ddescribe,
|
||||
describe,
|
||||
expect,
|
||||
iit,
|
||||
inject,
|
||||
it,
|
||||
xit
|
||||
} from 'angular2/testing_internal';
|
||||
|
||||
import {TimerWrapper} from 'angular2/src/facade/async';
|
||||
import {StringMapWrapper} from 'angular2/src/facade/collection';
|
||||
import {PromiseWrapper} from 'angular2/src/facade/async';
|
||||
import {isPresent, isBlank, Json} from 'angular2/src/facade/lang';
|
||||
|
||||
import {
|
||||
Metric,
|
||||
MultiMetric,
|
||||
PerflogMetric,
|
||||
UserMetric,
|
||||
WebDriverAdapter,
|
||||
WebDriverExtension,
|
||||
PerfLogFeatures,
|
||||
bind,
|
||||
provide,
|
||||
Injector,
|
||||
Options
|
||||
} from 'benchpress/common';
|
||||
|
||||
export function main() {
  // Shared with createMetric() so each test can drive the mock adapter.
  var wdAdapter: MockDriverAdapter;

  // Builds a UserMetric via DI backed by a fresh MockDriverAdapter.
  // NOTE(review): `perfLogs` is accepted but never used here; `perfLogFeatures`
  // only gets a default — neither reaches UserMetric. Presumably kept for
  // signature parity with the other metric specs; confirm.
  function createMetric(perfLogs, perfLogFeatures,
                        {userMetrics}: {userMetrics?: {[key: string]: string}} = {}): UserMetric {
    if (isBlank(perfLogFeatures)) {
      perfLogFeatures =
          new PerfLogFeatures({render: true, gc: true, frameCapture: true, userTiming: true});
    }
    if (isBlank(userMetrics)) {
      userMetrics = StringMapWrapper.create();
    }
    wdAdapter = new MockDriverAdapter();
    // NOTE(review): `UserMetric.BINDINGS` — the class defines a `PROVIDERS`
    // getter; confirm BINDINGS is inherited/aliased, otherwise this is
    // injecting `undefined`.
    var bindings = [
      Options.DEFAULT_PROVIDERS,
      UserMetric.BINDINGS,
      bind(Options.USER_METRICS).toValue(userMetrics),
      provide(WebDriverAdapter, {useValue: wdAdapter})
    ];
    return ReflectiveInjector.resolveAndCreate(bindings).get(UserMetric);
  }

  describe('user metric', () => {

    it('should describe itself based on userMetrics', () => {
      expect(createMetric([[]], new PerfLogFeatures(), {userMetrics: {'loadTime': 'time to load'}})
                 .describe())
          .toEqual({'loadTime': 'time to load'});
    });

    describe('endMeasure', () => {
      it('should stop measuring when all properties have numeric values',
         inject([AsyncTestCompleter], (async) => {
           let metric = createMetric(
               [[]], new PerfLogFeatures(),
               {userMetrics: {'loadTime': 'time to load', 'content': 'time to see content'}});
           // endMeasure() polls the adapter until both window properties are
           // numeric; values are set below while it polls.
           metric.beginMeasure()
               .then((_) => metric.endMeasure(true))
               .then((values: {[key: string]: string}) => {
                 expect(values['loadTime']).toBe(25);
                 expect(values['content']).toBe(250);
                 async.done();
               });

           wdAdapter.data['loadTime'] = 25;
           // Wait before setting 2nd property.
           TimerWrapper.setTimeout(() => { wdAdapter.data['content'] = 250; }, 50);

         }), 600);
    });
  });
}
|
||||
|
||||
class MockDriverAdapter extends WebDriverAdapter {
|
||||
data: any = {};
|
||||
|
||||
executeScript(script: string): any {
|
||||
// Just handles `return window.propName` ignores `delete window.propName`.
|
||||
if (script.indexOf('return window.') == 0) {
|
||||
let metricName = script.substring('return window.'.length);
|
||||
return PromiseWrapper.resolve(this.data[metricName]);
|
||||
} else if (script.indexOf('delete window.') == 0) {
|
||||
return PromiseWrapper.resolve(null);
|
||||
} else {
|
||||
return PromiseWrapper.reject(`Unexpected syntax: ${script}`, null);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -24,6 +24,7 @@ const kServedPaths = [
|
|||
'benchmarks/src/element_injector',
|
||||
'benchmarks/src/largetable',
|
||||
'benchmarks/src/naive_infinite_scroll',
|
||||
'benchmarks/src/page_load',
|
||||
'benchmarks/src/tree',
|
||||
'benchmarks/src/static_tree',
|
||||
|
||||
|
|
Loading…
Reference in New Issue