feat(benchpress): rewritten implementation
Limitations: - cloud reporter is not yet supported any more
This commit is contained in:
parent
44845839a6
commit
f6284f2a55
|
@ -272,7 +272,7 @@ gulp.task('build/transpile.js.prod', function(done) {
|
|||
});
|
||||
|
||||
gulp.task('build/transpile.js.cjs', transpile(gulp, gulpPlugins, {
|
||||
src: CONFIG.transpile.src.js.concat(['tools/benchp*/**/*.es6']),
|
||||
src: CONFIG.transpile.src.js,
|
||||
copy: CONFIG.transpile.copy.js,
|
||||
dest: CONFIG.dest.js.cjs,
|
||||
outputExt: 'js',
|
||||
|
|
|
@ -43,6 +43,7 @@ module.exports = function(config) {
|
|||
|
||||
// Local dependencies, transpiled from the source.
|
||||
'/packages/angular': 'http://localhost:9877/base/modules/angular',
|
||||
'/packages/benchpress': 'http://localhost:9877/base/modules/benchpress',
|
||||
'/packages/core': 'http://localhost:9877/base/modules/core',
|
||||
'/packages/change_detection': 'http://localhost:9877/base/modules/change_detection',
|
||||
'/packages/reflection': 'http://localhost:9877/base/modules/reflection',
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
var benchpress = require('benchpress/index.js');
|
||||
var webdriver = require('protractor/node_modules/selenium-webdriver');
|
||||
var testUtil = require('./test_util');
|
||||
var benchpress = require('benchpress/benchpress');
|
||||
|
||||
module.exports = {
|
||||
runClickBenchmark: runClickBenchmark,
|
||||
runBenchmark: runBenchmark,
|
||||
verifyNoBrowserErrors: benchpress.verifyNoBrowserErrors
|
||||
verifyNoBrowserErrors: testUtil.verifyNoBrowserErrors
|
||||
};
|
||||
|
||||
function runClickBenchmark(config) {
|
||||
|
@ -16,27 +16,29 @@ function runClickBenchmark(config) {
|
|||
button.click();
|
||||
});
|
||||
}
|
||||
runBenchmark(config);
|
||||
return runBenchmark(config);
|
||||
}
|
||||
|
||||
function runBenchmark(config) {
|
||||
var globalParams = browser.params;
|
||||
getScaleFactor(globalParams.benchmark.scaling).then(function(scaleFactor) {
|
||||
var params = config.params.map(function(param) {
|
||||
return {
|
||||
name: param.name, value: applyScaleFactor(param.value, scaleFactor, param.scale)
|
||||
}
|
||||
return getScaleFactor(browser.params.benchmark.scaling).then(function(scaleFactor) {
|
||||
var description = {};
|
||||
var urlParams = [];
|
||||
config.params.forEach(function(param) {
|
||||
var name = param.name;
|
||||
var value = applyScaleFactor(param.value, scaleFactor, param.scale);
|
||||
urlParams.push(name + '=' + value);
|
||||
description[name] = value;
|
||||
});
|
||||
var benchmarkConfig = Object.create(globalParams.benchmark);
|
||||
benchmarkConfig.id = globalParams.lang+'.'+config.id;
|
||||
benchmarkConfig.params = params;
|
||||
benchmarkConfig.scaleFactor = scaleFactor;
|
||||
|
||||
var url = encodeURI(config.url + '?' + params.map(function(param) {
|
||||
return param.name + '=' + param.value;
|
||||
}).join('&'));
|
||||
var url = encodeURI(config.url + '?' + urlParams.join('&'));
|
||||
browser.get(url);
|
||||
benchpress.runBenchmark(benchmarkConfig, config.work);
|
||||
return benchpressRunner.sample({
|
||||
id: config.id,
|
||||
execute: config.work,
|
||||
prepare: config.prepare,
|
||||
bindings: [
|
||||
benchpress.bind(benchpress.Options.SAMPLE_DESCRIPTION).toValue(description)
|
||||
]
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
var benchpress = require('benchpress/index.js');
|
||||
var webdriver = require('selenium-webdriver');
|
||||
|
||||
module.exports = {
|
||||
verifyNoBrowserErrors: benchpress.verifyNoBrowserErrors,
|
||||
verifyNoBrowserErrors: verifyNoBrowserErrors,
|
||||
clickAll: clickAll
|
||||
};
|
||||
|
||||
|
@ -10,3 +10,19 @@ function clickAll(buttonSelectors) {
|
|||
$(selector).click();
|
||||
});
|
||||
}
|
||||
|
||||
function verifyNoBrowserErrors() {
|
||||
// TODO(tbosch): Bug in ChromeDriver: Need to execute at least one command
|
||||
// so that the browser logs can be read out!
|
||||
browser.executeScript('1+1');
|
||||
browser.manage().logs().get('browser').then(function(browserLog) {
|
||||
var filteredLog = browserLog.filter(function(logEntry) {
|
||||
return logEntry.level.value > webdriver.logging.Level.WARNING.value;
|
||||
});
|
||||
expect(filteredLog.length).toEqual(0);
|
||||
if (filteredLog.length) {
|
||||
console.log('browser console errors: ' + require('util').inspect(filteredLog));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import {ListWrapper, List} from 'angular2/src/facade/collection';
|
||||
import {stringify} from 'angular2/src/facade/lang';
|
||||
import {Key} from './key';
|
||||
|
||||
function findFirstClosedCycle(keys:List) {
|
||||
var res = [];
|
||||
|
@ -31,14 +30,16 @@ export class ProviderError extends Error {
|
|||
keys:List;
|
||||
constructResolvingMessage:Function;
|
||||
message;
|
||||
constructor(key:Key, constructResolvingMessage:Function) {
|
||||
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
|
||||
constructor(key, constructResolvingMessage:Function) {
|
||||
super();
|
||||
this.keys = [key];
|
||||
this.constructResolvingMessage = constructResolvingMessage;
|
||||
this.message = this.constructResolvingMessage(this.keys);
|
||||
}
|
||||
|
||||
addKey(key:Key) {
|
||||
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
|
||||
addKey(key) {
|
||||
ListWrapper.push(this.keys, key);
|
||||
this.message = this.constructResolvingMessage(this.keys);
|
||||
}
|
||||
|
@ -49,7 +50,8 @@ export class ProviderError extends Error {
|
|||
}
|
||||
|
||||
export class NoProviderError extends ProviderError {
|
||||
constructor(key:Key) {
|
||||
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
|
||||
constructor(key) {
|
||||
super(key, function (keys:List) {
|
||||
var first = stringify(ListWrapper.first(keys).token);
|
||||
return `No provider for ${first}!${constructResolvingPath(keys)}`;
|
||||
|
@ -58,7 +60,8 @@ export class NoProviderError extends ProviderError {
|
|||
}
|
||||
|
||||
export class AsyncBindingError extends ProviderError {
|
||||
constructor(key:Key) {
|
||||
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
|
||||
constructor(key) {
|
||||
super(key, function (keys:List) {
|
||||
var first = stringify(ListWrapper.first(keys).token);
|
||||
return `Cannot instantiate ${first} synchronously. ` +
|
||||
|
@ -68,7 +71,8 @@ export class AsyncBindingError extends ProviderError {
|
|||
}
|
||||
|
||||
export class CyclicDependencyError extends ProviderError {
|
||||
constructor(key:Key) {
|
||||
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
|
||||
constructor(key) {
|
||||
super(key, function (keys:List) {
|
||||
return `Cannot instantiate cyclic dependency!${constructResolvingPath(keys)}`;
|
||||
});
|
||||
|
@ -76,7 +80,8 @@ export class CyclicDependencyError extends ProviderError {
|
|||
}
|
||||
|
||||
export class InstantiationError extends ProviderError {
|
||||
constructor(originalException, key:Key) {
|
||||
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
|
||||
constructor(originalException, key) {
|
||||
super(key, function (keys:List) {
|
||||
var first = stringify(ListWrapper.first(keys).token);
|
||||
return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +
|
||||
|
|
|
@ -98,8 +98,7 @@ class ListWrapper {
|
|||
l.add(e);
|
||||
}
|
||||
static List concat(List a, List b) {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
return []..addAll(a)..addAll(b);
|
||||
}
|
||||
static bool isList(l) => l is List;
|
||||
static void insert(List l, int index, value) {
|
||||
|
|
|
@ -2,6 +2,7 @@ library angular.core.facade.lang;
|
|||
|
||||
export 'dart:core' show Type, RegExp, print;
|
||||
import 'dart:math' as math;
|
||||
import 'dart:convert' as convert;
|
||||
|
||||
class Math {
|
||||
static final _random = new math.Random();
|
||||
|
@ -176,3 +177,9 @@ bool assertionsEnabled() {
|
|||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Can't be all uppercase as our transpiler would think it is a special directive...
|
||||
class Json {
|
||||
static parse(String s) => convert.JSON.decode(s);
|
||||
static stringify(data) => convert.JSON.encode(data);
|
||||
}
|
||||
|
|
|
@ -247,3 +247,6 @@ export function print(obj) {
|
|||
console.log(obj);
|
||||
}
|
||||
}
|
||||
|
||||
// Can't be all uppercase as our transpiler would think it is a special directive...
|
||||
export var Json = _global.JSON;
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
library angular.core.facade.math;
|
||||
|
||||
import 'dart:core' show double, num;
|
||||
import 'dart:math' as math;
|
||||
|
||||
var NaN = double.NAN;
|
||||
|
||||
class Math {
|
||||
static num pow(num x, num exponent) {
|
||||
return math.pow(x, exponent);
|
||||
|
@ -10,4 +13,8 @@ class Math {
|
|||
static num min(num a, num b) => math.min(a, b);
|
||||
|
||||
static num floor(num a) => a.floor();
|
||||
|
||||
static num ceil(num a) => a.ceil();
|
||||
|
||||
static num sqrt(num x) => math.sqrt(x);
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import {global} from 'angular2/src/facade/lang';
|
||||
|
||||
export var Math = global.Math;
|
||||
export var Math = global.Math;
|
||||
export var NaN = global.NaN;
|
||||
|
|
|
@ -26,6 +26,7 @@ class Expect extends gns.Expect {
|
|||
void toThrowError([message=""]) => this.toThrowWith(message: message);
|
||||
void toBePromise() => _expect(actual is Future, equals(true));
|
||||
void toImplement(expected) => toBeA(expected);
|
||||
void toBeNaN() => _expect(double.NAN.compareTo(actual) == 0, equals(true));
|
||||
Function get _expect => gns.guinness.matchers.expect;
|
||||
}
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@ describe('ng2 change detection benchmark', function () {
|
|||
|
||||
afterEach(perfUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should log ng stats (dynamic)', function() {
|
||||
it('should log ng stats (dynamic)', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#ng2ChangeDetectionDynamic'],
|
||||
|
@ -14,10 +14,10 @@ describe('ng2 change detection benchmark', function () {
|
|||
params: [{
|
||||
name: 'numberOfChecks', value: 900000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log ng stats (jit)', function() {
|
||||
it('should log ng stats (jit)', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#ng2ChangeDetectionJit'],
|
||||
|
@ -25,10 +25,10 @@ describe('ng2 change detection benchmark', function () {
|
|||
params: [{
|
||||
name: 'numberOfChecks', value: 900000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log baseline stats', function() {
|
||||
it('should log baseline stats', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#baselineChangeDetection'],
|
||||
|
@ -36,7 +36,7 @@ describe('ng2 change detection benchmark', function () {
|
|||
params: [{
|
||||
name: 'numberOfChecks', value: 900000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
|
||||
describe('ng2 change detection benchmark', function () {
|
||||
|
||||
var URL = 'benchmarks/src/change_detection/change_detection_benchmark.html';
|
||||
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should not throw errors', function() {
|
||||
browser.get(URL);
|
||||
testUtil.clickAll(['#ng2ChangeDetectionDynamic', '#ng2ChangeDetectionJit', '#baselineChangeDetection']);
|
||||
});
|
||||
|
||||
});
|
|
@ -6,7 +6,7 @@ describe('ng2 compiler benchmark', function () {
|
|||
|
||||
afterEach(perfUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should log withBindings stats', function() {
|
||||
it('should log withBindings stats', function(done) {
|
||||
perfUtil.runBenchmark({
|
||||
url: URL,
|
||||
id: 'ng2.compile.withBindings',
|
||||
|
@ -17,10 +17,10 @@ describe('ng2 compiler benchmark', function () {
|
|||
browser.executeScript('document.querySelector("#compileWithBindings").click()');
|
||||
browser.sleep(500);
|
||||
}
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log noBindings stats', function() {
|
||||
it('should log noBindings stats', function(done) {
|
||||
perfUtil.runBenchmark({
|
||||
url: URL,
|
||||
id: 'ng2.compile.noBindings',
|
||||
|
@ -31,7 +31,7 @@ describe('ng2 compiler benchmark', function () {
|
|||
browser.executeScript('document.querySelector("#compileNoBindings").click()');
|
||||
browser.sleep(500);
|
||||
}
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
|
||||
describe('ng2 compiler benchmark', function () {
|
||||
|
||||
var URL = 'benchmarks/src/compiler/compiler_benchmark.html';
|
||||
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should not throw errors', function() {
|
||||
browser.get(URL);
|
||||
testUtil.clickAll(['#compileWithBindings', '#compileNoBindings']);
|
||||
});
|
||||
|
||||
});
|
|
@ -6,7 +6,7 @@ describe('ng2 di benchmark', function () {
|
|||
|
||||
afterEach(perfUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should log the stats for getByToken', function() {
|
||||
it('should log the stats for getByToken', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#getByToken'],
|
||||
|
@ -14,10 +14,10 @@ describe('ng2 di benchmark', function () {
|
|||
params: [{
|
||||
name: 'iterations', value: 20000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log the stats for getByKey', function() {
|
||||
it('should log the stats for getByKey', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#getByKey'],
|
||||
|
@ -25,10 +25,10 @@ describe('ng2 di benchmark', function () {
|
|||
params: [{
|
||||
name: 'iterations', value: 20000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log the stats for getChild', function() {
|
||||
it('should log the stats for getChild', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#getChild'],
|
||||
|
@ -36,10 +36,10 @@ describe('ng2 di benchmark', function () {
|
|||
params: [{
|
||||
name: 'iterations', value: 20000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log the stats for instantiate', function() {
|
||||
it('should log the stats for instantiate', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#instantiate'],
|
||||
|
@ -47,7 +47,7 @@ describe('ng2 di benchmark', function () {
|
|||
params: [{
|
||||
name: 'iterations', value: 10000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
|
||||
describe('ng2 di benchmark', function () {
|
||||
|
||||
var URL = 'benchmarks/src/di/di_benchmark.html';
|
||||
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should not throw errors', function() {
|
||||
browser.get(URL);
|
||||
testUtil.clickAll(['#getByToken', '#getByKey', '#getChild', '#instantiate']);
|
||||
});
|
||||
|
||||
});
|
|
@ -6,7 +6,7 @@ describe('ng2 element injector benchmark', function () {
|
|||
|
||||
afterEach(perfUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should log the stats for instantiate', function() {
|
||||
it('should log the stats for instantiate', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#instantiate'],
|
||||
|
@ -14,10 +14,10 @@ describe('ng2 element injector benchmark', function () {
|
|||
params: [{
|
||||
name: 'iterations', value: 20000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log the stats for instantiateDirectives', function() {
|
||||
it('should log the stats for instantiateDirectives', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#instantiateDirectives'],
|
||||
|
@ -25,7 +25,7 @@ describe('ng2 element injector benchmark', function () {
|
|||
params: [{
|
||||
name: 'iterations', value: 20000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
|
||||
describe('ng2 element injector benchmark', function () {
|
||||
|
||||
var URL = 'benchmarks/src/element_injector/element_injector_benchmark.html';
|
||||
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should not throw errors', function() {
|
||||
browser.get(URL);
|
||||
testUtil.clickAll(['#instantiate', '#instantiateDirectives']);
|
||||
});
|
||||
|
||||
});
|
|
@ -8,7 +8,7 @@ describe('ng2 naive infinite scroll benchmark', function () {
|
|||
|
||||
[1, 2, 4].forEach(function(appSize) {
|
||||
it('should run scroll benchmark and collect stats for appSize = ' +
|
||||
appSize, function() {
|
||||
appSize, function(done) {
|
||||
perfUtil.runBenchmark({
|
||||
url: URL,
|
||||
id: 'ng2.naive_infinite_scroll',
|
||||
|
@ -30,7 +30,7 @@ describe('ng2 naive infinite scroll benchmark', function () {
|
|||
}, {
|
||||
name: 'scrollIncrement', value: 40
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@ describe('ng2 selector benchmark', function () {
|
|||
|
||||
afterEach(perfUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should log parse stats', function() {
|
||||
it('should log parse stats', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#parse'],
|
||||
|
@ -14,10 +14,10 @@ describe('ng2 selector benchmark', function () {
|
|||
params: [{
|
||||
name: 'selectors', value: 10000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log addSelectable stats', function() {
|
||||
it('should log addSelectable stats', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#addSelectable'],
|
||||
|
@ -25,10 +25,10 @@ describe('ng2 selector benchmark', function () {
|
|||
params: [{
|
||||
name: 'selectors', value: 10000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log match stats', function() {
|
||||
it('should log match stats', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#match'],
|
||||
|
@ -36,7 +36,7 @@ describe('ng2 selector benchmark', function () {
|
|||
params: [{
|
||||
name: 'selectors', value: 10000, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
|
||||
describe('ng2 selector benchmark', function () {
|
||||
|
||||
var URL = 'benchmarks/src/compiler/selector_benchmark.html';
|
||||
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should not throw errors', function() {
|
||||
browser.get(URL);
|
||||
testUtil.clickAll(['#parse', '#addSelectable', '#match']);
|
||||
});
|
||||
|
||||
});
|
|
@ -6,7 +6,7 @@ describe('ng2 tree benchmark', function () {
|
|||
|
||||
afterEach(perfUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should log the ng stats', function() {
|
||||
it('should log the ng stats', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#ng2DestroyDom', '#ng2CreateDom'],
|
||||
|
@ -14,10 +14,10 @@ describe('ng2 tree benchmark', function () {
|
|||
params: [{
|
||||
name: 'depth', value: 9, scale: 'log2'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log the baseline stats', function() {
|
||||
it('should log the baseline stats', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#baselineDestroyDom', '#baselineCreateDom'],
|
||||
|
@ -25,7 +25,7 @@ describe('ng2 tree benchmark', function () {
|
|||
params: [{
|
||||
name: 'depth', value: 9, scale: 'log2'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
|
||||
describe('ng2 tree benchmark', function () {
|
||||
|
||||
var URL = 'benchmarks/src/tree/tree_benchmark.html';
|
||||
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should not throw errors', function() {
|
||||
browser.get(URL);
|
||||
testUtil.clickAll(['#ng2CreateDom', '#ng2DestroyDom', '#baselineCreateDom', '#baselineDestroyDom']);
|
||||
});
|
||||
|
||||
});
|
|
@ -6,7 +6,7 @@ describe('ng1.x compiler benchmark', function () {
|
|||
|
||||
afterEach(perfUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should log withBinding stats', function() {
|
||||
it('should log withBinding stats', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#compileWithBindings'],
|
||||
|
@ -14,10 +14,10 @@ describe('ng1.x compiler benchmark', function () {
|
|||
params: [{
|
||||
name: 'elements', value: 150, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
it('should log noBindings stats', function() {
|
||||
it('should log noBindings stats', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#compileNoBindings'],
|
||||
|
@ -25,7 +25,7 @@ describe('ng1.x compiler benchmark', function () {
|
|||
params: [{
|
||||
name: 'elements', value: 150, scale: 'linear'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
|
||||
describe('ng1.x compiler benchmark', function () {
|
||||
|
||||
var URL = 'benchmarks_external/src/compiler/compiler_benchmark.html';
|
||||
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should not throw errors', function() {
|
||||
browser.get(URL);
|
||||
testUtil.clickAll(['#compileWithBindings', '#compileNoBindings']);
|
||||
});
|
||||
|
||||
});
|
|
@ -17,7 +17,7 @@ describe('ng1.x largetable benchmark', function () {
|
|||
'ngBindFilter',
|
||||
'interpolationFilter'
|
||||
].forEach(function(benchmarkType) {
|
||||
it('should log the stats with: ' + benchmarkType, function() {
|
||||
it('should log the stats with: ' + benchmarkType, function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#destroyDom', '#createDom'],
|
||||
|
@ -34,7 +34,7 @@ describe('ng1.x largetable benchmark', function () {
|
|||
name: 'benchmarkType',
|
||||
value: benchmarkType
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,25 +0,0 @@
|
|||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
|
||||
describe('ng1.x largetable benchmark', function () {
|
||||
var URL = 'benchmarks_external/src/largetable/largetable_benchmark.html';
|
||||
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
[
|
||||
'baselineBinding',
|
||||
'baselineInterpolation',
|
||||
'ngBind',
|
||||
'ngBindOnce',
|
||||
'interpolation',
|
||||
'interpolationAttr',
|
||||
'ngBindFn',
|
||||
'interpolationFn',
|
||||
'ngBindFilter',
|
||||
'interpolationFilter'
|
||||
].forEach(function(benchmarkType) {
|
||||
it('should log the stats with: ' + benchmarkType, function() {
|
||||
browser.get(URL + '?benchmarkType='+benchmarkType);
|
||||
testUtil.clickAll(['#createDom', '#destroyDom']);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -8,7 +8,7 @@ describe('ng-dart1.x naive infinite scroll benchmark', function () {
|
|||
|
||||
[1, 2, 4].forEach(function(appSize) {
|
||||
it('should run scroll benchmark and collect stats for appSize = ' +
|
||||
appSize, function() {
|
||||
appSize, function(done) {
|
||||
perfUtil.runBenchmark({
|
||||
url: URL,
|
||||
id: 'ng1-dart1.x.naive_infinite_scroll',
|
||||
|
@ -30,7 +30,7 @@ describe('ng-dart1.x naive infinite scroll benchmark', function () {
|
|||
}, {
|
||||
name: 'scrollIncrement', value: 40
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -1,18 +0,0 @@
|
|||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
|
||||
describe('ng-dart1.x naive infinite scroll benchmark', function () {
|
||||
|
||||
var URL = 'benchmarks_external/src/naive_infinite_scroll/index.html';
|
||||
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should not throw errors', function() {
|
||||
browser.get(URL);
|
||||
browser.executeScript(
|
||||
'document.querySelector("scroll-app /deep/ #reset-btn").click()');
|
||||
browser.executeScript(
|
||||
'document.querySelector("scroll-app /deep/ #run-btn").click()');
|
||||
browser.sleep(1000);
|
||||
});
|
||||
|
||||
});
|
|
@ -6,7 +6,7 @@ describe('ng1.x tree benchmark', function () {
|
|||
|
||||
afterEach(perfUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should log the stats', function() {
|
||||
it('should log the stats', function(done) {
|
||||
perfUtil.runClickBenchmark({
|
||||
url: URL,
|
||||
buttons: ['#destroyDom', '#createDom'],
|
||||
|
@ -14,7 +14,7 @@ describe('ng1.x tree benchmark', function () {
|
|||
params: [{
|
||||
name: 'depth', value: 9, scale: 'log2'
|
||||
}]
|
||||
});
|
||||
}).then(done, done.fail);
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
|
||||
describe('ng1.x tree benchmark', function () {
|
||||
|
||||
var URL = 'benchmarks_external/src/tree/tree_benchmark.html';
|
||||
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
it('should not throw errors', function() {
|
||||
browser.get(URL);
|
||||
testUtil.clickAll(['#createDom', '#destroyDom']);
|
||||
});
|
||||
|
||||
});
|
|
@ -0,0 +1,16 @@
|
|||
export { Sampler, SampleState } from './src/sampler';
|
||||
export { Metric } from './src/metric';
|
||||
export { Validator } from './src/validator';
|
||||
export { Reporter } from './src/reporter';
|
||||
export { WebDriverExtension } from './src/web_driver_extension';
|
||||
export { WebDriverAdapter } from './src/web_driver_adapter';
|
||||
export { SizeValidator } from './src/validator/size_validator';
|
||||
export { RegressionSlopeValidator } from './src/validator/regression_slope_validator';
|
||||
export { ConsoleReporter } from './src/reporter/console_reporter';
|
||||
export { SampleDescription } from './src/sample_description';
|
||||
export { PerflogMetric } from './src/metric/perflog_metric';
|
||||
export { ChromeDriverExtension } from './src/webdriver/chrome_driver_extension';
|
||||
export { Runner } from './src/runner';
|
||||
export { Options } from './src/sample_options';
|
||||
|
||||
export { bind, Injector, OpaqueToken } from 'angular2/di';
|
|
@ -0,0 +1,21 @@
|
|||
{
|
||||
"name": "angular-benchpress2",
|
||||
"version": "<%= packageJson.version %>",
|
||||
"description": "Angular-Benchpress - a framework for e2e performance tests",
|
||||
"homepage": "<%= packageJson.homepage %>",
|
||||
"bugs": "<%= packageJson.bugs %>",
|
||||
"contributors": <%= JSON.stringify(packageJson.contributors) %>,
|
||||
"license": "<%= packageJson.license %>",
|
||||
"dependencies": {
|
||||
"rtts_assert": "<%= packageJson.version %>",
|
||||
"angular2": "<%= packageJson.version %>.dev"
|
||||
},
|
||||
"devDependencies": {
|
||||
"yargs": "2.3.*",
|
||||
"gulp-sourcemaps": "1.3.*",
|
||||
"gulp-traceur": "0.16.*",
|
||||
"gulp": "^3.8.8",
|
||||
"gulp-rename": "^1.2.0",
|
||||
"through2": "^0.6.1"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,16 @@
|
|||
name: benchpress
|
||||
version: <%= packageJson.version %>
|
||||
authors:
|
||||
<%= Object.keys(packageJson.contributors).map(function(name) {
|
||||
return '- '+name+' <'+packageJson.contributors[name]+'>';
|
||||
}).join('\n') %>
|
||||
description: Benchpress - a framework for e2e performance tests
|
||||
homepage: <%= packageJson.homepage %>
|
||||
environment:
|
||||
sdk: '>=1.4.0'
|
||||
dependencies:
|
||||
stack_trace: '>=1.1.1 <2.0.0'
|
||||
angular2:
|
||||
path: ../angular2
|
||||
dev_dependencies:
|
||||
guinness: ">=0.1.16 <0.2.0"
|
|
@ -0,0 +1,36 @@
|
|||
import {
|
||||
Promise, PromiseWrapper
|
||||
} from 'angular2/src/facade/async';
|
||||
import {
|
||||
ABSTRACT, BaseException
|
||||
} from 'angular2/src/facade/lang';
|
||||
|
||||
/**
|
||||
* A metric is measures values
|
||||
*/
|
||||
@ABSTRACT()
|
||||
export class Metric {
|
||||
/**
|
||||
* Starts measuring
|
||||
*/
|
||||
beginMeasure():Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
|
||||
/**
|
||||
* Ends measuring and reports the data
|
||||
* since the begin call.
|
||||
* @param restart: Whether to restart right after this.
|
||||
*/
|
||||
endMeasure(restart:boolean):Promise<any> {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
|
||||
/**
|
||||
* Describes the metrics provided by this metric implementation.
|
||||
* (e.g. units, ...)
|
||||
*/
|
||||
describe():any {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
}
|
|
@ -0,0 +1,144 @@
|
|||
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
|
||||
import { isPresent, isBlank, int, BaseException, StringWrapper } from 'angular2/src/facade/lang';
|
||||
import { ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
import { WebDriverExtension } from '../web_driver_extension';
|
||||
import { Metric } from '../metric';
|
||||
|
||||
/**
 * A metric that reads out the performance log
 */
export class PerflogMetric extends Metric {
  // TODO(tbosch): use static values when our transpiler supports them
  static get BINDINGS() { return _BINDINGS; }
  // TODO(tbosch): use static values when our transpiler supports them
  static get SET_TIMEOUT() { return _SET_TIMEOUT; }

  _driverExtension:WebDriverExtension;
  _remainingEvents:List;
  _measureCount:int;
  _setTimeout:Function;

  /**
   * @param driverExtension browser specific access to the performance log
   * @param setTimeout function used to schedule the log polling; injectable
   *   via SET_TIMEOUT so tests can control timing
   */
  constructor(driverExtension:WebDriverExtension, setTimeout:Function) {
    super();
    this._driverExtension = driverExtension;
    // Perf log events already read but not yet consumed by a measurement.
    this._remainingEvents = [];
    // Counter used to generate unique begin/end mark names.
    this._measureCount = 0;
    this._setTimeout = setTimeout;
  }

  // Describes the values that endMeasure() reports.
  describe() {
    return {
      'script': 'script execution time in ms',
      'render': 'render time in ms',
      'gcTime': 'gc time in ms',
      'gcAmount': 'gc amount in bytes',
      'gcTimeInScript': 'gc time during script execution in ms',
      'gcAmountInScript': 'gc amount during script execution in bytes'
    };
  }

  // Writes a begin mark for the current measurement into the perf log.
  beginMeasure():Promise {
    return this._driverExtension.timeBegin(this._markName(this._measureCount++));
  }

  // Writes the end mark (and, if `restart`, the next begin mark) and then
  // polls the perf log until the end mark shows up.
  endMeasure(restart:boolean):Promise<Object> {
    var markName = this._markName(this._measureCount-1);
    var nextMarkName = restart ? this._markName(this._measureCount++) : null;
    return this._driverExtension.timeEnd(markName, nextMarkName)
      .then( (_) => this._readUntilEndMark(markName) );
  }

  // Reads the perf log, retrying every 100ms until the end mark for
  // `markName` has been seen; gives up after _MAX_RETRY_COUNT attempts.
  _readUntilEndMark(markName:string, loopCount:int = 0) {
    return this._driverExtension.readPerfLog().then( (events) => {
      this._remainingEvents = ListWrapper.concat(this._remainingEvents, events);
      if (loopCount > _MAX_RETRY_COUNT) {
        throw new BaseException(`Tried too often to get the ending mark: ${loopCount}`);
      }
      var result = this._aggregateEvents(
        this._remainingEvents, markName
      );
      if (isPresent(result)) {
        // NOTE(review): this keeps only the batch read in this iteration,
        // not the events that arrived after the end mark in earlier batches —
        // presumably intentional, but verify.
        this._remainingEvents = events;
        return result;
      }
      var completer = PromiseWrapper.completer();
      this._setTimeout(
        () => completer.complete(this._readUntilEndMark(markName, loopCount+1)),
        100
      );
      return completer.promise;
    });
  }

  // Sums up event durations between the begin ('b') and end ('e') marks for
  // `markName`. Returns null when either mark is missing from `events`.
  // When `markName` is blank, all events are aggregated.
  _aggregateEvents(events, markName) {
    var result = {
      'script': 0,
      'render': 0,
      'gcTime': 0,
      'gcAmount': 0,
      'gcTimeInScript': 0,
      'gcAmountInScript': 0
    };

    var startMarkFound = false;
    var endMarkFound = false;
    if (isBlank(markName)) {
      startMarkFound = true;
      endMarkFound = true;
    }

    // Maps event name -> timestamp of its still-open 'B' (begin) event.
    var intervalStarts = {};
    events.forEach( (event) => {
      var ph = event['ph'];
      var name = event['name'];
      var ts = event['ts'];
      var args = event['args'];
      if (StringWrapper.equals(ph, 'b') && StringWrapper.equals(name, markName)) {
        startMarkFound = true;
      } else if (StringWrapper.equals(ph, 'e') && StringWrapper.equals(name, markName)) {
        endMarkFound = true;
      }
      if (startMarkFound && !endMarkFound) {
        if (StringWrapper.equals(ph, 'B')) {
          intervalStarts[name] = ts;
        } else if (StringWrapper.equals(ph, 'E') && isPresent(intervalStarts[name])) {
          var diff = ts - intervalStarts[name];
          intervalStarts[name] = null;
          if (StringWrapper.equals(name, 'gc')) {
            result['gcTime'] += diff;
            var gcAmount = 0;
            if (isPresent(args)) {
              gcAmount = args['amount'];
            }
            result['gcAmount'] += gcAmount;
            // A gc while a 'script' interval is open is attributed to script time.
            if (isPresent(intervalStarts['script'])) {
              result['gcTimeInScript'] += diff;
              result['gcAmountInScript'] += gcAmount;
            }
          } else {
            result[name] += diff;
          }
        }
      }
    });
    // Report pure script time, without the gc pauses that happened inside it.
    result['script'] -= result['gcTimeInScript'];
    return startMarkFound && endMarkFound ? result : null;
  }

  _markName(index) {
    return `${_MARK_NAME_PREFIX}${index}`;
  }
}

// Maximum number of perf-log polls before giving up on the end mark.
var _MAX_RETRY_COUNT = 20;
var _MARK_NAME_PREFIX = 'benchpress';
var _SET_TIMEOUT = new OpaqueToken('PerflogMetric.setTimeout');
var _BINDINGS = [
  bind(Metric).toFactory(
    (driverExtension, setTimeout) => new PerflogMetric(driverExtension, setTimeout),
    [WebDriverExtension, _SET_TIMEOUT]
  ),
  bind(_SET_TIMEOUT).toValue( (fn, millis) => PromiseWrapper.setTimeout(fn, millis) )
];
|
|
@ -0,0 +1,20 @@
|
|||
import {
|
||||
Promise, PromiseWrapper
|
||||
} from 'angular2/src/facade/async';
|
||||
import {
|
||||
ABSTRACT, BaseException
|
||||
} from 'angular2/src/facade/lang';
|
||||
|
||||
/**
 * A reporter reports measure values and the valid sample.
 */
@ABSTRACT()
export class Reporter {
  /**
   * Reports the values measured during one iteration.
   * @param index zero-based index of the iteration within the sample
   * @param values map of metric name to measured value
   */
  reportMeasureValues(index:number, values:any):Promise {
    throw new BaseException('NYI');
  }

  /**
   * Reports the final result once a valid sample has been found.
   * @param completeSample all values measured so far
   * @param validSample the subset that the validator accepted
   */
  reportSample(completeSample:List, validSample:List):Promise {
    throw new BaseException('NYI');
  }
}
|
|
@ -0,0 +1,117 @@
|
|||
import { print, isPresent, isBlank } from 'angular2/src/facade/lang';
|
||||
import { StringMapWrapper, ListWrapper, List } from 'angular2/src/facade/collection';
|
||||
import { Promise, PromiseWrapper } from 'angular2/src/facade/async';
|
||||
import { Math } from 'angular2/src/facade/math';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
import { Statistic } from '../statistic';
|
||||
import { Reporter } from '../reporter';
|
||||
import { SampleDescription } from '../sample_description';
|
||||
|
||||
/**
|
||||
* A reporter for the console
|
||||
*/
|
||||
export class ConsoleReporter extends Reporter {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get PRINT() { return _PRINT; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get COLUMN_WIDTH() { return _COLUMN_WIDTH; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
|
||||
static _lpad(value, columnWidth, fill = ' ') {
|
||||
var result = '';
|
||||
for (var i=0; i<columnWidth - value.length; i++) {
|
||||
result += fill;
|
||||
}
|
||||
return result + value;
|
||||
}
|
||||
|
||||
static _formatNum(num) {
|
||||
var result;
|
||||
if (num === 0) {
|
||||
result = '000';
|
||||
} else {
|
||||
result = `${Math.floor(num * 100)}`;
|
||||
}
|
||||
return result.substring(0, result.length - 2) + '.' + result.substring(result.length-2);
|
||||
}
|
||||
|
||||
static _sortedProps(obj) {
|
||||
var props = [];
|
||||
StringMapWrapper.forEach(obj, (value, prop) => ListWrapper.push(props, prop));
|
||||
props.sort();
|
||||
return props;
|
||||
}
|
||||
|
||||
_columnWidth:number;
|
||||
_metricNames:List;
|
||||
_print:Function;
|
||||
|
||||
constructor(columnWidth, sampleDescription, print) {
|
||||
super();
|
||||
this._columnWidth = columnWidth;
|
||||
this._metricNames = ConsoleReporter._sortedProps(sampleDescription.metrics);
|
||||
this._print = print;
|
||||
this._printDescription(sampleDescription);
|
||||
}
|
||||
|
||||
_printDescription(sampleDescription) {
|
||||
this._print(`BENCHMARK ${sampleDescription.id}`);
|
||||
this._print('Description:');
|
||||
var props = ConsoleReporter._sortedProps(sampleDescription.description);
|
||||
props.forEach( (prop) => {
|
||||
this._print(`- ${prop}: ${sampleDescription.description[prop]}`);
|
||||
});
|
||||
this._print('Metrics:');
|
||||
this._metricNames.forEach( (metricName) => {
|
||||
this._print(`- ${metricName}: ${sampleDescription.metrics[metricName]}`);
|
||||
});
|
||||
this._print('');
|
||||
this._printStringRow(this._metricNames);
|
||||
this._printStringRow(this._metricNames.map( (_) => '' ), '-');
|
||||
}
|
||||
|
||||
reportMeasureValues(index:number, measuredValues:any):Promise {
|
||||
var formattedValues = ListWrapper.map(this._metricNames, (metricName) => {
|
||||
var value = measuredValues[metricName];
|
||||
return ConsoleReporter._formatNum(value);
|
||||
});
|
||||
this._printStringRow(formattedValues);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
|
||||
reportSample(completeSample:List, validSample:List):Promise {
|
||||
this._printStringRow(this._metricNames.map( (_) => '' ), '=');
|
||||
this._printStringRow(
|
||||
ListWrapper.map(this._metricNames, (metricName) => {
|
||||
var sample = ListWrapper.map(validSample, (measuredValues) => measuredValues[metricName]);
|
||||
var mean = Statistic.calculateMean(sample);
|
||||
var cv = Statistic.calculateCoefficientOfVariation(sample, mean);
|
||||
return `${ConsoleReporter._formatNum(mean)}\u00B1${Math.floor(cv)}%`;
|
||||
})
|
||||
);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
|
||||
_printStringRow(parts, fill = ' ') {
|
||||
this._print(
|
||||
ListWrapper.map(parts, (part) => {
|
||||
var w = this._columnWidth;
|
||||
return ConsoleReporter._lpad(part, w, fill);
|
||||
}).join(' | ')
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// DI token for the function that emits the output lines.
var _PRINT = new OpaqueToken('ConsoleReporter.print');
// DI token for the column width (fixed typo: was 'ConsoleReporter.columnWidht').
var _COLUMN_WIDTH = new OpaqueToken('ConsoleReporter.columnWidth');
var _BINDINGS = [
  bind(Reporter).toFactory(
    (columnWidth, sampleDescription, print) => new ConsoleReporter(columnWidth, sampleDescription, print),
    [_COLUMN_WIDTH, SampleDescription, _PRINT]
  ),
  bind(_COLUMN_WIDTH).toValue(18),
  bind(_PRINT).toValue(print)
];
|
|
@ -0,0 +1,54 @@
|
|||
import { Injector, bind } from 'angular2/di';
|
||||
import { isPresent, isBlank } from 'angular2/src/facade/lang';
|
||||
import { List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { Promise } from 'angular2/src/facade/async';
|
||||
|
||||
import { Sampler, SampleState } from './sampler';
|
||||
import { ConsoleReporter } from './reporter/console_reporter';
|
||||
import { RegressionSlopeValidator } from './validator/regression_slope_validator';
|
||||
import { PerflogMetric } from './metric/perflog_metric';
|
||||
import { ChromeDriverExtension } from './webdriver/chrome_driver_extension';
|
||||
import { SampleDescription } from './sample_description';
|
||||
|
||||
import { Options } from './sample_options';
|
||||
|
||||
/**
|
||||
* The Runner is the main entry point for executing a sample run.
|
||||
* It provides defaults, creates the injector and calls the sampler.
|
||||
*/
|
||||
export class Runner {
|
||||
_defaultBindings:List;
|
||||
|
||||
constructor(defaultBindings:List = null) {
|
||||
if (isBlank(defaultBindings)) {
|
||||
defaultBindings = [];
|
||||
}
|
||||
this._defaultBindings = defaultBindings;
|
||||
}
|
||||
|
||||
sample({id, execute, prepare, bindings}):Promise<SampleState> {
|
||||
var sampleBindings = [
|
||||
_DEFAULT_BINDINGS,
|
||||
this._defaultBindings,
|
||||
bind(Options.SAMPLE_ID).toValue(id),
|
||||
bind(Options.EXECUTE).toValue(execute)
|
||||
];
|
||||
if (isPresent(prepare)) {
|
||||
ListWrapper.push(sampleBindings, bind(Options.PREPARE).toValue(prepare));
|
||||
}
|
||||
if (isPresent(bindings)) {
|
||||
ListWrapper.push(sampleBindings, bindings);
|
||||
}
|
||||
return new Injector(sampleBindings).asyncGet(Sampler)
|
||||
.then( (sampler) => sampler.sample() );
|
||||
}
|
||||
}
|
||||
|
||||
var _DEFAULT_BINDINGS = [
|
||||
Sampler.BINDINGS,
|
||||
ConsoleReporter.BINDINGS,
|
||||
RegressionSlopeValidator.BINDINGS,
|
||||
ChromeDriverExtension.BINDINGS,
|
||||
PerflogMetric.BINDINGS,
|
||||
SampleDescription.BINDINGS
|
||||
];
|
|
@ -0,0 +1,43 @@
|
|||
import { StringMapWrapper, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
import { Sampler } from './sampler';
|
||||
import { Validator } from './validator';
|
||||
import { Metric } from './metric';
|
||||
import { Options } from './sample_options';
|
||||
|
||||
/**
|
||||
* SampleDescription merges all available descriptions about a sample
|
||||
*/
|
||||
export class SampleDescription {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
|
||||
id:string;
|
||||
description:any;
|
||||
metrics:any;
|
||||
|
||||
constructor(id, descriptions, metrics) {
|
||||
this.id = id;
|
||||
this.metrics = metrics;
|
||||
this.description = {};
|
||||
ListWrapper.forEach(descriptions, (description) => {
|
||||
StringMapWrapper.forEach(description, (value, prop) => this.description[prop] = value );
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
var _BINDINGS = [
|
||||
bind(SampleDescription).toFactory(
|
||||
(metric, id, forceGc, validator, defaultDesc, userDesc) => new SampleDescription(id,
|
||||
[
|
||||
{'forceGc': forceGc},
|
||||
validator.describe(),
|
||||
defaultDesc,
|
||||
userDesc
|
||||
],
|
||||
metric.describe()),
|
||||
[Metric, Options.SAMPLE_ID, Options.FORCE_GC, Validator, Options.DEFAULT_DESCRIPTION, Options.SAMPLE_DESCRIPTION]
|
||||
),
|
||||
bind(Options.DEFAULT_DESCRIPTION).toValue({}),
|
||||
bind(Options.SAMPLE_DESCRIPTION).toValue({})
|
||||
];
|
|
@ -0,0 +1,23 @@
|
|||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
/**
 * DI tokens for the user-configurable options of a sample run.
 */
export class Options {
  // TODO(tbosch): use static initializer when our transpiler supports it
  static get SAMPLE_ID() { return _SAMPLE_ID; }
  // TODO(tbosch): use static initializer when our transpiler supports it
  static get DEFAULT_DESCRIPTION() { return _DEFAULT_DESCRIPTION; }
  // TODO(tbosch): use static initializer when our transpiler supports it
  static get SAMPLE_DESCRIPTION() { return _SAMPLE_DESCRIPTION; }
  // TODO(tbosch): use static initializer when our transpiler supports it
  static get FORCE_GC() { return _FORCE_GC; }
  // TODO(tbosch): use static initializer when our transpiler supports it
  static get PREPARE() { return _PREPARE; }
  // TODO(tbosch): use static initializer when our transpiler supports it
  static get EXECUTE() { return _EXECUTE; }
}

var _SAMPLE_ID = new OpaqueToken('SampleDescription.sampleId');
var _DEFAULT_DESCRIPTION = new OpaqueToken('SampleDescription.defaultDescription');
var _SAMPLE_DESCRIPTION = new OpaqueToken('SampleDescription.sampleDescription');
var _FORCE_GC = new OpaqueToken('Sampler.forceGc');
var _PREPARE = new OpaqueToken('Sampler.prepare');
var _EXECUTE = new OpaqueToken('Sampler.execute');
|
|
@ -0,0 +1,134 @@
|
|||
import { isPresent, isBlank } from 'angular2/src/facade/lang';
|
||||
import { Promise, PromiseWrapper } from 'angular2/src/facade/async';
|
||||
import { StringMapWrapper, List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
import { Metric } from './metric';
|
||||
import { Validator } from './validator';
|
||||
import { Reporter } from './reporter';
|
||||
import { WebDriverExtension } from './web_driver_extension';
|
||||
import { WebDriverAdapter } from './web_driver_adapter';
|
||||
|
||||
import { Options } from './sample_options';
|
||||
|
||||
/**
 * The Sampler owns the sample loop:
 * 1. calls the prepare/execute callbacks,
 * 2. gets data from the metric
 * 3. asks the validator for a valid sample
 * 4. reports the new data to the reporter
 * 5. loop until there is a valid sample
 */
export class Sampler {
  // TODO(tbosch): use static values when our transpiler supports them
  static get BINDINGS() { return _BINDINGS; }

  _driver:WebDriverAdapter;
  _driverExtension:WebDriverExtension;
  _metric:Metric;
  _reporter:Reporter;
  _validator:Validator;
  _forceGc:boolean;
  _prepare:Function;
  _execute:Function;

  // NOTE(review): `forceGc` is destructured below but missing from the
  // type annotation — presumably an oversight; verify.
  constructor({
    driver, driverExtension, metric, reporter, validator, forceGc, prepare, execute
  }:{
    driver: WebDriverAdapter,
    driverExtension: WebDriverExtension, metric: Metric, reporter: Reporter,
    validator: Validator, prepare: Function, execute: Function
  }={}) {
    this._driver = driver;
    this._driverExtension = driverExtension;
    this._metric = metric;
    this._reporter = reporter;
    this._validator = validator;
    this._forceGc = forceGc;
    this._prepare = prepare;
    this._execute = execute;
  }

  // Runs iterations until the validator accepts a valid sample.
  sample():Promise<SampleState> {
    var loop;
    loop = (lastState) => {
      return this._iterate(lastState)
        .then( (newState) => {
          if (isPresent(newState.validSample)) {
            return newState;
          } else {
            return loop(newState);
          }
        });
    }
    return this._gcIfNeeded().then( (_) => loop(new SampleState([], null)) );
  }

  // Triggers a browser garbage collection when forceGc is set.
  _gcIfNeeded() {
    if (this._forceGc) {
      return this._driverExtension.gc();
    } else {
      return PromiseWrapper.resolve(null);
    }
  }

  // One iteration: optional prepare, begin measurement, execute,
  // end measurement, report the measured values.
  _iterate(lastState) {
    var resultPromise;
    if (isPresent(this._prepare)) {
      resultPromise = this._driver.waitFor(this._prepare)
        .then( (_) => this._gcIfNeeded() );
    } else {
      resultPromise = PromiseWrapper.resolve(null);
    }
    // Without a prepare callback the measurement is begun only once
    // (first iteration) and restarted via endMeasure(restart=true) below.
    if (isPresent(this._prepare) || lastState.completeSample.length === 0) {
      resultPromise = resultPromise.then( (_) => this._metric.beginMeasure() );
    }
    return resultPromise
      .then( (_) => this._driver.waitFor(this._execute) )
      .then( (_) => this._gcIfNeeded() )
      .then( (_) => this._metric.endMeasure(isBlank(this._prepare)) )
      .then( (measureValues) => this._report(lastState, measureValues) );
  }

  // Appends the new values to the sample, validates the complete sample
  // and forwards both to the reporter.
  _report(state:SampleState, measuredValues:any):Promise<SampleState> {
    var completeSample = ListWrapper.concat(state.completeSample, [measuredValues]);
    var validSample = this._validator.validate(completeSample);
    var resultPromise = this._reporter.reportMeasureValues(completeSample.length - 1, measuredValues);
    if (isPresent(validSample)) {
      resultPromise = resultPromise.then( (_) => this._reporter.reportSample(completeSample, validSample) )
    }
    return resultPromise.then( (_) => new SampleState(completeSample, validSample) );
  }

}
|
||||
|
||||
/**
 * Snapshot of the sampling progress: all values measured so far plus
 * the valid subset (null while no valid sample has been found yet).
 */
export class SampleState {
  completeSample:List;
  validSample:List;

  constructor(completeSample: List, validSample: List) {
    this.completeSample = completeSample;
    this.validSample = validSample;
  }
}
|
||||
|
||||
// Default bindings for the Sampler. `false` serves as the "not set"
// marker for PREPARE (see the TODO below).
var _BINDINGS = [
  bind(Sampler).toFactory(
    (driver, driverExtension, metric, reporter, validator, forceGc, prepare, execute) => new Sampler({
      driver: driver,
      driverExtension: driverExtension,
      reporter: reporter,
      validator: validator,
      metric: metric,
      forceGc: forceGc,
      // TODO(tbosch): DI right now does not support null/undefined objects
      // Mostly because the cache would have to be initialized with a
      // special null object, which is expensive.
      prepare: prepare !== false ? prepare : null,
      execute: execute
    }),
    [WebDriverAdapter, WebDriverExtension, Metric, Reporter, Validator, Options.FORCE_GC, Options.PREPARE, Options.EXECUTE]
  ),
  bind(Options.FORCE_GC).toValue(false),
  bind(Options.PREPARE).toValue(false)
];
|
|
@ -0,0 +1,37 @@
|
|||
import { Math } from 'angular2/src/facade/math';
|
||||
import { ListWrapper } from 'angular2/src/facade/collection';
|
||||
|
||||
/**
 * Basic statistics helpers used by the validators and reporters.
 */
export class Statistic {
  /**
   * Standard deviation of the sample relative to its mean, in percent.
   */
  static calculateCoefficientOfVariation(sample, mean) {
    return Statistic.calculateStandardDeviation(sample, mean) / mean * 100;
  }

  /**
   * Arithmetic mean of the sample.
   */
  static calculateMean(sample) {
    // Plain indexed loop, consistent with calculateRegressionSlope
    // (previously went through the ListWrapper facade).
    var total = 0;
    for (var i = 0; i < sample.length; i++) {
      total += sample[i];
    }
    return total / sample.length;
  }

  /**
   * Population standard deviation of the sample around the given mean.
   */
  static calculateStandardDeviation(sample, mean) {
    var sumOfSquares = 0;
    for (var i = 0; i < sample.length; i++) {
      sumOfSquares += Math.pow(sample[i] - mean, 2);
    }
    return Math.sqrt(sumOfSquares / sample.length);
  }

  /**
   * Slope of the simple linear regression line through the given points.
   */
  static calculateRegressionSlope(xValues, xMean, yValues, yMean) {
    // See http://en.wikipedia.org/wiki/Simple_linear_regression
    var dividendSum = 0;
    var divisorSum = 0;
    for (var i=0; i<xValues.length; i++) {
      dividendSum += (xValues[i] - xMean) * (yValues[i] - yMean);
      divisorSum += Math.pow(xValues[i] - xMean, 2);
    }
    return dividendSum / divisorSum;
  }
}
|
||||
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
import { List } from 'angular2/src/facade/collection';
|
||||
import {
|
||||
ABSTRACT, BaseException
|
||||
} from 'angular2/src/facade/lang';
|
||||
|
||||
/**
 * A Validator calculates a valid sample out of the complete sample.
 * A valid sample is a sample that represents the population that should be observed
 * in the correct way.
 */
@ABSTRACT()
export class Validator {
  /**
   * Calculates a valid sample out of the complete sample
   * @param completeSample all values measured so far
   * @returns the accepted subset, or null if no valid sample exists yet
   */
  validate(completeSample:List<any>):List<any> {
    throw new BaseException('NYI');
  }

  /**
   * Returns a Map that describes the properties of the validator
   * (e.g. sample size, ...)
   */
  describe():any {
    throw new BaseException('NYI');
  }
}
|
|
@ -0,0 +1,68 @@
|
|||
import { List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
import { Validator } from '../validator';
|
||||
import { Statistic } from '../statistic';
|
||||
|
||||
/**
|
||||
* A validator that checks the regression slope of a specific metric.
|
||||
* Waits for the regression slope to be >=0.
|
||||
*/
|
||||
export class RegressionSlopeValidator extends Validator {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get SAMPLE_SIZE() { return _SAMPLE_SIZE; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get METRIC() { return _METRIC; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
|
||||
_sampleSize:number;
|
||||
_metric:string;
|
||||
|
||||
constructor(sampleSize, metric) {
|
||||
super();
|
||||
this._sampleSize = sampleSize;
|
||||
this._metric = metric;
|
||||
}
|
||||
|
||||
describe():any {
|
||||
return {
|
||||
'sampleSize': this._sampleSize,
|
||||
'regressionSlopeMetric': this._metric
|
||||
};
|
||||
}
|
||||
|
||||
validate(completeSample:List<any>):List<any> {
|
||||
if (completeSample.length >= this._sampleSize) {
|
||||
var latestSample =
|
||||
ListWrapper.slice(completeSample, completeSample.length - this._sampleSize, completeSample.length);
|
||||
var xValues = [];
|
||||
var yValues = [];
|
||||
for (var i = 0; i<latestSample.length; i++) {
|
||||
// For now, we only use the array index as x value.
|
||||
// TODO(tbosch): think about whether we should use time here instead
|
||||
ListWrapper.push(xValues, i);
|
||||
ListWrapper.push(yValues, latestSample[i][this._metric]);
|
||||
}
|
||||
var regressionSlope = Statistic.calculateRegressionSlope(
|
||||
xValues, Statistic.calculateMean(xValues),
|
||||
yValues, Statistic.calculateMean(yValues)
|
||||
);
|
||||
return regressionSlope >= 0 ? latestSample : null;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
var _SAMPLE_SIZE = new OpaqueToken('RegressionSlopeValidator.sampleSize');
|
||||
var _METRIC = new OpaqueToken('RegressionSlopeValidator.metric');
|
||||
var _BINDINGS = [
|
||||
bind(Validator).toFactory(
|
||||
(sampleSize, metric) => new RegressionSlopeValidator(sampleSize, metric),
|
||||
[_SAMPLE_SIZE, _METRIC]
|
||||
),
|
||||
bind(_SAMPLE_SIZE).toValue(10),
|
||||
bind(_METRIC).toValue('script')
|
||||
];
|
|
@ -0,0 +1,45 @@
|
|||
import { List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
import { Validator } from '../validator';
|
||||
|
||||
/**
|
||||
* A validator that waits for the sample to have a certain size.
|
||||
*/
|
||||
export class SizeValidator extends Validator {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get SAMPLE_SIZE() { return _SAMPLE_SIZE; }
|
||||
|
||||
_sampleSize:number;
|
||||
|
||||
constructor(size) {
|
||||
super();
|
||||
this._sampleSize = size;
|
||||
}
|
||||
|
||||
describe():any {
|
||||
return {
|
||||
'sampleSize': this._sampleSize
|
||||
};
|
||||
}
|
||||
|
||||
validate(completeSample:List<any>):List<any> {
|
||||
if (completeSample.length >= this._sampleSize) {
|
||||
return ListWrapper.slice(completeSample, completeSample.length - this._sampleSize, completeSample.length);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
var _SAMPLE_SIZE = new OpaqueToken('SizeValidator.sampleSize');
|
||||
var _BINDINGS = [
|
||||
bind(Validator).toFactory(
|
||||
(size) => new SizeValidator(size),
|
||||
[_SAMPLE_SIZE]
|
||||
),
|
||||
bind(_SAMPLE_SIZE).toValue(10)
|
||||
];
|
|
@ -0,0 +1,23 @@
|
|||
import { Promise } from 'angular2/src/facade/async';
|
||||
import { BaseException, ABSTRACT } from 'angular2/src/facade/lang';
|
||||
|
||||
/**
 * A WebDriverAdapter bridges API differences between different WebDriver clients,
 * e.g. JS vs Dart Async vs Dart Sync webdriver.
 * Needs one implementation for every supported WebDriver client.
 */
@ABSTRACT()
export class WebDriverAdapter {
  // Runs the callback via the client's execution model and resolves
  // with its result.
  waitFor(callback:Function):Promise {
    throw new BaseException('NYI');
  }
  // Executes the given script in the browser.
  executeScript(script:string):Promise {
    throw new BaseException('NYI');
  }
  // Resolves with the capabilities of the browser session.
  capabilities():Promise {
    throw new BaseException('NYI');
  }
  // Resolves with the log entries of the given log type.
  logs(type:string):Promise {
    throw new BaseException('NYI');
  }
}
|
|
@ -0,0 +1,40 @@
|
|||
import { BaseException, ABSTRACT } from 'angular2/src/facade/lang';
|
||||
import { Promise } from 'angular2/src/facade/async';
|
||||
import { List } from 'angular2/src/facade/collection';
|
||||
|
||||
/**
 * A WebDriverExtension implements extended commands of the webdriver protocol
 * for a given browser, independent of the WebDriverAdapter.
 * Needs one implementation for every supported Browser.
 */
@ABSTRACT()
export class WebDriverExtension {
  // Triggers a garbage collection in the browser.
  gc():Promise {
    throw new BaseException('NYI');
  }

  timeStamp(name:string, names:List<String>):Promise {
    throw new BaseException('NYI');
  }

  // Writes a begin mark with the given name into the performance log.
  timeBegin(name):Promise {
    throw new BaseException('NYI');
  }

  // Writes an end mark for `name`.
  // NOTE(review): ChromeDriverExtension treats the second argument as the
  // name of the next mark to begin (a string), not a boolean — verify
  // which signature is intended.
  timeEnd(name, restart:boolean):Promise {
    throw new BaseException('NYI');
  }

  /**
   * Format:
   * - name: event name, e.g. 'script', 'gc', ...
   * - ph: phase: 'B' (begin), 'E' (end), 'b' (nestable start), 'e' (nestable end)
   * - ts: timestamp, e.g. 12345
   * - args: arguments, e.g. {someArg: 1}
   *
   * Based on [Chrome Trace Event Format](https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/edit)
   **/
  readPerfLog():Promise<List> {
    throw new BaseException('NYI');
  }
}
|
|
@ -0,0 +1,23 @@
|
|||
library benchpress.src.webdriver.async_webdriver_adapter_dart;
|
||||
|
||||
import 'package:angular2/src/facade/async.dart' show Future;
|
||||
import '../web_driver_adapter.dart' show WebDriverAdapter;
|
||||
|
||||
/// WebDriverAdapter for Dart WebDriver clients whose API is already
/// asynchronous, i.e. returns Futures directly.
class AsyncWebDriverAdapter extends WebDriverAdapter {
  dynamic _driver;
  AsyncWebDriverAdapter(driver) {
    this._driver = driver;
  }
  /// Runs the callback and returns its Future as-is.
  Future waitFor(Function callback) {
    return callback();
  }
  /// Executes the given script in the browser.
  Future executeScript(String script) {
    return this._driver.execute(script);
  }
  /// Returns the capabilities of the browser session.
  Future capabilities() {
    return this._driver.capabilities;
  }
  /// Returns the log entries of the given log type.
  Future logs(String type) {
    return this._driver.logs.get(type);
  }
}
|
|
@ -0,0 +1,151 @@
|
|||
import { bind } from 'angular2/di';
|
||||
import { ListWrapper } from 'angular2/src/facade/collection';
|
||||
import {
|
||||
Json, isPresent, isBlank, RegExpWrapper, StringWrapper
|
||||
} from 'angular2/src/facade/lang';
|
||||
|
||||
import { WebDriverExtension } from '../web_driver_extension';
|
||||
import { WebDriverAdapter } from '../web_driver_adapter';
|
||||
import { Promise } from 'angular2/src/facade/async';
|
||||
|
||||
|
||||
// Marks written by timeBegin/timeEnd; the capture group is the mark name.
var BEGIN_MARK_RE = RegExpWrapper.create('begin_(.*)');
var END_MARK_RE = RegExpWrapper.create('end_(.*)');

/**
 * WebDriverExtension for Chrome: reads the Timeline records from the
 * 'performance' log and converts them into benchpress events.
 */
export class ChromeDriverExtension extends WebDriverExtension {
  // TODO(tbosch): use static values when our transpiler supports them
  static get BINDINGS() { return _BINDINGS; }

  _driver:WebDriverAdapter;

  constructor(driver:WebDriverAdapter) {
    super();
    this._driver = driver;
  }

  // Requires Chrome to be started with the js-flags that expose window.gc().
  gc() {
    return this._driver.executeScript('window.gc()');
  }

  timeBegin(name:string):Promise {
    // Note: Can't use console.time / console.timeEnd as it does not show up in the perf log!
    return this._driver.executeScript(`console.timeStamp('begin_${name}');`);
  }

  // Ends the measurement `name` and, when `restartName` is given, begins
  // the next one in the same script call so no events are lost in between.
  timeEnd(name:string, restartName:string = null):Promise {
    // Note: Can't use console.time / console.timeEnd as it does not show up in the perf log!
    var script = `console.timeStamp('end_${name}');`;
    if (isPresent(restartName)) {
      script += `console.timeStamp('begin_${restartName}');`
    }
    return this._driver.executeScript(script);
  }

  // Reads 'Timeline.eventRecorded' messages from the performance log and
  // converts their records into benchpress events.
  readPerfLog() {
    // TODO(tbosch): Bug in ChromeDriver: Need to execute at least one command
    // so that the browser logs can be read out!
    return this._driver.executeScript('1+1')
      .then( (_) => this._driver.logs('performance') )
      .then( (entries) => {
        var records = [];
        ListWrapper.forEach(entries, function(entry) {
          var message = Json.parse(entry['message'])['message'];
          if (StringWrapper.equals(message['method'], 'Timeline.eventRecorded')) {
            ListWrapper.push(records, message['params']['record']);
          }
        });
        return this._convertPerfRecordsToEvents(records);
      });
  }

  // Recursively maps Timeline records (and their children) to events:
  // FunctionCall -> 'script', TimeStamp -> begin/end marks,
  // render-related types -> 'render', GCEvent -> 'gc'.
  _convertPerfRecordsToEvents(records, events = null) {
    if (isBlank(events)) {
      events = [];
    }
    records.forEach( (record) => {
      var endEvent = null;
      var type = record['type'];
      var data = record['data'];
      var startTime = record['startTime'];
      var endTime = record['endTime'];

      // Ignore FunctionCalls from webdriver's own injected script.
      if (StringWrapper.equals(type, 'FunctionCall') &&
        (isBlank(data) || !StringWrapper.equals(data['scriptName'], 'InjectedScript'))) {
        ListWrapper.push(events, {
          'name': 'script',
          'ts': startTime,
          'ph': 'B'
        });
        endEvent = {
          'name': 'script',
          'ts': endTime,
          'ph': 'E',
          'args': null
        }
      } else if (StringWrapper.equals(type, 'TimeStamp')) {
        var name = data['message'];
        var ph;
        var match = RegExpWrapper.firstMatch(BEGIN_MARK_RE, name);
        if (isPresent(match)) {
          ph = 'b';
        } else {
          match = RegExpWrapper.firstMatch(END_MARK_RE, name);
          if (isPresent(match)) {
            ph = 'e';
          }
        }
        if (isPresent(ph)) {
          // NOTE(review): mark events carry no 'ts' — presumably unused by
          // the consumer (PerflogMetric only checks 'ph'/'name' for marks);
          // verify.
          ListWrapper.push(events, {
            'name': match[1],
            'ph': ph
          });
        }
      } else if (StringWrapper.equals(type, 'RecalculateStyles') ||
        StringWrapper.equals(type, 'Layout') ||
        StringWrapper.equals(type, 'UpdateLayerTree') ||
        StringWrapper.equals(type, 'Paint') ||
        StringWrapper.equals(type, 'Rasterize') ||
        StringWrapper.equals(type, 'CompositeLayers')) {
        ListWrapper.push(events, {
          'name': 'render',
          'ts': startTime,
          'ph': 'B'
        });
        endEvent = {
          'name': 'render',
          'ts': endTime,
          'ph': 'E',
          'args': null
        }
      } else if (StringWrapper.equals(type, 'GCEvent')) {
        ListWrapper.push(events, {
          'name': 'gc',
          'ts': startTime,
          'ph': 'B'
        });
        endEvent = {
          'name': 'gc',
          'ts': endTime,
          'ph': 'E',
          'args': {
            'amount': data['usedHeapSizeDelta']
          }
        };
      }
      // Child records are emitted between the begin and end event of
      // their parent.
      if (isPresent(record['children'])) {
        this._convertPerfRecordsToEvents(record['children'], events);
      }
      if (isPresent(endEvent)) {
        ListWrapper.push(events, endEvent);
      }
    });
    return events;
  }
}

var _BINDINGS = [
  bind(WebDriverExtension).toFactory(
    (driver) => new ChromeDriverExtension(driver),
    [WebDriverAdapter]
  )
];
|
|
@ -0,0 +1,49 @@
|
|||
import { Promise, PromiseWrapper } from 'angular2/src/facade/async';
|
||||
import { bind } from 'angular2/di';
|
||||
import { WebDriverAdapter } from '../web_driver_adapter';
|
||||
|
||||
import webdriver from 'selenium-webdriver';
|
||||
|
||||
/**
 * Adapter for the selenium-webdriver.
 */
export class SeleniumWebDriverAdapter extends WebDriverAdapter {
  _driver:any;

  constructor(driver) {
    super();
    this._driver = driver;
  }

  // Converts a selenium-webdriver thenable into a framework Promise.
  // NOTE(review): complete/reject are passed unbound — assumes they do not
  // depend on `this`; verify against PromiseWrapper.completer().
  _convertPromise(thenable) {
    var completer = PromiseWrapper.completer();
    thenable.then(completer.complete, completer.reject);
    return completer.promise;
  }

  // Schedules the callback on selenium's control flow.
  waitFor(callback):Promise {
    return this._convertPromise(this._driver.controlFlow().execute(callback));
  }

  executeScript(script:string):Promise {
    return this._convertPromise(this._driver.executeScript(script));
  }

  capabilities():Promise {
    return this._convertPromise(this._driver.getCapabilities());
  }

  logs(type:string):Promise {
    // Needed as selenium-webdriver does not forward
    // performance logs in the correct way via manage().logs
    return this._convertPromise(this._driver.schedule(
      new webdriver.Command(webdriver.CommandName.GET_LOG).
        setParameter('type', type),
      'WebDriver.manage().logs().get(' + type + ')').then( (logs) => {
        // Need to convert the Array into an instance of an Array
        // as selenium-webdriver uses an own Node.js context!
        return [].slice.call(logs);
      }));
  }

}
|
|
@ -0,0 +1,41 @@
|
|||
library benchpress.src.webdriver.sync_webdriver_adapter_dart;
|
||||
|
||||
import 'package:angular2/src/facade/async.dart' show Future, PromiseWrapper;
|
||||
import '../web_driver_adapter.dart' show WebDriverAdapter;
|
||||
|
||||
class SyncWebDriverAdapter extends WebDriverAdapter {
|
||||
dynamic _driver;
|
||||
SyncWebDriverAdapter(driver) {
|
||||
this._driver = driver;
|
||||
}
|
||||
Future waitFor(Function callback) {
|
||||
return this._convertToAsync(callback);
|
||||
}
|
||||
Future _convertToAsync(callback) {
|
||||
try {
|
||||
var result = callback();
|
||||
if (result is Promise) {
|
||||
return result;
|
||||
} else {
|
||||
return PromiseWrapper.resolve(result);
|
||||
}
|
||||
} catch (e) {
|
||||
return PromiseWrapper.reject(result);
|
||||
}
|
||||
}
|
||||
Future executeScript(String script) {
|
||||
return this._convertToAsync(() {
|
||||
return this._driver.execute(script);
|
||||
});
|
||||
}
|
||||
Future capabilities() {
|
||||
return this._convertToAsync(() {
|
||||
return this._driver.capabilities;
|
||||
});
|
||||
}
|
||||
Future logs(String type) {
|
||||
return this._convertToAsync(() {
|
||||
return this._driver.logs.get(script);
|
||||
});
|
||||
}
|
||||
}
|
|
@ -0,0 +1,329 @@
|
|||
import {ddescribe, describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
|
||||
|
||||
import { List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
|
||||
|
||||
import { Metric, PerflogMetric, WebDriverExtension, bind, Injector } from 'benchpress/benchpress';
|
||||
|
||||
export function main() {
|
||||
var commandLog;
|
||||
|
||||
function createMetric(perfLogs) {
|
||||
commandLog = [];
|
||||
return new Injector([
|
||||
PerflogMetric.BINDINGS,
|
||||
bind(PerflogMetric.SET_TIMEOUT).toValue( (fn, millis) => {
|
||||
ListWrapper.push(commandLog, ['setTimeout', millis]);
|
||||
fn();
|
||||
}),
|
||||
bind(WebDriverExtension).toValue(new MockDriverExtension(perfLogs, commandLog))
|
||||
]).get(Metric);
|
||||
}
|
||||
|
||||
describe('perflog metric', () => {
|
||||
|
||||
it('should describe itself', () => {
|
||||
expect(createMetric([[]]).describe()['script']).toBe('script execution time in ms');
|
||||
});
|
||||
|
||||
describe('beginMeasure', () => {
|
||||
|
||||
it('should mark the timeline', (done) => {
|
||||
var metric = createMetric([[]]);
|
||||
metric.beginMeasure().then((_) => {
|
||||
expect(commandLog).toEqual([['timeBegin', 'benchpress0']]);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('endMeasure', () => {
|
||||
|
||||
it('should mark and aggregate events in between the marks', (done) => {
|
||||
var events = [
|
||||
[
|
||||
markStartEvent('benchpress0'),
|
||||
startEvent('script', 4),
|
||||
endEvent('script', 6),
|
||||
markEndEvent('benchpress0')
|
||||
]
|
||||
];
|
||||
var metric = createMetric(events);
|
||||
metric.beginMeasure()
|
||||
.then( (_) => metric.endMeasure(false) )
|
||||
.then( (data) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'],
|
||||
['timeEnd', 'benchpress0', null],
|
||||
'readPerfLog'
|
||||
]);
|
||||
expect(data['script']).toBe(2);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should restart timing', (done) => {
|
||||
var events = [
|
||||
[
|
||||
markStartEvent('benchpress0'),
|
||||
markEndEvent('benchpress0'),
|
||||
markStartEvent('benchpress1'),
|
||||
], [
|
||||
markEndEvent('benchpress1')
|
||||
]
|
||||
];
|
||||
var metric = createMetric(events);
|
||||
metric.beginMeasure()
|
||||
.then( (_) => metric.endMeasure(true) )
|
||||
.then( (_) => metric.endMeasure(true) )
|
||||
.then( (_) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'],
|
||||
['timeEnd', 'benchpress0', 'benchpress1'],
|
||||
'readPerfLog',
|
||||
['timeEnd', 'benchpress1', 'benchpress2'],
|
||||
'readPerfLog'
|
||||
]);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should loop and aggregate until the end mark is present', (done) => {
|
||||
var events = [
|
||||
[ markStartEvent('benchpress0'), startEvent('script', 1) ],
|
||||
[ endEvent('script', 2) ],
|
||||
[ startEvent('script', 3), endEvent('script', 5), markEndEvent('benchpress0') ]
|
||||
];
|
||||
var metric = createMetric(events);
|
||||
metric.beginMeasure()
|
||||
.then( (_) => metric.endMeasure(false) )
|
||||
.then( (data) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'],
|
||||
['timeEnd', 'benchpress0', null],
|
||||
'readPerfLog',
|
||||
[ 'setTimeout', 100 ],
|
||||
'readPerfLog',
|
||||
[ 'setTimeout', 100 ],
|
||||
'readPerfLog'
|
||||
]);
|
||||
expect(data['script']).toBe(3);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should store events after the end mark for the next call', (done) => {
|
||||
var events = [
|
||||
[ markStartEvent('benchpress0'), markEndEvent('benchpress0'), markStartEvent('benchpress1'),
|
||||
startEvent('script', 1), endEvent('script', 2) ],
|
||||
[ startEvent('script', 3), endEvent('script', 5), markEndEvent('benchpress1') ]
|
||||
];
|
||||
var metric = createMetric(events);
|
||||
metric.beginMeasure()
|
||||
.then( (_) => metric.endMeasure(true) )
|
||||
.then( (data) => {
|
||||
expect(data['script']).toBe(0);
|
||||
return metric.endMeasure(true)
|
||||
})
|
||||
.then( (data) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'],
|
||||
['timeEnd', 'benchpress0', 'benchpress1'],
|
||||
'readPerfLog',
|
||||
['timeEnd', 'benchpress1', 'benchpress2'],
|
||||
'readPerfLog'
|
||||
]);
|
||||
expect(data['script']).toBe(3);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('aggregation', () => {
|
||||
|
||||
function aggregate(events) {
|
||||
ListWrapper.insert(events, 0, markStartEvent('benchpress0'));
|
||||
ListWrapper.push(events, markEndEvent('benchpress0'));
|
||||
var metric = createMetric([events]);
|
||||
return metric
|
||||
.beginMeasure().then( (_) => metric.endMeasure(false) );
|
||||
}
|
||||
|
||||
|
||||
it('should report a single interval', (done) => {
|
||||
aggregate([
|
||||
startEvent('script', 0),
|
||||
endEvent('script', 5)
|
||||
]).then((data) => {
|
||||
expect(data['script']).toBe(5);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should sum up multiple intervals', (done) => {
|
||||
aggregate([
|
||||
startEvent('script', 0),
|
||||
endEvent('script', 5),
|
||||
startEvent('script', 10),
|
||||
endEvent('script', 17)
|
||||
]).then((data) => {
|
||||
expect(data['script']).toBe(12);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore not started intervals', (done) => {
|
||||
aggregate([
|
||||
endEvent('script', 10)
|
||||
]).then((data) => {
|
||||
expect(data['script']).toBe(0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore not ended intervals', (done) => {
|
||||
aggregate([
|
||||
startEvent('script', 10)
|
||||
]).then((data) => {
|
||||
expect(data['script']).toBe(0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
['script', 'gcTime', 'render'].forEach( (metricName) => {
|
||||
it(`should support ${metricName} metric`, (done) => {
|
||||
aggregate([
|
||||
startEvent(metricName, 0),
|
||||
endEvent(metricName, 5)
|
||||
]).then((data) => {
|
||||
expect(data[metricName]).toBe(5);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should support gcAmount metric', (done) => {
|
||||
aggregate([
|
||||
startEvent('gc', 0),
|
||||
endEvent('gc', 5, {'amount': 10})
|
||||
]).then((data) => {
|
||||
expect(data['gcAmount']).toBe(10);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should subtract gcTime in script from script time', (done) => {
|
||||
aggregate([
|
||||
startEvent('script', 0),
|
||||
startEvent('gc', 1),
|
||||
endEvent('gc', 4, {'amount': 10}),
|
||||
endEvent('script', 5)
|
||||
]).then((data) => {
|
||||
expect(data['script']).toBe(2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
describe('gcTimeInScript / gcAmountInScript', () => {
|
||||
|
||||
it('should use gc during script execution', (done) => {
|
||||
aggregate([
|
||||
startEvent('script', 0),
|
||||
startEvent('gc', 1),
|
||||
endEvent('gc', 4, {'amount': 10}),
|
||||
endEvent('script', 5)
|
||||
]).then((data) => {
|
||||
expect(data['gcTimeInScript']).toBe(3);
|
||||
expect(data['gcAmountInScript']).toBe(10);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore gc outside of script execution', (done) => {
|
||||
aggregate([
|
||||
startEvent('gc', 1),
|
||||
endEvent('gc', 4, {'amount': 10}),
|
||||
startEvent('script', 0),
|
||||
endEvent('script', 5)
|
||||
]).then((data) => {
|
||||
expect(data['gcTimeInScript']).toBe(0);
|
||||
expect(data['gcAmountInScript']).toBe(0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function markStartEvent(type) {
|
||||
return {
|
||||
'name': type,
|
||||
'ph': 'b'
|
||||
}
|
||||
}
|
||||
|
||||
function markEndEvent(type) {
|
||||
return {
|
||||
'name': type,
|
||||
'ph': 'e'
|
||||
}
|
||||
}
|
||||
|
||||
function startEvent(type, time) {
|
||||
return {
|
||||
'name': type,
|
||||
'ts': time,
|
||||
'ph': 'B'
|
||||
}
|
||||
}
|
||||
|
||||
function endEvent(type, time, args = null) {
|
||||
return {
|
||||
'name': type,
|
||||
'ts': time,
|
||||
'ph': 'E',
|
||||
'args': args
|
||||
}
|
||||
}
|
||||
|
||||
class MockDriverExtension extends WebDriverExtension {
|
||||
_perfLogs:List;
|
||||
_commandLog:List;
|
||||
constructor(perfLogs, commandLog) {
|
||||
super();
|
||||
this._perfLogs = perfLogs;
|
||||
this._commandLog = commandLog;
|
||||
}
|
||||
|
||||
timeBegin(name):Promise {
|
||||
ListWrapper.push(this._commandLog, ['timeBegin', name]);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
|
||||
timeEnd(name, restartName):Promise {
|
||||
ListWrapper.push(this._commandLog, ['timeEnd', name, restartName]);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
|
||||
readPerfLog():Promise {
|
||||
ListWrapper.push(this._commandLog, 'readPerfLog');
|
||||
if (this._perfLogs.length > 0) {
|
||||
var next = this._perfLogs[0];
|
||||
ListWrapper.removeAt(this._perfLogs, 0);
|
||||
return PromiseWrapper.resolve(next);
|
||||
} else {
|
||||
return PromiseWrapper.resolve([]);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,101 @@
|
|||
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
|
||||
|
||||
import { isBlank, isPresent } from 'angular2/src/facade/lang';
|
||||
import { List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
|
||||
import {
|
||||
SampleState, Reporter, bind, Injector,
|
||||
ConsoleReporter, SampleDescription
|
||||
} from 'benchpress/benchpress';
|
||||
|
||||
export function main() {
|
||||
describe('console reporter', () => {
|
||||
var reporter;
|
||||
var log;
|
||||
|
||||
function createReporter({columnWidth, sampleId, descriptions, metrics}) {
|
||||
log = [];
|
||||
if (isBlank(descriptions)) {
|
||||
descriptions = [];
|
||||
}
|
||||
if (isBlank(sampleId)) {
|
||||
sampleId = 'null';
|
||||
}
|
||||
var bindings = [
|
||||
ConsoleReporter.BINDINGS,
|
||||
bind(SampleDescription).toValue(new SampleDescription(sampleId, descriptions, metrics)),
|
||||
bind(ConsoleReporter.PRINT).toValue((line) => ListWrapper.push(log, line))
|
||||
];
|
||||
if (isPresent(columnWidth)) {
|
||||
ListWrapper.push(bindings, bind(ConsoleReporter.COLUMN_WIDTH).toValue(columnWidth));
|
||||
}
|
||||
reporter = new Injector(bindings).get(Reporter);
|
||||
}
|
||||
|
||||
it('should print the sample id, description and table header', () => {
|
||||
createReporter({
|
||||
columnWidth: 8,
|
||||
sampleId: 'someSample',
|
||||
descriptions: [{
|
||||
'a': 1,
|
||||
'b': 2
|
||||
}],
|
||||
metrics: {
|
||||
'm1': 'some desc',
|
||||
'm2': 'some other desc'
|
||||
}
|
||||
});
|
||||
expect(log).toEqual([
|
||||
'BENCHMARK someSample',
|
||||
'Description:',
|
||||
'- a: 1',
|
||||
'- b: 2',
|
||||
'Metrics:',
|
||||
'- m1: some desc',
|
||||
'- m2: some other desc',
|
||||
'',
|
||||
' m1 | m2',
|
||||
'-------- | --------',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should print a table row', () => {
|
||||
createReporter({
|
||||
columnWidth: 8,
|
||||
metrics: {
|
||||
'a': '',
|
||||
'b': ''
|
||||
}
|
||||
});
|
||||
log = [];
|
||||
reporter.reportMeasureValues(0, {
|
||||
'a': 1.23, 'b': 2
|
||||
});
|
||||
expect(log).toEqual([
|
||||
' 1.23 | 2.00'
|
||||
]);
|
||||
});
|
||||
|
||||
it('should print the table footer and stats when there is a valid sample', () => {
|
||||
createReporter({
|
||||
columnWidth: 8,
|
||||
metrics: {
|
||||
'a': '',
|
||||
'b': ''
|
||||
}
|
||||
});
|
||||
log = [];
|
||||
reporter.reportSample([], [{
|
||||
'a': 3, 'b': 6
|
||||
},{
|
||||
'a': 5, 'b': 9
|
||||
}]);
|
||||
expect(log).toEqual([
|
||||
'======== | ========',
|
||||
'4.00±25% | 7.50±20%'
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
|
@ -0,0 +1,119 @@
|
|||
import {describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
|
||||
import {
|
||||
Runner, Sampler, SampleDescription,
|
||||
Validator, bind, Injector, Metric,
|
||||
Options
|
||||
} from 'benchpress/benchpress';
|
||||
import { isBlank } from 'angular2/src/facade/lang';
|
||||
import { Promise, PromiseWrapper } from 'angular2/src/facade/async';
|
||||
|
||||
export function main() {
|
||||
describe('runner', () => {
|
||||
var injector;
|
||||
var runner;
|
||||
|
||||
function createRunner(defaultBindings = null) {
|
||||
if (isBlank(defaultBindings)) {
|
||||
defaultBindings = [];
|
||||
}
|
||||
runner = new Runner([
|
||||
defaultBindings,
|
||||
bind(Sampler).toFactory(
|
||||
(_injector) => {
|
||||
injector = _injector;
|
||||
return new MockSampler();
|
||||
}, [Injector]
|
||||
),
|
||||
bind(Metric).toFactory( () => new MockMetric(), []),
|
||||
bind(Validator).toFactory( () => new MockValidator(), [])
|
||||
]);
|
||||
return runner;
|
||||
}
|
||||
|
||||
it('should set SampleDescription.id', (done) => {
|
||||
createRunner().sample({id: 'someId'}).then( (_) => {
|
||||
expect(injector.get(SampleDescription).id).toBe('someId');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should merge SampleDescription.description', (done) => {
|
||||
createRunner([
|
||||
bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1})
|
||||
]).sample({id: 'someId', bindings: [
|
||||
bind(Options.SAMPLE_DESCRIPTION).toValue({'b': 2})
|
||||
]}).then( (_) => {
|
||||
expect(injector.get(SampleDescription).description).toEqual({
|
||||
'forceGc': false,
|
||||
'a': 1,
|
||||
'b': 2,
|
||||
'v': 11
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should fill SampleDescription.metrics from the Metric', (done) => {
|
||||
createRunner().sample({id: 'someId'}).then( (_) => {
|
||||
expect(injector.get(SampleDescription).metrics).toEqual({ 'm1': 'some metric' });
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should bind Options.EXECUTE', (done) => {
|
||||
var execute = () => {};
|
||||
createRunner().sample({id: 'someId', execute: execute}).then( (_) => {
|
||||
expect(injector.get(Options.EXECUTE)).toEqual(execute);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should bind Options.PREPARE', (done) => {
|
||||
var prepare = () => {};
|
||||
createRunner().sample({id: 'someId', prepare: prepare}).then( (_) => {
|
||||
expect(injector.get(Options.PREPARE)).toEqual(prepare);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should overwrite bindings per sample call', (done) => {
|
||||
createRunner([
|
||||
bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1}),
|
||||
]).sample({id: 'someId', bindings: [
|
||||
bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 2}),
|
||||
]}).then( (_) => {
|
||||
expect(injector.get(SampleDescription).description['a']).toBe(2);
|
||||
done();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
class MockValidator extends Validator {
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
describe() {
|
||||
return { 'v': 11 };
|
||||
}
|
||||
}
|
||||
|
||||
class MockMetric extends Metric {
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
describe() {
|
||||
return { 'm1': 'some metric' };
|
||||
}
|
||||
}
|
||||
|
||||
class MockSampler extends Sampler {
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
sample():Promise {
|
||||
return PromiseWrapper.resolve(23);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,364 @@
|
|||
import {describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
|
||||
|
||||
import { isBlank, isPresent, BaseException, stringify } from 'angular2/src/facade/lang';
|
||||
import { ListWrapper, List } from 'angular2/src/facade/collection';
|
||||
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
|
||||
|
||||
import {
|
||||
Sampler, WebDriverAdapter, WebDriverExtension,
|
||||
Validator, Metric, Reporter, Browser,
|
||||
bind, Injector, Options
|
||||
} from 'benchpress/benchpress';
|
||||
|
||||
export function main() {
|
||||
var EMPTY_EXECUTE = () => {};
|
||||
|
||||
describe('sampler', () => {
|
||||
var sampler;
|
||||
|
||||
function createSampler({
|
||||
driver,
|
||||
driverExtension,
|
||||
metric,
|
||||
reporter,
|
||||
validator,
|
||||
forceGc,
|
||||
prepare,
|
||||
execute
|
||||
} = {}) {
|
||||
if (isBlank(metric)) {
|
||||
metric = new MockMetric([]);
|
||||
}
|
||||
if (isBlank(reporter)) {
|
||||
reporter = new MockReporter([]);
|
||||
}
|
||||
if (isBlank(driver)) {
|
||||
driver = new MockDriverAdapter([]);
|
||||
}
|
||||
if (isBlank(driverExtension)) {
|
||||
driverExtension = new MockDriverExtension([]);
|
||||
}
|
||||
var bindings = ListWrapper.concat(Sampler.BINDINGS, [
|
||||
bind(Metric).toValue(metric),
|
||||
bind(Reporter).toValue(reporter),
|
||||
bind(WebDriverAdapter).toValue(driver),
|
||||
bind(WebDriverExtension).toValue(driverExtension),
|
||||
bind(Options.EXECUTE).toValue(execute),
|
||||
bind(Validator).toValue(validator)
|
||||
]);
|
||||
if (isPresent(prepare)) {
|
||||
ListWrapper.push(bindings, bind(Options.PREPARE).toValue(prepare));
|
||||
}
|
||||
if (isPresent(forceGc)) {
|
||||
ListWrapper.push(bindings, bind(Options.FORCE_GC).toValue(forceGc));
|
||||
}
|
||||
|
||||
sampler = new Injector(bindings).get(Sampler);
|
||||
}
|
||||
|
||||
it('should call the prepare and execute callbacks using WebDriverAdapter.waitFor', (done) => {
|
||||
var log = [];
|
||||
var count = 0;
|
||||
var driver = new MockDriverAdapter([], (callback) => {
|
||||
var result = callback();
|
||||
ListWrapper.push(log, result);
|
||||
return PromiseWrapper.resolve(result);
|
||||
});
|
||||
createSampler({
|
||||
driver: driver,
|
||||
validator: createCountingValidator(2),
|
||||
prepare: () => {
|
||||
return count++;
|
||||
},
|
||||
execute: () => {
|
||||
return count++;
|
||||
}
|
||||
});
|
||||
sampler.sample().then( (_) => {
|
||||
expect(count).toBe(4);
|
||||
expect(log).toEqual([0,1,2,3]);
|
||||
done();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
it('should call prepare, gc, beginMeasure, execute, gc, endMeasure for every iteration', (done) => {
|
||||
var workCount = 0;
|
||||
var log = [];
|
||||
createSampler({
|
||||
forceGc: true,
|
||||
metric: createCountingMetric(log),
|
||||
driverExtension: new MockDriverExtension(log),
|
||||
validator: createCountingValidator(2),
|
||||
prepare: () => {
|
||||
ListWrapper.push(log, `p${workCount++}`);
|
||||
},
|
||||
execute: () => {
|
||||
ListWrapper.push(log, `w${workCount++}`);
|
||||
}
|
||||
});
|
||||
sampler.sample().then( (_) => {
|
||||
expect(log).toEqual([
|
||||
['gc'],
|
||||
'p0',
|
||||
['gc'],
|
||||
['beginMeasure'],
|
||||
'w1',
|
||||
['gc'],
|
||||
['endMeasure', false, {'script': 0}],
|
||||
'p2',
|
||||
['gc'],
|
||||
['beginMeasure'],
|
||||
'w3',
|
||||
['gc'],
|
||||
['endMeasure', false, {'script': 1}],
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should call execute, gc, endMeasure for every iteration if there is no prepare callback', (done) => {
|
||||
var log = [];
|
||||
var workCount = 0;
|
||||
createSampler({
|
||||
forceGc: true,
|
||||
metric: createCountingMetric(log),
|
||||
driverExtension: new MockDriverExtension(log),
|
||||
validator: createCountingValidator(2),
|
||||
execute: () => {
|
||||
ListWrapper.push(log, `w${workCount++}`);
|
||||
},
|
||||
prepare: null
|
||||
});
|
||||
sampler.sample().then( (_) => {
|
||||
expect(log).toEqual([
|
||||
['gc'],
|
||||
['beginMeasure'],
|
||||
'w0',
|
||||
['gc'],
|
||||
['endMeasure', true, {'script': 0}],
|
||||
'w1',
|
||||
['gc'],
|
||||
['endMeasure', true, {'script': 1}],
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should not gc if the flag is not set', (done) => {
|
||||
var workCount = 0;
|
||||
var log = [];
|
||||
createSampler({
|
||||
metric: createCountingMetric(),
|
||||
driverExtension: new MockDriverExtension(log),
|
||||
validator: createCountingValidator(2),
|
||||
prepare: EMPTY_EXECUTE,
|
||||
execute: EMPTY_EXECUTE
|
||||
});
|
||||
sampler.sample().then( (_) => {
|
||||
expect(log).toEqual([]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should only collect metrics for execute and ignore metrics from prepare', (done) => {
|
||||
var scriptTime = 0;
|
||||
var iterationCount = 1;
|
||||
createSampler({
|
||||
validator: createCountingValidator(2),
|
||||
metric: new MockMetric([], () => {
|
||||
var result = PromiseWrapper.resolve({'script': scriptTime});
|
||||
scriptTime = 0;
|
||||
return result;
|
||||
}),
|
||||
prepare: () => {
|
||||
scriptTime = 1 * iterationCount;
|
||||
},
|
||||
execute: () => {
|
||||
scriptTime = 10 * iterationCount;
|
||||
iterationCount++;
|
||||
}
|
||||
});
|
||||
sampler.sample().then( (state) => {
|
||||
expect(state.completeSample.length).toBe(2);
|
||||
expect(state.completeSample[0]).toEqual({'script': 10});
|
||||
expect(state.completeSample[1]).toEqual({'script': 20});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the validator for every execution and store the valid sample', (done) => {
|
||||
var log = [];
|
||||
var validSample = [{}];
|
||||
|
||||
createSampler({
|
||||
metric: createCountingMetric(),
|
||||
validator: createCountingValidator(2, validSample, log),
|
||||
execute: EMPTY_EXECUTE
|
||||
});
|
||||
sampler.sample().then( (state) => {
|
||||
expect(state.validSample).toBe(validSample);
|
||||
// TODO(tbosch): Why does this fail??
|
||||
// expect(log).toEqual([
|
||||
// ['validate', [{'script': 0}], null],
|
||||
// ['validate', [{'script': 0}, {'script': 1}], validSample]
|
||||
// ]);
|
||||
|
||||
expect(log.length).toBe(2);
|
||||
expect(log[0]).toEqual(
|
||||
['validate', [{'script': 0}], null]
|
||||
);
|
||||
expect(log[1]).toEqual(
|
||||
['validate', [{'script': 0}, {'script': 1}], validSample]
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should report the metric values', (done) => {
|
||||
var log = [];
|
||||
var validSample = [{}];
|
||||
createSampler({
|
||||
validator: createCountingValidator(2, validSample),
|
||||
metric: createCountingMetric(),
|
||||
reporter: new MockReporter(log),
|
||||
execute: EMPTY_EXECUTE
|
||||
});
|
||||
sampler.sample().then( (_) => {
|
||||
// TODO(tbosch): Why does this fail??
|
||||
// expect(log).toEqual([
|
||||
// ['reportMeasureValues', 0, {'script': 0}],
|
||||
// ['reportMeasureValues', 1, {'script': 1}],
|
||||
// ['reportSample', [{'script': 0}, {'script': 1}], validSample]
|
||||
// ]);
|
||||
expect(log.length).toBe(3);
|
||||
expect(log[0]).toEqual(
|
||||
['reportMeasureValues', 0, {'script': 0}]
|
||||
);
|
||||
expect(log[1]).toEqual(
|
||||
['reportMeasureValues', 1, {'script': 1}]
|
||||
);
|
||||
expect(log[2]).toEqual(
|
||||
['reportSample', [{'script': 0}, {'script': 1}], validSample]
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function createCountingValidator(count, validSample = null, log = null) {
|
||||
return new MockValidator(log, (completeSample) => {
|
||||
count--;
|
||||
if (count === 0) {
|
||||
return isPresent(validSample) ? validSample : completeSample;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function createCountingMetric(log = null) {
|
||||
var scriptTime = 0;
|
||||
return new MockMetric(log, () => {
|
||||
return { 'script': scriptTime++ };
|
||||
});
|
||||
}
|
||||
|
||||
class MockDriverAdapter extends WebDriverAdapter {
|
||||
_log:List;
|
||||
_waitFor:Function;
|
||||
constructor(log = null, waitFor = null) {
|
||||
super();
|
||||
if (isBlank(log)) {
|
||||
log = [];
|
||||
}
|
||||
this._log = log;
|
||||
this._waitFor = waitFor;
|
||||
}
|
||||
waitFor(callback:Function):Promise {
|
||||
if (isPresent(this._waitFor)) {
|
||||
return this._waitFor(callback);
|
||||
} else {
|
||||
return PromiseWrapper.resolve(callback());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class MockDriverExtension extends WebDriverExtension {
|
||||
_log:List;
|
||||
constructor(log = null) {
|
||||
super();
|
||||
if (isBlank(log)) {
|
||||
log = [];
|
||||
}
|
||||
this._log = log;
|
||||
}
|
||||
gc():Promise {
|
||||
ListWrapper.push(this._log, ['gc']);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
}
|
||||
|
||||
class MockValidator extends Validator {
|
||||
_validate:Function;
|
||||
_log:List;
|
||||
constructor(log = null, validate = null) {
|
||||
super();
|
||||
this._validate = validate;
|
||||
if (isBlank(log)) {
|
||||
log = [];
|
||||
}
|
||||
this._log = log;
|
||||
}
|
||||
validate(completeSample:List<Object>):List<Object> {
|
||||
var stableSample = isPresent(this._validate) ? this._validate(completeSample) : completeSample;
|
||||
ListWrapper.push(this._log, ['validate', completeSample, stableSample]);
|
||||
return stableSample;
|
||||
}
|
||||
}
|
||||
|
||||
class MockMetric extends Metric {
|
||||
_endMeasure:Function;
|
||||
_log:List;
|
||||
constructor(log = null, endMeasure = null) {
|
||||
super();
|
||||
this._endMeasure = endMeasure;
|
||||
if (isBlank(log)) {
|
||||
log = [];
|
||||
}
|
||||
this._log = log;
|
||||
}
|
||||
beginMeasure() {
|
||||
ListWrapper.push(this._log, ['beginMeasure']);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
endMeasure(restart) {
|
||||
var measureValues = isPresent(this._endMeasure) ? this._endMeasure() : {};
|
||||
ListWrapper.push(this._log, ['endMeasure', restart, measureValues]);
|
||||
return PromiseWrapper.resolve(measureValues);
|
||||
}
|
||||
}
|
||||
|
||||
class MockReporter extends Reporter {
|
||||
_log:List;
|
||||
constructor(log = null) {
|
||||
super();
|
||||
if (isBlank(log)) {
|
||||
log = [];
|
||||
}
|
||||
this._log = log;
|
||||
}
|
||||
reportMeasureValues(index, values):Promise {
|
||||
ListWrapper.push(this._log, ['reportMeasureValues', index, values]);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
reportSample(completeSample, validSample):Promise {
|
||||
ListWrapper.push(this._log, ['reportSample', completeSample, validSample]);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,34 @@
|
|||
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
|
||||
|
||||
import { Statistic } from 'benchpress/src/statistic';
|
||||
|
||||
import { NaN } from 'angular2/src/facade/math';
|
||||
|
||||
export function main() {
|
||||
describe('statistic', () => {
|
||||
|
||||
it('should calculate the mean', () => {
|
||||
expect(Statistic.calculateMean([])).toBeNaN();
|
||||
expect(Statistic.calculateMean([1,2,3])).toBe(2.0);
|
||||
});
|
||||
|
||||
it('should calculate the standard deviation', () => {
|
||||
expect(Statistic.calculateStandardDeviation([], NaN)).toBeNaN();
|
||||
expect(Statistic.calculateStandardDeviation([1], 1)).toBe(0.0);
|
||||
expect(Statistic.calculateStandardDeviation([2, 4, 4, 4, 5, 5, 7, 9], 5)).toBe(2.0);
|
||||
});
|
||||
|
||||
it('should calculate the coefficient of variation', () => {
|
||||
expect(Statistic.calculateCoefficientOfVariation([], NaN)).toBeNaN();
|
||||
expect(Statistic.calculateCoefficientOfVariation([1], 1)).toBe(0.0);
|
||||
expect(Statistic.calculateCoefficientOfVariation([2, 4, 4, 4, 5, 5, 7, 9], 5)).toBe(40.0);
|
||||
});
|
||||
|
||||
it('should calculate the regression slope', () => {
|
||||
expect(Statistic.calculateRegressionSlope([], NaN, [], NaN)).toBeNaN();
|
||||
expect(Statistic.calculateRegressionSlope([1], 1, [2], 2)).toBeNaN();
|
||||
expect(Statistic.calculateRegressionSlope([1,2], 1.5, [2,4], 3)).toBe(2.0);
|
||||
});
|
||||
|
||||
});
|
||||
}
|
|
@ -0,0 +1,51 @@
|
|||
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
|
||||
|
||||
import {
|
||||
Validator, RegressionSlopeValidator, Injector, bind
|
||||
} from 'benchpress/benchpress';
|
||||
|
||||
export function main() {
|
||||
describe('regression slope validator', () => {
|
||||
var validator;
|
||||
|
||||
function createValidator({size, metric}) {
|
||||
validator = new Injector([
|
||||
RegressionSlopeValidator.BINDINGS,
|
||||
bind(RegressionSlopeValidator.METRIC).toValue(metric),
|
||||
bind(RegressionSlopeValidator.SAMPLE_SIZE).toValue(size)
|
||||
]).get(Validator);
|
||||
}
|
||||
|
||||
it('should return sampleSize and metric as description', () => {
|
||||
createValidator({size: 2, metric: 'script'});
|
||||
expect(validator.describe()).toEqual({
|
||||
'sampleSize': 2,
|
||||
'regressionSlopeMetric': 'script'
|
||||
});
|
||||
});
|
||||
|
||||
it('should return null while the completeSample is smaller than the given size', () => {
|
||||
createValidator({size: 2, metric: 'script'});
|
||||
expect(validator.validate([])).toBe(null);
|
||||
expect(validator.validate([{}])).toBe(null);
|
||||
});
|
||||
|
||||
it('should return null while the regression slope is < 0', () => {
|
||||
createValidator({size: 2, metric: 'script'});
|
||||
expect(validator.validate([{'script':2}, {'script':1}])).toBe(null);
|
||||
});
|
||||
|
||||
it('should return the last sampleSize runs when the regression slope is ==0', () => {
|
||||
createValidator({size: 2, metric: 'script'});
|
||||
expect(validator.validate([{'script':1}, {'script':1}])).toEqual([{'script':1}, {'script':1}]);
|
||||
expect(validator.validate([{'script':1}, {'script':1}, {'script':1}])).toEqual([{'script':1}, {'script':1}]);
|
||||
});
|
||||
|
||||
it('should return the last sampleSize runs when the regression slope is >0', () => {
|
||||
createValidator({size: 2, metric: 'script'});
|
||||
expect(validator.validate([{'script':1}, {'script':2}])).toEqual([{'script':1}, {'script':2}]);
|
||||
expect(validator.validate([{'script':1}, {'script':2}, {'script':3}])).toEqual([{'script':2}, {'script':3}]);
|
||||
});
|
||||
|
||||
});
|
||||
}
|
|
@ -0,0 +1,38 @@
|
|||
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
|
||||
|
||||
import {
|
||||
Validator, SizeValidator, Injector, bind
|
||||
} from 'benchpress/benchpress';
|
||||
|
||||
export function main() {
|
||||
describe('size validator', () => {
|
||||
var validator;
|
||||
|
||||
function createValidator(size) {
|
||||
validator = new Injector([
|
||||
SizeValidator.BINDINGS,
|
||||
bind(SizeValidator.SAMPLE_SIZE).toValue(size)
|
||||
]).get(Validator);
|
||||
}
|
||||
|
||||
it('should return sampleSize as description', () => {
|
||||
createValidator(2);
|
||||
expect(validator.describe()).toEqual({
|
||||
'sampleSize': 2
|
||||
});
|
||||
});
|
||||
|
||||
it('should return null while the completeSample is smaller than the given size', () => {
|
||||
createValidator(2);
|
||||
expect(validator.validate([])).toBe(null);
|
||||
expect(validator.validate([{}])).toBe(null);
|
||||
});
|
||||
|
||||
it('should return the last sampleSize runs when it has at least the given size', () => {
|
||||
createValidator(2);
|
||||
expect(validator.validate([{'a':1}, {'b':2}])).toEqual([{'a':1}, {'b':2}]);
|
||||
expect(validator.validate([{'a':1}, {'b':2}, {'c':3}])).toEqual([{'b':2}, {'c':3}]);
|
||||
});
|
||||
|
||||
});
|
||||
}
|
|
@ -0,0 +1,267 @@
|
|||
import {describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
|
||||
|
||||
import { ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { PromiseWrapper } from 'angular2/src/facade/async';
|
||||
import { Json, perfRecords, isBlank } from 'angular2/src/facade/lang';
|
||||
|
||||
import {
|
||||
WebDriverExtension, ChromeDriverExtension,
|
||||
WebDriverAdapter, Injector, bind
|
||||
} from 'benchpress/benchpress';
|
||||
|
||||
export function main() {
|
||||
describe('chrome driver extension', () => {
|
||||
var log;
|
||||
var extension;
|
||||
|
||||
function createExtension(perfRecords = null) {
|
||||
if (isBlank(perfRecords)) {
|
||||
perfRecords = [];
|
||||
}
|
||||
log = [];
|
||||
extension = new Injector([
|
||||
ChromeDriverExtension.BINDINGS,
|
||||
bind(WebDriverAdapter).toValue(new MockDriverAdapter(log, perfRecords))
|
||||
]).get(WebDriverExtension);
|
||||
return extension;
|
||||
}
|
||||
|
||||
it('should force gc via window.gc()', (done) => {
|
||||
createExtension().gc().then( (_) => {
|
||||
expect(log).toEqual([['executeScript', 'window.gc()']]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should mark the timeline via console.timeStamp()', (done) => {
|
||||
createExtension().timeBegin('someName').then( (_) => {
|
||||
expect(log).toEqual([['executeScript', `console.timeStamp('begin_someName');`]]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should mark the timeline via console.timeEnd()', (done) => {
|
||||
createExtension().timeEnd('someName').then( (_) => {
|
||||
expect(log).toEqual([['executeScript', `console.timeStamp('end_someName');`]]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should mark the timeline via console.time() and console.timeEnd()', (done) => {
|
||||
createExtension().timeEnd('name1', 'name2').then( (_) => {
|
||||
expect(log).toEqual([['executeScript', `console.timeStamp('end_name1');console.timeStamp('begin_name2');`]]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
describe('readPerfLog', () => {
|
||||
|
||||
it('should execute a dummy script before reading them', (done) => {
|
||||
// TODO(tbosch): This seems to be a bug in ChromeDriver:
|
||||
// Sometimes it does not report the newest events of the performance log
|
||||
// to the WebDriver client unless a script is executed...
|
||||
createExtension([]).readPerfLog().then( (_) => {
|
||||
expect(log).toEqual([ [ 'executeScript', '1+1' ], [ 'logs', 'performance' ] ]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should report FunctionCall records as "script"', (done) => {
|
||||
createExtension([
|
||||
durationRecord('FunctionCall', 1, 5)
|
||||
]).readPerfLog().then( (events) => {
|
||||
expect(events).toEqual([
|
||||
startEvent('script', 1),
|
||||
endEvent('script', 5)
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore FunctionCalls from webdriver', (done) => {
|
||||
createExtension([
|
||||
internalScriptRecord(1, 5)
|
||||
]).readPerfLog().then( (events) => {
|
||||
expect(events).toEqual([]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should report begin timestamps', (done) => {
|
||||
createExtension([
|
||||
timeStampRecord('begin_someName')
|
||||
]).readPerfLog().then( (events) => {
|
||||
expect(events).toEqual([
|
||||
markStartEvent('someName')
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should report end timestamps', (done) => {
|
||||
createExtension([
|
||||
timeStampRecord('end_someName')
|
||||
]).readPerfLog().then( (events) => {
|
||||
expect(events).toEqual([
|
||||
markEndEvent('someName')
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should report gc', (done) => {
|
||||
createExtension([
|
||||
gcRecord(1, 3, 21)
|
||||
]).readPerfLog().then( (events) => {
|
||||
expect(events).toEqual([
|
||||
startEvent('gc', 1),
|
||||
endEvent('gc', 3, {'amount': 21}),
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
['RecalculateStyles', 'Layout', 'UpdateLayerTree', 'Paint', 'Rasterize', 'CompositeLayers'].forEach( (recordType) => {
|
||||
it(`should report ${recordType}`, (done) => {
|
||||
createExtension([
|
||||
durationRecord(recordType, 0, 1)
|
||||
]).readPerfLog().then( (events) => {
|
||||
expect(events).toEqual([
|
||||
startEvent('render', 0),
|
||||
endEvent('render', 1),
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should walk children', (done) => {
|
||||
createExtension([
|
||||
durationRecord('FunctionCall', 1, 5, [
|
||||
timeStampRecord('begin_someName')
|
||||
])
|
||||
]).readPerfLog().then( (events) => {
|
||||
expect(events).toEqual([
|
||||
startEvent('script', 1),
|
||||
markStartEvent('someName'),
|
||||
endEvent('script', 5)
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function timeStampRecord(name) {
|
||||
return {
|
||||
'type': 'TimeStamp',
|
||||
'data': {
|
||||
'message': name
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function durationRecord(type, startTime, endTime, children = null) {
|
||||
if (isBlank(children)) {
|
||||
children = [];
|
||||
}
|
||||
return {
|
||||
'type': type,
|
||||
'startTime': startTime,
|
||||
'endTime': endTime,
|
||||
'children': children
|
||||
};
|
||||
}
|
||||
|
||||
function internalScriptRecord(startTime, endTime) {
|
||||
return {
|
||||
'type': 'FunctionCall',
|
||||
'startTime': startTime,
|
||||
'endTime': endTime,
|
||||
'data': {
|
||||
'scriptName': 'InjectedScript'
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function gcRecord(startTime, endTime, gcAmount) {
|
||||
return {
|
||||
'type': 'GCEvent',
|
||||
'startTime': startTime,
|
||||
'endTime': endTime,
|
||||
'data': {
|
||||
'usedHeapSizeDelta': gcAmount
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function markStartEvent(type) {
|
||||
return {
|
||||
'name': type,
|
||||
'ph': 'b'
|
||||
}
|
||||
}
|
||||
|
||||
function markEndEvent(type) {
|
||||
return {
|
||||
'name': type,
|
||||
'ph': 'e'
|
||||
}
|
||||
}
|
||||
|
||||
function startEvent(type, time) {
|
||||
return {
|
||||
'name': type,
|
||||
'ts': time,
|
||||
'ph': 'B'
|
||||
}
|
||||
}
|
||||
|
||||
function endEvent(type, time, args = null) {
|
||||
return {
|
||||
'name': type,
|
||||
'ts': time,
|
||||
'ph': 'E',
|
||||
'args': args
|
||||
}
|
||||
}
|
||||
|
||||
class MockDriverAdapter extends WebDriverAdapter {
|
||||
_log:List;
|
||||
_perfRecords:List;
|
||||
constructor(log, perfRecords) {
|
||||
super();
|
||||
this._log = log;
|
||||
this._perfRecords = perfRecords;
|
||||
}
|
||||
|
||||
executeScript(script) {
|
||||
ListWrapper.push(this._log, ['executeScript', script]);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
|
||||
logs(type) {
|
||||
ListWrapper.push(this._log, ['logs', type]);
|
||||
if (type === 'performance') {
|
||||
return PromiseWrapper.resolve(this._perfRecords.map(function(record) {
|
||||
return {
|
||||
'message': Json.stringify({
|
||||
'message': {
|
||||
'method': 'Timeline.eventRecorded',
|
||||
'params': {
|
||||
'record': record
|
||||
}
|
||||
}
|
||||
})
|
||||
};
|
||||
}));
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,7 +1,7 @@
|
|||
var benchpress = require('benchpress/index.js');
|
||||
var testUtil = require('angular2/e2e_test/test_util');
|
||||
describe('hello world', function () {
|
||||
|
||||
afterEach(benchpress.verifyNoBrowserErrors);
|
||||
afterEach(testUtil.verifyNoBrowserErrors);
|
||||
|
||||
describe('static reflection', function() {
|
||||
var URL = 'examples/src/hello_world/index_static.html';
|
||||
|
|
|
@ -29,7 +29,8 @@
|
|||
"which": "~1",
|
||||
"zone.js": "0.4.0",
|
||||
"googleapis": "1.0.x",
|
||||
"node-uuid": "1.4.x"
|
||||
"node-uuid": "1.4.x",
|
||||
"selenium-webdriver": "2.x.x"
|
||||
},
|
||||
"devDependencies": {
|
||||
"temp": "^0.8.1",
|
||||
|
|
|
@ -1,6 +1,16 @@
|
|||
var config = exports.config = require('./protractor-e2e-shared.js').config;
|
||||
var data = module.exports = require('./protractor-e2e-shared.js');
|
||||
var config = data.config;
|
||||
|
||||
config.baseUrl = 'http://localhost:8002/';
|
||||
|
||||
// TODO: remove this line when largetable dart has been added
|
||||
config.exclude = config.exclude || [];
|
||||
config.exclude.push('dist/js/cjs/benchmarks_external/e2e_test/largetable_spec.js');
|
||||
config.exclude.push('dist/js/cjs/examples/e2e_test/sourcemap/sourcemap_spec.js');
|
||||
config.exclude.push('dist/js/cjs/benchmarks_external/e2e_test/largetable_perf.js');
|
||||
|
||||
data.createBenchpressRunner({
|
||||
forceGc: false,
|
||||
lang: 'dart',
|
||||
test: true,
|
||||
sampleSize: 1
|
||||
});
|
||||
|
|
|
@ -1,6 +1,16 @@
|
|||
var config = exports.config = require('./protractor-e2e-shared.js').config;
|
||||
var data = module.exports = require('./protractor-e2e-shared.js');
|
||||
var config = data.config;
|
||||
|
||||
config.baseUrl = 'http://localhost:8001/';
|
||||
|
||||
// TODO: remove exclusion when JS verison of scrolling benchmark is available
|
||||
config.exclude = config.exclude || [];
|
||||
config.exclude.push('dist/js/cjs/benchmarks_external/e2e_test/naive_infinite_scroll_spec.js');
|
||||
config.exclude.push('dist/js/cjs/benchmarks_external/e2e_test/naive_infinite_scroll_perf.js');
|
||||
|
||||
data.createBenchpressRunner({
|
||||
forceGc: false,
|
||||
lang: 'js',
|
||||
test: true,
|
||||
sampleSize: 1
|
||||
});
|
||||
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
var config = exports.config = require('./protractor-shared.js').config;
|
||||
config.specs = ['dist/js/cjs/**/e2e_test/**/*_spec.js'];
|
||||
var data = module.exports = require('./protractor-shared.js');
|
||||
var config = data.config;
|
||||
|
||||
config.specs = ['dist/js/cjs/**/e2e_test/**/*_spec.js', 'dist/js/cjs/**/e2e_test/**/*_perf.js'];
|
||||
config.exclude = ['dist/js/cjs/**/node_modules/**'];
|
||||
|
|
|
@ -1,6 +1,16 @@
|
|||
var config = exports.config = require('./protractor-perf-shared.js').config;
|
||||
var data = module.exports = require('./protractor-perf-shared.js');
|
||||
var config = data.config;
|
||||
|
||||
config.baseUrl = 'http://localhost:8002/';
|
||||
config.params.lang = 'dart';
|
||||
|
||||
// TODO: remove this line when largetable dart has been added
|
||||
config.exclude = config.exclude || [];
|
||||
config.exclude.push('dist/js/cjs/benchmarks_external/e2e_test/largetable_perf.js');
|
||||
|
||||
data.createBenchpressRunner({
|
||||
forceGc: false,
|
||||
lang: 'dart',
|
||||
test: false,
|
||||
sampleSize: 20
|
||||
});
|
||||
|
||||
|
|
|
@ -1,7 +1,16 @@
|
|||
var config = exports.config = require('./protractor-perf-shared.js').config;
|
||||
var data = module.exports = require('./protractor-perf-shared.js');
|
||||
var config = data.config;
|
||||
|
||||
config.baseUrl = 'http://localhost:8001/';
|
||||
config.params.lang = 'js';
|
||||
|
||||
// TODO: remove exclusion when JS verison of scrolling benchmark is available
|
||||
config.exclude = config.exclude || [];
|
||||
config.exclude.push('dist/js/cjs/benchmarks_external/e2e_test/naive_infinite_scroll_perf.js');
|
||||
|
||||
data.createBenchpressRunner({
|
||||
forceGc: false,
|
||||
lang: 'js',
|
||||
test: false,
|
||||
sampleSize: 20
|
||||
});
|
||||
|
||||
|
|
|
@ -1,45 +1,8 @@
|
|||
var config = exports.config = require('./protractor-shared.js').config;
|
||||
// load traceur runtime as our tests are written in es6
|
||||
require('traceur/bin/traceur-runtime.js');
|
||||
var nodeUuid = require('node-uuid');
|
||||
|
||||
var cloudReporterConfig;
|
||||
if (process.env.CLOUD_SECRET_PATH) {
|
||||
console.log('using cloud reporter!');
|
||||
cloudReporterConfig = {
|
||||
auth: require(process.env.CLOUD_SECRET_PATH),
|
||||
projectId: 'angular-perf',
|
||||
datasetId: 'benchmarks',
|
||||
tableId: 'ng2perf'
|
||||
};
|
||||
}
|
||||
var data = module.exports = require('./protractor-shared.js');
|
||||
var config = data.config;
|
||||
|
||||
config.specs = ['dist/js/cjs/**/e2e_test/**/*_perf.js'];
|
||||
config.exclude = ['dist/js/cjs/**/node_modules/**'];
|
||||
|
||||
config.jasmineNodeOpts.defaultTimeoutInterval = 80000;
|
||||
|
||||
var runId = nodeUuid.v1();
|
||||
if (process.env.GIT_SHA) {
|
||||
runId = process.env.GIT_SHA + ' ' + runId;
|
||||
}
|
||||
|
||||
config.params = {
|
||||
benchmark: {
|
||||
runId: runId,
|
||||
// size of the sample to take
|
||||
sampleSize: 20,
|
||||
timeout: 60000,
|
||||
metrics: ['script', 'render', 'gcAmount', 'gcAmountInScript', 'gcTime'],
|
||||
// forces a gc after every run
|
||||
forceGc: false,
|
||||
reporters: [
|
||||
require('./dist/js/cjs/benchpress/src/console_reporter.js'),
|
||||
cloudReporterConfig ? require('./dist/js/cjs/benchpress/src/cloud_reporter.js') : null,
|
||||
],
|
||||
cloudReporter: cloudReporterConfig,
|
||||
scaling: [{
|
||||
userAgent: /Android/, value: 0.125
|
||||
}]
|
||||
}
|
||||
};
|
||||
|
|
|
@ -1,6 +1,11 @@
|
|||
// load traceur runtime as our tests are written in es6
|
||||
require('traceur/bin/traceur-runtime.js');
|
||||
|
||||
var nodeUuid = require('node-uuid');
|
||||
var benchpress = require('./dist/js/cjs/benchpress/benchpress');
|
||||
var SeleniumWebDriverAdapter = require('./dist/js/cjs/benchpress/src/webdriver/selenium_webdriver_adapter').SeleniumWebDriverAdapter;
|
||||
var cmdArgs = require('minimist')(process.argv);
|
||||
|
||||
var cmdLineBrowsers = cmdArgs.browsers ? cmdArgs.browsers.split(',') : [];
|
||||
|
||||
var config = exports.config = {
|
||||
|
@ -23,9 +28,55 @@ var config = exports.config = {
|
|||
jasmineNodeOpts: {
|
||||
showColors: true,
|
||||
defaultTimeoutInterval: 30000
|
||||
},
|
||||
params: {
|
||||
benchmark: {
|
||||
scaling: [{
|
||||
userAgent: /Android/, value: 0.125
|
||||
}]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
exports.createBenchpressRunner = function(options) {
|
||||
// TODO(tbosch): add cloud reporter again (only when !options.test)
|
||||
// var cloudReporterConfig;
|
||||
// if (process.env.CLOUD_SECRET_PATH) {
|
||||
// console.log('using cloud reporter!');
|
||||
// cloudReporterConfig = {
|
||||
// auth: require(process.env.CLOUD_SECRET_PATH),
|
||||
// projectId: 'angular-perf',
|
||||
// datasetId: 'benchmarks',
|
||||
// tableId: 'ng2perf'
|
||||
// };
|
||||
// }
|
||||
|
||||
var runId = nodeUuid.v1();
|
||||
if (process.env.GIT_SHA) {
|
||||
runId = process.env.GIT_SHA + ' ' + runId;
|
||||
}
|
||||
var bindings = [
|
||||
benchpress.bind(benchpress.WebDriverAdapter).toFactory(
|
||||
function() { return new SeleniumWebDriverAdapter(global.browser); }, []
|
||||
),
|
||||
benchpress.bind(benchpress.Options.FORCE_GC).toValue(options.forceGc),
|
||||
benchpress.bind(benchpress.Options.DEFAULT_DESCRIPTION).toValue({
|
||||
'lang': options.lang,
|
||||
'runId': runId
|
||||
})
|
||||
];
|
||||
if (options.test) {
|
||||
bindings.push(benchpress.SizeValidator.BINDINGS);
|
||||
bindings.push(benchpress.bind(benchpress.SizeValidator.SAMPLE_SIZE).toValue(1));
|
||||
} else {
|
||||
bindings.push(benchpress.RegressionSlopeValidator.BINDINGS);
|
||||
bindings.push(benchpress.bind(benchpress.RegressionSlopeValidator.SAMPLE_SIZE).toValue(options.sampleSize));
|
||||
}
|
||||
|
||||
global.benchpressRunner = new benchpress.Runner(bindings);
|
||||
}
|
||||
|
||||
|
||||
var POSSIBLE_CAPS = {
|
||||
Dartium: {
|
||||
name: 'Dartium',
|
||||
|
|
|
@ -19,6 +19,15 @@ function rttsAssert {
|
|||
npm publish ./
|
||||
}
|
||||
|
||||
# only publish dev version of benchpress
|
||||
# as implementation is not performance sensitive
|
||||
function benchpress {
|
||||
cd $ROOT_DIR/dist/js/dev/es6/benchpress
|
||||
rm -fr test
|
||||
npm publish ./
|
||||
}
|
||||
|
||||
rttsAssert
|
||||
angular dev
|
||||
angular prod
|
||||
benchpress
|
|
@ -1,7 +0,0 @@
|
|||
var benchmark = require('./src/benchmark');
|
||||
var tools = require('./src/tools');
|
||||
|
||||
module.exports = {
|
||||
runBenchmark: benchmark.runBenchmark,
|
||||
verifyNoBrowserErrors: tools.verifyNoBrowserErrors
|
||||
};
|
|
@ -1,237 +0,0 @@
|
|||
var statistics = require('./statistics');
|
||||
var commands = require('./commands');
|
||||
var webdriver = require('protractor/node_modules/selenium-webdriver');
|
||||
|
||||
var SUPPORTED_METRICS = {
|
||||
script: true,
|
||||
gcTime: true,
|
||||
gcAmount: true,
|
||||
gcTimeInScript: true,
|
||||
gcAmountInScript: true,
|
||||
gcAmountPerMs: true,
|
||||
render: true
|
||||
};
|
||||
|
||||
var nextTimestampId = 0;
|
||||
|
||||
module.exports = {
|
||||
runBenchmark: runBenchmark,
|
||||
supportedMetrics: SUPPORTED_METRICS
|
||||
};
|
||||
|
||||
function runBenchmark(config, workCallback) {
|
||||
var reporters = config.reporters.filter(function(Class) {
|
||||
return !!Class;
|
||||
}).map(function(Class) {
|
||||
return new Class(config);
|
||||
});
|
||||
var scriptMetricIndex = -1;
|
||||
config.metrics.forEach(function(metric, index) {
|
||||
if (!(metric in SUPPORTED_METRICS)) {
|
||||
throw new Error('Metric '+metric+' is not suported by benchpress right now');
|
||||
}
|
||||
if (metric === 'script') {
|
||||
scriptMetricIndex = index;
|
||||
}
|
||||
});
|
||||
if (scriptMetricIndex === -1) {
|
||||
throw new Error('Metric "script" needs to be included in the metrics');
|
||||
}
|
||||
|
||||
var startTime = Date.now();
|
||||
commands.gc();
|
||||
reporters.forEach(function(reporter) {
|
||||
reporter.begin();
|
||||
});
|
||||
return measureLoop({
|
||||
index: 0,
|
||||
prevSample: [],
|
||||
endAfterRun: false,
|
||||
work: function() {
|
||||
workCallback();
|
||||
if (this.endAfterRun || config.forceGc) {
|
||||
commands.gc();
|
||||
}
|
||||
},
|
||||
process: function(data) {
|
||||
var measuredValues = config.metrics.map(function(metric) {
|
||||
return data.stats[metric];
|
||||
});
|
||||
var reporterData = {
|
||||
values: measuredValues,
|
||||
index: this.index,
|
||||
records: data.records,
|
||||
forceGc: this.endAfterRun || config.forceGc
|
||||
};
|
||||
reporters.forEach(function(reporter) {
|
||||
reporter.add(reporterData);
|
||||
});
|
||||
|
||||
var newSample = this.prevSample.concat([reporterData]);
|
||||
if (newSample.length > config.sampleSize) {
|
||||
newSample = newSample.slice(newSample.length - config.sampleSize);
|
||||
}
|
||||
|
||||
var result = null;
|
||||
var xValues = [];
|
||||
var yValues = [];
|
||||
newSample.forEach(function(data, index) {
|
||||
// For now, we only use the array index as x value.
|
||||
// TODO(tbosch): think about whether we should use time here instead
|
||||
xValues.push(index);
|
||||
yValues.push(data.values[scriptMetricIndex]);
|
||||
});
|
||||
var regressionSlope = statistics.getRegressionSlope(
|
||||
xValues, statistics.calculateMean(xValues),
|
||||
yValues, statistics.calculateMean(yValues)
|
||||
);
|
||||
// TODO(tbosch): ask someone who really understands statistics whether this is reasonable
|
||||
// When we detect that we are not getting slower any more,
|
||||
// we do one more round where we force gc so we get all the gc data before we stop.
|
||||
var endAfterNextRun = ((Date.now() - startTime > config.timeout) ||
|
||||
(newSample.length === config.sampleSize && regressionSlope >= 0));
|
||||
return {
|
||||
index: this.index+1,
|
||||
work: this.work,
|
||||
process: this.process,
|
||||
endAfterRun: endAfterNextRun,
|
||||
result: this.endAfterRun ? newSample : null,
|
||||
prevSample: newSample
|
||||
};
|
||||
}
|
||||
}).then(function(stableSample) {
|
||||
reporters.forEach(function(reporter) {
|
||||
reporter.end(stableSample);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function measureLoop(startState) {
|
||||
var startTimestampId = (nextTimestampId++).toString();
|
||||
commands.timelineTimestamp(startTimestampId);
|
||||
|
||||
return next(startTimestampId, startState, []);
|
||||
|
||||
function next(startTimestampId, state, lastRecords) {
|
||||
state.work();
|
||||
var endTimestampId = (nextTimestampId++).toString();
|
||||
commands.timelineTimestamp(endTimestampId);
|
||||
|
||||
return readStats(startTimestampId, endTimestampId, lastRecords).then(function(data) {
|
||||
var nextState = state.process({
|
||||
stats: data.stats,
|
||||
records: data.records
|
||||
});
|
||||
if (nextState.result) {
|
||||
return nextState.result;
|
||||
} else {
|
||||
return next(endTimestampId, nextState, data.lastRecords);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function readStats(startTimestampId, endTimestampId, lastRecords) {
|
||||
return commands.timelineRecords().then(function(newRecords) {
|
||||
var records = lastRecords.concat(newRecords);
|
||||
var stats = sumTimelineRecords(records, startTimestampId, endTimestampId);
|
||||
if (stats.timeStamps.indexOf(startTimestampId) === -1 ||
|
||||
stats.timeStamps.indexOf(endTimestampId) === -1) {
|
||||
// Sometimes the logs have not yet arrived at the webdriver
|
||||
// server from the browser, so we need to wait
|
||||
// TODO(tbosch): This seems to be a bug in chrome / chromedriver!
|
||||
// And sometimes, just waiting is not enough, so we
|
||||
// execute a dummy js function :-(
|
||||
browser.executeScript('1+1');
|
||||
browser.sleep(100);
|
||||
return readStats(startTimestampId, endTimestampId, records);
|
||||
} else {
|
||||
return {
|
||||
stats: stats,
|
||||
records: records,
|
||||
lastRecords: newRecords
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function sumTimelineRecords(records, startTimeStampId, endTimeStampId) {
|
||||
var isStarted = false;
|
||||
var recordStats = {
|
||||
script: 0,
|
||||
gcTime: 0,
|
||||
gcAmount: 0,
|
||||
gcTimeInScript: 0,
|
||||
gcAmountInScript: 0,
|
||||
render: 0,
|
||||
timeStamps: []
|
||||
};
|
||||
records.forEach(function(record) {
|
||||
processRecord(record, recordStats, false);
|
||||
});
|
||||
recordStats.gcAmountPerMs = 0;
|
||||
if (recordStats.gcAmount) {
|
||||
recordStats.gcAmountPerMs = recordStats.gcAmount / recordStats.gcTime;
|
||||
}
|
||||
return recordStats;
|
||||
|
||||
function processRecord(record, recordStats, parentIsFunctionCall) {
|
||||
if (record.type === 'TimeStamp' && record.data.message === startTimeStampId) {
|
||||
isStarted = true;
|
||||
}
|
||||
|
||||
// ignore scripts that were injected by Webdriver (e.g. calculation of element positions, ...)
|
||||
var isFunctionCall = record.type === 'FunctionCall' &&
|
||||
(!record.data || record.data.scriptName !== 'InjectedScript');
|
||||
|
||||
var summedChildrenDuration = 0;
|
||||
if (record.children) {
|
||||
record.children.forEach(function(child) {
|
||||
summedChildrenDuration += processRecord(child, recordStats, isFunctionCall);
|
||||
});
|
||||
}
|
||||
var recordDuration;
|
||||
var recordUsed = false;
|
||||
// we need to substract the time of child records
|
||||
// that have been added to the stats from this record.
|
||||
// E.g. for a script record that triggered a gc or reflow while executing.
|
||||
|
||||
// Attention: If a gc happens during a script execution, the
|
||||
// execution time of the script is usually slower than normal,
|
||||
// even when we substract the gc time!!
|
||||
recordDuration = (record.endTime ? record.endTime - record.startTime : 0)
|
||||
- summedChildrenDuration;
|
||||
|
||||
if (isStarted) {
|
||||
if (isFunctionCall) {
|
||||
recordStats.script += recordDuration;
|
||||
recordUsed = true;
|
||||
} else if (record.type === 'GCEvent') {
|
||||
recordStats.gcTime += recordDuration;
|
||||
recordStats.gcAmount += record.data.usedHeapSizeDelta;
|
||||
if (parentIsFunctionCall) {
|
||||
recordStats.gcTimeInScript += recordDuration;
|
||||
recordStats.gcAmountInScript += record.data.usedHeapSizeDelta;
|
||||
}
|
||||
recordUsed = true;
|
||||
} else if (record.type === 'RecalculateStyles' ||
|
||||
record.type === 'Layout' ||
|
||||
record.type === 'UpdateLayerTree' ||
|
||||
record.type === 'Paint' ||
|
||||
record.type === 'Rasterize' ||
|
||||
record.type === 'CompositeLayers') {
|
||||
recordStats.render += recordDuration;
|
||||
recordUsed = true;
|
||||
} else if (record.type === 'TimeStamp') {
|
||||
recordStats.timeStamps.push(record.data.message);
|
||||
}
|
||||
}
|
||||
|
||||
if (record.type === 'TimeStamp' && record.data.message === endTimeStampId) {
|
||||
isStarted = false;
|
||||
}
|
||||
return recordUsed ? recordDuration : summedChildrenDuration;
|
||||
}
|
||||
}
|
||||
|
|
@ -1,305 +0,0 @@
|
|||
var google = require('googleapis');
|
||||
var bigquery = google.bigquery('v2');
|
||||
var webdriver = require('protractor/node_modules/selenium-webdriver');
|
||||
|
||||
var TABLE_FIELDS = [
|
||||
{
|
||||
"name": 'runId',
|
||||
"type": 'STRING',
|
||||
"description": 'git SHA and uuid for the benchmark run'
|
||||
},
|
||||
{
|
||||
"name": 'benchmarkId',
|
||||
"type": 'STRING',
|
||||
"description": 'id of the benchmark'
|
||||
},
|
||||
{
|
||||
"name": 'index',
|
||||
"type": 'INTEGER',
|
||||
"description": 'index within the sample'
|
||||
},
|
||||
{
|
||||
"name": 'creationTime',
|
||||
"type": 'TIMESTAMP'
|
||||
},
|
||||
{
|
||||
"name": 'browser',
|
||||
"type": 'STRING',
|
||||
"description": 'navigator.platform'
|
||||
},
|
||||
{
|
||||
"name": 'forceGc',
|
||||
"type": 'BOOLEAN',
|
||||
"description": 'whether gc was forced at end of action'
|
||||
},
|
||||
{
|
||||
"name": 'stable',
|
||||
"type": 'BOOLEAN',
|
||||
"description": 'whether this entry was part of the stable sample'
|
||||
},
|
||||
{
|
||||
"name": 'params',
|
||||
"type": 'RECORD',
|
||||
"description": 'parameters of the benchmark',
|
||||
"mode": 'REPEATED',
|
||||
"fields": [
|
||||
{
|
||||
"name": 'name',
|
||||
"type": 'STRING',
|
||||
"description": 'param name'
|
||||
},
|
||||
{
|
||||
"name": 'strvalue',
|
||||
"type": 'STRING',
|
||||
"description": 'param value for strings'
|
||||
},
|
||||
{
|
||||
"name": 'numvalue',
|
||||
"type": 'FLOAT',
|
||||
"description": 'param value for numbers'
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": 'metrics',
|
||||
"type": 'RECORD',
|
||||
"description": 'metrics of the benchmark',
|
||||
"mode": 'REPEATED',
|
||||
"fields": [
|
||||
{
|
||||
"name": 'name',
|
||||
"type": 'STRING',
|
||||
"description": 'metric name'
|
||||
},
|
||||
{
|
||||
"name": 'value',
|
||||
"type": 'FLOAT',
|
||||
"description": 'metric value'
|
||||
}
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
var RETRY_COUNT = 3;
|
||||
|
||||
class CloudReporter {
|
||||
constructor(benchmarkConfig) {
|
||||
this.tableConfig = createTableConfig(benchmarkConfig);
|
||||
this.authConfig = benchmarkConfig.cloudReporter.auth;
|
||||
this.benchmarkConfig = benchmarkConfig;
|
||||
this.allSample = [];
|
||||
var self = this;
|
||||
browser.executeScript('return navigator.userAgent').then(function(userAgent) {
|
||||
self.browserUserAgent = userAgent;
|
||||
});
|
||||
}
|
||||
begin() {
|
||||
var self = this;
|
||||
var flow = browser.driver.controlFlow();
|
||||
flow.execute(function() {
|
||||
return authenticate(self.authConfig, RETRY_COUNT).then(function(authClient) {
|
||||
self.authClient = authClient;
|
||||
});
|
||||
});
|
||||
flow.execute(function() {
|
||||
return getOrCreateTable(self.authClient, self.tableConfig, RETRY_COUNT);
|
||||
});
|
||||
}
|
||||
add(data) {
|
||||
this.allSample.push(data);
|
||||
}
|
||||
end(stableSample) {
|
||||
var self = this;
|
||||
var flow = browser.driver.controlFlow();
|
||||
var allRows = this.allSample.map(function(data) {
|
||||
return self._convertToTableRow(data, stableSample);
|
||||
});
|
||||
return insertRows(this.authClient, this.tableConfig, allRows, RETRY_COUNT);
|
||||
}
|
||||
_convertToTableRow(benchpressRow, stableSample) {
|
||||
return {
|
||||
insertId: this.benchmarkConfig.runId+'#'+this.benchmarkConfig.id+'#'+benchpressRow.index,
|
||||
json: {
|
||||
runId: this.benchmarkConfig.runId,
|
||||
benchmarkId: this.benchmarkConfig.id,
|
||||
index: benchpressRow.index,
|
||||
creationTime: new Date(),
|
||||
browser: this.browserUserAgent,
|
||||
forceGc: benchpressRow.forceGc,
|
||||
stable: stableSample.indexOf(benchpressRow) >= 0,
|
||||
params: this.benchmarkConfig.params.map(function(param) {
|
||||
if (typeof param.value === 'number') {
|
||||
return {
|
||||
name: param.name,
|
||||
numvalue: param.value
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
name: param.name,
|
||||
strvalue: ''+param.value
|
||||
}
|
||||
}
|
||||
}),
|
||||
metrics: this.benchmarkConfig.metrics.map(function(metricName, index) {
|
||||
return {
|
||||
name: metricName,
|
||||
value: benchpressRow.values[index]
|
||||
};
|
||||
})
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function createTableConfig(benchmarkConfig) {
|
||||
return {
|
||||
projectId: benchmarkConfig.cloudReporter.projectId,
|
||||
datasetId: benchmarkConfig.cloudReporter.datasetId,
|
||||
table: {
|
||||
id: benchmarkConfig.cloudReporter.tableId,
|
||||
fields: TABLE_FIELDS
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function getOrCreateTable(authClient, tableConfig, retryCount) {
|
||||
return getTable(authClient, tableConfig, retryCount).then(null, function(err) {
|
||||
// create the table if it does not exist
|
||||
return createTable(authClient, tableConfig, retryCount);
|
||||
});
|
||||
}
|
||||
|
||||
function authenticate(authConfig, retryCount) {
|
||||
var authClient = new google.auth.JWT(
|
||||
authConfig['client_email'],
|
||||
null,
|
||||
authConfig['private_key'],
|
||||
['https://www.googleapis.com/auth/bigquery'],
|
||||
// User to impersonate (leave empty if no impersonation needed)
|
||||
null);
|
||||
|
||||
var defer = webdriver.promise.defer();
|
||||
authClient.authorize(makeNodeJsResolver(defer));
|
||||
var resultPromise = defer.promise.then(function() {
|
||||
return authClient;
|
||||
});
|
||||
resultPromise = retryIfNeeded(resultPromise, retryCount, function(newRetryCount) {
|
||||
return authenticate(authConfig, newRetryCount);
|
||||
});
|
||||
return resultPromise;
|
||||
}
|
||||
|
||||
function getTable(authClient, tableConfig, retryCount) {
|
||||
// see https://cloud.google.com/bigquery/docs/reference/v2/tables/get
|
||||
var params = {
|
||||
auth: authClient,
|
||||
projectId: tableConfig.projectId,
|
||||
datasetId: tableConfig.datasetId,
|
||||
tableId: tableConfig.table.id
|
||||
};
|
||||
var defer = webdriver.promise.defer();
|
||||
bigquery.tables.get(params, makeNodeJsResolver(defer));
|
||||
var resultPromise = defer.promise;
|
||||
resultPromise = retryIfNeeded(resultPromise, retryCount, function(newRetryCount) {
|
||||
return getTable(authClient, tableConfig, newRetryCount);
|
||||
});
|
||||
return resultPromise;
|
||||
}
|
||||
|
||||
// Creates the BigQuery table with the configured schema; retried on
// timeout errors.
// see https://cloud.google.com/bigquery/docs/reference/v2/tables
// see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource
function createTable(authClient, tableConfig, retryCount) {
  var tableReference = {
    projectId: tableConfig.projectId,
    datasetId: tableConfig.datasetId,
    tableId: tableConfig.table.id
  };
  var params = {
    auth: authClient,
    projectId: tableConfig.projectId,
    datasetId: tableConfig.datasetId,
    resource: {
      "kind": "bigquery#table",
      "tableReference": tableReference,
      "schema": {
        "fields": tableConfig.table.fields
      }
    }
  };
  var deferred = webdriver.promise.defer();
  bigquery.tables.insert(params, makeNodeJsResolver(deferred));
  return retryIfNeeded(deferred.promise, retryCount, function(newRetryCount) {
    return createTable(authClient, tableConfig, newRetryCount);
  });
}
|
||||
|
||||
// Streams `rows` into the BigQuery table in batches of at most 10, as
// BigQuery has a size limit on requests. Batches run sequentially because
// executing the requests in parallel led to timeouts. Rejects when
// BigQuery reports per-row insert errors in an otherwise successful
// response.
// see https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll
function insertRows(authClient, tableConfig, rows, retryCount) {
  var BATCH_SIZE = 10;
  var remainingRows = null;
  if (rows.length > BATCH_SIZE) {
    remainingRows = rows.slice(BATCH_SIZE);
    rows = rows.slice(0, BATCH_SIZE);
  }

  var params = {
    auth: authClient,
    projectId: tableConfig.projectId,
    datasetId: tableConfig.datasetId,
    tableId: tableConfig.table.id,
    resource: {
      "kind": "bigquery#tableDataInsertAllRequest",
      "rows": rows
    }
  };
  var deferred = webdriver.promise.defer();
  bigquery.tabledata.insertAll(params, makeNodeJsResolver(deferred));
  var resultPromise = deferred.promise.then(function(result) {
    // insertAll reports row-level failures in the response body, not via err.
    if (result.insertErrors) {
      throw JSON.stringify(result.insertErrors, null, ' ');
    }
  });
  resultPromise = retryIfNeeded(resultPromise, retryCount, function(newRetryCount) {
    return insertRows(authClient, tableConfig, rows, newRetryCount);
  });
  if (remainingRows) {
    resultPromise = resultPromise.then(function() {
      return insertRows(authClient, tableConfig, remainingRows, retryCount);
    });
  }
  return resultPromise;
}
|
||||
|
||||
// Re-runs a failed operation when the failure looks like a timeout.
// `retryCount` is the number of retries still allowed; each retry invokes
// `retryCallback` with the decremented count so retries eventually stop.
// Non-timeout errors are rethrown unchanged.
function retryIfNeeded(promise, retryCount, retryCallback) {
  if (!retryCount) {
    return promise;
  }
  return promise.then(null, function(err) {
    var errStr = err.toString();
    if (typeof err === 'object') {
      // Objects often stringify to '[object Object]'; include their JSON too
      // so the 'timeout' substring check sees the full error contents.
      errStr += JSON.stringify(err, null, ' ');
    }
    if (errStr.indexOf('timeout') !== -1) {
      console.log('Retrying', retryCallback.toString());
      // Bug fix: this was previously `retryCallback()` with no argument, so
      // the recursive call saw `retryCount === undefined` and never retried
      // more than once regardless of the configured count.
      return retryCallback(retryCount - 1);
    } else {
      throw err;
    }
  });
}
|
||||
|
||||
// Adapts a node-style (err, result) callback to a webdriver deferred:
// errors reject the deferred (pretty-printed as JSON), results fulfill it.
function makeNodeJsResolver(deferred) {
  return function(error, value) {
    if (error) {
      // Format errors in a nice way
      deferred.reject(JSON.stringify(error, null, ' '));
    } else {
      deferred.fulfill(value);
    }
  };
}
|
||||
|
||||
// Entry point of this file: the CloudReporter class defined above.
module.exports = CloudReporter;
|
|
@ -1,55 +0,0 @@
|
|||
var webdriver = require('protractor/node_modules/selenium-webdriver');
|
||||
|
||||
// Public API: Chrome devtools timeline helpers used by the benchmark runner.
module.exports = {
  gc: gc,
  timelineRecords: timelineRecords,
  timelineTimestamp: timelineTimestamp
};
|
||||
|
||||
// Marks the current moment in the browser's timeline by emitting a
// `console.timeStamp` call with the given id in the browser under test.
// Now returns the executeScript promise so callers can sequence on it.
function timelineTimestamp(timestampId) {
  // JSON.stringify safely quotes and escapes the id inside the generated
  // script (the previous string concatenation broke on ids containing '"').
  return browser.executeScript('console.timeStamp(' + JSON.stringify(timestampId) + ')');
}
|
||||
|
||||
// Fetches the Timeline.eventRecorded entries collected so far and
// returns just their `record` payloads.
function timelineRecords() {
  return perfLogs().then(function(logs) {
    var events = (logs && logs['Timeline.eventRecorded']) || [];
    return events.map(function(message) {
      return message.record;
    });
  });
}
|
||||
|
||||
// Reads the webdriver 'performance' log and groups the devtools protocol
// messages it contains by their method name, e.g.
// { 'Timeline.eventRecorded': [params, ...], ... }.
function perfLogs() {
  return plainLogs('performance').then(function(logEntries) {
    var byMethod = {};
    logEntries.forEach(function(entry) {
      var message = JSON.parse(entry.message).message;
      var bucket = byMethod[message.method];
      if (!bucket) {
        bucket = byMethod[message.method] = [];
      }
      bucket.push(message.params);
    });
    return byMethod;
  });
}
|
||||
|
||||
// Needed as selenium-webdriver does not forward
|
||||
// performance logs in the correct way
|
||||
// Needed as selenium-webdriver does not forward
// performance logs in the correct way; schedules a raw GET_LOG command
// on the underlying driver instead.
function plainLogs(type) {
  var command = new webdriver.Command(webdriver.CommandName.GET_LOG)
      .setParameter('type', type);
  return browser.driver.schedule(
      command, 'WebDriver.manage().logs().get(' + type + ')');
}
|
||||
|
||||
// Forces a garbage collection in the browser under test via `window.gc()`.
// NOTE(review): `window.gc()` only exists when the browser exposes it
// (e.g. Chrome with the expose-gc js flag) — confirm the driver setup.
function gc() {
  // TODO(tbosch): this only works on chrome, and we actually should
  // extend chromedriver to use the Debugger.CollectGarbage call of the
  // remote debugger protocol.
  // See http://src.chromium.org/viewvc/blink/trunk/Source/devtools/protocol.json
  // For iOS Safari we need an extension to appium that uses
  // the webkit remote debug protocol. See
  // https://github.com/WebKit/webkit/blob/master/Source/WebInspectorUI/Versions/Inspector-iOS-8.0.json
  return browser.executeScript('window.gc()');
}
|
|
@ -1,76 +0,0 @@
|
|||
var vsprintf = require("sprintf-js").vsprintf;
|
||||
var statistics = require("./statistics");
|
||||
|
||||
var HEADER_SEPARATORS = ['----', '----', '----', '----', '----', '----', '----'];
|
||||
var FOOTER_SEPARATORS = ['====', '====', '====', '====', '====', '====', '===='];
|
||||
|
||||
/**
 * Reports benchmark samples as a fixed-width table on the console:
 * a heading plus config dump at the start, one row per measurement,
 * and a summary row (mean ± coefficient of variation) at the end.
 */
class ConsoleReporter {
  constructor(config) {
    this.config = config;
    // One 12-char column for the index/id prefix plus one per metric.
    var columns = ['%12s'];
    config.metrics.forEach(function() {
      columns.push('%12s');
    });
    this.rowFormat = columns.join(' | ');
  }
  // Prints the benchmark heading, run configuration and the table header.
  begin() {
    printHeading('BENCHMARK '+this.config.id);
    console.log('sample size', this.config.sampleSize);
    console.log('run id', this.config.runId);
    console.log('params', JSON.stringify(this.config.params, null, ' '));
    printTableHeader(this.rowFormat, ['index', 'forceGc'].concat(this.config.metrics));
  }
  // Prints one measurement row: "#index | forceGc | metric values...".
  add(data) {
    var row = ['#' + data.index, data.forceGc].concat(formatValues(data.values));
    printRow(this.rowFormat, row);
  }
  // Prints the footer with the per-metric summary of the stable sample.
  end(stableSample) {
    var summary = [this.config.id, ''].concat(formatSample(stableSample, this.config.metrics));
    printTableFooter(this.rowFormat, summary);
  }
}
|
||||
|
||||
// Renders measurement values for table output: numbers are formatted with
// two decimal places, everything else is passed through unchanged.
function formatValues(values) {
  return values.map(function(value) {
    return typeof value === 'number' ? value.toFixed(2) : value;
  });
}
|
||||
|
||||
// Summarizes a stable sample per metric as "mean±cv%", where cv is the
// coefficient of variation rounded to whole percent.
function formatSample(sample, metrics) {
  return metrics.map(function(_, metricIndex) {
    var column = sample.map(function(row) {
      return row.values[metricIndex];
    });
    var mean = statistics.calculateMean(column);
    var cv = statistics.calculateCoefficientOfVariation(column, mean);
    return mean.toFixed(2) + '\u00B1' + cv.toFixed(0) + '%';
  });
}
|
||||
|
||||
// Prints a markdown-style section heading preceded by a blank line.
function printHeading(title) {
  ['\n', '## ' + title].forEach(function(line) {
    console.log(line);
  });
}
|
||||
|
||||
// Prints the column titles followed by a separator row.
function printTableHeader(format, values) {
  // TODO(tbosch): generate separators dynamically based on the format!
  [values, HEADER_SEPARATORS].forEach(function(row) {
    printRow(format, row);
  });
}
|
||||
|
||||
// Prints a separator row followed by the summary values.
function printTableFooter(format, values) {
  // TODO(tbosch): generate separators dynamically based on the format!
  [FOOTER_SEPARATORS, values].forEach(function(row) {
    printRow(format, row);
  });
}
|
||||
|
||||
// Formats `values` according to the sprintf-style `format` and logs the
// resulting single table line to the console.
function printRow(format, values) {
  console.log(vsprintf(format, values));
}
|
||||
|
||||
// Entry point of this file: the ConsoleReporter class defined above.
module.exports = ConsoleReporter;
|
|
@ -1,37 +0,0 @@
|
|||
// Public API: basic statistics used to summarize benchmark samples.
module.exports = {
  calculateCoefficientOfVariation: calculateCoefficientOfVariation,
  calculateMean: calculateMean,
  calculateStandardDeviation: calculateStandardDeviation,
  getRegressionSlope: getRegressionSlope
};
|
||||
|
||||
// Relative standard deviation of `sample`, expressed in percent of `mean`.
function calculateCoefficientOfVariation(sample, mean) {
  var stddev = calculateStandardDeviation(sample, mean);
  return (stddev / mean) * 100;
}
|
||||
|
||||
// Arithmetic mean of the sample values.
function calculateMean(sample) {
  var sum = sample.reduce(function(acc, value) {
    return acc + value;
  }, 0);
  return sum / sample.length;
}
|
||||
|
||||
// Population standard deviation of `sample` around the given `mean`
// (divides by N, not N-1).
function calculateStandardDeviation(sample, mean) {
  var sumSquares = sample.reduce(function(acc, value) {
    return acc + Math.pow(value - mean, 2);
  }, 0);
  return Math.sqrt(sumSquares / sample.length);
}
|
||||
|
||||
// Slope of the simple linear regression of y on x, given precomputed means.
// See http://en.wikipedia.org/wiki/Simple_linear_regression
function getRegressionSlope(xValues, xMean, yValues, yMean) {
  var numerator = 0;
  var denominator = 0;
  xValues.forEach(function(x, i) {
    numerator += (x - xMean) * (yValues[i] - yMean);
    denominator += Math.pow(x - xMean, 2);
  });
  return numerator / denominator;
}
|
|
@ -1,18 +0,0 @@
|
|||
var webdriver = require('protractor/node_modules/selenium-webdriver');
|
||||
|
||||
// Public API: protractor test helpers shared by the e2e/perf specs.
module.exports = {
  verifyNoBrowserErrors: verifyNoBrowserErrors
};
|
||||
|
||||
// Fails the current spec when the browser console contains any entry
// above WARNING level, and dumps those entries to aid debugging.
function verifyNoBrowserErrors() {
  browser.manage().logs().get('browser').then(function(browserLog) {
    var errors = browserLog.filter(function(logEntry) {
      return logEntry.level.value > webdriver.logging.Level.WARNING.value;
    });
    expect(errors.length).toEqual(0);
    if (errors.length) {
      console.log('browser console errors: ' + require('util').inspect(errors));
    }
  });
}
|
||||
|
Loading…
Reference in New Issue