/**
 * @license
 * Copyright Google LLC All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {StaticProvider} from '@angular/core';
import {AsyncTestCompleter, beforeEach, describe, expect, inject, it} from '@angular/core/testing/src/testing_internal';

import {Injector, Metric, Options, PerfLogEvent, PerfLogFeatures, PerflogMetric, WebDriverExtension} from '../../index';
import {TraceEventFactory} from '../trace_event_factory';

(function() {
let commandLog: any[];
const eventFactory = new TraceEventFactory('timeline', 'pid0');
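
// Test helper: builds a PerflogMetric through the static `Injector.create` API.
// `PerflogMetric.SET_TIMEOUT` is overridden with a stub that runs the callback
// synchronously (recording the requested delay), and `WebDriverExtension` is
// overridden with the MockDriverExtension defined at the bottom of this file,
// which replays the given `perfLogs` batches and records every driver call in
// `commandLog`.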
function createMetric(
    perfLogs: PerfLogEvent[], perfLogFeatures: PerfLogFeatures,
    {microMetrics, forceGc, captureFrames, receivedData, requestCount, ignoreNavigation}: {
      microMetrics?: {[key: string]: string},
      forceGc?: boolean,
      captureFrames?: boolean,
      receivedData?: boolean,
      requestCount?: boolean,
      ignoreNavigation?: boolean
    } = {}): Metric {
  commandLog = [];
  if (!perfLogFeatures) {
    perfLogFeatures =
        new PerfLogFeatures({render: true, gc: true, frameCapture: true, userTiming: true});
  }
  if (!microMetrics) {
    microMetrics = {};
  }
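  // Only static providers are used here; optional features are appended below
  // when the corresponding option was passed to createMetric().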
  const providers: StaticProvider[] = [
    Options.DEFAULT_PROVIDERS, PerflogMetric.PROVIDERS,
    {provide: Options.MICRO_METRICS, useValue: microMetrics}, {
      provide: PerflogMetric.SET_TIMEOUT,
      useValue: (fn: Function, millis: number) => {
        commandLog.push(['setTimeout', millis]);
        fn();
      },
    },
    {
      provide: WebDriverExtension,
      useValue: new MockDriverExtension(perfLogs, commandLog, perfLogFeatures)
    }
  ];
  if (forceGc != null) {
    providers.push({provide: Options.FORCE_GC, useValue: forceGc});
  }
  if (captureFrames != null) {
    providers.push({provide: Options.CAPTURE_FRAMES, useValue: captureFrames});
  }
  if (receivedData != null) {
    providers.push({provide: Options.RECEIVED_DATA, useValue: receivedData});
  }
  if (requestCount != null) {
    providers.push({provide: Options.REQUEST_COUNT, useValue: requestCount});
  }
  if (ignoreNavigation != null) {
    providers.push({provide: PerflogMetric.IGNORE_NAVIGATION, useValue: ignoreNavigation});
  }
  return Injector.create(providers).get(PerflogMetric);
}

describe('perflog metric', () => {
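  // Returns the keys of a describe() result in sorted order, so the expectations
  // below do not depend on property insertion order.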
  function sortedKeys(stringMap: {[key: string]: any}) {
    const res: string[] = [];
    res.push(...Object.keys(stringMap));
    res.sort();
    return res;
  }

  it('should describe itself based on the perfLogFeatures', () => {
    expect(sortedKeys(createMetric([[]], new PerfLogFeatures()).describe())).toEqual([
      'pureScriptTime', 'scriptTime'
    ]);

    expect(
        sortedKeys(createMetric([[]], new PerfLogFeatures({render: true, gc: false})).describe()))
        .toEqual(['pureScriptTime', 'renderTime', 'scriptTime']);

    expect(sortedKeys(createMetric([[]], null!).describe())).toEqual([
      'gcAmount', 'gcTime', 'majorGcTime', 'pureScriptTime', 'renderTime', 'scriptTime'
    ]);

    expect(sortedKeys(createMetric([[]], new PerfLogFeatures({render: true, gc: true}), {
             forceGc: true
           }).describe()))
        .toEqual([
          'forcedGcAmount', 'forcedGcTime', 'gcAmount', 'gcTime', 'majorGcTime', 'pureScriptTime',
          'renderTime', 'scriptTime'
        ]);

    expect(sortedKeys(createMetric([[]], new PerfLogFeatures({userTiming: true}), {
             receivedData: true,
             requestCount: true
           }).describe()))
        .toEqual(['pureScriptTime', 'receivedData', 'requestCount', 'scriptTime']);
  });

  it('should describe itself based on micro metrics', () => {
    const description =
        createMetric([[]], null!, {microMetrics: {'myMicroMetric': 'someDesc'}}).describe();
    expect(description['myMicroMetric']).toEqual('someDesc');
  });

  it('should describe itself if frame capture is requested and available', () => {
    const description = createMetric([[]], new PerfLogFeatures({frameCapture: true}), {
                          captureFrames: true
                        }).describe();
    expect(description['frameTime.mean']).not.toContain('WARNING');
    expect(description['frameTime.best']).not.toContain('WARNING');
    expect(description['frameTime.worst']).not.toContain('WARNING');
    expect(description['frameTime.smooth']).not.toContain('WARNING');
  });

  it('should describe itself if frame capture is requested and not available', () => {
    const description = createMetric([[]], new PerfLogFeatures({frameCapture: false}), {
                          captureFrames: true
                        }).describe();
    expect(description['frameTime.mean']).toContain('WARNING');
    expect(description['frameTime.best']).toContain('WARNING');
    expect(description['frameTime.worst']).toContain('WARNING');
    expect(description['frameTime.smooth']).toContain('WARNING');
  });

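  // beginMeasure only talks to the driver; these tests assert on the exact call
  // sequence that the mock driver extension records in `commandLog`.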
  describe('beginMeasure', () => {
    it('should not force gc and mark the timeline',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         const metric = createMetric([[]], null!);
         metric.beginMeasure().then((_) => {
           expect(commandLog).toEqual([['timeBegin', 'benchpress0']]);

           async.done();
         });
       }));

    it('should force gc and mark the timeline',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         const metric = createMetric([[]], null!, {forceGc: true});
         metric.beginMeasure().then((_) => {
           expect(commandLog).toEqual([['gc'], ['timeBegin', 'benchpress0']]);

           async.done();
         });
       }));
  });

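  // endMeasure polls readPerfLog() until the end mark shows up and then aggregates
  // the collected events; the expected numbers follow directly from the timestamps
  // in the fabricated events (e.g. a 'script' interval from 4 to 6 adds 2 to scriptTime).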
  describe('endMeasure', () => {
    it('should mark and aggregate events in between the marks',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         const events = [[
           eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 4),
           eventFactory.end('script', 6), eventFactory.markEnd('benchpress0', 10)
         ]];
         const metric = createMetric(events, null!);
         metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
           expect(commandLog).toEqual([
             ['timeBegin', 'benchpress0'], ['timeEnd', 'benchpress0', null], 'readPerfLog'
           ]);
           expect(data['scriptTime']).toBe(2);

           async.done();
         });
       }));

    it('should mark and aggregate events since navigationStart',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         const events = [[
           eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 4),
           eventFactory.end('script', 6), eventFactory.instant('navigationStart', 7),
           eventFactory.start('script', 8), eventFactory.end('script', 9),
           eventFactory.markEnd('benchpress0', 10)
         ]];
         const metric = createMetric(events, null!);
         metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
           expect(data['scriptTime']).toBe(1);

           async.done();
         });
       }));

    it('should ignore navigationStart if ignoreNavigation is set',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         const events = [[
           eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 4),
           eventFactory.end('script', 6), eventFactory.instant('navigationStart', 7),
           eventFactory.start('script', 8), eventFactory.end('script', 9),
           eventFactory.markEnd('benchpress0', 10)
         ]];
         const metric = createMetric(events, null!, {ignoreNavigation: true});
         metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
           expect(data['scriptTime']).toBe(3);

           async.done();
         });
       }));

    it('should restart timing', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         const events = [
           [
             eventFactory.markStart('benchpress0', 0),
             eventFactory.markEnd('benchpress0', 1),
             eventFactory.markStart('benchpress1', 2),
           ],
           [eventFactory.markEnd('benchpress1', 3)]
         ];
         const metric = createMetric(events, null!);
         metric.beginMeasure()
             .then((_) => metric.endMeasure(true))
             .then((_) => metric.endMeasure(true))
             .then((_) => {
               expect(commandLog).toEqual([
                 ['timeBegin', 'benchpress0'], ['timeEnd', 'benchpress0', 'benchpress1'],
                 'readPerfLog', ['timeEnd', 'benchpress1', 'benchpress2'], 'readPerfLog'
               ]);

               async.done();
             });
       }));

    it('should loop and aggregate until the end mark is present',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         const events = [
           [eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 1)],
           [eventFactory.end('script', 2)],
           [
             eventFactory.start('script', 3), eventFactory.end('script', 5),
             eventFactory.markEnd('benchpress0', 10)
           ]
         ];
         const metric = createMetric(events, null!);
         metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
           expect(commandLog).toEqual([
             ['timeBegin', 'benchpress0'], ['timeEnd', 'benchpress0', null], 'readPerfLog',
             ['setTimeout', 100], 'readPerfLog', ['setTimeout', 100], 'readPerfLog'
           ]);
           expect(data['scriptTime']).toBe(3);

           async.done();
         });
       }));

    it('should store events after the end mark for the next call',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         const events = [
           [
             eventFactory.markStart('benchpress0', 0), eventFactory.markEnd('benchpress0', 1),
             eventFactory.markStart('benchpress1', 1), eventFactory.start('script', 1),
             eventFactory.end('script', 2)
           ],
           [
             eventFactory.start('script', 3), eventFactory.end('script', 5),
             eventFactory.markEnd('benchpress1', 6)
           ]
         ];
         const metric = createMetric(events, null!);
         metric.beginMeasure()
             .then((_) => metric.endMeasure(true))
             .then((data) => {
               expect(data['scriptTime']).toBe(0);
               return metric.endMeasure(true);
             })
             .then((data) => {
               expect(commandLog).toEqual([
                 ['timeBegin', 'benchpress0'], ['timeEnd', 'benchpress0', 'benchpress1'],
                 'readPerfLog', ['timeEnd', 'benchpress1', 'benchpress2'], 'readPerfLog'
               ]);
               expect(data['scriptTime']).toBe(3);

               async.done();
             });
       }));

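    // Shared fixture: one measured interval (benchpress0) followed by a forced-gc
    // interval (benchpress1) whose gc event drops usedHeapSize from 2500 to 1000
    // between 12 and 15, which the tests expect as forcedGcAmount 1.5 and forcedGcTime 3.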
    describe('with forced gc', () => {
      let events: PerfLogEvent[][];
      beforeEach(() => {
        events = [[
          eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 4),
          eventFactory.end('script', 6), eventFactory.markEnd('benchpress0', 10),
          eventFactory.markStart('benchpress1', 11),
          eventFactory.start('gc', 12, {'usedHeapSize': 2500}),
          eventFactory.end('gc', 15, {'usedHeapSize': 1000}),
          eventFactory.markEnd('benchpress1', 20)
        ]];
      });

      it('should measure forced gc', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           const metric = createMetric(events, null!, {forceGc: true});
           metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
             expect(commandLog).toEqual([
               ['gc'], ['timeBegin', 'benchpress0'], ['timeEnd', 'benchpress0', 'benchpress1'],
               'readPerfLog', ['gc'], ['timeEnd', 'benchpress1', null], 'readPerfLog'
             ]);
             expect(data['forcedGcTime']).toBe(3);
             expect(data['forcedGcAmount']).toBe(1.5);

             async.done();
           });
         }));

      it('should restart after the forced gc if needed',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           const metric = createMetric(events, null!, {forceGc: true});
           metric.beginMeasure().then((_) => metric.endMeasure(true)).then((data) => {
             expect(commandLog[5]).toEqual(['timeEnd', 'benchpress1', 'benchpress2']);

             async.done();
           });
         }));
    });
  });

  describe('aggregation', () => {
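    // Wraps the given events in benchpress0 start/end marks and runs a single
    // beginMeasure()/endMeasure(false) cycle, resolving with the aggregated values.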
    function aggregate(events: any[], {microMetrics, captureFrames, receivedData, requestCount}: {
      microMetrics?: {[key: string]: string},
      captureFrames?: boolean,
      receivedData?: boolean,
      requestCount?: boolean
    } = {}) {
      events.unshift(eventFactory.markStart('benchpress0', 0));
      events.push(eventFactory.markEnd('benchpress0', 10));
      const metric = createMetric([events], null!, {
        microMetrics: microMetrics,
        captureFrames: captureFrames,
        receivedData: receivedData,
        requestCount: requestCount
      });
      return metric.beginMeasure().then((_) => metric.endMeasure(false));
    }
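
    // 'frame' events are instants inside a frameCapture interval; the frameTime.*
    // values are derived from the deltas between consecutive frame instants.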
    describe('frame metrics', () => {
      it('should calculate mean frame time',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate(
               [
                 eventFactory.markStart('frameCapture', 0), eventFactory.instant('frame', 1),
                 eventFactory.instant('frame', 3), eventFactory.instant('frame', 4),
                 eventFactory.markEnd('frameCapture', 5)
               ],
               {captureFrames: true})
               .then((data) => {
                 expect(data['frameTime.mean']).toBe(((3 - 1) + (4 - 3)) / 2);
                 async.done();
               });
         }));

      it('should throw if no start event',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate([eventFactory.instant('frame', 4), eventFactory.markEnd('frameCapture', 5)], {
             captureFrames: true
           }).catch((err): any => {
             expect(() => {
               throw err;
             }).toThrowError('missing start event for frame capture');
             async.done();
           });
         }));

      it('should throw if no end event',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate(
               [eventFactory.markStart('frameCapture', 3), eventFactory.instant('frame', 4)],
               {captureFrames: true})
               .catch((err): any => {
                 expect(() => {
                   throw err;
                 }).toThrowError('missing end event for frame capture');
                 async.done();
               });
         }));

      it('should throw if trying to capture twice',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate(
               [
                 eventFactory.markStart('frameCapture', 3),
                 eventFactory.markStart('frameCapture', 4)
               ],
               {captureFrames: true})
               .catch((err): any => {
                 expect(() => {
                   throw err;
                 }).toThrowError('can capture frames only once per benchmark run');
                 async.done();
               });
         }));

      it('should throw if trying to capture when frame capture is disabled',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate([eventFactory.markStart('frameCapture', 3)]).catch((err) => {
             expect(() => {
               throw err;
             })
                 .toThrowError(
                     'found start event for frame capture, but frame capture was not requested in benchpress');
             async.done();
             return null;
           });
         }));

      it('should throw if frame capture is enabled, but nothing is captured',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate([], {captureFrames: true}).catch((err): any => {
             expect(() => {
               throw err;
             }).toThrowError('frame capture requested in benchpress, but no start event was found');
             async.done();
           });
         }));

      it('should calculate best and worst frame time',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate(
               [
                 eventFactory.markStart('frameCapture', 0), eventFactory.instant('frame', 1),
                 eventFactory.instant('frame', 9), eventFactory.instant('frame', 15),
                 eventFactory.instant('frame', 18), eventFactory.instant('frame', 28),
                 eventFactory.instant('frame', 32), eventFactory.markEnd('frameCapture', 10)
               ],
               {captureFrames: true})
               .then((data) => {
                 expect(data['frameTime.worst']).toBe(10);
                 expect(data['frameTime.best']).toBe(3);
                 async.done();
               });
         }));

      it('should calculate percentage of smoothness to be good',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate(
               [
                 eventFactory.markStart('frameCapture', 0), eventFactory.instant('frame', 1),
                 eventFactory.instant('frame', 2), eventFactory.instant('frame', 3),
                 eventFactory.markEnd('frameCapture', 4)
               ],
               {captureFrames: true})
               .then((data) => {
                 expect(data['frameTime.smooth']).toBe(1.0);
                 async.done();
               });
         }));

      it('should calculate percentage of smoothness to be bad',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate(
               [
                 eventFactory.markStart('frameCapture', 0), eventFactory.instant('frame', 1),
                 eventFactory.instant('frame', 2), eventFactory.instant('frame', 22),
                 eventFactory.instant('frame', 23), eventFactory.instant('frame', 24),
                 eventFactory.markEnd('frameCapture', 4)
               ],
               {captureFrames: true})
               .then((data) => {
                 expect(data['frameTime.smooth']).toBe(0.75);
                 async.done();
               });
         }));
    });

    it('should report a single interval',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         aggregate([
           eventFactory.start('script', 0), eventFactory.end('script', 5)
         ]).then((data) => {
           expect(data['scriptTime']).toBe(5);
           async.done();
         });
       }));

    it('should sum up multiple intervals',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         aggregate([
           eventFactory.start('script', 0), eventFactory.end('script', 5),
           eventFactory.start('script', 10), eventFactory.end('script', 17)
         ]).then((data) => {
           expect(data['scriptTime']).toBe(12);
           async.done();
         });
       }));

    it('should ignore not started intervals',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         aggregate([eventFactory.end('script', 10)]).then((data) => {
           expect(data['scriptTime']).toBe(0);
           async.done();
         });
       }));

    it('should ignore not ended intervals',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         aggregate([eventFactory.start('script', 10)]).then((data) => {
           expect(data['scriptTime']).toBe(0);
           async.done();
         });
       }));

    it('should ignore nested intervals',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         aggregate([
           eventFactory.start('script', 0), eventFactory.start('script', 5),
           eventFactory.end('script', 10), eventFactory.end('script', 17)
         ]).then((data) => {
           expect(data['scriptTime']).toBe(17);
           async.done();
         });
       }));

    it('should ignore events from a different process than the start mark',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         const otherProcessEventFactory = new TraceEventFactory('timeline', 'pid1');
         const metric = createMetric(
             [[
               eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 0, null),
               eventFactory.end('script', 5, null),
               otherProcessEventFactory.start('script', 10, null),
               otherProcessEventFactory.end('script', 17, null),
               eventFactory.markEnd('benchpress0', 20)
             ]],
             null!);
         metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
           expect(data['scriptTime']).toBe(5);
           async.done();
         });
       }));

    it('should mark a run as invalid if the start and end marks are different',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         const otherProcessEventFactory = new TraceEventFactory('timeline', 'pid1');
         const metric = createMetric(
             [[
               eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 0, null),
               eventFactory.end('script', 5, null),
               otherProcessEventFactory.start('script', 10, null),
               otherProcessEventFactory.end('script', 17, null),
               otherProcessEventFactory.markEnd('benchpress0', 20)
             ]],
             null!);
         metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
           expect(data['invalid']).toBe(1);
           async.done();
         });
       }));

    it('should support scriptTime metric',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         aggregate([
           eventFactory.start('script', 0), eventFactory.end('script', 5)
         ]).then((data) => {
           expect(data['scriptTime']).toBe(5);
           async.done();
         });
       }));

    it('should support renderTime metric',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         aggregate([
           eventFactory.start('render', 0), eventFactory.end('render', 5)
         ]).then((data) => {
           expect(data['renderTime']).toBe(5);
           async.done();
         });
       }));

    it('should support gcTime/gcAmount metric',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         aggregate([
           eventFactory.start('gc', 0, {'usedHeapSize': 2500}),
           eventFactory.end('gc', 5, {'usedHeapSize': 1000})
         ]).then((data) => {
           expect(data['gcTime']).toBe(5);
           expect(data['gcAmount']).toBe(1.5);
           expect(data['majorGcTime']).toBe(0);
           async.done();
         });
       }));

    it('should support majorGcTime metric',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         aggregate([
           eventFactory.start('gc', 0, {'usedHeapSize': 2500}),
           eventFactory.end('gc', 5, {'usedHeapSize': 1000, 'majorGc': true})
         ]).then((data) => {
           expect(data['gcTime']).toBe(5);
           expect(data['majorGcTime']).toBe(5);
           async.done();
         });
       }));

    it('should support pureScriptTime = scriptTime-gcTime-renderTime',
       inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
         aggregate([
           eventFactory.start('script', 0), eventFactory.start('gc', 1, {'usedHeapSize': 1000}),
           eventFactory.end('gc', 4, {'usedHeapSize': 0}), eventFactory.start('render', 4),
           eventFactory.end('render', 5), eventFactory.end('script', 6)
         ]).then((data) => {
           expect(data['scriptTime']).toBe(6);
           expect(data['pureScriptTime']).toBe(2);
           async.done();
         });
       }));

    describe('receivedData', () => {
      it('should report received data since last navigationStart',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate(
               [
                 eventFactory.instant('receivedData', 0, {'encodedDataLength': 1}),
                 eventFactory.instant('navigationStart', 1),
                 eventFactory.instant('receivedData', 2, {'encodedDataLength': 2}),
                 eventFactory.instant('navigationStart', 3),
                 eventFactory.instant('receivedData', 4, {'encodedDataLength': 4}),
                 eventFactory.instant('receivedData', 5, {'encodedDataLength': 8})
               ],
               {receivedData: true})
               .then((data) => {
                 expect(data['receivedData']).toBe(12);
                 async.done();
               });
         }));
    });

    describe('requestCount', () => {
      it('should report count of requests sent since last navigationStart',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate(
               [
                 eventFactory.instant('sendRequest', 0), eventFactory.instant('navigationStart', 1),
                 eventFactory.instant('sendRequest', 2), eventFactory.instant('navigationStart', 3),
                 eventFactory.instant('sendRequest', 4), eventFactory.instant('sendRequest', 5)
               ],
               {requestCount: true})
               .then((data) => {
                 expect(data['requestCount']).toBe(2);
                 async.done();
               });
         }));
    });
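
    // A mark named 'mm1*20' means the interval covers 20 iterations of the micro
    // metric, so the reported value is the duration divided by 20.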
    describe('microMetrics', () => {
      it('should report micro metrics',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate(
               [
                 eventFactory.markStart('mm1', 0),
                 eventFactory.markEnd('mm1', 5),
               ],
               {microMetrics: {'mm1': 'micro metric 1'}})
               .then((data) => {
                 expect(data['mm1']).toBe(5.0);
                 async.done();
               });
         }));

      it('should ignore micro metrics that were not specified',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate([
             eventFactory.markStart('mm1', 0),
             eventFactory.markEnd('mm1', 5),
           ]).then((data) => {
             expect(data['mm1']).toBeFalsy();
             async.done();
           });
         }));

      it('should report micro metric averages',
         inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
           aggregate(
               [
                 eventFactory.markStart('mm1*20', 0),
                 eventFactory.markEnd('mm1*20', 5),
               ],
               {microMetrics: {'mm1': 'micro metric 1'}})
               .then((data) => {
                 expect(data['mm1']).toBe(5 / 20);
                 async.done();
               });
         }));
    });
  });
});
})();
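
// Minimal driver extension used by createMetric(): every call is recorded in
// `commandLog`, and readPerfLog() hands out one batch from `perfLogs` per call,
// returning an empty batch once the queue is exhausted.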
class MockDriverExtension extends WebDriverExtension {
  constructor(
      private _perfLogs: any[], private _commandLog: any[],
      private _perfLogFeatures: PerfLogFeatures) {
    super();
  }

  timeBegin(name: string): Promise<any> {
    this._commandLog.push(['timeBegin', name]);
    return Promise.resolve(null);
  }

  timeEnd(name: string, restartName: string|null): Promise<any> {
    this._commandLog.push(['timeEnd', name, restartName]);
    return Promise.resolve(null);
  }

  perfLogFeatures(): PerfLogFeatures {
    return this._perfLogFeatures;
  }

  readPerfLog(): Promise<any> {
    this._commandLog.push('readPerfLog');
    if (this._perfLogs.length > 0) {
      const next = this._perfLogs[0];
      this._perfLogs.shift();
      return Promise.resolve(next);
    } else {
      return Promise.resolve([]);
    }
  }

  gc(): Promise<any> {
    this._commandLog.push(['gc']);
    return Promise.resolve(null);
  }
}