fix(benchpress): work around missing events from Chrome 63 (#21396)
Chrome 63 can cause the navigationStart event for the first run to arrive with a different pid than the start of the benchpress run. This makes the first collected result invalid. This workaround causes the sampler to ignore runs that have this condition. PR Close #21396
This commit is contained in:
parent
27196b676b
commit
fa03ae14b0
|
@@ -249,6 +249,9 @@ export class PerflogMetric extends Metric {
|
||||||
// not all events have been received, no further processing for now
|
// not all events have been received, no further processing for now
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
if (markStartEvent.pid !== markEndEvent.pid) {
|
||||||
|
result['invalid'] = 1;
|
||||||
|
}
|
||||||
|
|
||||||
let gcTimeInScript = 0;
|
let gcTimeInScript = 0;
|
||||||
let renderTimeInScript = 0;
|
let renderTimeInScript = 0;
|
||||||
|
|
|
@@ -63,7 +63,12 @@ export class Sampler {
|
||||||
}
|
}
|
||||||
return resultPromise.then((_) => this._driver.waitFor(this._execute))
|
return resultPromise.then((_) => this._driver.waitFor(this._execute))
|
||||||
.then((_) => this._metric.endMeasure(this._prepare === Options.NO_PREPARE))
|
.then((_) => this._metric.endMeasure(this._prepare === Options.NO_PREPARE))
|
||||||
.then((measureValues) => this._report(lastState, measureValues));
|
.then((measureValues) => {
|
||||||
|
if (!!measureValues['invalid']) {
|
||||||
|
return lastState;
|
||||||
|
}
|
||||||
|
return this._report(lastState, measureValues);
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
private _report(state: SampleState, metricValues: {[key: string]: any}): Promise<SampleState> {
|
private _report(state: SampleState, metricValues: {[key: string]: any}): Promise<SampleState> {
|
||||||
|
|
|
@@ -537,6 +537,24 @@ import {TraceEventFactory} from '../trace_event_factory';
|
||||||
});
|
});
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
it('should mark a run as invalid if the start and end marks are different',
|
||||||
|
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
|
||||||
|
const otherProcessEventFactory = new TraceEventFactory('timeline', 'pid1');
|
||||||
|
const metric = createMetric(
|
||||||
|
[[
|
||||||
|
eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 0, null),
|
||||||
|
eventFactory.end('script', 5, null),
|
||||||
|
otherProcessEventFactory.start('script', 10, null),
|
||||||
|
otherProcessEventFactory.end('script', 17, null),
|
||||||
|
otherProcessEventFactory.markEnd('benchpress0', 20)
|
||||||
|
]],
|
||||||
|
null !);
|
||||||
|
metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
|
||||||
|
expect(data['invalid']).toBe(1);
|
||||||
|
async.done();
|
||||||
|
});
|
||||||
|
}));
|
||||||
|
|
||||||
it('should support scriptTime metric',
|
it('should support scriptTime metric',
|
||||||
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
|
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
|
||||||
aggregate([
|
aggregate([
|
||||||
|
|
Loading…
Reference in New Issue