feat(benchpress): add mean frame time metric

This is useful to measure the smoothness of animations and scrolling actions.

part of #821
closes #2474
Author: Michael Goderbauer
Date: 2015-06-09 15:19:26 -07:00
Committed by: Tobias Bosch
Commit: 6834c4992d (parent 1cf807c319)

7 changed files with 264 additions and 12 deletions
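For context on what the new metric computes: frames are reported as instant 'frame' events between a 'frameCapture' start/end marker pair, and meanFrameTime is the mean delta between consecutive frame timestamps. The sketch below is a minimal standalone illustration, not the actual PerflogMetric implementation; the FrameEvent shape and the meanFrameTime helper name are assumptions made here, while the event names and error messages are taken from the tests in this commit.

  // Hypothetical sketch of the aggregation the tests below exercise.
  // Not benchpress source code; shapes and helper names are illustrative.
  interface FrameEvent {
    name: string;          // e.g. 'frameCapture' or 'frame'
    ph: 'B' | 'E' | 'i';   // begin / end / instant phase, as in the trace event format
    ts: number;            // timestamp in ms
  }

  function meanFrameTime(events: FrameEvent[]): number {
    let capturing = false;
    let captured = false;
    const frameTimestamps: number[] = [];
    for (const e of events) {
      if (e.name === 'frameCapture' && e.ph === 'B') {
        if (captured) throw new Error('can capture frames only once per benchmark run');
        capturing = true;
        captured = true;
      } else if (e.name === 'frameCapture' && e.ph === 'E') {
        capturing = false;
      } else if (e.name === 'frame' && e.ph === 'i') {
        if (!capturing) throw new Error('missing start event for frame capture');
        frameTimestamps.push(e.ts);
      }
    }
    if (!captured) throw new Error('frame capture requested in benchpress, but no start event was found');
    if (capturing) throw new Error('missing end event for frame capture');
    // Sketch-level guard, not taken from the benchpress sources.
    if (frameTimestamps.length < 2) throw new Error('need at least two frame events');
    // Mean of the deltas between consecutive frames, e.g. frames at 1, 3, 4 ms
    // give ((3 - 1) + (4 - 3)) / 2 = 1.5 ms, matching the first aggregation test below.
    let sum = 0;
    for (let i = 1; i < frameTimestamps.length; i++) {
      sum += frameTimestamps[i] - frameTimestamps[i - 1];
    }
    return sum / (frameTimestamps.length - 1);
  }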


@@ -31,10 +31,11 @@ export function main() {
   var commandLog;
   var eventFactory = new TraceEventFactory('timeline', 'pid0');

-  function createMetric(perfLogs, microMetrics = null, perfLogFeatures = null, forceGc = null) {
+  function createMetric(perfLogs, microMetrics = null, perfLogFeatures = null, forceGc = null,
+                        captureFrames = null) {
     commandLog = [];
     if (isBlank(perfLogFeatures)) {
-      perfLogFeatures = new PerfLogFeatures({render: true, gc: true});
+      perfLogFeatures = new PerfLogFeatures({render: true, gc: true, frameCapture: true});
     }
     if (isBlank(microMetrics)) {
       microMetrics = StringMapWrapper.create();
@@ -54,6 +55,9 @@ export function main() {
     if (isPresent(forceGc)) {
       ListWrapper.push(bindings, bind(Options.FORCE_GC).toValue(forceGc));
     }
+    if (isPresent(captureFrames)) {
+      ListWrapper.push(bindings, bind(Options.CAPTURE_FRAMES).toValue(captureFrames));
+    }
     return Injector.resolveAndCreate(bindings).get(PerflogMetric);
   }
@@ -98,6 +102,20 @@ export function main() {
       expect(description['myMicroMetric']).toEqual('someDesc');
     });

+    it('should describe itself if frame capture is requested and available', () => {
+      var description =
+          createMetric([[]], null, new PerfLogFeatures({frameCapture: true}), null, true)
+              .describe();
+      expect(description['meanFrameTime']).not.toContain('WARNING');
+    });
+
+    it('should describe itself if frame capture is requested and not available', () => {
+      var description =
+          createMetric([[]], null, new PerfLogFeatures({frameCapture: false}), null, true)
+              .describe();
+      expect(description['meanFrameTime']).toContain('WARNING');
+    });
+
     describe('beginMeasure', () => {
       it('should not force gc and mark the timeline', inject([AsyncTestCompleter], (async) => {
@@ -300,13 +318,88 @@ export function main() {
     describe('aggregation', () => {

-      function aggregate(events, microMetrics = null) {
+      function aggregate(events, microMetrics = null, captureFrames = null) {
         ListWrapper.insert(events, 0, eventFactory.markStart('benchpress0', 0));
         ListWrapper.push(events, eventFactory.markEnd('benchpress0', 10));
-        var metric = createMetric([events], microMetrics);
+        var metric = createMetric([events], microMetrics, null, null, captureFrames);
         return metric.beginMeasure().then((_) => metric.endMeasure(false));
       }

+      describe('frame metrics', () => {
+        it('should calculate mean frame time', inject([AsyncTestCompleter], (async) => {
+          aggregate([
+            eventFactory.markStart('frameCapture', 0),
+            eventFactory.instant('frame', 1),
+            eventFactory.instant('frame', 3),
+            eventFactory.instant('frame', 4),
+            eventFactory.markEnd('frameCapture', 5)
+          ],
+          null, true)
+              .then((data) => {
+                expect(data['meanFrameTime']).toBe(((3 - 1) + (4 - 3)) / 2);
+                async.done();
+              });
+        }));
+
+        it('should throw if no start event', inject([AsyncTestCompleter], (async) => {
+          PromiseWrapper.catchError(
+              aggregate(
+                  [eventFactory.instant('frame', 4), eventFactory.markEnd('frameCapture', 5)],
+                  null, true),
+              (err) => {
+                expect(() => { throw err; })
+                    .toThrowError('missing start event for frame capture');
+                async.done();
+              });
+        }));
+
+        it('should throw if no end event', inject([AsyncTestCompleter], (async) => {
+          PromiseWrapper.catchError(
+              aggregate(
+                  [eventFactory.markStart('frameCapture', 3), eventFactory.instant('frame', 4)],
+                  null, true),
+              (err) => {
+                expect(() => { throw err; }).toThrowError('missing end event for frame capture');
+                async.done();
+              });
+        }));
+
+        it('should throw if trying to capture twice', inject([AsyncTestCompleter], (async) => {
+          PromiseWrapper.catchError(
+              aggregate([
+                eventFactory.markStart('frameCapture', 3),
+                eventFactory.markStart('frameCapture', 4)
+              ],
+              null, true),
+              (err) => {
+                expect(() => { throw err; })
+                    .toThrowError('can capture frames only once per benchmark run');
+                async.done();
+              });
+        }));
+
+        it('should throw if trying to capture when frame capture is disabled',
+           inject([AsyncTestCompleter], (async) => {
+             PromiseWrapper.catchError(aggregate([eventFactory.markStart('frameCapture', 3)]), (err) => {
+               expect(() => { throw err; })
+                   .toThrowError(
+                       'found start event for frame capture, but frame capture was not requested in benchpress');
+               async.done();
+             });
+           }));
+
+        it('should throw if frame capture is enabled, but nothing is captured',
+           inject([AsyncTestCompleter], (async) => {
+             PromiseWrapper.catchError(aggregate([], null, true), (err) => {
+               expect(() => { throw err; })
+                   .toThrowError(
+                       'frame capture requested in benchpress, but no start event was found');
+               async.done();
+             });
+           }));
+      });
+
       it('should report a single interval', inject([AsyncTestCompleter], (async) => {
         aggregate([eventFactory.start('script', 0), eventFactory.end('script', 5)])


@@ -25,6 +25,8 @@ export class TraceEventFactory {
   end(name, time, args = null) { return this.create('E', name, time, args); }

   instant(name, time, args = null) { return this.create('i', name, time, args); }

+  complete(name, time, duration, args = null) {
+    var res = this.create('X', name, time, args);
+    res['dur'] = duration;


@@ -35,6 +35,7 @@ export function main() {
   var v8EventsOtherProcess = new TraceEventFactory('v8', 'pid1');
   var chromeTimelineEvents =
       new TraceEventFactory('disabled-by-default-devtools.timeline', 'pid0');
+  var benchmarkEvents = new TraceEventFactory('benchmark', 'pid0');
   var normEvents = new TraceEventFactory('timeline', 'pid0');

   function createExtension(perfRecords = null, messageMethod = 'Tracing.dataCollected') {
@@ -111,6 +112,49 @@ export function main() {
               async.done();
             });
       }));

+    describe('frame metrics', () => {
+      it('should report ImplThreadRenderingStats as frame event',
+         inject([AsyncTestCompleter], (async) => {
+           createExtension([
+             benchmarkEvents.instant('BenchmarkInstrumentation::ImplThreadRenderingStats', 1100,
+                                     {'data': {'frame_count': 1}})
+           ])
+               .readPerfLog()
+               .then((events) => {
+                 expect(events).toEqual([
+                   normEvents.create('i', 'frame', 1.1),
+                 ]);
+                 async.done();
+               });
+         }));
+
+      it('should not report ImplThreadRenderingStats with zero frames',
+         inject([AsyncTestCompleter], (async) => {
+           createExtension([
+             benchmarkEvents.instant('BenchmarkInstrumentation::ImplThreadRenderingStats', 1100,
+                                     {'data': {'frame_count': 0}})
+           ])
+               .readPerfLog()
+               .then((events) => {
+                 expect(events).toEqual([]);
+                 async.done();
+               });
+         }));
+
+      it('should throw when ImplThreadRenderingStats contains more than one frame',
+         inject([AsyncTestCompleter], (async) => {
+           PromiseWrapper.catchError(
+               createExtension([
+                 benchmarkEvents.instant('BenchmarkInstrumentation::ImplThreadRenderingStats',
+                                         1100, {'data': {'frame_count': 2}})
+               ]).readPerfLog(),
+               (err) => {
+                 expect(() => { throw err; })
+                     .toThrowError('multi-frame render stats not supported');
+                 async.done();
+               });
+         }));
+    });
+
     it('should normalize "tdur" to "dur"', inject([AsyncTestCompleter], (async) => {
       var event = chromeTimelineEvents.create('X', 'FunctionCall', 1100, null);