feat(benchpress): replace microIterations with microMetrics

This commit is contained in:
Tobias Bosch
2015-03-27 10:37:02 -07:00
parent 3afb744e77
commit 33bfc4c24a
13 changed files with 190 additions and 98 deletions

View File

@ -20,15 +20,11 @@ export class Options {
// TODO(tbosch): use static initializer when our transpiler supports it
static get USER_AGENT() { return _USER_AGENT; }
// TODO(tbosch): use static initializer when our transpiler supports it
/**
* Number of iterations that run inside the browser by user code.
* Used for micro benchmarks.
**/
static get MICRO_ITERATIONS() { return _MICRO_ITERATIONS; }
// TODO(tbosch): use static initializer when our transpiler supports it
static get NOW() { return _NOW; }
// TODO(tbosch): use static values when our transpiler supports them
static get WRITE_FILE() { return _WRITE_FILE; }
// TODO(tbosch): use static values when our transpiler supports them
static get MICRO_METRICS() { return _MICRO_METRICS; }
}
var _SAMPLE_ID = new OpaqueToken('Options.sampleId');
@ -39,7 +35,7 @@ var _PREPARE = new OpaqueToken('Options.prepare');
var _EXECUTE = new OpaqueToken('Options.execute');
var _CAPABILITIES = new OpaqueToken('Options.capabilities');
var _USER_AGENT = new OpaqueToken('Options.userAgent');
var _MICRO_ITERATIONS = new OpaqueToken('Options.microIterations');
var _MICRO_METRICS = new OpaqueToken('Options.microMetrics');
var _NOW = new OpaqueToken('Options.now');
var _WRITE_FILE = new OpaqueToken('Options.writeFile');
@ -48,5 +44,6 @@ var _DEFAULT_BINDINGS = [
bind(_SAMPLE_DESCRIPTION).toValue({}),
bind(_FORCE_GC).toValue(false),
bind(_PREPARE).toValue(false),
bind(_MICRO_METRICS).toValue({}),
bind(_NOW).toValue( () => DateWrapper.now() )
];

View File

@ -1,5 +1,7 @@
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
import { isPresent, isBlank, int, BaseException, StringWrapper, Math } from 'angular2/src/facade/lang';
import {
isPresent, isBlank, int, BaseException, StringWrapper, Math, RegExpWrapper, NumberWrapper
} from 'angular2/src/facade/lang';
import { ListWrapper, StringMap, StringMapWrapper } from 'angular2/src/facade/collection';
import { bind, OpaqueToken } from 'angular2/di';
@ -20,22 +22,21 @@ export class PerflogMetric extends Metric {
_remainingEvents:List;
_measureCount:int;
_setTimeout:Function;
_microIterations:int;
_microMetrics:StringMap<string, string>;
_perfLogFeatures:PerfLogFeatures;
/**
* @param driverExtension
* @param setTimeout
* @param microIterations Number of iterations that run inside the browser by user code.
* Used for micro benchmarks.
* @param microMetrics Name and description of metrics provided via console.time / console.timeEnd
**/
constructor(driverExtension:WebDriverExtension, setTimeout:Function, microIterations:int) {
constructor(driverExtension:WebDriverExtension, setTimeout:Function, microMetrics:StringMap<string, string>) {
super();
this._driverExtension = driverExtension;
this._remainingEvents = [];
this._measureCount = 0;
this._setTimeout = setTimeout;
this._microIterations = microIterations;
this._microMetrics = microMetrics;
this._perfLogFeatures = driverExtension.perfLogFeatures();
}
@ -52,9 +53,9 @@ export class PerflogMetric extends Metric {
res['gcAmount'] = 'gc amount in kbytes';
res['majorGcTime'] = 'time of major gcs in ms';
}
if (this._microIterations > 0) {
res['microScriptTimeAvg'] = 'average script time for a micro iteration';
}
StringMapWrapper.forEach(this._microMetrics, (desc, name) => {
StringMapWrapper.set(res, name, desc);
});
return res;
}
@ -137,6 +138,9 @@ export class PerflogMetric extends Metric {
if (this._perfLogFeatures.render) {
result['renderTime'] = 0;
}
StringMapWrapper.forEach(this._microMetrics, (desc, name) => {
result[name] = 0;
});
var markStartEvent = null;
var markEndEvent = null;
@ -147,17 +151,24 @@ export class PerflogMetric extends Metric {
events.forEach( (event) => {
var ph = event['ph'];
var name = event['name'];
var microIterations = 1;
var microIterationsMatch = RegExpWrapper.firstMatch(_MICRO_ITERATIONS_REGEX, name);
if (isPresent(microIterationsMatch)) {
name = microIterationsMatch[1];
microIterations = NumberWrapper.parseInt(microIterationsMatch[2], 10);
}
if (StringWrapper.equals(ph, 'b') && StringWrapper.equals(name, markName)) {
markStartEvent = event;
} else if (StringWrapper.equals(ph, 'e') && StringWrapper.equals(name, markName)) {
markEndEvent = event;
}
if (isPresent(markStartEvent) && isBlank(markEndEvent) && event['pid'] === markStartEvent['pid']) {
if (StringWrapper.equals(ph, 'B')) {
if (StringWrapper.equals(ph, 'B') || StringWrapper.equals(ph, 'b')) {
intervalStarts[name] = event;
} else if (StringWrapper.equals(ph, 'E') && isPresent(intervalStarts[name])) {
} else if ((StringWrapper.equals(ph, 'E') || StringWrapper.equals(ph, 'e')) && isPresent(intervalStarts[name])) {
var startEvent = intervalStarts[name];
var duration = event['ts'] - startEvent['ts'];
var duration = (event['ts'] - startEvent['ts']);
intervalStarts[name] = null;
if (StringWrapper.equals(name, 'gc')) {
result['gcTime'] += duration;
@ -177,14 +188,13 @@ export class PerflogMetric extends Metric {
}
} else if (StringWrapper.equals(name, 'script')) {
result['scriptTime'] += duration;
} else if (isPresent(this._microMetrics[name])) {
result[name] += duration / microIterations;
}
}
}
});
result['pureScriptTime'] = result['scriptTime'] - gcTimeInScript - renderTimeInScript;
if (this._microIterations > 0) {
result['microScriptTimeAvg'] = result['scriptTime'] / this._microIterations;
}
return isPresent(markStartEvent) && isPresent(markEndEvent) ? result : null;
}
@ -193,15 +203,16 @@ export class PerflogMetric extends Metric {
}
}
var _MICRO_ITERATIONS_REGEX = RegExpWrapper.create('(.+)\\*(\\d+)$');
var _MAX_RETRY_COUNT = 20;
var _MARK_NAME_PREFIX = 'benchpress';
var _SET_TIMEOUT = new OpaqueToken('PerflogMetric.setTimeout');
var _BINDINGS = [
bind(PerflogMetric).toFactory(
(driverExtension, setTimeout, microIterations) =>
new PerflogMetric(driverExtension, setTimeout, microIterations),
[WebDriverExtension, _SET_TIMEOUT, Options.MICRO_ITERATIONS]
(driverExtension, setTimeout, microMetrics) =>
new PerflogMetric(driverExtension, setTimeout, microMetrics),
[WebDriverExtension, _SET_TIMEOUT, Options.MICRO_METRICS]
),
bind(_SET_TIMEOUT).toValue( (fn, millis) => PromiseWrapper.setTimeout(fn, millis) ),
bind(Options.MICRO_ITERATIONS).toValue(0)
bind(_SET_TIMEOUT).toValue( (fn, millis) => PromiseWrapper.setTimeout(fn, millis) )
];

View File

@ -34,7 +34,7 @@ export class Runner {
this._defaultBindings = defaultBindings;
}
sample({id, execute, prepare, microIterations, bindings}):Promise<SampleState> {
sample({id, execute, prepare, microMetrics, bindings}):Promise<SampleState> {
var sampleBindings = [
_DEFAULT_BINDINGS,
this._defaultBindings,
@ -44,8 +44,8 @@ export class Runner {
if (isPresent(prepare)) {
ListWrapper.push(sampleBindings, bind(Options.PREPARE).toValue(prepare));
}
if (isPresent(microIterations)) {
ListWrapper.push(sampleBindings, bind(Options.MICRO_ITERATIONS).toValue(microIterations));
if (isPresent(microMetrics)) {
ListWrapper.push(sampleBindings, bind(Options.MICRO_METRICS).toValue(microMetrics));
}
if (isPresent(bindings)) {
ListWrapper.push(sampleBindings, bindings);

View File

@ -11,7 +11,7 @@ import {
xit,
} from 'angular2/test_lib';
import { List, ListWrapper } from 'angular2/src/facade/collection';
import { List, ListWrapper, StringMapWrapper } from 'angular2/src/facade/collection';
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
import { isPresent, isBlank } from 'angular2/src/facade/lang';
@ -27,20 +27,23 @@ export function main() {
var commandLog;
var eventFactory = new TraceEventFactory('timeline', 'pid0');
function createMetric(perfLogs, microIterations = 0, perfLogFeatures = null) {
function createMetric(perfLogs, microMetrics = null, perfLogFeatures = null) {
commandLog = [];
if (isBlank(perfLogFeatures)) {
perfLogFeatures = new PerfLogFeatures({render: true, gc: true});
}
if (isBlank(microMetrics)) {
microMetrics = StringMapWrapper.create();
}
var bindings = [
Options.DEFAULT_BINDINGS,
PerflogMetric.BINDINGS,
bind(Options.MICRO_METRICS).toValue(microMetrics),
bind(PerflogMetric.SET_TIMEOUT).toValue( (fn, millis) => {
ListWrapper.push(commandLog, ['setTimeout', millis]);
fn();
}),
bind(WebDriverExtension).toValue(new MockDriverExtension(perfLogs, commandLog, perfLogFeatures)),
bind(Options.MICRO_ITERATIONS).toValue(microIterations)
bind(WebDriverExtension).toValue(new MockDriverExtension(perfLogs, commandLog, perfLogFeatures))
];
return new Injector(bindings).get(PerflogMetric);
}
@ -48,12 +51,12 @@ export function main() {
describe('perflog metric', () => {
it('should describe itself based on the perfLogFeatrues', () => {
expect(createMetric([[]], 0, new PerfLogFeatures()).describe()).toEqual({
expect(createMetric([[]], null, new PerfLogFeatures()).describe()).toEqual({
'scriptTime': 'script execution time in ms, including gc and render',
'pureScriptTime': 'script execution time in ms, without gc nor render'
});
expect(createMetric([[]], 0, new PerfLogFeatures({
expect(createMetric([[]], null, new PerfLogFeatures({
render: true,
gc: false
})).describe()).toEqual({
@ -72,6 +75,13 @@ export function main() {
});
});
it('should describe itself based on micro metrics', () => {
var description = createMetric([[]], {
'myMicroMetric': 'someDesc'
}).describe();
expect(description['myMicroMetric']).toEqual('someDesc');
});
describe('beginMeasure', () => {
it('should mark the timeline', inject([AsyncTestCompleter], (async) => {
@ -194,10 +204,10 @@ export function main() {
describe('aggregation', () => {
function aggregate(events, microIterations = 0) {
function aggregate(events, microMetrics = null) {
ListWrapper.insert(events, 0, eventFactory.markStart('benchpress0', 0));
ListWrapper.push(events, eventFactory.markEnd('benchpress0', 10));
var metric = createMetric([events], microIterations);
var metric = createMetric([events], microMetrics);
return metric
.beginMeasure().then( (_) => metric.endMeasure(false) );
}
@ -319,25 +329,34 @@ export function main() {
});
}));
describe('microIterations', () => {
describe('microMetrics', () => {
it('should not report microScriptTimeAvg if microIterations = 0', inject([AsyncTestCompleter], (async) => {
it('should report micro metrics', inject([AsyncTestCompleter], (async) => {
aggregate([
eventFactory.start('script', 0),
eventFactory.end('script', 5)
], 0).then((data) => {
expect(isPresent(data['microScriptTimeAvg'])).toBe(false);
eventFactory.markStart('mm1', 0),
eventFactory.markEnd('mm1', 5),
], {'mm1': 'micro metric 1'}).then((data) => {
expect(data['mm1']).toBe(5.0);
async.done();
});
}));
it('should report microScriptTimeAvg', inject([AsyncTestCompleter], (async) => {
it('should ignore micro metrics that were not specified', inject([AsyncTestCompleter], (async) => {
aggregate([
eventFactory.start('script', 0),
eventFactory.end('script', 5)
], 4).then((data) => {
expect(data['scriptTime']).toBe(5);
expect(data['microScriptTimeAvg']).toBe(5/4);
eventFactory.markStart('mm1', 0),
eventFactory.markEnd('mm1', 5),
]).then((data) => {
expect(data['mm1']).toBeFalsy();
async.done();
});
}));
it('should report micro metric averages', inject([AsyncTestCompleter], (async) => {
aggregate([
eventFactory.markStart('mm1*20', 0),
eventFactory.markEnd('mm1*20', 5),
], {'mm1': 'micro metric 1'}).then((data) => {
expect(data['mm1']).toBe(5/20);
async.done();
});
}));

View File

@ -96,9 +96,9 @@ export function main() {
});
}));
it('should bind Options.MICRO_ITERATIONS', inject([AsyncTestCompleter], (async) => {
createRunner().sample({id: 'someId', microIterations: 23}).then( (_) => {
expect(injector.get(Options.MICRO_ITERATIONS)).toEqual(23);
it('should bind Options.MICRO_METRICS', inject([AsyncTestCompleter], (async) => {
createRunner().sample({id: 'someId', microMetrics: {'a': 'b'}}).then( (_) => {
expect(injector.get(Options.MICRO_METRICS)).toEqual({'a': 'b'});
async.done();
});
}));