feat(benchpress): rewritten implementation

Limitations:
- the cloud reporter is not supported anymore
This commit is contained in:
Tobias Bosch
2015-02-11 10:13:49 -08:00
parent 44845839a6
commit f6284f2a55
78 changed files with 2666 additions and 1018 deletions

View File

@ -0,0 +1,329 @@
import {ddescribe, describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import { List, ListWrapper } from 'angular2/src/facade/collection';
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
import { Metric, PerflogMetric, WebDriverExtension, bind, Injector } from 'benchpress/benchpress';
// Spec for PerflogMetric: builds the metric through a benchpress Injector with
// a synchronous SET_TIMEOUT stub and a MockDriverExtension serving canned
// perf-log batches, then asserts marking, polling, and aggregation behavior.
export function main() {
  // Every driver command and stubbed timer call is appended here so tests can
  // assert the exact order of operations; reset by createMetric().
  var commandLog;

  // Creates the Metric under test. `perfLogs` is a list of batches; the mock
  // extension returns one batch per readPerfLog() call. The SET_TIMEOUT stub
  // fires callbacks synchronously so polling loops complete immediately.
  function createMetric(perfLogs) {
    commandLog = [];
    return new Injector([
      PerflogMetric.BINDINGS,
      bind(PerflogMetric.SET_TIMEOUT).toValue( (fn, millis) => {
        ListWrapper.push(commandLog, ['setTimeout', millis]);
        fn();
      }),
      bind(WebDriverExtension).toValue(new MockDriverExtension(perfLogs, commandLog))
    ]).get(Metric);
  }

  describe('perflog metric', () => {

    it('should describe itself', () => {
      expect(createMetric([[]]).describe()['script']).toBe('script execution time in ms');
    });

    describe('beginMeasure', () => {
      it('should mark the timeline', (done) => {
        var metric = createMetric([[]]);
        metric.beginMeasure().then((_) => {
          expect(commandLog).toEqual([['timeBegin', 'benchpress0']]);
          done();
        });
      });
    });

    describe('endMeasure', () => {
      it('should mark and aggregate events in between the marks', (done) => {
        var events = [
          [
            markStartEvent('benchpress0'),
            startEvent('script', 4),
            endEvent('script', 6),
            markEndEvent('benchpress0')
          ]
        ];
        var metric = createMetric(events);
        metric.beginMeasure()
          .then( (_) => metric.endMeasure(false) )
          .then( (data) => {
            expect(commandLog).toEqual([
              ['timeBegin', 'benchpress0'],
              ['timeEnd', 'benchpress0', null],
              'readPerfLog'
            ]);
            // script interval ran from ts 4 to ts 6 -> 2ms
            expect(data['script']).toBe(2);
            done();
          });
      });

      it('should restart timing', (done) => {
        var events = [
          [
            markStartEvent('benchpress0'),
            markEndEvent('benchpress0'),
            markStartEvent('benchpress1'),
          ], [
            markEndEvent('benchpress1')
          ]
        ];
        var metric = createMetric(events);
        metric.beginMeasure()
          .then( (_) => metric.endMeasure(true) )
          .then( (_) => metric.endMeasure(true) )
          .then( (_) => {
            // endMeasure(true) chains measurements: each timeEnd also names
            // the next begin mark (benchpress1, benchpress2, ...).
            expect(commandLog).toEqual([
              ['timeBegin', 'benchpress0'],
              ['timeEnd', 'benchpress0', 'benchpress1'],
              'readPerfLog',
              ['timeEnd', 'benchpress1', 'benchpress2'],
              'readPerfLog'
            ]);
            done();
          });
      });

      it('should loop and aggregate until the end mark is present', (done) => {
        var events = [
          [ markStartEvent('benchpress0'), startEvent('script', 1) ],
          [ endEvent('script', 2) ],
          [ startEvent('script', 3), endEvent('script', 5), markEndEvent('benchpress0') ]
        ];
        var metric = createMetric(events);
        metric.beginMeasure()
          .then( (_) => metric.endMeasure(false) )
          .then( (data) => {
            // Polls (setTimeout 100ms + readPerfLog) until the end mark appears.
            expect(commandLog).toEqual([
              ['timeBegin', 'benchpress0'],
              ['timeEnd', 'benchpress0', null],
              'readPerfLog',
              [ 'setTimeout', 100 ],
              'readPerfLog',
              [ 'setTimeout', 100 ],
              'readPerfLog'
            ]);
            // (2-1) + (5-3) = 3ms of script time across the polled batches
            expect(data['script']).toBe(3);
            done();
          });
      });

      it('should store events after the end mark for the next call', (done) => {
        var events = [
          [ markStartEvent('benchpress0'), markEndEvent('benchpress0'), markStartEvent('benchpress1'),
            startEvent('script', 1), endEvent('script', 2) ],
          [ startEvent('script', 3), endEvent('script', 5), markEndEvent('benchpress1') ]
        ];
        var metric = createMetric(events);
        metric.beginMeasure()
          .then( (_) => metric.endMeasure(true) )
          .then( (data) => {
            // First window (benchpress0..benchpress0) contains no script events.
            expect(data['script']).toBe(0);
            return metric.endMeasure(true)
          })
          .then( (data) => {
            expect(commandLog).toEqual([
              ['timeBegin', 'benchpress0'],
              ['timeEnd', 'benchpress0', 'benchpress1'],
              'readPerfLog',
              ['timeEnd', 'benchpress1', 'benchpress2'],
              'readPerfLog'
            ]);
            // Events buffered past the first end mark count towards this window.
            expect(data['script']).toBe(3);
            done();
          });
      });
    });

    describe('aggregation', () => {

      // Wraps `events` in begin/end marks and runs one full measure cycle,
      // resolving with the aggregated metric values.
      function aggregate(events) {
        ListWrapper.insert(events, 0, markStartEvent('benchpress0'));
        ListWrapper.push(events, markEndEvent('benchpress0'));
        var metric = createMetric([events]);
        return metric
          .beginMeasure().then( (_) => metric.endMeasure(false) );
      }

      it('should report a single interval', (done) => {
        aggregate([
          startEvent('script', 0),
          endEvent('script', 5)
        ]).then((data) => {
          expect(data['script']).toBe(5);
          done();
        });
      });

      it('should sum up multiple intervals', (done) => {
        aggregate([
          startEvent('script', 0),
          endEvent('script', 5),
          startEvent('script', 10),
          endEvent('script', 17)
        ]).then((data) => {
          expect(data['script']).toBe(12);
          done();
        });
      });

      it('should ignore not started intervals', (done) => {
        aggregate([
          endEvent('script', 10)
        ]).then((data) => {
          expect(data['script']).toBe(0);
          done();
        });
      });

      it('should ignore not ended intervals', (done) => {
        aggregate([
          startEvent('script', 10)
        ]).then((data) => {
          expect(data['script']).toBe(0);
          done();
        });
      });

      // The same interval aggregation applies to all duration-based metrics.
      ['script', 'gcTime', 'render'].forEach( (metricName) => {
        it(`should support ${metricName} metric`, (done) => {
          aggregate([
            startEvent(metricName, 0),
            endEvent(metricName, 5)
          ]).then((data) => {
            expect(data[metricName]).toBe(5);
            done();
          });
        });
      });

      it('should support gcAmount metric', (done) => {
        aggregate([
          startEvent('gc', 0),
          endEvent('gc', 5, {'amount': 10})
        ]).then((data) => {
          expect(data['gcAmount']).toBe(10);
          done();
        });
      });

      it('should subtract gcTime in script from script time', (done) => {
        aggregate([
          startEvent('script', 0),
          startEvent('gc', 1),
          endEvent('gc', 4, {'amount': 10}),
          endEvent('script', 5)
        ]).then((data) => {
          // 5ms script minus the 3ms of gc that ran inside it
          expect(data['script']).toBe(2);
          done();
        });
      });

      describe('gcTimeInScript / gcAmountInScript', () => {
        it('should use gc during script execution', (done) => {
          aggregate([
            startEvent('script', 0),
            startEvent('gc', 1),
            endEvent('gc', 4, {'amount': 10}),
            endEvent('script', 5)
          ]).then((data) => {
            expect(data['gcTimeInScript']).toBe(3);
            expect(data['gcAmountInScript']).toBe(10);
            done();
          });
        });

        it('should ignore gc outside of script execution', (done) => {
          aggregate([
            startEvent('gc', 1),
            endEvent('gc', 4, {'amount': 10}),
            startEvent('script', 0),
            endEvent('script', 5)
          ]).then((data) => {
            expect(data['gcTimeInScript']).toBe(0);
            expect(data['gcAmountInScript']).toBe(0);
            done();
          });
        });
      });
    });
  });
}
// Trace event that marks the beginning of a named measurement (phase 'b').
function markStartEvent(type) {
  return {'name': type, 'ph': 'b'};
}
// Trace event that marks the end of a named measurement (phase 'e').
function markEndEvent(type) {
  return {'name': type, 'ph': 'e'};
}
// Duration-start trace event (phase 'B') at timestamp `time`.
function startEvent(type, time) {
  return {'name': type, 'ts': time, 'ph': 'B'};
}
// Duration-end trace event (phase 'E') at timestamp `time`; `args` carries
// optional metadata (e.g. {'amount': ...} for gc events) and defaults to null.
function endEvent(type, time, args = null) {
  return {'name': type, 'ts': time, 'ph': 'E', 'args': args};
}
// Test double for WebDriverExtension: records every call into a shared
// command log and serves canned perf-log batches one at a time.
class MockDriverExtension extends WebDriverExtension {
  _perfLogs:List;
  _commandLog:List;
  constructor(perfLogs, commandLog) {
    super();
    this._perfLogs = perfLogs;
    this._commandLog = commandLog;
  }
  timeBegin(name):Promise {
    ListWrapper.push(this._commandLog, ['timeBegin', name]);
    return PromiseWrapper.resolve(null);
  }
  timeEnd(name, restartName):Promise {
    ListWrapper.push(this._commandLog, ['timeEnd', name, restartName]);
    return PromiseWrapper.resolve(null);
  }
  readPerfLog():Promise {
    ListWrapper.push(this._commandLog, 'readPerfLog');
    // Pop and resolve the next batch; an exhausted queue yields [].
    if (this._perfLogs.length === 0) {
      return PromiseWrapper.resolve([]);
    }
    var batch = this._perfLogs[0];
    ListWrapper.removeAt(this._perfLogs, 0);
    return PromiseWrapper.resolve(batch);
  }
}

View File

@ -0,0 +1,101 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import { isBlank, isPresent } from 'angular2/src/facade/lang';
import { List, ListWrapper } from 'angular2/src/facade/collection';
import {
SampleState, Reporter, bind, Injector,
ConsoleReporter, SampleDescription
} from 'benchpress/benchpress';
// Spec for ConsoleReporter: captures printed lines via a stubbed PRINT
// binding and asserts the header, row, and footer formatting.
export function main() {
  describe('console reporter', () => {
    var reporter;
    // Lines printed by the reporter; reset by createReporter().
    var log;

    // Builds a ConsoleReporter whose PRINT binding appends to `log`.
    // columnWidth is only bound when provided so the default width stays testable.
    function createReporter({columnWidth, sampleId, descriptions, metrics}) {
      log = [];
      if (isBlank(descriptions)) {
        descriptions = [];
      }
      if (isBlank(sampleId)) {
        sampleId = 'null';
      }
      var bindings = [
        ConsoleReporter.BINDINGS,
        bind(SampleDescription).toValue(new SampleDescription(sampleId, descriptions, metrics)),
        bind(ConsoleReporter.PRINT).toValue((line) => ListWrapper.push(log, line))
      ];
      if (isPresent(columnWidth)) {
        ListWrapper.push(bindings, bind(ConsoleReporter.COLUMN_WIDTH).toValue(columnWidth));
      }
      reporter = new Injector(bindings).get(Reporter);
    }

    it('should print the sample id, description and table header', () => {
      createReporter({
        columnWidth: 8,
        sampleId: 'someSample',
        descriptions: [{
          'a': 1,
          'b': 2
        }],
        metrics: {
          'm1': 'some desc',
          'm2': 'some other desc'
        }
      });
      expect(log).toEqual([
        'BENCHMARK someSample',
        'Description:',
        '- a: 1',
        '- b: 2',
        'Metrics:',
        '- m1: some desc',
        '- m2: some other desc',
        '',
        ' m1 | m2',
        '-------- | --------',
      ]);
    });

    it('should print a table row', () => {
      createReporter({
        columnWidth: 8,
        metrics: {
          'a': '',
          'b': ''
        }
      });
      // Discard the header output; only the data row is asserted below.
      log = [];
      reporter.reportMeasureValues(0, {
        'a': 1.23, 'b': 2
      });
      expect(log).toEqual([
        ' 1.23 | 2.00'
      ]);
    });

    it('should print the table footer and stats when there is a valid sample', () => {
      createReporter({
        columnWidth: 8,
        metrics: {
          'a': '',
          'b': ''
        }
      });
      log = [];
      // Footer shows mean ± coefficient of variation per column.
      reporter.reportSample([], [{
        'a': 3, 'b': 6
      },{
        'a': 5, 'b': 9
      }]);
      expect(log).toEqual([
        '======== | ========',
        '4.00±25% | 7.50±20%'
      ]);
    });
  });
}

View File

@ -0,0 +1,119 @@
import {describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import {
Runner, Sampler, SampleDescription,
Validator, bind, Injector, Metric,
Options
} from 'benchpress/benchpress';
import { isBlank } from 'angular2/src/facade/lang';
import { Promise, PromiseWrapper } from 'angular2/src/facade/async';
// Spec for Runner: replaces the Sampler binding with a mock factory so each
// sample() call's child injector can be captured and its bindings inspected.
export function main() {
  describe('runner', () => {
    // The per-sample child injector, captured by the Sampler factory below.
    var injector;
    var runner;

    // Builds a Runner with mock Sampler/Metric/Validator bindings layered on
    // top of the optional `defaultBindings`.
    function createRunner(defaultBindings = null) {
      if (isBlank(defaultBindings)) {
        defaultBindings = [];
      }
      runner = new Runner([
        defaultBindings,
        bind(Sampler).toFactory(
          (_injector) => {
            injector = _injector;
            return new MockSampler();
          }, [Injector]
        ),
        bind(Metric).toFactory( () => new MockMetric(), []),
        bind(Validator).toFactory( () => new MockValidator(), [])
      ]);
      return runner;
    }

    it('should set SampleDescription.id', (done) => {
      createRunner().sample({id: 'someId'}).then( (_) => {
        expect(injector.get(SampleDescription).id).toBe('someId');
        done();
      });
    });

    it('should merge SampleDescription.description', (done) => {
      createRunner([
        bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1})
      ]).sample({id: 'someId', bindings: [
        bind(Options.SAMPLE_DESCRIPTION).toValue({'b': 2})
      ]}).then( (_) => {
        // Merged from defaults, the per-sample description, and the
        // validator's describe() ('v': 11 comes from MockValidator).
        expect(injector.get(SampleDescription).description).toEqual({
          'forceGc': false,
          'a': 1,
          'b': 2,
          'v': 11
        });
        done();
      });
    });

    it('should fill SampleDescription.metrics from the Metric', (done) => {
      createRunner().sample({id: 'someId'}).then( (_) => {
        expect(injector.get(SampleDescription).metrics).toEqual({ 'm1': 'some metric' });
        done();
      });
    });

    it('should bind Options.EXECUTE', (done) => {
      var execute = () => {};
      createRunner().sample({id: 'someId', execute: execute}).then( (_) => {
        expect(injector.get(Options.EXECUTE)).toEqual(execute);
        done();
      });
    });

    it('should bind Options.PREPARE', (done) => {
      var prepare = () => {};
      createRunner().sample({id: 'someId', prepare: prepare}).then( (_) => {
        expect(injector.get(Options.PREPARE)).toEqual(prepare);
        done();
      });
    });

    it('should overwrite bindings per sample call', (done) => {
      createRunner([
        bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1}),
      ]).sample({id: 'someId', bindings: [
        bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 2}),
      ]}).then( (_) => {
        expect(injector.get(SampleDescription).description['a']).toBe(2);
        done();
      });
    });
  });
}
// Validator stub; its describe() contributes {'v': 11} to the merged
// SampleDescription. The implicit default constructor forwards to super.
class MockValidator extends Validator {
  describe() {
    return { 'v': 11 };
  }
}
// Metric stub; its describe() supplies the metric catalog used by the spec.
// The implicit default constructor forwards to super.
class MockMetric extends Metric {
  describe() {
    return { 'm1': 'some metric' };
  }
}
// Sampler stub that resolves immediately with a dummy value instead of
// running a real measurement loop.
class MockSampler extends Sampler {
  sample():Promise {
    return PromiseWrapper.resolve(23);
  }
}

View File

@ -0,0 +1,364 @@
import {describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import { isBlank, isPresent, BaseException, stringify } from 'angular2/src/facade/lang';
import { ListWrapper, List } from 'angular2/src/facade/collection';
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
import {
Sampler, WebDriverAdapter, WebDriverExtension,
Validator, Metric, Reporter, Browser,
bind, Injector, Options
} from 'benchpress/benchpress';
// Spec for Sampler: wires it with mock collaborators and asserts the exact
// ordering of prepare/gc/measure/execute calls plus validation and reporting.
export function main() {
  var EMPTY_EXECUTE = () => {};

  describe('sampler', () => {
    var sampler;

    // Builds the Sampler under test. Any collaborator left blank gets a no-op
    // mock; prepare/forceGc are only bound when present so the "no prepare" /
    // "no gc" code paths remain reachable.
    function createSampler({
      driver,
      driverExtension,
      metric,
      reporter,
      validator,
      forceGc,
      prepare,
      execute
    } = {}) {
      if (isBlank(metric)) {
        metric = new MockMetric([]);
      }
      if (isBlank(reporter)) {
        reporter = new MockReporter([]);
      }
      if (isBlank(driver)) {
        driver = new MockDriverAdapter([]);
      }
      if (isBlank(driverExtension)) {
        driverExtension = new MockDriverExtension([]);
      }
      var bindings = ListWrapper.concat(Sampler.BINDINGS, [
        bind(Metric).toValue(metric),
        bind(Reporter).toValue(reporter),
        bind(WebDriverAdapter).toValue(driver),
        bind(WebDriverExtension).toValue(driverExtension),
        bind(Options.EXECUTE).toValue(execute),
        bind(Validator).toValue(validator)
      ]);
      if (isPresent(prepare)) {
        ListWrapper.push(bindings, bind(Options.PREPARE).toValue(prepare));
      }
      if (isPresent(forceGc)) {
        ListWrapper.push(bindings, bind(Options.FORCE_GC).toValue(forceGc));
      }
      sampler = new Injector(bindings).get(Sampler);
    }

    it('should call the prepare and execute callbacks using WebDriverAdapter.waitFor', (done) => {
      var log = [];
      var count = 0;
      // waitFor wrapper records the callback's return value, proving every
      // prepare/execute call goes through the adapter.
      var driver = new MockDriverAdapter([], (callback) => {
        var result = callback();
        ListWrapper.push(log, result);
        return PromiseWrapper.resolve(result);
      });
      createSampler({
        driver: driver,
        validator: createCountingValidator(2),
        prepare: () => {
          return count++;
        },
        execute: () => {
          return count++;
        }
      });
      sampler.sample().then( (_) => {
        // 2 iterations x (prepare + execute) = 4 calls, in order.
        expect(count).toBe(4);
        expect(log).toEqual([0,1,2,3]);
        done();
      });
    });

    it('should call prepare, gc, beginMeasure, execute, gc, endMeasure for every iteration', (done) => {
      var workCount = 0;
      var log = [];
      createSampler({
        forceGc: true,
        metric: createCountingMetric(log),
        driverExtension: new MockDriverExtension(log),
        validator: createCountingValidator(2),
        prepare: () => {
          ListWrapper.push(log, `p${workCount++}`);
        },
        execute: () => {
          ListWrapper.push(log, `w${workCount++}`);
        }
      });
      sampler.sample().then( (_) => {
        // With a prepare callback, measurement restarts each iteration
        // (endMeasure(false)); gc runs around every measured section.
        expect(log).toEqual([
          ['gc'],
          'p0',
          ['gc'],
          ['beginMeasure'],
          'w1',
          ['gc'],
          ['endMeasure', false, {'script': 0}],
          'p2',
          ['gc'],
          ['beginMeasure'],
          'w3',
          ['gc'],
          ['endMeasure', false, {'script': 1}],
        ]);
        done();
      });
    });

    it('should call execute, gc, endMeasure for every iteration if there is no prepare callback', (done) => {
      var log = [];
      var workCount = 0;
      createSampler({
        forceGc: true,
        metric: createCountingMetric(log),
        driverExtension: new MockDriverExtension(log),
        validator: createCountingValidator(2),
        execute: () => {
          ListWrapper.push(log, `w${workCount++}`);
        },
        prepare: null
      });
      sampler.sample().then( (_) => {
        // Without prepare, measurement continues across iterations
        // (endMeasure(true)) and beginMeasure happens only once.
        expect(log).toEqual([
          ['gc'],
          ['beginMeasure'],
          'w0',
          ['gc'],
          ['endMeasure', true, {'script': 0}],
          'w1',
          ['gc'],
          ['endMeasure', true, {'script': 1}],
        ]);
        done();
      });
    });

    it('should not gc if the flag is not set', (done) => {
      var workCount = 0;
      var log = [];
      createSampler({
        metric: createCountingMetric(),
        driverExtension: new MockDriverExtension(log),
        validator: createCountingValidator(2),
        prepare: EMPTY_EXECUTE,
        execute: EMPTY_EXECUTE
      });
      sampler.sample().then( (_) => {
        // The driver extension's log stays empty: gc() was never called.
        expect(log).toEqual([]);
        done();
      });
    });

    it('should only collect metrics for execute and ignore metrics from prepare', (done) => {
      var scriptTime = 0;
      var iterationCount = 1;
      createSampler({
        validator: createCountingValidator(2),
        // endMeasure reports whatever scriptTime the last callback set, then
        // resets it — so only the value set by execute survives.
        metric: new MockMetric([], () => {
          var result = PromiseWrapper.resolve({'script': scriptTime});
          scriptTime = 0;
          return result;
        }),
        prepare: () => {
          scriptTime = 1 * iterationCount;
        },
        execute: () => {
          scriptTime = 10 * iterationCount;
          iterationCount++;
        }
      });
      sampler.sample().then( (state) => {
        expect(state.completeSample.length).toBe(2);
        expect(state.completeSample[0]).toEqual({'script': 10});
        expect(state.completeSample[1]).toEqual({'script': 20});
        done();
      });
    });

    it('should call the validator for every execution and store the valid sample', (done) => {
      var log = [];
      var validSample = [{}];
      createSampler({
        metric: createCountingMetric(),
        validator: createCountingValidator(2, validSample, log),
        execute: EMPTY_EXECUTE
      });
      sampler.sample().then( (state) => {
        expect(state.validSample).toBe(validSample);
        // TODO(tbosch): Why does this fail??
        // expect(log).toEqual([
        //   ['validate', [{'script': 0}], null],
        //   ['validate', [{'script': 0}, {'script': 1}], validSample]
        // ]);
        expect(log.length).toBe(2);
        expect(log[0]).toEqual(
          ['validate', [{'script': 0}], null]
        );
        expect(log[1]).toEqual(
          ['validate', [{'script': 0}, {'script': 1}], validSample]
        );
        done();
      });
    });

    it('should report the metric values', (done) => {
      var log = [];
      var validSample = [{}];
      createSampler({
        validator: createCountingValidator(2, validSample),
        metric: createCountingMetric(),
        reporter: new MockReporter(log),
        execute: EMPTY_EXECUTE
      });
      sampler.sample().then( (_) => {
        // TODO(tbosch): Why does this fail??
        // expect(log).toEqual([
        //   ['reportMeasureValues', 0, {'script': 0}],
        //   ['reportMeasureValues', 1, {'script': 1}],
        //   ['reportSample', [{'script': 0}, {'script': 1}], validSample]
        // ]);
        expect(log.length).toBe(3);
        expect(log[0]).toEqual(
          ['reportMeasureValues', 0, {'script': 0}]
        );
        expect(log[1]).toEqual(
          ['reportMeasureValues', 1, {'script': 1}]
        );
        expect(log[2]).toEqual(
          ['reportSample', [{'script': 0}, {'script': 1}], validSample]
        );
        done();
      });
    });
  });
}
// Returns a MockValidator that yields null until it has been called `count`
// times; the final call returns `validSample` (when given) or the complete
// sample it received, ending the sampling loop.
function createCountingValidator(count, validSample = null, log = null) {
  var remaining = count;
  return new MockValidator(log, (completeSample) => {
    remaining--;
    if (remaining !== 0) {
      return null;
    }
    return isPresent(validSample) ? validSample : completeSample;
  });
}
// Returns a MockMetric whose endMeasure reports an incrementing script time
// (0, 1, 2, ... across successive calls).
function createCountingMetric(log = null) {
  var calls = 0;
  return new MockMetric(log, () => ({ 'script': calls++ }));
}
// WebDriverAdapter stub. waitFor() either delegates to the injected hook or
// invokes the callback synchronously and resolves with its result.
class MockDriverAdapter extends WebDriverAdapter {
  _log:List;
  _waitFor:Function;
  constructor(log = null, waitFor = null) {
    super();
    this._log = isBlank(log) ? [] : log;
    this._waitFor = waitFor;
  }
  waitFor(callback:Function):Promise {
    if (isBlank(this._waitFor)) {
      return PromiseWrapper.resolve(callback());
    }
    return this._waitFor(callback);
  }
}
// WebDriverExtension stub that only records gc() invocations into the log.
class MockDriverExtension extends WebDriverExtension {
  _log:List;
  constructor(log = null) {
    super();
    this._log = isBlank(log) ? [] : log;
  }
  gc():Promise {
    ListWrapper.push(this._log, ['gc']);
    return PromiseWrapper.resolve(null);
  }
}
// Validator stub: delegates to the injected validate hook (or passes the
// sample straight through) and records each call as
// ['validate', completeSample, stableSample].
class MockValidator extends Validator {
  _validate:Function;
  _log:List;
  constructor(log = null, validate = null) {
    super();
    this._validate = validate;
    this._log = isBlank(log) ? [] : log;
  }
  validate(completeSample:List<Object>):List<Object> {
    var stableSample = isBlank(this._validate) ? completeSample : this._validate(completeSample);
    ListWrapper.push(this._log, ['validate', completeSample, stableSample]);
    return stableSample;
  }
}
// Metric stub: records begin/endMeasure calls; endMeasure values come from
// the injected hook, or {} when none was provided.
class MockMetric extends Metric {
  _endMeasure:Function;
  _log:List;
  constructor(log = null, endMeasure = null) {
    super();
    this._endMeasure = endMeasure;
    this._log = isBlank(log) ? [] : log;
  }
  beginMeasure() {
    ListWrapper.push(this._log, ['beginMeasure']);
    return PromiseWrapper.resolve(null);
  }
  endMeasure(restart) {
    var measureValues = isBlank(this._endMeasure) ? {} : this._endMeasure();
    ListWrapper.push(this._log, ['endMeasure', restart, measureValues]);
    return PromiseWrapper.resolve(measureValues);
  }
}
// Reporter stub that records each report call (with its arguments) into the log.
class MockReporter extends Reporter {
  _log:List;
  constructor(log = null) {
    super();
    this._log = isBlank(log) ? [] : log;
  }
  reportMeasureValues(index, values):Promise {
    ListWrapper.push(this._log, ['reportMeasureValues', index, values]);
    return PromiseWrapper.resolve(null);
  }
  reportSample(completeSample, validSample):Promise {
    ListWrapper.push(this._log, ['reportSample', completeSample, validSample]);
    return PromiseWrapper.resolve(null);
  }
}

View File

@ -0,0 +1,34 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import { Statistic } from 'benchpress/src/statistic';
import { NaN } from 'angular2/src/facade/math';
// Spec for the Statistic helpers (mean, stddev, CV, regression slope).
export function main() {
  describe('statistic', () => {
    it('should calculate the mean', () => {
      // An empty sample yields NaN, not 0.
      expect(Statistic.calculateMean([])).toBeNaN();
      expect(Statistic.calculateMean([1,2,3])).toBe(2.0);
    });
    it('should calculate the standard deviation', () => {
      // The second argument is the precomputed mean of the sample.
      expect(Statistic.calculateStandardDeviation([], NaN)).toBeNaN();
      expect(Statistic.calculateStandardDeviation([1], 1)).toBe(0.0);
      expect(Statistic.calculateStandardDeviation([2, 4, 4, 4, 5, 5, 7, 9], 5)).toBe(2.0);
    });
    it('should calculate the coefficient of variation', () => {
      // CV is expressed as a percentage (stddev/mean * 100).
      expect(Statistic.calculateCoefficientOfVariation([], NaN)).toBeNaN();
      expect(Statistic.calculateCoefficientOfVariation([1], 1)).toBe(0.0);
      expect(Statistic.calculateCoefficientOfVariation([2, 4, 4, 4, 5, 5, 7, 9], 5)).toBe(40.0);
    });
    it('should calculate the regression slope', () => {
      // Slope is NaN for empty and single-point samples.
      expect(Statistic.calculateRegressionSlope([], NaN, [], NaN)).toBeNaN();
      expect(Statistic.calculateRegressionSlope([1], 1, [2], 2)).toBeNaN();
      expect(Statistic.calculateRegressionSlope([1,2], 1.5, [2,4], 3)).toBe(2.0);
    });
  });
}

View File

@ -0,0 +1,51 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import {
Validator, RegressionSlopeValidator, Injector, bind
} from 'benchpress/benchpress';
// Spec for RegressionSlopeValidator: a sample is valid once its last
// `sampleSize` measurements show a non-negative slope for the chosen metric.
export function main() {
  describe('regression slope validator', () => {
    var validator;

    // Builds the validator with the given window size and metric name.
    function createValidator({size, metric}) {
      validator = new Injector([
        RegressionSlopeValidator.BINDINGS,
        bind(RegressionSlopeValidator.METRIC).toValue(metric),
        bind(RegressionSlopeValidator.SAMPLE_SIZE).toValue(size)
      ]).get(Validator);
    }

    it('should return sampleSize and metric as description', () => {
      createValidator({size: 2, metric: 'script'});
      expect(validator.describe()).toEqual({
        'sampleSize': 2,
        'regressionSlopeMetric': 'script'
      });
    });

    it('should return null while the completeSample is smaller than the given size', () => {
      createValidator({size: 2, metric: 'script'});
      expect(validator.validate([])).toBe(null);
      expect(validator.validate([{}])).toBe(null);
    });

    it('should return null while the regression slope is < 0', () => {
      // Decreasing values (still warming up / improving) are not yet stable.
      createValidator({size: 2, metric: 'script'});
      expect(validator.validate([{'script':2}, {'script':1}])).toBe(null);
    });

    it('should return the last sampleSize runs when the regression slope is ==0', () => {
      createValidator({size: 2, metric: 'script'});
      expect(validator.validate([{'script':1}, {'script':1}])).toEqual([{'script':1}, {'script':1}]);
      expect(validator.validate([{'script':1}, {'script':1}, {'script':1}])).toEqual([{'script':1}, {'script':1}]);
    });

    it('should return the last sampleSize runs when the regression slope is >0', () => {
      createValidator({size: 2, metric: 'script'});
      expect(validator.validate([{'script':1}, {'script':2}])).toEqual([{'script':1}, {'script':2}]);
      expect(validator.validate([{'script':1}, {'script':2}, {'script':3}])).toEqual([{'script':2}, {'script':3}]);
    });
  });
}

View File

@ -0,0 +1,38 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import {
Validator, SizeValidator, Injector, bind
} from 'benchpress/benchpress';
// Spec for SizeValidator: a sample is valid as soon as it contains at least
// `sampleSize` measurements; the last `sampleSize` entries are returned.
export function main() {
  describe('size validator', () => {
    var validator;

    // Builds the validator with the given required sample size.
    function createValidator(size) {
      validator = new Injector([
        SizeValidator.BINDINGS,
        bind(SizeValidator.SAMPLE_SIZE).toValue(size)
      ]).get(Validator);
    }

    it('should return sampleSize as description', () => {
      createValidator(2);
      expect(validator.describe()).toEqual({
        'sampleSize': 2
      });
    });

    it('should return null while the completeSample is smaller than the given size', () => {
      createValidator(2);
      expect(validator.validate([])).toBe(null);
      expect(validator.validate([{}])).toBe(null);
    });

    it('should return the last sampleSize runs when it has at least the given size', () => {
      createValidator(2);
      expect(validator.validate([{'a':1}, {'b':2}])).toEqual([{'a':1}, {'b':2}]);
      expect(validator.validate([{'a':1}, {'b':2}, {'c':3}])).toEqual([{'b':2}, {'c':3}]);
    });
  });
}

View File

@ -0,0 +1,267 @@
import {describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import { ListWrapper } from 'angular2/src/facade/collection';
import { PromiseWrapper } from 'angular2/src/facade/async';
import { Json, perfRecords, isBlank } from 'angular2/src/facade/lang';
import {
WebDriverExtension, ChromeDriverExtension,
WebDriverAdapter, Injector, bind
} from 'benchpress/benchpress';
// Spec for ChromeDriverExtension: asserts the scripts it executes on the
// browser and how it normalizes Chrome timeline records into benchpress events.
export function main() {
  describe('chrome driver extension', () => {
    var log;
    var extension;

    // Builds the extension backed by a MockDriverAdapter that records calls
    // into `log` and serves `perfRecords` from logs('performance').
    function createExtension(perfRecords = null) {
      if (isBlank(perfRecords)) {
        perfRecords = [];
      }
      log = [];
      extension = new Injector([
        ChromeDriverExtension.BINDINGS,
        bind(WebDriverAdapter).toValue(new MockDriverAdapter(log, perfRecords))
      ]).get(WebDriverExtension);
      return extension;
    }

    it('should force gc via window.gc()', (done) => {
      createExtension().gc().then( (_) => {
        expect(log).toEqual([['executeScript', 'window.gc()']]);
        done();
      });
    });

    it('should mark the timeline via console.timeStamp()', (done) => {
      createExtension().timeBegin('someName').then( (_) => {
        expect(log).toEqual([['executeScript', `console.timeStamp('begin_someName');`]]);
        done();
      });
    });

    it('should mark the timeline via console.timeEnd()', (done) => {
      createExtension().timeEnd('someName').then( (_) => {
        expect(log).toEqual([['executeScript', `console.timeStamp('end_someName');`]]);
        done();
      });
    });

    it('should mark the timeline via console.time() and console.timeEnd()', (done) => {
      // Ending one measurement while starting the next in a single script.
      createExtension().timeEnd('name1', 'name2').then( (_) => {
        expect(log).toEqual([['executeScript', `console.timeStamp('end_name1');console.timeStamp('begin_name2');`]]);
        done();
      });
    });

    describe('readPerfLog', () => {

      it('should execute a dummy script before reading them', (done) => {
        // TODO(tbosch): This seems to be a bug in ChromeDriver:
        // Sometimes it does not report the newest events of the performance log
        // to the WebDriver client unless a script is executed...
        createExtension([]).readPerfLog().then( (_) => {
          expect(log).toEqual([ [ 'executeScript', '1+1' ], [ 'logs', 'performance' ] ]);
          done();
        });
      });

      it('should report FunctionCall records as "script"', (done) => {
        createExtension([
          durationRecord('FunctionCall', 1, 5)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            startEvent('script', 1),
            endEvent('script', 5)
          ]);
          done();
        });
      });

      it('should ignore FunctionCalls from webdriver', (done) => {
        // Records whose scriptName is 'InjectedScript' come from the driver itself.
        createExtension([
          internalScriptRecord(1, 5)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([]);
          done();
        });
      });

      it('should report begin timestamps', (done) => {
        createExtension([
          timeStampRecord('begin_someName')
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            markStartEvent('someName')
          ]);
          done();
        });
      });

      it('should report end timestamps', (done) => {
        createExtension([
          timeStampRecord('end_someName')
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            markEndEvent('someName')
          ]);
          done();
        });
      });

      it('should report gc', (done) => {
        createExtension([
          gcRecord(1, 3, 21)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            startEvent('gc', 1),
            endEvent('gc', 3, {'amount': 21}),
          ]);
          done();
        });
      });

      // All rendering-related record types map to the single 'render' metric.
      ['RecalculateStyles', 'Layout', 'UpdateLayerTree', 'Paint', 'Rasterize', 'CompositeLayers'].forEach( (recordType) => {
        it(`should report ${recordType}`, (done) => {
          createExtension([
            durationRecord(recordType, 0, 1)
          ]).readPerfLog().then( (events) => {
            expect(events).toEqual([
              startEvent('render', 0),
              endEvent('render', 1),
            ]);
            done();
          });
        });
      });

      it('should walk children', (done) => {
        // Nested records are emitted between their parent's start/end events.
        createExtension([
          durationRecord('FunctionCall', 1, 5, [
            timeStampRecord('begin_someName')
          ])
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            startEvent('script', 1),
            markStartEvent('someName'),
            endEvent('script', 5)
          ]);
          done();
        });
      });
    });
  });
}
// Chrome timeline TimeStamp record carrying `name` as its console message.
function timeStampRecord(name) {
  return {'type': 'TimeStamp', 'data': {'message': name}};
}
// Chrome timeline record of the given type spanning [startTime, endTime];
// `children` defaults to an empty list when blank.
function durationRecord(type, startTime, endTime, children = null) {
  var childRecords = isBlank(children) ? [] : children;
  return {
    'type': type,
    'startTime': startTime,
    'endTime': endTime,
    'children': childRecords
  };
}
// FunctionCall record attributed to webdriver's own injected script, which
// the extension is expected to filter out.
function internalScriptRecord(startTime, endTime) {
  return {
    'type': 'FunctionCall',
    'startTime': startTime,
    'endTime': endTime,
    'data': {'scriptName': 'InjectedScript'}
  };
}
// GCEvent record spanning [startTime, endTime] that freed `gcAmount` bytes
// (reported by Chrome as usedHeapSizeDelta).
function gcRecord(startTime, endTime, gcAmount) {
  return {
    'type': 'GCEvent',
    'startTime': startTime,
    'endTime': endTime,
    'data': {'usedHeapSizeDelta': gcAmount}
  };
}
// Expected benchpress event for the start of a named mark (phase 'b').
function markStartEvent(type) {
  return {'name': type, 'ph': 'b'};
}
// Expected benchpress event for the end of a named mark (phase 'e').
function markEndEvent(type) {
  return {'name': type, 'ph': 'e'};
}
// Expected benchpress duration-start event (phase 'B') at timestamp `time`.
function startEvent(type, time) {
  return {'name': type, 'ts': time, 'ph': 'B'};
}
// Expected benchpress duration-end event (phase 'E') at timestamp `time`;
// `args` carries optional metadata and defaults to null.
function endEvent(type, time, args = null) {
  return {'name': type, 'ts': time, 'ph': 'E', 'args': args};
}
// WebDriverAdapter stub: records executeScript/logs calls and, for
// logs('performance'), wraps each canned record in the chromedriver
// performance-log envelope (a JSON 'Timeline.eventRecorded' message).
class MockDriverAdapter extends WebDriverAdapter {
  _log:List;
  _perfRecords:List;
  constructor(log, perfRecords) {
    super();
    this._log = log;
    this._perfRecords = perfRecords;
  }
  executeScript(script) {
    ListWrapper.push(this._log, ['executeScript', script]);
    return PromiseWrapper.resolve(null);
  }
  logs(type) {
    ListWrapper.push(this._log, ['logs', type]);
    if (type !== 'performance') {
      return null;
    }
    var entries = this._perfRecords.map((record) => {
      return {
        'message': Json.stringify({
          'message': {
            'method': 'Timeline.eventRecorded',
            'params': {
              'record': record
            }
          }
        })
      };
    });
    return PromiseWrapper.resolve(entries);
  }
}