2  modules/benchpress/test/firefox_extension/conf.dart  Normal file
@@ -0,0 +1,2 @@
library benchpress.test.firefox_extension.conf;
//empty as we don't have a version for dart
@@ -1,3 +1,4 @@
/// <reference path="../../../angular2/typings/node/node.d.ts" />
var testHelper = require('../../src/firefox_extension/lib/test_helper.js');

// Where to save profile results (parent folder must exist)
@@ -8,11 +9,7 @@ exports.config = {

  specs: ['spec.js'],

  getMultiCapabilities: function() {
    return testHelper.getFirefoxProfileWithExtension();
  },
  getMultiCapabilities: function() { return testHelper.getFirefoxProfileWithExtension(); },

  params: {
    profileSavePath: testHelper.getAbsolutePath(PROFILE_SAVE_PATH)
  }
  params: {profileSavePath: testHelper.getAbsolutePath(PROFILE_SAVE_PATH)}
};
2  modules/benchpress/test/firefox_extension/spec.dart  Normal file
@@ -0,0 +1,2 @@
library benchpress.test.firefox_extension.spec;
//no dart implementation
@@ -1,3 +1,7 @@
/// <reference path="../../../angular2/typings/node/node.d.ts" />
/// <reference path="../../../angular2/typings/angular-protractor/angular-protractor.d.ts" />
/// <reference path="../../../angular2/typings/jasmine/jasmine.d.ts" />

var fs = require('fs');

var validateFile = function() {
@@ -5,7 +9,7 @@ var validateFile = function() {
  var content = fs.readFileSync(browser.params.profileSavePath, 'utf8');
  // TODO(hankduan): This check not very useful. Ideally we want to validate
  // that the file contains all the events that we are looking for. Pending
  // on data transformer.
  expect(content).toContain('forceGC');
  // Delete file
  fs.unlinkSync(browser.params.profileSavePath);
@@ -21,14 +25,13 @@ var validateFile = function() {
};

describe('firefox extension', function() {
  it ('should measure performance', function() {
    browser.sleep(3000); // wait for extension to load
  it('should measure performance', function() {
    browser.sleep(3000);  // wait for extension to load

    browser.driver.get('http://www.angularjs.org');

    browser.executeScript('window.startProfiler()').then(function() {
      console.log('started measuring perf');
    });
    browser.executeScript('window.startProfiler()')
        .then(function() { console.log('started measuring perf'); });

    browser.executeScript('window.forceGC()');

@@ -38,13 +41,10 @@ describe('firefox extension', function() {

    browser.executeScript('window.forceGC()');

    var script =
        'window.stopAndRecord("' + browser.params.profileSavePath + '")';
    browser.executeScript(script).then(function() {
      console.log('stopped measuring perf');
    });
    var script = 'window.stopAndRecord("' + browser.params.profileSavePath + '")';
    browser.executeScript(script).then(function() { console.log('stopped measuring perf'); });

    // wait for it to finish, then validate file.
    browser.sleep(3000).then(validateFile);
  })
});
@@ -1,91 +0,0 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import { List, ListWrapper, StringMap } from 'angular2/src/facade/collection';
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';

import { Metric, MultiMetric, bind, Injector } from 'benchpress/common';

export function main() {
  function createMetric(ids) {
    return Injector.resolveAndCreate([
      ListWrapper.map(ids, (id) => bind(id).toValue(new MockMetric(id)) ),
      MultiMetric.createBindings(ids)
    ]).asyncGet(MultiMetric);
  }

  describe('multi metric', () => {

    it('should merge descriptions', inject([AsyncTestCompleter], (async) => {
      createMetric(['m1', 'm2']).then( (m) => {
        expect(m.describe()).toEqual({
          'm1': 'describe', 'm2': 'describe'
        });
        async.done();
      });
    }));

    it('should merge all beginMeasure calls', inject([AsyncTestCompleter], (async) => {
      createMetric(['m1', 'm2'])
        .then( (m) => m.beginMeasure() )
        .then( (values) => {
          expect(values).toEqual([
            'm1_beginMeasure', 'm2_beginMeasure'
          ]);
          async.done();
        });
    }));

    [false, true].forEach( (restartFlag) => {
      it(`should merge all endMeasure calls for restart=${restartFlag}`, inject([AsyncTestCompleter], (async) => {
        createMetric(['m1', 'm2'])
          .then( (m) => m.endMeasure(restartFlag) )
          .then( (values) => {
            expect(values).toEqual({
              'm1': { 'restart': restartFlag },
              'm2': { 'restart': restartFlag }
            });
            async.done();
          });
      }));
    });

  });
}

class MockMetric extends Metric {
  _id:string;

  constructor(id) {
    super();
    this._id = id;
  }

  beginMeasure():Promise {
    return PromiseWrapper.resolve(`${this._id}_beginMeasure`);
  }

  endMeasure(restart:boolean):Promise<StringMap> {
    var result = {};
    result[this._id] = {
      'restart': restart
    };
    return PromiseWrapper.resolve(result);
  }

  describe():StringMap {
    var result = {};
    result[this._id] = 'describe';
    return result;
  }
}
86  modules/benchpress/test/metric/multi_metric_spec.ts  Normal file
@@ -0,0 +1,86 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import {List, ListWrapper, StringMap} from 'angular2/src/facade/collection';
import {PromiseWrapper, Promise} from 'angular2/src/facade/async';

import {Metric, MultiMetric, bind, Injector} from 'benchpress/common';

export function main() {
  function createMetric(ids) {
    return Injector.resolveAndCreate([
      ListWrapper.map(ids, (id) => bind(id).toValue(new MockMetric(id))),
      MultiMetric.createBindings(ids)
    ])
        .asyncGet(MultiMetric);
  }

  describe('multi metric', () => {

    it('should merge descriptions', inject([AsyncTestCompleter], (async) => {
         createMetric(['m1', 'm2'])
             .then((m) => {
               expect(m.describe()).toEqual({'m1': 'describe', 'm2': 'describe'});
               async.done();
             });
       }));

    it('should merge all beginMeasure calls', inject([AsyncTestCompleter], (async) => {
         createMetric(['m1', 'm2'])
             .then((m) => m.beginMeasure())
             .then((values) => {
               expect(values).toEqual(['m1_beginMeasure', 'm2_beginMeasure']);
               async.done();
             });
       }));

    [false, true].forEach((restartFlag) => {
      it(`should merge all endMeasure calls for restart=${restartFlag}`,
         inject([AsyncTestCompleter], (async) => {
           createMetric(['m1', 'm2'])
               .then((m) => m.endMeasure(restartFlag))
               .then((values) => {
                 expect(values)
                     .toEqual({'m1': {'restart': restartFlag}, 'm2': {'restart': restartFlag}});
                 async.done();
               });
         }));
    });

  });
}

class MockMetric extends Metric {
  _id: string;

  constructor(id) {
    super();
    this._id = id;
  }

  beginMeasure(): Promise<string> { return PromiseWrapper.resolve(`${this._id}_beginMeasure`); }

  endMeasure(restart: boolean): Promise<StringMap<string, any>> {
    var result = {};
    result[this._id] = {
      'restart': restart
    };
    return PromiseWrapper.resolve(result);
  }

  describe(): StringMap<string, string> {
    var result = {};
    result[this._id] = 'describe';
    return result;
  }
}
@@ -1,485 +0,0 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import { List, ListWrapper, StringMapWrapper } from 'angular2/src/facade/collection';
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
import { isPresent, isBlank } from 'angular2/src/facade/lang';

import {
  Metric, PerflogMetric, WebDriverExtension,
  PerfLogFeatures,
  bind, Injector, Options
} from 'benchpress/common';

import { TraceEventFactory } from '../trace_event_factory';

export function main() {
  var commandLog;
  var eventFactory = new TraceEventFactory('timeline', 'pid0');

  function createMetric(perfLogs, microMetrics = null, perfLogFeatures = null, forceGc = null) {
    commandLog = [];
    if (isBlank(perfLogFeatures)) {
      perfLogFeatures = new PerfLogFeatures({render: true, gc: true});
    }
    if (isBlank(microMetrics)) {
      microMetrics = StringMapWrapper.create();
    }
    var bindings = [
      Options.DEFAULT_BINDINGS,
      PerflogMetric.BINDINGS,
      bind(Options.MICRO_METRICS).toValue(microMetrics),
      bind(PerflogMetric.SET_TIMEOUT).toValue( (fn, millis) => {
        ListWrapper.push(commandLog, ['setTimeout', millis]);
        fn();
      }),
      bind(WebDriverExtension).toValue(new MockDriverExtension(perfLogs, commandLog, perfLogFeatures))
    ];
    if (isPresent(forceGc)) {
      ListWrapper.push(bindings, bind(Options.FORCE_GC).toValue(forceGc));
    }
    return Injector.resolveAndCreate(bindings).get(PerflogMetric);
  }

  describe('perflog metric', () => {

    function sortedKeys(stringMap) {
      var res = [];
      StringMapWrapper.forEach(stringMap, (_, key) => {
        ListWrapper.push(res, key);
      });
      res.sort();
      return res;
    }

    it('should describe itself based on the perfLogFeatrues', () => {
      expect(sortedKeys(createMetric([[]], null, new PerfLogFeatures()).describe())).toEqual([
        'pureScriptTime', 'scriptTime'
      ]);

      expect(sortedKeys(createMetric([[]], null, new PerfLogFeatures({
        render: true,
        gc: false
      })).describe())).toEqual([
        'pureScriptTime', 'renderTime', 'scriptTime'
      ]);

      expect(sortedKeys(createMetric([[]]).describe())).toEqual([
        'gcAmount', 'gcTime', 'majorGcTime',
        'pureScriptTime', 'renderTime', 'scriptTime'
      ]);

      expect(sortedKeys(createMetric([[]], null, new PerfLogFeatures({
        render: true,
        gc: true
      }), true).describe())).toEqual([
        'forcedGcAmount', 'forcedGcTime',
        'gcAmount', 'gcTime', 'majorGcTime',
        'pureScriptTime', 'renderTime', 'scriptTime'
      ]);
    });

    it('should describe itself based on micro metrics', () => {
      var description = createMetric([[]], {
        'myMicroMetric': 'someDesc'
      }).describe();
      expect(description['myMicroMetric']).toEqual('someDesc');
    });

    describe('beginMeasure', () => {

      it('should not force gc and mark the timeline', inject([AsyncTestCompleter], (async) => {
        var metric = createMetric([[]]);
        metric.beginMeasure().then((_) => {
          expect(commandLog).toEqual([['timeBegin', 'benchpress0']]);

          async.done();
        });
      }));

      it('should force gc and mark the timeline', inject([AsyncTestCompleter], (async) => {
        var metric = createMetric([[]], null, null, true);
        metric.beginMeasure().then((_) => {
          expect(commandLog).toEqual([['gc'], ['timeBegin', 'benchpress0']]);

          async.done();
        });
      }));

    });

    describe('endMeasure', () => {

      it('should mark and aggregate events in between the marks', inject([AsyncTestCompleter], (async) => {
        var events = [
          [
            eventFactory.markStart('benchpress0', 0),
            eventFactory.start('script', 4),
            eventFactory.end('script', 6),
            eventFactory.markEnd('benchpress0', 10)
          ]
        ];
        var metric = createMetric(events);
        metric.beginMeasure()
          .then( (_) => metric.endMeasure(false) )
          .then( (data) => {
            expect(commandLog).toEqual([
              ['timeBegin', 'benchpress0'],
              ['timeEnd', 'benchpress0', null],
              'readPerfLog'
            ]);
            expect(data['scriptTime']).toBe(2);

            async.done();
          });
      }));

      it('should restart timing', inject([AsyncTestCompleter], (async) => {
        var events = [
          [
            eventFactory.markStart('benchpress0', 0),
            eventFactory.markEnd('benchpress0', 1),
            eventFactory.markStart('benchpress1', 2),
          ], [
            eventFactory.markEnd('benchpress1', 3)
          ]
        ];
        var metric = createMetric(events);
        metric.beginMeasure()
          .then( (_) => metric.endMeasure(true) )
          .then( (_) => metric.endMeasure(true) )
          .then( (_) => {
            expect(commandLog).toEqual([
              ['timeBegin', 'benchpress0'],
              ['timeEnd', 'benchpress0', 'benchpress1'],
              'readPerfLog',
              ['timeEnd', 'benchpress1', 'benchpress2'],
              'readPerfLog'
            ]);

            async.done();
          });
      }));

      it('should loop and aggregate until the end mark is present', inject([AsyncTestCompleter], (async) => {
        var events = [
          [ eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 1) ],
          [ eventFactory.end('script', 2) ],
          [ eventFactory.start('script', 3), eventFactory.end('script', 5), eventFactory.markEnd('benchpress0', 10) ]
        ];
        var metric = createMetric(events);
        metric.beginMeasure()
          .then( (_) => metric.endMeasure(false) )
          .then( (data) => {
            expect(commandLog).toEqual([
              ['timeBegin', 'benchpress0'],
              ['timeEnd', 'benchpress0', null],
              'readPerfLog',
              [ 'setTimeout', 100 ],
              'readPerfLog',
              [ 'setTimeout', 100 ],
              'readPerfLog'
            ]);
            expect(data['scriptTime']).toBe(3);

            async.done();
          });
      }));

      it('should store events after the end mark for the next call', inject([AsyncTestCompleter], (async) => {
        var events = [
          [ eventFactory.markStart('benchpress0', 0), eventFactory.markEnd('benchpress0', 1), eventFactory.markStart('benchpress1', 1),
            eventFactory.start('script', 1), eventFactory.end('script', 2) ],
          [ eventFactory.start('script', 3), eventFactory.end('script', 5), eventFactory.markEnd('benchpress1', 6) ]
        ];
        var metric = createMetric(events);
        metric.beginMeasure()
          .then( (_) => metric.endMeasure(true) )
          .then( (data) => {
            expect(data['scriptTime']).toBe(0);
            return metric.endMeasure(true)
          })
          .then( (data) => {
            expect(commandLog).toEqual([
              ['timeBegin', 'benchpress0'],
              ['timeEnd', 'benchpress0', 'benchpress1'],
              'readPerfLog',
              ['timeEnd', 'benchpress1', 'benchpress2'],
              'readPerfLog'
            ]);
            expect(data['scriptTime']).toBe(3);

            async.done();
          });
      }));

      describe('with forced gc', () => {
        var events;
        beforeEach( () => {
          events = [
            [
              eventFactory.markStart('benchpress0', 0),
              eventFactory.start('script', 4),
              eventFactory.end('script', 6),
              eventFactory.markEnd('benchpress0', 10),
              eventFactory.markStart('benchpress1', 11),
              eventFactory.start('gc', 12, {'usedHeapSize': 2500}),
              eventFactory.end('gc', 15, {'usedHeapSize': 1000}),
              eventFactory.markEnd('benchpress1', 20)
            ]
          ];
        });

        it('should measure forced gc', inject([AsyncTestCompleter], (async) => {
          var metric = createMetric(events, null, null, true);
          metric.beginMeasure()
            .then( (_) => metric.endMeasure(false) )
            .then( (data) => {
              expect(commandLog).toEqual([
                ['gc'],
                ['timeBegin', 'benchpress0'],
                ['timeEnd', 'benchpress0', 'benchpress1'],
                'readPerfLog',
                ['gc'],
                ['timeEnd', 'benchpress1', null],
                'readPerfLog'
              ]);
              expect(data['forcedGcTime']).toBe(3);
              expect(data['forcedGcAmount']).toBe(1.5);

              async.done();
            });
        }));

        it('should restart after the forced gc if needed', inject([AsyncTestCompleter], (async) => {
          var metric = createMetric(events, null, null, true);
          metric.beginMeasure()
            .then( (_) => metric.endMeasure(true) )
            .then( (data) => {
              expect(commandLog[5]).toEqual(['timeEnd', 'benchpress1', 'benchpress2']);

              async.done();
            });
        }));

      });

    });

    describe('aggregation', () => {

      function aggregate(events, microMetrics = null) {
        ListWrapper.insert(events, 0, eventFactory.markStart('benchpress0', 0));
        ListWrapper.push(events, eventFactory.markEnd('benchpress0', 10));
        var metric = createMetric([events], microMetrics);
        return metric
          .beginMeasure().then( (_) => metric.endMeasure(false) );
      }


      it('should report a single interval', inject([AsyncTestCompleter], (async) => {
        aggregate([
          eventFactory.start('script', 0),
          eventFactory.end('script', 5)
        ]).then((data) => {
          expect(data['scriptTime']).toBe(5);
          async.done();
        });
      }));

      it('should sum up multiple intervals', inject([AsyncTestCompleter], (async) => {
        aggregate([
          eventFactory.start('script', 0),
          eventFactory.end('script', 5),
          eventFactory.start('script', 10),
          eventFactory.end('script', 17)
        ]).then((data) => {
          expect(data['scriptTime']).toBe(12);
          async.done();
        });
      }));

      it('should ignore not started intervals', inject([AsyncTestCompleter], (async) => {
        aggregate([
          eventFactory.end('script', 10)
        ]).then((data) => {
          expect(data['scriptTime']).toBe(0);
          async.done();
        });
      }));

      it('should ignore not ended intervals', inject([AsyncTestCompleter], (async) => {
        aggregate([
          eventFactory.start('script', 10)
        ]).then((data) => {
          expect(data['scriptTime']).toBe(0);
          async.done();
        });
      }));

      it('should ignore events from different processed as the start mark', inject([AsyncTestCompleter], (async) => {
        var otherProcessEventFactory = new TraceEventFactory('timeline', 'pid1');
        var metric = createMetric([[
          eventFactory.markStart('benchpress0', 0),
          eventFactory.start('script', 0, null),
          eventFactory.end('script', 5, null),
          otherProcessEventFactory.start('script', 10, null),
          otherProcessEventFactory.end('script', 17, null),
          eventFactory.markEnd('benchpress0', 20)
        ]]);
        metric.beginMeasure()
          .then( (_) => metric.endMeasure(false) )
          .then((data) => {
            expect(data['scriptTime']).toBe(5);
            async.done();
          });
      }));

      it('should support scriptTime metric', inject([AsyncTestCompleter], (async) => {
        aggregate([
          eventFactory.start('script', 0),
          eventFactory.end('script', 5)
        ]).then((data) => {
          expect(data['scriptTime']).toBe(5);
          async.done();
        });
      }));

      it('should support renderTime metric', inject([AsyncTestCompleter], (async) => {
        aggregate([
          eventFactory.start('render', 0),
          eventFactory.end('render', 5)
        ]).then((data) => {
          expect(data['renderTime']).toBe(5);
          async.done();
        });
      }));

      it('should support gcTime/gcAmount metric', inject([AsyncTestCompleter], (async) => {
        aggregate([
          eventFactory.start('gc', 0, {'usedHeapSize': 2500}),
          eventFactory.end('gc', 5, {'usedHeapSize': 1000})
        ]).then((data) => {
          expect(data['gcTime']).toBe(5);
          expect(data['gcAmount']).toBe(1.5);
          expect(data['majorGcTime']).toBe(0);
          async.done();
        });
      }));

      it('should support majorGcTime metric', inject([AsyncTestCompleter], (async) => {
        aggregate([
          eventFactory.start('gc', 0, {'usedHeapSize': 2500}),
          eventFactory.end('gc', 5, {'usedHeapSize': 1000, 'majorGc': true})
        ]).then((data) => {
          expect(data['gcTime']).toBe(5);
          expect(data['majorGcTime']).toBe(5);
          async.done();
        });
      }));

      it('should support pureScriptTime = scriptTime-gcTime-renderTime', inject([AsyncTestCompleter], (async) => {
        aggregate([
          eventFactory.start('script', 0),
          eventFactory.start('gc', 1, {'usedHeapSize': 1000}),
          eventFactory.end('gc', 4, {'usedHeapSize': 0}),
          eventFactory.start('render', 4),
          eventFactory.end('render', 5),
          eventFactory.end('script', 6)
        ]).then((data) => {
          expect(data['scriptTime']).toBe(6);
          expect(data['pureScriptTime']).toBe(2);
          async.done();
        });
      }));

      describe('microMetrics', () => {

        it('should report micro metrics', inject([AsyncTestCompleter], (async) => {
          aggregate([
            eventFactory.markStart('mm1', 0),
            eventFactory.markEnd('mm1', 5),
          ], {'mm1': 'micro metric 1'}).then((data) => {
            expect(data['mm1']).toBe(5.0);
            async.done();
          });
        }));

        it('should ignore micro metrics that were not specified', inject([AsyncTestCompleter], (async) => {
          aggregate([
            eventFactory.markStart('mm1', 0),
            eventFactory.markEnd('mm1', 5),
          ]).then((data) => {
            expect(data['mm1']).toBeFalsy();
            async.done();
          });
        }));

        it('should report micro metric averages', inject([AsyncTestCompleter], (async) => {
          aggregate([
            eventFactory.markStart('mm1*20', 0),
            eventFactory.markEnd('mm1*20', 5),
          ], {'mm1': 'micro metric 1'}).then((data) => {
            expect(data['mm1']).toBe(5/20);
            async.done();
          });
        }));

      });

    });

  });
}

class MockDriverExtension extends WebDriverExtension {
  _perfLogs:List;
  _commandLog:List;
  _perfLogFeatures:PerfLogFeatures;
  constructor(perfLogs, commandLog, perfLogFeatures) {
    super();
    this._perfLogs = perfLogs;
    this._commandLog = commandLog;
    this._perfLogFeatures = perfLogFeatures;
  }

  timeBegin(name):Promise {
    ListWrapper.push(this._commandLog, ['timeBegin', name]);
    return PromiseWrapper.resolve(null);
  }

  timeEnd(name, restartName):Promise {
    ListWrapper.push(this._commandLog, ['timeEnd', name, restartName]);
    return PromiseWrapper.resolve(null);
  }

  perfLogFeatures():PerfLogFeatures {
    return this._perfLogFeatures;
  }

  readPerfLog():Promise {
    ListWrapper.push(this._commandLog, 'readPerfLog');
    if (this._perfLogs.length > 0) {
      var next = this._perfLogs[0];
      ListWrapper.removeAt(this._perfLogs, 0);
      return PromiseWrapper.resolve(next);
    } else {
      return PromiseWrapper.resolve([]);
    }
  }

  gc():Promise {
    ListWrapper.push(this._commandLog, ['gc']);
    return PromiseWrapper.resolve(null);
  }
}
501  modules/benchpress/test/metric/perflog_metric_spec.ts  Normal file
@@ -0,0 +1,501 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import {List, ListWrapper, StringMapWrapper} from 'angular2/src/facade/collection';
import {PromiseWrapper, Promise} from 'angular2/src/facade/async';
import {isPresent, isBlank} from 'angular2/src/facade/lang';

import {
  Metric,
  PerflogMetric,
  WebDriverExtension,
  PerfLogFeatures,
  bind,
  Injector,
  Options
} from 'benchpress/common';

import {TraceEventFactory} from '../trace_event_factory';

export function main() {
  var commandLog;
  var eventFactory = new TraceEventFactory('timeline', 'pid0');

  function createMetric(perfLogs, microMetrics = null, perfLogFeatures = null, forceGc = null) {
    commandLog = [];
    if (isBlank(perfLogFeatures)) {
      perfLogFeatures = new PerfLogFeatures({render: true, gc: true});
    }
    if (isBlank(microMetrics)) {
      microMetrics = StringMapWrapper.create();
    }
    var bindings = [
      Options.DEFAULT_BINDINGS,
      PerflogMetric.BINDINGS,
      bind(Options.MICRO_METRICS).toValue(microMetrics),
      bind(PerflogMetric.SET_TIMEOUT)
          .toValue((fn, millis) =>
                   {
                     ListWrapper.push(commandLog, ['setTimeout', millis]);
                     fn();
                   }),
      bind(WebDriverExtension)
          .toValue(new MockDriverExtension(perfLogs, commandLog, perfLogFeatures))
    ];
    if (isPresent(forceGc)) {
      ListWrapper.push(bindings, bind(Options.FORCE_GC).toValue(forceGc));
    }
    return Injector.resolveAndCreate(bindings).get(PerflogMetric);
  }

  describe('perflog metric', () => {

    function sortedKeys(stringMap) {
      var res = [];
      StringMapWrapper.forEach(stringMap, (_, key) => { ListWrapper.push(res, key); });
      res.sort();
      return res;
    }

    it('should describe itself based on the perfLogFeatrues', () => {
      expect(sortedKeys(createMetric([[]], null, new PerfLogFeatures()).describe()))
          .toEqual(['pureScriptTime', 'scriptTime']);

      expect(sortedKeys(createMetric([[]], null, new PerfLogFeatures({render: true, gc: false}))
                            .describe()))
          .toEqual(['pureScriptTime', 'renderTime', 'scriptTime']);

      expect(sortedKeys(createMetric([[]]).describe()))
          .toEqual(
              ['gcAmount', 'gcTime', 'majorGcTime', 'pureScriptTime', 'renderTime', 'scriptTime']);

      expect(
          sortedKeys(createMetric([[]], null, new PerfLogFeatures({render: true, gc: true}), true)
                         .describe()))
          .toEqual([
            'forcedGcAmount',
            'forcedGcTime',
            'gcAmount',
            'gcTime',
            'majorGcTime',
            'pureScriptTime',
            'renderTime',
            'scriptTime'
          ]);
    });

    it('should describe itself based on micro metrics', () => {
      var description = createMetric([[]], {'myMicroMetric': 'someDesc'}).describe();
      expect(description['myMicroMetric']).toEqual('someDesc');
    });

    describe('beginMeasure', () => {

      it('should not force gc and mark the timeline', inject([AsyncTestCompleter], (async) => {
           var metric = createMetric([[]]);
           metric.beginMeasure().then((_) => {
             expect(commandLog).toEqual([['timeBegin', 'benchpress0']]);

             async.done();
           });
         }));

      it('should force gc and mark the timeline', inject([AsyncTestCompleter], (async) => {
           var metric = createMetric([[]], null, null, true);
           metric.beginMeasure().then((_) => {
             expect(commandLog).toEqual([['gc'], ['timeBegin', 'benchpress0']]);

             async.done();
           });
         }));

    });

    describe('endMeasure', () => {

      it('should mark and aggregate events in between the marks',
         inject([AsyncTestCompleter], (async) => {
           var events = [[
             eventFactory.markStart('benchpress0', 0),
             eventFactory.start('script', 4),
             eventFactory.end('script', 6),
             eventFactory.markEnd('benchpress0', 10)
           ]];
           var metric = createMetric(events);
           metric.beginMeasure()
               .then((_) => metric.endMeasure(false))
               .then((data) => {
                 expect(commandLog)
                     .toEqual([
                       ['timeBegin', 'benchpress0'],
                       ['timeEnd', 'benchpress0', null],
                       'readPerfLog'
                     ]);
                 expect(data['scriptTime']).toBe(2);

                 async.done();
               });
         }));

      it('should restart timing', inject([AsyncTestCompleter], (async) => {
           var events = [
             [
               eventFactory.markStart('benchpress0', 0),
               eventFactory.markEnd('benchpress0', 1),
               eventFactory.markStart('benchpress1', 2),
             ],
             [eventFactory.markEnd('benchpress1', 3)]
           ];
           var metric = createMetric(events);
           metric.beginMeasure()
               .then((_) => metric.endMeasure(true))
               .then((_) => metric.endMeasure(true))
               .then((_) => {
                 expect(commandLog)
                     .toEqual([
                       ['timeBegin', 'benchpress0'],
                       ['timeEnd', 'benchpress0', 'benchpress1'],
                       'readPerfLog',
                       ['timeEnd', 'benchpress1', 'benchpress2'],
                       'readPerfLog'
                     ]);

                 async.done();
               });
         }));

      it('should loop and aggregate until the end mark is present',
         inject([AsyncTestCompleter], (async) => {
           var events = [
             [eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 1)],
             [eventFactory.end('script', 2)],
             [
               eventFactory.start('script', 3),
               eventFactory.end('script', 5),
               eventFactory.markEnd('benchpress0', 10)
             ]
           ];
           var metric = createMetric(events);
           metric.beginMeasure()
               .then((_) => metric.endMeasure(false))
               .then((data) => {
                 expect(commandLog)
                     .toEqual([
                       ['timeBegin', 'benchpress0'],
                       ['timeEnd', 'benchpress0', null],
                       'readPerfLog',
                       ['setTimeout', 100],
                       'readPerfLog',
                       ['setTimeout', 100],
                       'readPerfLog'
                     ]);
                 expect(data['scriptTime']).toBe(3);

                 async.done();
               });
         }));

      it('should store events after the end mark for the next call',
         inject([AsyncTestCompleter], (async) => {
           var events = [
             [
               eventFactory.markStart('benchpress0', 0),
               eventFactory.markEnd('benchpress0', 1),
               eventFactory.markStart('benchpress1', 1),
               eventFactory.start('script', 1),
               eventFactory.end('script', 2)
             ],
             [
               eventFactory.start('script', 3),
               eventFactory.end('script', 5),
               eventFactory.markEnd('benchpress1', 6)
             ]
           ];
           var metric = createMetric(events);
           metric.beginMeasure()
               .then((_) => metric.endMeasure(true))
               .then((data) =>
                     {
                       expect(data['scriptTime']).toBe(0);
                       return metric.endMeasure(true)
                     })
               .then((data) => {
                 expect(commandLog)
                     .toEqual([
                       ['timeBegin', 'benchpress0'],
                       ['timeEnd', 'benchpress0', 'benchpress1'],
                       'readPerfLog',
                       ['timeEnd', 'benchpress1', 'benchpress2'],
                       'readPerfLog'
                     ]);
                 expect(data['scriptTime']).toBe(3);

                 async.done();
               });
         }));

      describe('with forced gc', () => {
        var events;
        beforeEach(() => {
          events = [[
            eventFactory.markStart('benchpress0', 0),
            eventFactory.start('script', 4),
            eventFactory.end('script', 6),
            eventFactory.markEnd('benchpress0', 10),
            eventFactory.markStart('benchpress1', 11),
            eventFactory.start('gc', 12, {'usedHeapSize': 2500}),
            eventFactory.end('gc', 15, {'usedHeapSize': 1000}),
            eventFactory.markEnd('benchpress1', 20)
          ]];
        });

        it('should measure forced gc', inject([AsyncTestCompleter], (async) => {
             var metric = createMetric(events, null, null, true);
             metric.beginMeasure()
                 .then((_) => metric.endMeasure(false))
                 .then((data) => {
                   expect(commandLog)
                       .toEqual([
                         ['gc'],
                         ['timeBegin', 'benchpress0'],
                         ['timeEnd', 'benchpress0', 'benchpress1'],
                         'readPerfLog',
                         ['gc'],
                         ['timeEnd', 'benchpress1', null],
                         'readPerfLog'
                       ]);
                   expect(data['forcedGcTime']).toBe(3);
                   expect(data['forcedGcAmount']).toBe(1.5);

                   async.done();
                 });
           }));

        it('should restart after the forced gc if needed', inject([AsyncTestCompleter], (async) => {
             var metric = createMetric(events, null, null, true);
             metric.beginMeasure()
                 .then((_) => metric.endMeasure(true))
                 .then((data) => {
                   expect(commandLog[5]).toEqual(['timeEnd', 'benchpress1', 'benchpress2']);

                   async.done();
                 });
           }));

      });

    });

    describe('aggregation', () => {

      function aggregate(events, microMetrics = null) {
        ListWrapper.insert(events, 0, eventFactory.markStart('benchpress0', 0));
        ListWrapper.push(events, eventFactory.markEnd('benchpress0', 10));
        var metric = createMetric([events], microMetrics);
        return metric.beginMeasure().then((_) => metric.endMeasure(false));
      }


      it('should report a single interval', inject([AsyncTestCompleter], (async) => {
           aggregate([eventFactory.start('script', 0), eventFactory.end('script', 5)])
               .then((data) => {
                 expect(data['scriptTime']).toBe(5);
                 async.done();
               });
         }));

      it('should sum up multiple intervals', inject([AsyncTestCompleter], (async) => {
           aggregate([
             eventFactory.start('script', 0),
             eventFactory.end('script', 5),
             eventFactory.start('script', 10),
             eventFactory.end('script', 17)
           ])
               .then((data) => {
                 expect(data['scriptTime']).toBe(12);
                 async.done();
               });
         }));

      it('should ignore not started intervals', inject([AsyncTestCompleter], (async) => {
           aggregate([eventFactory.end('script', 10)])
               .then((data) => {
                 expect(data['scriptTime']).toBe(0);
                 async.done();
               });
         }));

      it('should ignore not ended intervals', inject([AsyncTestCompleter], (async) => {
           aggregate([eventFactory.start('script', 10)])
               .then((data) => {
                 expect(data['scriptTime']).toBe(0);
                 async.done();
               });
         }));

      it('should ignore events from different processed as the start mark',
         inject([AsyncTestCompleter], (async) => {
           var otherProcessEventFactory = new TraceEventFactory('timeline', 'pid1');
           var metric = createMetric([[
             eventFactory.markStart('benchpress0', 0),
             eventFactory.start('script', 0, null),
             eventFactory.end('script', 5, null),
             otherProcessEventFactory.start('script', 10, null),
             otherProcessEventFactory.end('script', 17, null),
             eventFactory.markEnd('benchpress0', 20)
           ]]);
           metric.beginMeasure()
               .then((_) => metric.endMeasure(false))
               .then((data) => {
                 expect(data['scriptTime']).toBe(5);
                 async.done();
               });
         }));

      it('should support scriptTime metric', inject([AsyncTestCompleter], (async) => {
           aggregate([eventFactory.start('script', 0), eventFactory.end('script', 5)])
               .then((data) => {
                 expect(data['scriptTime']).toBe(5);
                 async.done();
               });
         }));

      it('should support renderTime metric', inject([AsyncTestCompleter], (async) => {
           aggregate([eventFactory.start('render', 0), eventFactory.end('render', 5)])
               .then((data) => {
                 expect(data['renderTime']).toBe(5);
                 async.done();
               });
         }));

      it('should support gcTime/gcAmount metric', inject([AsyncTestCompleter], (async) => {
           aggregate([
             eventFactory.start('gc', 0, {'usedHeapSize': 2500}),
             eventFactory.end('gc', 5, {'usedHeapSize': 1000})
           ])
               .then((data) => {
                 expect(data['gcTime']).toBe(5);
                 expect(data['gcAmount']).toBe(1.5);
                 expect(data['majorGcTime']).toBe(0);
                 async.done();
               });
         }));

      it('should support majorGcTime metric', inject([AsyncTestCompleter], (async) => {
           aggregate([
             eventFactory.start('gc', 0, {'usedHeapSize': 2500}),
             eventFactory.end('gc', 5, {'usedHeapSize': 1000, 'majorGc': true})
           ])
               .then((data) => {
                 expect(data['gcTime']).toBe(5);
                 expect(data['majorGcTime']).toBe(5);
                 async.done();
               });
         }));

      it('should support pureScriptTime = scriptTime-gcTime-renderTime',
         inject([AsyncTestCompleter], (async) => {
           aggregate([
             eventFactory.start('script', 0),
             eventFactory.start('gc', 1, {'usedHeapSize': 1000}),
             eventFactory.end('gc', 4, {'usedHeapSize': 0}),
             eventFactory.start('render', 4),
             eventFactory.end('render', 5),
             eventFactory.end('script', 6)
           ])
               .then((data) => {
                 expect(data['scriptTime']).toBe(6);
                 expect(data['pureScriptTime']).toBe(2);
                 async.done();
               });
         }));

      describe('microMetrics', () => {

        it('should report micro metrics', inject([AsyncTestCompleter], (async) => {
             aggregate([
               eventFactory.markStart('mm1', 0),
               eventFactory.markEnd('mm1', 5),
             ],
                       {'mm1': 'micro metric 1'})
                 .then((data) => {
                   expect(data['mm1']).toBe(5.0);
                   async.done();
                 });
           }));

        it('should ignore micro metrics that were not specified',
           inject([AsyncTestCompleter], (async) => {
             aggregate([
               eventFactory.markStart('mm1', 0),
               eventFactory.markEnd('mm1', 5),
             ])
                 .then((data) => {
                   expect(data['mm1']).toBeFalsy();
                   async.done();
                 });
           }));

        it('should report micro metric averages', inject([AsyncTestCompleter], (async) => {
             aggregate([
               eventFactory.markStart('mm1*20', 0),
               eventFactory.markEnd('mm1*20', 5),
             ],
                       {'mm1': 'micro metric 1'})
                 .then((data) => {
                   expect(data['mm1']).toBe(5 / 20);
                   async.done();
                 });
           }));

      });

    });

  });
}

class MockDriverExtension extends WebDriverExtension {
  constructor(private _perfLogs: List<any>, private _commandLog: List<any>,
              private _perfLogFeatures: PerfLogFeatures) {
    super();
  }

  timeBegin(name): Promise<any> {
    ListWrapper.push(this._commandLog, ['timeBegin', name]);
    return PromiseWrapper.resolve(null);
  }

  timeEnd(name, restartName): Promise<any> {
    ListWrapper.push(this._commandLog, ['timeEnd', name, restartName]);
    return PromiseWrapper.resolve(null);
  }

  perfLogFeatures(): PerfLogFeatures { return this._perfLogFeatures; }

  readPerfLog(): Promise<any> {
    ListWrapper.push(this._commandLog, 'readPerfLog');
    if (this._perfLogs.length > 0) {
      var next = this._perfLogs[0];
      ListWrapper.removeAt(this._perfLogs, 0);
      return PromiseWrapper.resolve(next);
    } else {
      return PromiseWrapper.resolve([]);
    }
  }

  gc(): Promise<any> {
    ListWrapper.push(this._commandLog, ['gc']);
    return PromiseWrapper.resolve(null);
  }
}
@@ -1,11 +1,16 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';

import { isBlank, isPresent, Date, DateWrapper } from 'angular2/src/facade/lang';
import { List, ListWrapper } from 'angular2/src/facade/collection';
import {isBlank, isPresent, Date, DateWrapper} from 'angular2/src/facade/lang';
import {List, ListWrapper} from 'angular2/src/facade/collection';

import {
  SampleState, Reporter, bind, Injector,
  ConsoleReporter, SampleDescription, MeasureValues
  SampleState,
  Reporter,
  bind,
  Injector,
  ConsoleReporter,
  SampleDescription,
  MeasureValues
} from 'benchpress/common';

export function main() {
@@ -13,7 +18,9 @@ export function main() {
  var reporter;
  var log;

  function createReporter({columnWidth, sampleId, descriptions, metrics}) {
  function createReporter({columnWidth = null, sampleId = null, descriptions = null, metrics = null}:{
    columnWidth?, sampleId?, descriptions?, metrics?
  }) {
    log = [];
    if (isBlank(descriptions)) {
      descriptions = [];
@@ -36,14 +43,8 @@ export function main() {
    createReporter({
      columnWidth: 8,
      sampleId: 'someSample',
      descriptions: [{
        'a': 1,
        'b': 2
      }],
      metrics: {
        'm1': 'some desc',
        'm2': 'some other desc'
      }
      descriptions: [{'a': 1, 'b': 2}],
      metrics: {'m1': 'some desc', 'm2': 'some other desc'}
    });
    expect(log).toEqual([
      'BENCHMARK someSample',
@@ -60,56 +61,24 @@
  });

  it('should print a table row', () => {
    createReporter({
      columnWidth: 8,
      metrics: {
        'a': '',
        'b': ''
      }
    });
    createReporter({columnWidth: 8, metrics: {'a': '', 'b': ''}});
    log = [];
    reporter.reportMeasureValues(mv(0, 0, {
      'a': 1.23, 'b': 2
    }));
    expect(log).toEqual([
      ' 1.23 | 2.00'
    ]);
    reporter.reportMeasureValues(mv(0, 0, {'a': 1.23, 'b': 2}));
    expect(log).toEqual([' 1.23 | 2.00']);
  });

  it('should print the table footer and stats when there is a valid sample', () => {
    createReporter({
      columnWidth: 8,
      metrics: {
        'a': '',
        'b': ''
      }
    });
    createReporter({columnWidth: 8, metrics: {'a': '', 'b': ''}});
    log = [];
    reporter.reportSample([], [mv(0,0,{
      'a': 3, 'b': 6
    }), mv(1,1,{
      'a': 5, 'b': 9
    })]);
    expect(log).toEqual([
      '======== | ========',
      '4.00+-25% | 7.50+-20%'
    ]);
    reporter.reportSample([], [mv(0, 0, {'a': 3, 'b': 6}), mv(1, 1, {'a': 5, 'b': 9})]);
    expect(log).toEqual(['======== | ========', '4.00+-25% | 7.50+-20%']);
  });

  it('should print the coefficient of variation only when it is meaningful', () => {
    createReporter({
      columnWidth: 8,
      metrics: { 'a': '', 'b': '' }
    });
    createReporter({columnWidth: 8, metrics: {'a': '', 'b': ''}});
    log = [];
    reporter.reportSample([], [
      mv(0, 0, { 'a': 3, 'b': 0 }),
      mv(1, 1, { 'a': 5, 'b': 0 })
    ]);
    expect(log).toEqual([
      '======== | ========',
      '4.00+-25% | 0.00'
    ]);
    reporter.reportSample([], [mv(0, 0, {'a': 3, 'b': 0}), mv(1, 1, {'a': 5, 'b': 0})]);
    expect(log).toEqual(['======== | ========', '4.00+-25% | 0.00']);
  });

});
@@ -1,109 +0,0 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import { DateWrapper, Json, RegExpWrapper, isPresent } from 'angular2/src/facade/lang';
import { PromiseWrapper } from 'angular2/src/facade/async';

import {
  bind, Injector,
  SampleDescription,
  MeasureValues,
  Options
} from 'benchpress/common';


import { JsonFileReporter } from 'benchpress/src/reporter/json_file_reporter';

export function main() {
  describe('file reporter', () => {
    var loggedFile;

    function createReporter({sampleId, descriptions, metrics, path}) {
      var bindings = [
        JsonFileReporter.BINDINGS,
        bind(SampleDescription).toValue(new SampleDescription(sampleId, descriptions, metrics)),
        bind(JsonFileReporter.PATH).toValue(path),
        bind(Options.NOW).toValue( () => DateWrapper.fromMillis(1234) ),
        bind(Options.WRITE_FILE).toValue((filename, content) => {
          loggedFile = {
            'filename': filename,
            'content': content
          };
          return PromiseWrapper.resolve(null);
        })
      ];
      return Injector.resolveAndCreate(bindings).get(JsonFileReporter);
    }

    it('should write all data into a file', inject([AsyncTestCompleter], (async) => {
      createReporter({
        sampleId: 'someId',
        descriptions: [{ 'a': 2 }],
        path: 'somePath',
        metrics: {
          'script': 'script time'
        }
      }).reportSample([
        mv(0, 0, { 'a': 3, 'b': 6})
      ], [mv(0, 0, {
        'a': 3, 'b': 6
      }), mv(1, 1, {
        'a': 5, 'b': 9
      })]);
      var regExp = RegExpWrapper.create('somePath/someId_\\d+\\.json');
      expect(isPresent(RegExpWrapper.firstMatch(regExp, loggedFile['filename']))).toBe(true);
      var parsedContent = Json.parse(loggedFile['content']);
      expect(parsedContent).toEqual({
        "description": {
          "id": "someId",
          "description": {
            "a": 2
          },
          "metrics": {"script": "script time"}
        },
        "completeSample": [{
          "timeStamp": "1970-01-01T00:00:00.000Z",
          "runIndex": 0,
          "values": {
            "a": 3,
            "b": 6
          }
        }],
        "validSample": [
          {
            "timeStamp": "1970-01-01T00:00:00.000Z",
            "runIndex": 0,
            "values": {
              "a": 3,
              "b": 6
            }
          },
          {
            "timeStamp": "1970-01-01T00:00:00.001Z",
            "runIndex": 1,
            "values": {
              "a": 5,
              "b": 9
            }
          }
        ]
      });
      async.done();
    }));

  });
}

function mv(runIndex, time, values) {
  return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
}
87  modules/benchpress/test/reporter/json_file_reporter_spec.ts  Normal file
@@ -0,0 +1,87 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import {DateWrapper, Json, RegExpWrapper, isPresent} from 'angular2/src/facade/lang';
import {PromiseWrapper} from 'angular2/src/facade/async';

import {bind, Injector, SampleDescription, MeasureValues, Options} from 'benchpress/common';


import {JsonFileReporter} from 'benchpress/src/reporter/json_file_reporter';

export function main() {
  describe('file reporter', () => {
    var loggedFile;

    function createReporter({sampleId, descriptions, metrics, path}) {
      var bindings = [
        JsonFileReporter.BINDINGS,
        bind(SampleDescription).toValue(new SampleDescription(sampleId, descriptions, metrics)),
        bind(JsonFileReporter.PATH).toValue(path),
        bind(Options.NOW).toValue(() => DateWrapper.fromMillis(1234)),
        bind(Options.WRITE_FILE)
            .toValue((filename, content) =>
                     {
                       loggedFile = {
                         'filename': filename,
                         'content': content
                       };
                       return PromiseWrapper.resolve(null);
                     })
      ];
      return Injector.resolveAndCreate(bindings).get(JsonFileReporter);
    }

    it('should write all data into a file', inject([AsyncTestCompleter], (async) => {
         createReporter({
           sampleId: 'someId',
           descriptions: [{'a': 2}],
           path: 'somePath',
           metrics: {'script': 'script time'}
         })
             .reportSample([mv(0, 0, {'a': 3, 'b': 6})],
                           [mv(0, 0, {'a': 3, 'b': 6}), mv(1, 1, {'a': 5, 'b': 9})]);
         var regExp = RegExpWrapper.create('somePath/someId_\\d+\\.json');
         expect(isPresent(RegExpWrapper.firstMatch(regExp, loggedFile['filename']))).toBe(true);
         var parsedContent = Json.parse(loggedFile['content']);
         expect(parsedContent)
             .toEqual({
               "description":
                   {"id": "someId", "description": {"a": 2}, "metrics": {"script": "script time"}},
               "completeSample": [{
                 "timeStamp": "1970-01-01T00:00:00.000Z",
                 "runIndex": 0,
                 "values": {"a": 3, "b": 6}
               }],
               "validSample": [
                 {
                   "timeStamp": "1970-01-01T00:00:00.000Z",
                   "runIndex": 0,
                   "values": {"a": 3, "b": 6}
                 },
                 {
                   "timeStamp": "1970-01-01T00:00:00.001Z",
                   "runIndex": 1,
                   "values": {"a": 5, "b": 9}
                 }
               ]
             });
         async.done();
       }));

  });
}

function mv(runIndex, time, values) {
  return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
}
@@ -1,89 +0,0 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import { List, ListWrapper, StringMap } from 'angular2/src/facade/collection';
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
import { DateWrapper } from 'angular2/src/facade/lang';

import { Reporter, MultiReporter, bind, Injector, MeasureValues } from 'benchpress/common';

export function main() {
  function createReporters(ids) {
    return Injector.resolveAndCreate([
      ListWrapper.map(ids, (id) => bind(id).toValue(new MockReporter(id)) ),
      MultiReporter.createBindings(ids)
    ]).asyncGet(MultiReporter);
  }

  describe('multi reporter', () => {

    it('should reportMeasureValues to all', inject([AsyncTestCompleter], (async) => {
      var mv = new MeasureValues(0, DateWrapper.now(), {});
      createReporters(['m1', 'm2'])
        .then( (r) => r.reportMeasureValues(mv) )
        .then( (values) => {

          expect(values).toEqual([
            {'id': 'm1', 'values': mv},
            {'id': 'm2', 'values': mv}
          ]);
          async.done();
        });
    }));

    it('should reportSample to call', inject([AsyncTestCompleter], (async) => {
      var completeSample = [
        new MeasureValues(0, DateWrapper.now(), {}),
        new MeasureValues(1, DateWrapper.now(), {})
      ];
      var validSample = [completeSample[1]];

      createReporters(['m1', 'm2'])
        .then( (r) => r.reportSample(completeSample, validSample) )
        .then( (values) => {

          expect(values).toEqual([
            {'id': 'm1', 'completeSample': completeSample, 'validSample': validSample},
            {'id': 'm2', 'completeSample': completeSample, 'validSample': validSample}
          ]);
          async.done();
        })
    }));

  });
}

class MockReporter extends Reporter {
  _id:string;

  constructor(id) {
    super();
    this._id = id;
  }

  reportMeasureValues(values:MeasureValues):Promise {
    return PromiseWrapper.resolve({
      'id': this._id,
      'values': values
    });
  }

  reportSample(completeSample:List<MeasureValues>, validSample:List<MeasureValues>):Promise {
    return PromiseWrapper.resolve({
      'id': this._id,
      'completeSample': completeSample,
      'validSample': validSample
    });
  }

}
76  modules/benchpress/test/reporter/multi_reporter_spec.ts  Normal file
@@ -0,0 +1,76 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import {List, ListWrapper, StringMap} from 'angular2/src/facade/collection';
import {PromiseWrapper, Promise} from 'angular2/src/facade/async';
import {DateWrapper} from 'angular2/src/facade/lang';

import {Reporter, MultiReporter, bind, Injector, MeasureValues} from 'benchpress/common';

export function main() {
  function createReporters(ids) {
    return Injector.resolveAndCreate([
      ListWrapper.map(ids, (id) => bind(id).toValue(new MockReporter(id))),
      MultiReporter.createBindings(ids)
    ])
        .asyncGet(MultiReporter);
  }

  describe('multi reporter', () => {

    it('should reportMeasureValues to all', inject([AsyncTestCompleter], (async) => {
         var mv = new MeasureValues(0, DateWrapper.now(), {});
         createReporters(['m1', 'm2'])
             .then((r) => r.reportMeasureValues(mv))
             .then((values) => {

               expect(values).toEqual([{'id': 'm1', 'values': mv}, {'id': 'm2', 'values': mv}]);
               async.done();
             });
       }));

    it('should reportSample to call', inject([AsyncTestCompleter], (async) => {
         var completeSample = [
           new MeasureValues(0, DateWrapper.now(), {}),
           new MeasureValues(1, DateWrapper.now(), {})
         ];
         var validSample = [completeSample[1]];

         createReporters(['m1', 'm2'])
             .then((r) => r.reportSample(completeSample, validSample))
             .then((values) => {

               expect(values).toEqual([
                 {'id': 'm1', 'completeSample': completeSample, 'validSample': validSample},
                 {'id': 'm2', 'completeSample': completeSample, 'validSample': validSample}
               ]);
               async.done();
             })
       }));

  });
}

class MockReporter extends Reporter {
  constructor(private _id: string) { super(); }

  reportMeasureValues(values: MeasureValues): Promise<StringMap<string, any>> {
    return PromiseWrapper.resolve({'id': this._id, 'values': values});
  }

  reportSample(completeSample: List<MeasureValues>,
               validSample: List<MeasureValues>): Promise<StringMap<string, any>> {
    return PromiseWrapper.resolve(
        {'id': this._id, 'completeSample': completeSample, 'validSample': validSample});
  }
}
@ -1,154 +0,0 @@
|
||||
import {
|
||||
afterEach,
|
||||
AsyncTestCompleter,
|
||||
beforeEach,
|
||||
ddescribe,
|
||||
describe,
|
||||
expect,
|
||||
iit,
|
||||
inject,
|
||||
it,
|
||||
xit,
|
||||
} from 'angular2/test_lib';
|
||||
import {
|
||||
Runner, Sampler, SampleDescription,
|
||||
Validator, bind, Injector, Metric,
|
||||
Options, WebDriverAdapter
|
||||
} from 'benchpress/common';
|
||||
import { isBlank } from 'angular2/src/facade/lang';
|
||||
import { Promise, PromiseWrapper } from 'angular2/src/facade/async';
|
||||
|
||||
export function main() {
|
||||
describe('runner', () => {
|
||||
var injector;
|
||||
var runner;
|
||||
|
||||
function createRunner(defaultBindings = null) {
|
||||
if (isBlank(defaultBindings)) {
|
||||
defaultBindings = [];
|
||||
}
|
||||
runner = new Runner([
|
||||
defaultBindings,
|
||||
bind(Sampler).toFactory(
|
||||
(_injector) => {
|
||||
injector = _injector;
|
||||
return new MockSampler();
|
||||
}, [Injector]
|
||||
),
|
||||
bind(Metric).toFactory( () => new MockMetric(), []),
|
||||
bind(Validator).toFactory( () => new MockValidator(), []),
|
||||
bind(WebDriverAdapter).toFactory( () => new MockWebDriverAdapter(), [])
|
||||
]);
|
||||
return runner;
|
||||
}
|
||||
|
||||
    it('should set SampleDescription.id', inject([AsyncTestCompleter], (async) => {
      createRunner().sample({id: 'someId'})
        .then( (_) => injector.asyncGet(SampleDescription) )
        .then( (desc) => {
          expect(desc.id).toBe('someId');
          async.done();
        });
    }));

    it('should merge SampleDescription.description', inject([AsyncTestCompleter], (async) => {
      createRunner([
        bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1})
      ]).sample({id: 'someId', bindings: [
        bind(Options.SAMPLE_DESCRIPTION).toValue({'b': 2})
      ]}).then( (_) => injector.asyncGet(SampleDescription) )
        .then( (desc) => {
          expect(desc.description).toEqual({
            'forceGc': false,
            'userAgent': 'someUserAgent',
            'a': 1,
            'b': 2,
            'v': 11
          });
          async.done();
        });
    }));

    it('should fill SampleDescription.metrics from the Metric', inject([AsyncTestCompleter], (async) => {
      createRunner().sample({id: 'someId'})
        .then( (_) => injector.asyncGet(SampleDescription) )
        .then( (desc) => {
          expect(desc.metrics).toEqual({ 'm1': 'some metric' });
          async.done();
        });
    }));

    it('should bind Options.EXECUTE', inject([AsyncTestCompleter], (async) => {
      var execute = () => {};
      createRunner().sample({id: 'someId', execute: execute}).then( (_) => {
        expect(injector.get(Options.EXECUTE)).toEqual(execute);
        async.done();
      });
    }));

    it('should bind Options.PREPARE', inject([AsyncTestCompleter], (async) => {
      var prepare = () => {};
      createRunner().sample({id: 'someId', prepare: prepare}).then( (_) => {
        expect(injector.get(Options.PREPARE)).toEqual(prepare);
        async.done();
      });
    }));

    it('should bind Options.MICRO_METRICS', inject([AsyncTestCompleter], (async) => {
      createRunner().sample({id: 'someId', microMetrics: {'a': 'b'}}).then( (_) => {
        expect(injector.get(Options.MICRO_METRICS)).toEqual({'a': 'b'});
        async.done();
      });
    }));

    it('should overwrite bindings per sample call', inject([AsyncTestCompleter], (async) => {
      createRunner([
        bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1}),
      ]).sample({id: 'someId', bindings: [
        bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 2}),
      ]}).then( (_) => injector.asyncGet(SampleDescription) )
        .then( (desc) => {
          expect(injector.get(SampleDescription).description['a']).toBe(2);
          async.done();
        });

    }));

  });
}

class MockWebDriverAdapter extends WebDriverAdapter {
  executeScript(script):Promise {
    return PromiseWrapper.resolve('someUserAgent');
  }
}

class MockValidator extends Validator {
  constructor() {
    super();
  }
  describe() {
    return { 'v': 11 };
  }
}

class MockMetric extends Metric {
  constructor() {
    super();
  }
  describe() {
    return { 'm1': 'some metric' };
  }
}

class MockSampler extends Sampler {
  constructor() {
    super();
  }
  sample():Promise {
    return PromiseWrapper.resolve(23);
  }
}
158
modules/benchpress/test/runner_spec.ts
Normal file
@ -0,0 +1,158 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';
import {
  Runner,
  Sampler,
  SampleDescription,
  Validator,
  bind,
  Injector,
  Metric,
  Options,
  WebDriverAdapter,
  SampleState
} from 'benchpress/common';
import {isBlank} from 'angular2/src/facade/lang';
import {Promise, PromiseWrapper} from 'angular2/src/facade/async';

export function main() {
  describe('runner', () => {
    var injector;
    var runner;

    function createRunner(defaultBindings = null) {
      if (isBlank(defaultBindings)) {
        defaultBindings = [];
      }
      runner = new Runner([
        defaultBindings,
        bind(Sampler).toFactory((_injector) => {
          injector = _injector;
          return new MockSampler();
        }, [Injector]),
        bind(Metric).toFactory(() => new MockMetric(), []),
        bind(Validator).toFactory(() => new MockValidator(), []),
        bind(WebDriverAdapter).toFactory(() => new MockWebDriverAdapter(), [])
      ]);
      return runner;
    }

    it('should set SampleDescription.id', inject([AsyncTestCompleter], (async) => {
      createRunner()
        .sample({id: 'someId'})
        .then((_) => injector.asyncGet(SampleDescription))
        .then((desc) => {
          expect(desc.id).toBe('someId');
          async.done();
        });
    }));

    it('should merge SampleDescription.description', inject([AsyncTestCompleter], (async) => {
      createRunner([bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1})])
        .sample(
          {id: 'someId', bindings: [bind(Options.SAMPLE_DESCRIPTION).toValue({'b': 2})]})
        .then((_) => injector.asyncGet(SampleDescription))
        .then((desc) => {
          expect(desc.description)
            .toEqual(
              {'forceGc': false, 'userAgent': 'someUserAgent', 'a': 1, 'b': 2, 'v': 11});
          async.done();
        });
    }));

    it('should fill SampleDescription.metrics from the Metric',
       inject([AsyncTestCompleter], (async) => {
         createRunner()
           .sample({id: 'someId'})
           .then((_) => injector.asyncGet(SampleDescription))
           .then((desc) => {
             expect(desc.metrics).toEqual({'m1': 'some metric'});
             async.done();
           });
       }));

    it('should bind Options.EXECUTE', inject([AsyncTestCompleter], (async) => {
      var execute = () => {};
      createRunner()
        .sample({id: 'someId', execute: execute})
        .then((_) => {
          expect(injector.get(Options.EXECUTE)).toEqual(execute);
          async.done();
        });
    }));

    it('should bind Options.PREPARE', inject([AsyncTestCompleter], (async) => {
      var prepare = () => {};
      createRunner()
        .sample({id: 'someId', prepare: prepare})
        .then((_) => {
          expect(injector.get(Options.PREPARE)).toEqual(prepare);
          async.done();
        });
    }));

    it('should bind Options.MICRO_METRICS', inject([AsyncTestCompleter], (async) => {
      createRunner()
        .sample({id: 'someId', microMetrics: {'a': 'b'}})
        .then((_) => {
          expect(injector.get(Options.MICRO_METRICS)).toEqual({'a': 'b'});
          async.done();
        });
    }));

    it('should overwrite bindings per sample call', inject([AsyncTestCompleter], (async) => {
      createRunner([
        bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1}),
      ])
        .sample({
          id: 'someId',
          bindings: [
            bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 2}),
          ]
        })
        .then((_) => injector.asyncGet(SampleDescription))
        .then((desc) => {
          expect(injector.get(SampleDescription).description['a']).toBe(2);
          async.done();
        });

    }));

  });
}

class MockWebDriverAdapter extends WebDriverAdapter {
  executeScript(script): Promise<string> { return PromiseWrapper.resolve('someUserAgent'); }
}

class MockValidator extends Validator {
  constructor() { super(); }
  describe() { return {'v': 11}; }
}

class MockMetric extends Metric {
  constructor() { super(); }
  describe() { return {'m1': 'some metric'}; }
}

class MockSampler extends Sampler {
  constructor() { super(); }
  sample(): Promise<SampleState> { return PromiseWrapper.resolve(23); }
}
@ -1,331 +0,0 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import { isBlank, isPresent, BaseException, stringify, Date, DateWrapper } from 'angular2/src/facade/lang';
import { ListWrapper, List } from 'angular2/src/facade/collection';
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';

import {
  Sampler, WebDriverAdapter,
  Validator, Metric, Reporter, Browser,
  bind, Injector, Options, MeasureValues
} from 'benchpress/common';

export function main() {
  var EMPTY_EXECUTE = () => {};

  describe('sampler', () => {
    var sampler;

    function createSampler({
      driver,
      metric,
      reporter,
      validator,
      prepare,
      execute
    } = {}) {
      var time = 1000;
      if (isBlank(metric)) {
        metric = new MockMetric([]);
      }
      if (isBlank(reporter)) {
        reporter = new MockReporter([]);
      }
      if (isBlank(driver)) {
        driver = new MockDriverAdapter([]);
      }
      var bindings = [
        Options.DEFAULT_BINDINGS,
        Sampler.BINDINGS,
        bind(Metric).toValue(metric),
        bind(Reporter).toValue(reporter),
        bind(WebDriverAdapter).toValue(driver),
        bind(Options.EXECUTE).toValue(execute),
        bind(Validator).toValue(validator),
        bind(Options.NOW).toValue( () => DateWrapper.fromMillis(time++) )
      ];
      if (isPresent(prepare)) {
        ListWrapper.push(bindings, bind(Options.PREPARE).toValue(prepare));
      }

      sampler = Injector.resolveAndCreate(bindings).get(Sampler);
    }

    it('should call the prepare and execute callbacks using WebDriverAdapter.waitFor', inject([AsyncTestCompleter], (async) => {
      var log = [];
      var count = 0;
      var driver = new MockDriverAdapter([], (callback) => {
        var result = callback();
        ListWrapper.push(log, result);
        return PromiseWrapper.resolve(result);
      });
      createSampler({
        driver: driver,
        validator: createCountingValidator(2),
        prepare: () => {
          return count++;
        },
        execute: () => {
          return count++;
        }
      });
      sampler.sample().then( (_) => {
        expect(count).toBe(4);
        expect(log).toEqual([0,1,2,3]);
        async.done();
      });

    }));

    it('should call prepare, beginMeasure, execute, endMeasure for every iteration', inject([AsyncTestCompleter], (async) => {
      var workCount = 0;
      var log = [];
      createSampler({
        metric: createCountingMetric(log),
        validator: createCountingValidator(2),
        prepare: () => {
          ListWrapper.push(log, `p${workCount++}`);
        },
        execute: () => {
          ListWrapper.push(log, `w${workCount++}`);
        }
      });
      sampler.sample().then( (_) => {
        expect(log).toEqual([
          'p0',
          ['beginMeasure'],
          'w1',
          ['endMeasure', false, {'script': 0}],
          'p2',
          ['beginMeasure'],
          'w3',
          ['endMeasure', false, {'script': 1}],
        ]);
        async.done();
      });
    }));

    it('should call execute, endMeasure for every iteration if there is no prepare callback', inject([AsyncTestCompleter], (async) => {
      var log = [];
      var workCount = 0;
      createSampler({
        metric: createCountingMetric(log),
        validator: createCountingValidator(2),
        execute: () => {
          ListWrapper.push(log, `w${workCount++}`);
        },
        prepare: null
      });
      sampler.sample().then( (_) => {
        expect(log).toEqual([
          ['beginMeasure'],
          'w0',
          ['endMeasure', true, {'script': 0}],
          'w1',
          ['endMeasure', true, {'script': 1}],
        ]);
        async.done();
      });
    }));

    it('should only collect metrics for execute and ignore metrics from prepare', inject([AsyncTestCompleter], (async) => {
      var scriptTime = 0;
      var iterationCount = 1;
      createSampler({
        validator: createCountingValidator(2),
        metric: new MockMetric([], () => {
          var result = PromiseWrapper.resolve({'script': scriptTime});
          scriptTime = 0;
          return result;
        }),
        prepare: () => {
          scriptTime = 1 * iterationCount;
        },
        execute: () => {
          scriptTime = 10 * iterationCount;
          iterationCount++;
        }
      });
      sampler.sample().then( (state) => {
        expect(state.completeSample.length).toBe(2);
        expect(state.completeSample[0]).toEqual(mv(0, 1000, {'script': 10}));
        expect(state.completeSample[1]).toEqual(mv(1, 1001, {'script': 20}));
        async.done();
      });
    }));

    it('should call the validator for every execution and store the valid sample', inject([AsyncTestCompleter], (async) => {
      var log = [];
      var validSample = [{}];

      createSampler({
        metric: createCountingMetric(),
        validator: createCountingValidator(2, validSample, log),
        execute: EMPTY_EXECUTE
      });
      sampler.sample().then( (state) => {
        expect(state.validSample).toBe(validSample);
        // TODO(tbosch): Why does this fail??
        // expect(log).toEqual([
        //   ['validate', [{'script': 0}], null],
        //   ['validate', [{'script': 0}, {'script': 1}], validSample]
        // ]);

        expect(log.length).toBe(2);
        expect(log[0]).toEqual(
          ['validate', [mv(0, 1000, {'script': 0})], null]
        );
        expect(log[1]).toEqual(
          ['validate', [mv(0, 1000, {'script': 0}), mv(1, 1001, {'script': 1})], validSample]
        );

        async.done();
      });
    }));

    it('should report the metric values', inject([AsyncTestCompleter], (async) => {
      var log = [];
      var validSample = [{}];
      createSampler({
        validator: createCountingValidator(2, validSample),
        metric: createCountingMetric(),
        reporter: new MockReporter(log),
        execute: EMPTY_EXECUTE
      });
      sampler.sample().then( (_) => {
        // TODO(tbosch): Why does this fail??
        // expect(log).toEqual([
        //   ['reportMeasureValues', 0, {'script': 0}],
        //   ['reportMeasureValues', 1, {'script': 1}],
        //   ['reportSample', [{'script': 0}, {'script': 1}], validSample]
        // ]);
        expect(log.length).toBe(3);
        expect(log[0]).toEqual(
          ['reportMeasureValues', mv(0, 1000, {'script': 0})]
        );
        expect(log[1]).toEqual(
          ['reportMeasureValues', mv(1, 1001, {'script': 1})]
        );
        expect(log[2]).toEqual(
          ['reportSample', [mv(0, 1000, {'script': 0}), mv(1, 1001, {'script': 1})], validSample]
        );

        async.done();
      });
    }));

  });
}

function mv(runIndex, time, values) {
  return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
}

function createCountingValidator(count, validSample = null, log = null) {
  return new MockValidator(log, (completeSample) => {
    count--;
    if (count === 0) {
      return isPresent(validSample) ? validSample : completeSample;
    } else {
      return null;
    }
  });
}

function createCountingMetric(log = null) {
  var scriptTime = 0;
  return new MockMetric(log, () => {
    return { 'script': scriptTime++ };
  });
}

class MockDriverAdapter extends WebDriverAdapter {
  _log:List;
  _waitFor:Function;
  constructor(log = null, waitFor = null) {
    super();
    if (isBlank(log)) {
      log = [];
    }
    this._log = log;
    this._waitFor = waitFor;
  }
  waitFor(callback:Function):Promise {
    if (isPresent(this._waitFor)) {
      return this._waitFor(callback);
    } else {
      return PromiseWrapper.resolve(callback());
    }
  }
}


class MockValidator extends Validator {
  _validate:Function;
  _log:List;
  constructor(log = null, validate = null) {
    super();
    this._validate = validate;
    if (isBlank(log)) {
      log = [];
    }
    this._log = log;
  }
  validate(completeSample:List<MeasureValues>):List<MeasureValues> {
    var stableSample = isPresent(this._validate) ? this._validate(completeSample) : completeSample;
    ListWrapper.push(this._log, ['validate', completeSample, stableSample]);
    return stableSample;
  }
}

class MockMetric extends Metric {
  _endMeasure:Function;
  _log:List;
  constructor(log = null, endMeasure = null) {
    super();
    this._endMeasure = endMeasure;
    if (isBlank(log)) {
      log = [];
    }
    this._log = log;
  }
  beginMeasure() {
    ListWrapper.push(this._log, ['beginMeasure']);
    return PromiseWrapper.resolve(null);
  }
  endMeasure(restart) {
    var measureValues = isPresent(this._endMeasure) ? this._endMeasure() : {};
    ListWrapper.push(this._log, ['endMeasure', restart, measureValues]);
    return PromiseWrapper.resolve(measureValues);
  }
}

class MockReporter extends Reporter {
  _log:List;
  constructor(log = null) {
    super();
    if (isBlank(log)) {
      log = [];
    }
    this._log = log;
  }
  reportMeasureValues(values):Promise {
    ListWrapper.push(this._log, ['reportMeasureValues', values]);
    return PromiseWrapper.resolve(null);
  }
  reportSample(completeSample, validSample):Promise {
    ListWrapper.push(this._log, ['reportSample', completeSample, validSample]);
    return PromiseWrapper.resolve(null);
  }
}
331
modules/benchpress/test/sampler_spec.ts
Normal file
@ -0,0 +1,331 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import {
  isBlank,
  isPresent,
  BaseException,
  stringify,
  Date,
  DateWrapper
} from 'angular2/src/facade/lang';
import {ListWrapper, List} from 'angular2/src/facade/collection';
import {PromiseWrapper, Promise} from 'angular2/src/facade/async';

import {
  Sampler,
  WebDriverAdapter,
  Validator,
  Metric,
  Reporter,
  bind,
  Injector,
  Options,
  MeasureValues
} from 'benchpress/common';

export function main() {
  var EMPTY_EXECUTE = () => {};

  describe('sampler', () => {
    var sampler;

    function createSampler({driver, metric, reporter, validator, prepare, execute}: {
      driver?: any,
      metric?: Metric,
      reporter?: Reporter,
      validator?: Validator,
      prepare?: any,
      execute?: any
    } = {}) {
      var time = 1000;
      if (isBlank(metric)) {
        metric = new MockMetric([]);
      }
      if (isBlank(reporter)) {
        reporter = new MockReporter([]);
      }
      if (isBlank(driver)) {
        driver = new MockDriverAdapter([]);
      }
      var bindings = [
        Options.DEFAULT_BINDINGS,
        Sampler.BINDINGS,
        bind(Metric).toValue(metric),
        bind(Reporter).toValue(reporter),
        bind(WebDriverAdapter).toValue(driver),
        bind(Options.EXECUTE).toValue(execute),
        bind(Validator).toValue(validator),
        bind(Options.NOW).toValue(() => DateWrapper.fromMillis(time++))
      ];
      if (isPresent(prepare)) {
        ListWrapper.push(bindings, bind(Options.PREPARE).toValue(prepare));
      }

      sampler = Injector.resolveAndCreate(bindings).get(Sampler);
    }

    it('should call the prepare and execute callbacks using WebDriverAdapter.waitFor',
       inject([AsyncTestCompleter], (async) => {
         var log = [];
         var count = 0;
         var driver = new MockDriverAdapter([], (callback) => {
           var result = callback();
           ListWrapper.push(log, result);
           return PromiseWrapper.resolve(result);
         });
         createSampler({
           driver: driver,
           validator: createCountingValidator(2),
           prepare: () => { return count++; },
           execute: () => { return count++; }
         });
         sampler.sample().then((_) => {
           expect(count).toBe(4);
           expect(log).toEqual([0, 1, 2, 3]);
           async.done();
         });

       }));

    it('should call prepare, beginMeasure, execute, endMeasure for every iteration',
       inject([AsyncTestCompleter], (async) => {
         var workCount = 0;
         var log = [];
         createSampler({
           metric: createCountingMetric(log),
           validator: createCountingValidator(2),
           prepare: () => { ListWrapper.push(log, `p${workCount++}`); },
           execute: () => { ListWrapper.push(log, `w${workCount++}`); }
         });
         sampler.sample().then((_) => {
           expect(log).toEqual([
             'p0',
             ['beginMeasure'],
             'w1',
             ['endMeasure', false, {'script': 0}],
             'p2',
             ['beginMeasure'],
             'w3',
             ['endMeasure', false, {'script': 1}],
           ]);
           async.done();
         });
       }));

    it('should call execute, endMeasure for every iteration if there is no prepare callback',
       inject([AsyncTestCompleter], (async) => {
         var log = [];
         var workCount = 0;
         createSampler({
           metric: createCountingMetric(log),
           validator: createCountingValidator(2),
           execute: () => { ListWrapper.push(log, `w${workCount++}`); },
           prepare: null
         });
         sampler.sample().then((_) => {
           expect(log).toEqual([
             ['beginMeasure'],
             'w0',
             ['endMeasure', true, {'script': 0}],
             'w1',
             ['endMeasure', true, {'script': 1}],
           ]);
           async.done();
         });
       }));

    it('should only collect metrics for execute and ignore metrics from prepare',
       inject([AsyncTestCompleter], (async) => {
         var scriptTime = 0;
         var iterationCount = 1;
         createSampler({
           validator: createCountingValidator(2),
           metric: new MockMetric([], () => {
             var result = PromiseWrapper.resolve({'script': scriptTime});
             scriptTime = 0;
             return result;
           }),
           prepare: () => { scriptTime = 1 * iterationCount; },
           execute: () => {
             scriptTime = 10 * iterationCount;
             iterationCount++;
           }
         });
         sampler.sample().then((state) => {
           expect(state.completeSample.length).toBe(2);
           expect(state.completeSample[0]).toEqual(mv(0, 1000, {'script': 10}));
           expect(state.completeSample[1]).toEqual(mv(1, 1001, {'script': 20}));
           async.done();
         });
       }));

    it('should call the validator for every execution and store the valid sample',
       inject([AsyncTestCompleter], (async) => {
         var log = [];
         var validSample = [{}];

         createSampler({
           metric: createCountingMetric(),
           validator: createCountingValidator(2, validSample, log),
           execute: EMPTY_EXECUTE
         });
         sampler.sample().then((state) => {
           expect(state.validSample).toBe(validSample);
           // TODO(tbosch): Why does this fail??
           // expect(log).toEqual([
           //   ['validate', [{'script': 0}], null],
           //   ['validate', [{'script': 0}, {'script': 1}], validSample]
           // ]);

           expect(log.length).toBe(2);
           expect(log[0]).toEqual(['validate', [mv(0, 1000, {'script': 0})], null]);
           expect(log[1]).toEqual([
             'validate',
             [mv(0, 1000, {'script': 0}), mv(1, 1001, {'script': 1})],
             validSample
           ]);

           async.done();
         });
       }));

    it('should report the metric values', inject([AsyncTestCompleter], (async) => {
      var log = [];
      var validSample = [{}];
      createSampler({
        validator: createCountingValidator(2, validSample),
        metric: createCountingMetric(),
        reporter: new MockReporter(log),
        execute: EMPTY_EXECUTE
      });
      sampler.sample().then((_) => {
        // TODO(tbosch): Why does this fail??
        // expect(log).toEqual([
        //   ['reportMeasureValues', 0, {'script': 0}],
        //   ['reportMeasureValues', 1, {'script': 1}],
        //   ['reportSample', [{'script': 0}, {'script': 1}], validSample]
        // ]);
        expect(log.length).toBe(3);
        expect(log[0]).toEqual(['reportMeasureValues', mv(0, 1000, {'script': 0})]);
        expect(log[1]).toEqual(['reportMeasureValues', mv(1, 1001, {'script': 1})]);
        expect(log[2]).toEqual([
          'reportSample',
          [mv(0, 1000, {'script': 0}), mv(1, 1001, {'script': 1})],
          validSample
        ]);

        async.done();
      });
    }));

  });
}

function mv(runIndex, time, values) {
  return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
}

function createCountingValidator(count, validSample = null, log = null) {
  return new MockValidator(log, (completeSample) => {
    count--;
    if (count === 0) {
      return isPresent(validSample) ? validSample : completeSample;
    } else {
      return null;
    }
  });
}

function createCountingMetric(log = null) {
  var scriptTime = 0;
  return new MockMetric(log, () => { return {'script': scriptTime++}; });
}

class MockDriverAdapter extends WebDriverAdapter {
  private _log: List<any>;
  private _waitFor: Function;
  constructor(log = null, waitFor = null) {
    super();
    if (isBlank(log)) {
      log = [];
    }
    this._log = log;
    this._waitFor = waitFor;
  }
  waitFor(callback: Function): Promise<any> {
    if (isPresent(this._waitFor)) {
      return this._waitFor(callback);
    } else {
      return PromiseWrapper.resolve(callback());
    }
  }
}


class MockValidator extends Validator {
  private _log: List<any>;
  constructor(log = null, private _validate: Function = null) {
    super();
    if (isBlank(log)) {
      log = [];
    }
    this._log = log;
  }
  validate(completeSample: List<MeasureValues>): List<MeasureValues> {
    var stableSample = isPresent(this._validate) ? this._validate(completeSample) : completeSample;
    ListWrapper.push(this._log, ['validate', completeSample, stableSample]);
    return stableSample;
  }
}

class MockMetric extends Metric {
  private _log: List<any>;
  constructor(log = null, private _endMeasure: Function = null) {
    super();
    if (isBlank(log)) {
      log = [];
    }
    this._log = log;
  }
  beginMeasure() {
    ListWrapper.push(this._log, ['beginMeasure']);
    return PromiseWrapper.resolve(null);
  }
  endMeasure(restart) {
    var measureValues = isPresent(this._endMeasure) ? this._endMeasure() : {};
    ListWrapper.push(this._log, ['endMeasure', restart, measureValues]);
    return PromiseWrapper.resolve(measureValues);
  }
}

class MockReporter extends Reporter {
  _log: List<any>;
  constructor(log = null) {
    super();
    if (isBlank(log)) {
      log = [];
    }
    this._log = log;
  }
  reportMeasureValues(values): Promise<any> {
    ListWrapper.push(this._log, ['reportMeasureValues', values]);
    return PromiseWrapper.resolve(null);
  }
  reportSample(completeSample, validSample): Promise<any> {
    ListWrapper.push(this._log, ['reportSample', completeSample, validSample]);
    return PromiseWrapper.resolve(null);
  }
}
@ -1,15 +1,15 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';

import { Statistic } from 'benchpress/src/statistic';
import {Statistic} from 'benchpress/src/statistic';

import { NaN } from 'angular2/src/facade/math';
import {NaN} from 'angular2/src/facade/math';

export function main() {
  describe('statistic', () => {

    it('should calculate the mean', () => {
      expect(Statistic.calculateMean([])).toBeNaN();
      expect(Statistic.calculateMean([1,2,3])).toBe(2.0);
      expect(Statistic.calculateMean([1, 2, 3])).toBe(2.0);
    });

    it('should calculate the standard deviation', () => {
@ -27,7 +27,7 @@ export function main() {
    it('should calculate the regression slope', () => {
      expect(Statistic.calculateRegressionSlope([], NaN, [], NaN)).toBeNaN();
      expect(Statistic.calculateRegressionSlope([1], 1, [2], 2)).toBeNaN();
      expect(Statistic.calculateRegressionSlope([1,2], 1.5, [2,4], 3)).toBe(2.0);
      expect(Statistic.calculateRegressionSlope([1, 2], 1.5, [2, 4], 3)).toBe(2.0);
    });

  });
@ -1,47 +0,0 @@
import { isPresent } from 'angular2/src/facade/lang';

export class TraceEventFactory {
  _cat:string;
  _pid;

  constructor(cat, pid) {
    this._cat = cat;
    this._pid = pid;
  }

  create(ph, name, time, args = null) {
    var res = {
      'name': name,
      'cat': this._cat,
      'ph': ph,
      'ts': time,
      'pid': this._pid
    };
    if (isPresent(args)) {
      res['args'] = args;
    }
    return res;
  }

  markStart(name, time) {
    return this.create('b', name, time);
  }

  markEnd(name, time) {
    return this.create('e', name, time);
  }

  start(name, time, args = null) {
    return this.create('B', name, time, args);
  }

  end(name, time, args = null) {
    return this.create('E', name, time, args);
  }

  complete(name, time, duration, args = null) {
    var res = this.create('X', name, time, args);
    res['dur'] = duration;
    return res;
  }
}
33
modules/benchpress/test/trace_event_factory.ts
Normal file
@ -0,0 +1,33 @@
import {isPresent} from 'angular2/src/facade/lang';

export class TraceEventFactory {
  private _cat: string;
  private _pid;

  constructor(cat, pid) {
    this._cat = cat;
    this._pid = pid;
  }

  create(ph, name, time, args = null) {
    var res = {'name': name, 'cat': this._cat, 'ph': ph, 'ts': time, 'pid': this._pid};
    if (isPresent(args)) {
      res['args'] = args;
    }
    return res;
  }

  markStart(name, time) { return this.create('b', name, time); }

  markEnd(name, time) { return this.create('e', name, time); }

  start(name, time, args = null) { return this.create('B', name, time, args); }

  end(name, time, args = null) { return this.create('E', name, time, args); }

  complete(name, time, duration, args = null) {
    var res = this.create('X', name, time, args);
    res['dur'] = duration;
    return res;
  }
}
@ -1,9 +1,13 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import { Date, DateWrapper } from 'angular2/src/facade/lang';
import { ListWrapper } from 'angular2/src/facade/collection';
import {Date, DateWrapper} from 'angular2/src/facade/lang';
import {ListWrapper} from 'angular2/src/facade/collection';

import {
  Validator, RegressionSlopeValidator, Injector, bind, MeasureValues
  Validator,
  RegressionSlopeValidator,
  Injector,
  bind,
  MeasureValues
} from 'benchpress/common';

export function main() {
@ -12,43 +16,43 @@ export function main() {

    function createValidator({size, metric}) {
      validator = Injector.resolveAndCreate([
        RegressionSlopeValidator.BINDINGS,
        bind(RegressionSlopeValidator.METRIC).toValue(metric),
        bind(RegressionSlopeValidator.SAMPLE_SIZE).toValue(size)
      ]).get(RegressionSlopeValidator);
            RegressionSlopeValidator.BINDINGS,
            bind(RegressionSlopeValidator.METRIC).toValue(metric),
            bind(RegressionSlopeValidator.SAMPLE_SIZE).toValue(size)
          ])
          .get(RegressionSlopeValidator);
    }

    it('should return sampleSize and metric as description', () => {
      createValidator({size: 2, metric: 'script'});
      expect(validator.describe()).toEqual({
        'sampleSize': 2,
        'regressionSlopeMetric': 'script'
      });
      expect(validator.describe()).toEqual({'sampleSize': 2, 'regressionSlopeMetric': 'script'});
    });

    it('should return null while the completeSample is smaller than the given size', () => {
      createValidator({size: 2, metric: 'script'});
      expect(validator.validate([])).toBe(null);
      expect(validator.validate([mv(0,0,{})])).toBe(null);
      expect(validator.validate([mv(0, 0, {})])).toBe(null);
    });

    it('should return null while the regression slope is < 0', () => {
      createValidator({size: 2, metric: 'script'});
      expect(validator.validate([mv(0,0,{'script':2}), mv(1,1,{'script':1})])).toBe(null);
      expect(validator.validate([mv(0, 0, {'script': 2}), mv(1, 1, {'script': 1})])).toBe(null);
    });

    it('should return the last sampleSize runs when the regression slope is ==0', () => {
      createValidator({size: 2, metric: 'script'});
      var sample = [mv(0,0,{'script':1}), mv(1,1,{'script':1}), mv(2,2,{'script':1})];
      expect(validator.validate(ListWrapper.slice(sample,0,2))).toEqual(ListWrapper.slice(sample,0,2));
      expect(validator.validate(sample)).toEqual(ListWrapper.slice(sample,1,3));
      var sample = [mv(0, 0, {'script': 1}), mv(1, 1, {'script': 1}), mv(2, 2, {'script': 1})];
      expect(validator.validate(ListWrapper.slice(sample, 0, 2)))
          .toEqual(ListWrapper.slice(sample, 0, 2));
      expect(validator.validate(sample)).toEqual(ListWrapper.slice(sample, 1, 3));
    });

    it('should return the last sampleSize runs when the regression slope is >0', () => {
      createValidator({size: 2, metric: 'script'});
      var sample = [mv(0,0,{'script':1}), mv(1,1,{'script':2}), mv(2,2,{'script':3})];
      expect(validator.validate(ListWrapper.slice(sample,0,2))).toEqual(ListWrapper.slice(sample,0,2));
      expect(validator.validate(sample)).toEqual(ListWrapper.slice(sample,1,3));
      var sample = [mv(0, 0, {'script': 1}), mv(1, 1, {'script': 2}), mv(2, 2, {'script': 3})];
      expect(validator.validate(ListWrapper.slice(sample, 0, 2)))
          .toEqual(ListWrapper.slice(sample, 0, 2));
      expect(validator.validate(sample)).toEqual(ListWrapper.slice(sample, 1, 3));
    });

  });
@ -1,39 +1,36 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import { Date, DateWrapper } from 'angular2/src/facade/lang';
import { ListWrapper } from 'angular2/src/facade/collection';
import {Date, DateWrapper} from 'angular2/src/facade/lang';
import {ListWrapper} from 'angular2/src/facade/collection';

import {
  Validator, SizeValidator, Injector, bind, MeasureValues
} from 'benchpress/common';
import {Validator, SizeValidator, Injector, bind, MeasureValues} from 'benchpress/common';

export function main() {
  describe('size validator', () => {
    var validator;

    function createValidator(size) {
      validator = Injector.resolveAndCreate([
        SizeValidator.BINDINGS,
        bind(SizeValidator.SAMPLE_SIZE).toValue(size)
      ]).get(SizeValidator);
      validator =
          Injector.resolveAndCreate(
              [SizeValidator.BINDINGS, bind(SizeValidator.SAMPLE_SIZE).toValue(size)])
              .get(SizeValidator);
    }

    it('should return sampleSize as description', () => {
      createValidator(2);
      expect(validator.describe()).toEqual({
        'sampleSize': 2
      });
      expect(validator.describe()).toEqual({'sampleSize': 2});
    });

    it('should return null while the completeSample is smaller than the given size', () => {
      createValidator(2);
      expect(validator.validate([])).toBe(null);
      expect(validator.validate([mv(0,0,{})])).toBe(null);
      expect(validator.validate([mv(0, 0, {})])).toBe(null);
    });

    it('should return the last sampleSize runs when it has at least the given size', () => {
      createValidator(2);
      var sample = [mv(0,0,{'a':1}), mv(1,1,{'b':2}), mv(2,2,{'c':3})];
      expect(validator.validate(ListWrapper.slice(sample, 0, 2))).toEqual(ListWrapper.slice(sample, 0, 2));
      var sample = [mv(0, 0, {'a': 1}), mv(1, 1, {'b': 2}), mv(2, 2, {'c': 3})];
      expect(validator.validate(ListWrapper.slice(sample, 0, 2)))
          .toEqual(ListWrapper.slice(sample, 0, 2));
      expect(validator.validate(sample)).toEqual(ListWrapper.slice(sample, 1, 3));
    });

@ -1,62 +0,0 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import { StringMap, ListWrapper } from 'angular2/src/facade/collection';
import { isPresent, StringWrapper } from 'angular2/src/facade/lang';
import { PromiseWrapper } from 'angular2/src/facade/async';

import { WebDriverExtension, bind, Injector, Options } from 'benchpress/common';

export function main() {
  function createExtension(ids, caps) {
    return Injector.resolveAndCreate([
      ListWrapper.map(ids, (id) => bind(id).toValue(new MockExtension(id)) ),
      bind(Options.CAPABILITIES).toValue(caps),
      WebDriverExtension.bindTo(ids)
    ]).asyncGet(WebDriverExtension);
  }

  describe('WebDriverExtension.bindTo', () => {

    it('should bind the extension that matches the capabilities', inject([AsyncTestCompleter], (async) => {
      createExtension(['m1', 'm2', 'm3'], {'browser': 'm2'}).then( (m) => {
        expect(m.id).toEqual('m2');
        async.done();
      });
    }));

    it('should throw if there is no match', inject([AsyncTestCompleter], (async) => {
      PromiseWrapper.catchError(
        createExtension(['m1'], {'browser': 'm2'}),
        (err) => {
          expect(isPresent(err)).toBe(true);
          async.done();
        }
      );
    }));

  });
}

class MockExtension extends WebDriverExtension {
  id:string;

  constructor(id) {
    super();
    this.id = id;
  }

  supports(capabilities:StringMap):boolean {
    return StringWrapper.equals(capabilities['browser'], this.id);
  }
}
62
modules/benchpress/test/web_driver_extension_spec.ts
Normal file
@ -0,0 +1,62 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import {StringMap, ListWrapper} from 'angular2/src/facade/collection';
import {isPresent, StringWrapper} from 'angular2/src/facade/lang';
import {PromiseWrapper} from 'angular2/src/facade/async';

import {WebDriverExtension, bind, Injector, Options} from 'benchpress/common';

export function main() {
  function createExtension(ids, caps) {
    return Injector.resolveAndCreate([
        ListWrapper.map(ids, (id) => bind(id).toValue(new MockExtension(id))),
        bind(Options.CAPABILITIES).toValue(caps),
        WebDriverExtension.bindTo(ids)
      ])
      .asyncGet(WebDriverExtension);
  }

  describe('WebDriverExtension.bindTo', () => {

    it('should bind the extension that matches the capabilities',
       inject([AsyncTestCompleter], (async) => {
         createExtension(['m1', 'm2', 'm3'], {'browser': 'm2'})
           .then((m) => {
             expect(m.id).toEqual('m2');
             async.done();
           });
       }));

    it('should throw if there is no match', inject([AsyncTestCompleter], (async) => {
      PromiseWrapper.catchError(createExtension(['m1'], {'browser': 'm2'}), (err) => {
        expect(isPresent(err)).toBe(true);
        async.done();
      });
    }));

  });
}

class MockExtension extends WebDriverExtension {
  id: string;

  constructor(id) {
    super();
    this.id = id;
  }

  supports(capabilities: StringMap<string, any>): boolean {
    return StringWrapper.equals(capabilities['browser'], this.id);
  }
}
@ -1,270 +0,0 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import { ListWrapper } from 'angular2/src/facade/collection';
import { PromiseWrapper } from 'angular2/src/facade/async';
import { Json, isBlank } from 'angular2/src/facade/lang';

import {
  WebDriverExtension, ChromeDriverExtension,
  WebDriverAdapter, Injector, bind
} from 'benchpress/common';

import { TraceEventFactory } from '../trace_event_factory';

export function main() {
  describe('chrome driver extension', () => {
    var log;
    var extension;

    var blinkEvents = new TraceEventFactory('blink.console', 'pid0');
    var v8Events = new TraceEventFactory('v8', 'pid0');
    var v8EventsOtherProcess = new TraceEventFactory('v8', 'pid1');
    var chromeTimelineEvents = new TraceEventFactory('disabled-by-default-devtools.timeline', 'pid0');
    var normEvents = new TraceEventFactory('timeline', 'pid0');

    function createExtension(perfRecords = null, messageMethod = 'Tracing.dataCollected') {
      if (isBlank(perfRecords)) {
        perfRecords = [];
      }
      log = [];
      extension = Injector.resolveAndCreate([
        ChromeDriverExtension.BINDINGS,
        bind(WebDriverAdapter).toValue(new MockDriverAdapter(log, perfRecords, messageMethod))
      ]).get(ChromeDriverExtension);
      return extension;
    }

    it('should force gc via window.gc()', inject([AsyncTestCompleter], (async) => {
      createExtension().gc().then( (_) => {
        expect(log).toEqual([['executeScript', 'window.gc()']]);
        async.done();
      });
    }));

    it('should mark the timeline via console.time()', inject([AsyncTestCompleter], (async) => {
      createExtension().timeBegin('someName').then( (_) => {
        expect(log).toEqual([['executeScript', `console.time('someName');`]]);
        async.done();
      });
    }));

    it('should mark the timeline via console.timeEnd()', inject([AsyncTestCompleter], (async) => {
      createExtension().timeEnd('someName').then( (_) => {
        expect(log).toEqual([['executeScript', `console.timeEnd('someName');`]]);
        async.done();
      });
    }));

    it('should mark the timeline via console.time() and console.timeEnd()', inject([AsyncTestCompleter], (async) => {
      createExtension().timeEnd('name1', 'name2').then( (_) => {
        expect(log).toEqual([['executeScript', `console.timeEnd('name1');console.time('name2');`]]);
        async.done();
      });
    }));

    describe('readPerfLog', () => {

      it('should execute a dummy script before reading them', inject([AsyncTestCompleter], (async) => {
        // TODO(tbosch): This seems to be a bug in ChromeDriver:
        // Sometimes it does not report the newest events of the performance log
        // to the WebDriver client unless a script is executed...
        createExtension([]).readPerfLog().then( (_) => {
          expect(log).toEqual([ [ 'executeScript', '1+1' ], [ 'logs', 'performance' ] ]);
          async.done();
        });
      }));

      it('should normalize times to ms and forward ph and pid event properties', inject([AsyncTestCompleter], (async) => {
        createExtension([
          chromeTimelineEvents.complete('FunctionCall', 1100, 5500, null)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.complete('script', 1.1, 5.5, null),
          ]);
          async.done();
        });
      }));

      it('should normalize "tdur" to "dur"', inject([AsyncTestCompleter], (async) => {
        var event = chromeTimelineEvents.create('X', 'FunctionCall', 1100, null);
        event['tdur'] = 5500;
        createExtension([event]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.complete('script', 1.1, 5.5, null),
          ]);
          async.done();
        });
      }));

      it('should report FunctionCall events as "script"', inject([AsyncTestCompleter], (async) => {
        createExtension([
          chromeTimelineEvents.start('FunctionCall', 0)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.start('script', 0),
          ]);
          async.done();
        });
      }));

      it('should ignore FunctionCalls from webdriver', inject([AsyncTestCompleter], (async) => {
        createExtension([
          chromeTimelineEvents.start('FunctionCall', 0, {'data': {'scriptName': 'InjectedScript'}})
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([]);
          async.done();
        });
      }));

      it('should report begin timestamps', inject([AsyncTestCompleter], (async) => {
        createExtension([
          blinkEvents.create('S', 'someName', 1000)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.markStart('someName', 1.0)
          ]);
          async.done();
        });
      }));

      it('should report end timestamps', inject([AsyncTestCompleter], (async) => {
        createExtension([
          blinkEvents.create('F', 'someName', 1000)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.markEnd('someName', 1.0)
          ]);
          async.done();
        });
      }));

      it('should report gc', inject([AsyncTestCompleter], (async) => {
        createExtension([
          chromeTimelineEvents.start('GCEvent', 1000, {'usedHeapSizeBefore': 1000}),
          chromeTimelineEvents.end('GCEvent', 2000, {'usedHeapSizeAfter': 0}),
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.start('gc', 1.0, {'usedHeapSize': 1000}),
            normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': false}),
          ]);
          async.done();
        });
      }));

      it('should report major gc', inject([AsyncTestCompleter], (async) => {
        createExtension([
          chromeTimelineEvents.start('GCEvent', 1000, {'usedHeapSizeBefore': 1000}),
          v8EventsOtherProcess.start('majorGC', 1100, null),
          v8EventsOtherProcess.end('majorGC', 1200, null),
          chromeTimelineEvents.end('GCEvent', 2000, {'usedHeapSizeAfter': 0}),
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.start('gc', 1.0, {'usedHeapSize': 1000}),
            normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': false}),
          ]);
          async.done();
        });
      }));

      it('should ignore major gc from different processes', inject([AsyncTestCompleter], (async) => {
        createExtension([
          chromeTimelineEvents.start('GCEvent', 1000, {'usedHeapSizeBefore': 1000}),
          v8Events.start('majorGC', 1100, null),
          v8Events.end('majorGC', 1200, null),
          chromeTimelineEvents.end('GCEvent', 2000, {'usedHeapSizeAfter': 0}),
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.start('gc', 1.0, {'usedHeapSize': 1000}),
            normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': true}),
          ]);
          async.done();
        });
      }));

      ['RecalculateStyles', 'Layout', 'UpdateLayerTree', 'Paint', 'Rasterize', 'CompositeLayers'].forEach( (recordType) => {
        it(`should report ${recordType} as "render"`, inject([AsyncTestCompleter], (async) => {
          createExtension([
            chromeTimelineEvents.start(recordType, 1234),
            chromeTimelineEvents.end(recordType, 2345)
          ]).readPerfLog().then( (events) => {
            expect(events).toEqual([
              normEvents.start('render', 1.234),
              normEvents.end('render', 2.345),
            ]);
            async.done();
          });
        }));
      });

      it('should throw an error on buffer overflow', inject([AsyncTestCompleter], (async) => {
        PromiseWrapper.catchError(createExtension([
          chromeTimelineEvents.start('FunctionCall', 1234),
        ], 'Tracing.bufferUsage').readPerfLog(), (err) => {
          expect( () => {
            throw err;
          }).toThrowError('The DevTools trace buffer filled during the test!');
          async.done();
        });
      }));

      it('should match chrome browsers', () => {
        expect(createExtension().supports({
          'browserName': 'chrome'
        })).toBe(true);

        expect(createExtension().supports({
          'browserName': 'Chrome'
        })).toBe(true);
      });

    });

  });
}

class MockDriverAdapter extends WebDriverAdapter {
  _log:List;
  _events:List;
  _messageMethod:string;
  constructor(log, events, messageMethod) {
    super();
    this._log = log;
    this._events = events;
    this._messageMethod = messageMethod;
  }

  executeScript(script) {
    ListWrapper.push(this._log, ['executeScript', script]);
    return PromiseWrapper.resolve(null);
  }

  logs(type) {
    ListWrapper.push(this._log, ['logs', type]);
    if (type === 'performance') {
      return PromiseWrapper.resolve(this._events.map( (event) => {
        return {
          'message': Json.stringify({
            'message': {
              'method': this._messageMethod,
              'params': event
            }
          })
        };
      }));
    } else {
      return null;
    }
  }

}
@ -0,0 +1,281 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import {ListWrapper} from 'angular2/src/facade/collection';
import {PromiseWrapper} from 'angular2/src/facade/async';
import {Json, isBlank} from 'angular2/src/facade/lang';

import {
  WebDriverExtension,
  ChromeDriverExtension,
  WebDriverAdapter,
  Injector,
  bind
} from 'benchpress/common';

import {TraceEventFactory} from '../trace_event_factory';

export function main() {
  describe('chrome driver extension', () => {
    var log;
    var extension;

    var blinkEvents = new TraceEventFactory('blink.console', 'pid0');
    var v8Events = new TraceEventFactory('v8', 'pid0');
    var v8EventsOtherProcess = new TraceEventFactory('v8', 'pid1');
    var chromeTimelineEvents =
        new TraceEventFactory('disabled-by-default-devtools.timeline', 'pid0');
    var normEvents = new TraceEventFactory('timeline', 'pid0');

    function createExtension(perfRecords = null, messageMethod = 'Tracing.dataCollected') {
      if (isBlank(perfRecords)) {
        perfRecords = [];
      }
      log = [];
      extension = Injector.resolveAndCreate([
          ChromeDriverExtension.BINDINGS,
          bind(WebDriverAdapter)
              .toValue(new MockDriverAdapter(log, perfRecords, messageMethod))
        ])
        .get(ChromeDriverExtension);
      return extension;
    }

    it('should force gc via window.gc()', inject([AsyncTestCompleter], (async) => {
      createExtension().gc().then((_) => {
        expect(log).toEqual([['executeScript', 'window.gc()']]);
        async.done();
      });
    }));

    it('should mark the timeline via console.time()', inject([AsyncTestCompleter], (async) => {
      createExtension()
        .timeBegin('someName')
        .then((_) => {
          expect(log).toEqual([['executeScript', `console.time('someName');`]]);
          async.done();
        });
    }));

    it('should mark the timeline via console.timeEnd()', inject([AsyncTestCompleter], (async) => {
      createExtension()
        .timeEnd('someName')
        .then((_) => {
          expect(log).toEqual([['executeScript', `console.timeEnd('someName');`]]);
          async.done();
        });
    }));

    it('should mark the timeline via console.time() and console.timeEnd()',
       inject([AsyncTestCompleter], (async) => {
         createExtension()
           .timeEnd('name1', 'name2')
           .then((_) => {
             expect(log).toEqual(
                 [['executeScript', `console.timeEnd('name1');console.time('name2');`]]);
             async.done();
           });
       }));

    describe('readPerfLog', () => {

      it('should execute a dummy script before reading them',
         inject([AsyncTestCompleter], (async) => {
           // TODO(tbosch): This seems to be a bug in ChromeDriver:
           // Sometimes it does not report the newest events of the performance log
           // to the WebDriver client unless a script is executed...
           createExtension([]).readPerfLog().then((_) => {
             expect(log).toEqual([['executeScript', '1+1'], ['logs', 'performance']]);
             async.done();
           });
         }));

      it('should normalize times to ms and forward ph and pid event properties',
         inject([AsyncTestCompleter], (async) => {
           createExtension([chromeTimelineEvents.complete('FunctionCall', 1100, 5500, null)])
             .readPerfLog()
             .then((events) => {
               expect(events).toEqual([
                 normEvents.complete('script', 1.1, 5.5, null),
               ]);
               async.done();
             });
         }));

      it('should normalize "tdur" to "dur"', inject([AsyncTestCompleter], (async) => {
        var event = chromeTimelineEvents.create('X', 'FunctionCall', 1100, null);
        event['tdur'] = 5500;
        createExtension([event]).readPerfLog().then((events) => {
          expect(events).toEqual([
            normEvents.complete('script', 1.1, 5.5, null),
          ]);
          async.done();
        });
      }));

      it('should report FunctionCall events as "script"', inject([AsyncTestCompleter], (async) => {
        createExtension([chromeTimelineEvents.start('FunctionCall', 0)])
          .readPerfLog()
          .then((events) => {
            expect(events).toEqual([
              normEvents.start('script', 0),
            ]);
            async.done();
          });
      }));

      it('should ignore FunctionCalls from webdriver', inject([AsyncTestCompleter], (async) => {
        createExtension([chromeTimelineEvents.start(
                            'FunctionCall', 0, {'data': {'scriptName': 'InjectedScript'}})])
          .readPerfLog()
          .then((events) => {
            expect(events).toEqual([]);
            async.done();
          });
      }));

      it('should report begin timestamps', inject([AsyncTestCompleter], (async) => {
        createExtension([blinkEvents.create('S', 'someName', 1000)])
          .readPerfLog()
          .then((events) => {
            expect(events).toEqual([normEvents.markStart('someName', 1.0)]);
            async.done();
          });
      }));

      it('should report end timestamps', inject([AsyncTestCompleter], (async) => {
        createExtension([blinkEvents.create('F', 'someName', 1000)])
          .readPerfLog()
          .then((events) => {
            expect(events).toEqual([normEvents.markEnd('someName', 1.0)]);
            async.done();
          });
      }));

it('should report gc', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([
|
||||
chromeTimelineEvents.start('GCEvent', 1000, {'usedHeapSizeBefore': 1000}),
|
||||
chromeTimelineEvents.end('GCEvent', 2000, {'usedHeapSizeAfter': 0}),
|
||||
])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('gc', 1.0, {'usedHeapSize': 1000}),
|
||||
normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': false}),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
      it('should report major gc', inject([AsyncTestCompleter], (async) => {
           createExtension([
             chromeTimelineEvents.start('GCEvent', 1000, {'usedHeapSizeBefore': 1000}),
             v8Events.start('majorGC', 1100, null),
             v8Events.end('majorGC', 1200, null),
             chromeTimelineEvents.end('GCEvent', 2000, {'usedHeapSizeAfter': 0}),
           ])
               .readPerfLog()
               .then((events) => {
                 expect(events).toEqual([
                   normEvents.start('gc', 1.0, {'usedHeapSize': 1000}),
                   normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': true}),
                 ]);
                 async.done();
               });
         }));

      it('should ignore major gc from different processes',
         inject([AsyncTestCompleter], (async) => {
           createExtension([
             chromeTimelineEvents.start('GCEvent', 1000, {'usedHeapSizeBefore': 1000}),
             v8EventsOtherProcess.start('majorGC', 1100, null),
             v8EventsOtherProcess.end('majorGC', 1200, null),
             chromeTimelineEvents.end('GCEvent', 2000, {'usedHeapSizeAfter': 0}),
           ])
               .readPerfLog()
               .then((events) => {
                 expect(events).toEqual([
                   normEvents.start('gc', 1.0, {'usedHeapSize': 1000}),
                   normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': false}),
                 ]);
                 async.done();
               });
         }));
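      // The two GC tests above hinge on process attribution: a 'majorGC' v8 event
      // only upgrades the surrounding GCEvent to a major GC when it comes from the
      // same pid. A minimal sketch of that check (the event shape and the
      // `isMajorGc` helper are assumptions for illustration, not the extension's
      // actual API):
      //
      //   interface TraceEvent { name: string; pid: string; ts: number; }
      //
      //   function isMajorGc(gcPid: string, start: number, end: number,
      //                      events: TraceEvent[]): boolean {
      //     // a same-process 'majorGC' inside the GCEvent window marks it major;
      //     // events from another process (e.g. another tab) are ignored
      //     return events.some(e => e.name === 'majorGC' && e.pid === gcPid &&
      //                             e.ts >= start && e.ts <= end);
      //   }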
      ['RecalculateStyles', 'Layout', 'UpdateLayerTree', 'Paint', 'Rasterize', 'CompositeLayers']
          .forEach((recordType) => {
            it(`should report ${recordType} as "render"`, inject([AsyncTestCompleter], (async) => {
                 createExtension([
                   chromeTimelineEvents.start(recordType, 1234),
                   chromeTimelineEvents.end(recordType, 2345)
                 ])
                     .readPerfLog()
                     .then((events) => {
                       expect(events).toEqual([
                         normEvents.start('render', 1.234),
                         normEvents.end('render', 2.345),
                       ]);
                       async.done();
                     });
               }));
          });

      it('should throw an error on buffer overflow', inject([AsyncTestCompleter], (async) => {
           PromiseWrapper.catchError(
               createExtension([
                 chromeTimelineEvents.start('FunctionCall', 1234),
               ],
                               'Tracing.bufferUsage')
                   .readPerfLog(),
               (err) => {
                 expect(() => { throw err; })
                     .toThrowError('The DevTools trace buffer filled during the test!');
                 async.done();
               });
         }));

      it('should match chrome browsers', () => {
        expect(createExtension().supports({'browserName': 'chrome'})).toBe(true);

        expect(createExtension().supports({'browserName': 'Chrome'})).toBe(true);
      });

    });

  });
}

class MockDriverAdapter extends WebDriverAdapter {
  constructor(private _log: List<any>, private _events: List<any>, private _messageMethod: string) {
    super();
  }

  executeScript(script) {
    ListWrapper.push(this._log, ['executeScript', script]);
    return PromiseWrapper.resolve(null);
  }

  logs(type) {
    ListWrapper.push(this._log, ['logs', type]);
    if (type === 'performance') {
      return PromiseWrapper.resolve(this._events.map((event) => {
        return {
          'message':
              Json.stringify({'message': {'method': this._messageMethod, 'params': event}})
        };
      }));
    } else {
      return null;
    }
  }
}
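Note how MockDriverAdapter fabricates the WebDriver 'performance' log: each entry's
'message' field is itself a JSON string wrapping the DevTools payload, so a consumer has
to unwrap twice. A minimal standalone sketch of that unwrapping (the method name
'Tracing.dataCollected' and the variable names are assumptions for illustration, not the
extension's actual internals):

    // one log entry, shaped like what the mock above produces
    const entry = {
      message: JSON.stringify(
          {'message': {'method': 'Tracing.dataCollected', 'params': {'name': 'FunctionCall', 'ts': 1100}}})
    };

    // double unwrap: parse the outer JSON string, then read the inner message
    const inner = JSON.parse(entry.message)['message'];
    if (inner['method'] === 'Tracing.dataCollected') {
      console.log('trace event:', inner['params']);  // -> {name: 'FunctionCall', ts: 1100}
    }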
@ -1,246 +0,0 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import { ListWrapper } from 'angular2/src/facade/collection';
import { PromiseWrapper } from 'angular2/src/facade/async';
import { Json, isBlank, isPresent } from 'angular2/src/facade/lang';

import {
  WebDriverExtension, IOsDriverExtension,
  WebDriverAdapter, Injector, bind
} from 'benchpress/common';

import { TraceEventFactory } from '../trace_event_factory';

export function main() {
  describe('ios driver extension', () => {
    var log;
    var extension;

    var normEvents = new TraceEventFactory('timeline', 'pid0');

    function createExtension(perfRecords = null) {
      if (isBlank(perfRecords)) {
        perfRecords = [];
      }
      log = [];
      extension = Injector.resolveAndCreate([
        IOsDriverExtension.BINDINGS,
        bind(WebDriverAdapter).toValue(new MockDriverAdapter(log, perfRecords))
      ]).get(IOsDriverExtension);
      return extension;
    }

    it('should throw on forcing gc', () => {
      expect( () => createExtension().gc() ).toThrowError('Force GC is not supported on iOS');
    });

    it('should mark the timeline via console.time()', inject([AsyncTestCompleter], (async) => {
      createExtension().timeBegin('someName').then( (_) => {
        expect(log).toEqual([['executeScript', `console.time('someName');`]]);
        async.done();
      });
    }));

    it('should mark the timeline via console.timeEnd()', inject([AsyncTestCompleter], (async) => {
      createExtension().timeEnd('someName').then( (_) => {
        expect(log).toEqual([['executeScript', `console.timeEnd('someName');`]]);
        async.done();
      });
    }));

    it('should mark the timeline via console.time() and console.timeEnd()', inject([AsyncTestCompleter], (async) => {
      createExtension().timeEnd('name1', 'name2').then( (_) => {
        expect(log).toEqual([['executeScript', `console.timeEnd('name1');console.time('name2');`]]);
        async.done();
      });
    }));

    describe('readPerfLog', () => {

      it('should execute a dummy script before reading them', inject([AsyncTestCompleter], (async) => {
        // TODO(tbosch): This seems to be a bug in ChromeDriver:
        // Sometimes it does not report the newest events of the performance log
        // to the WebDriver client unless a script is executed...
        createExtension([]).readPerfLog().then( (_) => {
          expect(log).toEqual([ [ 'executeScript', '1+1' ], [ 'logs', 'performance' ] ]);
          async.done();
        });
      }));

      it('should report FunctionCall records as "script"', inject([AsyncTestCompleter], (async) => {
        createExtension([
          durationRecord('FunctionCall', 1, 5)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.start('script', 1),
            normEvents.end('script', 5)
          ]);
          async.done();
        });
      }));

      it('should ignore FunctionCalls from webdriver', inject([AsyncTestCompleter], (async) => {
        createExtension([
          internalScriptRecord(1, 5)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([]);
          async.done();
        });
      }));

      it('should report begin time', inject([AsyncTestCompleter], (async) => {
        createExtension([
          timeBeginRecord('someName', 12)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.markStart('someName', 12)
          ]);
          async.done();
        });
      }));

      it('should report end timestamps', inject([AsyncTestCompleter], (async) => {
        createExtension([
          timeEndRecord('someName', 12)
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.markEnd('someName', 12)
          ]);
          async.done();
        });
      }));

      ['RecalculateStyles', 'Layout', 'UpdateLayerTree', 'Paint', 'Rasterize', 'CompositeLayers'].forEach( (recordType) => {
        it(`should report ${recordType}`, inject([AsyncTestCompleter], (async) => {
          createExtension([
            durationRecord(recordType, 0, 1)
          ]).readPerfLog().then( (events) => {
            expect(events).toEqual([
              normEvents.start('render', 0),
              normEvents.end('render', 1),
            ]);
            async.done();
          });
        }));
      });


      it('should walk children', inject([AsyncTestCompleter], (async) => {
        createExtension([
          durationRecord('FunctionCall', 1, 5, [
            timeBeginRecord('someName', 2)
          ])
        ]).readPerfLog().then( (events) => {
          expect(events).toEqual([
            normEvents.start('script', 1),
            normEvents.markStart('someName', 2),
            normEvents.end('script', 5)
          ]);
          async.done();
        });
      }));

      it('should match safari browsers', () => {
        expect(createExtension().supports({
          'browserName': 'safari'
        })).toBe(true);

        expect(createExtension().supports({
          'browserName': 'Safari'
        })).toBe(true);
      });

    });

  });
}

function timeBeginRecord(name, time) {
  return {
    'type': 'Time',
    'startTime': time,
    'data': {
      'message': name
    }
  };
}

function timeEndRecord(name, time) {
  return {
    'type': 'TimeEnd',
    'startTime': time,
    'data': {
      'message': name
    }
  };
}

function durationRecord(type, startTime, endTime, children = null) {
  if (isBlank(children)) {
    children = [];
  }
  return {
    'type': type,
    'startTime': startTime,
    'endTime': endTime,
    'children': children
  };
}

function internalScriptRecord(startTime, endTime) {
  return {
    'type': 'FunctionCall',
    'startTime': startTime,
    'endTime': endTime,
    'data': {
      'scriptName': 'InjectedScript'
    }
  };
}

class MockDriverAdapter extends WebDriverAdapter {
  _log:List;
  _perfRecords:List;
  constructor(log, perfRecords) {
    super();
    this._log = log;
    this._perfRecords = perfRecords;
  }

  executeScript(script) {
    ListWrapper.push(this._log, ['executeScript', script]);
    return PromiseWrapper.resolve(null);
  }

  logs(type) {
    ListWrapper.push(this._log, ['logs', type]);
    if (type === 'performance') {
      return PromiseWrapper.resolve(this._perfRecords.map(function(record) {
        return {
          'message': Json.stringify({
            'message': {
              'method': 'Timeline.eventRecorded',
              'params': {
                'record': record
              }
            }
          })
        };
      }));
    } else {
      return null;
    }
  }

}
216
modules/benchpress/test/webdriver/ios_driver_extension_spec.ts
Normal file
@ -0,0 +1,216 @@
import {
  afterEach,
  AsyncTestCompleter,
  beforeEach,
  ddescribe,
  describe,
  expect,
  iit,
  inject,
  it,
  xit,
} from 'angular2/test_lib';

import {ListWrapper} from 'angular2/src/facade/collection';
import {PromiseWrapper} from 'angular2/src/facade/async';
import {Json, isBlank, isPresent} from 'angular2/src/facade/lang';

import {
  WebDriverExtension,
  IOsDriverExtension,
  WebDriverAdapter,
  Injector,
  bind
} from 'benchpress/common';

import {TraceEventFactory} from '../trace_event_factory';

export function main() {
  describe('ios driver extension', () => {
    var log;
    var extension;

    var normEvents = new TraceEventFactory('timeline', 'pid0');

    function createExtension(perfRecords = null) {
      if (isBlank(perfRecords)) {
        perfRecords = [];
      }
      log = [];
      extension = Injector.resolveAndCreate([
                            IOsDriverExtension.BINDINGS,
                            bind(WebDriverAdapter).toValue(new MockDriverAdapter(log, perfRecords))
                          ])
                      .get(IOsDriverExtension);
      return extension;
    }

    it('should throw on forcing gc', () => {
      expect(() => createExtension().gc()).toThrowError('Force GC is not supported on iOS');
    });

    it('should mark the timeline via console.time()', inject([AsyncTestCompleter], (async) => {
      createExtension()
          .timeBegin('someName')
          .then((_) => {
            expect(log).toEqual([['executeScript', `console.time('someName');`]]);
            async.done();
          });
    }));

    it('should mark the timeline via console.timeEnd()', inject([AsyncTestCompleter], (async) => {
      createExtension()
          .timeEnd('someName')
          .then((_) => {
            expect(log).toEqual([['executeScript', `console.timeEnd('someName');`]]);
            async.done();
          });
    }));

    it('should mark the timeline via console.time() and console.timeEnd()',
       inject([AsyncTestCompleter], (async) => {
         createExtension()
             .timeEnd('name1', 'name2')
             .then((_) => {
               expect(log).toEqual(
                   [['executeScript', `console.timeEnd('name1');console.time('name2');`]]);
               async.done();
             });
       }));

    describe('readPerfLog', () => {

      it('should execute a dummy script before reading them',
         inject([AsyncTestCompleter], (async) => {
           // TODO(tbosch): This seems to be a bug in ChromeDriver:
           // Sometimes it does not report the newest events of the performance log
           // to the WebDriver client unless a script is executed...
           createExtension([]).readPerfLog().then((_) => {
             expect(log).toEqual([['executeScript', '1+1'], ['logs', 'performance']]);
             async.done();
           });
         }));

      it('should report FunctionCall records as "script"', inject([AsyncTestCompleter], (async) => {
           createExtension([durationRecord('FunctionCall', 1, 5)])
               .readPerfLog()
               .then((events) => {
                 expect(events)
                     .toEqual([normEvents.start('script', 1), normEvents.end('script', 5)]);
                 async.done();
               });
         }));

      it('should ignore FunctionCalls from webdriver', inject([AsyncTestCompleter], (async) => {
           createExtension([internalScriptRecord(1, 5)])
               .readPerfLog()
               .then((events) => {
                 expect(events).toEqual([]);
                 async.done();
               });
         }));

      it('should report begin time', inject([AsyncTestCompleter], (async) => {
           createExtension([timeBeginRecord('someName', 12)])
               .readPerfLog()
               .then((events) => {
                 expect(events).toEqual([normEvents.markStart('someName', 12)]);
                 async.done();
               });
         }));

      it('should report end timestamps', inject([AsyncTestCompleter], (async) => {
           createExtension([timeEndRecord('someName', 12)])
               .readPerfLog()
               .then((events) => {
                 expect(events).toEqual([normEvents.markEnd('someName', 12)]);
                 async.done();
               });
         }));

      ['RecalculateStyles', 'Layout', 'UpdateLayerTree', 'Paint', 'Rasterize', 'CompositeLayers']
          .forEach((recordType) => {
            it(`should report ${recordType}`, inject([AsyncTestCompleter], (async) => {
                 createExtension([durationRecord(recordType, 0, 1)])
                     .readPerfLog()
                     .then((events) => {
                       expect(events).toEqual([
                         normEvents.start('render', 0),
                         normEvents.end('render', 1),
                       ]);
                       async.done();
                     });
               }));
          });


      it('should walk children', inject([AsyncTestCompleter], (async) => {
           createExtension([durationRecord('FunctionCall', 1, 5, [timeBeginRecord('someName', 2)])])
               .readPerfLog()
               .then((events) => {
                 expect(events).toEqual([
                   normEvents.start('script', 1),
                   normEvents.markStart('someName', 2),
                   normEvents.end('script', 5)
                 ]);
                 async.done();
               });
         }));

      it('should match safari browsers', () => {
        expect(createExtension().supports({'browserName': 'safari'})).toBe(true);

        expect(createExtension().supports({'browserName': 'Safari'})).toBe(true);
      });

    });

  });
}

function timeBeginRecord(name, time) {
  return {'type': 'Time', 'startTime': time, 'data': {'message': name}};
}

function timeEndRecord(name, time) {
  return {'type': 'TimeEnd', 'startTime': time, 'data': {'message': name}};
}

function durationRecord(type, startTime, endTime, children = null) {
  if (isBlank(children)) {
    children = [];
  }
  return {'type': type, 'startTime': startTime, 'endTime': endTime, 'children': children};
}

function internalScriptRecord(startTime, endTime) {
  return {
    'type': 'FunctionCall',
    'startTime': startTime,
    'endTime': endTime,
    'data': {'scriptName': 'InjectedScript'}
  };
}

class MockDriverAdapter extends WebDriverAdapter {
  constructor(private _log: List<any>, private _perfRecords: List<any>) { super(); }

  executeScript(script) {
    ListWrapper.push(this._log, ['executeScript', script]);
    return PromiseWrapper.resolve(null);
  }

  logs(type) {
    ListWrapper.push(this._log, ['logs', type]);
    if (type === 'performance') {
      return PromiseWrapper.resolve(this._perfRecords.map(function(record) {
        return {
          'message': Json.stringify(
              {'message': {'method': 'Timeline.eventRecorded', 'params': {'record': record}}})
        };
      }));
    } else {
      return null;
    }
  }
}
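The 'should walk children' test pins down the traversal order the iOS extension must
produce: a parent duration record opens before its children's events and closes after
them. A minimal standalone sketch of such a depth-first walk (the record shape and the
`walk` helper are assumptions for illustration, not the extension's actual API):

    interface TimelineRecord {
      type: string;
      startTime: number;
      endTime?: number;
      data?: {message?: string};
      children?: TimelineRecord[];
    }

    function walk(records: TimelineRecord[], out: string[]): void {
      for (const r of records) {
        if (r.type === 'FunctionCall') out.push(`start script @${r.startTime}`);
        if (r.type === 'Time' && r.data) out.push(`mark ${r.data.message} @${r.startTime}`);
        walk(r.children || [], out);  // recurse before emitting the parent's end event
        if (r.type === 'FunctionCall') out.push(`end script @${r.endTime}`);
      }
    }

    // walk([durationRecord('FunctionCall', 1, 5, [timeBeginRecord('someName', 2)])], out)
    // yields: start script @1, mark someName @2, end script @5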