feat(benchpress): rewritten implementation
Limitations: - cloud reporter is not yet supported any more
This commit is contained in:
16
modules/benchpress/benchpress.js
Normal file
16
modules/benchpress/benchpress.js
Normal file
@ -0,0 +1,16 @@
|
||||
export { Sampler, SampleState } from './src/sampler';
|
||||
export { Metric } from './src/metric';
|
||||
export { Validator } from './src/validator';
|
||||
export { Reporter } from './src/reporter';
|
||||
export { WebDriverExtension } from './src/web_driver_extension';
|
||||
export { WebDriverAdapter } from './src/web_driver_adapter';
|
||||
export { SizeValidator } from './src/validator/size_validator';
|
||||
export { RegressionSlopeValidator } from './src/validator/regression_slope_validator';
|
||||
export { ConsoleReporter } from './src/reporter/console_reporter';
|
||||
export { SampleDescription } from './src/sample_description';
|
||||
export { PerflogMetric } from './src/metric/perflog_metric';
|
||||
export { ChromeDriverExtension } from './src/webdriver/chrome_driver_extension';
|
||||
export { Runner } from './src/runner';
|
||||
export { Options } from './src/sample_options';
|
||||
|
||||
export { bind, Injector, OpaqueToken } from 'angular2/di';
|
21
modules/benchpress/package.json
Normal file
21
modules/benchpress/package.json
Normal file
@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "angular-benchpress2",
|
||||
"version": "<%= packageJson.version %>",
|
||||
"description": "Angular-Benchpress - a framework for e2e performance tests",
|
||||
"homepage": "<%= packageJson.homepage %>",
|
||||
"bugs": "<%= packageJson.bugs %>",
|
||||
"contributors": <%= JSON.stringify(packageJson.contributors) %>,
|
||||
"license": "<%= packageJson.license %>",
|
||||
"dependencies": {
|
||||
"rtts_assert": "<%= packageJson.version %>",
|
||||
"angular2": "<%= packageJson.version %>.dev"
|
||||
},
|
||||
"devDependencies": {
|
||||
"yargs": "2.3.*",
|
||||
"gulp-sourcemaps": "1.3.*",
|
||||
"gulp-traceur": "0.16.*",
|
||||
"gulp": "^3.8.8",
|
||||
"gulp-rename": "^1.2.0",
|
||||
"through2": "^0.6.1"
|
||||
}
|
||||
}
|
16
modules/benchpress/pubspec.yaml
Normal file
16
modules/benchpress/pubspec.yaml
Normal file
@ -0,0 +1,16 @@
|
||||
name: benchpress
|
||||
version: <%= packageJson.version %>
|
||||
authors:
|
||||
<%= Object.keys(packageJson.contributors).map(function(name) {
|
||||
return '- '+name+' <'+packageJson.contributors[name]+'>';
|
||||
}).join('\n') %>
|
||||
description: Benchpress - a framework for e2e performance tests
|
||||
homepage: <%= packageJson.homepage %>
|
||||
environment:
|
||||
sdk: '>=1.4.0'
|
||||
dependencies:
|
||||
stack_trace: '>=1.1.1 <2.0.0'
|
||||
angular2:
|
||||
path: ../angular2
|
||||
dev_dependencies:
|
||||
guinness: ">=0.1.16 <0.2.0"
|
36
modules/benchpress/src/metric.js
Normal file
36
modules/benchpress/src/metric.js
Normal file
@ -0,0 +1,36 @@
|
||||
import {
|
||||
Promise, PromiseWrapper
|
||||
} from 'angular2/src/facade/async';
|
||||
import {
|
||||
ABSTRACT, BaseException
|
||||
} from 'angular2/src/facade/lang';
|
||||
|
||||
/**
|
||||
* A metric is measures values
|
||||
*/
|
||||
@ABSTRACT()
|
||||
export class Metric {
|
||||
/**
|
||||
* Starts measuring
|
||||
*/
|
||||
beginMeasure():Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
|
||||
/**
|
||||
* Ends measuring and reports the data
|
||||
* since the begin call.
|
||||
* @param restart: Whether to restart right after this.
|
||||
*/
|
||||
endMeasure(restart:boolean):Promise<any> {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
|
||||
/**
|
||||
* Describes the metrics provided by this metric implementation.
|
||||
* (e.g. units, ...)
|
||||
*/
|
||||
describe():any {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
}
|
144
modules/benchpress/src/metric/perflog_metric.js
Normal file
144
modules/benchpress/src/metric/perflog_metric.js
Normal file
@ -0,0 +1,144 @@
|
||||
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
|
||||
import { isPresent, isBlank, int, BaseException, StringWrapper } from 'angular2/src/facade/lang';
|
||||
import { ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
import { WebDriverExtension } from '../web_driver_extension';
|
||||
import { Metric } from '../metric';
|
||||
|
||||
/**
|
||||
* A metric that reads out the performance log
|
||||
*/
|
||||
export class PerflogMetric extends Metric {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get SET_TIMEOUT() { return _SET_TIMEOUT; }
|
||||
|
||||
_driverExtension:WebDriverExtension;
|
||||
_remainingEvents:List;
|
||||
_measureCount:int;
|
||||
_setTimeout:Function;
|
||||
|
||||
constructor(driverExtension:WebDriverExtension, setTimeout:Function) {
|
||||
super();
|
||||
this._driverExtension = driverExtension;
|
||||
this._remainingEvents = [];
|
||||
this._measureCount = 0;
|
||||
this._setTimeout = setTimeout;
|
||||
}
|
||||
|
||||
describe() {
|
||||
return {
|
||||
'script': 'script execution time in ms',
|
||||
'render': 'render time in ms',
|
||||
'gcTime': 'gc time in ms',
|
||||
'gcAmount': 'gc amount in bytes',
|
||||
'gcTimeInScript': 'gc time during script execution in ms',
|
||||
'gcAmountInScript': 'gc amount during script execution in bytes'
|
||||
};
|
||||
}
|
||||
|
||||
beginMeasure():Promise {
|
||||
return this._driverExtension.timeBegin(this._markName(this._measureCount++));
|
||||
}
|
||||
|
||||
endMeasure(restart:boolean):Promise<Object> {
|
||||
var markName = this._markName(this._measureCount-1);
|
||||
var nextMarkName = restart ? this._markName(this._measureCount++) : null;
|
||||
return this._driverExtension.timeEnd(markName, nextMarkName)
|
||||
.then( (_) => this._readUntilEndMark(markName) );
|
||||
}
|
||||
|
||||
_readUntilEndMark(markName:string, loopCount:int = 0) {
|
||||
return this._driverExtension.readPerfLog().then( (events) => {
|
||||
this._remainingEvents = ListWrapper.concat(this._remainingEvents, events);
|
||||
if (loopCount > _MAX_RETRY_COUNT) {
|
||||
throw new BaseException(`Tried too often to get the ending mark: ${loopCount}`);
|
||||
}
|
||||
var result = this._aggregateEvents(
|
||||
this._remainingEvents, markName
|
||||
);
|
||||
if (isPresent(result)) {
|
||||
this._remainingEvents = events;
|
||||
return result;
|
||||
}
|
||||
var completer = PromiseWrapper.completer();
|
||||
this._setTimeout(
|
||||
() => completer.complete(this._readUntilEndMark(markName, loopCount+1)),
|
||||
100
|
||||
);
|
||||
return completer.promise;
|
||||
});
|
||||
}
|
||||
|
||||
_aggregateEvents(events, markName) {
|
||||
var result = {
|
||||
'script': 0,
|
||||
'render': 0,
|
||||
'gcTime': 0,
|
||||
'gcAmount': 0,
|
||||
'gcTimeInScript': 0,
|
||||
'gcAmountInScript': 0
|
||||
};
|
||||
|
||||
var startMarkFound = false;
|
||||
var endMarkFound = false;
|
||||
if (isBlank(markName)) {
|
||||
startMarkFound = true;
|
||||
endMarkFound = true;
|
||||
}
|
||||
|
||||
var intervalStarts = {};
|
||||
events.forEach( (event) => {
|
||||
var ph = event['ph'];
|
||||
var name = event['name'];
|
||||
var ts = event['ts'];
|
||||
var args = event['args'];
|
||||
if (StringWrapper.equals(ph, 'b') && StringWrapper.equals(name, markName)) {
|
||||
startMarkFound = true;
|
||||
} else if (StringWrapper.equals(ph, 'e') && StringWrapper.equals(name, markName)) {
|
||||
endMarkFound = true;
|
||||
}
|
||||
if (startMarkFound && !endMarkFound) {
|
||||
if (StringWrapper.equals(ph, 'B')) {
|
||||
intervalStarts[name] = ts;
|
||||
} else if (StringWrapper.equals(ph, 'E') && isPresent(intervalStarts[name])) {
|
||||
var diff = ts - intervalStarts[name];
|
||||
intervalStarts[name] = null;
|
||||
if (StringWrapper.equals(name, 'gc')) {
|
||||
result['gcTime'] += diff;
|
||||
var gcAmount = 0;
|
||||
if (isPresent(args)) {
|
||||
gcAmount = args['amount'];
|
||||
}
|
||||
result['gcAmount'] += gcAmount;
|
||||
if (isPresent(intervalStarts['script'])) {
|
||||
result['gcTimeInScript'] += diff;
|
||||
result['gcAmountInScript'] += gcAmount;
|
||||
}
|
||||
} else {
|
||||
result[name] += diff;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
result['script'] -= result['gcTimeInScript'];
|
||||
return startMarkFound && endMarkFound ? result : null;
|
||||
}
|
||||
|
||||
_markName(index) {
|
||||
return `${_MARK_NAME_PREFIX}${index}`;
|
||||
}
|
||||
}
|
||||
|
||||
var _MAX_RETRY_COUNT = 20;
|
||||
var _MARK_NAME_PREFIX = 'benchpress';
|
||||
var _SET_TIMEOUT = new OpaqueToken('PerflogMetric.setTimeout');
|
||||
var _BINDINGS = [
|
||||
bind(Metric).toFactory(
|
||||
(driverExtension, setTimeout) => new PerflogMetric(driverExtension, setTimeout),
|
||||
[WebDriverExtension, _SET_TIMEOUT]
|
||||
),
|
||||
bind(_SET_TIMEOUT).toValue( (fn, millis) => PromiseWrapper.setTimeout(fn, millis) )
|
||||
];
|
20
modules/benchpress/src/reporter.js
Normal file
20
modules/benchpress/src/reporter.js
Normal file
@ -0,0 +1,20 @@
|
||||
import {
|
||||
Promise, PromiseWrapper
|
||||
} from 'angular2/src/facade/async';
|
||||
import {
|
||||
ABSTRACT, BaseException
|
||||
} from 'angular2/src/facade/lang';
|
||||
|
||||
/**
|
||||
* A reporter reports measure values and the valid sample.
|
||||
*/
|
||||
@ABSTRACT()
|
||||
export class Reporter {
|
||||
reportMeasureValues(index:number, values:any):Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
|
||||
reportSample(completeSample:List, validSample:List):Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
}
|
117
modules/benchpress/src/reporter/console_reporter.js
Normal file
117
modules/benchpress/src/reporter/console_reporter.js
Normal file
@ -0,0 +1,117 @@
|
||||
import { print, isPresent, isBlank } from 'angular2/src/facade/lang';
|
||||
import { StringMapWrapper, ListWrapper, List } from 'angular2/src/facade/collection';
|
||||
import { Promise, PromiseWrapper } from 'angular2/src/facade/async';
|
||||
import { Math } from 'angular2/src/facade/math';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
import { Statistic } from '../statistic';
|
||||
import { Reporter } from '../reporter';
|
||||
import { SampleDescription } from '../sample_description';
|
||||
|
||||
/**
|
||||
* A reporter for the console
|
||||
*/
|
||||
export class ConsoleReporter extends Reporter {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get PRINT() { return _PRINT; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get COLUMN_WIDTH() { return _COLUMN_WIDTH; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
|
||||
static _lpad(value, columnWidth, fill = ' ') {
|
||||
var result = '';
|
||||
for (var i=0; i<columnWidth - value.length; i++) {
|
||||
result += fill;
|
||||
}
|
||||
return result + value;
|
||||
}
|
||||
|
||||
static _formatNum(num) {
|
||||
var result;
|
||||
if (num === 0) {
|
||||
result = '000';
|
||||
} else {
|
||||
result = `${Math.floor(num * 100)}`;
|
||||
}
|
||||
return result.substring(0, result.length - 2) + '.' + result.substring(result.length-2);
|
||||
}
|
||||
|
||||
static _sortedProps(obj) {
|
||||
var props = [];
|
||||
StringMapWrapper.forEach(obj, (value, prop) => ListWrapper.push(props, prop));
|
||||
props.sort();
|
||||
return props;
|
||||
}
|
||||
|
||||
_columnWidth:number;
|
||||
_metricNames:List;
|
||||
_print:Function;
|
||||
|
||||
constructor(columnWidth, sampleDescription, print) {
|
||||
super();
|
||||
this._columnWidth = columnWidth;
|
||||
this._metricNames = ConsoleReporter._sortedProps(sampleDescription.metrics);
|
||||
this._print = print;
|
||||
this._printDescription(sampleDescription);
|
||||
}
|
||||
|
||||
_printDescription(sampleDescription) {
|
||||
this._print(`BENCHMARK ${sampleDescription.id}`);
|
||||
this._print('Description:');
|
||||
var props = ConsoleReporter._sortedProps(sampleDescription.description);
|
||||
props.forEach( (prop) => {
|
||||
this._print(`- ${prop}: ${sampleDescription.description[prop]}`);
|
||||
});
|
||||
this._print('Metrics:');
|
||||
this._metricNames.forEach( (metricName) => {
|
||||
this._print(`- ${metricName}: ${sampleDescription.metrics[metricName]}`);
|
||||
});
|
||||
this._print('');
|
||||
this._printStringRow(this._metricNames);
|
||||
this._printStringRow(this._metricNames.map( (_) => '' ), '-');
|
||||
}
|
||||
|
||||
reportMeasureValues(index:number, measuredValues:any):Promise {
|
||||
var formattedValues = ListWrapper.map(this._metricNames, (metricName) => {
|
||||
var value = measuredValues[metricName];
|
||||
return ConsoleReporter._formatNum(value);
|
||||
});
|
||||
this._printStringRow(formattedValues);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
|
||||
reportSample(completeSample:List, validSample:List):Promise {
|
||||
this._printStringRow(this._metricNames.map( (_) => '' ), '=');
|
||||
this._printStringRow(
|
||||
ListWrapper.map(this._metricNames, (metricName) => {
|
||||
var sample = ListWrapper.map(validSample, (measuredValues) => measuredValues[metricName]);
|
||||
var mean = Statistic.calculateMean(sample);
|
||||
var cv = Statistic.calculateCoefficientOfVariation(sample, mean);
|
||||
return `${ConsoleReporter._formatNum(mean)}\u00B1${Math.floor(cv)}%`;
|
||||
})
|
||||
);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
|
||||
_printStringRow(parts, fill = ' ') {
|
||||
this._print(
|
||||
ListWrapper.map(parts, (part) => {
|
||||
var w = this._columnWidth;
|
||||
return ConsoleReporter._lpad(part, w, fill);
|
||||
}).join(' | ')
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
var _PRINT = new OpaqueToken('ConsoleReporter.print');
|
||||
var _COLUMN_WIDTH = new OpaqueToken('ConsoleReporter.columnWidht');
|
||||
var _BINDINGS = [
|
||||
bind(Reporter).toFactory(
|
||||
(columnWidth, sampleDescription, print) => new ConsoleReporter(columnWidth, sampleDescription, print),
|
||||
[_COLUMN_WIDTH, SampleDescription, _PRINT]
|
||||
),
|
||||
bind(_COLUMN_WIDTH).toValue(18),
|
||||
bind(_PRINT).toValue(print)
|
||||
];
|
54
modules/benchpress/src/runner.js
Normal file
54
modules/benchpress/src/runner.js
Normal file
@ -0,0 +1,54 @@
|
||||
import { Injector, bind } from 'angular2/di';
|
||||
import { isPresent, isBlank } from 'angular2/src/facade/lang';
|
||||
import { List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { Promise } from 'angular2/src/facade/async';
|
||||
|
||||
import { Sampler, SampleState } from './sampler';
|
||||
import { ConsoleReporter } from './reporter/console_reporter';
|
||||
import { RegressionSlopeValidator } from './validator/regression_slope_validator';
|
||||
import { PerflogMetric } from './metric/perflog_metric';
|
||||
import { ChromeDriverExtension } from './webdriver/chrome_driver_extension';
|
||||
import { SampleDescription } from './sample_description';
|
||||
|
||||
import { Options } from './sample_options';
|
||||
|
||||
/**
|
||||
* The Runner is the main entry point for executing a sample run.
|
||||
* It provides defaults, creates the injector and calls the sampler.
|
||||
*/
|
||||
export class Runner {
|
||||
_defaultBindings:List;
|
||||
|
||||
constructor(defaultBindings:List = null) {
|
||||
if (isBlank(defaultBindings)) {
|
||||
defaultBindings = [];
|
||||
}
|
||||
this._defaultBindings = defaultBindings;
|
||||
}
|
||||
|
||||
sample({id, execute, prepare, bindings}):Promise<SampleState> {
|
||||
var sampleBindings = [
|
||||
_DEFAULT_BINDINGS,
|
||||
this._defaultBindings,
|
||||
bind(Options.SAMPLE_ID).toValue(id),
|
||||
bind(Options.EXECUTE).toValue(execute)
|
||||
];
|
||||
if (isPresent(prepare)) {
|
||||
ListWrapper.push(sampleBindings, bind(Options.PREPARE).toValue(prepare));
|
||||
}
|
||||
if (isPresent(bindings)) {
|
||||
ListWrapper.push(sampleBindings, bindings);
|
||||
}
|
||||
return new Injector(sampleBindings).asyncGet(Sampler)
|
||||
.then( (sampler) => sampler.sample() );
|
||||
}
|
||||
}
|
||||
|
||||
var _DEFAULT_BINDINGS = [
|
||||
Sampler.BINDINGS,
|
||||
ConsoleReporter.BINDINGS,
|
||||
RegressionSlopeValidator.BINDINGS,
|
||||
ChromeDriverExtension.BINDINGS,
|
||||
PerflogMetric.BINDINGS,
|
||||
SampleDescription.BINDINGS
|
||||
];
|
43
modules/benchpress/src/sample_description.js
Normal file
43
modules/benchpress/src/sample_description.js
Normal file
@ -0,0 +1,43 @@
|
||||
import { StringMapWrapper, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
import { Sampler } from './sampler';
|
||||
import { Validator } from './validator';
|
||||
import { Metric } from './metric';
|
||||
import { Options } from './sample_options';
|
||||
|
||||
/**
|
||||
* SampleDescription merges all available descriptions about a sample
|
||||
*/
|
||||
export class SampleDescription {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
|
||||
id:string;
|
||||
description:any;
|
||||
metrics:any;
|
||||
|
||||
constructor(id, descriptions, metrics) {
|
||||
this.id = id;
|
||||
this.metrics = metrics;
|
||||
this.description = {};
|
||||
ListWrapper.forEach(descriptions, (description) => {
|
||||
StringMapWrapper.forEach(description, (value, prop) => this.description[prop] = value );
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
var _BINDINGS = [
|
||||
bind(SampleDescription).toFactory(
|
||||
(metric, id, forceGc, validator, defaultDesc, userDesc) => new SampleDescription(id,
|
||||
[
|
||||
{'forceGc': forceGc},
|
||||
validator.describe(),
|
||||
defaultDesc,
|
||||
userDesc
|
||||
],
|
||||
metric.describe()),
|
||||
[Metric, Options.SAMPLE_ID, Options.FORCE_GC, Validator, Options.DEFAULT_DESCRIPTION, Options.SAMPLE_DESCRIPTION]
|
||||
),
|
||||
bind(Options.DEFAULT_DESCRIPTION).toValue({}),
|
||||
bind(Options.SAMPLE_DESCRIPTION).toValue({})
|
||||
];
|
23
modules/benchpress/src/sample_options.js
Normal file
23
modules/benchpress/src/sample_options.js
Normal file
@ -0,0 +1,23 @@
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
export class Options {
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get SAMPLE_ID() { return _SAMPLE_ID; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get DEFAULT_DESCRIPTION() { return _DEFAULT_DESCRIPTION; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get SAMPLE_DESCRIPTION() { return _SAMPLE_DESCRIPTION; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get FORCE_GC() { return _FORCE_GC; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get PREPARE() { return _PREPARE; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get EXECUTE() { return _EXECUTE; }
|
||||
}
|
||||
|
||||
var _SAMPLE_ID = new OpaqueToken('SampleDescription.sampleId');
|
||||
var _DEFAULT_DESCRIPTION = new OpaqueToken('SampleDescription.defaultDescription');
|
||||
var _SAMPLE_DESCRIPTION = new OpaqueToken('SampleDescription.sampleDescription');
|
||||
var _FORCE_GC = new OpaqueToken('Sampler.forceGc');
|
||||
var _PREPARE = new OpaqueToken('Sampler.prepare');
|
||||
var _EXECUTE = new OpaqueToken('Sampler.execute');
|
134
modules/benchpress/src/sampler.js
Normal file
134
modules/benchpress/src/sampler.js
Normal file
@ -0,0 +1,134 @@
|
||||
import { isPresent, isBlank } from 'angular2/src/facade/lang';
|
||||
import { Promise, PromiseWrapper } from 'angular2/src/facade/async';
|
||||
import { StringMapWrapper, List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
import { Metric } from './metric';
|
||||
import { Validator } from './validator';
|
||||
import { Reporter } from './reporter';
|
||||
import { WebDriverExtension } from './web_driver_extension';
|
||||
import { WebDriverAdapter } from './web_driver_adapter';
|
||||
|
||||
import { Options } from './sample_options';
|
||||
|
||||
/**
|
||||
* The Sampler owns the sample loop:
|
||||
* 1. calls the prepare/execute callbacks,
|
||||
* 2. gets data from the metric
|
||||
* 3. asks the validator for a valid sample
|
||||
* 4. reports the new data to the reporter
|
||||
* 5. loop until there is a valid sample
|
||||
*/
|
||||
export class Sampler {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
|
||||
_driver:WebDriverAdapter;
|
||||
_driverExtension:WebDriverExtension;
|
||||
_metric:Metric;
|
||||
_reporter:Reporter;
|
||||
_validator:Validator;
|
||||
_forceGc:boolean;
|
||||
_prepare:Function;
|
||||
_execute:Function;
|
||||
|
||||
constructor({
|
||||
driver, driverExtension, metric, reporter, validator, forceGc, prepare, execute
|
||||
}:{
|
||||
driver: WebDriverAdapter,
|
||||
driverExtension: WebDriverExtension, metric: Metric, reporter: Reporter,
|
||||
validator: Validator, prepare: Function, execute: Function
|
||||
}={}) {
|
||||
this._driver = driver;
|
||||
this._driverExtension = driverExtension;
|
||||
this._metric = metric;
|
||||
this._reporter = reporter;
|
||||
this._validator = validator;
|
||||
this._forceGc = forceGc;
|
||||
this._prepare = prepare;
|
||||
this._execute = execute;
|
||||
}
|
||||
|
||||
sample():Promise<SampleState> {
|
||||
var loop;
|
||||
loop = (lastState) => {
|
||||
return this._iterate(lastState)
|
||||
.then( (newState) => {
|
||||
if (isPresent(newState.validSample)) {
|
||||
return newState;
|
||||
} else {
|
||||
return loop(newState);
|
||||
}
|
||||
});
|
||||
}
|
||||
return this._gcIfNeeded().then( (_) => loop(new SampleState([], null)) );
|
||||
}
|
||||
|
||||
_gcIfNeeded() {
|
||||
if (this._forceGc) {
|
||||
return this._driverExtension.gc();
|
||||
} else {
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
}
|
||||
|
||||
_iterate(lastState) {
|
||||
var resultPromise;
|
||||
if (isPresent(this._prepare)) {
|
||||
resultPromise = this._driver.waitFor(this._prepare)
|
||||
.then( (_) => this._gcIfNeeded() );
|
||||
} else {
|
||||
resultPromise = PromiseWrapper.resolve(null);
|
||||
}
|
||||
if (isPresent(this._prepare) || lastState.completeSample.length === 0) {
|
||||
resultPromise = resultPromise.then( (_) => this._metric.beginMeasure() );
|
||||
}
|
||||
return resultPromise
|
||||
.then( (_) => this._driver.waitFor(this._execute) )
|
||||
.then( (_) => this._gcIfNeeded() )
|
||||
.then( (_) => this._metric.endMeasure(isBlank(this._prepare)) )
|
||||
.then( (measureValues) => this._report(lastState, measureValues) );
|
||||
}
|
||||
|
||||
_report(state:SampleState, measuredValues:any):Promise<SampleState> {
|
||||
var completeSample = ListWrapper.concat(state.completeSample, [measuredValues]);
|
||||
var validSample = this._validator.validate(completeSample);
|
||||
var resultPromise = this._reporter.reportMeasureValues(completeSample.length - 1, measuredValues);
|
||||
if (isPresent(validSample)) {
|
||||
resultPromise = resultPromise.then( (_) => this._reporter.reportSample(completeSample, validSample) )
|
||||
}
|
||||
return resultPromise.then( (_) => new SampleState(completeSample, validSample) );
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class SampleState {
|
||||
completeSample:List;
|
||||
validSample:List;
|
||||
|
||||
constructor(completeSample: List, validSample: List) {
|
||||
this.completeSample = completeSample;
|
||||
this.validSample = validSample;
|
||||
}
|
||||
}
|
||||
|
||||
var _BINDINGS = [
|
||||
bind(Sampler).toFactory(
|
||||
(driver, driverExtension, metric, reporter, validator, forceGc, prepare, execute) => new Sampler({
|
||||
driver: driver,
|
||||
driverExtension: driverExtension,
|
||||
reporter: reporter,
|
||||
validator: validator,
|
||||
metric: metric,
|
||||
forceGc: forceGc,
|
||||
// TODO(tbosch): DI right now does not support null/undefined objects
|
||||
// Mostly because the cache would have to be initialized with a
|
||||
// special null object, which is expensive.
|
||||
prepare: prepare !== false ? prepare : null,
|
||||
execute: execute
|
||||
}),
|
||||
[WebDriverAdapter, WebDriverExtension, Metric, Reporter, Validator, Options.FORCE_GC, Options.PREPARE, Options.EXECUTE]
|
||||
),
|
||||
bind(Options.FORCE_GC).toValue(false),
|
||||
bind(Options.PREPARE).toValue(false)
|
||||
];
|
37
modules/benchpress/src/statistic.js
Normal file
37
modules/benchpress/src/statistic.js
Normal file
@ -0,0 +1,37 @@
|
||||
import { Math } from 'angular2/src/facade/math';
|
||||
import { ListWrapper } from 'angular2/src/facade/collection';
|
||||
|
||||
export class Statistic {
|
||||
static calculateCoefficientOfVariation(sample, mean) {
|
||||
return Statistic.calculateStandardDeviation(sample, mean) / mean * 100;
|
||||
}
|
||||
|
||||
static calculateMean(sample) {
|
||||
var total = 0;
|
||||
ListWrapper.forEach(sample, (x) => { total += x } );
|
||||
return total / sample.length;
|
||||
}
|
||||
|
||||
static calculateStandardDeviation(sample, mean) {
|
||||
var deviation = 0;
|
||||
ListWrapper.forEach(sample, (x) => {
|
||||
deviation += Math.pow(x - mean, 2);
|
||||
});
|
||||
deviation = deviation / (sample.length);
|
||||
deviation = Math.sqrt(deviation);
|
||||
return deviation;
|
||||
}
|
||||
|
||||
static calculateRegressionSlope(xValues, xMean, yValues, yMean) {
|
||||
// See http://en.wikipedia.org/wiki/Simple_linear_regression
|
||||
var dividendSum = 0;
|
||||
var divisorSum = 0;
|
||||
for (var i=0; i<xValues.length; i++) {
|
||||
dividendSum += (xValues[i] - xMean) * (yValues[i] - yMean);
|
||||
divisorSum += Math.pow(xValues[i] - xMean, 2);
|
||||
}
|
||||
return dividendSum / divisorSum;
|
||||
}
|
||||
}
|
||||
|
||||
|
27
modules/benchpress/src/validator.js
Normal file
27
modules/benchpress/src/validator.js
Normal file
@ -0,0 +1,27 @@
|
||||
import { List } from 'angular2/src/facade/collection';
|
||||
import {
|
||||
ABSTRACT, BaseException
|
||||
} from 'angular2/src/facade/lang';
|
||||
|
||||
/**
|
||||
* A Validator calculates a valid sample out of the complete sample.
|
||||
* A valid sample is a sample that represents the population that should be observed
|
||||
* in the correct way.
|
||||
*/
|
||||
@ABSTRACT()
|
||||
export class Validator {
|
||||
/**
|
||||
* Calculates a valid sample out of the complete sample
|
||||
*/
|
||||
validate(completeSample:List<any>):List<any> {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a Map that describes the properties of the validator
|
||||
* (e.g. sample size, ...)
|
||||
*/
|
||||
describe():any {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
}
|
@ -0,0 +1,68 @@
|
||||
import { List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
import { Validator } from '../validator';
|
||||
import { Statistic } from '../statistic';
|
||||
|
||||
/**
|
||||
* A validator that checks the regression slope of a specific metric.
|
||||
* Waits for the regression slope to be >=0.
|
||||
*/
|
||||
export class RegressionSlopeValidator extends Validator {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get SAMPLE_SIZE() { return _SAMPLE_SIZE; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get METRIC() { return _METRIC; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
|
||||
_sampleSize:number;
|
||||
_metric:string;
|
||||
|
||||
constructor(sampleSize, metric) {
|
||||
super();
|
||||
this._sampleSize = sampleSize;
|
||||
this._metric = metric;
|
||||
}
|
||||
|
||||
describe():any {
|
||||
return {
|
||||
'sampleSize': this._sampleSize,
|
||||
'regressionSlopeMetric': this._metric
|
||||
};
|
||||
}
|
||||
|
||||
validate(completeSample:List<any>):List<any> {
|
||||
if (completeSample.length >= this._sampleSize) {
|
||||
var latestSample =
|
||||
ListWrapper.slice(completeSample, completeSample.length - this._sampleSize, completeSample.length);
|
||||
var xValues = [];
|
||||
var yValues = [];
|
||||
for (var i = 0; i<latestSample.length; i++) {
|
||||
// For now, we only use the array index as x value.
|
||||
// TODO(tbosch): think about whether we should use time here instead
|
||||
ListWrapper.push(xValues, i);
|
||||
ListWrapper.push(yValues, latestSample[i][this._metric]);
|
||||
}
|
||||
var regressionSlope = Statistic.calculateRegressionSlope(
|
||||
xValues, Statistic.calculateMean(xValues),
|
||||
yValues, Statistic.calculateMean(yValues)
|
||||
);
|
||||
return regressionSlope >= 0 ? latestSample : null;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
var _SAMPLE_SIZE = new OpaqueToken('RegressionSlopeValidator.sampleSize');
|
||||
var _METRIC = new OpaqueToken('RegressionSlopeValidator.metric');
|
||||
var _BINDINGS = [
|
||||
bind(Validator).toFactory(
|
||||
(sampleSize, metric) => new RegressionSlopeValidator(sampleSize, metric),
|
||||
[_SAMPLE_SIZE, _METRIC]
|
||||
),
|
||||
bind(_SAMPLE_SIZE).toValue(10),
|
||||
bind(_METRIC).toValue('script')
|
||||
];
|
45
modules/benchpress/src/validator/size_validator.js
Normal file
45
modules/benchpress/src/validator/size_validator.js
Normal file
@ -0,0 +1,45 @@
|
||||
import { List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { bind, OpaqueToken } from 'angular2/di';
|
||||
|
||||
import { Validator } from '../validator';
|
||||
|
||||
/**
|
||||
* A validator that waits for the sample to have a certain size.
|
||||
*/
|
||||
export class SizeValidator extends Validator {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get SAMPLE_SIZE() { return _SAMPLE_SIZE; }
|
||||
|
||||
_sampleSize:number;
|
||||
|
||||
constructor(size) {
|
||||
super();
|
||||
this._sampleSize = size;
|
||||
}
|
||||
|
||||
describe():any {
|
||||
return {
|
||||
'sampleSize': this._sampleSize
|
||||
};
|
||||
}
|
||||
|
||||
validate(completeSample:List<any>):List<any> {
|
||||
if (completeSample.length >= this._sampleSize) {
|
||||
return ListWrapper.slice(completeSample, completeSample.length - this._sampleSize, completeSample.length);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
var _SAMPLE_SIZE = new OpaqueToken('SizeValidator.sampleSize');
|
||||
var _BINDINGS = [
|
||||
bind(Validator).toFactory(
|
||||
(size) => new SizeValidator(size),
|
||||
[_SAMPLE_SIZE]
|
||||
),
|
||||
bind(_SAMPLE_SIZE).toValue(10)
|
||||
];
|
23
modules/benchpress/src/web_driver_adapter.js
Normal file
23
modules/benchpress/src/web_driver_adapter.js
Normal file
@ -0,0 +1,23 @@
|
||||
import { Promise } from 'angular2/src/facade/async';
|
||||
import { BaseException, ABSTRACT } from 'angular2/src/facade/lang';
|
||||
|
||||
/**
|
||||
* A WebDriverAdapter bridges API differences between different WebDriver clients,
|
||||
* e.g. JS vs Dart Async vs Dart Sync webdriver.
|
||||
* Needs one implementation for every supported WebDriver client.
|
||||
*/
|
||||
@ABSTRACT()
|
||||
export class WebDriverAdapter {
|
||||
waitFor(callback:Function):Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
executeScript(script:string):Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
capabilities():Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
logs(type:string):Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
}
|
40
modules/benchpress/src/web_driver_extension.js
Normal file
40
modules/benchpress/src/web_driver_extension.js
Normal file
@ -0,0 +1,40 @@
|
||||
import { BaseException, ABSTRACT } from 'angular2/src/facade/lang';
|
||||
import { Promise } from 'angular2/src/facade/async';
|
||||
import { List } from 'angular2/src/facade/collection';
|
||||
|
||||
/**
|
||||
* A WebDriverExtension implements extended commands of the webdriver protocol
|
||||
* for a given browser, independent of the WebDriverAdapter.
|
||||
* Needs one implementation for every supported Browser.
|
||||
*/
|
||||
@ABSTRACT()
|
||||
export class WebDriverExtension {
|
||||
gc():Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
|
||||
timeStamp(name:string, names:List<String>):Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
|
||||
timeBegin(name):Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
|
||||
timeEnd(name, restart:boolean):Promise {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
|
||||
/**
|
||||
* Format:
|
||||
* - name: event name, e.g. 'script', 'gc', ...
|
||||
* - ph: phase: 'B' (begin), 'E' (end), 'b' (nestable start), 'e' (nestable end)
|
||||
* - ts: timestamp, e.g. 12345
|
||||
* - args: arguments, e.g. {someArg: 1}
|
||||
*
|
||||
* Based on [Chrome Trace Event Format](https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/edit)
|
||||
**/
|
||||
readPerfLog():Promise<List> {
|
||||
throw new BaseException('NYI');
|
||||
}
|
||||
}
|
@ -0,0 +1,23 @@
|
||||
library benchpress.src.webdriver.async_webdriver_adapter_dart;
|
||||
|
||||
import 'package:angular2/src/facade/async.dart' show Future;
|
||||
import '../web_driver_adapter.dart' show WebDriverAdapter;
|
||||
|
||||
class AsyncWebDriverAdapter extends WebDriverAdapter {
|
||||
dynamic _driver;
|
||||
AsyncWebDriverAdapter(driver) {
|
||||
this._driver = driver;
|
||||
}
|
||||
Future waitFor(Function callback) {
|
||||
return callback();
|
||||
}
|
||||
Future executeScript(String script) {
|
||||
return this._driver.execute(script);
|
||||
}
|
||||
Future capabilities() {
|
||||
return this._driver.capabilities;
|
||||
}
|
||||
Future logs(String type) {
|
||||
return this._driver.logs.get(type);
|
||||
}
|
||||
}
|
151
modules/benchpress/src/webdriver/chrome_driver_extension.js
Normal file
151
modules/benchpress/src/webdriver/chrome_driver_extension.js
Normal file
@ -0,0 +1,151 @@
|
||||
import { bind } from 'angular2/di';
|
||||
import { ListWrapper } from 'angular2/src/facade/collection';
|
||||
import {
|
||||
Json, isPresent, isBlank, RegExpWrapper, StringWrapper
|
||||
} from 'angular2/src/facade/lang';
|
||||
|
||||
import { WebDriverExtension } from '../web_driver_extension';
|
||||
import { WebDriverAdapter } from '../web_driver_adapter';
|
||||
import { Promise } from 'angular2/src/facade/async';
|
||||
|
||||
|
||||
var BEGIN_MARK_RE = RegExpWrapper.create('begin_(.*)');
|
||||
var END_MARK_RE = RegExpWrapper.create('end_(.*)');
|
||||
|
||||
export class ChromeDriverExtension extends WebDriverExtension {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get BINDINGS() { return _BINDINGS; }
|
||||
|
||||
_driver:WebDriverAdapter;
|
||||
|
||||
constructor(driver:WebDriverAdapter) {
|
||||
super();
|
||||
this._driver = driver;
|
||||
}
|
||||
|
||||
gc() {
|
||||
return this._driver.executeScript('window.gc()');
|
||||
}
|
||||
|
||||
timeBegin(name:string):Promise {
|
||||
// Note: Can't use console.time / console.timeEnd as it does not show up in the perf log!
|
||||
return this._driver.executeScript(`console.timeStamp('begin_${name}');`);
|
||||
}
|
||||
|
||||
timeEnd(name:string, restartName:string = null):Promise {
|
||||
// Note: Can't use console.time / console.timeEnd as it does not show up in the perf log!
|
||||
var script = `console.timeStamp('end_${name}');`;
|
||||
if (isPresent(restartName)) {
|
||||
script += `console.timeStamp('begin_${restartName}');`
|
||||
}
|
||||
return this._driver.executeScript(script);
|
||||
}
|
||||
|
||||
readPerfLog() {
|
||||
// TODO(tbosch): Bug in ChromeDriver: Need to execute at least one command
|
||||
// so that the browser logs can be read out!
|
||||
return this._driver.executeScript('1+1')
|
||||
.then( (_) => this._driver.logs('performance') )
|
||||
.then( (entries) => {
|
||||
var records = [];
|
||||
ListWrapper.forEach(entries, function(entry) {
|
||||
var message = Json.parse(entry['message'])['message'];
|
||||
if (StringWrapper.equals(message['method'], 'Timeline.eventRecorded')) {
|
||||
ListWrapper.push(records, message['params']['record']);
|
||||
}
|
||||
});
|
||||
return this._convertPerfRecordsToEvents(records);
|
||||
});
|
||||
}
|
||||
|
||||
_convertPerfRecordsToEvents(records, events = null) {
|
||||
if (isBlank(events)) {
|
||||
events = [];
|
||||
}
|
||||
records.forEach( (record) => {
|
||||
var endEvent = null;
|
||||
var type = record['type'];
|
||||
var data = record['data'];
|
||||
var startTime = record['startTime'];
|
||||
var endTime = record['endTime'];
|
||||
|
||||
if (StringWrapper.equals(type, 'FunctionCall') &&
|
||||
(isBlank(data) || !StringWrapper.equals(data['scriptName'], 'InjectedScript'))) {
|
||||
ListWrapper.push(events, {
|
||||
'name': 'script',
|
||||
'ts': startTime,
|
||||
'ph': 'B'
|
||||
});
|
||||
endEvent = {
|
||||
'name': 'script',
|
||||
'ts': endTime,
|
||||
'ph': 'E',
|
||||
'args': null
|
||||
}
|
||||
} else if (StringWrapper.equals(type, 'TimeStamp')) {
|
||||
var name = data['message'];
|
||||
var ph;
|
||||
var match = RegExpWrapper.firstMatch(BEGIN_MARK_RE, name);
|
||||
if (isPresent(match)) {
|
||||
ph = 'b';
|
||||
} else {
|
||||
match = RegExpWrapper.firstMatch(END_MARK_RE, name);
|
||||
if (isPresent(match)) {
|
||||
ph = 'e';
|
||||
}
|
||||
}
|
||||
if (isPresent(ph)) {
|
||||
ListWrapper.push(events, {
|
||||
'name': match[1],
|
||||
'ph': ph
|
||||
});
|
||||
}
|
||||
} else if (StringWrapper.equals(type, 'RecalculateStyles') ||
|
||||
StringWrapper.equals(type, 'Layout') ||
|
||||
StringWrapper.equals(type, 'UpdateLayerTree') ||
|
||||
StringWrapper.equals(type, 'Paint') ||
|
||||
StringWrapper.equals(type, 'Rasterize') ||
|
||||
StringWrapper.equals(type, 'CompositeLayers')) {
|
||||
ListWrapper.push(events, {
|
||||
'name': 'render',
|
||||
'ts': startTime,
|
||||
'ph': 'B'
|
||||
});
|
||||
endEvent = {
|
||||
'name': 'render',
|
||||
'ts': endTime,
|
||||
'ph': 'E',
|
||||
'args': null
|
||||
}
|
||||
} else if (StringWrapper.equals(type, 'GCEvent')) {
|
||||
ListWrapper.push(events, {
|
||||
'name': 'gc',
|
||||
'ts': startTime,
|
||||
'ph': 'B'
|
||||
});
|
||||
endEvent = {
|
||||
'name': 'gc',
|
||||
'ts': endTime,
|
||||
'ph': 'E',
|
||||
'args': {
|
||||
'amount': data['usedHeapSizeDelta']
|
||||
}
|
||||
};
|
||||
}
|
||||
if (isPresent(record['children'])) {
|
||||
this._convertPerfRecordsToEvents(record['children'], events);
|
||||
}
|
||||
if (isPresent(endEvent)) {
|
||||
ListWrapper.push(events, endEvent);
|
||||
}
|
||||
});
|
||||
return events;
|
||||
}
|
||||
}
|
||||
|
||||
var _BINDINGS = [
|
||||
bind(WebDriverExtension).toFactory(
|
||||
(driver) => new ChromeDriverExtension(driver),
|
||||
[WebDriverAdapter]
|
||||
)
|
||||
];
|
@ -0,0 +1,49 @@
|
||||
import { Promise, PromiseWrapper } from 'angular2/src/facade/async';
|
||||
import { bind } from 'angular2/di';
|
||||
import { WebDriverAdapter } from '../web_driver_adapter';
|
||||
|
||||
import webdriver from 'selenium-webdriver';
|
||||
|
||||
/**
|
||||
* Adapter for the selenium-webdriver.
|
||||
*/
|
||||
export class SeleniumWebDriverAdapter extends WebDriverAdapter {
|
||||
_driver:any;
|
||||
|
||||
constructor(driver) {
|
||||
super();
|
||||
this._driver = driver;
|
||||
}
|
||||
|
||||
_convertPromise(thenable) {
|
||||
var completer = PromiseWrapper.completer();
|
||||
thenable.then(completer.complete, completer.reject);
|
||||
return completer.promise;
|
||||
}
|
||||
|
||||
waitFor(callback):Promise {
|
||||
return this._convertPromise(this._driver.controlFlow().execute(callback));
|
||||
}
|
||||
|
||||
executeScript(script:string):Promise {
|
||||
return this._convertPromise(this._driver.executeScript(script));
|
||||
}
|
||||
|
||||
capabilities():Promise {
|
||||
return this._convertPromise(this._driver.getCapabilities());
|
||||
}
|
||||
|
||||
logs(type:string):Promise {
|
||||
// Needed as selenium-webdriver does not forward
|
||||
// performance logs in the correct way via manage().logs
|
||||
return this._convertPromise(this._driver.schedule(
|
||||
new webdriver.Command(webdriver.CommandName.GET_LOG).
|
||||
setParameter('type', type),
|
||||
'WebDriver.manage().logs().get(' + type + ')').then( (logs) => {
|
||||
// Need to convert the Array into an instance of an Array
|
||||
// as selenium-webdriver uses an own Node.js context!
|
||||
return [].slice.call(logs);
|
||||
}));
|
||||
}
|
||||
|
||||
}
|
41
modules/benchpress/src/webdriver/sync_webdriver_adapter.dart
Normal file
41
modules/benchpress/src/webdriver/sync_webdriver_adapter.dart
Normal file
@ -0,0 +1,41 @@
|
||||
library benchpress.src.webdriver.sync_webdriver_adapter_dart;
|
||||
|
||||
import 'package:angular2/src/facade/async.dart' show Future, PromiseWrapper;
|
||||
import '../web_driver_adapter.dart' show WebDriverAdapter;
|
||||
|
||||
class SyncWebDriverAdapter extends WebDriverAdapter {
|
||||
dynamic _driver;
|
||||
SyncWebDriverAdapter(driver) {
|
||||
this._driver = driver;
|
||||
}
|
||||
Future waitFor(Function callback) {
|
||||
return this._convertToAsync(callback);
|
||||
}
|
||||
Future _convertToAsync(callback) {
|
||||
try {
|
||||
var result = callback();
|
||||
if (result is Promise) {
|
||||
return result;
|
||||
} else {
|
||||
return PromiseWrapper.resolve(result);
|
||||
}
|
||||
} catch (e) {
|
||||
return PromiseWrapper.reject(result);
|
||||
}
|
||||
}
|
||||
Future executeScript(String script) {
|
||||
return this._convertToAsync(() {
|
||||
return this._driver.execute(script);
|
||||
});
|
||||
}
|
||||
Future capabilities() {
|
||||
return this._convertToAsync(() {
|
||||
return this._driver.capabilities;
|
||||
});
|
||||
}
|
||||
Future logs(String type) {
|
||||
return this._convertToAsync(() {
|
||||
return this._driver.logs.get(script);
|
||||
});
|
||||
}
|
||||
}
|
329
modules/benchpress/test/metric/perflog_metric_spec.js
Normal file
329
modules/benchpress/test/metric/perflog_metric_spec.js
Normal file
@ -0,0 +1,329 @@
|
||||
import {ddescribe, describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
|
||||
|
||||
import { List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';
|
||||
|
||||
import { Metric, PerflogMetric, WebDriverExtension, bind, Injector } from 'benchpress/benchpress';
|
||||
|
||||
export function main() {
|
||||
var commandLog;
|
||||
|
||||
function createMetric(perfLogs) {
|
||||
commandLog = [];
|
||||
return new Injector([
|
||||
PerflogMetric.BINDINGS,
|
||||
bind(PerflogMetric.SET_TIMEOUT).toValue( (fn, millis) => {
|
||||
ListWrapper.push(commandLog, ['setTimeout', millis]);
|
||||
fn();
|
||||
}),
|
||||
bind(WebDriverExtension).toValue(new MockDriverExtension(perfLogs, commandLog))
|
||||
]).get(Metric);
|
||||
}
|
||||
|
||||
describe('perflog metric', () => {
|
||||
|
||||
it('should describe itself', () => {
|
||||
expect(createMetric([[]]).describe()['script']).toBe('script execution time in ms');
|
||||
});
|
||||
|
||||
describe('beginMeasure', () => {
|
||||
|
||||
it('should mark the timeline', (done) => {
|
||||
var metric = createMetric([[]]);
|
||||
metric.beginMeasure().then((_) => {
|
||||
expect(commandLog).toEqual([['timeBegin', 'benchpress0']]);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('endMeasure', () => {
|
||||
|
||||
it('should mark and aggregate events in between the marks', (done) => {
|
||||
var events = [
|
||||
[
|
||||
markStartEvent('benchpress0'),
|
||||
startEvent('script', 4),
|
||||
endEvent('script', 6),
|
||||
markEndEvent('benchpress0')
|
||||
]
|
||||
];
|
||||
var metric = createMetric(events);
|
||||
metric.beginMeasure()
|
||||
.then( (_) => metric.endMeasure(false) )
|
||||
.then( (data) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'],
|
||||
['timeEnd', 'benchpress0', null],
|
||||
'readPerfLog'
|
||||
]);
|
||||
expect(data['script']).toBe(2);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should restart timing', (done) => {
|
||||
var events = [
|
||||
[
|
||||
markStartEvent('benchpress0'),
|
||||
markEndEvent('benchpress0'),
|
||||
markStartEvent('benchpress1'),
|
||||
], [
|
||||
markEndEvent('benchpress1')
|
||||
]
|
||||
];
|
||||
var metric = createMetric(events);
|
||||
metric.beginMeasure()
|
||||
.then( (_) => metric.endMeasure(true) )
|
||||
.then( (_) => metric.endMeasure(true) )
|
||||
.then( (_) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'],
|
||||
['timeEnd', 'benchpress0', 'benchpress1'],
|
||||
'readPerfLog',
|
||||
['timeEnd', 'benchpress1', 'benchpress2'],
|
||||
'readPerfLog'
|
||||
]);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should loop and aggregate until the end mark is present', (done) => {
|
||||
var events = [
|
||||
[ markStartEvent('benchpress0'), startEvent('script', 1) ],
|
||||
[ endEvent('script', 2) ],
|
||||
[ startEvent('script', 3), endEvent('script', 5), markEndEvent('benchpress0') ]
|
||||
];
|
||||
var metric = createMetric(events);
|
||||
metric.beginMeasure()
|
||||
.then( (_) => metric.endMeasure(false) )
|
||||
.then( (data) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'],
|
||||
['timeEnd', 'benchpress0', null],
|
||||
'readPerfLog',
|
||||
[ 'setTimeout', 100 ],
|
||||
'readPerfLog',
|
||||
[ 'setTimeout', 100 ],
|
||||
'readPerfLog'
|
||||
]);
|
||||
expect(data['script']).toBe(3);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should store events after the end mark for the next call', (done) => {
|
||||
var events = [
|
||||
[ markStartEvent('benchpress0'), markEndEvent('benchpress0'), markStartEvent('benchpress1'),
|
||||
startEvent('script', 1), endEvent('script', 2) ],
|
||||
[ startEvent('script', 3), endEvent('script', 5), markEndEvent('benchpress1') ]
|
||||
];
|
||||
var metric = createMetric(events);
|
||||
metric.beginMeasure()
|
||||
.then( (_) => metric.endMeasure(true) )
|
||||
.then( (data) => {
|
||||
expect(data['script']).toBe(0);
|
||||
return metric.endMeasure(true)
|
||||
})
|
||||
.then( (data) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'],
|
||||
['timeEnd', 'benchpress0', 'benchpress1'],
|
||||
'readPerfLog',
|
||||
['timeEnd', 'benchpress1', 'benchpress2'],
|
||||
'readPerfLog'
|
||||
]);
|
||||
expect(data['script']).toBe(3);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('aggregation', () => {
|
||||
|
||||
function aggregate(events) {
|
||||
ListWrapper.insert(events, 0, markStartEvent('benchpress0'));
|
||||
ListWrapper.push(events, markEndEvent('benchpress0'));
|
||||
var metric = createMetric([events]);
|
||||
return metric
|
||||
.beginMeasure().then( (_) => metric.endMeasure(false) );
|
||||
}
|
||||
|
||||
|
||||
it('should report a single interval', (done) => {
|
||||
aggregate([
|
||||
startEvent('script', 0),
|
||||
endEvent('script', 5)
|
||||
]).then((data) => {
|
||||
expect(data['script']).toBe(5);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should sum up multiple intervals', (done) => {
|
||||
aggregate([
|
||||
startEvent('script', 0),
|
||||
endEvent('script', 5),
|
||||
startEvent('script', 10),
|
||||
endEvent('script', 17)
|
||||
]).then((data) => {
|
||||
expect(data['script']).toBe(12);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore not started intervals', (done) => {
|
||||
aggregate([
|
||||
endEvent('script', 10)
|
||||
]).then((data) => {
|
||||
expect(data['script']).toBe(0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore not ended intervals', (done) => {
|
||||
aggregate([
|
||||
startEvent('script', 10)
|
||||
]).then((data) => {
|
||||
expect(data['script']).toBe(0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
['script', 'gcTime', 'render'].forEach( (metricName) => {
|
||||
it(`should support ${metricName} metric`, (done) => {
|
||||
aggregate([
|
||||
startEvent(metricName, 0),
|
||||
endEvent(metricName, 5)
|
||||
]).then((data) => {
|
||||
expect(data[metricName]).toBe(5);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should support gcAmount metric', (done) => {
|
||||
aggregate([
|
||||
startEvent('gc', 0),
|
||||
endEvent('gc', 5, {'amount': 10})
|
||||
]).then((data) => {
|
||||
expect(data['gcAmount']).toBe(10);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should subtract gcTime in script from script time', (done) => {
|
||||
aggregate([
|
||||
startEvent('script', 0),
|
||||
startEvent('gc', 1),
|
||||
endEvent('gc', 4, {'amount': 10}),
|
||||
endEvent('script', 5)
|
||||
]).then((data) => {
|
||||
expect(data['script']).toBe(2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
describe('gcTimeInScript / gcAmountInScript', () => {
|
||||
|
||||
it('should use gc during script execution', (done) => {
|
||||
aggregate([
|
||||
startEvent('script', 0),
|
||||
startEvent('gc', 1),
|
||||
endEvent('gc', 4, {'amount': 10}),
|
||||
endEvent('script', 5)
|
||||
]).then((data) => {
|
||||
expect(data['gcTimeInScript']).toBe(3);
|
||||
expect(data['gcAmountInScript']).toBe(10);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore gc outside of script execution', (done) => {
|
||||
aggregate([
|
||||
startEvent('gc', 1),
|
||||
endEvent('gc', 4, {'amount': 10}),
|
||||
startEvent('script', 0),
|
||||
endEvent('script', 5)
|
||||
]).then((data) => {
|
||||
expect(data['gcTimeInScript']).toBe(0);
|
||||
expect(data['gcAmountInScript']).toBe(0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function markStartEvent(type) {
|
||||
return {
|
||||
'name': type,
|
||||
'ph': 'b'
|
||||
}
|
||||
}
|
||||
|
||||
function markEndEvent(type) {
|
||||
return {
|
||||
'name': type,
|
||||
'ph': 'e'
|
||||
}
|
||||
}
|
||||
|
||||
function startEvent(type, time) {
|
||||
return {
|
||||
'name': type,
|
||||
'ts': time,
|
||||
'ph': 'B'
|
||||
}
|
||||
}
|
||||
|
||||
function endEvent(type, time, args = null) {
|
||||
return {
|
||||
'name': type,
|
||||
'ts': time,
|
||||
'ph': 'E',
|
||||
'args': args
|
||||
}
|
||||
}
|
||||
|
||||
class MockDriverExtension extends WebDriverExtension {
|
||||
_perfLogs:List;
|
||||
_commandLog:List;
|
||||
constructor(perfLogs, commandLog) {
|
||||
super();
|
||||
this._perfLogs = perfLogs;
|
||||
this._commandLog = commandLog;
|
||||
}
|
||||
|
||||
timeBegin(name):Promise {
|
||||
ListWrapper.push(this._commandLog, ['timeBegin', name]);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
|
||||
timeEnd(name, restartName):Promise {
|
||||
ListWrapper.push(this._commandLog, ['timeEnd', name, restartName]);
|
||||
return PromiseWrapper.resolve(null);
|
||||
}
|
||||
|
||||
readPerfLog():Promise {
|
||||
ListWrapper.push(this._commandLog, 'readPerfLog');
|
||||
if (this._perfLogs.length > 0) {
|
||||
var next = this._perfLogs[0];
|
||||
ListWrapper.removeAt(this._perfLogs, 0);
|
||||
return PromiseWrapper.resolve(next);
|
||||
} else {
|
||||
return PromiseWrapper.resolve([]);
|
||||
}
|
||||
}
|
||||
}
|
101
modules/benchpress/test/reporter/console_reporter_spec.js
Normal file
101
modules/benchpress/test/reporter/console_reporter_spec.js
Normal file
@ -0,0 +1,101 @@
|
||||
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
|
||||
|
||||
import { isBlank, isPresent } from 'angular2/src/facade/lang';
|
||||
import { List, ListWrapper } from 'angular2/src/facade/collection';
|
||||
|
||||
import {
|
||||
SampleState, Reporter, bind, Injector,
|
||||
ConsoleReporter, SampleDescription
|
||||
} from 'benchpress/benchpress';
|
||||
|
||||
export function main() {
|
||||
describe('console reporter', () => {
|
||||
var reporter;
|
||||
var log;
|
||||
|
||||
function createReporter({columnWidth, sampleId, descriptions, metrics}) {
|
||||
log = [];
|
||||
if (isBlank(descriptions)) {
|
||||
descriptions = [];
|
||||
}
|
||||
if (isBlank(sampleId)) {
|
||||
sampleId = 'null';
|
||||
}
|
||||
var bindings = [
|
||||
ConsoleReporter.BINDINGS,
|
||||
bind(SampleDescription).toValue(new SampleDescription(sampleId, descriptions, metrics)),
|
||||
bind(ConsoleReporter.PRINT).toValue((line) => ListWrapper.push(log, line))
|
||||
];
|
||||
if (isPresent(columnWidth)) {
|
||||
ListWrapper.push(bindings, bind(ConsoleReporter.COLUMN_WIDTH).toValue(columnWidth));
|
||||
}
|
||||
reporter = new Injector(bindings).get(Reporter);
|
||||
}
|
||||
|
||||
it('should print the sample id, description and table header', () => {
|
||||
createReporter({
|
||||
columnWidth: 8,
|
||||
sampleId: 'someSample',
|
||||
descriptions: [{
|
||||
'a': 1,
|
||||
'b': 2
|
||||
}],
|
||||
metrics: {
|
||||
'm1': 'some desc',
|
||||
'm2': 'some other desc'
|
||||
}
|
||||
});
|
||||
expect(log).toEqual([
|
||||
'BENCHMARK someSample',
|
||||
'Description:',
|
||||
'- a: 1',
|
||||
'- b: 2',
|
||||
'Metrics:',
|
||||
'- m1: some desc',
|
||||
'- m2: some other desc',
|
||||
'',
|
||||
' m1 | m2',
|
||||
'-------- | --------',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should print a table row', () => {
|
||||
createReporter({
|
||||
columnWidth: 8,
|
||||
metrics: {
|
||||
'a': '',
|
||||
'b': ''
|
||||
}
|
||||
});
|
||||
log = [];
|
||||
reporter.reportMeasureValues(0, {
|
||||
'a': 1.23, 'b': 2
|
||||
});
|
||||
expect(log).toEqual([
|
||||
' 1.23 | 2.00'
|
||||
]);
|
||||
});
|
||||
|
||||
it('should print the table footer and stats when there is a valid sample', () => {
|
||||
createReporter({
|
||||
columnWidth: 8,
|
||||
metrics: {
|
||||
'a': '',
|
||||
'b': ''
|
||||
}
|
||||
});
|
||||
log = [];
|
||||
reporter.reportSample([], [{
|
||||
'a': 3, 'b': 6
|
||||
},{
|
||||
'a': 5, 'b': 9
|
||||
}]);
|
||||
expect(log).toEqual([
|
||||
'======== | ========',
|
||||
'4.00±25% | 7.50±20%'
|
||||
]);
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
119
modules/benchpress/test/runner_spec.js
Normal file
119
modules/benchpress/test/runner_spec.js
Normal file
@ -0,0 +1,119 @@
import {describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';
import {
Runner, Sampler, SampleDescription,
Validator, bind, Injector, Metric,
Options
} from 'benchpress/benchpress';
import { isBlank } from 'angular2/src/facade/lang';
import { Promise, PromiseWrapper } from 'angular2/src/facade/async';

export function main() {
describe('runner', () => {
var injector;
var runner;

function createRunner(defaultBindings = null) {
if (isBlank(defaultBindings)) {
defaultBindings = [];
}
runner = new Runner([
defaultBindings,
bind(Sampler).toFactory(
(_injector) => {
injector = _injector;
return new MockSampler();
}, [Injector]
),
bind(Metric).toFactory( () => new MockMetric(), []),
bind(Validator).toFactory( () => new MockValidator(), [])
]);
return runner;
}

it('should set SampleDescription.id', (done) => {
createRunner().sample({id: 'someId'}).then( (_) => {
expect(injector.get(SampleDescription).id).toBe('someId');
done();
});
});

it('should merge SampleDescription.description', (done) => {
createRunner([
bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1})
]).sample({id: 'someId', bindings: [
bind(Options.SAMPLE_DESCRIPTION).toValue({'b': 2})
]}).then( (_) => {
expect(injector.get(SampleDescription).description).toEqual({
'forceGc': false,
'a': 1,
'b': 2,
'v': 11
});
done();
});
});

it('should fill SampleDescription.metrics from the Metric', (done) => {
createRunner().sample({id: 'someId'}).then( (_) => {
expect(injector.get(SampleDescription).metrics).toEqual({ 'm1': 'some metric' });
done();
});
});

it('should bind Options.EXECUTE', (done) => {
var execute = () => {};
createRunner().sample({id: 'someId', execute: execute}).then( (_) => {
expect(injector.get(Options.EXECUTE)).toEqual(execute);
done();
});
});

it('should bind Options.PREPARE', (done) => {
var prepare = () => {};
createRunner().sample({id: 'someId', prepare: prepare}).then( (_) => {
expect(injector.get(Options.PREPARE)).toEqual(prepare);
done();
});
});

it('should overwrite bindings per sample call', (done) => {
createRunner([
bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1}),
]).sample({id: 'someId', bindings: [
bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 2}),
]}).then( (_) => {
expect(injector.get(SampleDescription).description['a']).toBe(2);
done();
});

});

});
}

class MockValidator extends Validator {
constructor() {
super();
}
describe() {
return { 'v': 11 };
}
}

class MockMetric extends Metric {
constructor() {
super();
}
describe() {
return { 'm1': 'some metric' };
}
}

class MockSampler extends Sampler {
constructor() {
super();
}
sample():Promise {
return PromiseWrapper.resolve(23);
}
}
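For orientation, the usage pattern the runner spec above exercises looks roughly like the following hypothetical benchmark script (illustrative, not part of the commit; Runner, Options and bind are the names from the tests, the ids and values are made up):

import { Runner, Options, bind } from 'benchpress/benchpress';

var runner = new Runner([
  // defaults that get merged into every SampleDescription
  bind(Options.DEFAULT_DESCRIPTION).toValue({'someDefault': 1})
]);

runner.sample({
  id: 'someBenchmark',
  prepare: () => { /* reset state before each measured iteration */ },
  execute: () => { /* the work being measured */ },
  bindings: [
    // per-call bindings override the defaults given to the constructor
    bind(Options.SAMPLE_DESCRIPTION).toValue({'someParam': 2})
  ]
}).then( (state) => {
  // resolves with whatever the configured Sampler resolves with
});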
364
modules/benchpress/test/sampler_spec.js
Normal file
@ -0,0 +1,364 @@
import {describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';

import { isBlank, isPresent, BaseException, stringify } from 'angular2/src/facade/lang';
import { ListWrapper, List } from 'angular2/src/facade/collection';
import { PromiseWrapper, Promise } from 'angular2/src/facade/async';

import {
Sampler, WebDriverAdapter, WebDriverExtension,
Validator, Metric, Reporter, Browser,
bind, Injector, Options
} from 'benchpress/benchpress';

export function main() {
var EMPTY_EXECUTE = () => {};

describe('sampler', () => {
var sampler;

function createSampler({
driver,
driverExtension,
metric,
reporter,
validator,
forceGc,
prepare,
execute
} = {}) {
if (isBlank(metric)) {
metric = new MockMetric([]);
}
if (isBlank(reporter)) {
reporter = new MockReporter([]);
}
if (isBlank(driver)) {
driver = new MockDriverAdapter([]);
}
if (isBlank(driverExtension)) {
driverExtension = new MockDriverExtension([]);
}
var bindings = ListWrapper.concat(Sampler.BINDINGS, [
bind(Metric).toValue(metric),
bind(Reporter).toValue(reporter),
bind(WebDriverAdapter).toValue(driver),
bind(WebDriverExtension).toValue(driverExtension),
bind(Options.EXECUTE).toValue(execute),
bind(Validator).toValue(validator)
]);
if (isPresent(prepare)) {
ListWrapper.push(bindings, bind(Options.PREPARE).toValue(prepare));
}
if (isPresent(forceGc)) {
ListWrapper.push(bindings, bind(Options.FORCE_GC).toValue(forceGc));
}

sampler = new Injector(bindings).get(Sampler);
}

it('should call the prepare and execute callbacks using WebDriverAdapter.waitFor', (done) => {
var log = [];
var count = 0;
var driver = new MockDriverAdapter([], (callback) => {
var result = callback();
ListWrapper.push(log, result);
return PromiseWrapper.resolve(result);
});
createSampler({
driver: driver,
validator: createCountingValidator(2),
prepare: () => {
return count++;
},
execute: () => {
return count++;
}
});
sampler.sample().then( (_) => {
expect(count).toBe(4);
expect(log).toEqual([0,1,2,3]);
done();
});

});

it('should call prepare, gc, beginMeasure, execute, gc, endMeasure for every iteration', (done) => {
var workCount = 0;
var log = [];
createSampler({
forceGc: true,
metric: createCountingMetric(log),
driverExtension: new MockDriverExtension(log),
validator: createCountingValidator(2),
prepare: () => {
ListWrapper.push(log, `p${workCount++}`);
},
execute: () => {
ListWrapper.push(log, `w${workCount++}`);
}
});
sampler.sample().then( (_) => {
expect(log).toEqual([
['gc'],
'p0',
['gc'],
['beginMeasure'],
'w1',
['gc'],
['endMeasure', false, {'script': 0}],
'p2',
['gc'],
['beginMeasure'],
'w3',
['gc'],
['endMeasure', false, {'script': 1}],
]);
done();
});
});

it('should call execute, gc, endMeasure for every iteration if there is no prepare callback', (done) => {
var log = [];
var workCount = 0;
createSampler({
forceGc: true,
metric: createCountingMetric(log),
driverExtension: new MockDriverExtension(log),
validator: createCountingValidator(2),
execute: () => {
ListWrapper.push(log, `w${workCount++}`);
},
prepare: null
});
sampler.sample().then( (_) => {
expect(log).toEqual([
['gc'],
['beginMeasure'],
'w0',
['gc'],
['endMeasure', true, {'script': 0}],
'w1',
['gc'],
['endMeasure', true, {'script': 1}],
]);
done();
});
});

it('should not gc if the flag is not set', (done) => {
var workCount = 0;
var log = [];
createSampler({
metric: createCountingMetric(),
driverExtension: new MockDriverExtension(log),
validator: createCountingValidator(2),
prepare: EMPTY_EXECUTE,
execute: EMPTY_EXECUTE
});
sampler.sample().then( (_) => {
expect(log).toEqual([]);
done();
});
});

it('should only collect metrics for execute and ignore metrics from prepare', (done) => {
var scriptTime = 0;
var iterationCount = 1;
createSampler({
validator: createCountingValidator(2),
metric: new MockMetric([], () => {
var result = PromiseWrapper.resolve({'script': scriptTime});
scriptTime = 0;
return result;
}),
prepare: () => {
scriptTime = 1 * iterationCount;
},
execute: () => {
scriptTime = 10 * iterationCount;
iterationCount++;
}
});
sampler.sample().then( (state) => {
expect(state.completeSample.length).toBe(2);
expect(state.completeSample[0]).toEqual({'script': 10});
expect(state.completeSample[1]).toEqual({'script': 20});
done();
});
});

it('should call the validator for every execution and store the valid sample', (done) => {
var log = [];
var validSample = [{}];

createSampler({
metric: createCountingMetric(),
validator: createCountingValidator(2, validSample, log),
execute: EMPTY_EXECUTE
});
sampler.sample().then( (state) => {
expect(state.validSample).toBe(validSample);
// TODO(tbosch): Why does this fail??
// expect(log).toEqual([
// ['validate', [{'script': 0}], null],
// ['validate', [{'script': 0}, {'script': 1}], validSample]
// ]);

expect(log.length).toBe(2);
expect(log[0]).toEqual(
['validate', [{'script': 0}], null]
);
expect(log[1]).toEqual(
['validate', [{'script': 0}, {'script': 1}], validSample]
);

done();
});
});

it('should report the metric values', (done) => {
var log = [];
var validSample = [{}];
createSampler({
validator: createCountingValidator(2, validSample),
metric: createCountingMetric(),
reporter: new MockReporter(log),
execute: EMPTY_EXECUTE
});
sampler.sample().then( (_) => {
// TODO(tbosch): Why does this fail??
// expect(log).toEqual([
// ['reportMeasureValues', 0, {'script': 0}],
// ['reportMeasureValues', 1, {'script': 1}],
// ['reportSample', [{'script': 0}, {'script': 1}], validSample]
// ]);
expect(log.length).toBe(3);
expect(log[0]).toEqual(
['reportMeasureValues', 0, {'script': 0}]
);
expect(log[1]).toEqual(
['reportMeasureValues', 1, {'script': 1}]
);
expect(log[2]).toEqual(
['reportSample', [{'script': 0}, {'script': 1}], validSample]
);

done();
});
});

});
}

function createCountingValidator(count, validSample = null, log = null) {
return new MockValidator(log, (completeSample) => {
count--;
if (count === 0) {
return isPresent(validSample) ? validSample : completeSample;
} else {
return null;
}
});
}

function createCountingMetric(log = null) {
var scriptTime = 0;
return new MockMetric(log, () => {
return { 'script': scriptTime++ };
});
}

class MockDriverAdapter extends WebDriverAdapter {
_log:List;
_waitFor:Function;
constructor(log = null, waitFor = null) {
super();
if (isBlank(log)) {
log = [];
}
this._log = log;
this._waitFor = waitFor;
}
waitFor(callback:Function):Promise {
if (isPresent(this._waitFor)) {
return this._waitFor(callback);
} else {
return PromiseWrapper.resolve(callback());
}
}
}


class MockDriverExtension extends WebDriverExtension {
_log:List;
constructor(log = null) {
super();
if (isBlank(log)) {
log = [];
}
this._log = log;
}
gc():Promise {
ListWrapper.push(this._log, ['gc']);
return PromiseWrapper.resolve(null);
}
}

class MockValidator extends Validator {
_validate:Function;
_log:List;
constructor(log = null, validate = null) {
super();
this._validate = validate;
if (isBlank(log)) {
log = [];
}
this._log = log;
}
validate(completeSample:List<Object>):List<Object> {
var stableSample = isPresent(this._validate) ? this._validate(completeSample) : completeSample;
ListWrapper.push(this._log, ['validate', completeSample, stableSample]);
return stableSample;
}
}

class MockMetric extends Metric {
_endMeasure:Function;
_log:List;
constructor(log = null, endMeasure = null) {
super();
this._endMeasure = endMeasure;
if (isBlank(log)) {
log = [];
}
this._log = log;
}
beginMeasure() {
ListWrapper.push(this._log, ['beginMeasure']);
return PromiseWrapper.resolve(null);
}
endMeasure(restart) {
var measureValues = isPresent(this._endMeasure) ? this._endMeasure() : {};
ListWrapper.push(this._log, ['endMeasure', restart, measureValues]);
return PromiseWrapper.resolve(measureValues);
}
}

class MockReporter extends Reporter {
_log:List;
constructor(log = null) {
super();
if (isBlank(log)) {
log = [];
}
this._log = log;
}
reportMeasureValues(index, values):Promise {
ListWrapper.push(this._log, ['reportMeasureValues', index, values]);
return PromiseWrapper.resolve(null);
}
reportSample(completeSample, validSample):Promise {
ListWrapper.push(this._log, ['reportSample', completeSample, validSample]);
return PromiseWrapper.resolve(null);
}
}
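The two ordering specs above pin down the Sampler's measurement loop. A plain sketch of that sequencing (illustrative, not part of the commit; written with async/await for brevity instead of the PromiseWrapper chaining and WebDriverAdapter.waitFor calls the real Sampler goes through), for the forceGc case with a prepare callback:

// Without a prepare callback the loop instead starts measuring once and keeps
// it running, calling endMeasure(true) after every execute (see the second
// spec above).
async function sampleWithPrepare(extension, metric, prepare, execute, iterations) {
  await extension.gc();              // ['gc'] once up front
  for (var i = 0; i < iterations; i++) {
    prepare();                       // 'p<n>'
    await extension.gc();            // ['gc']
    await metric.beginMeasure();     // ['beginMeasure']
    execute();                       // 'w<n>'
    await extension.gc();            // ['gc']
    await metric.endMeasure(false);  // ['endMeasure', false, {...}]
  }
}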
34
modules/benchpress/test/statistic_spec.js
Normal file
@ -0,0 +1,34 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';

import { Statistic } from 'benchpress/src/statistic';

import { NaN } from 'angular2/src/facade/math';

export function main() {
describe('statistic', () => {

it('should calculate the mean', () => {
expect(Statistic.calculateMean([])).toBeNaN();
expect(Statistic.calculateMean([1,2,3])).toBe(2.0);
});

it('should calculate the standard deviation', () => {
expect(Statistic.calculateStandardDeviation([], NaN)).toBeNaN();
expect(Statistic.calculateStandardDeviation([1], 1)).toBe(0.0);
expect(Statistic.calculateStandardDeviation([2, 4, 4, 4, 5, 5, 7, 9], 5)).toBe(2.0);
});

it('should calculate the coefficient of variation', () => {
expect(Statistic.calculateCoefficientOfVariation([], NaN)).toBeNaN();
expect(Statistic.calculateCoefficientOfVariation([1], 1)).toBe(0.0);
expect(Statistic.calculateCoefficientOfVariation([2, 4, 4, 4, 5, 5, 7, 9], 5)).toBe(40.0);
});

it('should calculate the regression slope', () => {
expect(Statistic.calculateRegressionSlope([], NaN, [], NaN)).toBeNaN();
expect(Statistic.calculateRegressionSlope([1], 1, [2], 2)).toBeNaN();
expect(Statistic.calculateRegressionSlope([1,2], 1.5, [2,4], 3)).toBe(2.0);
});

});
}
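The regression slope expectation above can be checked by hand: for x = [1, 2] with mean 1.5 and y = [2, 4] with mean 3, the least-squares slope is ((-0.5)*(-1) + (0.5)*(1)) / ((-0.5)^2 + (0.5)^2) = 1 / 0.5 = 2; a single point gives 0/0, hence NaN. A minimal sketch of that formula (illustrative, not part of the commit; it only mirrors the argument order used in the spec):

function regressionSlope(xValues, xMean, yValues, yMean) {
  var dividend = 0;
  var divisor = 0;
  for (var i = 0; i < xValues.length; i++) {
    dividend += (xValues[i] - xMean) * (yValues[i] - yMean);
    divisor += (xValues[i] - xMean) * (xValues[i] - xMean);
  }
  return dividend / divisor;
}

// regressionSlope([1, 2], 1.5, [2, 4], 3) === 2
// regressionSlope([1], 1, [2], 2) is 0/0, i.e. NaN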
@ -0,0 +1,51 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';

import {
Validator, RegressionSlopeValidator, Injector, bind
} from 'benchpress/benchpress';

export function main() {
describe('regression slope validator', () => {
var validator;

function createValidator({size, metric}) {
validator = new Injector([
RegressionSlopeValidator.BINDINGS,
bind(RegressionSlopeValidator.METRIC).toValue(metric),
bind(RegressionSlopeValidator.SAMPLE_SIZE).toValue(size)
]).get(Validator);
}

it('should return sampleSize and metric as description', () => {
createValidator({size: 2, metric: 'script'});
expect(validator.describe()).toEqual({
'sampleSize': 2,
'regressionSlopeMetric': 'script'
});
});

it('should return null while the completeSample is smaller than the given size', () => {
createValidator({size: 2, metric: 'script'});
expect(validator.validate([])).toBe(null);
expect(validator.validate([{}])).toBe(null);
});

it('should return null while the regression slope is < 0', () => {
createValidator({size: 2, metric: 'script'});
expect(validator.validate([{'script':2}, {'script':1}])).toBe(null);
});

it('should return the last sampleSize runs when the regression slope is ==0', () => {
createValidator({size: 2, metric: 'script'});
expect(validator.validate([{'script':1}, {'script':1}])).toEqual([{'script':1}, {'script':1}]);
expect(validator.validate([{'script':1}, {'script':1}, {'script':1}])).toEqual([{'script':1}, {'script':1}]);
});

it('should return the last sampleSize runs when the regression slope is >0', () => {
createValidator({size: 2, metric: 'script'});
expect(validator.validate([{'script':1}, {'script':2}])).toEqual([{'script':1}, {'script':2}]);
expect(validator.validate([{'script':1}, {'script':2}, {'script':3}])).toEqual([{'script':2}, {'script':3}]);
});

});
}
38
modules/benchpress/test/validator/size_validator_spec.js
Normal file
@ -0,0 +1,38 @@
import {describe, ddescribe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';

import {
Validator, SizeValidator, Injector, bind
} from 'benchpress/benchpress';

export function main() {
describe('size validator', () => {
var validator;

function createValidator(size) {
validator = new Injector([
SizeValidator.BINDINGS,
bind(SizeValidator.SAMPLE_SIZE).toValue(size)
]).get(Validator);
}

it('should return sampleSize as description', () => {
createValidator(2);
expect(validator.describe()).toEqual({
'sampleSize': 2
});
});

it('should return null while the completeSample is smaller than the given size', () => {
createValidator(2);
expect(validator.validate([])).toBe(null);
expect(validator.validate([{}])).toBe(null);
});

it('should return the last sampleSize runs when it has at least the given size', () => {
createValidator(2);
expect(validator.validate([{'a':1}, {'b':2}])).toEqual([{'a':1}, {'b':2}]);
expect(validator.validate([{'a':1}, {'b':2}, {'c':3}])).toEqual([{'b':2}, {'c':3}]);
});

});
}
@ -0,0 +1,267 @@
import {describe, it, iit, xit, expect, beforeEach, afterEach} from 'angular2/test_lib';

import { ListWrapper } from 'angular2/src/facade/collection';
import { PromiseWrapper } from 'angular2/src/facade/async';
import { Json, isBlank } from 'angular2/src/facade/lang';

import {
WebDriverExtension, ChromeDriverExtension,
WebDriverAdapter, Injector, bind
} from 'benchpress/benchpress';

export function main() {
describe('chrome driver extension', () => {
var log;
var extension;

function createExtension(perfRecords = null) {
if (isBlank(perfRecords)) {
perfRecords = [];
}
log = [];
extension = new Injector([
ChromeDriverExtension.BINDINGS,
bind(WebDriverAdapter).toValue(new MockDriverAdapter(log, perfRecords))
]).get(WebDriverExtension);
return extension;
}

it('should force gc via window.gc()', (done) => {
createExtension().gc().then( (_) => {
expect(log).toEqual([['executeScript', 'window.gc()']]);
done();
});
});

it('should mark the timeline via console.timeStamp()', (done) => {
createExtension().timeBegin('someName').then( (_) => {
expect(log).toEqual([['executeScript', `console.timeStamp('begin_someName');`]]);
done();
});
});

it('should mark the timeline via console.timeEnd()', (done) => {
createExtension().timeEnd('someName').then( (_) => {
expect(log).toEqual([['executeScript', `console.timeStamp('end_someName');`]]);
done();
});
});

it('should mark the timeline via console.time() and console.timeEnd()', (done) => {
createExtension().timeEnd('name1', 'name2').then( (_) => {
expect(log).toEqual([['executeScript', `console.timeStamp('end_name1');console.timeStamp('begin_name2');`]]);
done();
});
});

describe('readPerfLog', () => {

it('should execute a dummy script before reading them', (done) => {
// TODO(tbosch): This seems to be a bug in ChromeDriver:
// Sometimes it does not report the newest events of the performance log
// to the WebDriver client unless a script is executed...
createExtension([]).readPerfLog().then( (_) => {
expect(log).toEqual([ [ 'executeScript', '1+1' ], [ 'logs', 'performance' ] ]);
done();
});
});

it('should report FunctionCall records as "script"', (done) => {
createExtension([
durationRecord('FunctionCall', 1, 5)
]).readPerfLog().then( (events) => {
expect(events).toEqual([
startEvent('script', 1),
endEvent('script', 5)
]);
done();
});
});

it('should ignore FunctionCalls from webdriver', (done) => {
createExtension([
internalScriptRecord(1, 5)
]).readPerfLog().then( (events) => {
expect(events).toEqual([]);
done();
});
});

it('should report begin timestamps', (done) => {
createExtension([
timeStampRecord('begin_someName')
]).readPerfLog().then( (events) => {
expect(events).toEqual([
markStartEvent('someName')
]);
done();
});
});

it('should report end timestamps', (done) => {
createExtension([
timeStampRecord('end_someName')
]).readPerfLog().then( (events) => {
expect(events).toEqual([
markEndEvent('someName')
]);
done();
});
});

it('should report gc', (done) => {
createExtension([
gcRecord(1, 3, 21)
]).readPerfLog().then( (events) => {
expect(events).toEqual([
startEvent('gc', 1),
endEvent('gc', 3, {'amount': 21}),
]);
done();
});
});

['RecalculateStyles', 'Layout', 'UpdateLayerTree', 'Paint', 'Rasterize', 'CompositeLayers'].forEach( (recordType) => {
it(`should report ${recordType}`, (done) => {
createExtension([
durationRecord(recordType, 0, 1)
]).readPerfLog().then( (events) => {
expect(events).toEqual([
startEvent('render', 0),
endEvent('render', 1),
]);
done();
});
});
});


it('should walk children', (done) => {
createExtension([
durationRecord('FunctionCall', 1, 5, [
timeStampRecord('begin_someName')
])
]).readPerfLog().then( (events) => {
expect(events).toEqual([
startEvent('script', 1),
markStartEvent('someName'),
endEvent('script', 5)
]);
done();
});
});

});

});
}

function timeStampRecord(name) {
return {
'type': 'TimeStamp',
'data': {
'message': name
}
};
}

function durationRecord(type, startTime, endTime, children = null) {
if (isBlank(children)) {
children = [];
}
return {
'type': type,
'startTime': startTime,
'endTime': endTime,
'children': children
};
}

function internalScriptRecord(startTime, endTime) {
return {
'type': 'FunctionCall',
'startTime': startTime,
'endTime': endTime,
'data': {
'scriptName': 'InjectedScript'
}
};
}

function gcRecord(startTime, endTime, gcAmount) {
return {
'type': 'GCEvent',
'startTime': startTime,
'endTime': endTime,
'data': {
'usedHeapSizeDelta': gcAmount
}
};
}

function markStartEvent(type) {
return {
'name': type,
'ph': 'b'
}
}

function markEndEvent(type) {
return {
'name': type,
'ph': 'e'
}
}

function startEvent(type, time) {
return {
'name': type,
'ts': time,
'ph': 'B'
}
}

function endEvent(type, time, args = null) {
return {
'name': type,
'ts': time,
'ph': 'E',
'args': args
}
}

class MockDriverAdapter extends WebDriverAdapter {
_log:List;
_perfRecords:List;
constructor(log, perfRecords) {
super();
this._log = log;
this._perfRecords = perfRecords;
}

executeScript(script) {
ListWrapper.push(this._log, ['executeScript', script]);
return PromiseWrapper.resolve(null);
}

logs(type) {
ListWrapper.push(this._log, ['logs', type]);
if (type === 'performance') {
return PromiseWrapper.resolve(this._perfRecords.map(function(record) {
return {
'message': Json.stringify({
'message': {
'method': 'Timeline.eventRecorded',
'params': {
'record': record
}
}
})
};
}));
} else {
return null;
}
}

}
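The MockDriverAdapter above mirrors the wire format the extension has to parse: every entry of the 'performance' log is a JSON string that wraps a timeline record under message.message.params.record. A hypothetical sketch of unwrapping one such entry (illustrative, not part of the commit):

// entry is one element of the list returned by WebDriverAdapter.logs('performance')
function unwrapTimelineRecord(entry) {
  var message = JSON.parse(entry['message'])['message'];
  if (message['method'] !== 'Timeline.eventRecorded') {
    return null; // not a timeline record
  }
  // e.g. {'type': 'GCEvent', 'startTime': 1, 'endTime': 3,
  //       'data': {'usedHeapSizeDelta': 21}}
  return message['params']['record'];
}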