chore: move benchpress to @angular/benchpress
6	modules/@angular/benchpress/README.dart.md	Normal file
@@ -0,0 +1,6 @@
Benchpress - a framework for e2e performance tests
=========

The sources for this package are in the main [Angular2](https://github.com/angular/angular) repo. Please file issues and pull requests against that repo.

License: MIT
8	modules/@angular/benchpress/README.js.md	Normal file
@@ -0,0 +1,8 @@
Benchpress - a framework for e2e performance tests
=========

The sources for this package are in the main [Angular2](https://github.com/angular/angular) repo. Please file issues and pull requests against that repo.

See [this project](https://github.com/angular/benchpress-tree) for an example.

License: MIT
3	modules/@angular/benchpress/benchpress.dart	Normal file
@@ -0,0 +1,3 @@
export './common.dart';
export './src/webdriver/async_webdriver_adapter.dart'
    show AsyncWebDriverAdapter;
33	modules/@angular/benchpress/benchpress.ts	Normal file
@@ -0,0 +1,33 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {Options} from './common';

export * from './common';
export {SeleniumWebDriverAdapter} from './src/webdriver/selenium_webdriver_adapter';

var fs = require('fs');

// TODO(tbosch): right now we bind the `writeFile` method
// in benchpress/benchpress.es6. This does not work for Dart,
// find another way...
// Note: Can't do the `require` call in a facade as it can't be loaded into the browser
// for our unit tests via karma.
Options.DEFAULT_PROVIDERS.push({provide: Options.WRITE_FILE, useValue: writeFile});

function writeFile(filename, content): Promise<any> {
  return new Promise(function(resolve, reject) {
    fs.writeFile(filename, content, (error) => {
      if (error) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
}
30	modules/@angular/benchpress/common.ts	Normal file
@@ -0,0 +1,30 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

export {Injector, OpaqueToken, ReflectiveInjector} from '@angular/core/src/di';
export {Options} from './src/common_options';
export {MeasureValues} from './src/measure_values';
export {Metric} from './src/metric';
export {MultiMetric} from './src/metric/multi_metric';
export {PerflogMetric} from './src/metric/perflog_metric';
export {UserMetric} from './src/metric/user_metric';
export {Reporter} from './src/reporter';
export {ConsoleReporter} from './src/reporter/console_reporter';
export {JsonFileReporter} from './src/reporter/json_file_reporter';
export {MultiReporter} from './src/reporter/multi_reporter';
export {Runner} from './src/runner';
export {SampleDescription} from './src/sample_description';
export {SampleState, Sampler} from './src/sampler';
export {Validator} from './src/validator';
export {RegressionSlopeValidator} from './src/validator/regression_slope_validator';
export {SizeValidator} from './src/validator/size_validator';
export {WebDriverAdapter} from './src/web_driver_adapter';
export {PerfLogFeatures, WebDriverExtension} from './src/web_driver_extension';
export {ChromeDriverExtension} from './src/webdriver/chrome_driver_extension';
export {FirefoxDriverExtension} from './src/webdriver/firefox_driver_extension';
export {IOsDriverExtension} from './src/webdriver/ios_driver_extension';
301	modules/@angular/benchpress/docs/index.md	Normal file
@@ -0,0 +1,301 @@
# Benchpress

Benchpress is a framework for e2e performance tests.
See [here for an example project](https://github.com/angular/benchpress-tree).

# Why?

There are so-called "micro benchmarks" that essentially use a stopwatch in the browser to measure time
(e.g. via `performance.now()`). This approach is limited to time, and in some cases memory
(Chrome with special flags), as metrics. It does not allow measuring:

- rendering time: e.g. the time the browser spends laying out or painting elements. This can be used, for example,
  to test the performance impact of stylesheet changes.
- garbage collection: e.g. how long the browser paused script execution, and how much memory was collected.
  This can be used to stabilize script execution time, as garbage collection times are usually very
  unpredictable. This data can also be used to measure and improve memory usage of applications,
  as the garbage collection amount directly affects garbage collection time.
- script execution time as distinct from waiting: e.g. to measure only the client-side time spent
  in a complex user interaction, ignoring backend calls.
- fps, to assert the smoothness of scrolling and animations.

This kind of data is already available in the DevTools of modern browsers. However, there is no standard way to
use those tools in an automated way to measure web app performance, especially not across platforms.

Benchpress tries to fill this gap, i.e. it allows access to all kinds of performance metrics in an automated way.


# How it works

Benchpress uses webdriver to read out the so-called "performance log" of browsers. This contains all kinds of interesting
data, e.g. when a script started/ended executing, when gc started/ended, when the browser painted something to the screen, and so on.

As browsers are different, benchpress has plugins to normalize these events.
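For illustration, a normalized event stream looks roughly like the following. The field names (`ph`, `ts`, `name`, `args`) match the events the perflog metric in this package consumes; the concrete values are invented:

```js
// Hypothetical, simplified excerpt of a normalized performance log.
var events = [
  {ph: 'B', name: 'script', ts: 100},                          // script execution begins
  {ph: 'B', name: 'gc', ts: 120, args: {usedHeapSize: 1500}},  // gc pause starts inside the script
  {ph: 'E', name: 'gc', ts: 125, args: {usedHeapSize: 1000}},  // gc pause ends; heap shrank
  {ph: 'E', name: 'script', ts: 180},                          // script execution ends
  {ph: 'B', name: 'render', ts: 185},                          // browser starts layout/paint
  {ph: 'E', name: 'render', ts: 200}
];
```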


# Features

* Provides a loop (the so-called "Sampler") that executes the benchmark multiple times
* Automatically waits/detects until the browser is "warm"
* Reporters provide a normalized way to store results (see the sketch after this list):
  - console reporter
  - file reporter
  - Google Big Query reporter (coming soon)
* Supports micro benchmarks as well via `console.time()` / `console.timeEnd()`
  - `console.time()` / `console.timeEnd()` mark the timeline in the DevTools, so it makes sense
    to use them in a micro benchmark to visualize and understand it, with or without benchpress.
  - running micro benchmarks in benchpress leverages the already existing reporters,
    the sampler and the auto warmup feature of benchpress.
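As a rough sketch of how these pieces are wired together when creating a runner (hedged: `SeleniumWebDriverAdapter.PROTRACTOR_PROVIDERS` and `Validator.bindTo` are assumed by analogy with this package's exports and the `bindTo` helpers defined in its sources, not verified API):

```js
var bp = require('benchpress');

// Sketch only: pick a webdriver adapter, a reporter and a validator.
var runner = new bp.Runner([
  bp.SeleniumWebDriverAdapter.PROTRACTOR_PROVIDERS,  // assumed: reuse protractor's selenium session
  bp.Reporter.bindTo(bp.ConsoleReporter),            // print each sample to the console
  bp.Validator.bindTo(bp.RegressionSlopeValidator)   // sample until scriptTime stops decreasing
]);
```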


# Supported browsers

* Chrome on all platforms
* Mobile Safari (iOS)
* Firefox (work in progress)


# How to write a benchmark

A benchmark in benchpress consists of an application under test
and a benchmark driver. The application under test is the
actual application, made of html/css/js, that should be tested.
A benchmark driver is a webdriver test that interacts with the
application under test.


## A simple benchmark

Let's assume we want to measure the script execution time and the render time
it takes to fill a container element with a complex html string.

The application under test could look like this:

```
index.html:

<button id="reset" onclick="reset()">Reset</button>
<button id="fill" onclick="fill()">fill innerHTML</button>
<div id="container"></div>
<script>
  var container = document.getElementById('container');
  var complexHtmlString = '...'; // TODO

  function reset() { container.innerHTML = ''; }

  function fill() {
    container.innerHTML = complexHtmlString;
  }
</script>
```

A benchmark driver could look like this:

```
// A runner contains the shared configuration
// and can be shared across multiple tests.
var runner = new Runner(...);

driver.get('http://myserver/index.html');

var resetBtn = driver.findElement(By.id('reset'));
var fillBtn = driver.findElement(By.id('fill'));

runner.sample({
  id: 'fillElement',
  // Prepare is optional...
  prepare: () => {
    resetBtn.click();
  },
  execute: () => {
    fillBtn.click();
    // Note: if fillBtn triggered asynchronous work,
    // we would need to wait here until it finishes.
  }
});
```

## Measuring in the browser

If it wants to, the application under test can also take measurements on its own,
e.g.:

```
index.html:

<button id="measure" onclick="measure()">Measure document.createElement</button>
<script>
  function measure() {
    console.time('createElement*10000');
    for (var i = 0; i < 10000; i++) {
      document.createElement('div');
    }
    console.timeEnd('createElement*10000');
  }
</script>
```

When the `measure` button is clicked, it marks the timeline and creates 10000 elements.
It uses the special name `createElement*10000` to tell benchpress that the
time that was measured is for 10000 calls to createElement and that benchpress should
take the average for it.

A test driver for this would look like this:

```
driver.get('.../index.html');

var measureBtn = driver.findElement(By.id('measure'));
runner.sample({
  id: 'createElement test',
  microMetrics: {
    'createElement': 'time to create an element (ms)'
  },
  execute: () => {
    measureBtn.click();
  }
});
```

When looking into the DevTools Timeline, we see a marker as well:


### Custom Metrics Without Using `console.time`

It's also possible to measure any "user metric" within the browser
by setting a numeric value on the `window` object. For example:

```js
bootstrap(App)
  .then(() => {
    window.timeToBootstrap = Date.now() - performance.timing.navigationStart;
  });
```

A test driver for this user metric could be written as follows:

```js
describe('home page load', function() {
  it('should log load time for a 2G connection', done => {
    runner.sample({
      execute: () => {
        browser.get(`http://localhost:8080`);
      },
      userMetrics: {
        timeToBootstrap: 'The time in milliseconds to bootstrap'
      },
      bindings: [
        bind(RegressionSlopeValidator.METRIC).toValue('timeToBootstrap')
      ]
    }).then(done);
  });
});
```

Using this strategy, benchpress will wait until the specified property name,
`timeToBootstrap` in this case, is defined as a number on the `window` object
inside the application under test.

# Smoothness Metrics

Benchpress can also measure the "smoothness" of scrolling and animations. To do that, the following set of metrics can be collected by benchpress:

- `frameTime.mean`: mean frame time in ms (target: 16.6ms for 60fps)
- `frameTime.worst`: worst frame time in ms
- `frameTime.best`: best frame time in ms
- `frameTime.smooth`: percentage of frames that hit 60fps

To collect these metrics, you need to execute `console.time('frameCapture')` and `console.timeEnd('frameCapture')` either in your benchmark application or in your benchmark driver via webdriver. The metrics mentioned above will only be collected between those two calls, and it is recommended to wrap the time/timeEnd calls as closely as possible around the action you want to evaluate to get accurate measurements (see the sketch below).

In addition, one extra binding needs to be passed to benchpress in tests that want to collect these metrics:

    benchpress.sample(providers: [bp.bind(bp.Options.CAPTURE_FRAMES).toValue(true)], ... )
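A hedged sketch of marking the capture region from the benchmark driver; `scrollBtn` is a hypothetical element, and passing `providers` through the sample config mirrors the snippet above:

```js
// Sketch only: capture frame times just around the animation we care about.
runner.sample({
  id: 'scroll',
  execute: () => {
    browser.executeScript(`console.time('frameCapture')`);
    scrollBtn.click();  // hypothetical button that triggers the scroll/animation
    browser.executeScript(`console.timeEnd('frameCapture')`);
  },
  providers: [bp.bind(bp.Options.CAPTURE_FRAMES).toValue(true)]
});
```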

# Requests Metrics

Benchpress can also record the number of requests sent and count the received "encoded" bytes since [window.performance.timing.navigationStart](http://www.w3.org/TR/navigation-timing/#dom-performancetiming-navigationstart):

- `receivedData`: number of bytes received since the last navigation start
- `requestCount`: number of requests sent since the last navigation start

To collect these metrics, you need the following corresponding extra providers:

    benchpress.sample(providers: [
      bp.bind(bp.Options.RECEIVED_DATA).toValue(true),
      bp.bind(bp.Options.REQUEST_COUNT).toValue(true)
    ], ... )

# Best practices

* Use normalized environments
  - metrics that are dependent on the performance of the execution environment must be executed on a normalized machine
  - e.g. a real mobile device whose cpu frequency is set to a fixed value.
    * see our [build script](https://github.com/angular/angular/blob/master/scripts/ci/android_cpu.sh)
    * this requires root access, e.g. via a userdebug build of Android on a Google Nexus device
      (see [here](https://source.android.com/source/building-running.html) and [here](https://source.android.com/source/building-devices.html#obtaining-proprietary-binaries))
  - e.g. a calibrated machine that does not run background jobs, has a fixed cpu frequency, ...

* Use relative comparisons
  - relative comparisons are less likely to change over time and help to interpret the results of benchmarks
  - e.g. compare an example written using a ui framework against a hand coded example and track the ratio

* Assert post-commit for commit ranges
  - running benchmarks can take some time. Running them before every commit is usually too slow.
  - when a regression is detected for a commit range, use bisection to find the problematic commit

* Repeat benchmarks multiple times in a fresh window
  - run the same benchmark multiple times in a fresh window and then take the minimal average value of each benchmark run

* Use force gc with care
  - forcing gc can skew the script execution time and gcTime numbers,
    but might be needed to get stable gc time / gc amount numbers

* Open a new window for every test
  - browsers (e.g. Chrome) might keep JIT statistics over page reloads and optimize pages differently depending on what has been loaded before

# Detailed overview



Definitions:

* valid sample: a sample that is representative of what should actually be measured
* complete sample: the sample of all measure values collected so far

Components:

* Runner
  - contains a default configuration
  - creates a new injector for every sample call, via which all other components are created

* Sampler
  - gets data from the metrics
  - reports measure values immediately to the reporters
  - loops until the validator is able to extract a valid sample out of the complete sample (see below)
  - reports the valid sample and the complete sample to the reporters

* Metric
  - gets measure values from the browser
  - e.g. reads out performance logs, DOM values, JavaScript values

* Validator
  - extracts a valid sample out of the complete sample of all measure values
  - e.g. wait until there are 10 samples and take them as the valid sample (would include warmup time)
  - e.g. wait until the regression slope for the metric `scriptTime` through the last 10 measure values is >=0, i.e. the values for the `scriptTime` metric are no longer decreasing

* Reporter (see the sketch after this list)
  - reports measure values, the valid sample and the complete sample to backends
  - e.g. a reporter that prints to the console, a reporter that reports values into Google BigQuery, ...

* WebDriverAdapter
  - abstraction over the used webdriver client
  - one implementation for every webdriver client,
    e.g. one for the selenium-webdriver Node.js module, dart async webdriver, dart sync webdriver, ...

* WebDriverExtension
  - implements additional methods that are not part of the webdriver protocol, using the WebDriverAdapter
  - provides functionality like forcing gc or reading out performance logs in a normalized format
  - one implementation per browser, e.g. one for Chrome, one for mobile Safari, one for Firefox

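To make the Reporter contract concrete, here is an illustrative sketch of a custom reporter. It relies only on the abstract `Reporter` class and the `Reporter.bindTo` helper in this package; the class name and the Runner wiring are made up:

```js
// Illustrative only: a reporter that dumps every sample to the console as JSON.
class MyJsonConsoleReporter extends Reporter {
  reportMeasureValues(values) {
    console.log(JSON.stringify(values.toJson()));
    return Promise.resolve(null);
  }
  reportSample(completeSample, validSample) {
    console.log(JSON.stringify(validSample.map(v => v.toJson())));
    return Promise.resolve(null);
  }
}

// Hand it to benchpress when creating the Runner. Note that `Reporter.bindTo`
// only delegates to the given token, so the class itself must also be provided.
var runner = new Runner([
  {provide: MyJsonConsoleReporter, useValue: new MyJsonConsoleReporter()},
  Reporter.bindTo(MyJsonConsoleReporter),
  // ...other providers (webdriver adapter, validator, ...)
]);
```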
BIN	modules/@angular/benchpress/docs/marked_timeline.png	Normal file
Binary file not shown. (Size: 22 KiB)
BIN	modules/@angular/benchpress/docs/overview.png	Normal file
Binary file not shown. (Size: 26 KiB)
3	modules/@angular/benchpress/index.dart	Normal file
@@ -0,0 +1,3 @@
library benchpress.index;

//no dart implementation
14	modules/@angular/benchpress/index.ts	Normal file
@@ -0,0 +1,14 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

require('reflect-metadata');
require('core-js');
module.exports = require('./benchpress.js');
// when bundling benchpress to one file, this is used
// for getting exports out of browserify's scope.
(<any>global).__benchpressExports = module.exports;
23	modules/@angular/benchpress/package.json	Normal file
@@ -0,0 +1,23 @@
{
  "name": "benchpress",
  "version": "<%= packageJson.version %>",
  "description": "Benchpress - a framework for e2e performance tests",
  "homepage": "<%= packageJson.homepage %>",
  "bugs": "<%= packageJson.bugs %>",
  "main" : "./index.js",
  "contributors": <%= JSON.stringify(packageJson.contributors) %>,
  "license": "<%= packageJson.license %>",
  "repository": <%= JSON.stringify(packageJson.repository) %>,
  "dependencies": {
    "angular2": "<%= packageJson.version %>",
    "core-js": "<%= packageJson.dependencies['core-js'] %>",
    "reflect-metadata": "<%= packageJson.dependencies['reflect-metadata'] %>",
    "rxjs": "<%= packageJson.dependencies['rxjs'] %>",
    "selenium-webdriver": "<%= packageJson.dependencies['selenium-webdriver'] %>",
    "zone.js": "<%= packageJson.dependencies['zone.js'] %>"
  },
  "optionalDependencies": {
    "jpm": "<%= packageJson.devDependencies.jpm %>",
    "firefox-profile": "<%= packageJson.devDependencies['firefox-profile'] %>"
  }
}
22	modules/@angular/benchpress/pubspec.yaml	Normal file
@@ -0,0 +1,22 @@
name: benchpress
version: <%= packageJson.version %>
authors:
<%= Object.keys(packageJson.contributors).map(function(name) {
  return '- '+name+' <'+packageJson.contributors[name]+'>';
}).join('\n') %>
description: Benchpress - a framework for e2e performance tests
homepage: <%= packageJson.homepage %>
environment:
  sdk: '>=1.10.0 <2.0.0'
dependencies:
  angular2: '^<%= packageJson.version %>'
  stack_trace: '^1.1.1'
  webdriver: '^0.9.0'
dev_dependencies:
  guinness2: '0.0.4'
  quiver: '^0.21.4'
  test: '^0.12.10'
dependency_overrides:
  angular2:
    path: ../angular2
  matcher: '0.12.0+1'
68
modules/@angular/benchpress/src/common_options.ts
Normal file
68
modules/@angular/benchpress/src/common_options.ts
Normal file
@ -0,0 +1,68 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {OpaqueToken} from '@angular/core/src/di';
|
||||
import {DateWrapper} from '@angular/facade/src/lang';
|
||||
|
||||
export class Options {
|
||||
static get DEFAULT_PROVIDERS(): any[] { return _DEFAULT_PROVIDERS; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get SAMPLE_ID() { return _SAMPLE_ID; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get DEFAULT_DESCRIPTION() { return _DEFAULT_DESCRIPTION; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get SAMPLE_DESCRIPTION() { return _SAMPLE_DESCRIPTION; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get FORCE_GC() { return _FORCE_GC; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get PREPARE() { return _PREPARE; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get EXECUTE() { return _EXECUTE; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get CAPABILITIES() { return _CAPABILITIES; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get USER_AGENT() { return _USER_AGENT; }
|
||||
// TODO(tbosch): use static initializer when our transpiler supports it
|
||||
static get NOW() { return _NOW; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get WRITE_FILE() { return _WRITE_FILE; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get MICRO_METRICS() { return _MICRO_METRICS; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get USER_METRICS() { return _USER_METRICS; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get RECEIVED_DATA() { return _RECEIVED_DATA; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get REQUEST_COUNT() { return _REQUEST_COUNT; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get CAPTURE_FRAMES() { return _CAPTURE_FRAMES; }
|
||||
}
|
||||
|
||||
var _SAMPLE_ID = new OpaqueToken('Options.sampleId');
|
||||
var _DEFAULT_DESCRIPTION = new OpaqueToken('Options.defaultDescription');
|
||||
var _SAMPLE_DESCRIPTION = new OpaqueToken('Options.sampleDescription');
|
||||
var _FORCE_GC = new OpaqueToken('Options.forceGc');
|
||||
var _PREPARE = new OpaqueToken('Options.prepare');
|
||||
var _EXECUTE = new OpaqueToken('Options.execute');
|
||||
var _CAPABILITIES = new OpaqueToken('Options.capabilities');
|
||||
var _USER_AGENT = new OpaqueToken('Options.userAgent');
|
||||
var _MICRO_METRICS = new OpaqueToken('Options.microMetrics');
|
||||
var _USER_METRICS = new OpaqueToken('Options.userMetrics');
|
||||
var _NOW = new OpaqueToken('Options.now');
|
||||
var _WRITE_FILE = new OpaqueToken('Options.writeFile');
|
||||
var _RECEIVED_DATA = new OpaqueToken('Options.receivedData');
|
||||
var _REQUEST_COUNT = new OpaqueToken('Options.requestCount');
|
||||
var _CAPTURE_FRAMES = new OpaqueToken('Options.frameCapture');
|
||||
|
||||
var _DEFAULT_PROVIDERS = [
|
||||
{provide: _DEFAULT_DESCRIPTION, useValue: {}}, {provide: _SAMPLE_DESCRIPTION, useValue: {}},
|
||||
{provide: _FORCE_GC, useValue: false}, {provide: _PREPARE, useValue: false},
|
||||
{provide: _MICRO_METRICS, useValue: {}}, {provide: _USER_METRICS, useValue: {}},
|
||||
{provide: _NOW, useValue: () => DateWrapper.now()}, {provide: _RECEIVED_DATA, useValue: false},
|
||||
{provide: _REQUEST_COUNT, useValue: false}, {provide: _CAPTURE_FRAMES, useValue: false}
|
||||
];
|
2
modules/@angular/benchpress/src/firefox_extension/.gitignore
vendored
Normal file
2
modules/@angular/benchpress/src/firefox_extension/.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
*.xpi
|
||||
addon-sdk*
|
@ -0,0 +1,3 @@
|
||||
library benchpress.src.firefox_extension.data.installed_script;
|
||||
|
||||
//no dart implementation
|
@ -0,0 +1,38 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
declare var exportFunction;
|
||||
declare var unsafeWindow;
|
||||
|
||||
exportFunction(function() {
|
||||
var curTime = unsafeWindow.performance.now();
|
||||
(<any>self).port.emit('startProfiler', curTime);
|
||||
}, unsafeWindow, {defineAs: 'startProfiler'});
|
||||
|
||||
exportFunction(function() {
|
||||
(<any>self).port.emit('stopProfiler');
|
||||
}, unsafeWindow, {defineAs: 'stopProfiler'});
|
||||
|
||||
exportFunction(function(cb) {
|
||||
(<any>self).port.once('perfProfile', cb);
|
||||
(<any>self).port.emit('getProfile');
|
||||
}, unsafeWindow, {defineAs: 'getProfile'});
|
||||
|
||||
exportFunction(function() {
|
||||
(<any>self).port.emit('forceGC');
|
||||
}, unsafeWindow, {defineAs: 'forceGC'});
|
||||
|
||||
exportFunction(function(name) {
|
||||
var curTime = unsafeWindow.performance.now();
|
||||
(<any>self).port.emit('markStart', name, curTime);
|
||||
}, unsafeWindow, {defineAs: 'markStart'});
|
||||
|
||||
exportFunction(function(name) {
|
||||
var curTime = unsafeWindow.performance.now();
|
||||
(<any>self).port.emit('markEnd', name, curTime);
|
||||
}, unsafeWindow, {defineAs: 'markEnd'});
|
@ -0,0 +1,3 @@
|
||||
library benchpress.src.firefox_extension.lib.main;
|
||||
|
||||
//no dart implementation
|
@ -0,0 +1,74 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
var {Cc, Ci, Cu} = require('chrome');
|
||||
var os = Cc['@mozilla.org/observer-service;1'].getService(Ci.nsIObserverService);
|
||||
var ParserUtil = require('./parser_util');
|
||||
|
||||
class Profiler {
|
||||
private _profiler;
|
||||
private _markerEvents: any[];
|
||||
private _profilerStartTime: number;
|
||||
|
||||
constructor() { this._profiler = Cc['@mozilla.org/tools/profiler;1'].getService(Ci.nsIProfiler); }
|
||||
|
||||
start(entries, interval, features, timeStarted) {
|
||||
this._profiler.StartProfiler(entries, interval, features, features.length);
|
||||
this._profilerStartTime = timeStarted;
|
||||
this._markerEvents = [];
|
||||
}
|
||||
|
||||
stop() { this._profiler.StopProfiler(); }
|
||||
|
||||
getProfilePerfEvents() {
|
||||
var profileData = this._profiler.getProfileData();
|
||||
var perfEvents = ParserUtil.convertPerfProfileToEvents(profileData);
|
||||
perfEvents = this._mergeMarkerEvents(perfEvents);
|
||||
perfEvents.sort(function(event1, event2) { return event1.ts - event2.ts; }); // Sort by ts
|
||||
return perfEvents;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _mergeMarkerEvents(perfEvents: any[]): any[] {
|
||||
this._markerEvents.forEach(function(markerEvent) { perfEvents.push(markerEvent); });
|
||||
return perfEvents;
|
||||
}
|
||||
|
||||
addStartEvent(name: string, timeStarted: number) {
|
||||
this._markerEvents.push({ph: 'b', ts: timeStarted - this._profilerStartTime, name: name});
|
||||
}
|
||||
|
||||
addEndEvent(name: string, timeEnded: number) {
|
||||
this._markerEvents.push({ph: 'e', ts: timeEnded - this._profilerStartTime, name: name});
|
||||
}
|
||||
}
|
||||
|
||||
function forceGC() {
|
||||
Cu.forceGC();
|
||||
os.notifyObservers(null, 'child-gc-request', null);
|
||||
};
|
||||
|
||||
var mod = require('sdk/page-mod');
|
||||
var data = require('sdk/self').data;
|
||||
var profiler = new Profiler();
|
||||
mod.PageMod({
|
||||
include: ['*'],
|
||||
contentScriptFile: data.url('installed_script.js'),
|
||||
onAttach: worker => {
|
||||
worker.port.on(
|
||||
'startProfiler',
|
||||
(timeStarted) => profiler.start(
|
||||
/* = profiler memory */ 3000000, 0.1, ['leaf', 'js', 'stackwalk', 'gc'], timeStarted));
|
||||
worker.port.on('stopProfiler', () => profiler.stop());
|
||||
worker.port.on(
|
||||
'getProfile', () => worker.port.emit('perfProfile', profiler.getProfilePerfEvents()));
|
||||
worker.port.on('forceGC', forceGC);
|
||||
worker.port.on('markStart', (name, timeStarted) => profiler.addStartEvent(name, timeStarted));
|
||||
worker.port.on('markEnd', (name, timeEnded) => profiler.addEndEvent(name, timeEnded));
|
||||
}
|
||||
});
|
@ -0,0 +1,3 @@
|
||||
library benchpress.src.firefox_extension.lib.parser_util;
|
||||
|
||||
//no dart implementation
|
@ -0,0 +1,90 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {Object} perfProfile The perf profile JSON object.
|
||||
* @return {Object[]} An array of recognized events that are captured
|
||||
* within the perf profile.
|
||||
*/
|
||||
export function convertPerfProfileToEvents(perfProfile: any): any[] {
|
||||
var inProgressEvents = new Map(); // map from event name to start time
|
||||
var finishedEvents = []; // Event[] finished events
|
||||
var addFinishedEvent = function(eventName, startTime, endTime) {
|
||||
var categorizedEventName = categorizeEvent(eventName);
|
||||
var args = undefined;
|
||||
if (categorizedEventName == 'gc') {
|
||||
// TODO: We cannot measure heap size at the moment
|
||||
args = {usedHeapSize: 0};
|
||||
}
|
||||
if (startTime == endTime) {
|
||||
// Finished instantly
|
||||
finishedEvents.push({ph: 'X', ts: startTime, name: categorizedEventName, args: args});
|
||||
} else {
|
||||
// Has duration
|
||||
finishedEvents.push({ph: 'B', ts: startTime, name: categorizedEventName, args: args});
|
||||
finishedEvents.push({ph: 'E', ts: endTime, name: categorizedEventName, args: args});
|
||||
}
|
||||
};
|
||||
|
||||
var samples = perfProfile.threads[0].samples;
|
||||
// In perf profile, firefox samples all the frames in set time intervals. Here
|
||||
// we go through all the samples and construct the start and end time for each
|
||||
// event.
|
||||
for (var i = 0; i < samples.length; ++i) {
|
||||
var sample = samples[i];
|
||||
var sampleTime = sample.time;
|
||||
|
||||
// Add all the frames into a set so it's easier/faster to find the set
|
||||
// differences
|
||||
var sampleFrames = new Set();
|
||||
sample.frames.forEach(function(frame) { sampleFrames.add(frame.location); });
|
||||
|
||||
// If an event is in the inProgressEvents map, but not in the current sample,
|
||||
// then it must have just finished. We add this event to the finishedEvents
|
||||
// array and remove it from the inProgressEvents map.
|
||||
var previousSampleTime = (i == 0 ? /* not used */ -1 : samples[i - 1].time);
|
||||
inProgressEvents.forEach(function(startTime, eventName) {
|
||||
if (!(sampleFrames.has(eventName))) {
|
||||
addFinishedEvent(eventName, startTime, previousSampleTime);
|
||||
inProgressEvents.delete(eventName);
|
||||
}
|
||||
});
|
||||
|
||||
// If an event is in the current sample, but not in the inProgressEvents map,
|
||||
// then it must have just started. We add this event to the inProgressEvents
|
||||
// map.
|
||||
sampleFrames.forEach(function(eventName) {
|
||||
if (!(inProgressEvents.has(eventName))) {
|
||||
inProgressEvents.set(eventName, sampleTime);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// If anything is still in progress, we need to include it as a finished event
|
||||
// since recording ended.
|
||||
var lastSampleTime = samples[samples.length - 1].time;
|
||||
inProgressEvents.forEach(function(startTime, eventName) {
|
||||
addFinishedEvent(eventName, startTime, lastSampleTime);
|
||||
});
|
||||
|
||||
// Remove all the unknown categories.
|
||||
return finishedEvents.filter(function(event) { return event.name != 'unknown'; });
|
||||
}
|
||||
|
||||
// TODO: this is most likely not exhaustive.
|
||||
export function categorizeEvent(eventName: string): string {
|
||||
if (eventName.indexOf('PresShell::Paint') > -1) {
|
||||
return 'render';
|
||||
} else if (eventName.indexOf('FirefoxDriver.prototype.executeScript') > -1) {
|
||||
return 'script';
|
||||
} else if (eventName.indexOf('forceGC') > -1) {
|
||||
return 'gc';
|
||||
} else {
|
||||
return 'unknown';
|
||||
}
|
||||
}
|
@ -0,0 +1,3 @@
|
||||
library benchpress.src.firefox_extension.lib.test_helper;
|
||||
|
||||
//no dart implementation
|
@ -0,0 +1,51 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
var q = require('q');
|
||||
var FirefoxProfile = require('firefox-profile');
|
||||
var jpm = require('jpm/lib/xpi');
|
||||
var pathUtil = require('path');
|
||||
|
||||
var PERF_ADDON_PACKAGE_JSON_DIR = '..';
|
||||
|
||||
exports.getAbsolutePath = function(path) {
|
||||
var normalizedPath = pathUtil.normalize(path);
|
||||
if (pathUtil.resolve(normalizedPath) == normalizedPath) {
|
||||
// Already absolute path
|
||||
return normalizedPath;
|
||||
} else {
|
||||
return pathUtil.join(__dirname, normalizedPath);
|
||||
}
|
||||
};
|
||||
|
||||
exports.getFirefoxProfile = function(extensionPath) {
|
||||
var deferred = q.defer();
|
||||
|
||||
var firefoxProfile = new FirefoxProfile();
|
||||
firefoxProfile.addExtensions([extensionPath], () => {
|
||||
firefoxProfile.encoded(encodedProfile => {
|
||||
var multiCapabilities = [{browserName: 'firefox', firefox_profile: encodedProfile}];
|
||||
deferred.resolve(multiCapabilities);
|
||||
});
|
||||
});
|
||||
|
||||
return deferred.promise;
|
||||
};
|
||||
|
||||
exports.getFirefoxProfileWithExtension = function() {
|
||||
var absPackageJsonDir = pathUtil.join(__dirname, PERF_ADDON_PACKAGE_JSON_DIR);
|
||||
var packageJson = require(pathUtil.join(absPackageJsonDir, 'package.json'));
|
||||
|
||||
var savedCwd = process.cwd();
|
||||
process.chdir(absPackageJsonDir);
|
||||
|
||||
return jpm(packageJson).then(xpiPath => {
|
||||
process.chdir(savedCwd);
|
||||
return exports.getFirefoxProfile(xpiPath);
|
||||
});
|
||||
};
|
@ -0,0 +1 @@
|
||||
{ "version" : "0.0.1", "main" : "lib/main.js", "name" : "ffperf-addon" }
|
23
modules/@angular/benchpress/src/measure_values.ts
Normal file
23
modules/@angular/benchpress/src/measure_values.ts
Normal file
@ -0,0 +1,23 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {Map} from '@angular/facade/src/collection';
|
||||
import {Date, DateWrapper} from '@angular/facade/src/lang';
|
||||
|
||||
export class MeasureValues {
|
||||
constructor(
|
||||
public runIndex: number, public timeStamp: Date, public values: {[key: string]: any}) {}
|
||||
|
||||
toJson() {
|
||||
return {
|
||||
'timeStamp': DateWrapper.toJson(this.timeStamp),
|
||||
'runIndex': this.runIndex,
|
||||
'values': this.values
|
||||
};
|
||||
}
|
||||
}
|
35
modules/@angular/benchpress/src/metric.ts
Normal file
35
modules/@angular/benchpress/src/metric.ts
Normal file
@ -0,0 +1,35 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* A metric measures values
|
||||
*/
|
||||
export abstract class Metric {
|
||||
static bindTo(delegateToken): any[] {
|
||||
return [{provide: Metric, useFactory: (delegate) => delegate, deps: [delegateToken]}];
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts measuring
|
||||
*/
|
||||
beginMeasure(): Promise<any> { throw new Error('NYI'); }
|
||||
|
||||
/**
|
||||
* Ends measuring and reports the data
|
||||
* since the begin call.
|
||||
* @param restart: Whether to restart right after this.
|
||||
*/
|
||||
endMeasure(restart: boolean): Promise<{[key: string]: any}> { throw new Error('NYI'); }
|
||||
|
||||
/**
|
||||
* Describes the metrics provided by this metric implementation.
|
||||
* (e.g. units, ...)
|
||||
*/
|
||||
describe(): {[key: string]: any} { throw new Error('NYI'); }
|
||||
}
|
61
modules/@angular/benchpress/src/metric/multi_metric.ts
Normal file
61
modules/@angular/benchpress/src/metric/multi_metric.ts
Normal file
@ -0,0 +1,61 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {Injector, OpaqueToken} from '@angular/core/src/di';
|
||||
import {StringMapWrapper} from '@angular/facade/src/collection';
|
||||
|
||||
import {Metric} from '../metric';
|
||||
|
||||
export class MultiMetric extends Metric {
|
||||
static createBindings(childTokens: any[]): any[] {
|
||||
return [
|
||||
{
|
||||
provide: _CHILDREN,
|
||||
useFactory: (injector: Injector) => childTokens.map(token => injector.get(token)),
|
||||
deps: [Injector]
|
||||
},
|
||||
{provide: MultiMetric, useFactory: children => new MultiMetric(children), deps: [_CHILDREN]}
|
||||
];
|
||||
}
|
||||
|
||||
constructor(private _metrics: Metric[]) { super(); }
|
||||
|
||||
/**
|
||||
* Starts measuring
|
||||
*/
|
||||
beginMeasure(): Promise<any> {
|
||||
return Promise.all(this._metrics.map(metric => metric.beginMeasure()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Ends measuring and reports the data
|
||||
* since the begin call.
|
||||
* @param restart: Whether to restart right after this.
|
||||
*/
|
||||
endMeasure(restart: boolean): Promise<{[key: string]: any}> {
|
||||
return Promise.all(this._metrics.map(metric => metric.endMeasure(restart)))
|
||||
.then(values => mergeStringMaps(<any>values));
|
||||
}
|
||||
|
||||
/**
|
||||
* Describes the metrics provided by this metric implementation.
|
||||
* (e.g. units, ...)
|
||||
*/
|
||||
describe(): {[key: string]: any} {
|
||||
return mergeStringMaps(this._metrics.map((metric) => metric.describe()));
|
||||
}
|
||||
}
|
||||
|
||||
function mergeStringMaps(maps: {[key: string]: string}[]): {[key: string]: string} {
|
||||
var result = {};
|
||||
maps.forEach(
|
||||
map => { StringMapWrapper.forEach(map, (value, prop) => { result[prop] = value; }); });
|
||||
return result;
|
||||
}
|
||||
|
||||
var _CHILDREN = new OpaqueToken('MultiMetric.children');
|
403
modules/@angular/benchpress/src/metric/perflog_metric.ts
Normal file
403
modules/@angular/benchpress/src/metric/perflog_metric.ts
Normal file
@ -0,0 +1,403 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {OpaqueToken} from '@angular/core/src/di';
|
||||
import {ListWrapper, StringMapWrapper} from '@angular/facade/src/collection';
|
||||
import {Math, NumberWrapper, StringWrapper, isBlank, isPresent} from '@angular/facade/src/lang';
|
||||
|
||||
import {Options} from '../common_options';
|
||||
import {Metric} from '../metric';
|
||||
import {PerfLogFeatures, WebDriverExtension} from '../web_driver_extension';
|
||||
|
||||
|
||||
/**
|
||||
* A metric that reads out the performance log
|
||||
*/
|
||||
export class PerflogMetric extends Metric {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get PROVIDERS(): any[] { return _PROVIDERS; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get SET_TIMEOUT(): OpaqueToken { return _SET_TIMEOUT; }
|
||||
|
||||
/** @internal */
|
||||
private _remainingEvents: Array<{[key: string]: any}>;
|
||||
/** @internal */
|
||||
private _measureCount: number;
|
||||
/** @internal */
|
||||
private _perfLogFeatures: PerfLogFeatures;
|
||||
|
||||
|
||||
/**
|
||||
* @param driverExtension
|
||||
* @param setTimeout
|
||||
* @param microMetrics Name and description of metrics provided via console.time / console.timeEnd
|
||||
**/
|
||||
constructor(
|
||||
/** @internal */
|
||||
private _driverExtension: WebDriverExtension,
|
||||
/** @internal */
|
||||
private _setTimeout: Function,
|
||||
/** @internal */
|
||||
private _microMetrics: {[key: string]: any},
|
||||
/** @internal */
|
||||
private _forceGc: boolean,
|
||||
/** @internal */
|
||||
private _captureFrames: boolean,
|
||||
/** @internal */
|
||||
private _receivedData: boolean,
|
||||
/** @internal */
|
||||
private _requestCount: boolean) {
|
||||
super();
|
||||
|
||||
this._remainingEvents = [];
|
||||
this._measureCount = 0;
|
||||
this._perfLogFeatures = _driverExtension.perfLogFeatures();
|
||||
if (!this._perfLogFeatures.userTiming) {
|
||||
// User timing is needed for navigationStart.
|
||||
this._receivedData = false;
|
||||
this._requestCount = false;
|
||||
}
|
||||
}
|
||||
|
||||
describe(): {[key: string]: any} {
|
||||
var res = {
|
||||
'scriptTime': 'script execution time in ms, including gc and render',
|
||||
'pureScriptTime': 'script execution time in ms, without gc nor render'
|
||||
};
|
||||
if (this._perfLogFeatures.render) {
|
||||
res['renderTime'] = 'render time in ms';
|
||||
}
|
||||
if (this._perfLogFeatures.gc) {
|
||||
res['gcTime'] = 'gc time in ms';
|
||||
res['gcAmount'] = 'gc amount in kbytes';
|
||||
res['majorGcTime'] = 'time of major gcs in ms';
|
||||
if (this._forceGc) {
|
||||
res['forcedGcTime'] = 'forced gc time in ms';
|
||||
res['forcedGcAmount'] = 'forced gc amount in kbytes';
|
||||
}
|
||||
}
|
||||
if (this._receivedData) {
|
||||
res['receivedData'] = 'encoded bytes received since navigationStart';
|
||||
}
|
||||
if (this._requestCount) {
|
||||
res['requestCount'] = 'count of requests sent since navigationStart';
|
||||
}
|
||||
if (this._captureFrames) {
|
||||
if (!this._perfLogFeatures.frameCapture) {
|
||||
var warningMsg = 'WARNING: Metric requested, but not supported by driver';
|
||||
// using dot syntax for metric name to keep them grouped together in console reporter
|
||||
res['frameTime.mean'] = warningMsg;
|
||||
res['frameTime.worst'] = warningMsg;
|
||||
res['frameTime.best'] = warningMsg;
|
||||
res['frameTime.smooth'] = warningMsg;
|
||||
} else {
|
||||
res['frameTime.mean'] = 'mean frame time in ms (target: 16.6ms for 60fps)';
|
||||
res['frameTime.worst'] = 'worst frame time in ms';
|
||||
res['frameTime.best'] = 'best frame time in ms';
|
||||
res['frameTime.smooth'] = 'percentage of frames that hit 60fps';
|
||||
}
|
||||
}
|
||||
StringMapWrapper.forEach(
|
||||
this._microMetrics, (desc, name) => { StringMapWrapper.set(res, name, desc); });
|
||||
return res;
|
||||
}
|
||||
|
||||
beginMeasure(): Promise<any> {
|
||||
var resultPromise = Promise.resolve(null);
|
||||
if (this._forceGc) {
|
||||
resultPromise = resultPromise.then((_) => this._driverExtension.gc());
|
||||
}
|
||||
return resultPromise.then((_) => this._beginMeasure());
|
||||
}
|
||||
|
||||
endMeasure(restart: boolean): Promise<{[key: string]: any}> {
|
||||
if (this._forceGc) {
|
||||
return this._endPlainMeasureAndMeasureForceGc(restart);
|
||||
} else {
|
||||
return this._endMeasure(restart);
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _endPlainMeasureAndMeasureForceGc(restartMeasure: boolean) {
|
||||
return this._endMeasure(true).then((measureValues) => {
|
||||
// disable frame capture for measurements during forced gc
|
||||
var originalFrameCaptureValue = this._captureFrames;
|
||||
this._captureFrames = false;
|
||||
return this._driverExtension.gc()
|
||||
.then((_) => this._endMeasure(restartMeasure))
|
||||
.then((forceGcMeasureValues) => {
|
||||
this._captureFrames = originalFrameCaptureValue;
|
||||
StringMapWrapper.set(measureValues, 'forcedGcTime', forceGcMeasureValues['gcTime']);
|
||||
StringMapWrapper.set(measureValues, 'forcedGcAmount', forceGcMeasureValues['gcAmount']);
|
||||
return measureValues;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _beginMeasure(): Promise<any> {
|
||||
return this._driverExtension.timeBegin(this._markName(this._measureCount++));
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _endMeasure(restart: boolean): Promise<{[key: string]: any}> {
|
||||
var markName = this._markName(this._measureCount - 1);
|
||||
var nextMarkName = restart ? this._markName(this._measureCount++) : null;
|
||||
return this._driverExtension.timeEnd(markName, nextMarkName)
|
||||
.then((_) => this._readUntilEndMark(markName));
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _readUntilEndMark(markName: string, loopCount: number = 0, startEvent = null) {
|
||||
if (loopCount > _MAX_RETRY_COUNT) {
|
||||
throw new Error(`Tried too often to get the ending mark: ${loopCount}`);
|
||||
}
|
||||
return this._driverExtension.readPerfLog().then((events) => {
|
||||
this._addEvents(events);
|
||||
var result = this._aggregateEvents(this._remainingEvents, markName);
|
||||
if (isPresent(result)) {
|
||||
this._remainingEvents = events;
|
||||
return result;
|
||||
}
|
||||
var resolve: (result: any) => void;
|
||||
var promise = new Promise(res => { resolve = res; });
|
||||
this._setTimeout(() => resolve(this._readUntilEndMark(markName, loopCount + 1)), 100);
|
||||
return promise;
|
||||
});
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _addEvents(events: {[key: string]: string}[]) {
|
||||
var needSort = false;
|
||||
events.forEach(event => {
|
||||
if (StringWrapper.equals(event['ph'], 'X')) {
|
||||
needSort = true;
|
||||
var startEvent = {};
|
||||
var endEvent = {};
|
||||
StringMapWrapper.forEach(event, (value, prop) => {
|
||||
startEvent[prop] = value;
|
||||
endEvent[prop] = value;
|
||||
});
|
||||
startEvent['ph'] = 'B';
|
||||
endEvent['ph'] = 'E';
|
||||
endEvent['ts'] = startEvent['ts'] + startEvent['dur'];
|
||||
this._remainingEvents.push(startEvent);
|
||||
this._remainingEvents.push(endEvent);
|
||||
} else {
|
||||
this._remainingEvents.push(event);
|
||||
}
|
||||
});
|
||||
if (needSort) {
|
||||
// Need to sort because of the ph==='X' events
|
||||
ListWrapper.sort(this._remainingEvents, (a, b) => {
|
||||
var diff = a['ts'] - b['ts'];
|
||||
return diff > 0 ? 1 : diff < 0 ? -1 : 0;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _aggregateEvents(events: Array<{[key: string]: any}>, markName): {[key: string]: any} {
|
||||
var result = {'scriptTime': 0, 'pureScriptTime': 0};
|
||||
if (this._perfLogFeatures.gc) {
|
||||
result['gcTime'] = 0;
|
||||
result['majorGcTime'] = 0;
|
||||
result['gcAmount'] = 0;
|
||||
}
|
||||
if (this._perfLogFeatures.render) {
|
||||
result['renderTime'] = 0;
|
||||
}
|
||||
if (this._captureFrames) {
|
||||
result['frameTime.mean'] = 0;
|
||||
result['frameTime.best'] = 0;
|
||||
result['frameTime.worst'] = 0;
|
||||
result['frameTime.smooth'] = 0;
|
||||
}
|
||||
StringMapWrapper.forEach(this._microMetrics, (desc, name) => { result[name] = 0; });
|
||||
if (this._receivedData) {
|
||||
result['receivedData'] = 0;
|
||||
}
|
||||
if (this._requestCount) {
|
||||
result['requestCount'] = 0;
|
||||
}
|
||||
|
||||
var markStartEvent = null;
|
||||
var markEndEvent = null;
|
||||
var gcTimeInScript = 0;
|
||||
var renderTimeInScript = 0;
|
||||
|
||||
var frameTimestamps = [];
|
||||
var frameTimes = [];
|
||||
var frameCaptureStartEvent = null;
|
||||
var frameCaptureEndEvent = null;
|
||||
|
||||
var intervalStarts: {[key: string]: any} = {};
|
||||
var intervalStartCount: {[key: string]: number} = {};
|
||||
events.forEach((event) => {
|
||||
var ph = event['ph'];
|
||||
var name = event['name'];
|
||||
var microIterations = 1;
|
||||
var microIterationsMatch = name.match(_MICRO_ITERATIONS_REGEX);
|
||||
if (isPresent(microIterationsMatch)) {
|
||||
name = microIterationsMatch[1];
|
||||
microIterations = NumberWrapper.parseInt(microIterationsMatch[2], 10);
|
||||
}
|
||||
|
||||
if (StringWrapper.equals(ph, 'b') && StringWrapper.equals(name, markName)) {
|
||||
markStartEvent = event;
|
||||
} else if (StringWrapper.equals(ph, 'e') && StringWrapper.equals(name, markName)) {
|
||||
markEndEvent = event;
|
||||
}
|
||||
|
||||
let isInstant = StringWrapper.equals(ph, 'I') || StringWrapper.equals(ph, 'i');
|
||||
if (this._requestCount && StringWrapper.equals(name, 'sendRequest')) {
|
||||
result['requestCount'] += 1;
|
||||
} else if (this._receivedData && StringWrapper.equals(name, 'receivedData') && isInstant) {
|
||||
result['receivedData'] += event['args']['encodedDataLength'];
|
||||
} else if (StringWrapper.equals(name, 'navigationStart')) {
|
||||
// We count data + requests since the last navigationStart
|
||||
// (there might be chrome extensions loaded by selenium before our page, so there
|
||||
// will likely be more than one navigationStart).
|
||||
if (this._receivedData) {
|
||||
result['receivedData'] = 0;
|
||||
}
|
||||
if (this._requestCount) {
|
||||
result['requestCount'] = 0;
|
||||
}
|
||||
}
|
||||
if (isPresent(markStartEvent) && isBlank(markEndEvent) &&
|
||||
event['pid'] === markStartEvent['pid']) {
|
||||
if (StringWrapper.equals(ph, 'b') && StringWrapper.equals(name, _MARK_NAME_FRAME_CAPUTRE)) {
|
||||
if (isPresent(frameCaptureStartEvent)) {
|
||||
throw new Error('can capture frames only once per benchmark run');
|
||||
}
|
||||
if (!this._captureFrames) {
|
||||
throw new Error(
|
||||
'found start event for frame capture, but frame capture was not requested in benchpress');
|
||||
}
|
||||
frameCaptureStartEvent = event;
|
||||
} else if (
|
||||
StringWrapper.equals(ph, 'e') && StringWrapper.equals(name, _MARK_NAME_FRAME_CAPUTRE)) {
|
||||
if (isBlank(frameCaptureStartEvent)) {
|
||||
throw new Error('missing start event for frame capture');
|
||||
}
|
||||
frameCaptureEndEvent = event;
|
||||
}
|
||||
|
||||
if (isInstant) {
|
||||
if (isPresent(frameCaptureStartEvent) && isBlank(frameCaptureEndEvent) &&
|
||||
StringWrapper.equals(name, 'frame')) {
|
||||
frameTimestamps.push(event['ts']);
|
||||
if (frameTimestamps.length >= 2) {
|
||||
frameTimes.push(
|
||||
frameTimestamps[frameTimestamps.length - 1] -
|
||||
frameTimestamps[frameTimestamps.length - 2]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (StringWrapper.equals(ph, 'B') || StringWrapper.equals(ph, 'b')) {
|
||||
if (isBlank(intervalStarts[name])) {
|
||||
intervalStartCount[name] = 1;
|
||||
intervalStarts[name] = event;
|
||||
} else {
|
||||
intervalStartCount[name]++;
|
||||
}
|
||||
} else if (
|
||||
(StringWrapper.equals(ph, 'E') || StringWrapper.equals(ph, 'e')) &&
|
||||
isPresent(intervalStarts[name])) {
|
||||
intervalStartCount[name]--;
|
||||
if (intervalStartCount[name] === 0) {
|
||||
var startEvent = intervalStarts[name];
|
||||
var duration = (event['ts'] - startEvent['ts']);
|
||||
intervalStarts[name] = null;
|
||||
if (StringWrapper.equals(name, 'gc')) {
|
||||
result['gcTime'] += duration;
|
||||
var amount =
|
||||
(startEvent['args']['usedHeapSize'] - event['args']['usedHeapSize']) / 1000;
|
||||
result['gcAmount'] += amount;
|
||||
var majorGc = event['args']['majorGc'];
|
||||
if (isPresent(majorGc) && majorGc) {
|
||||
result['majorGcTime'] += duration;
|
||||
}
|
||||
if (isPresent(intervalStarts['script'])) {
|
||||
gcTimeInScript += duration;
|
||||
}
|
||||
} else if (StringWrapper.equals(name, 'render')) {
|
||||
result['renderTime'] += duration;
|
||||
if (isPresent(intervalStarts['script'])) {
|
||||
renderTimeInScript += duration;
|
||||
}
|
||||
} else if (StringWrapper.equals(name, 'script')) {
|
||||
result['scriptTime'] += duration;
|
||||
} else if (isPresent(this._microMetrics[name])) {
|
||||
result[name] += duration / microIterations;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
if (!isPresent(markStartEvent) || !isPresent(markEndEvent)) {
|
||||
// not all events have been received, no further processing for now
|
||||
return null;
|
||||
}
|
||||
|
||||
if (isPresent(markEndEvent) && isPresent(frameCaptureStartEvent) &&
|
||||
isBlank(frameCaptureEndEvent)) {
|
||||
throw new Error('missing end event for frame capture');
|
||||
}
|
||||
if (this._captureFrames && isBlank(frameCaptureStartEvent)) {
|
||||
throw new Error('frame capture requested in benchpress, but no start event was found');
|
||||
}
|
||||
if (frameTimes.length > 0) {
|
||||
this._addFrameMetrics(result, frameTimes);
|
||||
}
|
||||
result['pureScriptTime'] = result['scriptTime'] - gcTimeInScript - renderTimeInScript;
|
||||
return result;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _addFrameMetrics(result: {[key: string]: any}, frameTimes: any[]) {
|
||||
result['frameTime.mean'] = frameTimes.reduce((a, b) => a + b, 0) / frameTimes.length;
|
||||
var firstFrame = frameTimes[0];
|
||||
result['frameTime.worst'] = frameTimes.reduce((a, b) => a > b ? a : b, firstFrame);
|
||||
result['frameTime.best'] = frameTimes.reduce((a, b) => a < b ? a : b, firstFrame);
|
||||
result['frameTime.smooth'] =
|
||||
frameTimes.filter(t => t < _FRAME_TIME_SMOOTH_THRESHOLD).length / frameTimes.length;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _markName(index) { return `${_MARK_NAME_PREFIX}${index}`; }
|
||||
}
|
||||
|
||||
var _MICRO_ITERATIONS_REGEX = /(.+)\*(\d+)$/;
|
||||
|
||||
var _MAX_RETRY_COUNT = 20;
|
||||
var _MARK_NAME_PREFIX = 'benchpress';
|
||||
var _SET_TIMEOUT = new OpaqueToken('PerflogMetric.setTimeout');
|
||||
|
||||
var _MARK_NAME_FRAME_CAPUTRE = 'frameCapture';
|
||||
// using 17ms as a somewhat looser threshold, instead of 16.6666ms
|
||||
var _FRAME_TIME_SMOOTH_THRESHOLD = 17;
|
||||
|
||||
var _PROVIDERS = [
|
||||
{
|
||||
provide: PerflogMetric,
|
||||
useFactory: (driverExtension, setTimeout, microMetrics, forceGc, captureFrames, receivedData,
|
||||
requestCount) =>
|
||||
new PerflogMetric(
|
||||
driverExtension, setTimeout, microMetrics, forceGc, captureFrames,
|
||||
receivedData, requestCount),
|
||||
deps: [
|
||||
WebDriverExtension, _SET_TIMEOUT, Options.MICRO_METRICS, Options.FORCE_GC,
|
||||
Options.CAPTURE_FRAMES, Options.RECEIVED_DATA, Options.REQUEST_COUNT
|
||||
]
|
||||
},
|
||||
{provide: _SET_TIMEOUT, useValue: (fn, millis) => <any>setTimeout(fn, millis)}
|
||||
];
|
75
modules/@angular/benchpress/src/metric/user_metric.ts
Normal file
75
modules/@angular/benchpress/src/metric/user_metric.ts
Normal file
@ -0,0 +1,75 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {OpaqueToken, Provider} from '@angular/core';
import {StringMapWrapper} from '@angular/facade/src/collection';
import {isNumber} from '@angular/facade/src/lang';

import {Options} from '../common_options';
import {Metric} from '../metric';
import {WebDriverAdapter} from '../web_driver_adapter';


export class UserMetric extends Metric {
  // TODO(tbosch): use static values when our transpiler supports them
  static get PROVIDERS(): Provider[] { return _PROVIDERS; }

  constructor(private _userMetrics: {[key: string]: string}, private _wdAdapter: WebDriverAdapter) {
    super();
  }

  /**
   * Starts measuring
   */
  beginMeasure(): Promise<any> { return Promise.resolve(true); }

  /**
   * Ends measuring.
   */
  endMeasure(restart: boolean): Promise<{[key: string]: any}> {
    let resolve: (result: any) => void;
    let reject: (error: any) => void;
    let promise = new Promise((res, rej) => {
      resolve = res;
      reject = rej;
    });
    let adapter = this._wdAdapter;
    let names = StringMapWrapper.keys(this._userMetrics);

    function getAndClearValues() {
      Promise.all(names.map(name => adapter.executeScript(`return window.${name}`)))
          .then((values: any[]) => {
            if (values.every(isNumber)) {
              Promise.all(names.map(name => adapter.executeScript(`delete window.${name}`)))
                  .then((_: any[]) => {
                    let map = StringMapWrapper.create();
                    for (let i = 0, n = names.length; i < n; i++) {
                      StringMapWrapper.set(map, names[i], values[i]);
                    }
                    resolve(map);
                  }, reject);
            } else {
              <any>setTimeout(getAndClearValues, 100);
            }
          }, reject);
    }
    getAndClearValues();
    return promise;
  }

  /**
   * Describes the metrics provided by this metric implementation.
   * (e.g. units, ...)
   */
  describe(): {[key: string]: any} { return this._userMetrics; }
}

var _PROVIDERS: Provider[] = [{
  provide: UserMetric,
  useFactory: (userMetrics, wdAdapter) => new UserMetric(userMetrics, wdAdapter),
  deps: [Options.USER_METRICS, WebDriverAdapter]
}];
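UserMetric above polls `window.<name>` for every configured metric until each value is a number, then deletes the globals and resolves the measurement. A hedged sketch of both sides of that contract; the metric name and page snippet are illustrative, not part of this commit:

// Benchmark side: register the metric and its description.
runner.sample({
  id: 'example.userMetric',
  execute: () => { /* trigger the scenario in the page */ },
  userMetrics: {'timeToBootstrap': 'time from navigation start to bootstrap (ms)'}
});

// Page side: publish the value once it is known; UserMetric re-checks every 100ms.
(<any>window).timeToBootstrap = performance.now();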
24
modules/@angular/benchpress/src/reporter.ts
Normal file
24
modules/@angular/benchpress/src/reporter.ts
Normal file
@ -0,0 +1,24 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {MeasureValues} from './measure_values';

/**
 * A reporter reports measure values and the valid sample.
 */
export abstract class Reporter {
  static bindTo(delegateToken): any[] {
    return [{provide: Reporter, useFactory: (delegate) => delegate, deps: [delegateToken]}];
  }

  reportMeasureValues(values: MeasureValues): Promise<any> { throw new Error('NYI'); }

  reportSample(completeSample: MeasureValues[], validSample: MeasureValues[]): Promise<any> {
    throw new Error('NYI');
  }
}
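Because Reporter above is resolved through `Reporter.bindTo(delegateToken)`, a custom reporter only has to subclass it and be bound as the delegate. A minimal hedged sketch; the class name and console output are assumptions:

class LogReporter extends Reporter {
  reportMeasureValues(values: MeasureValues): Promise<any> {
    console.log('iteration', values.runIndex, values.values);
    return Promise.resolve(null);
  }
  reportSample(completeSample: MeasureValues[], validSample: MeasureValues[]): Promise<any> {
    console.log(`valid sample of ${validSample.length} / ${completeSample.length} iterations`);
    return Promise.resolve(null);
  }
}

// Wire it in as the Reporter delegate instead of the default MultiReporter:
var providers = [{provide: LogReporter, useValue: new LogReporter()}, Reporter.bindTo(LogReporter)];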
116
modules/@angular/benchpress/src/reporter/console_reporter.ts
Normal file
116
modules/@angular/benchpress/src/reporter/console_reporter.ts
Normal file
@ -0,0 +1,116 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {OpaqueToken} from '@angular/core/src/di';
|
||||
import {ListWrapper, StringMapWrapper} from '@angular/facade/src/collection';
|
||||
import {NumberWrapper, isBlank, isPresent, print} from '@angular/facade/src/lang';
|
||||
import {Math} from '@angular/facade/src/math';
|
||||
|
||||
import {MeasureValues} from '../measure_values';
|
||||
import {Reporter} from '../reporter';
|
||||
import {SampleDescription} from '../sample_description';
|
||||
import {Statistic} from '../statistic';
|
||||
|
||||
|
||||
/**
|
||||
* A reporter for the console
|
||||
*/
|
||||
export class ConsoleReporter extends Reporter {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get PRINT(): OpaqueToken { return _PRINT; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get COLUMN_WIDTH(): OpaqueToken { return _COLUMN_WIDTH; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get PROVIDERS(): any[] { return _PROVIDERS; }
|
||||
|
||||
|
||||
/** @internal */
|
||||
private static _lpad(value, columnWidth, fill = ' ') {
|
||||
var result = '';
|
||||
for (var i = 0; i < columnWidth - value.length; i++) {
|
||||
result += fill;
|
||||
}
|
||||
return result + value;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private static _formatNum(n) { return NumberWrapper.toFixed(n, 2); }
|
||||
|
||||
/** @internal */
|
||||
private static _sortedProps(obj) {
|
||||
var props = [];
|
||||
StringMapWrapper.forEach(obj, (value, prop) => props.push(prop));
|
||||
props.sort();
|
||||
return props;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _metricNames: string[];
|
||||
|
||||
constructor(private _columnWidth: number, sampleDescription, private _print: Function) {
|
||||
super();
|
||||
this._metricNames = ConsoleReporter._sortedProps(sampleDescription.metrics);
|
||||
this._printDescription(sampleDescription);
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _printDescription(sampleDescription) {
|
||||
this._print(`BENCHMARK ${sampleDescription.id}`);
|
||||
this._print('Description:');
|
||||
var props = ConsoleReporter._sortedProps(sampleDescription.description);
|
||||
props.forEach((prop) => { this._print(`- ${prop}: ${sampleDescription.description[prop]}`); });
|
||||
this._print('Metrics:');
|
||||
this._metricNames.forEach((metricName) => {
|
||||
this._print(`- ${metricName}: ${sampleDescription.metrics[metricName]}`);
|
||||
});
|
||||
this._print('');
|
||||
this._printStringRow(this._metricNames);
|
||||
this._printStringRow(this._metricNames.map((_) => ''), '-');
|
||||
}
|
||||
|
||||
reportMeasureValues(measureValues: MeasureValues): Promise<any> {
|
||||
var formattedValues = this._metricNames.map(metricName => {
|
||||
var value = measureValues.values[metricName];
|
||||
return ConsoleReporter._formatNum(value);
|
||||
});
|
||||
this._printStringRow(formattedValues);
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
|
||||
reportSample(completeSample: MeasureValues[], validSamples: MeasureValues[]): Promise<any> {
|
||||
this._printStringRow(this._metricNames.map((_) => ''), '=');
|
||||
this._printStringRow(this._metricNames.map(metricName => {
|
||||
var samples = validSamples.map(measureValues => measureValues.values[metricName]);
|
||||
var mean = Statistic.calculateMean(samples);
|
||||
var cv = Statistic.calculateCoefficientOfVariation(samples, mean);
|
||||
var formattedMean = ConsoleReporter._formatNum(mean);
|
||||
// Note: Don't use the unicode character for +- as it might cause
// hiccups for consoles...
|
||||
return NumberWrapper.isNaN(cv) ? formattedMean : `${formattedMean}+-${Math.floor(cv)}%`;
|
||||
}));
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _printStringRow(parts: any[], fill = ' ') {
|
||||
this._print(
|
||||
parts.map(part => ConsoleReporter._lpad(part, this._columnWidth, fill)).join(' | '));
|
||||
}
|
||||
}
|
||||
|
||||
var _PRINT = new OpaqueToken('ConsoleReporter.print');
|
||||
var _COLUMN_WIDTH = new OpaqueToken('ConsoleReporter.columnWidth');
|
||||
var _PROVIDERS = [
|
||||
{
|
||||
provide: ConsoleReporter,
|
||||
useFactory: (columnWidth, sampleDescription, print) =>
|
||||
new ConsoleReporter(columnWidth, sampleDescription, print),
|
||||
deps: [_COLUMN_WIDTH, SampleDescription, _PRINT]
|
||||
},
|
||||
{provide: _COLUMN_WIDTH, useValue: 18}, {provide: _PRINT, useValue: print}
|
||||
];
|
@ -0,0 +1,67 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {OpaqueToken} from '@angular/core/src/di';
import {DateWrapper, Json, isBlank, isPresent} from '@angular/facade/src/lang';

import {Options} from '../common_options';
import {MeasureValues} from '../measure_values';
import {Reporter} from '../reporter';
import {SampleDescription} from '../sample_description';


/**
 * A reporter that writes results into a json file.
 */
export class JsonFileReporter extends Reporter {
  // TODO(tbosch): use static values when our transpiler supports them
  static get PATH(): OpaqueToken { return _PATH; }
  // TODO(tbosch): use static values when our transpiler supports them
  static get PROVIDERS(): any[] { return _PROVIDERS; }

  /** @internal */
  private _writeFile: Function;
  /** @internal */
  private _path: string;
  /** @internal */
  private _description: SampleDescription;
  /** @internal */
  private _now: Function;

  constructor(sampleDescription, path, writeFile, now) {
    super();
    this._description = sampleDescription;
    this._path = path;
    this._writeFile = writeFile;
    this._now = now;
  }

  reportMeasureValues(measureValues: MeasureValues): Promise<any> { return Promise.resolve(null); }

  reportSample(completeSample: MeasureValues[], validSample: MeasureValues[]): Promise<any> {
    var content = Json.stringify({
      'description': this._description,
      'completeSample': completeSample,
      'validSample': validSample
    });
    var filePath =
        `${this._path}/${this._description.id}_${DateWrapper.toMillis(this._now())}.json`;
    return this._writeFile(filePath, content);
  }
}

var _PATH = new OpaqueToken('JsonFileReporter.path');
var _PROVIDERS = [
  {
    provide: JsonFileReporter,
    useFactory: (sampleDescription, path, writeFile, now) =>
        new JsonFileReporter(sampleDescription, path, writeFile, now),
    deps: [SampleDescription, _PATH, Options.WRITE_FILE, Options.NOW]
  },
  {provide: _PATH, useValue: '.'}
];
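JsonFileReporter above writes one JSON document per sample to the directory bound to `JsonFileReporter.PATH` (default `'.'`), named `<sampleId>_<timestampMillis>.json`. A hedged sketch of adding it next to the console reporter; the output directory is illustrative:

runner.sample({
  id: 'example.json',
  execute: () => { /* ... */ },
  providers: [
    JsonFileReporter.PROVIDERS,
    {provide: JsonFileReporter.PATH, useValue: 'dist/benchmark-results'},
    // Later providers override earlier ones for the same token, so this
    // replaces the default console-only reporter composition.
    MultiReporter.createBindings([ConsoleReporter, JsonFileReporter])
  ]
});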
48
modules/@angular/benchpress/src/reporter/multi_reporter.ts
Normal file
48
modules/@angular/benchpress/src/reporter/multi_reporter.ts
Normal file
@ -0,0 +1,48 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {Injector, OpaqueToken} from '@angular/core/src/di';

import {MeasureValues} from '../measure_values';
import {Reporter} from '../reporter';

export class MultiReporter extends Reporter {
  static createBindings(childTokens: any[]): any[] {
    return [
      {
        provide: _CHILDREN,
        useFactory: (injector: Injector) => childTokens.map(token => injector.get(token)),
        deps: [Injector],
      },
      {
        provide: MultiReporter,
        useFactory: children => new MultiReporter(children),
        deps: [_CHILDREN]
      }
    ];
  }

  /** @internal */
  private _reporters: Reporter[];

  constructor(reporters) {
    super();
    this._reporters = reporters;
  }

  reportMeasureValues(values: MeasureValues): Promise<any[]> {
    return Promise.all(this._reporters.map(reporter => reporter.reportMeasureValues(values)));
  }

  reportSample(completeSample: MeasureValues[], validSample: MeasureValues[]): Promise<any[]> {
    return Promise.all(
        this._reporters.map(reporter => reporter.reportSample(completeSample, validSample)));
  }
}

var _CHILDREN = new OpaqueToken('MultiReporter.children');
114
modules/@angular/benchpress/src/runner.ts
Normal file
114
modules/@angular/benchpress/src/runner.ts
Normal file
@ -0,0 +1,114 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {Provider, ReflectiveInjector} from '@angular/core';
|
||||
import {isBlank, isPresent} from '@angular/facade/src/lang';
|
||||
|
||||
import {Options} from './common_options';
|
||||
import {Metric} from './metric';
|
||||
import {MultiMetric} from './metric/multi_metric';
|
||||
import {PerflogMetric} from './metric/perflog_metric';
|
||||
import {UserMetric} from './metric/user_metric';
|
||||
import {Reporter} from './reporter';
|
||||
import {ConsoleReporter} from './reporter/console_reporter';
|
||||
import {MultiReporter} from './reporter/multi_reporter';
|
||||
import {SampleDescription} from './sample_description';
|
||||
import {SampleState, Sampler} from './sampler';
|
||||
import {Validator} from './validator';
|
||||
import {RegressionSlopeValidator} from './validator/regression_slope_validator';
|
||||
import {SizeValidator} from './validator/size_validator';
|
||||
import {WebDriverAdapter} from './web_driver_adapter';
|
||||
import {WebDriverExtension} from './web_driver_extension';
|
||||
import {ChromeDriverExtension} from './webdriver/chrome_driver_extension';
|
||||
import {FirefoxDriverExtension} from './webdriver/firefox_driver_extension';
|
||||
import {IOsDriverExtension} from './webdriver/ios_driver_extension';
|
||||
|
||||
|
||||
/**
|
||||
* The Runner is the main entry point for executing a sample run.
|
||||
* It provides defaults, creates the injector and calls the sampler.
|
||||
*/
|
||||
export class Runner {
|
||||
private _defaultProviders: Provider[];
|
||||
constructor(defaultProviders: Provider[] = null) {
|
||||
if (isBlank(defaultProviders)) {
|
||||
defaultProviders = [];
|
||||
}
|
||||
this._defaultProviders = defaultProviders;
|
||||
}
|
||||
|
||||
sample({id, execute, prepare, microMetrics, providers, userMetrics}: {
|
||||
id: string,
|
||||
execute?: any,
|
||||
prepare?: any,
|
||||
microMetrics?: any,
|
||||
providers?: any,
|
||||
userMetrics?: any
|
||||
}): Promise<SampleState> {
|
||||
var sampleProviders = [
|
||||
_DEFAULT_PROVIDERS, this._defaultProviders, {provide: Options.SAMPLE_ID, useValue: id},
|
||||
{provide: Options.EXECUTE, useValue: execute}
|
||||
];
|
||||
if (isPresent(prepare)) {
|
||||
sampleProviders.push({provide: Options.PREPARE, useValue: prepare});
|
||||
}
|
||||
if (isPresent(microMetrics)) {
|
||||
sampleProviders.push({provide: Options.MICRO_METRICS, useValue: microMetrics});
|
||||
}
|
||||
if (isPresent(userMetrics)) {
|
||||
sampleProviders.push({provide: Options.USER_METRICS, useValue: userMetrics});
|
||||
}
|
||||
if (isPresent(providers)) {
|
||||
sampleProviders.push(providers);
|
||||
}
|
||||
|
||||
var inj = ReflectiveInjector.resolveAndCreate(sampleProviders);
|
||||
var adapter = inj.get(WebDriverAdapter);
|
||||
|
||||
return Promise
|
||||
.all([adapter.capabilities(), adapter.executeScript('return window.navigator.userAgent;')])
|
||||
.then((args) => {
|
||||
var capabilities = args[0];
|
||||
var userAgent = args[1];
|
||||
|
||||
// This might still create instances twice. We are creating a new injector with all the
|
||||
// providers.
|
||||
// Only WebDriverAdapter is reused.
|
||||
// TODO vsavkin consider changing it when toAsyncFactory is added back or when child
|
||||
// injectors are handled better.
|
||||
var injector = ReflectiveInjector.resolveAndCreate([
|
||||
sampleProviders, {provide: Options.CAPABILITIES, useValue: capabilities},
|
||||
{provide: Options.USER_AGENT, useValue: userAgent},
|
||||
{provide: WebDriverAdapter, useValue: adapter}
|
||||
]);
|
||||
|
||||
var sampler = injector.get(Sampler);
|
||||
return sampler.sample();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
var _DEFAULT_PROVIDERS = [
|
||||
Options.DEFAULT_PROVIDERS,
|
||||
Sampler.PROVIDERS,
|
||||
ConsoleReporter.PROVIDERS,
|
||||
RegressionSlopeValidator.PROVIDERS,
|
||||
SizeValidator.PROVIDERS,
|
||||
ChromeDriverExtension.PROVIDERS,
|
||||
FirefoxDriverExtension.PROVIDERS,
|
||||
IOsDriverExtension.PROVIDERS,
|
||||
PerflogMetric.PROVIDERS,
|
||||
UserMetric.PROVIDERS,
|
||||
SampleDescription.PROVIDERS,
|
||||
MultiReporter.createBindings([ConsoleReporter]),
|
||||
MultiMetric.createBindings([PerflogMetric, UserMetric]),
|
||||
Reporter.bindTo(MultiReporter),
|
||||
Validator.bindTo(RegressionSlopeValidator),
|
||||
WebDriverExtension.bindTo([ChromeDriverExtension, FirefoxDriverExtension, IOsDriverExtension]),
|
||||
Metric.bindTo(MultiMetric),
|
||||
];
|
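Runner.sample above layers the default providers, the per-sample options, and finally the detected capabilities and user agent into a ReflectiveInjector before handing off to the Sampler. A hedged end-to-end sketch; the protractor-style `$` helper, the URL handling, and the benchmark id are illustrative, not part of this commit:

var runner = new Runner([
  SeleniumWebDriverAdapter.PROTRACTOR_BINDINGS,  // wraps the global protractor browser
  {provide: Options.FORCE_GC, useValue: false}
]);

// Runs prepare/execute repeatedly until the validator accepts a sample.
runner.sample({
  id: 'tree.create',
  prepare: () => $('#destroyDom').click(),
  execute: () => $('#createDom').click()
}).then((state) => console.log('valid sample size:', state.validSample.length));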
50
modules/@angular/benchpress/src/sample_description.ts
Normal file
50
modules/@angular/benchpress/src/sample_description.ts
Normal file
@ -0,0 +1,50 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {StringMapWrapper} from '@angular/facade/src/collection';

import {Options} from './common_options';
import {Metric} from './metric';
import {Validator} from './validator';


/**
 * SampleDescription merges all available descriptions about a sample
 */
export class SampleDescription {
  // TODO(tbosch): use static values when our transpiler supports them
  static get PROVIDERS(): any[] { return _PROVIDERS; }
  description: {[key: string]: any};

  constructor(
      public id: string, descriptions: Array<{[key: string]: any}>,
      public metrics: {[key: string]: any}) {
    this.description = {};
    descriptions.forEach(description => {
      StringMapWrapper.forEach(description, (value, prop) => this.description[prop] = value);
    });
  }

  toJson() { return {'id': this.id, 'description': this.description, 'metrics': this.metrics}; }
}

var _PROVIDERS = [{
  provide: SampleDescription,
  useFactory: (metric, id, forceGc, userAgent, validator, defaultDesc, userDesc) =>
      new SampleDescription(
          id,
          [
            {'forceGc': forceGc, 'userAgent': userAgent}, validator.describe(),
            defaultDesc, userDesc
          ],
          metric.describe()),
  deps: [
    Metric, Options.SAMPLE_ID, Options.FORCE_GC, Options.USER_AGENT, Validator,
    Options.DEFAULT_DESCRIPTION, Options.SAMPLE_DESCRIPTION
  ]
}];
129
modules/@angular/benchpress/src/sampler.ts
Normal file
129
modules/@angular/benchpress/src/sampler.ts
Normal file
@ -0,0 +1,129 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {Date, DateWrapper, isBlank, isPresent} from '@angular/facade/src/lang';
|
||||
|
||||
import {Options} from './common_options';
|
||||
import {MeasureValues} from './measure_values';
|
||||
import {Metric} from './metric';
|
||||
import {Reporter} from './reporter';
|
||||
import {Validator} from './validator';
|
||||
import {WebDriverAdapter} from './web_driver_adapter';
|
||||
|
||||
|
||||
/**
|
||||
* The Sampler owns the sample loop:
|
||||
* 1. calls the prepare/execute callbacks,
|
||||
* 2. gets data from the metric
|
||||
* 3. asks the validator for a valid sample
|
||||
* 4. reports the new data to the reporter
|
||||
* 5. loop until there is a valid sample
|
||||
*/
|
||||
export class Sampler {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get PROVIDERS(): any[] { return _PROVIDERS; }
|
||||
|
||||
/** @internal */
|
||||
private _driver: WebDriverAdapter;
|
||||
/** @internal */
|
||||
private _metric: Metric;
|
||||
/** @internal */
|
||||
private _reporter: Reporter;
|
||||
/** @internal */
|
||||
private _validator: Validator;
|
||||
/** @internal */
|
||||
private _prepare: Function;
|
||||
/** @internal */
|
||||
private _execute: Function;
|
||||
/** @internal */
|
||||
private _now: Function;
|
||||
|
||||
constructor({driver, metric, reporter, validator, prepare, execute, now}: {
|
||||
driver?: WebDriverAdapter,
|
||||
metric?: Metric,
|
||||
reporter?: Reporter,
|
||||
validator?: Validator,
|
||||
prepare?: Function,
|
||||
execute?: Function,
|
||||
now?: Function
|
||||
} = {}) {
|
||||
this._driver = driver;
|
||||
this._metric = metric;
|
||||
this._reporter = reporter;
|
||||
this._validator = validator;
|
||||
this._prepare = prepare;
|
||||
this._execute = execute;
|
||||
this._now = now;
|
||||
}
|
||||
|
||||
sample(): Promise<SampleState> {
|
||||
var loop;
|
||||
loop = (lastState) => {
|
||||
return this._iterate(lastState).then((newState) => {
|
||||
if (isPresent(newState.validSample)) {
|
||||
return newState;
|
||||
} else {
|
||||
return loop(newState);
|
||||
}
|
||||
});
|
||||
};
|
||||
return loop(new SampleState([], null));
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _iterate(lastState): Promise<SampleState> {
|
||||
var resultPromise: Promise<any>;
|
||||
if (isPresent(this._prepare)) {
|
||||
resultPromise = this._driver.waitFor(this._prepare);
|
||||
} else {
|
||||
resultPromise = Promise.resolve(null);
|
||||
}
|
||||
if (isPresent(this._prepare) || lastState.completeSample.length === 0) {
|
||||
resultPromise = resultPromise.then((_) => this._metric.beginMeasure());
|
||||
}
|
||||
return resultPromise.then((_) => this._driver.waitFor(this._execute))
|
||||
.then((_) => this._metric.endMeasure(isBlank(this._prepare)))
|
||||
.then((measureValues) => this._report(lastState, measureValues));
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _report(state: SampleState, metricValues: {[key: string]: any}): Promise<SampleState> {
|
||||
var measureValues = new MeasureValues(state.completeSample.length, this._now(), metricValues);
|
||||
var completeSample = state.completeSample.concat([measureValues]);
|
||||
var validSample = this._validator.validate(completeSample);
|
||||
var resultPromise = this._reporter.reportMeasureValues(measureValues);
|
||||
if (isPresent(validSample)) {
|
||||
resultPromise =
|
||||
resultPromise.then((_) => this._reporter.reportSample(completeSample, validSample));
|
||||
}
|
||||
return resultPromise.then((_) => new SampleState(completeSample, validSample));
|
||||
}
|
||||
}
|
||||
|
||||
export class SampleState {
|
||||
constructor(public completeSample: any[], public validSample: any[]) {}
|
||||
}
|
||||
|
||||
var _PROVIDERS = [{
|
||||
provide: Sampler,
|
||||
useFactory: (driver, metric, reporter, validator, prepare, execute, now) => new Sampler({
|
||||
driver: driver,
|
||||
reporter: reporter,
|
||||
validator: validator,
|
||||
metric: metric,
|
||||
// TODO(tbosch): DI right now does not support null/undefined objects
|
||||
// Mostly because the cache would have to be initialized with a
|
||||
// special null object, which is expensive.
|
||||
prepare: prepare !== false ? prepare : null,
|
||||
execute: execute,
|
||||
now: now
|
||||
}),
|
||||
deps: [
|
||||
WebDriverAdapter, Metric, Reporter, Validator, Options.PREPARE, Options.EXECUTE, Options.NOW
|
||||
]
|
||||
}];
|
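The loop documented on Sampler above (prepare, execute, measure, validate, report, repeat) can also be driven directly with stubbed collaborators, which is roughly how its unit tests exercise it. All stubs below are illustrative, not part of this commit:

var sampler = new Sampler({
  driver: <any>{waitFor: (callback: Function) => Promise.resolve(callback())},
  metric: <any>{
    beginMeasure: () => Promise.resolve(null),
    endMeasure: (restart: boolean) => Promise.resolve({'scriptTime': 10})
  },
  reporter: <any>{
    reportMeasureValues: () => Promise.resolve(null),
    reportSample: () => Promise.resolve(null)
  },
  // Accept the sample once three iterations have been collected.
  validator: <any>{validate: (sample) => sample.length >= 3 ? sample : null},
  execute: () => {},
  now: () => new Date()
});

sampler.sample().then((state) => console.log(state.validSample.length));  // logs 3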
43
modules/@angular/benchpress/src/statistic.ts
Normal file
43
modules/@angular/benchpress/src/statistic.ts
Normal file
@ -0,0 +1,43 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {Math} from '@angular/facade/src/math';

export class Statistic {
  static calculateCoefficientOfVariation(sample, mean) {
    return Statistic.calculateStandardDeviation(sample, mean) / mean * 100;
  }

  static calculateMean(samples: number[]) {
    var total = 0;
    // TODO: use reduce
    samples.forEach(x => total += x);
    return total / samples.length;
  }

  static calculateStandardDeviation(samples: number[], mean) {
    var deviation = 0;
    // TODO: use reduce
    samples.forEach(x => deviation += Math.pow(x - mean, 2));
    deviation = deviation / (samples.length);
    deviation = Math.sqrt(deviation);
    return deviation;
  }

  static calculateRegressionSlope(
      xValues: number[], xMean: number, yValues: number[], yMean: number) {
    // See http://en.wikipedia.org/wiki/Simple_linear_regression
    var dividendSum = 0;
    var divisorSum = 0;
    for (var i = 0; i < xValues.length; i++) {
      dividendSum += (xValues[i] - xMean) * (yValues[i] - yMean);
      divisorSum += Math.pow(xValues[i] - xMean, 2);
    }
    return dividendSum / divisorSum;
  }
}
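calculateRegressionSlope above is plain simple linear regression over (x, y) pairs. A small worked example with numbers chosen so the arithmetic is easy to verify by hand:

// x = iteration index, y = e.g. scriptTime in ms
var xValues = [0, 1, 2];
var yValues = [10, 12, 14];
var xMean = Statistic.calculateMean(xValues);  // 1
var yMean = Statistic.calculateMean(yValues);  // 12
// dividendSum = (0-1)*(10-12) + (1-1)*(12-12) + (2-1)*(14-12) = 4
// divisorSum  = (0-1)^2 + (1-1)^2 + (2-1)^2 = 2
Statistic.calculateRegressionSlope(xValues, xMean, yValues, yMean);  // 4 / 2 = 2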
31
modules/@angular/benchpress/src/validator.ts
Normal file
31
modules/@angular/benchpress/src/validator.ts
Normal file
@ -0,0 +1,31 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {MeasureValues} from './measure_values';

/**
 * A Validator calculates a valid sample out of the complete sample.
 * A valid sample is a sample that represents the population that should be observed
 * in the correct way.
 */
export abstract class Validator {
  static bindTo(delegateToken): any[] {
    return [{provide: Validator, useFactory: (delegate) => delegate, deps: [delegateToken]}];
  }

  /**
   * Calculates a valid sample out of the complete sample
   */
  validate(completeSample: MeasureValues[]): MeasureValues[] { throw new Error('NYI'); }

  /**
   * Returns a Map that describes the properties of the validator
   * (e.g. sample size, ...)
   */
  describe(): {[key: string]: any} { throw new Error('NYI'); }
}
@ -0,0 +1,74 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {OpaqueToken} from '@angular/core/src/di';
|
||||
import {ListWrapper} from '@angular/facade/src/collection';
|
||||
|
||||
import {MeasureValues} from '../measure_values';
|
||||
import {Statistic} from '../statistic';
|
||||
import {Validator} from '../validator';
|
||||
|
||||
|
||||
/**
|
||||
* A validator that checks the regression slope of a specific metric.
|
||||
* Waits for the regression slope to be >=0.
|
||||
*/
|
||||
export class RegressionSlopeValidator extends Validator {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get SAMPLE_SIZE(): OpaqueToken { return _SAMPLE_SIZE; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get METRIC(): OpaqueToken { return _METRIC; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get PROVIDERS(): any[] { return _PROVIDERS; }
|
||||
|
||||
/** @internal */
|
||||
private _sampleSize: number;
|
||||
/** @internal */
|
||||
private _metric: string;
|
||||
|
||||
constructor(sampleSize, metric) {
|
||||
super();
|
||||
this._sampleSize = sampleSize;
|
||||
this._metric = metric;
|
||||
}
|
||||
|
||||
describe(): {[key: string]: any} {
|
||||
return {'sampleSize': this._sampleSize, 'regressionSlopeMetric': this._metric};
|
||||
}
|
||||
|
||||
validate(completeSample: MeasureValues[]): MeasureValues[] {
|
||||
if (completeSample.length >= this._sampleSize) {
|
||||
var latestSample = ListWrapper.slice(
|
||||
completeSample, completeSample.length - this._sampleSize, completeSample.length);
|
||||
var xValues = [];
|
||||
var yValues = [];
|
||||
for (var i = 0; i < latestSample.length; i++) {
|
||||
// For now, we only use the array index as x value.
|
||||
// TODO(tbosch): think about whether we should use time here instead
|
||||
xValues.push(i);
|
||||
yValues.push(latestSample[i].values[this._metric]);
|
||||
}
|
||||
var regressionSlope = Statistic.calculateRegressionSlope(
|
||||
xValues, Statistic.calculateMean(xValues), yValues, Statistic.calculateMean(yValues));
|
||||
return regressionSlope >= 0 ? latestSample : null;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var _SAMPLE_SIZE = new OpaqueToken('RegressionSlopeValidator.sampleSize');
|
||||
var _METRIC = new OpaqueToken('RegressionSlopeValidator.metric');
|
||||
var _PROVIDERS = [
|
||||
{
|
||||
provide: RegressionSlopeValidator,
|
||||
useFactory: (sampleSize, metric) => new RegressionSlopeValidator(sampleSize, metric),
|
||||
deps: [_SAMPLE_SIZE, _METRIC]
|
||||
},
|
||||
{provide: _SAMPLE_SIZE, useValue: 10}, {provide: _METRIC, useValue: 'scriptTime'}
|
||||
];
|
49
modules/@angular/benchpress/src/validator/size_validator.ts
Normal file
49
modules/@angular/benchpress/src/validator/size_validator.ts
Normal file
@ -0,0 +1,49 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {OpaqueToken} from '@angular/core/src/di';
|
||||
import {ListWrapper} from '@angular/facade/src/collection';
|
||||
|
||||
import {MeasureValues} from '../measure_values';
|
||||
import {Validator} from '../validator';
|
||||
|
||||
|
||||
/**
|
||||
* A validator that waits for the sample to have a certain size.
|
||||
*/
|
||||
export class SizeValidator extends Validator {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get PROVIDERS(): any[] { return _PROVIDERS; }
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get SAMPLE_SIZE() { return _SAMPLE_SIZE; }
|
||||
|
||||
/** @internal */
|
||||
private _sampleSize: number;
|
||||
|
||||
constructor(size) {
|
||||
super();
|
||||
this._sampleSize = size;
|
||||
}
|
||||
|
||||
describe(): {[key: string]: any} { return {'sampleSize': this._sampleSize}; }
|
||||
|
||||
validate(completeSample: MeasureValues[]): MeasureValues[] {
|
||||
if (completeSample.length >= this._sampleSize) {
|
||||
return ListWrapper.slice(
|
||||
completeSample, completeSample.length - this._sampleSize, completeSample.length);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var _SAMPLE_SIZE = new OpaqueToken('SizeValidator.sampleSize');
|
||||
var _PROVIDERS = [
|
||||
{provide: SizeValidator, useFactory: (size) => new SizeValidator(size), deps: [_SAMPLE_SIZE]},
|
||||
{provide: _SAMPLE_SIZE, useValue: 10}
|
||||
];
|
26
modules/@angular/benchpress/src/web_driver_adapter.ts
Normal file
26
modules/@angular/benchpress/src/web_driver_adapter.ts
Normal file
@ -0,0 +1,26 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */


/**
 * A WebDriverAdapter bridges API differences between different WebDriver clients,
 * e.g. JS vs Dart Async vs Dart Sync webdriver.
 * Needs one implementation for every supported WebDriver client.
 */
export abstract class WebDriverAdapter {
  static bindTo(delegateToken): any[] {
    return [{provide: WebDriverAdapter, useFactory: (delegate) => delegate, deps: [delegateToken]}];
  }

  waitFor(callback: Function): Promise<any> { throw new Error('NYI'); }
  executeScript(script: string): Promise<any> { throw new Error('NYI'); }
  executeAsyncScript(script: string): Promise<any> { throw new Error('NYI'); }
  capabilities(): Promise<Map<string, any>> { throw new Error('NYI'); }
  logs(type: string): Promise<any[]> { throw new Error('NYI'); }
}
90
modules/@angular/benchpress/src/web_driver_extension.ts
Normal file
90
modules/@angular/benchpress/src/web_driver_extension.ts
Normal file
@ -0,0 +1,90 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {Injector, OpaqueToken} from '@angular/core/src/di';
|
||||
import {isBlank, isPresent} from '@angular/facade/src/lang';
|
||||
|
||||
import {Options} from './common_options';
|
||||
|
||||
|
||||
/**
|
||||
* A WebDriverExtension implements extended commands of the webdriver protocol
|
||||
* for a given browser, independent of the WebDriverAdapter.
|
||||
* Needs one implementation for every supported Browser.
|
||||
*/
|
||||
export abstract class WebDriverExtension {
|
||||
static bindTo(childTokens: any[]): any[] {
|
||||
var res = [
|
||||
{
|
||||
provide: _CHILDREN,
|
||||
useFactory: (injector: Injector) => childTokens.map(token => injector.get(token)),
|
||||
deps: [Injector]
|
||||
},
|
||||
{
|
||||
provide: WebDriverExtension,
|
||||
useFactory: (children: WebDriverExtension[], capabilities) => {
|
||||
var delegate;
|
||||
children.forEach(extension => {
|
||||
if (extension.supports(capabilities)) {
|
||||
delegate = extension;
|
||||
}
|
||||
});
|
||||
if (isBlank(delegate)) {
|
||||
throw new Error('Could not find a delegate for given capabilities!');
|
||||
}
|
||||
return delegate;
|
||||
},
|
||||
deps: [_CHILDREN, Options.CAPABILITIES]
|
||||
}
|
||||
];
|
||||
return res;
|
||||
}
|
||||
|
||||
gc(): Promise<any> { throw new Error('NYI'); }
|
||||
|
||||
timeBegin(name: string): Promise<any> { throw new Error('NYI'); }
|
||||
|
||||
timeEnd(name: string, restartName: string): Promise<any> { throw new Error('NYI'); }
|
||||
|
||||
/**
|
||||
* Format:
|
||||
* - cat: category of the event
|
||||
* - name: event name: 'script', 'gc', 'render', ...
|
||||
* - ph: phase: 'B' (begin), 'E' (end), 'b' (nestable start), 'e' (nestable end), 'X' (Complete
|
||||
*event)
|
||||
* - ts: timestamp in ms, e.g. 12345
|
||||
* - pid: process id
|
||||
* - args: arguments, e.g. {heapSize: 1234}
|
||||
*
|
||||
* Based on [Chrome Trace Event
|
||||
*Format](https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/edit)
|
||||
**/
|
||||
readPerfLog(): Promise<any[]> { throw new Error('NYI'); }
|
||||
|
||||
perfLogFeatures(): PerfLogFeatures { throw new Error('NYI'); }
|
||||
|
||||
supports(capabilities: {[key: string]: any}): boolean { return true; }
|
||||
}
|
||||
|
||||
export class PerfLogFeatures {
|
||||
render: boolean;
|
||||
gc: boolean;
|
||||
frameCapture: boolean;
|
||||
userTiming: boolean;
|
||||
|
||||
constructor(
|
||||
{render = false, gc = false, frameCapture = false, userTiming = false}:
|
||||
{render?: boolean, gc?: boolean, frameCapture?: boolean, userTiming?: boolean} = {}) {
|
||||
this.render = render;
|
||||
this.gc = gc;
|
||||
this.frameCapture = frameCapture;
|
||||
this.userTiming = userTiming;
|
||||
}
|
||||
}
|
||||
|
||||
var _CHILDREN = new OpaqueToken('WebDriverExtension.children');
|
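Every readPerfLog implementation above normalizes browser-specific log entries into the event shape documented on WebDriverExtension. A hedged example of what a few normalized entries might look like; the values are illustrative:

var examplePerfLog = [
  // A complete ('X') render event that took 3ms.
  {'cat': 'timeline', 'name': 'render', 'ph': 'X', 'ts': 1230, 'dur': 3, 'pid': 'pid0'},
  // A begin/end pair bracketing 5ms of script time.
  {'cat': 'timeline', 'name': 'script', 'ph': 'B', 'ts': 1234, 'pid': 'pid0'},
  {'cat': 'timeline', 'name': 'script', 'ph': 'E', 'ts': 1239, 'pid': 'pid0'}
];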
@ -0,0 +1,35 @@
library benchpress.src.webdriver.async_webdriver_adapter_dart;

import 'dart:async';
import 'package:webdriver/webdriver.dart' show WebDriver, LogEntry;
import '../web_driver_adapter.dart' show WebDriverAdapter;

class AsyncWebDriverAdapter extends WebDriverAdapter {
  WebDriver _driver;
  AsyncWebDriverAdapter(this._driver);

  Future waitFor(Function callback) {
    return callback();
  }

  Future executeScript(String script) {
    return _driver.execute(script, const []);
  }

  Future executeAsyncScript(String script) {
    return _driver.executeAsync(script, const []);
  }

  Future<Map> capabilities() {
    return new Future.value(_driver.capabilities);
  }

  Future<List<Map>> logs(String type) {
    return _driver.logs
        .get(type)
        .map((LogEntry entry) => {'message': entry.message})
        .fold(<Map>[], (log, Map entry) {
      return log..add(entry);
    });
  }
}
@ -0,0 +1,257 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {ListWrapper, StringMapWrapper} from '@angular/facade/src/collection';
|
||||
import {Json, NumberWrapper, StringWrapper, isBlank, isPresent} from '@angular/facade/src/lang';
|
||||
|
||||
import {Options} from '../common_options';
|
||||
import {WebDriverAdapter} from '../web_driver_adapter';
|
||||
import {PerfLogFeatures, WebDriverExtension} from '../web_driver_extension';
|
||||
|
||||
|
||||
/**
|
||||
* Set the following 'traceCategories' to collect metrics in Chrome:
|
||||
* 'v8,blink.console,disabled-by-default-devtools.timeline,devtools.timeline'
|
||||
*
|
||||
* In order to collect the frame rate related metrics, add 'benchmark'
|
||||
* to the list above.
|
||||
*/
|
||||
export class ChromeDriverExtension extends WebDriverExtension {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get PROVIDERS(): any[] { return _PROVIDERS; }
|
||||
|
||||
private _majorChromeVersion: number;
|
||||
|
||||
constructor(private _driver: WebDriverAdapter, userAgent: string) {
|
||||
super();
|
||||
this._majorChromeVersion = this._parseChromeVersion(userAgent);
|
||||
}
|
||||
|
||||
private _parseChromeVersion(userAgent: string): number {
|
||||
if (isBlank(userAgent)) {
|
||||
return -1;
|
||||
}
|
||||
var v = StringWrapper.split(userAgent, /Chrom(e|ium)\//g)[2];
|
||||
if (isBlank(v)) {
|
||||
return -1;
|
||||
}
|
||||
v = v.split('.')[0];
|
||||
if (isBlank(v)) {
|
||||
return -1;
|
||||
}
|
||||
return NumberWrapper.parseInt(v, 10);
|
||||
}
|
||||
|
||||
gc() { return this._driver.executeScript('window.gc()'); }
|
||||
|
||||
timeBegin(name: string): Promise<any> {
|
||||
return this._driver.executeScript(`console.time('${name}');`);
|
||||
}
|
||||
|
||||
timeEnd(name: string, restartName: string = null): Promise<any> {
|
||||
var script = `console.timeEnd('${name}');`;
|
||||
if (isPresent(restartName)) {
|
||||
script += `console.time('${restartName}');`;
|
||||
}
|
||||
return this._driver.executeScript(script);
|
||||
}
|
||||
|
||||
// See [Chrome Trace Event
|
||||
// Format](https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/edit)
|
||||
readPerfLog(): Promise<any> {
|
||||
// TODO(tbosch): Chromedriver bug https://code.google.com/p/chromedriver/issues/detail?id=1098
|
||||
// Need to execute at least one command so that the browser logs can be read out!
|
||||
return this._driver.executeScript('1+1')
|
||||
.then((_) => this._driver.logs('performance'))
|
||||
.then((entries) => {
|
||||
var events = [];
|
||||
entries.forEach(entry => {
|
||||
var message = Json.parse(entry['message'])['message'];
|
||||
if (StringWrapper.equals(message['method'], 'Tracing.dataCollected')) {
|
||||
events.push(message['params']);
|
||||
}
|
||||
if (StringWrapper.equals(message['method'], 'Tracing.bufferUsage')) {
|
||||
throw new Error('The DevTools trace buffer filled during the test!');
|
||||
}
|
||||
});
|
||||
return this._convertPerfRecordsToEvents(events);
|
||||
});
|
||||
}
|
||||
|
||||
private _convertPerfRecordsToEvents(
|
||||
chromeEvents: Array<{[key: string]: any}>,
|
||||
normalizedEvents: Array<{[key: string]: any}> = null) {
|
||||
if (isBlank(normalizedEvents)) {
|
||||
normalizedEvents = [];
|
||||
}
|
||||
var majorGCPids = {};
|
||||
chromeEvents.forEach((event) => {
|
||||
var categories = this._parseCategories(event['cat']);
|
||||
var name = event['name'];
|
||||
if (this._isEvent(categories, name, ['blink.console'])) {
|
||||
normalizedEvents.push(normalizeEvent(event, {'name': name}));
|
||||
} else if (this._isEvent(
|
||||
categories, name, ['benchmark'],
|
||||
'BenchmarkInstrumentation::ImplThreadRenderingStats')) {
|
||||
// TODO(goderbauer): Instead of BenchmarkInstrumentation::ImplThreadRenderingStats the
// following events should be used (if available) for more accurate measurements:
|
||||
// 1st choice: vsync_before - ground truth on Android
|
||||
// 2nd choice: BenchmarkInstrumentation::DisplayRenderingStats - available on systems with
|
||||
// new surfaces framework (not broadly enabled yet)
|
||||
// 3rd choice: BenchmarkInstrumentation::ImplThreadRenderingStats - fallback event that is
|
||||
// always available if something is rendered
|
||||
var frameCount = event['args']['data']['frame_count'];
|
||||
if (frameCount > 1) {
|
||||
throw new Error('multi-frame render stats not supported');
|
||||
}
|
||||
if (frameCount == 1) {
|
||||
normalizedEvents.push(normalizeEvent(event, {'name': 'frame'}));
|
||||
}
|
||||
} else if (
|
||||
this._isEvent(categories, name, ['disabled-by-default-devtools.timeline'], 'Rasterize') ||
|
||||
this._isEvent(
|
||||
categories, name, ['disabled-by-default-devtools.timeline'], 'CompositeLayers')) {
|
||||
normalizedEvents.push(normalizeEvent(event, {'name': 'render'}));
|
||||
} else if (this._majorChromeVersion < 45) {
|
||||
var normalizedEvent = this._processAsPreChrome45Event(event, categories, majorGCPids);
|
||||
if (normalizedEvent != null) normalizedEvents.push(normalizedEvent);
|
||||
} else {
|
||||
var normalizedEvent = this._processAsPostChrome44Event(event, categories);
|
||||
if (normalizedEvent != null) normalizedEvents.push(normalizedEvent);
|
||||
}
|
||||
});
|
||||
return normalizedEvents;
|
||||
}
|
||||
|
||||
private _processAsPreChrome45Event(event, categories, majorGCPids) {
|
||||
var name = event['name'];
|
||||
var args = event['args'];
|
||||
var pid = event['pid'];
|
||||
var ph = event['ph'];
|
||||
if (this._isEvent(
|
||||
categories, name, ['disabled-by-default-devtools.timeline'], 'FunctionCall') &&
|
||||
(isBlank(args) || isBlank(args['data']) ||
|
||||
!StringWrapper.equals(args['data']['scriptName'], 'InjectedScript'))) {
|
||||
return normalizeEvent(event, {'name': 'script'});
|
||||
} else if (
|
||||
this._isEvent(
|
||||
categories, name, ['disabled-by-default-devtools.timeline'], 'RecalculateStyles') ||
|
||||
this._isEvent(categories, name, ['disabled-by-default-devtools.timeline'], 'Layout') ||
|
||||
this._isEvent(
|
||||
categories, name, ['disabled-by-default-devtools.timeline'], 'UpdateLayerTree') ||
|
||||
this._isEvent(categories, name, ['disabled-by-default-devtools.timeline'], 'Paint')) {
|
||||
return normalizeEvent(event, {'name': 'render'});
|
||||
} else if (this._isEvent(
|
||||
categories, name, ['disabled-by-default-devtools.timeline'], 'GCEvent')) {
|
||||
var normArgs = {
|
||||
'usedHeapSize': isPresent(args['usedHeapSizeAfter']) ? args['usedHeapSizeAfter'] :
|
||||
args['usedHeapSizeBefore']
|
||||
};
|
||||
if (StringWrapper.equals(ph, 'E')) {
|
||||
normArgs['majorGc'] = isPresent(majorGCPids[pid]) && majorGCPids[pid];
|
||||
}
|
||||
majorGCPids[pid] = false;
|
||||
return normalizeEvent(event, {'name': 'gc', 'args': normArgs});
|
||||
} else if (
|
||||
this._isEvent(categories, name, ['v8'], 'majorGC') && StringWrapper.equals(ph, 'B')) {
|
||||
majorGCPids[pid] = true;
|
||||
}
|
||||
return null; // nothing useful in this event
|
||||
}
|
||||
|
||||
private _processAsPostChrome44Event(event, categories) {
|
||||
var name = event['name'];
|
||||
var args = event['args'];
|
||||
if (this._isEvent(categories, name, ['devtools.timeline', 'v8'], 'MajorGC')) {
|
||||
var normArgs = {
|
||||
'majorGc': true,
|
||||
'usedHeapSize': isPresent(args['usedHeapSizeAfter']) ? args['usedHeapSizeAfter'] :
|
||||
args['usedHeapSizeBefore']
|
||||
};
|
||||
return normalizeEvent(event, {'name': 'gc', 'args': normArgs});
|
||||
} else if (this._isEvent(categories, name, ['devtools.timeline', 'v8'], 'MinorGC')) {
|
||||
var normArgs = {
|
||||
'majorGc': false,
|
||||
'usedHeapSize': isPresent(args['usedHeapSizeAfter']) ? args['usedHeapSizeAfter'] :
|
||||
args['usedHeapSizeBefore']
|
||||
};
|
||||
return normalizeEvent(event, {'name': 'gc', 'args': normArgs});
|
||||
} else if (
|
||||
this._isEvent(categories, name, ['devtools.timeline', 'v8'], 'FunctionCall') &&
|
||||
(isBlank(args) || isBlank(args['data']) ||
|
||||
(!StringWrapper.equals(args['data']['scriptName'], 'InjectedScript') &&
|
||||
!StringWrapper.equals(args['data']['scriptName'], '')))) {
|
||||
return normalizeEvent(event, {'name': 'script'});
|
||||
} else if (this._isEvent(
|
||||
categories, name, ['devtools.timeline', 'blink'], 'UpdateLayoutTree')) {
|
||||
return normalizeEvent(event, {'name': 'render'});
|
||||
} else if (
|
||||
this._isEvent(categories, name, ['devtools.timeline'], 'UpdateLayerTree') ||
|
||||
this._isEvent(categories, name, ['devtools.timeline'], 'Layout') ||
|
||||
this._isEvent(categories, name, ['devtools.timeline'], 'Paint')) {
|
||||
return normalizeEvent(event, {'name': 'render'});
|
||||
} else if (this._isEvent(categories, name, ['devtools.timeline'], 'ResourceReceivedData')) {
|
||||
let normArgs = {'encodedDataLength': args['data']['encodedDataLength']};
|
||||
return normalizeEvent(event, {'name': 'receivedData', 'args': normArgs});
|
||||
} else if (this._isEvent(categories, name, ['devtools.timeline'], 'ResourceSendRequest')) {
|
||||
let data = args['data'];
|
||||
let normArgs = {'url': data['url'], 'method': data['requestMethod']};
|
||||
return normalizeEvent(event, {'name': 'sendRequest', 'args': normArgs});
|
||||
} else if (this._isEvent(categories, name, ['blink.user_timing'], 'navigationStart')) {
|
||||
return normalizeEvent(event, {'name': name});
|
||||
}
|
||||
return null; // nothing useful in this event
|
||||
}
|
||||
|
||||
private _parseCategories(categories: string): string[] { return categories.split(','); }
|
||||
|
||||
private _isEvent(
|
||||
eventCategories: string[], eventName: string, expectedCategories: string[],
|
||||
expectedName: string = null): boolean {
|
||||
var hasCategories = expectedCategories.reduce(
|
||||
(value, cat) => { return value && ListWrapper.contains(eventCategories, cat); }, true);
|
||||
return isBlank(expectedName) ? hasCategories :
|
||||
hasCategories && StringWrapper.equals(eventName, expectedName);
|
||||
}
|
||||
|
||||
perfLogFeatures(): PerfLogFeatures {
|
||||
return new PerfLogFeatures({render: true, gc: true, frameCapture: true, userTiming: true});
|
||||
}
|
||||
|
||||
supports(capabilities: {[key: string]: any}): boolean {
|
||||
return this._majorChromeVersion != -1 &&
|
||||
StringWrapper.equals(capabilities['browserName'].toLowerCase(), 'chrome');
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeEvent(
|
||||
chromeEvent: {[key: string]: any}, data: {[key: string]: any}): {[key: string]: any} {
|
||||
var ph = chromeEvent['ph'];
|
||||
if (StringWrapper.equals(ph, 'S')) {
|
||||
ph = 'b';
|
||||
} else if (StringWrapper.equals(ph, 'F')) {
|
||||
ph = 'e';
|
||||
}
|
||||
var result =
|
||||
{'pid': chromeEvent['pid'], 'ph': ph, 'cat': 'timeline', 'ts': chromeEvent['ts'] / 1000};
|
||||
if (chromeEvent['ph'] === 'X') {
|
||||
var dur = chromeEvent['dur'];
|
||||
if (isBlank(dur)) {
|
||||
dur = chromeEvent['tdur'];
|
||||
}
|
||||
result['dur'] = isBlank(dur) ? 0.0 : dur / 1000;
|
||||
}
|
||||
StringMapWrapper.forEach(data, (value, prop) => { result[prop] = value; });
|
||||
return result;
|
||||
}
|
||||
|
||||
var _PROVIDERS = [{
|
||||
provide: ChromeDriverExtension,
|
||||
useFactory: (driver, userAgent) => new ChromeDriverExtension(driver, userAgent),
|
||||
deps: [WebDriverAdapter, Options.USER_AGENT]
|
||||
}];
|
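The trace categories named in the ChromeDriverExtension doc comment have to be enabled on the WebDriver session itself, otherwise the performance log stays empty. A hedged sketch of the corresponding capabilities; the exact chromedriver option keys are an assumption based on common chromedriver usage, not something this commit defines:

var chromeCapabilities = {
  browserName: 'chrome',
  loggingPrefs: {performance: 'ALL'},
  chromeOptions: {
    perfLoggingPrefs: {
      traceCategories:
          'v8,blink.console,disabled-by-default-devtools.timeline,devtools.timeline,benchmark'
    }
  }
};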
@ -0,0 +1,57 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {StringWrapper, isPresent} from '@angular/facade/src/lang';
|
||||
|
||||
import {WebDriverAdapter} from '../web_driver_adapter';
|
||||
import {PerfLogFeatures, WebDriverExtension} from '../web_driver_extension';
|
||||
|
||||
export class FirefoxDriverExtension extends WebDriverExtension {
|
||||
static get PROVIDERS(): any[] { return _PROVIDERS; }
|
||||
|
||||
private _profilerStarted: boolean;
|
||||
|
||||
constructor(private _driver: WebDriverAdapter) {
|
||||
super();
|
||||
this._profilerStarted = false;
|
||||
}
|
||||
|
||||
gc() { return this._driver.executeScript('window.forceGC()'); }
|
||||
|
||||
timeBegin(name: string): Promise<any> {
|
||||
if (!this._profilerStarted) {
|
||||
this._profilerStarted = true;
|
||||
this._driver.executeScript('window.startProfiler();');
|
||||
}
|
||||
return this._driver.executeScript('window.markStart("' + name + '");');
|
||||
}
|
||||
|
||||
timeEnd(name: string, restartName: string = null): Promise<any> {
|
||||
var script = 'window.markEnd("' + name + '");';
|
||||
if (isPresent(restartName)) {
|
||||
script += 'window.markStart("' + restartName + '");';
|
||||
}
|
||||
return this._driver.executeScript(script);
|
||||
}
|
||||
|
||||
readPerfLog(): Promise<any> {
|
||||
return this._driver.executeAsyncScript('var cb = arguments[0]; window.getProfile(cb);');
|
||||
}
|
||||
|
||||
perfLogFeatures(): PerfLogFeatures { return new PerfLogFeatures({render: true, gc: true}); }
|
||||
|
||||
supports(capabilities: {[key: string]: any}): boolean {
|
||||
return StringWrapper.equals(capabilities['browserName'].toLowerCase(), 'firefox');
|
||||
}
|
||||
}
|
||||
|
||||
var _PROVIDERS = [{
|
||||
provide: FirefoxDriverExtension,
|
||||
useFactory: (driver) => new FirefoxDriverExtension(driver),
|
||||
deps: [WebDriverAdapter]
|
||||
}];
|
@ -0,0 +1,134 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {Json, StringWrapper, isBlank, isPresent} from '@angular/facade/src/lang';
|
||||
|
||||
import {WebDriverAdapter} from '../web_driver_adapter';
|
||||
import {PerfLogFeatures, WebDriverExtension} from '../web_driver_extension';
|
||||
|
||||
export class IOsDriverExtension extends WebDriverExtension {
|
||||
// TODO(tbosch): use static values when our transpiler supports them
|
||||
static get PROVIDERS(): any[] { return _PROVIDERS; }
|
||||
|
||||
constructor(private _driver: WebDriverAdapter) { super(); }
|
||||
|
||||
gc(): Promise<any> { throw new Error('Force GC is not supported on iOS'); }
|
||||
|
||||
timeBegin(name: string): Promise<any> {
|
||||
return this._driver.executeScript(`console.time('${name}');`);
|
||||
}
|
||||
|
||||
timeEnd(name: string, restartName: string = null): Promise<any> {
|
||||
var script = `console.timeEnd('${name}');`;
|
||||
if (isPresent(restartName)) {
|
||||
script += `console.time('${restartName}');`;
|
||||
}
|
||||
return this._driver.executeScript(script);
|
||||
}
|
||||
|
||||
// See https://github.com/WebKit/webkit/tree/master/Source/WebInspectorUI/Versions
|
||||
readPerfLog() {
|
||||
// TODO(tbosch): Bug in IOsDriver: Need to execute at least one command
|
||||
// so that the browser logs can be read out!
|
||||
return this._driver.executeScript('1+1')
|
||||
.then((_) => this._driver.logs('performance'))
|
||||
.then((entries) => {
|
||||
var records = [];
|
||||
entries.forEach(entry => {
|
||||
var message = Json.parse(entry['message'])['message'];
|
||||
if (StringWrapper.equals(message['method'], 'Timeline.eventRecorded')) {
|
||||
records.push(message['params']['record']);
|
||||
}
|
||||
});
|
||||
return this._convertPerfRecordsToEvents(records);
|
||||
});
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
private _convertPerfRecordsToEvents(records: any[], events: any[] = null) {
|
||||
if (isBlank(events)) {
|
||||
events = [];
|
||||
}
|
||||
records.forEach((record) => {
|
||||
var endEvent = null;
|
||||
var type = record['type'];
|
||||
var data = record['data'];
|
||||
var startTime = record['startTime'];
|
||||
var endTime = record['endTime'];
|
||||
|
||||
if (StringWrapper.equals(type, 'FunctionCall') &&
|
||||
(isBlank(data) || !StringWrapper.equals(data['scriptName'], 'InjectedScript'))) {
|
||||
events.push(createStartEvent('script', startTime));
|
||||
endEvent = createEndEvent('script', endTime);
|
||||
} else if (StringWrapper.equals(type, 'Time')) {
|
||||
events.push(createMarkStartEvent(data['message'], startTime));
|
||||
} else if (StringWrapper.equals(type, 'TimeEnd')) {
|
||||
events.push(createMarkEndEvent(data['message'], startTime));
|
||||
} else if (
|
||||
StringWrapper.equals(type, 'RecalculateStyles') || StringWrapper.equals(type, 'Layout') ||
|
||||
StringWrapper.equals(type, 'UpdateLayerTree') || StringWrapper.equals(type, 'Paint') ||
|
||||
StringWrapper.equals(type, 'Rasterize') ||
|
||||
StringWrapper.equals(type, 'CompositeLayers')) {
|
||||
events.push(createStartEvent('render', startTime));
|
||||
endEvent = createEndEvent('render', endTime);
|
||||
}
|
||||
// Note: ios used to support GCEvent up until iOS 6 :-(
|
||||
if (isPresent(record['children'])) {
|
||||
this._convertPerfRecordsToEvents(record['children'], events);
|
||||
}
|
||||
if (isPresent(endEvent)) {
|
||||
events.push(endEvent);
|
||||
}
|
||||
});
|
||||
return events;
|
||||
}
|
||||
|
||||
perfLogFeatures(): PerfLogFeatures { return new PerfLogFeatures({render: true}); }
|
||||
|
||||
supports(capabilities: {[key: string]: any}): boolean {
|
||||
return StringWrapper.equals(capabilities['browserName'].toLowerCase(), 'safari');
|
||||
}
|
||||
}
|
||||
|
||||
function createEvent(ph, name, time, args = null) {
|
||||
var result = {
|
||||
'cat': 'timeline',
|
||||
'name': name,
|
||||
'ts': time,
|
||||
'ph': ph,
|
||||
// The ios protocol does not support the notions of multiple processes in
|
||||
// the perflog...
|
||||
'pid': 'pid0'
|
||||
};
|
||||
if (isPresent(args)) {
|
||||
result['args'] = args;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function createStartEvent(name, time, args = null) {
|
||||
return createEvent('B', name, time, args);
|
||||
}
|
||||
|
||||
function createEndEvent(name, time, args = null) {
|
||||
return createEvent('E', name, time, args);
|
||||
}
|
||||
|
||||
function createMarkStartEvent(name, time) {
|
||||
return createEvent('b', name, time);
|
||||
}
|
||||
|
||||
function createMarkEndEvent(name, time) {
|
||||
return createEvent('e', name, time);
|
||||
}
|
||||
|
||||
var _PROVIDERS = [{
|
||||
provide: IOsDriverExtension,
|
||||
useFactory: (driver) => new IOsDriverExtension(driver),
|
||||
deps: [WebDriverAdapter]
|
||||
}];
|
@ -0,0 +1,3 @@
library benchpress.src.webdriver.selenium_webdriver_adapter;

//no dart implementation
@ -0,0 +1,77 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import * as webdriver from 'selenium-webdriver';
|
||||
|
||||
import {WebDriverAdapter} from '../web_driver_adapter';
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Adapter for the selenium-webdriver.
|
||||
*/
|
||||
export class SeleniumWebDriverAdapter extends WebDriverAdapter {
|
||||
static get PROTRACTOR_BINDINGS(): any[] { return _PROTRACTOR_BINDINGS; }
|
||||
|
||||
constructor(private _driver: any) { super(); }
|
||||
|
||||
/** @internal */
|
||||
private _convertPromise(thenable) {
|
||||
var resolve: (result: any) => void;
|
||||
var reject: (error: any) => void;
|
||||
var promise = new Promise((res, rej) => {
|
||||
resolve = res;
|
||||
reject = rej;
|
||||
});
|
||||
thenable.then(
|
||||
// selenium-webdriver uses its own Node.js context,
// so we need to convert data into objects of this context.
// Previously needed for rtts_asserts.
|
||||
(data) => resolve(convertToLocalProcess(data)), reject);
|
||||
return promise;
|
||||
}
|
||||
|
||||
waitFor(callback): Promise<any> {
|
||||
return this._convertPromise(this._driver.controlFlow().execute(callback));
|
||||
}
|
||||
|
||||
executeScript(script: string): Promise<any> {
|
||||
return this._convertPromise(this._driver.executeScript(script));
|
||||
}
|
||||
|
||||
executeAsyncScript(script: string): Promise<any> {
|
||||
return this._convertPromise(this._driver.executeAsyncScript(script));
|
||||
}
|
||||
|
||||
capabilities(): Promise<any> {
|
||||
return this._convertPromise(
|
||||
this._driver.getCapabilities().then((capsObject) => capsObject.serialize()));
|
||||
}
|
||||
|
||||
logs(type: string): Promise<any> {
|
||||
// Needed as selenium-webdriver does not forward
|
||||
// performance logs correctly via manage().logs
|
||||
return this._convertPromise(this._driver.schedule(
|
||||
new webdriver.Command(webdriver.CommandName.GET_LOG).setParameter('type', type),
|
||||
'WebDriver.manage().logs().get(' + type + ')'));
|
||||
}
|
||||
}
|
||||
|
||||
function convertToLocalProcess(data): Object {
|
||||
var serialized = JSON.stringify(data);
|
||||
if ('' + serialized === 'undefined') {
|
||||
return undefined;
|
||||
}
|
||||
return JSON.parse(serialized);
|
||||
}
|
||||
|
||||
var _PROTRACTOR_BINDINGS = [{
|
||||
provide: WebDriverAdapter,
|
||||
useFactory: () => new SeleniumWebDriverAdapter((<any>global).browser),
|
||||
deps: []
|
||||
}];
|
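A note on convertToLocalProcess above: selenium-webdriver resolves its promises with objects created in its own Node.js context, so results are round-tripped through JSON before being handed back. A small sketch of what that implies, with illustrative values:

// Plain data survives the round-trip unchanged; an undefined result stays undefined,
// and values JSON cannot represent (functions, for example) are dropped.
const caps = convertToLocalProcess({browserName: 'chrome', version: '52'});
// caps => {browserName: 'chrome', version: '52'}
const nothing = convertToLocalProcess(undefined);  // => undefined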
@ -0,0 +1,3 @@
|
||||
library benchpress.test.firefox_extension.conf;
|
||||
|
||||
// empty as we don't have a version for Dart
|
21
modules/@angular/benchpress/test/firefox_extension/conf.ts
Normal file
@ -0,0 +1,21 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
require('core-js');
|
||||
require('reflect-metadata');
|
||||
var testHelper = require('../../src/firefox_extension/lib/test_helper.js');
|
||||
|
||||
exports.config = {
|
||||
specs: ['spec.js', 'sample_benchmark.js'],
|
||||
|
||||
framework: 'jasmine2',
|
||||
|
||||
jasmineNodeOpts: {showColors: true, defaultTimeoutInterval: 1200000},
|
||||
|
||||
getMultiCapabilities: function() { return testHelper.getFirefoxProfileWithExtension(); }
|
||||
};
|
@ -0,0 +1,3 @@
|
||||
library benchpress.test.firefox_extension.parser_util_spec;
|
||||
|
||||
main() {}
|
@ -0,0 +1,100 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {convertPerfProfileToEvents} from 'benchpress/src/firefox_extension/lib/parser_util';
|
||||
|
||||
function assertEventsEqual(actualEvents, expectedEvents) {
|
||||
expect(actualEvents.length).toEqual(expectedEvents.length);
|
||||
for (var i = 0; i < actualEvents.length; ++i) {
|
||||
var actualEvent = actualEvents[i];
|
||||
var expectedEvent = expectedEvents[i];
|
||||
for (var key in actualEvent) {
|
||||
expect(actualEvent[key]).toEqual(expectedEvent[key]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function main() {
|
||||
describe('convertPerfProfileToEvents', function() {
|
||||
it('should convert single instantaneous event', function() {
|
||||
var profileData = {
|
||||
threads: [
|
||||
{samples: [{time: 1, frames: [{location: 'FirefoxDriver.prototype.executeScript'}]}]}
|
||||
]
|
||||
};
|
||||
var perfEvents = convertPerfProfileToEvents(profileData);
|
||||
assertEventsEqual(perfEvents, [{ph: 'X', ts: 1, name: 'script'}]);
|
||||
});
|
||||
|
||||
it('should convert single non-instantaneous event', function() {
|
||||
var profileData = {
|
||||
threads: [{
|
||||
samples: [
|
||||
{time: 1, frames: [{location: 'FirefoxDriver.prototype.executeScript'}]},
|
||||
{time: 2, frames: [{location: 'FirefoxDriver.prototype.executeScript'}]},
|
||||
{time: 100, frames: [{location: 'FirefoxDriver.prototype.executeScript'}]}
|
||||
]
|
||||
}]
|
||||
};
|
||||
var perfEvents = convertPerfProfileToEvents(profileData);
|
||||
assertEventsEqual(
|
||||
perfEvents, [{ph: 'B', ts: 1, name: 'script'}, {ph: 'E', ts: 100, name: 'script'}]);
|
||||
});
|
||||
|
||||
it('should convert multiple instantaneous events', function() {
|
||||
var profileData = {
|
||||
threads: [{
|
||||
samples: [
|
||||
{time: 1, frames: [{location: 'FirefoxDriver.prototype.executeScript'}]},
|
||||
{time: 2, frames: [{location: 'PresShell::Paint'}]}
|
||||
]
|
||||
}]
|
||||
};
|
||||
var perfEvents = convertPerfProfileToEvents(profileData);
|
||||
assertEventsEqual(
|
||||
perfEvents, [{ph: 'X', ts: 1, name: 'script'}, {ph: 'X', ts: 2, name: 'render'}]);
|
||||
});
|
||||
|
||||
it('should convert multiple mixed events', function() {
|
||||
var profileData = {
|
||||
threads: [{
|
||||
samples: [
|
||||
{time: 1, frames: [{location: 'FirefoxDriver.prototype.executeScript'}]},
|
||||
{time: 2, frames: [{location: 'PresShell::Paint'}]},
|
||||
{time: 5, frames: [{location: 'FirefoxDriver.prototype.executeScript'}]},
|
||||
{time: 10, frames: [{location: 'FirefoxDriver.prototype.executeScript'}]}
|
||||
]
|
||||
}]
|
||||
};
|
||||
var perfEvents = convertPerfProfileToEvents(profileData);
|
||||
assertEventsEqual(perfEvents, [
|
||||
{ph: 'X', ts: 1, name: 'script'}, {ph: 'X', ts: 2, name: 'render'},
|
||||
{ph: 'B', ts: 5, name: 'script'}, {ph: 'E', ts: 10, name: 'script'}
|
||||
]);
|
||||
});
|
||||
|
||||
it('should add args to gc events', function() {
|
||||
var profileData = {threads: [{samples: [{time: 1, frames: [{location: 'forceGC'}]}]}]};
|
||||
var perfEvents = convertPerfProfileToEvents(profileData);
|
||||
assertEventsEqual(perfEvents, [{ph: 'X', ts: 1, name: 'gc', args: {usedHeapSize: 0}}]);
|
||||
});
|
||||
|
||||
it('should skip unknown events', function() {
|
||||
var profileData = {
|
||||
threads: [{
|
||||
samples: [
|
||||
{time: 1, frames: [{location: 'FirefoxDriver.prototype.executeScript'}]},
|
||||
{time: 2, frames: [{location: 'foo'}]}
|
||||
]
|
||||
}]
|
||||
};
|
||||
var perfEvents = convertPerfProfileToEvents(profileData);
|
||||
assertEventsEqual(perfEvents, [{ph: 'X', ts: 1, name: 'script'}]);
|
||||
});
|
||||
});
|
||||
}
|
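Taken together, the expectations above pin down the intended mapping: FirefoxDriver.prototype.executeScript frames become 'script' events, PresShell::Paint becomes 'render', forceGC becomes 'gc' with a usedHeapSize arg, a lone sample yields an instant event (ph 'X'), and a run of samples with the same location yields a 'B'/'E' pair. A usage sketch derived from those cases (the timestamp is illustrative):

import {convertPerfProfileToEvents} from 'benchpress/src/firefox_extension/lib/parser_util';

// A single paint sample maps to one instant render event.
const events = convertPerfProfileToEvents(
    {threads: [{samples: [{time: 7, frames: [{location: 'PresShell::Paint'}]}]}]});
// events => [{ph: 'X', ts: 7, name: 'render'}]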
@ -0,0 +1,3 @@
|
||||
library benchpress.test.firefox_extension.sample_benchmark;
|
||||
|
||||
main() {}
|
@ -0,0 +1,40 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
var benchpress = require('../../index.js');
|
||||
var runner = new benchpress.Runner([
|
||||
// use Protractor as the WebDriver client
|
||||
benchpress.SeleniumWebDriverAdapter.PROTRACTOR_BINDINGS,
|
||||
// use RegressionSlopeValidator to validate samples
|
||||
benchpress.Validator.bindTo(benchpress.RegressionSlopeValidator),
|
||||
// use 20 samples to calculate the regression slope
|
||||
benchpress.bind(benchpress.RegressionSlopeValidator.SAMPLE_SIZE).toValue(20),
|
||||
// use the scriptTime metric to calculate the regression slope
|
||||
benchpress.bind(benchpress.RegressionSlopeValidator.METRIC).toValue('scriptTime'),
|
||||
benchpress.bind(benchpress.Options.FORCE_GC).toValue(true)
|
||||
]);
|
||||
|
||||
describe('deep tree baseline', function() {
|
||||
it('should be fast!', function(done) {
|
||||
browser.ignoreSynchronization = true;
|
||||
browser.get('http://localhost:8001/playground/src/benchpress/');
|
||||
|
||||
/*
|
||||
* Tell benchpress to click the buttons to destroy and re-create the tree for each sample.
|
||||
* Benchpress will log the collected metrics after each sample is collected, and will stop
|
||||
* sampling as soon as the calculated regression slope for the last 20 samples is stable.
|
||||
*/
|
||||
runner
|
||||
.sample({
|
||||
id: 'baseline',
|
||||
execute: function() { $('button').click(); },
|
||||
providers: [benchpress.bind(benchpress.Options.SAMPLE_DESCRIPTION).toValue({depth: 9})]
|
||||
})
|
||||
.then(done, done.fail);
|
||||
});
|
||||
});
|
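The promise returned by runner.sample() is only awaited for completion above. Judging from the sampler specs later in this commit, the resolved value carries completeSample and validSample arrays of MeasureValues; a sketch of a variant that inspects the result, under that assumption:

runner
    .sample({
      id: 'baseline',
      execute: function() { $('button').click(); }
    })
    .then(function(state) {
      // Assumed shape (see sampler_spec.ts): arrays of MeasureValues.
      console.log('collected ' + state.completeSample.length + ' samples, ' +
                  state.validSample.length + ' valid');
    })
    .then(done, done.fail);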
@ -0,0 +1,3 @@
|
||||
library benchpress.test.firefox_extension.spec;
|
||||
|
||||
main() {}
|
41
modules/@angular/benchpress/test/firefox_extension/spec.ts
Normal file
@ -0,0 +1,41 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
var assertEventsContainsName = function(events, eventName) {
|
||||
var found = false;
|
||||
for (var i = 0; i < events.length; ++i) {
|
||||
if (events[i].name == eventName) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
expect(found).toBeTruthy();
|
||||
};
|
||||
|
||||
describe('firefox extension', function() {
|
||||
var TEST_URL = 'http://localhost:8001/playground/src/hello_world/index.html';
|
||||
|
||||
it('should measure performance', function() {
|
||||
browser.sleep(3000); // wait for extension to load
|
||||
|
||||
browser.driver.get(TEST_URL);
|
||||
|
||||
browser.executeScript('window.startProfiler()').then(function() {
|
||||
console.log('started measuring perf');
|
||||
});
|
||||
|
||||
browser.executeAsyncScript('setTimeout(arguments[0], 1000);');
|
||||
browser.executeScript('window.forceGC()');
|
||||
|
||||
browser.executeAsyncScript('var cb = arguments[0]; window.getProfile(cb);')
|
||||
.then(function(profile) {
|
||||
assertEventsContainsName(profile, 'gc');
|
||||
assertEventsContainsName(profile, 'script');
|
||||
});
|
||||
});
|
||||
});
|
74
modules/@angular/benchpress/test/metric/multi_metric_spec.ts
Normal file
@ -0,0 +1,74 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {Metric, MultiMetric, ReflectiveInjector} from 'benchpress/common';
|
||||
|
||||
export function main() {
|
||||
function createMetric(ids: any[]) {
|
||||
var m = ReflectiveInjector
|
||||
.resolveAndCreate([
|
||||
ids.map(id => { return {provide: id, useValue: new MockMetric(id)}; }),
|
||||
MultiMetric.createBindings(ids)
|
||||
])
|
||||
.get(MultiMetric);
|
||||
return Promise.resolve(m);
|
||||
}
|
||||
|
||||
describe('multi metric', () => {
|
||||
it('should merge descriptions', inject([AsyncTestCompleter], (async) => {
|
||||
createMetric(['m1', 'm2']).then((m) => {
|
||||
expect(m.describe()).toEqual({'m1': 'describe', 'm2': 'describe'});
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should merge all beginMeasure calls', inject([AsyncTestCompleter], (async) => {
|
||||
createMetric(['m1', 'm2']).then((m) => m.beginMeasure()).then((values) => {
|
||||
expect(values).toEqual(['m1_beginMeasure', 'm2_beginMeasure']);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
[false, true].forEach((restartFlag) => {
|
||||
it(`should merge all endMeasure calls for restart=${restartFlag}`,
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createMetric(['m1', 'm2']).then((m) => m.endMeasure(restartFlag)).then((values) => {
|
||||
expect(values).toEqual(
|
||||
{'m1': {'restart': restartFlag}, 'm2': {'restart': restartFlag}});
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
class MockMetric extends Metric {
|
||||
/** @internal */
|
||||
private _id: string;
|
||||
|
||||
constructor(id) {
|
||||
super();
|
||||
this._id = id;
|
||||
}
|
||||
|
||||
beginMeasure(): Promise<string> { return Promise.resolve(`${this._id}_beginMeasure`); }
|
||||
|
||||
endMeasure(restart: boolean): Promise<{[key: string]: any}> {
|
||||
var result = {};
|
||||
result[this._id] = {'restart': restart};
|
||||
return Promise.resolve(result);
|
||||
}
|
||||
|
||||
describe(): {[key: string]: string} {
|
||||
var result: {[key: string]: string} = {};
|
||||
result[this._id] = 'describe';
|
||||
return result;
|
||||
}
|
||||
}
|
666
modules/@angular/benchpress/test/metric/perflog_metric_spec.ts
Normal file
@ -0,0 +1,666 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {Provider} from '@angular/core';
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {StringMapWrapper} from '@angular/facade/src/collection';
|
||||
import {isBlank, isPresent} from '@angular/facade/src/lang';
|
||||
import {Metric, Options, PerfLogFeatures, PerflogMetric, ReflectiveInjector, WebDriverExtension} from 'benchpress/common';
|
||||
|
||||
import {TraceEventFactory} from '../trace_event_factory';
|
||||
|
||||
export function main() {
|
||||
var commandLog: any[];
|
||||
var eventFactory = new TraceEventFactory('timeline', 'pid0');
|
||||
|
||||
function createMetric(
|
||||
perfLogs, perfLogFeatures,
|
||||
{microMetrics, forceGc, captureFrames, receivedData, requestCount}: {
|
||||
microMetrics?: {[key: string]: string},
|
||||
forceGc?: boolean,
|
||||
captureFrames?: boolean,
|
||||
receivedData?: boolean,
|
||||
requestCount?: boolean
|
||||
} = {}): Metric {
|
||||
commandLog = [];
|
||||
if (isBlank(perfLogFeatures)) {
|
||||
perfLogFeatures =
|
||||
new PerfLogFeatures({render: true, gc: true, frameCapture: true, userTiming: true});
|
||||
}
|
||||
if (isBlank(microMetrics)) {
|
||||
microMetrics = StringMapWrapper.create();
|
||||
}
|
||||
var providers: Provider[] = [
|
||||
Options.DEFAULT_PROVIDERS, PerflogMetric.PROVIDERS,
|
||||
{provide: Options.MICRO_METRICS, useValue: microMetrics}, {
|
||||
provide: PerflogMetric.SET_TIMEOUT,
|
||||
useValue: (fn, millis) => {
|
||||
commandLog.push(['setTimeout', millis]);
|
||||
fn();
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: WebDriverExtension,
|
||||
useValue: new MockDriverExtension(perfLogs, commandLog, perfLogFeatures)
|
||||
}
|
||||
];
|
||||
if (isPresent(forceGc)) {
|
||||
providers.push({provide: Options.FORCE_GC, useValue: forceGc});
|
||||
}
|
||||
if (isPresent(captureFrames)) {
|
||||
providers.push({provide: Options.CAPTURE_FRAMES, useValue: captureFrames});
|
||||
}
|
||||
if (isPresent(receivedData)) {
|
||||
providers.push({provide: Options.RECEIVED_DATA, useValue: receivedData});
|
||||
}
|
||||
if (isPresent(requestCount)) {
|
||||
providers.push({provide: Options.REQUEST_COUNT, useValue: requestCount});
|
||||
}
|
||||
return ReflectiveInjector.resolveAndCreate(providers).get(PerflogMetric);
|
||||
}
|
||||
|
||||
describe('perflog metric', () => {
|
||||
|
||||
function sortedKeys(stringMap) {
|
||||
var res = [];
|
||||
StringMapWrapper.forEach(stringMap, (_, key) => { res.push(key); });
|
||||
res.sort();
|
||||
return res;
|
||||
}
|
||||
|
||||
it('should describe itself based on the perfLogFeatures', () => {
|
||||
expect(sortedKeys(createMetric([[]], new PerfLogFeatures()).describe())).toEqual([
|
||||
'pureScriptTime', 'scriptTime'
|
||||
]);
|
||||
|
||||
expect(
|
||||
sortedKeys(createMetric([[]], new PerfLogFeatures({render: true, gc: false})).describe()))
|
||||
.toEqual(['pureScriptTime', 'renderTime', 'scriptTime']);
|
||||
|
||||
expect(sortedKeys(createMetric([[]], null).describe())).toEqual([
|
||||
'gcAmount', 'gcTime', 'majorGcTime', 'pureScriptTime', 'renderTime', 'scriptTime'
|
||||
]);
|
||||
|
||||
expect(sortedKeys(createMetric([[]], new PerfLogFeatures({render: true, gc: true}), {
|
||||
forceGc: true
|
||||
}).describe()))
|
||||
.toEqual([
|
||||
'forcedGcAmount', 'forcedGcTime', 'gcAmount', 'gcTime', 'majorGcTime', 'pureScriptTime',
|
||||
'renderTime', 'scriptTime'
|
||||
]);
|
||||
|
||||
|
||||
expect(sortedKeys(createMetric([[]], new PerfLogFeatures({userTiming: true}), {
|
||||
receivedData: true,
|
||||
requestCount: true
|
||||
}).describe()))
|
||||
.toEqual(['pureScriptTime', 'receivedData', 'requestCount', 'scriptTime']);
|
||||
});
|
||||
|
||||
it('should describe itself based on micro metrics', () => {
|
||||
var description =
|
||||
createMetric([[]], null, {microMetrics: {'myMicroMetric': 'someDesc'}}).describe();
|
||||
expect(description['myMicroMetric']).toEqual('someDesc');
|
||||
});
|
||||
|
||||
it('should describe itself if frame capture is requested and available', () => {
|
||||
var description = createMetric([[]], new PerfLogFeatures({frameCapture: true}), {
|
||||
captureFrames: true
|
||||
}).describe();
|
||||
expect(description['frameTime.mean']).not.toContain('WARNING');
|
||||
expect(description['frameTime.best']).not.toContain('WARNING');
|
||||
expect(description['frameTime.worst']).not.toContain('WARNING');
|
||||
expect(description['frameTime.smooth']).not.toContain('WARNING');
|
||||
});
|
||||
|
||||
it('should describe itself if frame capture is requested and not available', () => {
|
||||
var description = createMetric([[]], new PerfLogFeatures({frameCapture: false}), {
|
||||
captureFrames: true
|
||||
}).describe();
|
||||
expect(description['frameTime.mean']).toContain('WARNING');
|
||||
expect(description['frameTime.best']).toContain('WARNING');
|
||||
expect(description['frameTime.worst']).toContain('WARNING');
|
||||
expect(description['frameTime.smooth']).toContain('WARNING');
|
||||
});
|
||||
|
||||
describe('beginMeasure', () => {
|
||||
|
||||
it('should not force gc and mark the timeline', inject([AsyncTestCompleter], (async) => {
|
||||
var metric = createMetric([[]], null);
|
||||
metric.beginMeasure().then((_) => {
|
||||
expect(commandLog).toEqual([['timeBegin', 'benchpress0']]);
|
||||
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should force gc and mark the timeline', inject([AsyncTestCompleter], (async) => {
|
||||
var metric = createMetric([[]], null, {forceGc: true});
|
||||
metric.beginMeasure().then((_) => {
|
||||
expect(commandLog).toEqual([['gc'], ['timeBegin', 'benchpress0']]);
|
||||
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
});
|
||||
|
||||
describe('endMeasure', () => {
|
||||
|
||||
it('should mark and aggregate events in between the marks',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
var events = [[
|
||||
eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 4),
|
||||
eventFactory.end('script', 6), eventFactory.markEnd('benchpress0', 10)
|
||||
]];
|
||||
var metric = createMetric(events, null);
|
||||
metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'], ['timeEnd', 'benchpress0', null], 'readPerfLog'
|
||||
]);
|
||||
expect(data['scriptTime']).toBe(2);
|
||||
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should restart timing', inject([AsyncTestCompleter], (async) => {
|
||||
var events = [
|
||||
[
|
||||
eventFactory.markStart('benchpress0', 0),
|
||||
eventFactory.markEnd('benchpress0', 1),
|
||||
eventFactory.markStart('benchpress1', 2),
|
||||
],
|
||||
[eventFactory.markEnd('benchpress1', 3)]
|
||||
];
|
||||
var metric = createMetric(events, null);
|
||||
metric.beginMeasure()
|
||||
.then((_) => metric.endMeasure(true))
|
||||
.then((_) => metric.endMeasure(true))
|
||||
.then((_) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'], ['timeEnd', 'benchpress0', 'benchpress1'],
|
||||
'readPerfLog', ['timeEnd', 'benchpress1', 'benchpress2'], 'readPerfLog'
|
||||
]);
|
||||
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should loop and aggregate until the end mark is present',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
var events = [
|
||||
[eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 1)],
|
||||
[eventFactory.end('script', 2)],
|
||||
[
|
||||
eventFactory.start('script', 3), eventFactory.end('script', 5),
|
||||
eventFactory.markEnd('benchpress0', 10)
|
||||
]
|
||||
];
|
||||
var metric = createMetric(events, null);
|
||||
metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'], ['timeEnd', 'benchpress0', null], 'readPerfLog',
|
||||
['setTimeout', 100], 'readPerfLog', ['setTimeout', 100], 'readPerfLog'
|
||||
]);
|
||||
expect(data['scriptTime']).toBe(3);
|
||||
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should store events after the end mark for the next call',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
var events = [
|
||||
[
|
||||
eventFactory.markStart('benchpress0', 0), eventFactory.markEnd('benchpress0', 1),
|
||||
eventFactory.markStart('benchpress1', 1), eventFactory.start('script', 1),
|
||||
eventFactory.end('script', 2)
|
||||
],
|
||||
[
|
||||
eventFactory.start('script', 3), eventFactory.end('script', 5),
|
||||
eventFactory.markEnd('benchpress1', 6)
|
||||
]
|
||||
];
|
||||
var metric = createMetric(events, null);
|
||||
metric.beginMeasure()
|
||||
.then((_) => metric.endMeasure(true))
|
||||
.then((data) => {
|
||||
expect(data['scriptTime']).toBe(0);
|
||||
return metric.endMeasure(true);
|
||||
})
|
||||
.then((data) => {
|
||||
expect(commandLog).toEqual([
|
||||
['timeBegin', 'benchpress0'], ['timeEnd', 'benchpress0', 'benchpress1'],
|
||||
'readPerfLog', ['timeEnd', 'benchpress1', 'benchpress2'], 'readPerfLog'
|
||||
]);
|
||||
expect(data['scriptTime']).toBe(3);
|
||||
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
describe('with forced gc', () => {
|
||||
var events;
|
||||
beforeEach(() => {
|
||||
events = [[
|
||||
eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 4),
|
||||
eventFactory.end('script', 6), eventFactory.markEnd('benchpress0', 10),
|
||||
eventFactory.markStart('benchpress1', 11),
|
||||
eventFactory.start('gc', 12, {'usedHeapSize': 2500}),
|
||||
eventFactory.end('gc', 15, {'usedHeapSize': 1000}),
|
||||
eventFactory.markEnd('benchpress1', 20)
|
||||
]];
|
||||
});
|
||||
|
||||
it('should measure forced gc', inject([AsyncTestCompleter], (async) => {
|
||||
var metric = createMetric(events, null, {forceGc: true});
|
||||
metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
|
||||
expect(commandLog).toEqual([
|
||||
['gc'], ['timeBegin', 'benchpress0'], ['timeEnd', 'benchpress0', 'benchpress1'],
|
||||
'readPerfLog', ['gc'], ['timeEnd', 'benchpress1', null], 'readPerfLog'
|
||||
]);
|
||||
expect(data['forcedGcTime']).toBe(3);
|
||||
expect(data['forcedGcAmount']).toBe(1.5);
|
||||
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should restart after the forced gc if needed', inject([AsyncTestCompleter], (async) => {
|
||||
var metric = createMetric(events, null, {forceGc: true});
|
||||
metric.beginMeasure().then((_) => metric.endMeasure(true)).then((data) => {
|
||||
expect(commandLog[5]).toEqual(['timeEnd', 'benchpress1', 'benchpress2']);
|
||||
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('aggregation', () => {
|
||||
|
||||
function aggregate(events: any[], {microMetrics, captureFrames, receivedData, requestCount}: {
|
||||
microMetrics?: {[key: string]: string},
|
||||
captureFrames?: boolean,
|
||||
receivedData?: boolean,
|
||||
requestCount?: boolean
|
||||
} = {}) {
|
||||
events.unshift(eventFactory.markStart('benchpress0', 0));
|
||||
events.push(eventFactory.markEnd('benchpress0', 10));
|
||||
var metric = createMetric([events], null, {
|
||||
microMetrics: microMetrics,
|
||||
captureFrames: captureFrames,
|
||||
receivedData: receivedData,
|
||||
requestCount: requestCount
|
||||
});
|
||||
return metric.beginMeasure().then((_) => metric.endMeasure(false));
|
||||
}
|
||||
|
||||
describe('frame metrics', () => {
|
||||
it('should calculate mean frame time', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate(
|
||||
[
|
||||
eventFactory.markStart('frameCapture', 0), eventFactory.instant('frame', 1),
|
||||
eventFactory.instant('frame', 3), eventFactory.instant('frame', 4),
|
||||
eventFactory.markEnd('frameCapture', 5)
|
||||
],
|
||||
{captureFrames: true})
|
||||
.then((data) => {
|
||||
expect(data['frameTime.mean']).toBe(((3 - 1) + (4 - 3)) / 2);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should throw if no start event', inject([AsyncTestCompleter], (async) => {
|
||||
|
||||
aggregate(
|
||||
[eventFactory.instant('frame', 4), eventFactory.markEnd('frameCapture', 5)],
|
||||
{captureFrames: true})
|
||||
.catch((err): any => {
|
||||
expect(() => {
|
||||
throw err;
|
||||
}).toThrowError('missing start event for frame capture');
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should throw if no end event', inject([AsyncTestCompleter], (async) => {
|
||||
|
||||
aggregate(
|
||||
[eventFactory.markStart('frameCapture', 3), eventFactory.instant('frame', 4)],
|
||||
{captureFrames: true})
|
||||
.catch((err): any => {
|
||||
expect(() => { throw err; }).toThrowError('missing end event for frame capture');
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should throw if trying to capture twice', inject([AsyncTestCompleter], (async) => {
|
||||
|
||||
aggregate(
|
||||
[
|
||||
eventFactory.markStart('frameCapture', 3),
|
||||
eventFactory.markStart('frameCapture', 4)
|
||||
],
|
||||
{captureFrames: true})
|
||||
.catch((err): any => {
|
||||
expect(() => {
|
||||
throw err;
|
||||
}).toThrowError('can capture frames only once per benchmark run');
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should throw if trying to capture when frame capture is disabled',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([eventFactory.markStart('frameCapture', 3)]).catch((err) => {
|
||||
expect(() => { throw err; })
|
||||
.toThrowError(
|
||||
'found start event for frame capture, but frame capture was not requested in benchpress');
|
||||
async.done();
|
||||
return null;
|
||||
});
|
||||
}));
|
||||
|
||||
it('should throw if frame capture is enabled, but nothing is captured',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([], {captureFrames: true}).catch((err): any => {
|
||||
expect(() => { throw err; })
|
||||
.toThrowError(
|
||||
'frame capture requested in benchpress, but no start event was found');
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should calculate best and worst frame time', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate(
|
||||
[
|
||||
eventFactory.markStart('frameCapture', 0), eventFactory.instant('frame', 1),
|
||||
eventFactory.instant('frame', 9), eventFactory.instant('frame', 15),
|
||||
eventFactory.instant('frame', 18), eventFactory.instant('frame', 28),
|
||||
eventFactory.instant('frame', 32), eventFactory.markEnd('frameCapture', 10)
|
||||
],
|
||||
{captureFrames: true})
|
||||
.then((data) => {
|
||||
expect(data['frameTime.worst']).toBe(10);
|
||||
expect(data['frameTime.best']).toBe(3);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should calculate percentage of smoothness to be good',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
aggregate(
|
||||
[
|
||||
eventFactory.markStart('frameCapture', 0), eventFactory.instant('frame', 1),
|
||||
eventFactory.instant('frame', 2), eventFactory.instant('frame', 3),
|
||||
eventFactory.markEnd('frameCapture', 4)
|
||||
],
|
||||
{captureFrames: true})
|
||||
.then((data) => {
|
||||
expect(data['frameTime.smooth']).toBe(1.0);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should calculate percentage of smoothness to be bad',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
aggregate(
|
||||
[
|
||||
eventFactory.markStart('frameCapture', 0), eventFactory.instant('frame', 1),
|
||||
eventFactory.instant('frame', 2), eventFactory.instant('frame', 22),
|
||||
eventFactory.instant('frame', 23), eventFactory.instant('frame', 24),
|
||||
eventFactory.markEnd('frameCapture', 4)
|
||||
],
|
||||
{captureFrames: true})
|
||||
.then((data) => {
|
||||
expect(data['frameTime.smooth']).toBe(0.75);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
});
|
||||
|
||||
it('should report a single interval', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([
|
||||
eventFactory.start('script', 0), eventFactory.end('script', 5)
|
||||
]).then((data) => {
|
||||
expect(data['scriptTime']).toBe(5);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should sum up multiple intervals', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([
|
||||
eventFactory.start('script', 0), eventFactory.end('script', 5),
|
||||
eventFactory.start('script', 10), eventFactory.end('script', 17)
|
||||
]).then((data) => {
|
||||
expect(data['scriptTime']).toBe(12);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should ignore not started intervals', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([eventFactory.end('script', 10)]).then((data) => {
|
||||
expect(data['scriptTime']).toBe(0);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should ignore not ended intervals', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([eventFactory.start('script', 10)]).then((data) => {
|
||||
expect(data['scriptTime']).toBe(0);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should ignore nested intervals', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([
|
||||
eventFactory.start('script', 0), eventFactory.start('script', 5),
|
||||
eventFactory.end('script', 10), eventFactory.end('script', 17)
|
||||
]).then((data) => {
|
||||
expect(data['scriptTime']).toBe(17);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should ignore events from a different process than the start mark',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
var otherProcessEventFactory = new TraceEventFactory('timeline', 'pid1');
|
||||
var metric = createMetric(
|
||||
[[
|
||||
eventFactory.markStart('benchpress0', 0), eventFactory.start('script', 0, null),
|
||||
eventFactory.end('script', 5, null),
|
||||
otherProcessEventFactory.start('script', 10, null),
|
||||
otherProcessEventFactory.end('script', 17, null),
|
||||
eventFactory.markEnd('benchpress0', 20)
|
||||
]],
|
||||
null);
|
||||
metric.beginMeasure().then((_) => metric.endMeasure(false)).then((data) => {
|
||||
expect(data['scriptTime']).toBe(5);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should support scriptTime metric', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([
|
||||
eventFactory.start('script', 0), eventFactory.end('script', 5)
|
||||
]).then((data) => {
|
||||
expect(data['scriptTime']).toBe(5);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should support renderTime metric', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([
|
||||
eventFactory.start('render', 0), eventFactory.end('render', 5)
|
||||
]).then((data) => {
|
||||
expect(data['renderTime']).toBe(5);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should support gcTime/gcAmount metric', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([
|
||||
eventFactory.start('gc', 0, {'usedHeapSize': 2500}),
|
||||
eventFactory.end('gc', 5, {'usedHeapSize': 1000})
|
||||
]).then((data) => {
|
||||
expect(data['gcTime']).toBe(5);
|
||||
expect(data['gcAmount']).toBe(1.5);
|
||||
expect(data['majorGcTime']).toBe(0);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should support majorGcTime metric', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([
|
||||
eventFactory.start('gc', 0, {'usedHeapSize': 2500}),
|
||||
eventFactory.end('gc', 5, {'usedHeapSize': 1000, 'majorGc': true})
|
||||
]).then((data) => {
|
||||
expect(data['gcTime']).toBe(5);
|
||||
expect(data['majorGcTime']).toBe(5);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should support pureScriptTime = scriptTime-gcTime-renderTime',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([
|
||||
eventFactory.start('script', 0), eventFactory.start('gc', 1, {'usedHeapSize': 1000}),
|
||||
eventFactory.end('gc', 4, {'usedHeapSize': 0}), eventFactory.start('render', 4),
|
||||
eventFactory.end('render', 5), eventFactory.end('script', 6)
|
||||
]).then((data) => {
|
||||
expect(data['scriptTime']).toBe(6);
|
||||
expect(data['pureScriptTime']).toBe(2);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
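Checking the arithmetic behind that expectation: the script interval spans 0 to 6 (6), the nested gc spans 1 to 4 (3) and the nested render spans 4 to 5 (1), so pureScriptTime = 6 - 3 - 1 = 2, which is exactly the value asserted above.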
|
||||
describe('receivedData', () => {
|
||||
it('should report received data since last navigationStart',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
aggregate(
|
||||
[
|
||||
eventFactory.instant('receivedData', 0, {'encodedDataLength': 1}),
|
||||
eventFactory.instant('navigationStart', 1),
|
||||
eventFactory.instant('receivedData', 2, {'encodedDataLength': 2}),
|
||||
eventFactory.instant('navigationStart', 3),
|
||||
eventFactory.instant('receivedData', 4, {'encodedDataLength': 4}),
|
||||
eventFactory.instant('receivedData', 5, {'encodedDataLength': 8})
|
||||
],
|
||||
{receivedData: true})
|
||||
.then((data) => {
|
||||
expect(data['receivedData']).toBe(12);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
});
|
||||
|
||||
describe('requestCount', () => {
|
||||
it('should report count of requests sent since last navigationStart',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
aggregate(
|
||||
[
|
||||
eventFactory.instant('sendRequest', 0),
|
||||
eventFactory.instant('navigationStart', 1),
|
||||
eventFactory.instant('sendRequest', 2),
|
||||
eventFactory.instant('navigationStart', 3),
|
||||
eventFactory.instant('sendRequest', 4), eventFactory.instant('sendRequest', 5)
|
||||
],
|
||||
{requestCount: true})
|
||||
.then((data) => {
|
||||
expect(data['requestCount']).toBe(2);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
});
|
||||
|
||||
describe('microMetrics', () => {
|
||||
|
||||
it('should report micro metrics', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate(
|
||||
[
|
||||
eventFactory.markStart('mm1', 0),
|
||||
eventFactory.markEnd('mm1', 5),
|
||||
],
|
||||
{microMetrics: {'mm1': 'micro metric 1'}})
|
||||
.then((data) => {
|
||||
expect(data['mm1']).toBe(5.0);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should ignore micro metrics that were not specified',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
aggregate([
|
||||
eventFactory.markStart('mm1', 0),
|
||||
eventFactory.markEnd('mm1', 5),
|
||||
]).then((data) => {
|
||||
expect(data['mm1']).toBeFalsy();
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report micro metric averages', inject([AsyncTestCompleter], (async) => {
|
||||
aggregate(
|
||||
[
|
||||
eventFactory.markStart('mm1*20', 0),
|
||||
eventFactory.markEnd('mm1*20', 5),
|
||||
],
|
||||
{microMetrics: {'mm1': 'micro metric 1'}})
|
||||
.then((data) => {
|
||||
expect(data['mm1']).toBe(5 / 20);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
class MockDriverExtension extends WebDriverExtension {
|
||||
constructor(
|
||||
private _perfLogs: any[], private _commandLog: any[],
|
||||
private _perfLogFeatures: PerfLogFeatures) {
|
||||
super();
|
||||
}
|
||||
|
||||
timeBegin(name): Promise<any> {
|
||||
this._commandLog.push(['timeBegin', name]);
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
|
||||
timeEnd(name, restartName): Promise<any> {
|
||||
this._commandLog.push(['timeEnd', name, restartName]);
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
|
||||
perfLogFeatures(): PerfLogFeatures { return this._perfLogFeatures; }
|
||||
|
||||
readPerfLog(): Promise<any> {
|
||||
this._commandLog.push('readPerfLog');
|
||||
if (this._perfLogs.length > 0) {
|
||||
var next = this._perfLogs[0];
|
||||
this._perfLogs.shift();
|
||||
return Promise.resolve(next);
|
||||
} else {
|
||||
return Promise.resolve([]);
|
||||
}
|
||||
}
|
||||
|
||||
gc(): Promise<any> {
|
||||
this._commandLog.push(['gc']);
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
}
|
83
modules/@angular/benchpress/test/metric/user_metric_spec.ts
Normal file
@ -0,0 +1,83 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {Provider, ReflectiveInjector} from '@angular/core';
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {StringMapWrapper} from '@angular/facade/src/collection';
|
||||
import {Json, isBlank, isPresent} from '@angular/facade/src/lang';
|
||||
import {Injector, Metric, MultiMetric, Options, PerfLogFeatures, PerflogMetric, UserMetric, WebDriverAdapter, WebDriverExtension} from 'benchpress/common';
|
||||
|
||||
export function main() {
|
||||
var wdAdapter: MockDriverAdapter;
|
||||
|
||||
function createMetric(
|
||||
perfLogs, perfLogFeatures,
|
||||
{userMetrics}: {userMetrics?: {[key: string]: string}} = {}): UserMetric {
|
||||
if (isBlank(perfLogFeatures)) {
|
||||
perfLogFeatures =
|
||||
new PerfLogFeatures({render: true, gc: true, frameCapture: true, userTiming: true});
|
||||
}
|
||||
if (isBlank(userMetrics)) {
|
||||
userMetrics = StringMapWrapper.create();
|
||||
}
|
||||
wdAdapter = new MockDriverAdapter();
|
||||
var bindings: Provider[] = [
|
||||
Options.DEFAULT_PROVIDERS, UserMetric.PROVIDERS,
|
||||
{provide: Options.USER_METRICS, useValue: userMetrics},
|
||||
{provide: WebDriverAdapter, useValue: wdAdapter}
|
||||
];
|
||||
return ReflectiveInjector.resolveAndCreate(bindings).get(UserMetric);
|
||||
}
|
||||
|
||||
describe('user metric', () => {
|
||||
|
||||
it('should describe itself based on userMetrics', () => {
|
||||
expect(createMetric([[]], new PerfLogFeatures(), {
|
||||
userMetrics: {'loadTime': 'time to load'}
|
||||
}).describe())
|
||||
.toEqual({'loadTime': 'time to load'});
|
||||
});
|
||||
|
||||
describe('endMeasure', () => {
|
||||
it('should stop measuring when all properties have numeric values',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
let metric = createMetric(
|
||||
[[]], new PerfLogFeatures(),
|
||||
{userMetrics: {'loadTime': 'time to load', 'content': 'time to see content'}});
|
||||
metric.beginMeasure()
|
||||
.then((_) => metric.endMeasure(true))
|
||||
.then((values: {[key: string]: string}) => {
|
||||
expect(values['loadTime']).toBe(25);
|
||||
expect(values['content']).toBe(250);
|
||||
async.done();
|
||||
});
|
||||
|
||||
wdAdapter.data['loadTime'] = 25;
|
||||
// Wait before setting 2nd property.
|
||||
setTimeout(() => { wdAdapter.data['content'] = 250; }, 50);
|
||||
|
||||
}), 600);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
class MockDriverAdapter extends WebDriverAdapter {
|
||||
data: any = {};
|
||||
|
||||
executeScript(script: string): any {
|
||||
// Just handles `return window.propName`; ignores `delete window.propName`.
|
||||
if (script.indexOf('return window.') == 0) {
|
||||
let metricName = script.substring('return window.'.length);
|
||||
return Promise.resolve(this.data[metricName]);
|
||||
} else if (script.indexOf('delete window.') == 0) {
|
||||
return Promise.resolve(null);
|
||||
} else {
|
||||
return Promise.reject(`Unexpected syntax: ${script}`);
|
||||
}
|
||||
}
|
||||
}
|
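The MockDriverAdapter above stands in for the real browser: as its comment notes, the user metric polls `return window.<name>` until every configured property holds a number. On the application side that means the page under test publishes those values itself; a minimal sketch, with the property names taken from the spec above and everything else illustrative:

// In the page under test: expose the values UserMetric was configured to wait for
// ('loadTime' and 'content' in the spec above).
const t0 = performance.now();
document.addEventListener('DOMContentLoaded', () => {
  (window as any)['loadTime'] = performance.now() - t0;
});
// ...and once the main content has been rendered:
(window as any)['content'] = performance.now() - t0;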
@ -0,0 +1,87 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {Date, DateWrapper, isBlank, isPresent} from '@angular/facade/src/lang';
|
||||
import {ConsoleReporter, MeasureValues, ReflectiveInjector, Reporter, SampleDescription, SampleState} from 'benchpress/common';
|
||||
|
||||
export function main() {
|
||||
describe('console reporter', () => {
|
||||
var reporter;
|
||||
var log: string[];
|
||||
|
||||
function createReporter({columnWidth = null, sampleId = null, descriptions = null,
|
||||
metrics = null}: {columnWidth?, sampleId?, descriptions?, metrics?}) {
|
||||
log = [];
|
||||
if (isBlank(descriptions)) {
|
||||
descriptions = [];
|
||||
}
|
||||
if (isBlank(sampleId)) {
|
||||
sampleId = 'null';
|
||||
}
|
||||
var bindings = [
|
||||
ConsoleReporter.PROVIDERS, {
|
||||
provide: SampleDescription,
|
||||
useValue: new SampleDescription(sampleId, descriptions, metrics)
|
||||
},
|
||||
{provide: ConsoleReporter.PRINT, useValue: (line) => log.push(line)}
|
||||
];
|
||||
if (isPresent(columnWidth)) {
|
||||
bindings.push({provide: ConsoleReporter.COLUMN_WIDTH, useValue: columnWidth});
|
||||
}
|
||||
reporter = ReflectiveInjector.resolveAndCreate(bindings).get(ConsoleReporter);
|
||||
}
|
||||
|
||||
it('should print the sample id, description and table header', () => {
|
||||
createReporter({
|
||||
columnWidth: 8,
|
||||
sampleId: 'someSample',
|
||||
descriptions: [{'a': 1, 'b': 2}],
|
||||
metrics: {'m1': 'some desc', 'm2': 'some other desc'}
|
||||
});
|
||||
expect(log).toEqual([
|
||||
'BENCHMARK someSample',
|
||||
'Description:',
|
||||
'- a: 1',
|
||||
'- b: 2',
|
||||
'Metrics:',
|
||||
'- m1: some desc',
|
||||
'- m2: some other desc',
|
||||
'',
|
||||
' m1 | m2',
|
||||
'-------- | --------',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should print a table row', () => {
|
||||
createReporter({columnWidth: 8, metrics: {'a': '', 'b': ''}});
|
||||
log = [];
|
||||
reporter.reportMeasureValues(mv(0, 0, {'a': 1.23, 'b': 2}));
|
||||
expect(log).toEqual([' 1.23 | 2.00']);
|
||||
});
|
||||
|
||||
it('should print the table footer and stats when there is a valid sample', () => {
|
||||
createReporter({columnWidth: 8, metrics: {'a': '', 'b': ''}});
|
||||
log = [];
|
||||
reporter.reportSample([], [mv(0, 0, {'a': 3, 'b': 6}), mv(1, 1, {'a': 5, 'b': 9})]);
|
||||
expect(log).toEqual(['======== | ========', '4.00+-25% | 7.50+-20%']);
|
||||
});
|
||||
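A quick check of the arithmetic behind that footer: each column prints the mean followed by its coefficient of variation (standard deviation over the mean, apparently computed over the full sample). For column a, mean(3, 5) = 4.00 with a deviation of 1, so 1 / 4 = 25%; for column b, mean(6, 9) = 7.50 with a deviation of 1.5, so 1.5 / 7.5 = 20%, matching the '4.00+-25% | 7.50+-20%' line asserted above.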
|
||||
it('should print the coefficient of variation only when it is meaningful', () => {
|
||||
createReporter({columnWidth: 8, metrics: {'a': '', 'b': ''}});
|
||||
log = [];
|
||||
reporter.reportSample([], [mv(0, 0, {'a': 3, 'b': 0}), mv(1, 1, {'a': 5, 'b': 0})]);
|
||||
expect(log).toEqual(['======== | ========', '4.00+-25% | 0.00']);
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function mv(runIndex, time, values) {
|
||||
return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
|
||||
}
|
@ -0,0 +1,71 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {DateWrapper, Json, isPresent} from '@angular/facade/src/lang';
|
||||
import {MeasureValues, Options, ReflectiveInjector, SampleDescription} from 'benchpress/common';
|
||||
import {JsonFileReporter} from 'benchpress/src/reporter/json_file_reporter';
|
||||
|
||||
export function main() {
|
||||
describe('file reporter', () => {
|
||||
var loggedFile;
|
||||
|
||||
function createReporter({sampleId, descriptions, metrics, path}) {
|
||||
var bindings = [
|
||||
JsonFileReporter.PROVIDERS, {
|
||||
provide: SampleDescription,
|
||||
useValue: new SampleDescription(sampleId, descriptions, metrics)
|
||||
},
|
||||
{provide: JsonFileReporter.PATH, useValue: path},
|
||||
{provide: Options.NOW, useValue: () => DateWrapper.fromMillis(1234)}, {
|
||||
provide: Options.WRITE_FILE,
|
||||
useValue: (filename, content) => {
|
||||
loggedFile = {'filename': filename, 'content': content};
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
}
|
||||
];
|
||||
return ReflectiveInjector.resolveAndCreate(bindings).get(JsonFileReporter);
|
||||
}
|
||||
|
||||
it('should write all data into a file', inject([AsyncTestCompleter], (async) => {
|
||||
createReporter({
|
||||
sampleId: 'someId',
|
||||
descriptions: [{'a': 2}],
|
||||
path: 'somePath',
|
||||
metrics: {'script': 'script time'}
|
||||
})
|
||||
.reportSample(
|
||||
[mv(0, 0, {'a': 3, 'b': 6})],
|
||||
[mv(0, 0, {'a': 3, 'b': 6}), mv(1, 1, {'a': 5, 'b': 9})]);
|
||||
var regExp = /somePath\/someId_\d+\.json/;
|
||||
expect(isPresent(loggedFile['filename'].match(regExp))).toBe(true);
|
||||
var parsedContent = Json.parse(loggedFile['content']);
|
||||
expect(parsedContent).toEqual({
|
||||
'description':
|
||||
{'id': 'someId', 'description': {'a': 2}, 'metrics': {'script': 'script time'}},
|
||||
'completeSample': [
|
||||
{'timeStamp': '1970-01-01T00:00:00.000Z', 'runIndex': 0, 'values': {'a': 3, 'b': 6}}
|
||||
],
|
||||
'validSample': [
|
||||
{'timeStamp': '1970-01-01T00:00:00.000Z', 'runIndex': 0, 'values': {'a': 3, 'b': 6}}, {
|
||||
'timeStamp': '1970-01-01T00:00:00.001Z',
|
||||
'runIndex': 1,
|
||||
'values': {'a': 5, 'b': 9}
|
||||
}
|
||||
]
|
||||
});
|
||||
async.done();
|
||||
}));
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function mv(runIndex, time, values) {
|
||||
return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
|
||||
}
|
@ -0,0 +1,68 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {DateWrapper} from '@angular/facade/src/lang';
|
||||
import {MeasureValues, MultiReporter, ReflectiveInjector, Reporter} from 'benchpress/common';
|
||||
|
||||
export function main() {
|
||||
function createReporters(ids: any[]) {
|
||||
var r = ReflectiveInjector
|
||||
.resolveAndCreate([
|
||||
ids.map(id => { return {provide: id, useValue: new MockReporter(id)}; }),
|
||||
MultiReporter.createBindings(ids)
|
||||
])
|
||||
.get(MultiReporter);
|
||||
return Promise.resolve(r);
|
||||
}
|
||||
|
||||
describe('multi reporter', () => {
|
||||
|
||||
it('should reportMeasureValues to all', inject([AsyncTestCompleter], (async) => {
|
||||
var mv = new MeasureValues(0, DateWrapper.now(), {});
|
||||
createReporters(['m1', 'm2']).then((r) => r.reportMeasureValues(mv)).then((values) => {
|
||||
|
||||
expect(values).toEqual([{'id': 'm1', 'values': mv}, {'id': 'm2', 'values': mv}]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should reportSample to all', inject([AsyncTestCompleter], (async) => {
|
||||
var completeSample = [
|
||||
new MeasureValues(0, DateWrapper.now(), {}), new MeasureValues(1, DateWrapper.now(), {})
|
||||
];
|
||||
var validSample = [completeSample[1]];
|
||||
|
||||
createReporters(['m1', 'm2'])
|
||||
.then((r) => r.reportSample(completeSample, validSample))
|
||||
.then((values) => {
|
||||
|
||||
expect(values).toEqual([
|
||||
{'id': 'm1', 'completeSample': completeSample, 'validSample': validSample},
|
||||
{'id': 'm2', 'completeSample': completeSample, 'validSample': validSample}
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
class MockReporter extends Reporter {
|
||||
constructor(private _id: string) { super(); }
|
||||
|
||||
reportMeasureValues(values: MeasureValues): Promise<{[key: string]: any}> {
|
||||
return Promise.resolve({'id': this._id, 'values': values});
|
||||
}
|
||||
|
||||
reportSample(completeSample: MeasureValues[], validSample: MeasureValues[]):
|
||||
Promise<{[key: string]: any}> {
|
||||
return Promise.resolve(
|
||||
{'id': this._id, 'completeSample': completeSample, 'validSample': validSample});
|
||||
}
|
||||
}
|
134
modules/@angular/benchpress/test/runner_spec.ts
Normal file
@ -0,0 +1,134 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {isBlank} from '@angular/facade/src/lang';
|
||||
import {Injector, Metric, Options, ReflectiveInjector, Runner, SampleDescription, SampleState, Sampler, Validator, WebDriverAdapter} from 'benchpress/common';
|
||||
|
||||
export function main() {
|
||||
describe('runner', () => {
|
||||
var injector: ReflectiveInjector;
|
||||
var runner;
|
||||
|
||||
function createRunner(defaultBindings = null): Runner {
|
||||
if (isBlank(defaultBindings)) {
|
||||
defaultBindings = [];
|
||||
}
|
||||
runner = new Runner([
|
||||
defaultBindings, {
|
||||
provide: Sampler,
|
||||
useFactory: (_injector) => {
|
||||
injector = _injector;
|
||||
return new MockSampler();
|
||||
},
|
||||
deps: [Injector]
|
||||
},
|
||||
{provide: Metric, useFactory: () => new MockMetric(), deps: []},
|
||||
{provide: Validator, useFactory: () => new MockValidator(), deps: []},
|
||||
{provide: WebDriverAdapter, useFactory: () => new MockWebDriverAdapter(), deps: []}
|
||||
]);
|
||||
return runner;
|
||||
}
|
||||
|
||||
it('should set SampleDescription.id', inject([AsyncTestCompleter], (async) => {
|
||||
createRunner()
|
||||
.sample({id: 'someId'})
|
||||
.then((_) => injector.get(SampleDescription))
|
||||
.then((desc) => {
|
||||
expect(desc.id).toBe('someId');
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should merge SampleDescription.description', inject([AsyncTestCompleter], (async) => {
|
||||
createRunner([{provide: Options.DEFAULT_DESCRIPTION, useValue: {'a': 1}}])
|
||||
.sample({
|
||||
id: 'someId',
|
||||
providers: [{provide: Options.SAMPLE_DESCRIPTION, useValue: {'b': 2}}]
|
||||
})
|
||||
.then((_) => injector.get(SampleDescription))
|
||||
.then((desc) => {
|
||||
expect(desc.description)
|
||||
.toEqual(
|
||||
{'forceGc': false, 'userAgent': 'someUserAgent', 'a': 1, 'b': 2, 'v': 11});
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should fill SampleDescription.metrics from the Metric',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createRunner()
|
||||
.sample({id: 'someId'})
|
||||
.then((_) => injector.get(SampleDescription))
|
||||
.then((desc) => {
|
||||
|
||||
expect(desc.metrics).toEqual({'m1': 'some metric'});
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should bind Options.EXECUTE', inject([AsyncTestCompleter], (async) => {
|
||||
var execute = () => {};
|
||||
createRunner().sample({id: 'someId', execute: execute}).then((_) => {
|
||||
expect(injector.get(Options.EXECUTE)).toEqual(execute);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should bind Options.PREPARE', inject([AsyncTestCompleter], (async) => {
|
||||
var prepare = () => {};
|
||||
createRunner().sample({id: 'someId', prepare: prepare}).then((_) => {
|
||||
expect(injector.get(Options.PREPARE)).toEqual(prepare);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should bind Options.MICRO_METRICS', inject([AsyncTestCompleter], (async) => {
|
||||
createRunner().sample({id: 'someId', microMetrics: {'a': 'b'}}).then((_) => {
|
||||
expect(injector.get(Options.MICRO_METRICS)).toEqual({'a': 'b'});
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should overwrite bindings per sample call', inject([AsyncTestCompleter], (async) => {
|
||||
createRunner([{provide: Options.DEFAULT_DESCRIPTION, useValue: {'a': 1}}])
|
||||
.sample({
|
||||
id: 'someId',
|
||||
providers: [{provide: Options.DEFAULT_DESCRIPTION, useValue: {'a': 2}}]
|
||||
})
|
||||
.then((_) => injector.get(SampleDescription))
|
||||
.then((desc) => {
|
||||
|
||||
expect(desc.description['a']).toBe(2);
|
||||
async.done();
|
||||
});
|
||||
|
||||
}));
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
class MockWebDriverAdapter extends WebDriverAdapter {
|
||||
executeScript(script): Promise<string> { return Promise.resolve('someUserAgent'); }
|
||||
capabilities() { return null; }
|
||||
}
|
||||
|
||||
class MockValidator extends Validator {
|
||||
constructor() { super(); }
|
||||
describe() { return {'v': 11}; }
|
||||
}
|
||||
|
||||
class MockMetric extends Metric {
|
||||
constructor() { super(); }
|
||||
describe() { return {'m1': 'some metric'}; }
|
||||
}
|
||||
|
||||
class MockSampler extends Sampler {
|
||||
constructor() { super(); }
|
||||
sample(): Promise<SampleState> { return Promise.resolve(new SampleState([], [])); }
|
||||
}
|
302
modules/@angular/benchpress/test/sampler_spec.ts
Normal file
@ -0,0 +1,302 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {Date, DateWrapper, isBlank, isPresent, stringify} from '@angular/facade/src/lang';
|
||||
import {MeasureValues, Metric, Options, ReflectiveInjector, Reporter, Sampler, Validator, WebDriverAdapter} from 'benchpress/common';
|
||||
|
||||
export function main() {
|
||||
var EMPTY_EXECUTE = () => {};
|
||||
|
||||
describe('sampler', () => {
|
||||
var sampler: Sampler;
|
||||
|
||||
function createSampler({driver, metric, reporter, validator, prepare, execute}: {
|
||||
driver?: any,
|
||||
metric?: Metric,
|
||||
reporter?: Reporter,
|
||||
validator?: Validator,
|
||||
prepare?: any,
|
||||
execute?: any
|
||||
} = {}) {
|
||||
var time = 1000;
|
||||
if (isBlank(metric)) {
|
||||
metric = new MockMetric([]);
|
||||
}
|
||||
if (isBlank(reporter)) {
|
||||
reporter = new MockReporter([]);
|
||||
}
|
||||
if (isBlank(driver)) {
|
||||
driver = new MockDriverAdapter([]);
|
||||
}
|
||||
var providers = [
|
||||
Options.DEFAULT_PROVIDERS, Sampler.PROVIDERS, {provide: Metric, useValue: metric},
|
||||
{provide: Reporter, useValue: reporter}, {provide: WebDriverAdapter, useValue: driver},
|
||||
{provide: Options.EXECUTE, useValue: execute}, {provide: Validator, useValue: validator},
|
||||
{provide: Options.NOW, useValue: () => DateWrapper.fromMillis(time++)}
|
||||
];
|
||||
if (isPresent(prepare)) {
|
||||
providers.push({provide: Options.PREPARE, useValue: prepare});
|
||||
}
|
||||
|
||||
sampler = ReflectiveInjector.resolveAndCreate(providers).get(Sampler);
|
||||
}
|
||||
|
||||
it('should call the prepare and execute callbacks using WebDriverAdapter.waitFor',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
var log = [];
|
||||
var count = 0;
|
||||
var driver = new MockDriverAdapter([], (callback) => {
|
||||
var result = callback();
|
||||
log.push(result);
|
||||
return Promise.resolve(result);
|
||||
});
|
||||
createSampler({
|
||||
driver: driver,
|
||||
validator: createCountingValidator(2),
|
||||
prepare: () => { return count++; },
|
||||
execute: () => { return count++; }
|
||||
});
|
||||
sampler.sample().then((_) => {
|
||||
expect(count).toBe(4);
|
||||
expect(log).toEqual([0, 1, 2, 3]);
|
||||
async.done();
|
||||
});
|
||||
|
||||
}));
|
||||
|
||||
it('should call prepare, beginMeasure, execute, endMeasure for every iteration',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
var workCount = 0;
|
||||
var log = [];
|
||||
createSampler({
|
||||
metric: createCountingMetric(log),
|
||||
validator: createCountingValidator(2),
|
||||
prepare: () => { log.push(`p${workCount++}`); },
|
||||
execute: () => { log.push(`w${workCount++}`); }
|
||||
});
|
||||
sampler.sample().then((_) => {
|
||||
expect(log).toEqual([
|
||||
'p0',
|
||||
['beginMeasure'],
|
||||
'w1',
|
||||
['endMeasure', false, {'script': 0}],
|
||||
'p2',
|
||||
['beginMeasure'],
|
||||
'w3',
|
||||
['endMeasure', false, {'script': 1}],
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should call execute, endMeasure for every iteration if there is no prepare callback',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
var log = [];
|
||||
var workCount = 0;
|
||||
createSampler({
|
||||
metric: createCountingMetric(log),
|
||||
validator: createCountingValidator(2),
|
||||
execute: () => { log.push(`w${workCount++}`); },
|
||||
prepare: null
|
||||
});
|
||||
sampler.sample().then((_) => {
|
||||
expect(log).toEqual([
|
||||
['beginMeasure'],
|
||||
'w0',
|
||||
['endMeasure', true, {'script': 0}],
|
||||
'w1',
|
||||
['endMeasure', true, {'script': 1}],
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should only collect metrics for execute and ignore metrics from prepare',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
var scriptTime = 0;
|
||||
var iterationCount = 1;
|
||||
createSampler({
|
||||
validator: createCountingValidator(2),
|
||||
metric: new MockMetric(
|
||||
[],
|
||||
() => {
|
||||
var result = Promise.resolve({'script': scriptTime});
|
||||
scriptTime = 0;
|
||||
return result;
|
||||
}),
|
||||
prepare: () => { scriptTime = 1 * iterationCount; },
|
||||
execute: () => {
|
||||
scriptTime = 10 * iterationCount;
|
||||
iterationCount++;
|
||||
}
|
||||
});
|
||||
sampler.sample().then((state) => {
|
||||
expect(state.completeSample.length).toBe(2);
|
||||
expect(state.completeSample[0]).toEqual(mv(0, 1000, {'script': 10}));
|
||||
expect(state.completeSample[1]).toEqual(mv(1, 1001, {'script': 20}));
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should call the validator for every execution and store the valid sample',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
var log = [];
|
||||
var validSample = [{}];
|
||||
|
||||
createSampler({
|
||||
metric: createCountingMetric(),
|
||||
validator: createCountingValidator(2, validSample, log),
|
||||
execute: EMPTY_EXECUTE
|
||||
});
|
||||
sampler.sample().then((state) => {
|
||||
expect(state.validSample).toBe(validSample);
|
||||
// TODO(tbosch): Why does this fail??
|
||||
// expect(log).toEqual([
|
||||
// ['validate', [{'script': 0}], null],
|
||||
// ['validate', [{'script': 0}, {'script': 1}], validSample]
|
||||
// ]);
|
||||
|
||||
expect(log.length).toBe(2);
|
||||
expect(log[0]).toEqual(['validate', [mv(0, 1000, {'script': 0})], null]);
|
||||
expect(log[1]).toEqual(
|
||||
['validate', [mv(0, 1000, {'script': 0}), mv(1, 1001, {'script': 1})], validSample]);
|
||||
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report the metric values', inject([AsyncTestCompleter], (async) => {
|
||||
var log = [];
|
||||
var validSample = [{}];
|
||||
createSampler({
|
||||
validator: createCountingValidator(2, validSample),
|
||||
metric: createCountingMetric(),
|
||||
reporter: new MockReporter(log),
|
||||
execute: EMPTY_EXECUTE
|
||||
});
|
||||
sampler.sample().then((_) => {
|
||||
// TODO(tbosch): Why does this fail??
|
||||
// expect(log).toEqual([
|
||||
// ['reportMeasureValues', 0, {'script': 0}],
|
||||
// ['reportMeasureValues', 1, {'script': 1}],
|
||||
// ['reportSample', [{'script': 0}, {'script': 1}], validSample]
|
||||
// ]);
|
||||
expect(log.length).toBe(3);
|
||||
expect(log[0]).toEqual(['reportMeasureValues', mv(0, 1000, {'script': 0})]);
|
||||
expect(log[1]).toEqual(['reportMeasureValues', mv(1, 1001, {'script': 1})]);
|
||||
expect(log[2]).toEqual([
|
||||
'reportSample', [mv(0, 1000, {'script': 0}), mv(1, 1001, {'script': 1})], validSample
|
||||
]);
|
||||
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function mv(runIndex, time, values) {
|
||||
return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
|
||||
}
|
||||
|
||||
function createCountingValidator(count, validSample = null, log = null) {
|
||||
return new MockValidator(log, (completeSample) => {
|
||||
count--;
|
||||
if (count === 0) {
|
||||
return isPresent(validSample) ? validSample : completeSample;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function createCountingMetric(log = null) {
|
||||
var scriptTime = 0;
|
||||
return new MockMetric(log, () => { return {'script': scriptTime++}; });
|
||||
}
|
||||
|
||||
class MockDriverAdapter extends WebDriverAdapter {
|
||||
/** @internal */
|
||||
private _log: any[];
|
||||
private _waitFor: Function;
|
||||
constructor(log = null, waitFor = null) {
|
||||
super();
|
||||
if (isBlank(log)) {
|
||||
log = [];
|
||||
}
|
||||
this._log = log;
|
||||
this._waitFor = waitFor;
|
||||
}
|
||||
waitFor(callback: Function): Promise<any> {
|
||||
if (isPresent(this._waitFor)) {
|
||||
return this._waitFor(callback);
|
||||
} else {
|
||||
return Promise.resolve(callback());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class MockValidator extends Validator {
|
||||
/** @internal */
|
||||
private _log: any[];
|
||||
constructor(log = null, private _validate: Function = null) {
|
||||
super();
|
||||
if (isBlank(log)) {
|
||||
log = [];
|
||||
}
|
||||
this._log = log;
|
||||
}
|
||||
validate(completeSample: MeasureValues[]): MeasureValues[] {
|
||||
var stableSample = isPresent(this._validate) ? this._validate(completeSample) : completeSample;
|
||||
this._log.push(['validate', completeSample, stableSample]);
|
||||
return stableSample;
|
||||
}
|
||||
}
|
||||
|
||||
class MockMetric extends Metric {
|
||||
/** @internal */
|
||||
private _log: any[];
|
||||
constructor(log = null, private _endMeasure: Function = null) {
|
||||
super();
|
||||
if (isBlank(log)) {
|
||||
log = [];
|
||||
}
|
||||
this._log = log;
|
||||
}
|
||||
beginMeasure() {
|
||||
this._log.push(['beginMeasure']);
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
endMeasure(restart) {
|
||||
var measureValues = isPresent(this._endMeasure) ? this._endMeasure() : {};
|
||||
this._log.push(['endMeasure', restart, measureValues]);
|
||||
return Promise.resolve(measureValues);
|
||||
}
|
||||
}
|
||||
|
||||
class MockReporter extends Reporter {
|
||||
/** @internal */
|
||||
private _log: any[];
|
||||
constructor(log = null) {
|
||||
super();
|
||||
if (isBlank(log)) {
|
||||
log = [];
|
||||
}
|
||||
this._log = log;
|
||||
}
|
||||
reportMeasureValues(values): Promise<any> {
|
||||
this._log.push(['reportMeasureValues', values]);
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
reportSample(completeSample, validSample): Promise<any> {
|
||||
this._log.push(['reportSample', completeSample, validSample]);
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
}
|
39
modules/@angular/benchpress/test/statistic_spec.ts
Normal file
@ -0,0 +1,39 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {Statistic} from 'benchpress/src/statistic';
|
||||
|
||||
export function main() {
|
||||
describe('statistic', () => {
|
||||
|
||||
it('should calculate the mean', () => {
|
||||
expect(Statistic.calculateMean([])).toBeNaN();
|
||||
expect(Statistic.calculateMean([1, 2, 3])).toBe(2.0);
|
||||
});
|
||||
|
||||
it('should calculate the standard deviation', () => {
|
||||
expect(Statistic.calculateStandardDeviation([], NaN)).toBeNaN();
|
||||
expect(Statistic.calculateStandardDeviation([1], 1)).toBe(0.0);
|
||||
expect(Statistic.calculateStandardDeviation([2, 4, 4, 4, 5, 5, 7, 9], 5)).toBe(2.0);
|
||||
});
|
||||
|
||||
it('should calculate the coefficient of variation', () => {
|
||||
expect(Statistic.calculateCoefficientOfVariation([], NaN)).toBeNaN();
|
||||
expect(Statistic.calculateCoefficientOfVariation([1], 1)).toBe(0.0);
|
||||
expect(Statistic.calculateCoefficientOfVariation([2, 4, 4, 4, 5, 5, 7, 9], 5)).toBe(40.0);
|
||||
});
|
||||
|
||||
it('should calculate the regression slope', () => {
|
||||
expect(Statistic.calculateRegressionSlope([], NaN, [], NaN)).toBeNaN();
|
||||
expect(Statistic.calculateRegressionSlope([1], 1, [2], 2)).toBeNaN();
|
||||
expect(Statistic.calculateRegressionSlope([1, 2], 1.5, [2, 4], 3)).toBe(2.0);
|
||||
});
|
||||
|
||||
});
|
||||
}
|
43
modules/@angular/benchpress/test/trace_event_factory.ts
Normal file
@ -0,0 +1,43 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {isPresent} from '@angular/facade/src/lang';
|
||||
|
||||
export class TraceEventFactory {
|
||||
private _cat: string;
|
||||
private _pid;
|
||||
|
||||
constructor(cat, pid) {
|
||||
this._cat = cat;
|
||||
this._pid = pid;
|
||||
}
|
||||
|
||||
create(ph, name, time, args = null) {
|
||||
var res = {'name': name, 'cat': this._cat, 'ph': ph, 'ts': time, 'pid': this._pid};
|
||||
if (isPresent(args)) {
|
||||
res['args'] = args;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
markStart(name, time) { return this.create('b', name, time); }
|
||||
|
||||
markEnd(name, time) { return this.create('e', name, time); }
|
||||
|
||||
start(name, time, args = null) { return this.create('B', name, time, args); }
|
||||
|
||||
end(name, time, args = null) { return this.create('E', name, time, args); }
|
||||
|
||||
instant(name, time, args = null) { return this.create('i', name, time, args); }
|
||||
|
||||
complete(name, time, duration, args = null) {
|
||||
var res = this.create('X', name, time, args);
|
||||
res['dur'] = duration;
|
||||
return res;
|
||||
}
|
||||
}
|
@ -0,0 +1,65 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {ListWrapper} from '@angular/facade/src/collection';
|
||||
import {Date, DateWrapper} from '@angular/facade/src/lang';
|
||||
import {MeasureValues, ReflectiveInjector, RegressionSlopeValidator} from 'benchpress/common';
|
||||
|
||||
export function main() {
|
||||
describe('regression slope validator', () => {
|
||||
var validator;
|
||||
|
||||
function createValidator({size, metric}) {
|
||||
validator = ReflectiveInjector
|
||||
.resolveAndCreate([
|
||||
RegressionSlopeValidator.PROVIDERS,
|
||||
{provide: RegressionSlopeValidator.METRIC, useValue: metric},
|
||||
{provide: RegressionSlopeValidator.SAMPLE_SIZE, useValue: size}
|
||||
])
|
||||
.get(RegressionSlopeValidator);
|
||||
}
|
||||
|
||||
it('should return sampleSize and metric as description', () => {
|
||||
createValidator({size: 2, metric: 'script'});
|
||||
expect(validator.describe()).toEqual({'sampleSize': 2, 'regressionSlopeMetric': 'script'});
|
||||
});
|
||||
|
||||
it('should return null while the completeSample is smaller than the given size', () => {
|
||||
createValidator({size: 2, metric: 'script'});
|
||||
expect(validator.validate([])).toBe(null);
|
||||
expect(validator.validate([mv(0, 0, {})])).toBe(null);
|
||||
});
|
||||
|
||||
it('should return null while the regression slope is < 0', () => {
|
||||
createValidator({size: 2, metric: 'script'});
|
||||
expect(validator.validate([mv(0, 0, {'script': 2}), mv(1, 1, {'script': 1})])).toBe(null);
|
||||
});
|
||||
|
||||
it('should return the last sampleSize runs when the regression slope is ==0', () => {
|
||||
createValidator({size: 2, metric: 'script'});
|
||||
var sample = [mv(0, 0, {'script': 1}), mv(1, 1, {'script': 1}), mv(2, 2, {'script': 1})];
|
||||
expect(validator.validate(ListWrapper.slice(sample, 0, 2)))
|
||||
.toEqual(ListWrapper.slice(sample, 0, 2));
|
||||
expect(validator.validate(sample)).toEqual(ListWrapper.slice(sample, 1, 3));
|
||||
});
|
||||
|
||||
it('should return the last sampleSize runs when the regression slope is >0', () => {
|
||||
createValidator({size: 2, metric: 'script'});
|
||||
var sample = [mv(0, 0, {'script': 1}), mv(1, 1, {'script': 2}), mv(2, 2, {'script': 3})];
|
||||
expect(validator.validate(ListWrapper.slice(sample, 0, 2)))
|
||||
.toEqual(ListWrapper.slice(sample, 0, 2));
|
||||
expect(validator.validate(sample)).toEqual(ListWrapper.slice(sample, 1, 3));
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function mv(runIndex, time, values) {
|
||||
return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
|
||||
}
|
@ -0,0 +1,50 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {ListWrapper} from '@angular/facade/src/collection';
|
||||
import {Date, DateWrapper} from '@angular/facade/src/lang';
|
||||
import {MeasureValues, ReflectiveInjector, SizeValidator, Validator} from 'benchpress/common';
|
||||
|
||||
export function main() {
|
||||
describe('size validator', () => {
|
||||
var validator;
|
||||
|
||||
function createValidator(size) {
|
||||
validator =
|
||||
ReflectiveInjector
|
||||
.resolveAndCreate(
|
||||
[SizeValidator.PROVIDERS, {provide: SizeValidator.SAMPLE_SIZE, useValue: size}])
|
||||
.get(SizeValidator);
|
||||
}
|
||||
|
||||
it('should return sampleSize as description', () => {
|
||||
createValidator(2);
|
||||
expect(validator.describe()).toEqual({'sampleSize': 2});
|
||||
});
|
||||
|
||||
it('should return null while the completeSample is smaller than the given size', () => {
|
||||
createValidator(2);
|
||||
expect(validator.validate([])).toBe(null);
|
||||
expect(validator.validate([mv(0, 0, {})])).toBe(null);
|
||||
});
|
||||
|
||||
it('should return the last sampleSize runs when it has at least the given size', () => {
|
||||
createValidator(2);
|
||||
var sample = [mv(0, 0, {'a': 1}), mv(1, 1, {'b': 2}), mv(2, 2, {'c': 3})];
|
||||
expect(validator.validate(ListWrapper.slice(sample, 0, 2)))
|
||||
.toEqual(ListWrapper.slice(sample, 0, 2));
|
||||
expect(validator.validate(sample)).toEqual(ListWrapper.slice(sample, 1, 3));
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function mv(runIndex, time, values) {
|
||||
return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
|
||||
}
|
@ -0,0 +1,59 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {StringWrapper, isPresent} from '@angular/facade/src/lang';
|
||||
import {Options, ReflectiveInjector, WebDriverExtension} from 'benchpress/common';
|
||||
|
||||
export function main() {
|
||||
function createExtension(ids: any[], caps) {
|
||||
return new Promise<any>((res, rej) => {
|
||||
try {
|
||||
res(ReflectiveInjector
|
||||
.resolveAndCreate([
|
||||
ids.map((id) => { return {provide: id, useValue: new MockExtension(id)}; }),
|
||||
{provide: Options.CAPABILITIES, useValue: caps}, WebDriverExtension.bindTo(ids)
|
||||
])
|
||||
.get(WebDriverExtension));
|
||||
} catch (e) {
|
||||
rej(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
describe('WebDriverExtension.bindTo', () => {
|
||||
|
||||
it('should bind the extension that matches the capabilities',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(['m1', 'm2', 'm3'], {'browser': 'm2'}).then((m) => {
|
||||
expect(m.id).toEqual('m2');
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should throw if there is no match', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(['m1'], {'browser': 'm2'}).catch((err) => {
|
||||
expect(isPresent(err)).toBe(true);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
});
|
||||
}
|
||||
|
||||
class MockExtension extends WebDriverExtension {
|
||||
id: string;
|
||||
|
||||
constructor(id) {
|
||||
super();
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
supports(capabilities: {[key: string]: any}): boolean {
|
||||
return StringWrapper.equals(capabilities['browser'], this.id);
|
||||
}
|
||||
}
|
@ -0,0 +1,517 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {Json, isBlank} from '@angular/facade/src/lang';
|
||||
import {ChromeDriverExtension, Options, ReflectiveInjector, WebDriverAdapter, WebDriverExtension} from 'benchpress/common';
|
||||
|
||||
import {TraceEventFactory} from '../trace_event_factory';
|
||||
|
||||
export function main() {
|
||||
describe('chrome driver extension', () => {
|
||||
var CHROME44_USER_AGENT =
|
||||
'"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.0 Safari/537.36"';
|
||||
var CHROME45_USER_AGENT =
|
||||
'"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2499.0 Safari/537.36"';
|
||||
|
||||
var log;
|
||||
var extension;
|
||||
|
||||
var blinkEvents = new TraceEventFactory('blink.console', 'pid0');
|
||||
var v8Events = new TraceEventFactory('v8', 'pid0');
|
||||
var v8EventsOtherProcess = new TraceEventFactory('v8', 'pid1');
|
||||
var chromeTimelineEvents =
|
||||
new TraceEventFactory('disabled-by-default-devtools.timeline', 'pid0');
|
||||
var chrome45TimelineEvents = new TraceEventFactory('devtools.timeline', 'pid0');
|
||||
var chromeTimelineV8Events = new TraceEventFactory('devtools.timeline,v8', 'pid0');
|
||||
var chromeBlinkTimelineEvents = new TraceEventFactory('blink,devtools.timeline', 'pid0');
|
||||
var chromeBlinkUserTimingEvents = new TraceEventFactory('blink.user_timing', 'pid0');
|
||||
var benchmarkEvents = new TraceEventFactory('benchmark', 'pid0');
|
||||
var normEvents = new TraceEventFactory('timeline', 'pid0');
|
||||
|
||||
function createExtension(
|
||||
perfRecords = null, userAgent = null,
|
||||
messageMethod = 'Tracing.dataCollected'): WebDriverExtension {
|
||||
if (isBlank(perfRecords)) {
|
||||
perfRecords = [];
|
||||
}
|
||||
if (isBlank(userAgent)) {
|
||||
userAgent = CHROME44_USER_AGENT;
|
||||
}
|
||||
log = [];
|
||||
extension = ReflectiveInjector
|
||||
.resolveAndCreate([
|
||||
ChromeDriverExtension.PROVIDERS, {
|
||||
provide: WebDriverAdapter,
|
||||
useValue: new MockDriverAdapter(log, perfRecords, messageMethod)
|
||||
},
|
||||
{provide: Options.USER_AGENT, useValue: userAgent}
|
||||
])
|
||||
.get(ChromeDriverExtension);
|
||||
return extension;
|
||||
}
|
||||
|
||||
it('should force gc via window.gc()', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension().gc().then((_) => {
|
||||
expect(log).toEqual([['executeScript', 'window.gc()']]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should mark the timeline via console.time()', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension().timeBegin('someName').then((_) => {
|
||||
expect(log).toEqual([['executeScript', `console.time('someName');`]]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should mark the timeline via console.timeEnd()', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension().timeEnd('someName', null).then((_) => {
|
||||
expect(log).toEqual([['executeScript', `console.timeEnd('someName');`]]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should mark the timeline via console.time() and console.timeEnd()',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createExtension().timeEnd('name1', 'name2').then((_) => {
|
||||
expect(log).toEqual(
|
||||
[['executeScript', `console.timeEnd('name1');console.time('name2');`]]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
describe('readPerfLog Chrome44', () => {
|
||||
it('should normalize times to ms and forward ph and pid event properties',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([chromeTimelineEvents.complete('FunctionCall', 1100, 5500, null)])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.complete('script', 1.1, 5.5, null),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should normalize "tdur" to "dur"', inject([AsyncTestCompleter], (async) => {
|
||||
var event = chromeTimelineEvents.create('X', 'FunctionCall', 1100, null);
|
||||
event['tdur'] = 5500;
|
||||
createExtension([event]).readPerfLog().then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.complete('script', 1.1, 5.5, null),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report FunctionCall events as "script"', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([chromeTimelineEvents.start('FunctionCall', 0)])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('script', 0),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report gc', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([
|
||||
chromeTimelineEvents.start('GCEvent', 1000, {'usedHeapSizeBefore': 1000}),
|
||||
chromeTimelineEvents.end('GCEvent', 2000, {'usedHeapSizeAfter': 0}),
|
||||
])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('gc', 1.0, {'usedHeapSize': 1000}),
|
||||
normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': false}),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should ignore major gc from different processes',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([
|
||||
chromeTimelineEvents.start('GCEvent', 1000, {'usedHeapSizeBefore': 1000}),
|
||||
v8EventsOtherProcess.start('majorGC', 1100, null),
|
||||
v8EventsOtherProcess.end('majorGC', 1200, null),
|
||||
chromeTimelineEvents.end('GCEvent', 2000, {'usedHeapSizeAfter': 0}),
|
||||
])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('gc', 1.0, {'usedHeapSize': 1000}),
|
||||
normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': false}),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report major gc', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([
|
||||
chromeTimelineEvents.start('GCEvent', 1000, {'usedHeapSizeBefore': 1000}),
|
||||
v8Events.start('majorGC', 1100, null),
|
||||
v8Events.end('majorGC', 1200, null),
|
||||
chromeTimelineEvents.end('GCEvent', 2000, {'usedHeapSizeAfter': 0}),
|
||||
])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('gc', 1.0, {'usedHeapSize': 1000}),
|
||||
normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': true}),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
['RecalculateStyles', 'Layout', 'UpdateLayerTree', 'Paint'].forEach((recordType) => {
|
||||
it(`should report ${recordType} as "render"`, inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([
|
||||
chromeTimelineEvents.start(recordType, 1234),
|
||||
chromeTimelineEvents.end(recordType, 2345)
|
||||
])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('render', 1.234),
|
||||
normEvents.end('render', 2.345),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
});
|
||||
|
||||
it('should ignore FunctionCalls from webdriver', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([chromeTimelineEvents.start(
|
||||
'FunctionCall', 0, {'data': {'scriptName': 'InjectedScript'}})])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
|
||||
});
|
||||
|
||||
describe('readPerfLog Chrome45', () => {
|
||||
it('should normalize times to ms and forward ph and pid event properties',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(
|
||||
[chromeTimelineV8Events.complete('FunctionCall', 1100, 5500, null)],
|
||||
CHROME45_USER_AGENT)
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.complete('script', 1.1, 5.5, null),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should normalize "tdur" to "dur"', inject([AsyncTestCompleter], (async) => {
|
||||
var event = chromeTimelineV8Events.create('X', 'FunctionCall', 1100, null);
|
||||
event['tdur'] = 5500;
|
||||
createExtension([event], CHROME45_USER_AGENT).readPerfLog().then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.complete('script', 1.1, 5.5, null),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report FunctionCall events as "script"', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([chromeTimelineV8Events.start('FunctionCall', 0)], CHROME45_USER_AGENT)
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('script', 0),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report minor gc', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(
|
||||
[
|
||||
chromeTimelineV8Events.start('MinorGC', 1000, {'usedHeapSizeBefore': 1000}),
|
||||
chromeTimelineV8Events.end('MinorGC', 2000, {'usedHeapSizeAfter': 0}),
|
||||
],
|
||||
CHROME45_USER_AGENT)
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events.length).toEqual(2);
|
||||
expect(events[0]).toEqual(
|
||||
normEvents.start('gc', 1.0, {'usedHeapSize': 1000, 'majorGc': false}));
|
||||
expect(events[1]).toEqual(
|
||||
normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': false}));
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report major gc', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(
|
||||
[
|
||||
chromeTimelineV8Events.start('MajorGC', 1000, {'usedHeapSizeBefore': 1000}),
|
||||
chromeTimelineV8Events.end('MajorGC', 2000, {'usedHeapSizeAfter': 0}),
|
||||
],
|
||||
CHROME45_USER_AGENT)
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events.length).toEqual(2);
|
||||
expect(events[0]).toEqual(
|
||||
normEvents.start('gc', 1.0, {'usedHeapSize': 1000, 'majorGc': true}));
|
||||
expect(events[1]).toEqual(
|
||||
normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': true}));
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
['Layout', 'UpdateLayerTree', 'Paint'].forEach((recordType) => {
|
||||
it(`should report ${recordType} as "render"`, inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(
|
||||
[
|
||||
chrome45TimelineEvents.start(recordType, 1234),
|
||||
chrome45TimelineEvents.end(recordType, 2345)
|
||||
],
|
||||
CHROME45_USER_AGENT)
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('render', 1.234),
|
||||
normEvents.end('render', 2.345),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
});
|
||||
|
||||
it(`should report UpdateLayoutTree as "render"`, inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(
|
||||
[
|
||||
chromeBlinkTimelineEvents.start('UpdateLayoutTree', 1234),
|
||||
chromeBlinkTimelineEvents.end('UpdateLayoutTree', 2345)
|
||||
],
|
||||
CHROME45_USER_AGENT)
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('render', 1.234),
|
||||
normEvents.end('render', 2.345),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
|
||||
|
||||
it('should ignore FunctionCalls from webdriver', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([chromeTimelineV8Events.start(
|
||||
'FunctionCall', 0, {'data': {'scriptName': 'InjectedScript'}})])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should ignore FunctionCalls with empty scriptName',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(
|
||||
[chromeTimelineV8Events.start('FunctionCall', 0, {'data': {'scriptName': ''}})])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report navigationStart', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(
|
||||
[chromeBlinkUserTimingEvents.start('navigationStart', 1234)], CHROME45_USER_AGENT)
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([normEvents.start('navigationStart', 1.234)]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report receivedData', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(
|
||||
[chrome45TimelineEvents.instant(
|
||||
'ResourceReceivedData', 1234, {'data': {'encodedDataLength': 987}})],
|
||||
CHROME45_USER_AGENT)
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual(
|
||||
[normEvents.instant('receivedData', 1.234, {'encodedDataLength': 987})]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report sendRequest', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(
|
||||
[chrome45TimelineEvents.instant(
|
||||
'ResourceSendRequest', 1234,
|
||||
{'data': {'url': 'http://here', 'requestMethod': 'GET'}})],
|
||||
CHROME45_USER_AGENT)
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([normEvents.instant(
|
||||
'sendRequest', 1.234, {'url': 'http://here', 'method': 'GET'})]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
});
|
||||
|
||||
describe('readPerfLog (common)', () => {
|
||||
|
||||
it('should execute a dummy script before reading them',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
// TODO(tbosch): This seems to be a bug in ChromeDriver:
|
||||
// Sometimes it does not report the newest events of the performance log
|
||||
// to the WebDriver client unless a script is executed...
|
||||
createExtension([]).readPerfLog().then((_) => {
|
||||
expect(log).toEqual([['executeScript', '1+1'], ['logs', 'performance']]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
['Rasterize', 'CompositeLayers'].forEach((recordType) => {
|
||||
it(`should report ${recordType} as "render"`, inject([AsyncTestCompleter], (async) => {
|
||||
createExtension(
|
||||
[
|
||||
chromeTimelineEvents.start(recordType, 1234),
|
||||
chromeTimelineEvents.end(recordType, 2345)
|
||||
],
|
||||
CHROME45_USER_AGENT)
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('render', 1.234),
|
||||
normEvents.end('render', 2.345),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
});
|
||||
|
||||
describe('frame metrics', () => {
|
||||
it('should report ImplThreadRenderingStats as frame event',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([benchmarkEvents.instant(
|
||||
'BenchmarkInstrumentation::ImplThreadRenderingStats', 1100,
|
||||
{'data': {'frame_count': 1}})])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.create('i', 'frame', 1.1),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should not report ImplThreadRenderingStats with zero frames',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([benchmarkEvents.instant(
|
||||
'BenchmarkInstrumentation::ImplThreadRenderingStats', 1100,
|
||||
{'data': {'frame_count': 0}})])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should throw when ImplThreadRenderingStats contains more than one frame',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
|
||||
createExtension([benchmarkEvents.instant(
|
||||
'BenchmarkInstrumentation::ImplThreadRenderingStats', 1100,
|
||||
{'data': {'frame_count': 2}})])
|
||||
.readPerfLog()
|
||||
.catch((err): any => {
|
||||
expect(() => {
|
||||
throw err;
|
||||
}).toThrowError('multi-frame render stats not supported');
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
});
|
||||
|
||||
it('should report begin timestamps', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([blinkEvents.create('S', 'someName', 1000)])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([normEvents.markStart('someName', 1.0)]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report end timestamps', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([blinkEvents.create('F', 'someName', 1000)])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([normEvents.markEnd('someName', 1.0)]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should throw an error on buffer overflow', inject([AsyncTestCompleter], (async) => {
|
||||
|
||||
createExtension(
|
||||
[
|
||||
chromeTimelineEvents.start('FunctionCall', 1234),
|
||||
],
|
||||
CHROME45_USER_AGENT, 'Tracing.bufferUsage')
|
||||
.readPerfLog()
|
||||
.catch((err): any => {
|
||||
expect(() => {
|
||||
throw err;
|
||||
}).toThrowError('The DevTools trace buffer filled during the test!');
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should match chrome browsers', () => {
|
||||
expect(createExtension().supports({'browserName': 'chrome'})).toBe(true);
|
||||
|
||||
expect(createExtension().supports({'browserName': 'Chrome'})).toBe(true);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
class MockDriverAdapter extends WebDriverAdapter {
|
||||
constructor(private _log: any[], private _events: any[], private _messageMethod: string) {
|
||||
super();
|
||||
}
|
||||
|
||||
executeScript(script) {
|
||||
this._log.push(['executeScript', script]);
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
|
||||
logs(type) {
|
||||
this._log.push(['logs', type]);
|
||||
if (type === 'performance') {
|
||||
return Promise.resolve(this._events.map((event) => {
|
||||
return {
|
||||
'message': Json.stringify({'message': {'method': this._messageMethod, 'params': event}})
|
||||
};
|
||||
}));
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,189 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
|
||||
import {Json, isBlank, isPresent} from '@angular/facade/src/lang';
|
||||
import {IOsDriverExtension, ReflectiveInjector, WebDriverAdapter, WebDriverExtension} from 'benchpress/common';
|
||||
|
||||
import {TraceEventFactory} from '../trace_event_factory';
|
||||
|
||||
export function main() {
|
||||
describe('ios driver extension', () => {
|
||||
var log;
|
||||
var extension;
|
||||
|
||||
var normEvents = new TraceEventFactory('timeline', 'pid0');
|
||||
|
||||
function createExtension(perfRecords = null): WebDriverExtension {
|
||||
if (isBlank(perfRecords)) {
|
||||
perfRecords = [];
|
||||
}
|
||||
log = [];
|
||||
extension =
|
||||
ReflectiveInjector
|
||||
.resolveAndCreate([
|
||||
IOsDriverExtension.PROVIDERS,
|
||||
{provide: WebDriverAdapter, useValue: new MockDriverAdapter(log, perfRecords)}
|
||||
])
|
||||
.get(IOsDriverExtension);
|
||||
return extension;
|
||||
}
|
||||
|
||||
it('should throw on forcing gc', () => {
|
||||
expect(() => createExtension().gc()).toThrowError('Force GC is not supported on iOS');
|
||||
});
|
||||
|
||||
it('should mark the timeline via console.time()', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension().timeBegin('someName').then((_) => {
|
||||
expect(log).toEqual([['executeScript', `console.time('someName');`]]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should mark the timeline via console.timeEnd()', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension().timeEnd('someName', null).then((_) => {
|
||||
expect(log).toEqual([['executeScript', `console.timeEnd('someName');`]]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should mark the timeline via console.time() and console.timeEnd()',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
createExtension().timeEnd('name1', 'name2').then((_) => {
|
||||
expect(log).toEqual(
|
||||
[['executeScript', `console.timeEnd('name1');console.time('name2');`]]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
describe('readPerfLog', () => {
|
||||
|
||||
it('should execute a dummy script before reading them',
|
||||
inject([AsyncTestCompleter], (async) => {
|
||||
// TODO(tbosch): This seems to be a bug in ChromeDriver:
|
||||
// Sometimes it does not report the newest events of the performance log
|
||||
// to the WebDriver client unless a script is executed...
|
||||
createExtension([]).readPerfLog().then((_) => {
|
||||
expect(log).toEqual([['executeScript', '1+1'], ['logs', 'performance']]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report FunctionCall records as "script"', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([durationRecord('FunctionCall', 1, 5)]).readPerfLog().then((events) => {
|
||||
expect(events).toEqual([normEvents.start('script', 1), normEvents.end('script', 5)]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should ignore FunctionCalls from webdriver', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([internalScriptRecord(1, 5)]).readPerfLog().then((events) => {
|
||||
expect(events).toEqual([]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report begin time', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([timeBeginRecord('someName', 12)]).readPerfLog().then((events) => {
|
||||
expect(events).toEqual([normEvents.markStart('someName', 12)]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should report end timestamps', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([timeEndRecord('someName', 12)]).readPerfLog().then((events) => {
|
||||
expect(events).toEqual([normEvents.markEnd('someName', 12)]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
['RecalculateStyles', 'Layout', 'UpdateLayerTree', 'Paint', 'Rasterize', 'CompositeLayers']
|
||||
.forEach((recordType) => {
|
||||
it(`should report ${recordType}`, inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([durationRecord(recordType, 0, 1)])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('render', 0),
|
||||
normEvents.end('render', 1),
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
});
|
||||
|
||||
|
||||
it('should walk children', inject([AsyncTestCompleter], (async) => {
|
||||
createExtension([durationRecord('FunctionCall', 1, 5, [timeBeginRecord('someName', 2)])])
|
||||
.readPerfLog()
|
||||
.then((events) => {
|
||||
expect(events).toEqual([
|
||||
normEvents.start('script', 1), normEvents.markStart('someName', 2),
|
||||
normEvents.end('script', 5)
|
||||
]);
|
||||
async.done();
|
||||
});
|
||||
}));
|
||||
|
||||
it('should match safari browsers', () => {
|
||||
expect(createExtension().supports({'browserName': 'safari'})).toBe(true);
|
||||
|
||||
expect(createExtension().supports({'browserName': 'Safari'})).toBe(true);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function timeBeginRecord(name, time) {
|
||||
return {'type': 'Time', 'startTime': time, 'data': {'message': name}};
|
||||
}
|
||||
|
||||
function timeEndRecord(name, time) {
|
||||
return {'type': 'TimeEnd', 'startTime': time, 'data': {'message': name}};
|
||||
}
|
||||
|
||||
function durationRecord(type, startTime, endTime, children = null) {
|
||||
if (isBlank(children)) {
|
||||
children = [];
|
||||
}
|
||||
return {'type': type, 'startTime': startTime, 'endTime': endTime, 'children': children};
|
||||
}
|
||||
|
||||
function internalScriptRecord(startTime, endTime) {
|
||||
return {
|
||||
'type': 'FunctionCall',
|
||||
'startTime': startTime,
|
||||
'endTime': endTime,
|
||||
'data': {'scriptName': 'InjectedScript'}
|
||||
};
|
||||
}
|
||||
|
||||
class MockDriverAdapter extends WebDriverAdapter {
|
||||
constructor(private _log: any[], private _perfRecords: any[]) { super(); }
|
||||
|
||||
executeScript(script) {
|
||||
this._log.push(['executeScript', script]);
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
|
||||
logs(type) {
|
||||
this._log.push(['logs', type]);
|
||||
if (type === 'performance') {
|
||||
return Promise.resolve(this._perfRecords.map(function(record) {
|
||||
return {
|
||||
'message': Json.stringify(
|
||||
{'message': {'method': 'Timeline.eventRecorded', 'params': {'record': record}}})
|
||||
};
|
||||
}));
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
23
modules/@angular/benchpress/tsconfig.json
Normal file
@ -0,0 +1,23 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"baseUrl": ".",
|
||||
"declaration": true,
|
||||
"experimentalDecorators": true,
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"outDir": "../../dist/all/benchpress",
|
||||
"noImplicitAny": false,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"paths": {
|
||||
"selenium-webdriver": ["../../node_modules/@types/selenium-webdriver/index.d.ts"],
|
||||
"rxjs/*": ["../../node_modules/rxjs/*"],
|
||||
"@angular/*": ["../../dist/all/@angular/*"],
|
||||
"benchpress/*": ["./*"]
|
||||
},
|
||||
"rootDir": ".",
|
||||
"inlineSourceMap": true,
|
||||
"lib": ["es5", "dom", "es2015.promise", "es2015.collection", "es2015.iterable"],
|
||||
"skipDefaultLibCheck": true,
|
||||
"target": "es5"
|
||||
}
|
||||
}
|
14
modules/@angular/benchpress/types.d.ts
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
// This file contains all ambient imports needed to compile the modules/ source code
|
||||
|
||||
/// <reference path="../../node_modules/@types/node/index.d.ts" />
|
||||
/// <reference path="../../node_modules/@types/jasmine/index.d.ts" />
|
||||
/// <reference path="../../node_modules/@types/protractor/index.d.ts" />
|
||||
/// <reference path="../../node_modules/@types/selenium-webdriver/index.d.ts" />
|