@@ -11,50 +11,52 @@ import {AsyncTestCompleter, describe, expect, inject, it} from '@angular/core/te
import {Injector, Metric, MultiMetric} from '../../index';

(function() {
function createMetric(ids: any[]) {
const m = Injector
.create([
ids.map(id => ({provide: id, useValue: new MockMetric(id)})),
MultiMetric.provideWith(ids)
])
.get<MultiMetric>(MultiMetric);
return Promise.resolve(m);
}
function createMetric(ids: any[]) {
const m = Injector
.create([
ids.map(id => ({provide: id, useValue: new MockMetric(id)})),
MultiMetric.provideWith(ids)
])
.get<MultiMetric>(MultiMetric);
return Promise.resolve(m);
}

describe('multi metric', () => {
it('should merge descriptions', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createMetric(['m1', 'm2']).then((m) => {
expect(m.describe()).toEqual({'m1': 'describe', 'm2': 'describe'});
async.done();
});
}));
describe('multi metric', () => {
it('should merge descriptions', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createMetric(['m1', 'm2']).then((m) => {
expect(m.describe()).toEqual({'m1': 'describe', 'm2': 'describe'});
async.done();
});
}));

it('should merge all beginMeasure calls',
it('should merge all beginMeasure calls',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createMetric(['m1', 'm2']).then((m) => m.beginMeasure()).then((values) => {
expect(values).toEqual(['m1_beginMeasure', 'm2_beginMeasure']);
async.done();
});
}));

[false, true].forEach((restartFlag) => {
it(`should merge all endMeasure calls for restart=${restartFlag}`,
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createMetric(['m1', 'm2']).then((m) => m.beginMeasure()).then((values) => {
expect(values).toEqual(['m1_beginMeasure', 'm2_beginMeasure']);
createMetric(['m1', 'm2']).then((m) => m.endMeasure(restartFlag)).then((values) => {
expect(values).toEqual({'m1': {'restart': restartFlag}, 'm2': {'restart': restartFlag}});
async.done();
});
}));

[false, true].forEach((restartFlag) => {
it(`should merge all endMeasure calls for restart=${restartFlag}`,
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createMetric(['m1', 'm2']).then((m) => m.endMeasure(restartFlag)).then((values) => {
expect(values).toEqual(
{'m1': {'restart': restartFlag}, 'm2': {'restart': restartFlag}});
async.done();
});
}));
});

});
});
})();

class MockMetric extends Metric {
constructor(private _id: string) { super(); }
constructor(private _id: string) {
super();
}

beginMeasure(): Promise<string> { return Promise.resolve(`${this._id}_beginMeasure`); }
beginMeasure(): Promise<string> {
return Promise.resolve(`${this._id}_beginMeasure`);
}

endMeasure(restart: boolean): Promise<{[key: string]: any}> {
const result: {[key: string]: any} = {};
File diff suppressed because it is too large
@@ -12,55 +12,55 @@ import {AsyncTestCompleter, describe, expect, inject, it} from '@angular/core/te
import {Options, PerfLogEvent, PerfLogFeatures, UserMetric, WebDriverAdapter} from '../../index';

(function() {
let wdAdapter: MockDriverAdapter;
let wdAdapter: MockDriverAdapter;

function createMetric(
perfLogs: PerfLogEvent[], perfLogFeatures: PerfLogFeatures,
{userMetrics}: {userMetrics?: {[key: string]: string}} = {}): UserMetric {
if (!perfLogFeatures) {
perfLogFeatures =
new PerfLogFeatures({render: true, gc: true, frameCapture: true, userTiming: true});
}
if (!userMetrics) {
userMetrics = {};
}
wdAdapter = new MockDriverAdapter();
const providers: StaticProvider[] = [
Options.DEFAULT_PROVIDERS, UserMetric.PROVIDERS,
{provide: Options.USER_METRICS, useValue: userMetrics},
{provide: WebDriverAdapter, useValue: wdAdapter}
];
return Injector.create(providers).get(UserMetric);
function createMetric(
perfLogs: PerfLogEvent[], perfLogFeatures: PerfLogFeatures,
{userMetrics}: {userMetrics?: {[key: string]: string}} = {}): UserMetric {
if (!perfLogFeatures) {
perfLogFeatures =
new PerfLogFeatures({render: true, gc: true, frameCapture: true, userTiming: true});
}
if (!userMetrics) {
userMetrics = {};
}
wdAdapter = new MockDriverAdapter();
const providers: StaticProvider[] = [
Options.DEFAULT_PROVIDERS, UserMetric.PROVIDERS,
{provide: Options.USER_METRICS, useValue: userMetrics},
{provide: WebDriverAdapter, useValue: wdAdapter}
];
return Injector.create(providers).get(UserMetric);
}

describe('user metric', () => {

it('should describe itself based on userMetrics', () => {
expect(createMetric([[]], new PerfLogFeatures(), {
userMetrics: {'loadTime': 'time to load'}
}).describe())
.toEqual({'loadTime': 'time to load'});
});

describe('endMeasure', () => {
it('should stop measuring when all properties have numeric values',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
const metric = createMetric(
[[]], new PerfLogFeatures(),
{userMetrics: {'loadTime': 'time to load', 'content': 'time to see content'}});
metric.beginMeasure().then(() => metric.endMeasure(true)).then(values => {
expect(values['loadTime']).toBe(25);
expect(values['content']).toBe(250);
async.done();
});

wdAdapter.data['loadTime'] = 25;
// Wait before setting 2nd property.
setTimeout(() => { wdAdapter.data['content'] = 250; }, 50);

}), 600);
});
describe('user metric', () => {
it('should describe itself based on userMetrics', () => {
expect(createMetric([[]], new PerfLogFeatures(), {
userMetrics: {'loadTime': 'time to load'}
}).describe())
.toEqual({'loadTime': 'time to load'});
});

describe('endMeasure', () => {
it('should stop measuring when all properties have numeric values',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
const metric = createMetric(
[[]], new PerfLogFeatures(),
{userMetrics: {'loadTime': 'time to load', 'content': 'time to see content'}});
metric.beginMeasure().then(() => metric.endMeasure(true)).then(values => {
expect(values['loadTime']).toBe(25);
expect(values['content']).toBe(250);
async.done();
});

wdAdapter.data['loadTime'] = 25;
// Wait before setting 2nd property.
setTimeout(() => {
wdAdapter.data['content'] = 250;
}, 50);
}), 600);
});
});
})();

class MockDriverAdapter extends WebDriverAdapter {
@@ -18,10 +18,10 @@ import {ConsoleReporter, Injector, MeasureValues, SampleDescription} from '../..

function createReporter(
{columnWidth = null, sampleId = null, descriptions = null, metrics = null}: {
columnWidth?: number | null,
sampleId?: string | null,
descriptions?: {[key: string]: any}[] | null,
metrics?: {[key: string]: any} | null
columnWidth?: number|null,
sampleId?: string|null,
descriptions?: {[key: string]: any}[]|null,
metrics?: {[key: string]: any}|null
}) {
log = [];
if (!descriptions) {

@@ -33,7 +33,7 @@ import {ConsoleReporter, Injector, MeasureValues, SampleDescription} from '../..
const providers: StaticProvider[] = [
ConsoleReporter.PROVIDERS, {
provide: SampleDescription,
useValue: new SampleDescription(sampleId, descriptions, metrics !)
useValue: new SampleDescription(sampleId, descriptions, metrics!)
},
{provide: ConsoleReporter.PRINT, useValue: (line: string) => log.push(line)}
];

@@ -84,7 +84,6 @@ import {ConsoleReporter, Injector, MeasureValues, SampleDescription} from '../..
reporter.reportSample([], [mv(0, 0, {'a': 3, 'b': 0}), mv(1, 1, {'a': 5, 'b': 0})]);
expect(log).toEqual(['======== | ========', '4.00+-25% | 0.00']);
});

});
}
@@ -62,16 +62,12 @@ import {Injector, JsonFileReporter, MeasureValues, Options, SampleDescription} f
{'timeStamp': '1970-01-01T00:00:00.000Z', 'runIndex': 0, 'values': {'a': 3, 'b': 6}}
],
'validSample': [
{'timeStamp': '1970-01-01T00:00:00.000Z', 'runIndex': 0, 'values': {'a': 3, 'b': 6}}, {
'timeStamp': '1970-01-01T00:00:00.001Z',
'runIndex': 1,
'values': {'a': 5, 'b': 9}
}
{'timeStamp': '1970-01-01T00:00:00.000Z', 'runIndex': 0, 'values': {'a': 3, 'b': 6}},
{'timeStamp': '1970-01-01T00:00:00.001Z', 'runIndex': 1, 'values': {'a': 5, 'b': 9}}
]
});
async.done();
}));

});
}
@@ -11,50 +11,48 @@ import {AsyncTestCompleter, describe, expect, inject, it} from '@angular/core/te
import {Injector, MeasureValues, MultiReporter, Reporter} from '../../index';

(function() {
function createReporters(ids: any[]) {
const r = Injector
.create([
ids.map(id => ({provide: id, useValue: new MockReporter(id)})),
MultiReporter.provideWith(ids)
])
.get<MultiReporter>(MultiReporter);
return Promise.resolve(r);
}
function createReporters(ids: any[]) {
const r = Injector
.create([
ids.map(id => ({provide: id, useValue: new MockReporter(id)})),
MultiReporter.provideWith(ids)
])
.get<MultiReporter>(MultiReporter);
return Promise.resolve(r);
}

describe('multi reporter', () => {
describe('multi reporter', () => {
it('should reportMeasureValues to all',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
const mv = new MeasureValues(0, new Date(), {});
createReporters(['m1', 'm2']).then((r) => r.reportMeasureValues(mv)).then((values) => {
expect(values).toEqual([{'id': 'm1', 'values': mv}, {'id': 'm2', 'values': mv}]);
async.done();
});
}));

it('should reportMeasureValues to all',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
const mv = new MeasureValues(0, new Date(), {});
createReporters(['m1', 'm2']).then((r) => r.reportMeasureValues(mv)).then((values) => {
it('should reportSample to call', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
const completeSample =
[new MeasureValues(0, new Date(), {}), new MeasureValues(1, new Date(), {})];
const validSample = [completeSample[1]];

expect(values).toEqual([{'id': 'm1', 'values': mv}, {'id': 'm2', 'values': mv}]);
async.done();
});
}));

it('should reportSample to call', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
const completeSample =
[new MeasureValues(0, new Date(), {}), new MeasureValues(1, new Date(), {})];
const validSample = [completeSample[1]];

createReporters(['m1', 'm2'])
.then((r) => r.reportSample(completeSample, validSample))
.then((values) => {

expect(values).toEqual([
{'id': 'm1', 'completeSample': completeSample, 'validSample': validSample},
{'id': 'm2', 'completeSample': completeSample, 'validSample': validSample}
]);
async.done();
});
}));

});
createReporters(['m1', 'm2'])
.then((r) => r.reportSample(completeSample, validSample))
.then((values) => {
expect(values).toEqual([
{'id': 'm1', 'completeSample': completeSample, 'validSample': validSample},
{'id': 'm2', 'completeSample': completeSample, 'validSample': validSample}
]);
async.done();
});
}));
});
})();

class MockReporter extends Reporter {
constructor(private _id: string) { super(); }
constructor(private _id: string) {
super();
}

reportMeasureValues(values: MeasureValues): Promise<{[key: string]: any}> {
return Promise.resolve({'id': this._id, 'values': values});
@@ -8,7 +8,7 @@

import {AsyncTestCompleter, describe, expect, inject, it} from '@angular/core/testing/src/testing_internal';

import {Injector, Metric, Options, Runner, SampleDescription, SampleState, Sampler, Validator, WebDriverAdapter} from '../index';
import {Injector, Metric, Options, Runner, SampleDescription, Sampler, SampleState, Validator, WebDriverAdapter} from '../index';

{
describe('runner', () => {

@@ -68,7 +68,6 @@ import {Injector, Metric, Options, Runner, SampleDescription, SampleState, Sampl
.sample({id: 'someId'})
.then((_) => injector.get(SampleDescription))
.then((desc) => {

expect(desc.metrics).toEqual({'m1': 'some metric'});
async.done();
});

@@ -109,32 +108,45 @@ import {Injector, Metric, Options, Runner, SampleDescription, SampleState, Sampl
})
.then((_) => injector.get(SampleDescription))
.then((desc) => {

expect(desc.description['a']).toBe(2);
async.done();
});

}));

});
}

class MockWebDriverAdapter extends WebDriverAdapter {
executeScript(script: string): Promise<string> { return Promise.resolve('someUserAgent'); }
capabilities(): Promise<Map<string, any>> { return null !; }
executeScript(script: string): Promise<string> {
return Promise.resolve('someUserAgent');
}
capabilities(): Promise<Map<string, any>> {
return null!;
}
}

class MockValidator extends Validator {
constructor() { super(); }
describe() { return {'v': 11}; }
constructor() {
super();
}
describe() {
return {'v': 11};
}
}

class MockMetric extends Metric {
constructor() { super(); }
describe() { return {'m1': 'some metric'}; }
constructor() {
super();
}
describe() {
return {'m1': 'some metric'};
}
}

class MockSampler extends Sampler {
constructor() { super(null !, null !, null !, null !, null !, null !, null !); }
sample(): Promise<SampleState> { return Promise.resolve(new SampleState([], [])); }
constructor() {
super(null!, null!, null!, null!, null!, null!, null!);
}
sample(): Promise<SampleState> {
return Promise.resolve(new SampleState([], []));
}
}
@@ -67,7 +67,6 @@ import {Injector, MeasureValues, Metric, Options, Reporter, Sampler, Validator,
expect(log).toEqual([0, 1, 2, 3]);
async.done();
});

}));

it('should call prepare, beginMeasure, execute, endMeasure for every iteration',

@@ -77,8 +76,12 @@ import {Injector, MeasureValues, Metric, Options, Reporter, Sampler, Validator,
createSampler({
metric: createCountingMetric(log),
validator: createCountingValidator(2),
prepare: () => { log.push(`p${workCount++}`); },
execute: () => { log.push(`w${workCount++}`); }
prepare: () => {
log.push(`p${workCount++}`);
},
execute: () => {
log.push(`w${workCount++}`);
}
});
sampler.sample().then((_) => {
expect(log).toEqual([

@@ -102,7 +105,9 @@ import {Injector, MeasureValues, Metric, Options, Reporter, Sampler, Validator,
createSampler({
metric: createCountingMetric(log),
validator: createCountingValidator(2),
execute: () => { log.push(`w${workCount++}`); },
execute: () => {
log.push(`w${workCount++}`);
},
prepare: null
});
sampler.sample().then((_) => {

@@ -130,7 +135,9 @@ import {Injector, MeasureValues, Metric, Options, Reporter, Sampler, Validator,
scriptTime = 0;
return result;
}),
prepare: () => { scriptTime = 1 * iterationCount; },
prepare: () => {
scriptTime = 1 * iterationCount;
},
execute: () => {
scriptTime = 10 * iterationCount;
iterationCount++;

@@ -147,7 +154,7 @@ import {Injector, MeasureValues, Metric, Options, Reporter, Sampler, Validator,
it('should call the validator for every execution and store the valid sample',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
const log: any[] = [];
const validSample = [mv(null !, null !, {})];
const validSample = [mv(null!, null!, {})];

createSampler({
metric: createCountingMetric(),

@@ -174,7 +181,7 @@ import {Injector, MeasureValues, Metric, Options, Reporter, Sampler, Validator,
it('should report the metric values',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
const log: any[] = [];
const validSample = [mv(null !, null !, {})];
const validSample = [mv(null!, null!, {})];
createSampler({
validator: createCountingValidator(2, validSample),
metric: createCountingMetric(),

@@ -198,7 +205,6 @@ import {Injector, MeasureValues, Metric, Options, Reporter, Sampler, Validator,
async.done();
});
}));

});
}

@@ -223,7 +229,9 @@ function createCountingMetric(log: any[] = []) {
}

class MockDriverAdapter extends WebDriverAdapter {
constructor(private _log: any[] = [], private _waitFor: Function|null = null) { super(); }
constructor(private _log: any[] = [], private _waitFor: Function|null = null) {
super();
}
waitFor(callback: Function): Promise<any> {
if (this._waitFor != null) {
return this._waitFor(callback);

@@ -235,7 +243,9 @@ class MockDriverAdapter extends WebDriverAdapter {

class MockValidator extends Validator {
constructor(private _log: any[] = [], private _validate: Function|null = null) { super(); }
constructor(private _log: any[] = [], private _validate: Function|null = null) {
super();
}
validate(completeSample: MeasureValues[]): MeasureValues[] {
const stableSample = this._validate != null ? this._validate(completeSample) : completeSample;
this._log.push(['validate', completeSample, stableSample]);

@@ -244,7 +254,9 @@ class MockValidator extends Validator {
}

class MockMetric extends Metric {
constructor(private _log: any[] = [], private _endMeasure: Function|null = null) { super(); }
constructor(private _log: any[] = [], private _endMeasure: Function|null = null) {
super();
}
beginMeasure() {
this._log.push(['beginMeasure']);
return Promise.resolve(null);

@@ -257,7 +269,9 @@ class MockMetric extends Metric {
}

class MockReporter extends Reporter {
constructor(private _log: any[] = []) { super(); }
constructor(private _log: any[] = []) {
super();
}
reportMeasureValues(values: MeasureValues): Promise<any> {
this._log.push(['reportMeasureValues', values]);
return Promise.resolve(null);
@@ -11,7 +11,6 @@ import {Statistic} from '../src/statistic';

{
describe('statistic', () => {

it('should calculate the mean', () => {
expect(Statistic.calculateMean([])).toBeNaN();
expect(Statistic.calculateMean([1, 2, 3])).toBe(2.0);

@@ -34,6 +33,5 @@ import {Statistic} from '../src/statistic';
expect(Statistic.calculateRegressionSlope([1], 1, [2], 2)).toBeNaN();
expect(Statistic.calculateRegressionSlope([1, 2], 1.5, [2, 4], 3)).toBe(2.0);
});

});
}
@@ -20,13 +20,21 @@ export class TraceEventFactory {
return res;
}

markStart(name: string, time: number) { return this.create('B', name, time); }
markStart(name: string, time: number) {
return this.create('B', name, time);
}

markEnd(name: string, time: number) { return this.create('E', name, time); }
markEnd(name: string, time: number) {
return this.create('E', name, time);
}

start(name: string, time: number, args: any = null) { return this.create('B', name, time, args); }
start(name: string, time: number, args: any = null) {
return this.create('B', name, time, args);
}

end(name: string, time: number, args: any = null) { return this.create('E', name, time, args); }
end(name: string, time: number, args: any = null) {
return this.create('E', name, time, args);
}

instant(name: string, time: number, args: any = null) {
return this.create('I', name, time, args);
@@ -53,7 +53,6 @@ import {Injector, MeasureValues, RegressionSlopeValidator} from '../../index';
expect(validator.validate(sample.slice(0, 2))).toEqual(sample.slice(0, 2));
expect(validator.validate(sample)).toEqual(sample.slice(1, 3));
});

});
}

@@ -39,7 +39,6 @@ import {Injector, MeasureValues, SizeValidator} from '../../index';
expect(validator.validate(sample.slice(0, 2))).toEqual(sample.slice(0, 2));
expect(validator.validate(sample)).toEqual(sample.slice(1, 3));
});

});
}
@@ -11,44 +11,45 @@ import {AsyncTestCompleter, describe, expect, inject, it} from '@angular/core/te
import {Injector, Options, WebDriverExtension} from '../index';

(function() {
function createExtension(ids: any[], caps: any) {
return new Promise<any>((res, rej) => {
try {
res(Injector
.create([
ids.map((id) => ({provide: id, useValue: new MockExtension(id)})),
{provide: Options.CAPABILITIES, useValue: caps},
WebDriverExtension.provideFirstSupported(ids)
])
.get(WebDriverExtension));
} catch (e) {
rej(e);
}
});
}

describe('WebDriverExtension.provideFirstSupported', () => {

it('should provide the extension that matches the capabilities',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createExtension(['m1', 'm2', 'm3'], {'browser': 'm2'}).then((m) => {
expect(m.id).toEqual('m2');
async.done();
});
}));

it('should throw if there is no match',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createExtension(['m1'], {'browser': 'm2'}).catch((err) => {
expect(err != null).toBe(true);
async.done();
});
}));
function createExtension(ids: any[], caps: any) {
return new Promise<any>((res, rej) => {
try {
res(Injector
.create([
ids.map((id) => ({provide: id, useValue: new MockExtension(id)})),
{provide: Options.CAPABILITIES, useValue: caps},
WebDriverExtension.provideFirstSupported(ids)
])
.get(WebDriverExtension));
} catch (e) {
rej(e);
}
});
}

describe('WebDriverExtension.provideFirstSupported', () => {
it('should provide the extension that matches the capabilities',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createExtension(['m1', 'm2', 'm3'], {'browser': 'm2'}).then((m) => {
expect(m.id).toEqual('m2');
async.done();
});
}));

it('should throw if there is no match',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createExtension(['m1'], {'browser': 'm2'}).catch((err) => {
expect(err != null).toBe(true);
async.done();
});
}));
});
})();

class MockExtension extends WebDriverExtension {
constructor(public id: string) { super(); }
constructor(public id: string) {
super();
}

supports(capabilities: {[key: string]: any}): boolean {
return capabilities['browser'] === this.id;
@@ -32,7 +32,7 @@ import {TraceEventFactory} from '../trace_event_factory';
const normEvents = new TraceEventFactory('timeline', 'pid0');

function createExtension(
perfRecords: any[] | null = null, userAgent: string | null = null,
perfRecords: any[]|null = null, userAgent: string|null = null,
messageMethod = 'Tracing.dataCollected'): WebDriverExtension {
if (!perfRecords) {
perfRecords = [];

@@ -97,9 +97,9 @@ import {TraceEventFactory} from '../trace_event_factory';
it('should mark the timeline via performance.mark() with start and end of a test',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createExtension().timeEnd('name1', 'name2').then((_) => {
expect(log).toEqual([[
'executeScript', `performance.mark('name1-bpend');performance.mark('name2-bpstart');`
]]);
expect(log).toEqual([
['executeScript', `performance.mark('name1-bpend');performance.mark('name2-bpstart');`]
]);
async.done();
});
}));

@@ -173,7 +173,8 @@ import {TraceEventFactory} from '../trace_event_factory';
[
chromeTimelineV8Events.start('MajorGC', 1000, {'usedHeapSizeBefore': 1000}),
chromeTimelineV8Events.end('MajorGC', 2000, {'usedHeapSizeAfter': 0}),
], )
],
)
.readPerfLog()
.then((events) => {
expect(events.length).toEqual(2);

@@ -192,7 +193,8 @@ import {TraceEventFactory} from '../trace_event_factory';
[
chrome45TimelineEvents.start(recordType, 1234),
chrome45TimelineEvents.end(recordType, 2345)
], )
],
)
.readPerfLog()
.then((events) => {
expect(events).toEqual([

@@ -210,7 +212,8 @@ import {TraceEventFactory} from '../trace_event_factory';
[
chromeBlinkTimelineEvents.start('UpdateLayoutTree', 1234),
chromeBlinkTimelineEvents.end('UpdateLayoutTree', 2345)
], )
],
)
.readPerfLog()
.then((events) => {
expect(events).toEqual([

@@ -254,8 +257,10 @@ import {TraceEventFactory} from '../trace_event_factory';
}));

it('should report receivedData', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createExtension([chrome45TimelineEvents.instant(
'ResourceReceivedData', 1234, {'data': {'encodedDataLength': 987}})], )
createExtension(
[chrome45TimelineEvents.instant(
'ResourceReceivedData', 1234, {'data': {'encodedDataLength': 987}})],
)
.readPerfLog()
.then((events) => {
expect(events).toEqual(

@@ -265,9 +270,11 @@ import {TraceEventFactory} from '../trace_event_factory';
}));

it('should report sendRequest', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
createExtension([chrome45TimelineEvents.instant(
'ResourceSendRequest', 1234,
{'data': {'url': 'http://here', 'requestMethod': 'GET'}})], )
createExtension(
[chrome45TimelineEvents.instant(
'ResourceSendRequest', 1234,
{'data': {'url': 'http://here', 'requestMethod': 'GET'}})],
)
.readPerfLog()
.then((events) => {
expect(events).toEqual([normEvents.instant(

@@ -277,7 +284,6 @@ import {TraceEventFactory} from '../trace_event_factory';
}));

describe('readPerfLog (common)', () => {

it('should execute a dummy script before reading them',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
// TODO(tbosch): This seems to be a bug in ChromeDriver:

@@ -296,7 +302,8 @@ import {TraceEventFactory} from '../trace_event_factory';
[
chromeTimelineEvents.start(recordType, 1234),
chromeTimelineEvents.end(recordType, 2345)
], )
],
)
.readPerfLog()
.then((events) => {
expect(events).toEqual([

@@ -337,7 +344,6 @@ import {TraceEventFactory} from '../trace_event_factory';

it('should throw when ImplThreadRenderingStats contains more than one frame',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {

createExtension([benchmarkEvents.instant(
'BenchmarkInstrumentation::ImplThreadRenderingStats', 1100,
{'data': {'frame_count': 2}})])

@@ -349,7 +355,6 @@ import {TraceEventFactory} from '../trace_event_factory';
async.done();
});
}));

});

it('should report begin timestamps',

@@ -374,7 +379,6 @@ import {TraceEventFactory} from '../trace_event_factory';

it('should throw an error on buffer overflow',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {

createExtension(
[
chromeTimelineEvents.start('FunctionCall', 1234),

@@ -394,9 +398,7 @@ import {TraceEventFactory} from '../trace_event_factory';

expect(createExtension().supports({'browserName': 'Chrome'})).toBe(true);
});

});

});
}

@@ -419,7 +421,7 @@ class MockDriverAdapter extends WebDriverAdapter {
{'message': {'method': this._messageMethod, 'params': event}}, null, 2)
})));
} else {
return null !;
return null!;
}
}
}
@@ -8,7 +8,7 @@

import {AsyncTestCompleter, describe, expect, inject, it} from '@angular/core/testing/src/testing_internal';

import {IOsDriverExtension, Injector, WebDriverAdapter, WebDriverExtension} from '../../index';
import {Injector, IOsDriverExtension, WebDriverAdapter, WebDriverExtension} from '../../index';
import {TraceEventFactory} from '../trace_event_factory';

{

@@ -18,7 +18,7 @@ import {TraceEventFactory} from '../trace_event_factory';

const normEvents = new TraceEventFactory('timeline', 'pid0');

function createExtension(perfRecords: any[] | null = null): WebDriverExtension {
function createExtension(perfRecords: any[]|null = null): WebDriverExtension {
if (!perfRecords) {
perfRecords = [];
}

@@ -63,7 +63,6 @@ import {TraceEventFactory} from '../trace_event_factory';
}));

describe('readPerfLog', () => {

it('should execute a dummy script before reading them',
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => {
// TODO(tbosch): This seems to be a bug in ChromeDriver:

@@ -140,9 +139,7 @@ import {TraceEventFactory} from '../trace_event_factory';

expect(createExtension().supports({'browserName': 'Safari'})).toBe(true);
});

});

});
}

@@ -155,7 +152,7 @@ function timeEndRecord(name: string, time: number) {
}

function durationRecord(
type: string, startTime: number, endTime: number, children: any[] | null = null) {
type: string, startTime: number, endTime: number, children: any[]|null = null) {
if (!children) {
children = [];
}

@@ -172,7 +169,9 @@ function internalScriptRecord(startTime: number, endTime: number) {
}

class MockDriverAdapter extends WebDriverAdapter {
constructor(private _log: any[], private _perfRecords: any[]) { super(); }
constructor(private _log: any[], private _perfRecords: any[]) {
super();
}

executeScript(script: string) {
this._log.push(['executeScript', script]);

@@ -190,7 +189,7 @@ class MockDriverAdapter extends WebDriverAdapter {
};
}));
} else {
return null !;
return null!;
}
}
}