chore: move to clang-format 1.0.17.

clang-format 1.0.17 substantially improves formatting for fat arrow functions
and array literal detection. It also fixes a number of minor formatting issues.
Author: Martin Probst
Date: 2015-06-03 13:42:57 -07:00
parent f74d7727ca
commit f999d5a156
56 changed files with 494 additions and 525 deletions
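
As a rough illustration of the fat-arrow change, here is a hypothetical TypeScript snippet (the names are made up for illustration and do not come from the diff below): the old binary broke before the arrow body's opening brace and over-indented it, while 1.0.17 keeps the brace on the `=>` line. The array-literal change is visible below in the `var events = [[...]]` hunks, which 1.0.17 now breaks across separate lines for the outer and inner brackets.

// Hypothetical helper names, for illustration only.
const log: number[] = [];
function handle(value: string): void {}
function registerCallback(cb: (value: string, delayMs: number) => void): void {}

// clang-format 1.0.16 broke before the arrow body's brace:
registerCallback((value, delayMs) =>
                     {
                       log.push(delayMs);
                       handle(value);
                     });

// clang-format 1.0.17 keeps the brace on the arrow line:
registerCallback((value, delayMs) => {
  log.push(delayMs);
  handle(value);
});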

@@ -44,11 +44,10 @@ export function main() {
PerflogMetric.BINDINGS,
bind(Options.MICRO_METRICS).toValue(microMetrics),
bind(PerflogMetric.SET_TIMEOUT)
.toValue((fn, millis) =>
{
ListWrapper.push(commandLog, ['setTimeout', millis]);
fn();
}),
.toValue((fn, millis) => {
ListWrapper.push(commandLog, ['setTimeout', millis]);
fn();
}),
bind(WebDriverExtension)
.toValue(new MockDriverExtension(perfLogs, commandLog, perfLogFeatures))
];
@@ -125,12 +124,14 @@ export function main() {
it('should mark and aggregate events in between the marks',
inject([AsyncTestCompleter], (async) => {
var events = [[
eventFactory.markStart('benchpress0', 0),
eventFactory.start('script', 4),
eventFactory.end('script', 6),
eventFactory.markEnd('benchpress0', 10)
]];
var events = [
[
eventFactory.markStart('benchpress0', 0),
eventFactory.start('script', 4),
eventFactory.end('script', 6),
eventFactory.markEnd('benchpress0', 10)
]
];
var metric = createMetric(events);
metric.beginMeasure()
.then((_) => metric.endMeasure(false))
@@ -224,11 +225,10 @@ export function main() {
var metric = createMetric(events);
metric.beginMeasure()
.then((_) => metric.endMeasure(true))
.then((data) =>
{
expect(data['scriptTime']).toBe(0);
return metric.endMeasure(true)
})
.then((data) => {
expect(data['scriptTime']).toBe(0);
return metric.endMeasure(true)
})
.then((data) => {
expect(commandLog)
.toEqual([
@@ -247,16 +247,18 @@ export function main() {
describe('with forced gc', () => {
var events;
beforeEach(() => {
events = [[
eventFactory.markStart('benchpress0', 0),
eventFactory.start('script', 4),
eventFactory.end('script', 6),
eventFactory.markEnd('benchpress0', 10),
eventFactory.markStart('benchpress1', 11),
eventFactory.start('gc', 12, {'usedHeapSize': 2500}),
eventFactory.end('gc', 15, {'usedHeapSize': 1000}),
eventFactory.markEnd('benchpress1', 20)
]];
events = [
[
eventFactory.markStart('benchpress0', 0),
eventFactory.start('script', 4),
eventFactory.end('script', 6),
eventFactory.markEnd('benchpress0', 10),
eventFactory.markStart('benchpress1', 11),
eventFactory.start('gc', 12, {'usedHeapSize': 2500}),
eventFactory.end('gc', 15, {'usedHeapSize': 1000}),
eventFactory.markEnd('benchpress1', 20)
]
];
});
it('should measure forced gc', inject([AsyncTestCompleter], (async) => {
@@ -359,14 +361,16 @@ export function main() {
it('should ignore events from different processed as the start mark',
inject([AsyncTestCompleter], (async) => {
var otherProcessEventFactory = new TraceEventFactory('timeline', 'pid1');
var metric = createMetric([[
eventFactory.markStart('benchpress0', 0),
eventFactory.start('script', 0, null),
eventFactory.end('script', 5, null),
otherProcessEventFactory.start('script', 10, null),
otherProcessEventFactory.end('script', 17, null),
eventFactory.markEnd('benchpress0', 20)
]]);
var metric = createMetric([
[
eventFactory.markStart('benchpress0', 0),
eventFactory.start('script', 0, null),
eventFactory.end('script', 5, null),
otherProcessEventFactory.start('script', 10, null),
otherProcessEventFactory.end('script', 17, null),
eventFactory.markEnd('benchpress0', 20)
]
]);
metric.beginMeasure()
.then((_) => metric.endMeasure(false))
.then((data) => {

@@ -30,14 +30,13 @@ export function main() {
bind(JsonFileReporter.PATH).toValue(path),
bind(Options.NOW).toValue(() => DateWrapper.fromMillis(1234)),
bind(Options.WRITE_FILE)
.toValue((filename, content) =>
{
loggedFile = {
'filename': filename,
'content': content
};
return PromiseWrapper.resolve(null);
})
.toValue((filename, content) => {
loggedFile = {
'filename': filename,
'content': content
};
return PromiseWrapper.resolve(null);
})
];
return Injector.resolveAndCreate(bindings).get(JsonFileReporter);
}
@@ -58,11 +57,13 @@ export function main() {
.toEqual({
"description":
{"id": "someId", "description": {"a": 2}, "metrics": {"script": "script time"}},
"completeSample": [{
"timeStamp": "1970-01-01T00:00:00.000Z",
"runIndex": 0,
"values": {"a": 3, "b": 6}
}],
"completeSample": [
{
"timeStamp": "1970-01-01T00:00:00.000Z",
"runIndex": 0,
"values": {"a": 3, "b": 6}
}
],
"validSample": [
{
"timeStamp": "1970-01-01T00:00:00.000Z",

@@ -36,12 +36,12 @@ export function main() {
}
runner = new Runner([
defaultBindings,
bind(Sampler).toFactory((_injector) =>
{
injector = _injector;
return new MockSampler();
},
[Injector]),
bind(Sampler).toFactory(
(_injector) => {
injector = _injector;
return new MockSampler();
},
[Injector]),
bind(Metric).toFactory(() => new MockMetric(), []),
bind(Validator).toFactory(() => new MockValidator(), []),
bind(WebDriverAdapter).toFactory(() => new MockWebDriverAdapter(), [])
@@ -61,8 +61,7 @@ export function main() {
it('should merge SampleDescription.description', inject([AsyncTestCompleter], (async) => {
createRunner([bind(Options.DEFAULT_DESCRIPTION).toValue({'a': 1})])
.sample(
{id: 'someId', bindings: [bind(Options.SAMPLE_DESCRIPTION).toValue({'b': 2})]})
.sample({id: 'someId', bindings: [bind(Options.SAMPLE_DESCRIPTION).toValue({'b': 2})]})
.then((_) => injector.asyncGet(SampleDescription))
.then((desc) => {

@@ -151,18 +151,17 @@ export function main() {
var iterationCount = 1;
createSampler({
validator: createCountingValidator(2),
metric: new MockMetric([], () =>
{
var result = PromiseWrapper.resolve({'script': scriptTime});
scriptTime = 0;
return result;
}),
metric: new MockMetric([],
() => {
var result = PromiseWrapper.resolve({'script': scriptTime});
scriptTime = 0;
return result;
}),
prepare: () => { scriptTime = 1 * iterationCount; },
execute: () =>
{
scriptTime = 10 * iterationCount;
iterationCount++;
}
execute: () => {
scriptTime = 10 * iterationCount;
iterationCount++;
}
});
sampler.sample().then((state) => {
expect(state.completeSample.length).toBe(2);
@@ -192,11 +191,8 @@ export function main() {
expect(log.length).toBe(2);
expect(log[0]).toEqual(['validate', [mv(0, 1000, {'script': 0})], null]);
expect(log[1]).toEqual([
'validate',
[mv(0, 1000, {'script': 0}), mv(1, 1001, {'script': 1})],
validSample
]);
expect(log[1]).toEqual(
['validate', [mv(0, 1000, {'script': 0}), mv(1, 1001, {'script': 1})], validSample]);
async.done();
});

@@ -135,8 +135,10 @@ export function main() {
}));
it('should ignore FunctionCalls from webdriver', inject([AsyncTestCompleter], (async) => {
createExtension([chromeTimelineEvents.start(
'FunctionCall', 0, {'data': {'scriptName': 'InjectedScript'}})])
createExtension([
chromeTimelineEvents.start('FunctionCall', 0,
{'data': {'scriptName': 'InjectedScript'}})
])
.readPerfLog()
.then((events) => {
expect(events).toEqual([]);
@@ -270,8 +272,7 @@ class MockDriverAdapter extends WebDriverAdapter {
if (type === 'performance') {
return PromiseWrapper.resolve(this._events.map((event) => {
return {
'message':
Json.stringify({'message': {'method': this._messageMethod, 'params': event}})
'message': Json.stringify({'message': {'method': this._messageMethod, 'params': event}})
};
}));
} else {