style(ngcc): reformat of ngcc after clang update (#36447)
PR Close #36447
committed by Kara Erickson
parent bfa55162de
commit 74b7a8eaf5
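
This commit applies a newer clang-format configuration across the ngcc test sources; it is a formatting-only change with no behavioral differences. As an orientation aid, the snippet below is a minimal, hypothetical TypeScript sketch (the variable names are invented, not taken from the diff) of the formatting patterns that recur in the hunks that follow:

// Illustrative sketch only: recurring patterns from this reformat, shown on invented names.

// 1. `async` arrow functions now take a space before the parameter list:
//    async() => {...}  becomes  async () => {...}
const run = async () => undefined;

// 2. Object literals lose their inner padding spaces:
//    { entryPoint, processDts: true }  becomes  {entryPoint, processDts: true}
const task = {formatProperty: 'prop-0', processDts: true};

// 3. The non-null assertion operator is attached directly to its expression:
//    queue.getNextTask() !  becomes  queue.getNextTask()!
const maybe: string|null = 'x';
const definitely = maybe!;

// 4. Arrow bodies holding a single statement are expanded onto multiple lines:
//    () => { throw new Error('analyze error'); }  becomes the block form below
const errorFn: () => never = () => {
  throw new Error('analyze error');
};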
@@ -51,7 +51,7 @@ describe('ClusterExecutor', () => {
 describe('(on cluster master)', () => {
 beforeEach(() => runAsClusterMaster(true));
 
-it('should log debug info about the executor', async() => {
+it('should log debug info about the executor', async () => {
 const anyFn: () => any = () => undefined;
 await executor.execute(anyFn, anyFn);
 
@@ -60,7 +60,7 @@ describe('ClusterExecutor', () => {
 ]);
 });
 
-it('should delegate to `ClusterMaster#run()`', async() => {
+it('should delegate to `ClusterMaster#run()`', async () => {
 const analyzeEntryPointsSpy = jasmine.createSpy('analyzeEntryPoints');
 const createCompilerFnSpy = jasmine.createSpy('createCompilerFn');
 
@@ -75,13 +75,13 @@ describe('ClusterExecutor', () => {
 });
 
 it('should call LockFile.write() and LockFile.remove() if master runner completes successfully',
-async() => {
+async () => {
 const anyFn: () => any = () => undefined;
 await executor.execute(anyFn, anyFn);
 expect(lockFileLog).toEqual(['write()', 'remove()']);
 });
 
-it('should call LockFile.write() and LockFile.remove() if master runner fails', async() => {
+it('should call LockFile.write() and LockFile.remove() if master runner fails', async () => {
 const anyFn: () => any = () => undefined;
 masterRunSpy.and.returnValue(Promise.reject(new Error('master runner error')));
 let error = '';
@@ -94,7 +94,7 @@ describe('ClusterExecutor', () => {
 expect(lockFileLog).toEqual(['write()', 'remove()']);
 });
 
-it('should not call master runner if LockFile.write() fails', async() => {
+it('should not call master runner if LockFile.write() fails', async () => {
 const anyFn: () => any = () => undefined;
 spyOn(mockLockFile, 'write').and.callFake(() => {
 lockFileLog.push('write()');
@@ -114,7 +114,7 @@ describe('ClusterExecutor', () => {
 expect(masterRunSpy).not.toHaveBeenCalled();
 });
 
-it('should fail if LockFile.remove() fails', async() => {
+it('should fail if LockFile.remove() fails', async () => {
 const anyFn: () => any = () => undefined;
 spyOn(mockLockFile, 'remove').and.callFake(() => {
 lockFileLog.push('remove()');
@@ -139,14 +139,14 @@ describe('ClusterExecutor', () => {
 describe('(on cluster worker)', () => {
 beforeEach(() => runAsClusterMaster(false));
 
-it('should not log debug info about the executor', async() => {
+it('should not log debug info about the executor', async () => {
 const anyFn: () => any = () => undefined;
 await executor.execute(anyFn, anyFn);
 
 expect(mockLogger.logs.debug).toEqual([]);
 });
 
-it('should delegate to `ClusterWorker#run()`', async() => {
+it('should delegate to `ClusterWorker#run()`', async () => {
 const analyzeEntryPointsSpy = jasmine.createSpy('analyzeEntryPoints');
 const createCompilerFnSpy = jasmine.createSpy('createCompilerFn');
 
@@ -160,7 +160,7 @@ describe('ClusterExecutor', () => {
 expect(createCompilerFnSpy).toHaveBeenCalledWith(jasmine.any(Function));
 });
 
-it('should not call LockFile.write() or LockFile.remove()', async() => {
+it('should not call LockFile.write() or LockFile.remove()', async () => {
 const anyFn: () => any = () => undefined;
 await executor.execute(anyFn, anyFn);
 expect(lockFileLog).toEqual([]);

@@ -39,8 +39,9 @@ runInEachFileSystem(() => {
 isMaster => describe(`(on cluster ${isMaster ? 'master' : 'worker'})`, () => {
 beforeEach(() => runAsClusterMaster(isMaster));
 
-it('should return a `PackageJsonUpdate` instance',
-() => { expect(updater.createUpdate()).toEqual(jasmine.any(PackageJsonUpdate)); });
+it('should return a `PackageJsonUpdate` instance', () => {
+expect(updater.createUpdate()).toEqual(jasmine.any(PackageJsonUpdate));
+});
 
 it('should wire up the `PackageJsonUpdate` with its `writeChanges()` method', () => {
 const writeChangesSpy = spyOn(updater, 'writeChanges');

@@ -60,11 +60,9 @@ describe('ClusterWorker', () => {
 
 onTaskCompleted(null as any, TaskProcessingOutcome.Processed, null);
 expect(processSendSpy).toHaveBeenCalledTimes(1);
-expect(processSendSpy).toHaveBeenCalledWith({
-type: 'task-completed',
-outcome: TaskProcessingOutcome.Processed,
-message: null
-});
+expect(processSendSpy)
+.toHaveBeenCalledWith(
+{type: 'task-completed', outcome: TaskProcessingOutcome.Processed, message: null});
 
 processSendSpy.calls.reset();
 
@@ -137,7 +135,9 @@ describe('ClusterWorker', () => {
 } as unknown as Task;
 
 let err: string|Error;
-compileFnSpy.and.callFake(() => { throw err; });
+compileFnSpy.and.callFake(() => {
+throw err;
+});
 
 worker.run();
 

@@ -14,8 +14,8 @@ import {EntryPoint} from '../../src/packages/entry_point';
 *
 * NOTE 1: The first task for each entry-point generates typings (which is similar to what happens
 * in the actual code).
-* NOTE 2: The `computeTaskDependencies()` implementation relies on the fact that tasks are sorted in such
-* a way that a task can only depend upon earlier tasks (i.e. dependencies always come
+* NOTE 2: The `computeTaskDependencies()` implementation relies on the fact that tasks are sorted
+* in such a way that a task can only depend upon earlier tasks (i.e. dependencies always come
 * before dependents in the list of tasks).
 * To preserve this attribute, you need to ensure that entry-points will only depend on
 * entry-points with a lower index. Take this into account when defining `entryPointDeps`.
@@ -52,7 +52,7 @@ export function createTasksAndGraph(
 graph.addNode(entryPoint.path);
 
 for (let tIdx = 0; tIdx < tasksPerEntryPointCount; tIdx++) {
-tasks.push({ entryPoint, formatProperty: `prop-${tIdx}`, processDts: tIdx === 0 } as Task);
+tasks.push({entryPoint, formatProperty: `prop-${tIdx}`, processDts: tIdx === 0} as Task);
 }
 }
 

@@ -33,11 +33,13 @@ describe('SingleProcessExecutor', () => {
 executor = new SingleProcessExecutorSync(mockLogger, locker, createTaskCompletedCallback);
 });
 
-const noTasks = () => ({ allTasksCompleted: true, getNextTask: () => null } as TaskQueue);
+const noTasks = () => ({allTasksCompleted: true, getNextTask: () => null} as TaskQueue);
 const oneTask = () => {
 let tasksCount = 1;
 return <TaskQueue>{
-get allTasksCompleted() { return tasksCount === 0; },
+get allTasksCompleted() {
+return tasksCount === 0;
+},
 getNextTask() {
 tasksCount--;
 return {};
@@ -55,7 +57,9 @@ describe('SingleProcessExecutor', () => {
 });
 
 it('should call LockFile.write() and LockFile.remove() if `analyzeEntryPoints` fails', () => {
-const errorFn: () => never = () => { throw new Error('analyze error'); };
+const errorFn: () => never = () => {
+throw new Error('analyze error');
+};
 const createCompileFn: () => any = () => undefined;
 let error: string = '';
 try {
@@ -68,7 +72,9 @@ describe('SingleProcessExecutor', () => {
 });
 
 it('should call LockFile.write() and LockFile.remove() if `createCompileFn` fails', () => {
-const createErrorCompileFn: () => any = () => { throw new Error('compile error'); };
+const createErrorCompileFn: () => any = () => {
+throw new Error('compile error');
+};
 let error: string = '';
 try {
 executor.execute(oneTask, createErrorCompileFn);
@@ -85,7 +91,9 @@ describe('SingleProcessExecutor', () => {
 throw new Error('LockFile.write() error');
 });
 
-const analyzeFn: () => any = () => { lockFileLog.push('analyzeFn'); };
+const analyzeFn: () => any = () => {
+lockFileLog.push('analyzeFn');
+};
 const anyFn: () => any = () => undefined;
 executor = new SingleProcessExecutorSync(mockLogger, locker, createTaskCompletedCallback);
 let error = '';

@@ -77,9 +77,9 @@ describe('ParallelTaskQueue', () => {
 it('should be `true`, when there are no unprocess or in-progress tasks', () => {
 const {queue} = createQueue(3);
 
-const task1 = queue.getNextTask() !;
-const task2 = queue.getNextTask() !;
-const task3 = queue.getNextTask() !;
+const task1 = queue.getNextTask()!;
+const task2 = queue.getNextTask()!;
+const task3 = queue.getNextTask()!;
 expect(queue.allTasksCompleted).toBe(false);
 
 queue.markTaskCompleted(task1);
@@ -266,8 +266,8 @@ describe('ParallelTaskQueue', () => {
 it('should mark a task as completed', () => {
 const {queue} = createQueue(2);
 
-const task1 = queue.getNextTask() !;
-const task2 = queue.getNextTask() !;
+const task1 = queue.getNextTask()!;
+const task2 = queue.getNextTask()!;
 expect(queue.allTasksCompleted).toBe(false);
 
 queue.markTaskCompleted(task1);
@@ -327,7 +327,7 @@ describe('ParallelTaskQueue', () => {
 
 processNextTask(queue2);
 processNextTask(queue2);
-const task = queue2.getNextTask() !;
+const task = queue2.getNextTask()!;
 
 expect(queue2.toString()).toContain(' All tasks completed: false\n');
 
@@ -344,7 +344,7 @@ describe('ParallelTaskQueue', () => {
 ' - {entryPoint: entry-point-1, formatProperty: prop-0, processDts: true}\n' +
 ' - {entryPoint: entry-point-2, formatProperty: prop-0, processDts: true}\n');
 
-const task1 = queue.getNextTask() !;
+const task1 = queue.getNextTask()!;
 expect(queue.toString())
 .toContain(
 ' Unprocessed tasks (2): \n' +
@@ -352,7 +352,7 @@ describe('ParallelTaskQueue', () => {
 ' - {entryPoint: entry-point-2, formatProperty: prop-0, processDts: true}\n');
 
 queue.markTaskCompleted(task1);
-const task2 = queue.getNextTask() !;
+const task2 = queue.getNextTask()!;
 expect(queue.toString())
 .toContain(
 ' Unprocessed tasks (1): \n' +
@@ -367,14 +367,14 @@ describe('ParallelTaskQueue', () => {
 const {queue} = createQueue(3);
 expect(queue.toString()).toContain(' In-progress tasks (0): \n');
 
-const task1 = queue.getNextTask() !;
+const task1 = queue.getNextTask()!;
 expect(queue.toString())
 .toContain(
 ' In-progress tasks (1): \n' +
 ' - {entryPoint: entry-point-0, formatProperty: prop-0, processDts: true}\n');
 
 queue.markTaskCompleted(task1);
-const task2 = queue.getNextTask() !;
+const task2 = queue.getNextTask()!;
 expect(queue.toString())
 .toContain(
 ' In-progress tasks (1): \n' +

@@ -30,12 +30,10 @@ describe('SerialTaskQueue', () => {
 const tasks: PartiallyOrderedTasks = [] as any;
 const graph = new DepGraph<EntryPoint>();
 for (let i = 0; i < taskCount; i++) {
-const entryPoint = {
-name: `entry-point-${i}`,
-path: `/path/to/entry/point/${i}`
-} as EntryPoint;
+const entryPoint = {name: `entry-point-${i}`, path: `/path/to/entry/point/${i}`} as
+EntryPoint;
 tasks.push(
-{ entryPoint: entryPoint, formatProperty: `prop-${i}`, processDts: i % 2 === 0 } as Task);
+{entryPoint: entryPoint, formatProperty: `prop-${i}`, processDts: i % 2 === 0} as Task);
 graph.addNode(entryPoint.path);
 }
 const dependencies = computeTaskDependencies(tasks, graph);
@@ -140,7 +138,7 @@ describe('SerialTaskQueue', () => {
 describe('markTaskCompleted()', () => {
 it('should mark a task as completed, so that the next task can be picked', () => {
 const {queue} = createQueue(3);
-const task = queue.getNextTask() !;
+const task = queue.getNextTask()!;
 
 expect(() => queue.getNextTask()).toThrow();
 
@@ -174,7 +172,7 @@ describe('SerialTaskQueue', () => {
 
 processNextTask(queue2);
 processNextTask(queue2);
-const task = queue2.getNextTask() !;
+const task = queue2.getNextTask()!;
 
 expect(queue2.toString()).toContain(' All tasks completed: false\n');
 
@@ -191,7 +189,7 @@ describe('SerialTaskQueue', () => {
 ' - {entryPoint: entry-point-1, formatProperty: prop-1, processDts: false}\n' +
 ' - {entryPoint: entry-point-2, formatProperty: prop-2, processDts: true}\n');
 
-const task1 = queue.getNextTask() !;
+const task1 = queue.getNextTask()!;
 expect(queue.toString())
 .toContain(
 ' Unprocessed tasks (2): \n' +
@@ -199,7 +197,7 @@ describe('SerialTaskQueue', () => {
 ' - {entryPoint: entry-point-2, formatProperty: prop-2, processDts: true}\n');
 
 queue.markTaskCompleted(task1);
-const task2 = queue.getNextTask() !;
+const task2 = queue.getNextTask()!;
 expect(queue.toString())
 .toContain(
 ' Unprocessed tasks (1): \n' +
@@ -214,14 +212,14 @@ describe('SerialTaskQueue', () => {
 const {queue} = createQueue(3);
 expect(queue.toString()).toContain(' In-progress tasks (0): ');
 
-const task1 = queue.getNextTask() !;
+const task1 = queue.getNextTask()!;
 expect(queue.toString())
 .toContain(
 ' In-progress tasks (1): \n' +
 ' - {entryPoint: entry-point-0, formatProperty: prop-0, processDts: true}');
 
 queue.markTaskCompleted(task1);
-const task2 = queue.getNextTask() !;
+const task2 = queue.getNextTask()!;
 expect(queue.toString())
 .toContain(
 ' In-progress tasks (1): \n' +
@@ -253,7 +251,7 @@ describe('SerialTaskQueue', () => {
 ' In-progress tasks (0): ');
 
 processNextTask(queue2);
-const task = queue2.getNextTask() !;
+const task = queue2.getNextTask()!;
 expect(queue2.toString())
 .toBe(
 'SerialTaskQueue\n' +