diff --git a/packages/core/test/render3/perf/BUILD.bazel b/packages/core/test/render3/perf/BUILD.bazel
index 1520e05141..84c1bba8f8 100644
--- a/packages/core/test/render3/perf/BUILD.bazel
+++ b/packages/core/test/render3/perf/BUILD.bazel
@@ -9,6 +9,7 @@ ts_library(
),
deps = [
"//packages/core",
+ "@npm//@types/node",
],
)
diff --git a/packages/core/test/render3/perf/README.md b/packages/core/test/render3/perf/README.md
index e09461dd6e..9708d02c5e 100644
--- a/packages/core/test/render3/perf/README.md
+++ b/packages/core/test/render3/perf/README.md
@@ -4,7 +4,7 @@ yarn bazel build //packages/core/test/render3/perf:{name}.min_debug.es2015.js --
### Run
-time node dist/bin/packages/core/test/render3/perf/{name}.min_debug.es2015.js
+node dist/bin/packages/core/test/render3/perf/{name}.min_debug.es2015.js
### Profile
@@ -19,4 +19,4 @@ The actual benchmark code has calls that will start (`console.profile`) and stop
In all the above commands {name} should be replaced with the actual benchmark (folder) name, ex.:
- build: `yarn bazel build //packages/core/test/render3/perf:noop_change_detection.min_debug.es2015.js --define=compile=aot`
- run: `time node dist/bin/packages/core/test/render3/perf/noop_change_detection.min_debug.es2015.js`
-- profile: `node --no-turbo-inlining --inspect-brk dist/bin/packages/core/test/render3/perf/noop_change_detection.min_debug.es2015.js`
+- profile: `node --no-turbo-inlining --inspect-brk dist/bin/packages/core/test/render3/perf/noop_change_detection.min_debug.es2015.js profile`
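The trailing `profile` argument in the profiling command implies that a benchmark inspects its CLI arguments before opening a profiling session. That wiring is not shown in this diff, so the following is only a sketch of how a benchmark could gate its `console.profile` calls on that argument (the `profileRequested` name and the `runBenchmark` placeholder are hypothetical):

```ts
// Hypothetical sketch: skip the profiling session unless `profile` was passed.
// process.argv is [node, script, ...userArgs], so inspect the user arguments.
const profileRequested = process.argv.slice(2).includes('profile');

if (profileRequested) console.profile('benchmark');
runBenchmark();  // placeholder for the actual nested benchmark loops
if (profileRequested) console.profileEnd();
```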
diff --git a/packages/core/test/render3/perf/element_text_create/index.ts b/packages/core/test/render3/perf/element_text_create/index.ts
index eadb7d8cf3..05df733a59 100644
--- a/packages/core/test/render3/perf/element_text_create/index.ts
+++ b/packages/core/test/render3/perf/element_text_create/index.ts
@@ -11,6 +11,7 @@ import {ɵɵtext} from '../../../../src/render3/instructions/text';
import {RenderFlags} from '../../../../src/render3/interfaces/definition';
import {TNodeType, TViewNode} from '../../../../src/render3/interfaces/node';
import {resetComponentState} from '../../../../src/render3/state';
+import {createBenchmark} from '../micro_bench';
import {createAndRenderLView} from '../setup';
@@ -72,9 +73,17 @@ resetComponentState();
// create view once so we don't profile first template pass
createAndRenderLView(null, embeddedTView, viewTNode);
-// profile create views (run templates in creation mode)
-console.profile('create');
-for (let i = 0; i < 500000; i++) {
- createAndRenderLView(null, embeddedTView, viewTNode);
+// scenario to benchmark: 20 timed runs of 500000 view creations each
+const elementTextCreate = createBenchmark('element and text create', 500000, 20);
+const createTime = elementTextCreate('create');
+
+console.profile('element_text_create');
+while (createTime.run()) {   // outer loop: one timed run per iteration
+  while (createTime()) {     // inner loop: 500000 iterations per run
+    createAndRenderLView(null, embeddedTView, viewTNode);
+  }
}
-console.profileEnd();
\ No newline at end of file
+console.profileEnd();
+
+// report results
+elementTextCreate.report();
\ No newline at end of file
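
For reference, the harness protocol used above is two nested loops: `profile.run()` brackets a timed run and returns false once all runs have completed, while calling the profile itself counts down `iterationCount` iterations inside the current run. A minimal standalone sketch of the same shape (the `work` function is a placeholder):

```ts
import {createBenchmark} from '../micro_bench';

// Placeholder for the code under test.
function work(): void {}

const benchmark = createBenchmark('example', 100000, 20);
const profile = benchmark('baseline');

while (profile.run()) {  // 20 timed runs; the best run time is kept
  while (profile()) {    // 100000 iterations per timed run
    work();
  }
}
benchmark.report();  // prints each profile's best time relative to the fastest
```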
diff --git a/packages/core/test/render3/perf/micro_bench.ts b/packages/core/test/render3/perf/micro_bench.ts
new file mode 100644
index 0000000000..162da2a656
--- /dev/null
+++ b/packages/core/test/render3/perf/micro_bench.ts
@@ -0,0 +1,82 @@
+/**
+ * @license
+ * Copyright Google Inc. All Rights Reserved.
+ *
+ * Use of this source code is governed by an MIT-style license that can be
+ * found in the LICENSE file at https://angular.io/license
+ */
+const performance = require('perf_hooks').performance;
+
+interface Benchmark {
+ (versionName: string): Profile;
+ report(fn?: (report: string) => void): void;
+}
+interface Profile {
+ (): boolean;
+ profileName: string;
+ run(): boolean;
+ bestTime: number;
+}
+
+export function createBenchmark(
+ benchmarkName: string, iterationCount: number, runs: number = 50): Benchmark {
+ const profiles: Profile[] = [];
+
+  const benchmark = function Benchmark(profileName: string): Profile {
+    let iterationCounter: number = iterationCount;
+    // Calling the profile function itself counts down one iteration of the
+    // current run; it returns false (and resets) after iterationCount calls.
+    const profile: Profile = function Profile() {
+      if (iterationCounter === 0) {
+        iterationCounter = iterationCount;
+        return false;
+      } else {
+        iterationCounter--;
+        return true;
+      }
+    } as Profile;
+ let lastTimestamp = 0;
+ let runCount = runs;
+  // run() marks the boundary between timed runs: it records the time since
+  // the previous run() call as a candidate best time and returns false once
+  // all runs have completed.
+  profile.run = function() {
+    const now = performance.now();
+    if (lastTimestamp !== 0) {
+      const time = now - lastTimestamp;
+      profile.bestTime = Math.min(profile.bestTime, time);
+    }
+    lastTimestamp = now;
+    if (runCount === 0) {
+      runCount = runs;
+      lastTimestamp = 0;  // reset so the profile can be timed again later
+      return false;
+    } else {
+      runCount--;
+      return true;
+    }
+  };
+ profile.profileName = profileName;
+ profile.bestTime = Number.MAX_SAFE_INTEGER;
+ profiles.push(profile);
+ return profile;
+ } as Benchmark;
+
+  benchmark.report = function(fn?: (report: string) => void) {
+    // Defer the report to the macrotask queue so it prints after any
+    // still-pending work in the current turn.
+    setTimeout(() => {
+      const fastest = profiles.reduce((previous: Profile, current: Profile) => {
+        return (previous.bestTime < current.bestTime) ? previous : current;
+      });
+      (fn || console.log)(`Benchmark: ${benchmarkName}\n${profiles.map((profile: Profile) => {
+        // performance.now() reports milliseconds, so convert the best per-run
+        // time to microseconds per iteration before printing.
+        const perIterationUs = (profile.bestTime / iterationCount * 1000).toFixed(3);
+        const percentSlower = (profile.bestTime / fastest.bestTime * 100 - 100).toFixed(0);
+        return `${profile.profileName}: ${perIterationUs} us (+${percentSlower}%)`;
+      }).join('\n')}`);
+    });
+  };
+ return benchmark;
+}
\ No newline at end of file
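
Since `createBenchmark` collects every profile it hands out, one benchmark can time several variants of the same operation, and `report()` will rank them against the fastest. A minimal sketch, assuming two hypothetical array-fill variants (`push` vs. indexed assignment):

```ts
import {createBenchmark} from './micro_bench';

const benchmark = createBenchmark('array fill', 100000, 20);
const pushTime = benchmark('push');
const indexTime = benchmark('index');

while (pushTime.run()) {
  while (pushTime()) {
    const a: number[] = [];
    for (let i = 0; i < 10; i++) a.push(i);
  }
}

while (indexTime.run()) {
  while (indexTime()) {
    const a: number[] = [];
    for (let i = 0; i < 10; i++) a[i] = i;
  }
}

benchmark.report();  // e.g. "Benchmark: array fill" followed by one line per profile
```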