fix: instantiate perf observer for each iteration (#3585)
Changes to instantiate a `PerformanceObserver` instance for each
iteration of a benchmark run. This prevents the observer from being
killed before all iterations of the wrapped function have run, which
could happen when environment changes caused the observer to fall out
of scope and be GCed. Previously, in GitHub Actions, only a single
run's results were recorded because the observer was killed after the
first iteration; this wasn't the case when running locally.
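
In outline, the fix looks like this (a minimal sketch, not the code from this commit; `observeOnce` and `measure` are illustrative names):

```ts
import { performance, PerformanceObserver, PerformanceEntry } from 'perf_hooks';

// Resolves with the measurement for one call of a timerified function.
// A fresh observer is created per iteration, so no single observer has
// to stay reachable (and un-GCed) for the entire benchmark run.
function observeOnce(): Promise<PerformanceEntry> {
  return new Promise((resolve) => {
    const obs = new PerformanceObserver((list, observer) => {
      resolve(list.getEntries()[0]);
      performance.clearMarks();
      observer.disconnect();
    });
    obs.observe({ entryTypes: ['function'] });
  });
}

async function measure(subject: () => void, iterations = 5): Promise<PerformanceEntry[]> {
  const wrapped = performance.timerify(subject);
  const results: PerformanceEntry[] = [];
  for (let i = 0; i < iterations; i++) {
    const entry = observeOnce(); // fresh observer for this iteration
    wrapped();
    results.push(await entry);
  }
  return results;
}
```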

Additionally adds an option to record CPU profiles for benchmarks and
write them to a directory. These can be used to compare benchmark call
stacks across code changes.
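
With yargs' default camel-case expansion, the `profile-dir` flag is read in code as `argv.profileDir`. A hypothetical invocation would be `node bin/benchmark.js --output results.json --profile-dir profiles`, which writes one `<profile-dir>/<benchmark name>/<iteration>.json` profile per iteration.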

---

By submitting this pull request, I confirm that my contribution is made under the terms of the [Apache 2.0 license].

[Apache 2.0 license]: https://www.apache.org/licenses/LICENSE-2.0
MrArnoldPalmer committed Jun 13, 2022
1 parent 3052275 commit 3827d93
Showing 2 changed files with 174 additions and 72 deletions.
92 changes: 61 additions & 31 deletions packages/@jsii/benchmarks/bin/benchmark.ts
@@ -1,4 +1,5 @@
import * as fs from 'fs-extra';
+import * as path from 'path';
import * as yargs from 'yargs';

import { benchmarks } from '../lib';
@@ -39,43 +40,72 @@ interface ResultsJson {
/* eslint-disable-next-line @typescript-eslint/await-thenable */
const argv = await yargs
.command('$0', 'Runs jsii benchmark tests and displays results', (argv) =>
-    argv.option('output', {
-      type: 'string',
-      desc: 'location of benchmark results json file, does not output to file if not specified.',
-    }),
+    argv
+      .option('output', {
+        type: 'string',
+        desc: 'location of benchmark results json file, does not output to file if not specified.',
+      })
+      .option('profile-dir', {
+        type: 'string',
+        desc: 'directory to write benchmark profiles to',
+      }),
)
.help().argv;

// Run list of benchmarks in sequence
-const resultsJson: ResultsJson[] = await benchmarks.reduce(
-  async (
-    accum: Promise<ResultsJson[]>,
-    benchmark: Benchmark<any>,
-  ): Promise<ResultsJson[]> => {
-    const prev = await accum;
-    const result = await benchmark.run();
-    const extra = `${result.name} averaged ${result.average} milliseconds over ${result.iterations.length} runs`;
-    console.log(extra);
-    return [
-      ...prev,
-      {
-        name: result.name,
-        unit: 'milliseconds',
-        value: result.average,
-        range: result.variance,
-        extra,
-      },
-    ];
-  },
-  Promise.resolve([]),
-);
-
-if (argv.output) {
-  await fs.writeJson(argv.output, resultsJson, { spaces: 2 });
-  console.log(`results written to ${argv.output}`);
-}
-
-return resultsJson;
+try {
+  const resultsJson: ResultsJson[] = await benchmarks.reduce(
+    async (
+      accum: Promise<ResultsJson[]>,
+      benchmark: Benchmark<any>,
+    ): Promise<ResultsJson[]> => {
+      const bench = argv.profileDir ? benchmark.profile() : benchmark;
+      const prev = await accum;
+      const result = await bench.run();
+
+      // Output summary to console
+      const extra = `${result.name} averaged ${result.average} milliseconds over ${result.iterations.length} runs`;
+      console.log(extra);
+
+      // Write profiles if enabled
+      if (argv.profileDir) {
+        const dirName = result.name.replace(/[/\\:*?"<>]/g, '');
+        const profilesTargetDir = path.join(argv.profileDir, dirName);
+        await fs.mkdir(profilesTargetDir);
+
+        await Promise.all(
+          result.iterations.map(async (iter, idx) => {
+            const profileFile = path.join(profilesTargetDir, `${idx}.json`);
+            await fs.writeJson(profileFile, iter.profile);
+          }),
+        );
+        console.log(`profiles written to ${profilesTargetDir} directory`);
+      }
+
+      return [
+        ...prev,
+        {
+          name: result.name,
+          unit: 'milliseconds',
+          value: result.average,
+          range: result.variance,
+          extra,
+        },
+      ];
+    },
+    Promise.resolve([]),
+  );
+
+  if (argv.output) {
+    await fs.writeJson(argv.output, resultsJson, { spaces: 2 });
+    console.log(`results written to ${argv.output}`);
+  }
+
+  return resultsJson;
+} catch (e) {
+  console.error(e);
+  throw e;
+}
})()
.then((results) => {
console.log(`successfully completed ${results.length} benchmarks`);
154 changes: 113 additions & 41 deletions packages/@jsii/benchmarks/lib/benchmark.ts
@@ -1,5 +1,21 @@
+import { Profiler, Session } from 'inspector';
import { performance, PerformanceObserver, PerformanceEntry } from 'perf_hooks';

+/**
+ * Result of a single run of the subject
+ */
+interface Iteration {
+  /**
+   * The result of perf_hooks measurement
+   */
+  performance: PerformanceEntry;
+
+  /**
+   * The cpu profile, undefined unless profiling is enabled.
+   */
+  profile?: Profiler.Profile;
+}

/**
* Result of a benchmark run
*/
@@ -32,7 +48,7 @@ interface Result {
/**
* Results of individual runs
*/
-  readonly iterations: readonly PerformanceEntry[];
+  readonly iterations: readonly Iteration[];
}

/**
@@ -46,15 +62,8 @@ interface Result {
* teardown, stubbing, etc.
*/
export class Benchmark<C> {
-  /**
-   * How many times to run the subject
-   */
  #iterations = 5;
-
-  /**
-   * Results of individual runs
-   */
-  #results: PerformanceEntry[] = [];
+  #profile = false;

public constructor(private readonly name: string) {}
#setup: () => C | Promise<C> = () => ({} as C);
@@ -76,7 +85,7 @@ export class Benchmark<C> {
* Create a teardown function to be run once after all benchmark runs. Use to
* clean up your mess.
*/
-  public teardown(fn: (ctx: C) => void) {
+  public teardown(fn: (ctx: C) => any) {
this.#teardown = fn;
return this;
}
@@ -85,7 +94,7 @@ export class Benchmark<C> {
* Create a beforeEach function to be run before each iteration. Use to reset
* state the subject may have changed.
*/
-  public beforeEach(fn: (ctx: C) => void) {
+  public beforeEach(fn: (ctx: C) => any) {
this.#beforeEach = fn;
return this;
}
@@ -94,7 +103,7 @@ export class Benchmark<C> {
* Create an afterEach function to be run after each iteration. Use to reset
* state the subject may have changed.
*/
-  public afterEach(fn: (ctx: C) => void) {
+  public afterEach(fn: (ctx: C) => any) {
this.#afterEach = fn;
return this;
}
@@ -116,43 +125,106 @@ export class Benchmark<C> {
}

/**
-   * Run and measure the benchmark
+   * Enable the profiler to collect CPU and Memory usage.
   */
-  public async run(): Promise<Result> {
-    const c = await this.#setup?.();
-
-    return new Promise((ok) => {
-      const wrapped = performance.timerify(this.#subject);
-      const obs = new PerformanceObserver((list, observer) => {
-        this.#results = list.getEntries();
-        performance.clearMarks();
-        observer.disconnect();
-        const durations = this.#results.map((i) => i.duration);
-        const max = Math.max(...durations);
-        const min = Math.min(...durations);
-        const variance = max - min;
-
-        return ok({
-          name: this.name,
-          average:
-            durations.reduce((accum, duration) => accum + duration, 0) /
-            durations.length,
-          max,
-          min,
-          variance,
-          iterations: this.#results,
-        });
-      });
-      obs.observe({ entryTypes: ['function'] });
-
-      try {
-        for (let i = 0; i < this.#iterations; i++) {
-          this.#beforeEach(c);
-          wrapped(c);
-          this.#afterEach(c);
-        }
-      } finally {
-        this.#teardown(c);
-      }
-    });
-  }
+  public profile() {
+    this.#profile = true;
+    return this;
+  }
+
+  private async startProfiler(): Promise<Session> {
+    const session = new Session();
+    session.connect();
+
+    return new Promise((ok) => {
+      session.post('Profiler.enable', () => {
+        session.post('Profiler.start', () => {
+          ok(session);
+        });
+      });
+    });
+  }
+
+  private async killProfiler(
+    s?: Session,
+  ): Promise<Profiler.Profile | undefined> {
+    return new Promise((ok, ko) => {
+      if (!s) {
+        return ok(undefined);
+      }
+
+      s.post('Profiler.stop', (err, { profile }) => {
+        if (err) {
+          return ko(err);
+        }
+
+        return ok(profile);
+      });
+    });
+  }
+
+  private async makeObserver(): Promise<PerformanceEntry> {
+    return new Promise((ok) => {
+      const obs = new PerformanceObserver((list, observer) => {
+        ok(list.getEntries()[0]);
+        performance.clearMarks();
+        observer.disconnect();
+      });
+      obs.observe({ entryTypes: ['function'] });
+    });
+  }
+
+  private async *runIterations(ctx: C) {
+    let i = 0;
+    let profiler;
+    const wrapped = performance.timerify(this.#subject);
+
+    /* eslint-disable no-await-in-loop */
+    while (i < this.#iterations) {
+      const observer = this.makeObserver();
+      this.#beforeEach(ctx);
+      if (this.#profile) {
+        profiler = await this.startProfiler();
+      }
+      wrapped(ctx);
+      const profile = await this.killProfiler(profiler);
+      const perf = await observer;
+      this.#afterEach(ctx);
+
+      i++;
+      yield { profile, performance: perf };
+    }
+    /* eslint-enable no-await-in-loop */
+  }
+
+  /**
+   * Run and measure the benchmark
+   */
+  public async run(): Promise<Result> {
+    const iterations: Iteration[] = [];
+    const c = await this.#setup?.();
+
+    for await (const result of this.runIterations(c)) {
+      iterations.push(result);
+    }
+
+    this.#teardown(c);
+
+    const durations = iterations.map((i) => i.performance.duration);
+    const max = Math.max(...durations);
+    const min = Math.min(...durations);
+    const variance = max - min;
+    const average =
+      durations.reduce((accum, duration) => accum + duration, 0) /
+      durations.length;
+
+    return {
+      name: this.name,
+      average,
+      max,
+      min,
+      variance,
+      iterations,
+    };
+  }
}
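
For context, here is how the resulting API might read end to end (a hypothetical sketch: the `setup` and `subject` builder methods are assumed from the collapsed portions of the class, while `profile()`, `run()`, and the result shape appear in the diff above; the import path is illustrative):

```ts
import { Benchmark } from './benchmark';

async function main() {
  const result = await new Benchmark<{ input: number[] }>('sort-10k-numbers')
    // Assumed builder methods (hidden in this diff's collapsed regions):
    .setup(() => ({ input: Array.from({ length: 10_000 }, () => Math.random()) }))
    .subject((ctx) => [...ctx.input].sort((a, b) => a - b))
    // Shown in the diff: collect a CPU profile for every iteration.
    .profile()
    .run();

  console.log(
    `${result.name} averaged ${result.average}ms over ${result.iterations.length} runs`,
  );
}

main().catch(console.error);
```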
