Commit 3827d93

fix: instantiate perf observer for each iteration (#3585)
Changes to instantiate a `PerformanceObserver` instance for each iteration of a benchmark run. This prevents the observer from being killed before all iterations of the wrapped function run, due to environment changes that may cause the observer to fall out of scope and be GCed. Previously, in GitHub Actions only a single run's results were recorded because the observer was killed after the first iteration, though this wasn't the case when running locally.

Additionally adds an option to record profiles for benchmarks and write them to a directory. This can be used to compare call stacks of benchmarks across changes to the code.

---

By submitting this pull request, I confirm that my contribution is made under the terms of the [Apache 2.0 license].

[Apache 2.0 license]: https://www.apache.org/licenses/LICENSE-2.0
1 parent 3052275 commit 3827d93
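
To make the fix concrete, here is a minimal standalone sketch of the per-iteration observer pattern the diffs below adopt. The `work` subject, the `observeOnce` helper name, and the `main` wrapper are hypothetical; only `perf_hooks` APIs that actually appear in the diff are used, and five iterations matches the library's default:

```ts
import { performance, PerformanceObserver, PerformanceEntry } from 'perf_hooks';

// Hypothetical subject; stands in for a real benchmark body.
function work(): void {
  let acc = 0;
  for (let i = 0; i < 1_000_000; i++) acc += i;
}

// Arm a fresh observer for a single 'function' entry. The pending Promise
// keeps a strong reference to the observer until its callback fires, so it
// cannot fall out of scope and be GCed mid-run.
function observeOnce(): Promise<PerformanceEntry> {
  return new Promise((ok) => {
    const obs = new PerformanceObserver((list, observer) => {
      ok(list.getEntries()[0]);
      performance.clearMarks();
      observer.disconnect();
    });
    obs.observe({ entryTypes: ['function'] });
  });
}

async function main(): Promise<void> {
  const wrapped = performance.timerify(work);
  for (let i = 0; i < 5; i++) {
    const entry = observeOnce(); // new observer per iteration
    wrapped();
    // eslint-disable-next-line no-await-in-loop
    console.log(`iteration ${i}: ${(await entry).duration} ms`);
  }
}

void main();
```

Contrast with the previous design, where a single observer created inside `run()` was expected to survive all iterations; under GitHub Actions it was collected after the first entry, so only one iteration's result was recorded.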

File tree

2 files changed: +174 -72 lines changed

- packages/@jsii/benchmarks/bin/benchmark.ts
- packages/@jsii/benchmarks/lib/benchmark.ts

Diff for: packages/@jsii/benchmarks/bin/benchmark.ts (+61 -31)
```diff
@@ -1,4 +1,5 @@
 import * as fs from 'fs-extra';
+import * as path from 'path';
 import * as yargs from 'yargs';

 import { benchmarks } from '../lib';
@@ -39,43 +40,72 @@ interface ResultsJson {
 /* eslint-disable-next-line @typescript-eslint/await-thenable */
 const argv = await yargs
   .command('$0', 'Runs jsii benchmark tests and displays results', (argv) =>
-    argv.option('output', {
-      type: 'string',
-      desc: 'location of benchmark results json file, does not output to file if not specified.',
-    }),
+    argv
+      .option('output', {
+        type: 'string',
+        desc: 'location of benchmark results json file, does not output to file if not specified.',
+      })
+      .option('profile-dir', {
+        type: 'string',
+        desc: 'directory to write benchmark profiles to',
+      }),
   )
   .help().argv;

 // Run list of benchmarks in sequence
-const resultsJson: ResultsJson[] = await benchmarks.reduce(
-  async (
-    accum: Promise<ResultsJson[]>,
-    benchmark: Benchmark<any>,
-  ): Promise<ResultsJson[]> => {
-    const prev = await accum;
-    const result = await benchmark.run();
-    const extra = `${result.name} averaged ${result.average} milliseconds over ${result.iterations.length} runs`;
-    console.log(extra);
-    return [
-      ...prev,
-      {
-        name: result.name,
-        unit: 'milliseconds',
-        value: result.average,
-        range: result.variance,
-        extra,
-      },
-    ];
-  },
-  Promise.resolve([]),
-);
+try {
+  const resultsJson: ResultsJson[] = await benchmarks.reduce(
+    async (
+      accum: Promise<ResultsJson[]>,
+      benchmark: Benchmark<any>,
+    ): Promise<ResultsJson[]> => {
+      const bench = argv.profileDir ? benchmark.profile() : benchmark;
+      const prev = await accum;
+      const result = await bench.run();

-if (argv.output) {
-  await fs.writeJson(argv.output, resultsJson, { spaces: 2 });
-  console.log(`results written to ${argv.output}`);
-}
+      // Output summary to console
+      const extra = `${result.name} averaged ${result.average} milliseconds over ${result.iterations.length} runs`;
+      console.log(extra);
+
+      // Write profiles if enabled
+      if (argv.profileDir) {
+        const dirName = result.name.replace(/[/\\:*?"<>]/g, '');
+        const profilesTargetDir = path.join(argv.profileDir, dirName);
+        await fs.mkdir(profilesTargetDir);
+
+        await Promise.all(
+          result.iterations.map(async (iter, idx) => {
+            const profileFile = path.join(profilesTargetDir, `${idx}.json`);
+            await fs.writeJson(profileFile, iter.profile);
+          }),
+        );
+        console.log(`profiles written to ${profilesTargetDir} directory`);
+      }

-return resultsJson;
+      return [
+        ...prev,
+        {
+          name: result.name,
+          unit: 'milliseconds',
+          value: result.average,
+          range: result.variance,
+          extra,
+        },
+      ];
+    },
+    Promise.resolve([]),
+  );
+
+  if (argv.output) {
+    await fs.writeJson(argv.output, resultsJson, { spaces: 2 });
+    console.log(`results written to ${argv.output}`);
+  }
+
+  return resultsJson;
+} catch (e) {
+  console.error(e);
+  throw e;
+}
 })()
   .then((results) => {
     console.log(`successfully completed ${results.length} benchmarks`);
```
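
With both flags set, each benchmark gets its own profile subdirectory, named after the sanitized benchmark name, containing one JSON file per iteration (the inspector `Profiler.Profile` for that run). As a hypothetical invocation (the exact bin wiring isn't shown in this diff): `node bin/benchmark --output results.json --profile-dir profiles` would write `results.json` plus `profiles/<benchmark-name>/0.json` through `4.json` for the default five iterations.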

Diff for: packages/@jsii/benchmarks/lib/benchmark.ts (+113 -41)
```diff
@@ -1,5 +1,21 @@
+import { Profiler, Session } from 'inspector';
 import { performance, PerformanceObserver, PerformanceEntry } from 'perf_hooks';

+/**
+ * Result of a single run of the subject
+ */
+interface Iteration {
+  /**
+   * The result of perf_hooks measurement
+   */
+  performance: PerformanceEntry;
+
+  /**
+   * The cpu profile, undefined unless profiling is enabled.
+   */
+  profile?: Profiler.Profile;
+}
+
 /**
  * Result of a benchmark run
  */
@@ -32,7 +48,7 @@ interface Result {
   /**
    * Results of individual runs
    */
-  readonly iterations: readonly PerformanceEntry[];
+  readonly iterations: readonly Iteration[];
 }

 /**
@@ -46,15 +62,8 @@ interface Result {
  * teardown, stubbing, etc.
  */
 export class Benchmark<C> {
-  /**
-   * How many times to run the subject
-   */
   #iterations = 5;
-
-  /**
-   * Results of individual runs
-   */
-  #results: PerformanceEntry[] = [];
+  #profile = false;

   public constructor(private readonly name: string) {}
   #setup: () => C | Promise<C> = () => ({} as C);
@@ -76,7 +85,7 @@ export class Benchmark<C> {
    * Create a teardown function to be run once after all benchmark runs. Use to
    * clean up your mess.
    */
-  public teardown(fn: (ctx: C) => void) {
+  public teardown(fn: (ctx: C) => any) {
     this.#teardown = fn;
     return this;
   }
@@ -85,7 +94,7 @@
    * Create a beforeEach function to be run before each iteration. Use to reset
    * state the subject may have changed.
    */
-  public beforeEach(fn: (ctx: C) => void) {
+  public beforeEach(fn: (ctx: C) => any) {
     this.#beforeEach = fn;
     return this;
   }
@@ -94,7 +103,7 @@
    * Create an afterEach function to be run after each iteration. Use to reset
    * state the subject may have changed.
    */
-  public afterEach(fn: (ctx: C) => void) {
+  public afterEach(fn: (ctx: C) => any) {
     this.#afterEach = fn;
     return this;
   }
@@ -116,43 +125,106 @@ export class Benchmark<C> {
   }

   /**
-   * Run and measure the benchmark
+   * Enable the profiler to collect CPU and Memory usage.
    */
-  public async run(): Promise<Result> {
-    const c = await this.#setup?.();
+  public profile() {
+    this.#profile = true;
+    return this;
+  }
+
+  private async startProfiler(): Promise<Session> {
+    const session = new Session();
+    session.connect();
+
+    return new Promise((ok) => {
+      session.post('Profiler.enable', () => {
+        session.post('Profiler.start', () => {
+          ok(session);
+        });
+      });
+    });
+  }
+
+  private async killProfiler(
+    s?: Session,
+  ): Promise<Profiler.Profile | undefined> {
+    return new Promise((ok, ko) => {
+      if (!s) {
+        return ok(undefined);
+      }
+
+      s.post('Profiler.stop', (err, { profile }) => {
+        if (err) {
+          return ko(err);
+        }
+
+        return ok(profile);
+      });
+    });
+  }
+
+  private async makeObserver(): Promise<PerformanceEntry> {
     return new Promise((ok) => {
-      const wrapped = performance.timerify(this.#subject);
       const obs = new PerformanceObserver((list, observer) => {
-        this.#results = list.getEntries();
+        ok(list.getEntries()[0]);
        performance.clearMarks();
        observer.disconnect();
-        const durations = this.#results.map((i) => i.duration);
-        const max = Math.max(...durations);
-        const min = Math.min(...durations);
-        const variance = max - min;
-
-        return ok({
-          name: this.name,
-          average:
-            durations.reduce((accum, duration) => accum + duration, 0) /
-            durations.length,
-          max,
-          min,
-          variance,
-          iterations: this.#results,
-        });
      });
      obs.observe({ entryTypes: ['function'] });
+    });
+  }

-    try {
-      for (let i = 0; i < this.#iterations; i++) {
-        this.#beforeEach(c);
-        wrapped(c);
-        this.#afterEach(c);
-      }
-    } finally {
-      this.#teardown(c);
+  private async *runIterations(ctx: C) {
+    let i = 0;
+    let profiler;
+    const wrapped = performance.timerify(this.#subject);
+
+    /* eslint-disable no-await-in-loop */
+    while (i < this.#iterations) {
+      const observer = this.makeObserver();
+      this.#beforeEach(ctx);
+      if (this.#profile) {
+        profiler = await this.startProfiler();
       }
-    });
+      wrapped(ctx);
+      const profile = await this.killProfiler(profiler);
+      const perf = await observer;
+      this.#afterEach(ctx);
+
+      i++;
+      yield { profile, performance: perf };
+    }
+    /* eslint-enable no-await-in-loop */
+  }
+
+  /**
+   * Run and measure the benchmark
+   */
+  public async run(): Promise<Result> {
+    const iterations: Iteration[] = [];
+    const c = await this.#setup?.();
+
+    for await (const result of this.runIterations(c)) {
+      iterations.push(result);
+    }
+
+    this.#teardown(c);
+
+    const durations = iterations.map((i) => i.performance.duration);
+    const max = Math.max(...durations);
+    const min = Math.min(...durations);
+    const variance = max - min;
+    const average =
+      durations.reduce((accum, duration) => accum + duration, 0) /
+      durations.length;

+    return {
+      name: this.name,
+      average,
+      max,
+      min,
+      variance,
+      iterations,
+    };
   }
 }
```
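
For orientation, here is a hedged sketch of driving the updated `Benchmark` class end to end. Only `beforeEach`, `afterEach`, `teardown`, `profile`, and `run` appear in this diff; the fluent `setup()` and `subject()` setters and the import path are assumptions for illustration:

```ts
import { Benchmark } from '@jsii/benchmarks/lib/benchmark'; // assumed import path

interface Ctx {
  counter: number; // hypothetical fixture state
}

async function main(): Promise<void> {
  const bench = new Benchmark<Ctx>('example benchmark')
    // `setup`/`subject` are assumed fluent setters for the private
    // #setup/#subject fields shown in the diff.
    .setup(() => ({ counter: 0 }))
    .subject((ctx: Ctx) => {
      for (let i = 0; i < 1_000_000; i++) ctx.counter += 1;
    })
    .beforeEach((ctx) => {
      ctx.counter = 0; // reset state the subject may have changed
    })
    .profile(); // opt in to one CPU profile per iteration

  const result = await bench.run();
  // Each iteration pairs a perf_hooks entry with an optional V8 profile.
  for (const iter of result.iterations) {
    console.log(iter.performance.duration, iter.profile ? '(profiled)' : '');
  }
  console.log(`${result.name}: avg ${result.average} ms, range ${result.variance}`);
}

void main();
```

One behavioral note visible in the diff: teardown moved out of a `finally` block, so it no longer runs if an iteration throws; the CLI's new try/catch logs such failures before rethrowing.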
