1
+ import { Profiler , Session } from 'inspector' ;
1
2
import { performance , PerformanceObserver , PerformanceEntry } from 'perf_hooks' ;
2
3
4
/**
 * Result of a single run of the subject.
 */
interface Iteration {
  /**
   * The perf_hooks measurement for this run (name, startTime, duration).
   */
  performance: PerformanceEntry;

  /**
   * The V8 CPU profile for this run; undefined unless profiling was
   * enabled via Benchmark#profile().
   */
  profile?: Profiler.Profile;
}
18
+
3
19
/**
4
20
* Result of a benchmark run
5
21
*/
@@ -32,7 +48,7 @@ interface Result {
32
48
/**
33
49
* Results of individual runs
34
50
*/
35
- readonly iterations : readonly PerformanceEntry [ ] ;
51
+ readonly iterations : readonly Iteration [ ] ;
36
52
}
37
53
38
54
/**
@@ -46,15 +62,8 @@ interface Result {
46
62
* teardown, stubbing, etc.
47
63
*/
48
64
export class Benchmark < C > {
49
- /**
50
- * How many times to run the subject
51
- */
52
65
  /**
   * How many times to run the subject (default: 5).
   */
  #iterations = 5;

  /**
   * Whether to capture a CPU profile for each iteration (see profile()).
   */
  #profile = false;

  public constructor(private readonly name: string) {}

  /**
   * Builds the context handed to every hook and to the subject.
   * Defaults to an empty object asserted to C — NOTE(review): assumes C is
   * satisfiable by {}; callers with richer contexts must provide setup().
   */
  #setup: () => C | Promise<C> = () => ({} as C);
@@ -76,7 +85,7 @@ export class Benchmark<C> {
76
85
* Create a teardown function to be run once after all benchmark runs. Use to
77
86
* clean up your mess.
78
87
*/
79
- public teardown ( fn : ( ctx : C ) => void ) {
88
+ public teardown ( fn : ( ctx : C ) => any ) {
80
89
this . #teardown = fn ;
81
90
return this ;
82
91
}
@@ -85,7 +94,7 @@ export class Benchmark<C> {
85
94
* Create a beforeEach function to be run before each iteration. Use to reset
86
95
* state the subject may have changed.
87
96
*/
88
- public beforeEach ( fn : ( ctx : C ) => void ) {
97
+ public beforeEach ( fn : ( ctx : C ) => any ) {
89
98
this . #beforeEach = fn ;
90
99
return this ;
91
100
}
@@ -94,7 +103,7 @@ export class Benchmark<C> {
94
103
* Create an afterEach function to be run after each iteration. Use to reset
95
104
* state the subject may have changed.
96
105
*/
97
- public afterEach ( fn : ( ctx : C ) => void ) {
106
+ public afterEach ( fn : ( ctx : C ) => any ) {
98
107
this . #afterEach = fn ;
99
108
return this ;
100
109
}
@@ -116,43 +125,106 @@ export class Benchmark<C> {
116
125
}
117
126
118
127
  /**
   * Enable the profiler: each iteration will additionally capture a V8 CPU
   * profile (exposed as Iteration.profile in the result).
   *
   * @returns this, for chaining
   */
  public profile() {
    this.#profile = true;
    return this;
  }
134
+
135
+ private async startProfiler ( ) : Promise < Session > {
136
+ const session = new Session ( ) ;
137
+ session . connect ( ) ;
138
+
139
+ return new Promise ( ( ok ) => {
140
+ session . post ( 'Profiler.enable' , ( ) => {
141
+ session . post ( 'Profiler.start' , ( ) => {
142
+ ok ( session ) ;
143
+ } ) ;
144
+ } ) ;
145
+ } ) ;
146
+ }
147
+
148
+ private async killProfiler (
149
+ s ?: Session ,
150
+ ) : Promise < Profiler . Profile | undefined > {
151
+ return new Promise ( ( ok , ko ) => {
152
+ if ( ! s ) {
153
+ return ok ( undefined ) ;
154
+ }
155
+
156
+ s . post ( 'Profiler.stop' , ( err , { profile } ) => {
157
+ if ( err ) {
158
+ return ko ( err ) ;
159
+ }
160
+
161
+ return ok ( profile ) ;
162
+ } ) ;
163
+ } ) ;
164
+ }
165
+
166
  /**
   * Create a one-shot PerformanceObserver and resolve with the first
   * 'function' entry it sees — i.e. the entry emitted by the timerified
   * subject. The observer clears marks and disconnects itself after firing.
   *
   * Must be created before the subject is invoked so the entry is observed;
   * the caller awaits the returned promise afterwards (see runIterations).
   */
  private async makeObserver(): Promise<PerformanceEntry> {
    return new Promise((ok) => {
      const obs = new PerformanceObserver((list, observer) => {
        // One entry per iteration is expected, since a fresh observer is
        // created for each run of the subject.
        ok(list.getEntries()[0]);
        performance.clearMarks();
        observer.disconnect();
      });
      obs.observe({ entryTypes: ['function'] });
    });
  }
146
176
147
- try {
148
- for ( let i = 0 ; i < this . #iterations; i ++ ) {
149
- this . #beforeEach( c ) ;
150
- wrapped ( c ) ;
151
- this . #afterEach( c ) ;
152
- }
153
- } finally {
154
- this . #teardown( c ) ;
177
+ private async * runIterations ( ctx : C ) {
178
+ let i = 0 ;
179
+ let profiler ;
180
+ const wrapped = performance . timerify ( this . #subject) ;
181
+
182
+ /* eslint-disable no-await-in-loop */
183
+ while ( i < this . #iterations) {
184
+ const observer = this . makeObserver ( ) ;
185
+ this . #beforeEach( ctx ) ;
186
+ if ( this . #profile) {
187
+ profiler = await this . startProfiler ( ) ;
155
188
}
156
- } ) ;
189
+ wrapped ( ctx ) ;
190
+ const profile = await this . killProfiler ( profiler ) ;
191
+ const perf = await observer ;
192
+ this . #afterEach( ctx ) ;
193
+
194
+ i ++ ;
195
+ yield { profile, performance : perf } ;
196
+ }
197
+ /* eslint-enable no-await-in-loop */
198
+ }
199
+
200
+ /**
201
+ * Run and measure the benchmark
202
+ */
203
+ public async run ( ) : Promise < Result > {
204
+ const iterations : Iteration [ ] = [ ] ;
205
+ const c = await this . #setup?.( ) ;
206
+
207
+ for await ( const result of this . runIterations ( c ) ) {
208
+ iterations . push ( result ) ;
209
+ }
210
+
211
+ this . #teardown( c ) ;
212
+
213
+ const durations = iterations . map ( ( i ) => i . performance . duration ) ;
214
+ const max = Math . max ( ...durations ) ;
215
+ const min = Math . min ( ...durations ) ;
216
+ const variance = max - min ;
217
+ const average =
218
+ durations . reduce ( ( accum , duration ) => accum + duration , 0 ) /
219
+ durations . length ;
220
+
221
+ return {
222
+ name : this . name ,
223
+ average,
224
+ max,
225
+ min,
226
+ variance,
227
+ iterations,
228
+ } ;
157
229
}
158
230
}
0 commit comments