@@ -31,12 +31,27 @@ class SizeLimit {
    return bytes.format(size, { unitSeparator: ' ' });
  }

-  formatTime(seconds) {
-    if (seconds >= 1) {
-      return `${Math.ceil(seconds * 10) / 10} s`;
+  formatPercentageChange(base = 0, current = 0) {
+    if (base === 0) {
+      return 'added';
+    }
+
+    if (current === 0) {
+      return 'removed';
+    }
+
+    const value = ((current - base) / base) * 100;
+    const formatted = (Math.sign(value) * Math.ceil(Math.abs(value) * 100)) / 100;
+
+    if (value > 0) {
+      return `+${formatted}%`;
+    }
+
+    if (value === 0) {
+      return '-';
    }

-    return `${Math.ceil(seconds * 1000)} ms`;
+    return `${formatted}%`;
  }

  formatChange(base = 0, current = 0) {
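As a quick reference, here is roughly what the new formatPercentageChange method returns for a few invented byte counts (limit stands for a SizeLimit instance, as used later in this script; rounding follows the Math.ceil(Math.abs(value) * 100) / 100 step above):

limit.formatPercentageChange(0, 2048);     // 'added'   (no base entry)
limit.formatPercentageChange(2048, 0);     // 'removed'
limit.formatPercentageChange(2048, 2048);  // '-'       (no change)
limit.formatPercentageChange(2000, 2150);  // '+7.5%'
limit.formatPercentageChange(2000, 1800);  // '-10%'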
@@ -48,75 +63,50 @@ class SizeLimit {
      return 'removed';
    }

-    const value = ((current - base) / base) * 100;
-    const formatted = (Math.sign(value) * Math.ceil(Math.abs(value) * 100)) / 100;
+    const value = current - base;
+    const formatted = this.formatBytes(value);

    if (value > 0) {
-      return `+${formatted}% 🔺`;
+      return `+${formatted} 🔺`;
    }

    if (value === 0) {
-      return `${formatted}%`;
+      return '-';
    }

-    return `${formatted}% 🔽`;
+    return `${formatted} 🔽`;
  }

  formatLine(value, change) {
    return `${value} (${change})`;
  }

  formatSizeResult(name, base, current) {
-    return [name, this.formatLine(this.formatBytes(current.size), this.formatChange(base.size, current.size))];
-  }
-
-  formatTimeResult(name, base, current) {
    return [
      name,
-      this.formatLine(this.formatBytes(current.size), this.formatChange(base.size, current.size)),
-      this.formatLine(this.formatTime(current.loading), this.formatChange(base.loading, current.loading)),
-      this.formatLine(this.formatTime(current.running), this.formatChange(base.running, current.running)),
-      this.formatTime(current.total),
+      this.formatBytes(current.size),
+      this.formatPercentageChange(base.size, current.size),
+      this.formatChange(base.size, current.size),
    ];
  }

  parseResults(output) {
    const results = JSON.parse(output);

    return results.reduce((current, result) => {
-      let time = {};
-
-      if (result.loading !== undefined && result.running !== undefined) {
-        const loading = +result.loading;
-        const running = +result.running;
-
-        time = {
-          running,
-          loading,
-          total: loading + running,
-        };
-      }
-
      return {
        // biome-ignore lint/performance/noAccumulatingSpread: <explanation>
        ...current,
        [result.name]: {
          name: result.name,
          size: +result.size,
-          ...time,
        },
      };
    }, {});
  }

  hasSizeChanges(base, current, threshold = 0) {
    const names = [...new Set([...(base ? Object.keys(base) : []), ...Object.keys(current)])];
-    const isSize = names.some(name => current[name] && current[name].total === undefined);
-
-    // Always return true if time results are present
-    if (!isSize) {
-      return true;
-    }

    return !!names.find(name => {
      const baseResult = base?.[name] || EmptyResult;
@@ -132,16 +122,12 @@ class SizeLimit {

  formatResults(base, current) {
    const names = [...new Set([...(base ? Object.keys(base) : []), ...Object.keys(current)])];
-    const isSize = names.some(name => current[name] && current[name].total === undefined);
-    const header = isSize ? SIZE_RESULTS_HEADER : TIME_RESULTS_HEADER;
+    const header = SIZE_RESULTS_HEADER;
    const fields = names.map(name => {
      const baseResult = base?.[name] || EmptyResult;
      const currentResult = current[name] || EmptyResult;

-      if (isSize) {
-        return this.formatSizeResult(name, baseResult, currentResult);
-      }
-      return this.formatTimeResult(name, baseResult, currentResult);
+      return this.formatSizeResult(name, baseResult, currentResult);
    });

    return [header, ...fields];
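For reference, formatResults now returns the four-column header followed by one row per entry, so markdownTable renders something close to the table below (the path and byte counts are invented, assuming a base size of 10240 B and a current size of 11264 B):

| Path      | Size  | % Change | Change   |
| --------- | ----- | -------- | -------- |
| bundle.js | 11 KB | +10%     | +1 KB 🔺 |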
@@ -165,15 +151,11 @@ async function execSizeLimit() {
  return { status, output };
}

-const SIZE_RESULTS_HEADER = ['Path', 'Size'];
-const TIME_RESULTS_HEADER = ['Path', 'Size', 'Loading time (3g)', 'Running time (snapdragon)', 'Total time'];
+const SIZE_RESULTS_HEADER = ['Path', 'Size', '% Change', 'Change'];

const EmptyResult = {
  name: '-',
  size: 0,
-  running: 0,
-  loading: 0,
-  total: 0,
};

async function run() {
@@ -227,6 +209,8 @@ async function run() {
  // Else, we run size limit for the current branch, AND fetch it for the comparison branch
  let base;
  let current;
+  let baseIsNotLatest = false;
+  let baseWorkflowRun;

  try {
    const artifacts = await getArtifactsForBranchAndWorkflow(octokit, {
@@ -240,6 +224,8 @@ async function run() {
      throw new Error('No artifacts found');
    }

+    baseWorkflowRun = artifacts.workflowRun;
+
    await downloadOtherWorkflowArtifact(octokit, {
      ...repo,
      artifactName: ARTIFACT_NAME,
@@ -248,6 +234,11 @@
    });

    base = JSON.parse(await fs.readFile(resultsFilePath, { encoding: 'utf8' }));
+
+    if (!artifacts.isLatest) {
+      baseIsNotLatest = true;
+      core.info('Base artifact is not the latest one. This may lead to incorrect results.');
+    }
  } catch (error) {
    core.startGroup('Warning, unable to find base results');
    core.error(error);
@@ -271,7 +262,22 @@ async function run() {
    isNaN(thresholdNumber) || limit.hasSizeChanges(base, current, thresholdNumber) || sizeLimitComment;

  if (shouldComment) {
-    const body = [SIZE_LIMIT_HEADING, markdownTable(limit.formatResults(base, current))].join('\r\n');
+    const bodyParts = [SIZE_LIMIT_HEADING];
+
+    if (baseIsNotLatest) {
+      bodyParts.push(
+        '⚠️ **Warning:** Base artifact is not the latest one, because the latest workflow run is not done yet. This may lead to incorrect results. Try to re-run all tests to get up to date results.',
+      );
+    }
+
+    bodyParts.push(markdownTable(limit.formatResults(base, current)));
+
+    if (baseWorkflowRun) {
+      bodyParts.push('');
+      bodyParts.push(`[View base workflow run](${baseWorkflowRun.html_url})`);
+    }
+
+    const body = bodyParts.join('\r\n');

    try {
      if (!sizeLimitComment) {
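Put together, the comment body posted to the PR ends up shaped roughly like this (the heading, table values, and run URL are placeholders; the warning line appears only when baseIsNotLatest is set, and the link only when baseWorkflowRun was found):

<SIZE_LIMIT_HEADING>
⚠️ **Warning:** Base artifact is not the latest one, because the latest workflow run is not done yet. ...
| Path | Size | % Change | Change |
| ---- | ---- | -------- | ------ |
| ...  | ...  | ...      | ...    |

[View base workflow run](<baseWorkflowRun.html_url>)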
@@ -320,7 +326,7 @@ const DEFAULT_PAGE_LIMIT = 10;
 * This is a bit hacky since GitHub Actions currently does not directly
 * support downloading artifacts from other workflows
 */
-export async function getArtifactsForBranchAndWorkflow(octokit, { owner, repo, workflowName, branch, artifactName }) {
+async function getArtifactsForBranchAndWorkflow(octokit, { owner, repo, workflowName, branch, artifactName }) {
  core.startGroup(`getArtifactsForBranchAndWorkflow - workflow:"${workflowName}", branch:"${branch}"`);

  let repositoryWorkflow = null;
@@ -361,14 +367,13 @@ export async function getArtifactsForBranchAndWorkflow(octokit, { owner, repo, w
  const workflow_id = repositoryWorkflow.id;

  let currentPage = 0;
-  const completedWorkflowRuns = [];
+  let latestWorkflowRun = null;

  for await (const response of octokit.paginate.iterator(octokit.rest.actions.listWorkflowRuns, {
    owner,
    repo,
    workflow_id,
    branch,
-    status: 'completed',
    per_page: DEFAULT_PAGE_LIMIT,
    event: 'push',
  })) {
@@ -379,12 +384,47 @@ export async function getArtifactsForBranchAndWorkflow(octokit, { owner, repo, w
    }

    // Do not allow downloading artifacts from a fork.
-    completedWorkflowRuns.push(
-      ...response.data.filter(workflowRun => workflowRun.head_repository.full_name === `${owner}/${repo}`),
-    );
+    const filtered = response.data.filter(workflowRun => workflowRun.head_repository.full_name === `${owner}/${repo}`);
+
+    // Sort to ensure the latest workflow run is the first
+    filtered.sort((a, b) => {
+      return new Date(b.created_at).getTime() - new Date(a.created_at).getTime();
+    });
+
+    // Store the first workflow run, to determine if this is the latest one...
+    if (!latestWorkflowRun) {
+      latestWorkflowRun = filtered[0];
+    }
+
+    // Search through workflow artifacts until we find a workflow run w/ artifact name that we are looking for
+    for (const workflowRun of filtered) {
+      core.info(`Checking artifacts for workflow run: ${workflowRun.html_url}`);

-    if (completedWorkflowRuns.length) {
-      break;
+      const {
+        data: { artifacts },
+      } = await octokit.rest.actions.listWorkflowRunArtifacts({
+        owner,
+        repo,
+        run_id: workflowRun.id,
+      });
+
+      if (!artifacts) {
+        core.warning(
+          `Unable to fetch artifacts for branch: ${branch}, workflow: ${workflow_id}, workflowRunId: ${workflowRun.id}`,
+        );
+      } else {
+        const foundArtifact = artifacts.find(({ name }) => name === artifactName);
+        if (foundArtifact) {
+          core.info(`Found suitable artifact: ${foundArtifact.url}`);
+          return {
+            artifact: foundArtifact,
+            workflowRun,
+            isLatest: latestWorkflowRun.id === workflowRun.id,
+          };
+        } else {
+          core.info(`No artifact found for ${artifactName}, trying next workflow run...`);
+        }
+      }
    }

    if (currentPage > DEFAULT_MAX_PAGES) {
@@ -396,34 +436,6 @@ export async function getArtifactsForBranchAndWorkflow(octokit, { owner, repo, w
    currentPage++;
  }

-  // Search through workflow artifacts until we find a workflow run w/ artifact name that we are looking for
-  for (const workflowRun of completedWorkflowRuns) {
-    core.info(`Checking artifacts for workflow run: ${workflowRun.html_url}`);
-
-    const {
-      data: { artifacts },
-    } = await octokit.rest.actions.listWorkflowRunArtifacts({
-      owner,
-      repo,
-      run_id: workflowRun.id,
-    });
-
-    if (!artifacts) {
-      core.warning(
-        `Unable to fetch artifacts for branch: ${branch}, workflow: ${workflow_id}, workflowRunId: ${workflowRun.id}`,
-      );
-    } else {
-      const foundArtifact = artifacts.find(({ name }) => name === artifactName);
-      if (foundArtifact) {
-        core.info(`Found suitable artifact: ${foundArtifact.url}`);
-        return {
-          artifact: foundArtifact,
-          workflowRun,
-        };
-      }
-    }
-  }
-
  core.warning(`Artifact not found: ${artifactName}`);
  core.endGroup();
  return null;
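So after this change, callers of getArtifactsForBranchAndWorkflow get either null or an object shaped like the sketch below (field comments are the editor's reading of the return statement above):

{
  artifact,      // the artifact named artifactName, from listWorkflowRunArtifacts
  workflowRun,   // the (possibly older) push run whose artifact will be downloaded
  isLatest,      // true only when that run is also the newest matching run on the branch
}

run() above uses workflowRun for the "View base workflow run" link and isLatest to decide whether to prepend the warning to the comment.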