import re
import shutil
import sys
+ import tempfile
import uuid

import click

from fireci import ci_command
from fireci import ci_utils
- from fireci.dir_utils import chdir
from fireci import uploader
+ from fireci.dir_utils import chdir

_logger = logging.getLogger('fireci.macrobenchmark')


+ @click.option(
+   '--build-only/--no-build-only',
+   default=False,
+   help='Whether to only build tracing test apps or to also run them on FTL afterwards'
+ )
@ci_command()
- def macrobenchmark():
+ def macrobenchmark(build_only):
  """Measures app startup times for Firebase SDKs."""
-   asyncio.run(_launch_macrobenchmark_test())
+   asyncio.run(_launch_macrobenchmark_test(build_only))
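+
+ # Example invocation, assuming the standard fireci CLI entry point and that the command
+ # name is taken from the function name above:
+ #   fireci macrobenchmark --build-only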


- async def _launch_macrobenchmark_test():
+ async def _launch_macrobenchmark_test(build_only):
  _logger.info('Starting macrobenchmark test...')

-   artifact_versions, config, _, _ = await asyncio.gather(
-     _parse_artifact_versions(),
-     _parse_config_yaml(),
-     _create_gradle_wrapper(),
-     _copy_google_services(),
-   )
-
+   artifact_versions = await _assemble_all_artifacts()
  _logger.info(f'Artifact versions: {artifact_versions}')

-   with chdir('health-metrics/macrobenchmark'):
-     runners = [MacrobenchmarkTest(k, v, artifact_versions) for k, v in config.items()]
-     results = await asyncio.gather(*[x.run() for x in runners], return_exceptions=True)
+   test_dir = await _prepare_test_directory()
+   _logger.info(f'Directory for test apps: {test_dir}')
+
+   config = await _process_config_yaml()
+   _logger.info(f'Processed yaml configurations: {config}')
+
+   tests = [MacrobenchmarkTest(app, artifact_versions, os.getcwd(), test_dir) for app in config['test-apps']]
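+   # Note: os.getcwd() is assumed to be the repository root here, i.e. fireci is invoked
+   # from the root of the checkout.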

-   await _post_processing(results)
+   _logger.info(f'Building {len(tests)} macrobenchmark test apps...')
+   # TODO(yifany): investigate why it is much slower with asyncio.gather
+   #  - on corp workstations (9 min) than M1 macbook pro (3 min)
+   #  - with gradle 7.5.1 (9 min) than gradle 6.9.2 (5 min)
+   # await asyncio.gather(*[x.build() for x in tests])
+   for test in tests:
+     await test.build()
+
+   if not build_only:
+     _logger.info(f'Submitting {len(tests)} tests to Firebase Test Lab...')
+     results = await asyncio.gather(*[x.test() for x in tests], return_exceptions=True)
+     await _post_processing(results)

  _logger.info('Macrobenchmark test finished.')


- async def _parse_artifact_versions():
-   proc = await asyncio.subprocess.create_subprocess_exec('./gradlew', 'assembleAllForSmokeTests')
-   await proc.wait()
+ async def _assemble_all_artifacts():
+   await (await asyncio.create_subprocess_exec('./gradlew', 'assembleAllForSmokeTests')).wait()
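+   # Populates build/m2repository with locally built SDK artifacts; the JSON read below
+   # lists their Maven coordinates and versions.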

  with open('build/m2repository/changed-artifacts.json') as json_file:
    artifacts = json.load(json_file)
@@ -78,35 +92,36 @@ def _artifact_key_version(artifact):
  return f'{group_id}:{artifact_id}', version


- async def _parse_config_yaml():
-   with open('health-metrics/macrobenchmark/config.yaml') as yaml_file:
-     return yaml.safe_load(yaml_file)
+ async def _process_config_yaml():
+   with open('health-metrics/benchmark/config.yaml') as yaml_file:
+     config = yaml.safe_load(yaml_file)
+     for app in config['test-apps']:
+       app['plugins'] = app.get('plugins', [])
+       app['traces'] = app.get('traces', [])
+       app['plugins'].extend(config['common-plugins'])
+       app['traces'].extend(config['common-traces'])
+     return config
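+ # For illustration, config.yaml is expected to have roughly this shape (the keys are the
+ # ones read above and in MacrobenchmarkTest; the concrete values here are hypothetical):
+ #
+ #   common-plugins: [...]
+ #   common-traces: [...]
+ #   test-apps:
+ #     - sdk: firebase-common
+ #       name: baseline
+ #       dependencies: [...]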


- async def _create_gradle_wrapper():
-   with open('health-metrics/macrobenchmark/settings.gradle', 'w'):
-     pass
+ async def _prepare_test_directory():
+   test_dir = tempfile.mkdtemp(prefix='benchmark-test-')

-   proc = await asyncio.subprocess.create_subprocess_exec(
-     './gradlew',
-     'wrapper',
-     '--gradle-version',
-     '6.9',
-     '--project-dir',
-     'health-metrics/macrobenchmark'
-   )
-   await proc.wait()
+   # Required for creating gradle wrapper, as the dir is not defined in the root settings.gradle
+   open(os.path.join(test_dir, 'settings.gradle'), 'w').close()

+   command = ['./gradlew', 'wrapper', '--gradle-version', '7.5.1', '--project-dir', test_dir]
+   await (await asyncio.create_subprocess_exec(*command)).wait()
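+   # The wrapper generated here (gradlew, gradlew.bat, and the gradle/ dir) is copied into
+   # each generated test app by _create_benchmark_projects.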

- async def _copy_google_services():
-   if 'FIREBASE_CI' in os.environ:
-     src = os.environ['FIREBASE_GOOGLE_SERVICES_PATH']
-     dst = 'health-metrics/macrobenchmark/template/app/google-services.json'
-     _logger.info(f'Running on CI. Copying "{src}" to "{dst}"...')
-     shutil.copyfile(src, dst)
+   return test_dir


async def _post_processing(results):
+   _logger.info(f'Macrobenchmark results: {results}')
+
+   if os.getenv('CI') is None:
+     _logger.info('Running locally. Results upload skipped.')
+     return
+
  # Upload successful measurements to the metric service
  measurements = []
  for result in results:
@@ -130,51 +145,63 @@ class MacrobenchmarkTest:
  """Builds the test based on configurations and runs the test on FTL."""
  def __init__(
      self,
-       sdk_name,
      test_app_config,
      artifact_versions,
+       repo_root_dir,
+       test_dir,
      logger=_logger
  ):
-     self.sdk_name = sdk_name
    self.test_app_config = test_app_config
    self.artifact_versions = artifact_versions
-     self.logger = MacrobenchmarkLoggerAdapter(logger, sdk_name)
-     self.test_app_dir = os.path.join('test-apps', test_app_config['name'])
+     self.repo_root_dir = repo_root_dir
+     self.test_dir = test_dir
+     self.logger = MacrobenchmarkLoggerAdapter(logger, test_app_config['sdk'])
+     self.test_app_dir = os.path.join(test_dir, test_app_config['name'])
    self.test_results_bucket = 'fireescape-benchmark-results'
    self.test_results_dir = str(uuid.uuid4())
    self.gcs_client = storage.Client()

-   async def run(self):
-     """Starts the workflow of src creation, apks assembly, FTL testing and results upload."""
+   async def build(self):
+     """Creates test app project and assembles app and test apks."""
    await self._create_benchmark_projects()
    await self._assemble_benchmark_apks()
+
+   async def test(self):
+     """Runs benchmark tests on FTL and fetches FTL results from GCS."""
    await self._execute_benchmark_tests()
    return await self._aggregate_benchmark_results()

  async def _create_benchmark_projects(self):
    app_name = self.test_app_config['name']
    self.logger.info(f'Creating test app "{app_name}"...')

-     mustache_context = await self._prepare_mustache_context()
+     self.logger.info(f'Copying project template files into "{self.test_app_dir}"...')
+     template_dir = os.path.join(self.repo_root_dir, 'health-metrics/benchmark/template')
+     shutil.copytree(template_dir, self.test_app_dir)
+
+     self.logger.info(f'Copying gradle wrapper binary into "{self.test_app_dir}"...')
+     shutil.copy(os.path.join(self.test_dir, 'gradlew'), self.test_app_dir)
+     shutil.copy(os.path.join(self.test_dir, 'gradlew.bat'), self.test_app_dir)
+     shutil.copytree(os.path.join(self.test_dir, 'gradle'), os.path.join(self.test_app_dir, 'gradle'))

-     shutil.copytree('template', self.test_app_dir)
    with chdir(self.test_app_dir):
+       mustache_context = await self._prepare_mustache_context()
      renderer = pystache.Renderer()
      mustaches = glob.glob('**/*.mustache', recursive=True)
      for mustache in mustaches:
+         self.logger.info(f'Processing template file: {mustache}')
        result = renderer.render_path(mustache, mustache_context)
-         original_name = mustache[:-9]  # TODO(yifany): mustache.removesuffix('.mustache')
+         original_name = mustache.removesuffix('.mustache')
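+         # str.removesuffix requires Python 3.9+.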
        with open(original_name, 'w') as file:
          file.write(result)

  async def _assemble_benchmark_apks(self):
-     executable = './gradlew'
-     args = ['assemble', 'assembleAndroidTest', '--project-dir', self.test_app_dir]
-     await self._exec_subprocess(executable, args)
+     with chdir(self.test_app_dir):
+       await self._exec_subprocess('./gradlew', ['assemble'])
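+     # A plain `assemble` is expected to produce the benchmark variants of both modules,
+     # i.e. the app-benchmark.apk and macrobenchmark-benchmark.apk globbed for below.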

  async def _execute_benchmark_tests(self):
-     app_apk_path = glob.glob(f'{self.test_app_dir}/app/**/*.apk', recursive=True)[0]
-     test_apk_path = glob.glob(f'{self.test_app_dir}/benchmark/**/*.apk', recursive=True)[0]
+     app_apk_path = glob.glob(f'{self.test_app_dir}/**/app-benchmark.apk', recursive=True)[0]
+     test_apk_path = glob.glob(f'{self.test_app_dir}/**/macrobenchmark-benchmark.apk', recursive=True)[0]

    self.logger.info(f'App apk: {app_apk_path}')
    self.logger.info(f'Test apk: {test_apk_path}')
@@ -189,7 +216,7 @@ async def _execute_benchmark_tests(self):
    args += ['--type', 'instrumentation']
    args += ['--app', app_apk_path]
    args += ['--test', test_apk_path]
-     args += ['--device', 'model=redfin,version=30,locale=en,orientation=portrait']
+     args += ['--device', 'model=oriole,version=32,locale=en,orientation=portrait']
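+     # 'oriole' is the Firebase Test Lab model id for Pixel 6 (API 32); the previous
+     # 'redfin' device was a Pixel 5 running API 30.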
    args += ['--directories-to-pull', '/sdcard/Download']
    args += ['--results-bucket', f'gs://{self.test_results_bucket}']
    args += ['--results-dir', self.test_results_dir]
@@ -200,19 +227,13 @@ async def _execute_benchmark_tests(self):
    await self._exec_subprocess(executable, args)

  async def _prepare_mustache_context(self):
-     app_name = self.test_app_config['name']
-
    mustache_context = {
+       'm2repository': os.path.join(self.repo_root_dir, 'build/m2repository'),
+       'plugins': self.test_app_config.get('plugins', []),
+       'traces': self.test_app_config.get('traces', []),
      'dependencies': [],
    }
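+     # The m2repository path presumably lets the rendered Gradle templates resolve the
+     # locally built SDK artifacts as a local Maven repository.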

-     if app_name != 'baseline':
-       mustache_context['plugins'].append('com.google.gms.google-services')
-
-     if 'plugins' in self.test_app_config:
-       mustache_context['plugins'].extend(self.test_app_config['plugins'])
-
    if 'dependencies' in self.test_app_config:
      for dep in self.test_app_config['dependencies']:
        if '@' in dep:
@@ -234,9 +255,9 @@ async def _aggregate_benchmark_results(self):
    for benchmark in benchmarks:
      method = benchmark['name']
      clazz = benchmark['className'].split('.')[-1]
-       runs = benchmark['metrics']['startupMs']['runs']
+       runs = benchmark['metrics']['timeToInitialDisplayMs']['runs']
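+       # Newer androidx macrobenchmark releases report startup as timeToInitialDisplayMs
+       # (the metric key was previously startupMs).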
      results.append({
-         'sdk': self.sdk_name,
+         'sdk': self.test_app_config['sdk'],
        'device': device,
        'name': f'{clazz}.{method}',
        'min': min(runs),