Skip to content

Refine startup time test app #4407

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Dec 6, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 7 additions & 2 deletions .github/workflows/health-metrics.yml
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,12 @@ jobs:
echo $INTEG_TESTS_GOOGLE_SERVICES | base64 -d > $BENCHMARK_APP_LOCATION
- name: Run startup-time tests (presubmit)
if: ${{ github.event_name == 'pull_request' }}
run: fireci macrobenchmark ci --pull-request
run: |
git diff --name-only HEAD~1 | \
xargs printf -- '--changed-git-paths %s\n' | \
xargs ./gradlew writeChangedProjects --output-file-path=modules.json
fireci macrobenchmark ci --pull-request --changed-modules-file modules.json
- name: Run startup-time tests (post-submit)
if: ${{ github.event_name == 'push' }}
run: fireci macrobenchmark ci --push
run: |
fireci macrobenchmark ci --push
32 changes: 26 additions & 6 deletions ci/fireci/fireciplugins/macrobenchmark/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,16 @@
import asyncio
import click
import json
import logging

from .analyze import analyzer
from .run import runner
from fireci import ci_command, ci_utils, uploader
from pathlib import Path
from typing import List

logger = logging.getLogger('fireci.macrobenchmark')


@ci_command(cls=click.Group)
def macrobenchmark():
Expand All @@ -38,7 +41,7 @@ def macrobenchmark():
)
@click.option(
'--local/--remote',
required=True,
default=True,
help='Run the test on local devices or Firebase Test Lab.'
)
@click.option(
Expand Down Expand Up @@ -132,28 +135,45 @@ def analyze(
required=True,
help='Whether the test is running for a pull request or a push event.'
)
@click.option(
'--changed-modules-file',
type=click.Path(resolve_path=True, path_type=Path),
help='Contains a list of changed modules in the current pull request.'
)
@click.option(
'--repeat',
default=10,
show_default=True,
help='Number of times to repeat the test (for obtaining more data points).'
)
@ci_command(group=macrobenchmark)
def ci(pull_request, repeat):
def ci(pull_request: bool, changed_modules_file: Path, repeat: int):
"""Run tests in CI and upload results to the metric service."""

# TODO(yifany): run tests only for affected product in pull requests

output_path = Path("macrobenchmark-test-output.json")
exception = None

try:
asyncio.run(runner.start(build_only=False, local=False, repeat=repeat, output=output_path))
if pull_request:
asyncio.run(
runner.start(
build_only=False,
local=False,
repeat=repeat,
output=output_path,
changed_modules_file=changed_modules_file,
)
)
else:
asyncio.run(runner.start(build_only=False, local=False, repeat=repeat, output=output_path))
except Exception as e:
logger.error(f"Error: {e}")
exception = e

with open(output_path) as output_file:
output = json.load(output_file)
ftl_dirs = list(filter(lambda x: x['project'] == 'all-included', output))[0]['successful_runs']
project_name = 'test-changed' if pull_request else 'test-all'
ftl_dirs = list(filter(lambda x: x['project'] == project_name, output))[0]['successful_runs']
ftl_bucket_name = 'fireescape-benchmark-results'

log = ci_utils.ci_log_link()
Expand Down
51 changes: 49 additions & 2 deletions ci/fireci/fireciplugins/macrobenchmark/run/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,18 +22,25 @@
from .test_project_builder import TestProjectBuilder
from .utils import execute
from pathlib import Path
from typing import Dict
from typing import Dict, List, Set


logger = logging.getLogger('fireci.macrobenchmark')


async def start(build_only: bool, local: bool, repeat: int, output: Path):
async def start(
build_only: bool,
local: bool,
repeat: int,
output: Path,
changed_modules_file: Path = None
):
logger.info('Starting macrobenchmark test ...')

config = _process_config_yaml()
product_versions = _assemble_all_products()
test_dir = _prepare_test_directory()
changed_traces = _process_changed_modules(changed_modules_file)
template_project_dir = Path('health-metrics/benchmark/template')

test_projects = [
Expand All @@ -42,6 +49,7 @@ async def start(build_only: bool, local: bool, repeat: int, output: Path):
test_dir,
template_project_dir,
product_versions,
changed_traces,
).build() for test_config in config['test-apps']]

if not build_only:
Expand Down Expand Up @@ -99,3 +107,42 @@ def _prepare_test_directory() -> Path:
test_dir = tempfile.mkdtemp(prefix='benchmark-test-')
logger.info(f'Temporary test directory created at: {test_dir}')
return Path(test_dir)


def _process_changed_modules(path: Path) -> List[str]:
trace_names = {
":appcheck": ["fire-app-check"],
":firebase-abt": ["fire-abt"],
":firebase-appdistribution": ["fire-appdistribution"],
":firebase-config": ["fire-rc"],
":firebase-common": ["Firebase", "ComponentDiscovery", "Runtime"],
":firebase-components": ["Firebase", "ComponentDiscovery", "Runtime"],
":firebase-database": ["fire-rtdb"],
":firebase-datatransport": ["fire-transport"],
":firebase-dynamic-links": ["fire-dl"],
":firebase-crashlytics": ["fire-cls"],
":firebase-crashlytics-ndk": ["fire-cls"],
":firebase-firestore": ["fire-fst"],
":firebase-functions": ["fire-fn"],
":firebase-inappmessaging": ["fire-fiam"],
":firebase-inappmessaging-display": ["fire-fiamd"],
":firebase-installations": ["fire-installations"],
":firebase-installations-interop": ["fire-installations"],
":firebase-messaging": ["fire-fcm"],
":firebase-messaging-directboot": ["fire-fcm"],
":firebase-ml-modeldownloader": ["firebase-ml-modeldownloader"],
":firebase-perf": ["fire-perf"],
":firebase-storage": ["fire-gcs"],
":transport": ["fire-transport"],
}

results: Set[str] = set()
if path:
with open(path) as changed_modules_file:
changed_modules = json.load(changed_modules_file)
for module in changed_modules:
for product in trace_names:
if module.startswith(product):
results.update(trace_names[product])
logger.info(f"Extracted changed traces {results} from {path}")
return list(results)
15 changes: 11 additions & 4 deletions ci/fireci/fireciplugins/macrobenchmark/run/test_project_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from .test_project import TestProject
from .utils import execute
from pathlib import Path
from typing import Any, Dict
from typing import Any, Dict, List


logger = logging.getLogger('fireci.macrobenchmark')
Expand All @@ -33,13 +33,15 @@ def __init__(
test_config: Any,
test_dir: Path,
template_project_dir: Path,
product_versions: Dict[str, str]
product_versions: Dict[str, str],
changed_traces: List[str],
):
self.test_config = test_config
self.template_project_dir = template_project_dir
self.product_versions = product_versions
self.changed_traces = changed_traces

self.name = test_config['name']
self.name = 'test-changed' if changed_traces else 'test-all'
self.logger = LogDecorator(logger, self.name)
self.project_dir = test_dir.joinpath(self.name)

Expand All @@ -66,7 +68,7 @@ def _flesh_out_mustache_template_files(self):
mustache_context = {
'm2repository': os.path.abspath('build/m2repository'),
'plugins': self.test_config.get('plugins', []),
'traces': self.test_config.get('traces', []),
'traces': [],
'dependencies': [],
}

Expand All @@ -79,6 +81,11 @@ def _flesh_out_mustache_template_files(self):
dependency = {'key': dep, 'version': self.product_versions[dep]}
mustache_context['dependencies'].append(dependency)

if 'traces' in self.test_config:
for trace in self.test_config['traces']:
if not self.changed_traces or trace in self.changed_traces:
mustache_context['traces'].append(trace)

renderer = pystache.Renderer()
mustaches = self.project_dir.rglob('**/*.mustache')
for mustache in mustaches:
Expand Down
1 change: 1 addition & 0 deletions health-metrics/benchmark/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ test-apps:
- fire-installations
- firebase-ml-modeldownloader
- fire-perf
- fire-perf-early
- fire-rc
- fire-rtdb
- fire-transport
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.


package com.google.firebase

import com.google.firebase.FirebaseApp

/**
 * Eagerly initializes every component registered on [app].
 *
 * Declared in the `com.google.firebase` package — presumably so it can reach
 * the restricted-visibility [FirebaseApp.initializeAllComponents] API on
 * behalf of benchmark code living in other packages (confirm the visibility
 * of `initializeAllComponents` in the firebase-common source).
 */
internal fun initializeAllComponentsForBenchmark(app: FirebaseApp) {
  app.initializeAllComponents()
}

Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,15 @@ package com.google.firebase.benchmark

import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import com.google.firebase.FirebaseApp
import com.google.firebase.initializeAllComponentsForBenchmark

/** Launch activity of the benchmark test app. */
class MainActivity : AppCompatActivity() {
  override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_main)

    // Force-initialize all Firebase components on the default app —
    // NOTE(review): presumably so component initialization cost is included
    // in the startup-time measurement; confirm against the benchmark traces.
    val app = FirebaseApp.getInstance()
    initializeAllComponentsForBenchmark(app)
  }
}