From d87248f7f9709fa0a364f15aebeadcba1045ac2b Mon Sep 17 00:00:00 2001 From: Dimitar Topuzov Date: Tue, 22 Jan 2019 07:50:00 -0800 Subject: [PATCH] chore: fix non cli tests Fix lint on code sharing and perf tests. --- .travis.yml | 3 +- README.md | 3 +- tests/apps/apps_tests.py | 1 + tests/code_sharing/migrate_web_tests.py | 1 + tests/code_sharing/ng_new_tests.py | 2 + tests/perf/build/build_perf_tests.py | 77 ++++++++++----------- tests/perf/build/create_perf_tests.py | 46 ++++++------ tests/perf/build/platform_add_perf_tests.py | 33 ++++----- 8 files changed, 82 insertions(+), 84 deletions(-) diff --git a/.travis.yml b/.travis.yml index c19e56b1..923e6549 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,4 +13,5 @@ script: - python -m nose core_tests/utils -v -s --nologcapture --with-doctest --with-xunit - python -m flake8 --max-line-length=120 core core_tests data products tests - python -m pylint --disable=locally-disabled --rcfile=.pylintrc core data products - - find tests/cli | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc \ No newline at end of file + - find core_tests | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc + - find tests | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc \ No newline at end of file diff --git a/README.md b/README.md index bd4afeb2..dba2414f 100644 --- a/README.md +++ b/README.md @@ -85,7 +85,8 @@ python -m pylint --disable=locally-disabled --rcfile=.pylintrc core data product Due to the fact tests are not modules pylint can not be executed directly. 
Workaround: ```bash -find tests/cli | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc +find core_tests | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc +find tests | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc ``` ## Hints, Tips and Tricks diff --git a/tests/apps/apps_tests.py b/tests/apps/apps_tests.py index cd78d04d..068c353d 100644 --- a/tests/apps/apps_tests.py +++ b/tests/apps/apps_tests.py @@ -1,3 +1,4 @@ +# pylint: disable=unused-argument import os import unittest diff --git a/tests/code_sharing/migrate_web_tests.py b/tests/code_sharing/migrate_web_tests.py index 273a519c..82089af0 100644 --- a/tests/code_sharing/migrate_web_tests.py +++ b/tests/code_sharing/migrate_web_tests.py @@ -1,3 +1,4 @@ +# pylint: disable=unused-argument import os import unittest diff --git a/tests/code_sharing/ng_new_tests.py b/tests/code_sharing/ng_new_tests.py index 37ec3a8e..f3ea9e1e 100644 --- a/tests/code_sharing/ng_new_tests.py +++ b/tests/code_sharing/ng_new_tests.py @@ -1,3 +1,5 @@ +# pylint: disable=too-many-branches +# pylint: disable=too-many-statements import os import unittest diff --git a/tests/perf/build/build_perf_tests.py b/tests/perf/build/build_perf_tests.py index 86ecc425..e17e4fe5 100644 --- a/tests/perf/build/build_perf_tests.py +++ b/tests/perf/build/build_perf_tests.py @@ -1,3 +1,6 @@ +# pylint: disable=unused-argument +# pylint: disable=undefined-variable + import json import os import unittest @@ -9,7 +12,6 @@ from core.enums.platform_type import Platform from core.settings import Settings from core.utils.file_utils import Folder, File -from core.utils.git import Git from core.utils.gradle import Gradle from core.utils.json_utils import JsonUtils from core.utils.npm import Npm @@ -19,10 +21,10 @@ from data.templates import Template from 
products.nativescript.tns import Tns -retry_count = 3 -tolerance = 0.20 -app_name = Settings.AppName.DEFAULT -expected_results = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json')) +RETRY_COUNT = 3 +TOLERANCE = 0.20 +APP_NAME = Settings.AppName.DEFAULT +EXPECTED_RESULTS = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json')) # noinspection PyMethodMayBeStatic,PyUnusedLocal @@ -40,15 +42,6 @@ class PrepareAndBuildPerfTests(TnsTest): def setUpClass(cls): TnsTest.setUpClass() - # Get master detail template locally. - local_folder = os.path.join(Settings.TEST_SUT_HOME, Template.MASTER_DETAIL_NG.name) - local_package = os.path.join(Settings.TEST_SUT_HOME, Template.MASTER_DETAIL_NG.name + '.tgz') - Folder.clean(local_folder) - Git.clone(repo_url=Template.MASTER_DETAIL_NG.repo, local_folder=local_folder) - Npm.pack(folder=local_folder, output_file=local_package) - Folder.clean(local_folder) - Template.MASTER_DETAIL_NG.local_package = local_package - def setUp(self): TnsTest.setUp(self) @@ -69,66 +62,66 @@ def test_001_prepare_data(self, template, template_package, change_set, bundle): def test_200_prepare_android_initial(self, template, template_package, change_set, bundle): actual = Helpers.get_actual_result(template, Platform.ANDROID, bundle, 'prepare_initial') expected = Helpers.get_expected_result(template, Platform.ANDROID, bundle, 'prepare_initial') - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Initial android prepare time is not OK.' + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Initial android prepare time is not OK.' 
@parameterized.expand(TEST_DATA) @unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.') def test_201_prepare_ios_initial(self, template, template_package, change_set, bundle): actual = Helpers.get_actual_result(template, Platform.iOS, bundle, 'prepare_initial') expected = Helpers.get_expected_result(template, Platform.iOS, bundle, 'prepare_initial') - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Initial ios prepare time is not OK.' + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Initial ios prepare time is not OK.' @parameterized.expand(TEST_DATA) def test_210_prepare_android_skip(self, template, template_package, change_set, bundle): actual = Helpers.get_actual_result(template, Platform.ANDROID, bundle, 'prepare_skip') expected = Helpers.get_expected_result(template, Platform.ANDROID, bundle, 'prepare_skip') - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Skip android prepare time is not OK.' + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Skip android prepare time is not OK.' @parameterized.expand(TEST_DATA) @unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.') def test_211_prepare_ios_skip(self, template, template_package, change_set, bundle): actual = Helpers.get_actual_result(template, Platform.iOS, bundle, 'prepare_skip') expected = Helpers.get_expected_result(template, Platform.iOS, bundle, 'prepare_skip') - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Skip ios prepare time is not OK.' + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Skip ios prepare time is not OK.' 
@parameterized.expand(TEST_DATA) def test_220_prepare_android_incremental(self, template, template_package, change_set, bundle): actual = Helpers.get_actual_result(template, Platform.ANDROID, bundle, 'prepare_incremental') expected = Helpers.get_expected_result(template, Platform.ANDROID, bundle, 'prepare_incremental') - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Incremental android prepare time is not OK.' + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Incremental android prepare time is not OK.' @parameterized.expand(TEST_DATA) @unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.') def test_221_prepare_ios_incremental(self, template, template_package, change_set, bundle): actual = Helpers.get_actual_result(template, Platform.iOS, bundle, 'prepare_incremental') expected = Helpers.get_expected_result(template, Platform.iOS, bundle, 'prepare_incremental') - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Incremental ios prepare time is not OK.' + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Incremental ios prepare time is not OK.' @parameterized.expand(TEST_DATA) def test_300_build_android_initial(self, template, template_package, change_set, bundle): actual = Helpers.get_actual_result(template, Platform.ANDROID, bundle, 'build_initial') expected = Helpers.get_expected_result(template, Platform.ANDROID, bundle, 'build_initial') - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Initial android build time is not OK.' + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Initial android build time is not OK.' 
@parameterized.expand(TEST_DATA) @unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.') def test_301_build_ios_initial(self, template, template_package, change_set, bundle): actual = Helpers.get_actual_result(template, Platform.iOS, bundle, 'build_initial') expected = Helpers.get_expected_result(template, Platform.iOS, bundle, 'build_initial') - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Initial ios build time is not OK.' + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Initial ios build time is not OK.' @parameterized.expand(TEST_DATA) def test_310_build_android_incremental(self, template, template_package, change_set, bundle): actual = Helpers.get_actual_result(template, Platform.ANDROID, bundle, 'build_incremental') expected = Helpers.get_expected_result(template, Platform.ANDROID, bundle, 'build_incremental') - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Incremental android build time is not OK.' + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Incremental android build time is not OK.' @parameterized.expand(TEST_DATA) @unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.') def test_311_build_ios_incremental(self, template, template_package, change_set, bundle): actual = Helpers.get_actual_result(template, Platform.iOS, bundle, 'build_incremental') expected = Helpers.get_expected_result(template, Platform.iOS, bundle, 'build_incremental') - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Incremental ios build time is not OK.' + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Incremental ios build time is not OK.' 
class PrepareBuildInfo(object): @@ -147,43 +140,43 @@ def prepare_and_build(template, platform, bundle, change_set, result_file): prepare_incremental = 0 build_initial = 0 build_incremental = 0 - for i in range(retry_count): + for _ in range(RETRY_COUNT): Tns.kill() Gradle.kill() Npm.cache_clean() Xcode.cache_clean() - Folder.clean(folder=os.path.join(Settings.TEST_RUN_HOME, app_name)) - Tns.create(app_name=app_name, template=template, update=True) + Folder.clean(folder=os.path.join(Settings.TEST_RUN_HOME, APP_NAME)) + Tns.create(app_name=APP_NAME, template=template, update=True) if platform == Platform.ANDROID: - Tns.platform_add_android(app_name=app_name, framework_path=Settings.Android.FRAMEWORK_PATH) + Tns.platform_add_android(app_name=APP_NAME, framework_path=Settings.Android.FRAMEWORK_PATH) elif platform == Platform.IOS: - Tns.platform_add_ios(app_name=app_name, framework_path=Settings.IOS.FRAMEWORK_PATH) + Tns.platform_add_ios(app_name=APP_NAME, framework_path=Settings.IOS.FRAMEWORK_PATH) else: raise Exception('Unknown platform: ' + str(platform)) # Prepare - time = Tns.prepare(app_name=app_name, platform=platform, bundle=bundle).duration + time = Tns.prepare(app_name=APP_NAME, platform=platform, bundle=bundle).duration prepare_initial = prepare_initial + time - time = Tns.prepare(app_name=app_name, platform=platform, bundle=bundle).duration + time = Tns.prepare(app_name=APP_NAME, platform=platform, bundle=bundle).duration prepare_skip = prepare_skip + time - Sync.replace(app_name=app_name, change_set=change_set) - time = Tns.prepare(app_name=app_name, platform=platform, bundle=bundle).duration + Sync.replace(app_name=APP_NAME, change_set=change_set) + time = Tns.prepare(app_name=APP_NAME, platform=platform, bundle=bundle).duration prepare_incremental = prepare_incremental + time # Build - time = Tns.build(app_name=app_name, platform=platform, bundle=bundle).duration + time = Tns.build(app_name=APP_NAME, platform=platform, bundle=bundle).duration 
build_initial = build_initial + time - Sync.revert(app_name=app_name, change_set=change_set) - time = Tns.build(app_name=app_name, platform=platform, bundle=bundle).duration + Sync.revert(app_name=APP_NAME, change_set=change_set) + time = Tns.build(app_name=APP_NAME, platform=platform, bundle=bundle).duration build_incremental = build_incremental + time # Calculate averages result = PrepareBuildInfo() - result.prepare_initial = prepare_initial / retry_count - result.prepare_skip = prepare_skip / retry_count - result.prepare_incremental = prepare_incremental / retry_count - result.build_initial = build_initial / retry_count - result.build_incremental = build_incremental / retry_count + result.prepare_initial = prepare_initial / RETRY_COUNT + result.prepare_skip = prepare_skip / RETRY_COUNT + result.prepare_incremental = prepare_incremental / RETRY_COUNT + result.build_initial = build_initial / RETRY_COUNT + result.build_incremental = build_incremental / RETRY_COUNT # Save to results file File.delete(path=result_file) @@ -206,4 +199,4 @@ def get_actual_result(template, platform, bundle, entry): def get_expected_result(template, platform, bundle, entry): if bundle: platform = str(platform) + '_bundle' - return expected_results[template][platform][entry] + return EXPECTED_RESULTS[template][platform][entry] diff --git a/tests/perf/build/create_perf_tests.py b/tests/perf/build/create_perf_tests.py index 8e7c60c1..5fa0d204 100644 --- a/tests/perf/build/create_perf_tests.py +++ b/tests/perf/build/create_perf_tests.py @@ -1,19 +1,20 @@ +# pylint: disable=unused-argument +# pylint: disable=undefined-variable + import os from core.base_test.tns_test import TnsTest from core.settings import Settings -from core.utils.file_utils import Folder -from core.utils.git import Git from core.utils.json_utils import JsonUtils from core.utils.npm import Npm from core.utils.perf_utils import PerfUtils from data.templates import Template from products.nativescript.tns import Tns 
-retry_count = 3 -tolerance = 0.20 -app_name = Settings.AppName.DEFAULT -expected_results = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json')) +RETRY_COUNT = 3 +TOLERANCE = 0.20 +APP_NAME = Settings.AppName.DEFAULT +EXPECTED_RESULTS = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json')) # noinspection PyMethodMayBeStatic @@ -33,26 +34,23 @@ def tearDownClass(cls): def test_001_create_js_app(self): actual = PerfUtils.get_average_time( - lambda: Tns.create(app_name=app_name, template=Template.HELLO_WORLD_JS.local_package, update=False), - retry_count=retry_count) - expected = expected_results['hello-world-js']['create'] - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'JS Hello Word project create time is not OK.' + lambda: Tns.create(app_name=APP_NAME, template=Template.HELLO_WORLD_JS.local_package, update=False), + retry_count=RETRY_COUNT) + expected = EXPECTED_RESULTS['hello-world-js']['create'] + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'JS Hello Word project create time is not OK.' def test_002_create_ng_app(self): actual = PerfUtils.get_average_time( - lambda: Tns.create(app_name=app_name, template=Template.HELLO_WORLD_NG.local_package, update=False), - retry_count=retry_count) - expected = expected_results['hello-world-ng']['create'] - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'NG Hello Word project create time is not OK.' + lambda: Tns.create(app_name=APP_NAME, template=Template.HELLO_WORLD_NG.local_package, update=False), + retry_count=RETRY_COUNT) + expected = EXPECTED_RESULTS['hello-world-ng']['create'] + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'NG Hello Word project create time is not OK.' 
def test_010_create_master_detail_app(self): - local_folder = os.path.join(Settings.TEST_SUT_HOME, Template.MASTER_DETAIL_NG.name) - local_package = os.path.join(Settings.TEST_SUT_HOME, Template.MASTER_DETAIL_NG.name + '.tgz') - Folder.clean(local_folder) - Git.clone(repo_url=Template.MASTER_DETAIL_NG.repo, local_folder=local_folder) - Npm.pack(folder=local_folder, output_file=local_package) - Folder.clean(local_folder) - actual = PerfUtils.get_average_time(lambda: Tns.create(app_name=app_name, template=local_package, update=False), - retry_count=retry_count) - expected = expected_results['master-detail-ng']['create'] - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'MasterDetailNG project create time is not OK.' + actual = PerfUtils.get_average_time( + lambda: Tns.create(app_name=APP_NAME, + template=Template.MASTER_DETAIL_NG.local_package, + update=False), + retry_count=RETRY_COUNT) + expected = EXPECTED_RESULTS['master-detail-ng']['create'] + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'MasterDetailNG project create time is not OK.' 
diff --git a/tests/perf/build/platform_add_perf_tests.py b/tests/perf/build/platform_add_perf_tests.py index e942067d..3ba515c6 100644 --- a/tests/perf/build/platform_add_perf_tests.py +++ b/tests/perf/build/platform_add_perf_tests.py @@ -10,10 +10,10 @@ from data.templates import Template from products.nativescript.tns import Tns -retry_count = 3 -tolerance = 0.20 -app_name = Settings.AppName.DEFAULT -expected_results = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json')) +RETRY_COUNT = 3 +TOLERANCE = 0.20 +APP_NAME = Settings.AppName.DEFAULT +EXPECTED_RESULTS = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json')) # noinspection PyMethodMayBeStatic @@ -32,23 +32,24 @@ def tearDownClass(cls): def test_100_platform_add_android(self): total_time = 0 - for i in range(retry_count): + for _ in range(RETRY_COUNT): Npm.cache_clean() - Tns.create(app_name=app_name, template=Template.HELLO_WORLD_JS.local_package, update=False) - time = Tns.platform_add_android(app_name=app_name, framework_path=Settings.Android.FRAMEWORK_PATH).duration + Tns.create(app_name=APP_NAME, template=Template.HELLO_WORLD_JS.local_package, update=False) + time = Tns.platform_add_android(app_name=APP_NAME, framework_path=Settings.Android.FRAMEWORK_PATH).duration total_time = total_time + time - actual = total_time / retry_count - expected = expected_results['hello-world-js']['platform_add_android'] - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Time for platform add android is not OK.' + actual = total_time / RETRY_COUNT + expected = EXPECTED_RESULTS['hello-world-js']['platform_add_android'] + assert PerfUtils.is_value_in_range(actual, expected, + TOLERANCE), 'Time for platform add android is not OK.' 
@unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.') def test_101_platform_add_ios(self): total_time = 0 - for i in range(retry_count): + for _ in range(RETRY_COUNT): Npm.cache_clean() - Tns.create(app_name=app_name, template=Template.HELLO_WORLD_JS.local_package, update=False) - time = Tns.platform_add_ios(app_name=app_name, framework_path=Settings.IOS.FRAMEWORK_PATH).duration + Tns.create(app_name=APP_NAME, template=Template.HELLO_WORLD_JS.local_package, update=False) + time = Tns.platform_add_ios(app_name=APP_NAME, framework_path=Settings.IOS.FRAMEWORK_PATH).duration total_time = total_time + time - actual = total_time / retry_count - expected = expected_results['hello-world-js']['platform_add_ios'] - assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Time for platform add ios is not OK.' + actual = total_time / RETRY_COUNT + expected = EXPECTED_RESULTS['hello-world-js']['platform_add_ios'] + assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Time for platform add ios is not OK.'