Skip to content

chore: fix non cli tests #19

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Jan 22, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,5 @@ script:
- python -m nose core_tests/utils -v -s --nologcapture --with-doctest --with-xunit
- python -m flake8 --max-line-length=120 core core_tests data products tests
- python -m pylint --disable=locally-disabled --rcfile=.pylintrc core data products
- find tests/cli | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc
- find core_tests | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc
- find tests | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,8 @@ python -m pylint --disable=locally-disabled --rcfile=.pylintrc core data product
Because the test files are not Python modules, pylint cannot be invoked on them directly.
Workaround:
```bash
find tests/cli | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc
find core_tests | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc
find tests | grep .py | grep -v .pyc | xargs python -m pylint --disable=locally-disabled --min-similarity-lines=15 --rcfile=.pylintrc
```

## Hints, Tips and Tricks
Expand Down
1 change: 1 addition & 0 deletions tests/apps/apps_tests.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
# pylint: disable=unused-argument
import os
import unittest

Expand Down
1 change: 1 addition & 0 deletions tests/code_sharing/migrate_web_tests.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
# pylint: disable=unused-argument
import os
import unittest

Expand Down
2 changes: 2 additions & 0 deletions tests/code_sharing/ng_new_tests.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
# pylint: disable=too-many-branches
# pylint: disable=too-many-statements
import os
import unittest

Expand Down
77 changes: 35 additions & 42 deletions tests/perf/build/build_perf_tests.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
# pylint: disable=unused-argument
# pylint: disable=undefined-variable

import json
import os
import unittest
Expand All @@ -9,7 +12,6 @@
from core.enums.platform_type import Platform
from core.settings import Settings
from core.utils.file_utils import Folder, File
from core.utils.git import Git
from core.utils.gradle import Gradle
from core.utils.json_utils import JsonUtils
from core.utils.npm import Npm
Expand All @@ -19,10 +21,10 @@
from data.templates import Template
from products.nativescript.tns import Tns

retry_count = 3
tolerance = 0.20
app_name = Settings.AppName.DEFAULT
expected_results = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json'))
RETRY_COUNT = 3
TOLERANCE = 0.20
APP_NAME = Settings.AppName.DEFAULT
EXPECTED_RESULTS = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json'))


# noinspection PyMethodMayBeStatic,PyUnusedLocal
Expand All @@ -40,15 +42,6 @@ class PrepareAndBuildPerfTests(TnsTest):
def setUpClass(cls):
TnsTest.setUpClass()

# Get master detail template locally.
local_folder = os.path.join(Settings.TEST_SUT_HOME, Template.MASTER_DETAIL_NG.name)
local_package = os.path.join(Settings.TEST_SUT_HOME, Template.MASTER_DETAIL_NG.name + '.tgz')
Folder.clean(local_folder)
Git.clone(repo_url=Template.MASTER_DETAIL_NG.repo, local_folder=local_folder)
Npm.pack(folder=local_folder, output_file=local_package)
Folder.clean(local_folder)
Template.MASTER_DETAIL_NG.local_package = local_package

def setUp(self):
TnsTest.setUp(self)

Expand All @@ -69,66 +62,66 @@ def test_001_prepare_data(self, template, template_package, change_set, bundle):
def test_200_prepare_android_initial(self, template, template_package, change_set, bundle):
actual = Helpers.get_actual_result(template, Platform.ANDROID, bundle, 'prepare_initial')
expected = Helpers.get_expected_result(template, Platform.ANDROID, bundle, 'prepare_initial')
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Initial android prepare time is not OK.'
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Initial android prepare time is not OK.'

@parameterized.expand(TEST_DATA)
@unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.')
def test_201_prepare_ios_initial(self, template, template_package, change_set, bundle):
actual = Helpers.get_actual_result(template, Platform.iOS, bundle, 'prepare_initial')
expected = Helpers.get_expected_result(template, Platform.iOS, bundle, 'prepare_initial')
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Initial ios prepare time is not OK.'
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Initial ios prepare time is not OK.'

@parameterized.expand(TEST_DATA)
def test_210_prepare_android_skip(self, template, template_package, change_set, bundle):
actual = Helpers.get_actual_result(template, Platform.ANDROID, bundle, 'prepare_skip')
expected = Helpers.get_expected_result(template, Platform.ANDROID, bundle, 'prepare_skip')
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Skip android prepare time is not OK.'
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Skip android prepare time is not OK.'

@parameterized.expand(TEST_DATA)
@unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.')
def test_211_prepare_ios_skip(self, template, template_package, change_set, bundle):
actual = Helpers.get_actual_result(template, Platform.iOS, bundle, 'prepare_skip')
expected = Helpers.get_expected_result(template, Platform.iOS, bundle, 'prepare_skip')
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Skip ios prepare time is not OK.'
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Skip ios prepare time is not OK.'

@parameterized.expand(TEST_DATA)
def test_220_prepare_android_incremental(self, template, template_package, change_set, bundle):
actual = Helpers.get_actual_result(template, Platform.ANDROID, bundle, 'prepare_incremental')
expected = Helpers.get_expected_result(template, Platform.ANDROID, bundle, 'prepare_incremental')
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Incremental android prepare time is not OK.'
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Incremental android prepare time is not OK.'

@parameterized.expand(TEST_DATA)
@unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.')
def test_221_prepare_ios_incremental(self, template, template_package, change_set, bundle):
actual = Helpers.get_actual_result(template, Platform.iOS, bundle, 'prepare_incremental')
expected = Helpers.get_expected_result(template, Platform.iOS, bundle, 'prepare_incremental')
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Incremental ios prepare time is not OK.'
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Incremental ios prepare time is not OK.'

@parameterized.expand(TEST_DATA)
def test_300_build_android_initial(self, template, template_package, change_set, bundle):
actual = Helpers.get_actual_result(template, Platform.ANDROID, bundle, 'build_initial')
expected = Helpers.get_expected_result(template, Platform.ANDROID, bundle, 'build_initial')
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Initial android build time is not OK.'
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Initial android build time is not OK.'

@parameterized.expand(TEST_DATA)
@unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.')
def test_301_build_ios_initial(self, template, template_package, change_set, bundle):
actual = Helpers.get_actual_result(template, Platform.iOS, bundle, 'build_initial')
expected = Helpers.get_expected_result(template, Platform.iOS, bundle, 'build_initial')
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Initial ios build time is not OK.'
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Initial ios build time is not OK.'

@parameterized.expand(TEST_DATA)
def test_310_build_android_incremental(self, template, template_package, change_set, bundle):
actual = Helpers.get_actual_result(template, Platform.ANDROID, bundle, 'build_incremental')
expected = Helpers.get_expected_result(template, Platform.ANDROID, bundle, 'build_incremental')
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Incremental android build time is not OK.'
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Incremental android build time is not OK.'

@parameterized.expand(TEST_DATA)
@unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.')
def test_311_build_ios_incremental(self, template, template_package, change_set, bundle):
actual = Helpers.get_actual_result(template, Platform.iOS, bundle, 'build_incremental')
expected = Helpers.get_expected_result(template, Platform.iOS, bundle, 'build_incremental')
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Incremental ios build time is not OK.'
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Incremental ios build time is not OK.'


class PrepareBuildInfo(object):
Expand All @@ -147,43 +140,43 @@ def prepare_and_build(template, platform, bundle, change_set, result_file):
prepare_incremental = 0
build_initial = 0
build_incremental = 0
for i in range(retry_count):
for _ in range(RETRY_COUNT):
Tns.kill()
Gradle.kill()
Npm.cache_clean()
Xcode.cache_clean()
Folder.clean(folder=os.path.join(Settings.TEST_RUN_HOME, app_name))
Tns.create(app_name=app_name, template=template, update=True)
Folder.clean(folder=os.path.join(Settings.TEST_RUN_HOME, APP_NAME))
Tns.create(app_name=APP_NAME, template=template, update=True)
if platform == Platform.ANDROID:
Tns.platform_add_android(app_name=app_name, framework_path=Settings.Android.FRAMEWORK_PATH)
Tns.platform_add_android(app_name=APP_NAME, framework_path=Settings.Android.FRAMEWORK_PATH)
elif platform == Platform.IOS:
Tns.platform_add_ios(app_name=app_name, framework_path=Settings.IOS.FRAMEWORK_PATH)
Tns.platform_add_ios(app_name=APP_NAME, framework_path=Settings.IOS.FRAMEWORK_PATH)
else:
raise Exception('Unknown platform: ' + str(platform))

# Prepare
time = Tns.prepare(app_name=app_name, platform=platform, bundle=bundle).duration
time = Tns.prepare(app_name=APP_NAME, platform=platform, bundle=bundle).duration
prepare_initial = prepare_initial + time
time = Tns.prepare(app_name=app_name, platform=platform, bundle=bundle).duration
time = Tns.prepare(app_name=APP_NAME, platform=platform, bundle=bundle).duration
prepare_skip = prepare_skip + time
Sync.replace(app_name=app_name, change_set=change_set)
time = Tns.prepare(app_name=app_name, platform=platform, bundle=bundle).duration
Sync.replace(app_name=APP_NAME, change_set=change_set)
time = Tns.prepare(app_name=APP_NAME, platform=platform, bundle=bundle).duration
prepare_incremental = prepare_incremental + time

# Build
time = Tns.build(app_name=app_name, platform=platform, bundle=bundle).duration
time = Tns.build(app_name=APP_NAME, platform=platform, bundle=bundle).duration
build_initial = build_initial + time
Sync.revert(app_name=app_name, change_set=change_set)
time = Tns.build(app_name=app_name, platform=platform, bundle=bundle).duration
Sync.revert(app_name=APP_NAME, change_set=change_set)
time = Tns.build(app_name=APP_NAME, platform=platform, bundle=bundle).duration
build_incremental = build_incremental + time

# Calculate averages
result = PrepareBuildInfo()
result.prepare_initial = prepare_initial / retry_count
result.prepare_skip = prepare_skip / retry_count
result.prepare_incremental = prepare_incremental / retry_count
result.build_initial = build_initial / retry_count
result.build_incremental = build_incremental / retry_count
result.prepare_initial = prepare_initial / RETRY_COUNT
result.prepare_skip = prepare_skip / RETRY_COUNT
result.prepare_incremental = prepare_incremental / RETRY_COUNT
result.build_initial = build_initial / RETRY_COUNT
result.build_incremental = build_incremental / RETRY_COUNT

# Save to results file
File.delete(path=result_file)
Expand All @@ -206,4 +199,4 @@ def get_actual_result(template, platform, bundle, entry):
def get_expected_result(template, platform, bundle, entry):
if bundle:
platform = str(platform) + '_bundle'
return expected_results[template][platform][entry]
return EXPECTED_RESULTS[template][platform][entry]
46 changes: 22 additions & 24 deletions tests/perf/build/create_perf_tests.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,20 @@
# pylint: disable=unused-argument
# pylint: disable=undefined-variable

import os

from core.base_test.tns_test import TnsTest
from core.settings import Settings
from core.utils.file_utils import Folder
from core.utils.git import Git
from core.utils.json_utils import JsonUtils
from core.utils.npm import Npm
from core.utils.perf_utils import PerfUtils
from data.templates import Template
from products.nativescript.tns import Tns

retry_count = 3
tolerance = 0.20
app_name = Settings.AppName.DEFAULT
expected_results = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json'))
RETRY_COUNT = 3
TOLERANCE = 0.20
APP_NAME = Settings.AppName.DEFAULT
EXPECTED_RESULTS = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json'))


# noinspection PyMethodMayBeStatic
Expand All @@ -33,26 +34,23 @@ def tearDownClass(cls):

def test_001_create_js_app(self):
actual = PerfUtils.get_average_time(
lambda: Tns.create(app_name=app_name, template=Template.HELLO_WORLD_JS.local_package, update=False),
retry_count=retry_count)
expected = expected_results['hello-world-js']['create']
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'JS Hello Word project create time is not OK.'
lambda: Tns.create(app_name=APP_NAME, template=Template.HELLO_WORLD_JS.local_package, update=False),
retry_count=RETRY_COUNT)
expected = EXPECTED_RESULTS['hello-world-js']['create']
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'JS Hello Word project create time is not OK.'

def test_002_create_ng_app(self):
actual = PerfUtils.get_average_time(
lambda: Tns.create(app_name=app_name, template=Template.HELLO_WORLD_NG.local_package, update=False),
retry_count=retry_count)
expected = expected_results['hello-world-ng']['create']
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'NG Hello Word project create time is not OK.'
lambda: Tns.create(app_name=APP_NAME, template=Template.HELLO_WORLD_NG.local_package, update=False),
retry_count=RETRY_COUNT)
expected = EXPECTED_RESULTS['hello-world-ng']['create']
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'NG Hello Word project create time is not OK.'

def test_010_create_master_detail_app(self):
local_folder = os.path.join(Settings.TEST_SUT_HOME, Template.MASTER_DETAIL_NG.name)
local_package = os.path.join(Settings.TEST_SUT_HOME, Template.MASTER_DETAIL_NG.name + '.tgz')
Folder.clean(local_folder)
Git.clone(repo_url=Template.MASTER_DETAIL_NG.repo, local_folder=local_folder)
Npm.pack(folder=local_folder, output_file=local_package)
Folder.clean(local_folder)
actual = PerfUtils.get_average_time(lambda: Tns.create(app_name=app_name, template=local_package, update=False),
retry_count=retry_count)
expected = expected_results['master-detail-ng']['create']
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'MasterDetailNG project create time is not OK.'
actual = PerfUtils.get_average_time(
lambda: Tns.create(app_name=APP_NAME,
template=Template.MASTER_DETAIL_NG.local_package,
update=False),
retry_count=RETRY_COUNT)
expected = EXPECTED_RESULTS['master-detail-ng']['create']
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'MasterDetailNG project create time is not OK.'
33 changes: 17 additions & 16 deletions tests/perf/build/platform_add_perf_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,10 @@
from data.templates import Template
from products.nativescript.tns import Tns

retry_count = 3
tolerance = 0.20
app_name = Settings.AppName.DEFAULT
expected_results = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json'))
RETRY_COUNT = 3
TOLERANCE = 0.20
APP_NAME = Settings.AppName.DEFAULT
EXPECTED_RESULTS = JsonUtils.read(os.path.join(Settings.TEST_RUN_HOME, 'tests', 'perf', 'data.json'))


# noinspection PyMethodMayBeStatic
Expand All @@ -32,23 +32,24 @@ def tearDownClass(cls):

def test_100_platform_add_android(self):
total_time = 0
for i in range(retry_count):
for _ in range(RETRY_COUNT):
Npm.cache_clean()
Tns.create(app_name=app_name, template=Template.HELLO_WORLD_JS.local_package, update=False)
time = Tns.platform_add_android(app_name=app_name, framework_path=Settings.Android.FRAMEWORK_PATH).duration
Tns.create(app_name=APP_NAME, template=Template.HELLO_WORLD_JS.local_package, update=False)
time = Tns.platform_add_android(app_name=APP_NAME, framework_path=Settings.Android.FRAMEWORK_PATH).duration
total_time = total_time + time
actual = total_time / retry_count
expected = expected_results['hello-world-js']['platform_add_android']
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Time for platform add android is not OK.'
actual = total_time / RETRY_COUNT
expected = EXPECTED_RESULTS['hello-world-js']['platform_add_android']
assert PerfUtils.is_value_in_range(actual, expected,
EXPECTED_RESULTS), 'Time for platform add android is not OK.'

@unittest.skipIf(Settings.HOST_OS is not OSType.OSX, 'iOS tests can be executed only on macOS.')
def test_101_platform_add_ios(self):
total_time = 0
for i in range(retry_count):
for _ in range(RETRY_COUNT):
Npm.cache_clean()
Tns.create(app_name=app_name, template=Template.HELLO_WORLD_JS.local_package, update=False)
time = Tns.platform_add_ios(app_name=app_name, framework_path=Settings.IOS.FRAMEWORK_PATH).duration
Tns.create(app_name=APP_NAME, template=Template.HELLO_WORLD_JS.local_package, update=False)
time = Tns.platform_add_ios(app_name=APP_NAME, framework_path=Settings.IOS.FRAMEWORK_PATH).duration
total_time = total_time + time
actual = total_time / retry_count
expected = expected_results['hello-world-js']['platform_add_ios']
assert PerfUtils.is_value_in_range(actual, expected, tolerance), 'Time for platform add ios is not OK.'
actual = total_time / RETRY_COUNT
expected = EXPECTED_RESULTS['hello-world-js']['platform_add_ios']
assert PerfUtils.is_value_in_range(actual, expected, TOLERANCE), 'Time for platform add ios is not OK.'