Changed --no-performance-tests to --run-performance-tests so that performance tests are skipped by default, and documented the change in CHANGELOG.md

VakarisZ 2021-04-29 17:42:59 +03:00 committed by VakarisZ
parent 9a169629bf
commit 5f9672c4c4
3 changed files with 11 additions and 7 deletions
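For context, a rough sketch of how the inverted default plays out when invoking the blackbox suite. The pytest.main() calls and the test-directory path below are illustrative assumptions, not part of this commit:

    import pytest

    # Default invocation: tests marked @pytest.mark.run_performance_tests are skipped.
    pytest.main(["envs/monkey_zoo/blackbox"])

    # Opting in with the new flag runs the performance tests as well.
    pytest.main(["envs/monkey_zoo/blackbox", "--run-performance-tests"])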


@@ -22,6 +22,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Use pipenv for python dependency management. #1091
 - Moved unit tests to a dedicated `tests/` directory to improve pytest
   collection time. #1102
+- Added `--run-performance-tests` flag to BB tests. If this flag is not specified,
+  performance tests are skipped.
 
 ### Fixed
 - Attempted to delete a directory when monkey config reset was called. #1054


@@ -22,10 +22,10 @@ def pytest_addoption(parser):
         "instead will just test performance of already present island state.",
     )
     parser.addoption(
-        "--no-performance-tests",
+        "--run-performance-tests",
         action="store_true",
         default=False,
-        help="If enabled all performance tests will be skipped.",
+        help="If enabled performance tests will be run.",
     )
@@ -45,7 +45,9 @@ def quick_performance_tests(request):
 def pytest_runtest_setup(item):
-    if "no_performance_tests" in item.keywords and item.config.getoption("--no-performance-tests"):
+    if "run_performance_tests" in item.keywords and not item.config.getoption(
+        "--run-performance-tests"
+    ):
         pytest.skip(
-            "Skipping performance test because " "--no-performance-tests flag is specified."
+            "Skipping performance test because " "--run-performance-tests flag isn't specified."
         )
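One thing this diff does not show: renaming the mark from no_performance_tests to run_performance_tests also affects marker registration. If the project declares its custom marks (in pytest.ini or a pytest_configure hook), that declaration needs the new name too, otherwise pytest emits PytestUnknownMarkWarning for the renamed mark. A minimal sketch of such a registration, assuming it would live in the same conftest.py (hypothetical, not part of this commit):

    # Hypothetical registration, not part of this diff: declare the renamed marker so
    # @pytest.mark.run_performance_tests is a known mark and doesn't trigger
    # PytestUnknownMarkWarning during collection.
    def pytest_configure(config):
        config.addinivalue_line(
            "markers",
            "run_performance_tests: performance test, skipped unless --run-performance-tests is passed",
        )

The skip logic itself keys off item.keywords, which includes the names of all marks applied to a test, so only tests carrying the renamed mark are affected by the new default.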


@@ -254,15 +254,15 @@ class TestMonkeyBlackbox:
             LOGGER.error("This test doesn't support 'quick_performance_tests' option.")
             assert False
 
-    @pytest.mark.no_performance_tests
+    @pytest.mark.run_performance_tests
     def test_report_generation_from_fake_telemetries(self, island_client, quick_performance_tests):
         ReportGenerationFromTelemetryTest(island_client, quick_performance_tests).run()
 
-    @pytest.mark.no_performance_tests
+    @pytest.mark.run_performance_tests
     def test_map_generation_from_fake_telemetries(self, island_client, quick_performance_tests):
         MapGenerationFromTelemetryTest(island_client, quick_performance_tests).run()
 
-    @pytest.mark.no_performance_tests
+    @pytest.mark.run_performance_tests
     def test_telem_performance(self, island_client, quick_performance_tests):
         TelemetryPerformanceTest(
             island_client, quick_performance_tests