Merge pull request #619 from VakarisZ/map_generation_test

Map generation test
This commit is contained in:
VakarisZ 2020-04-28 17:23:44 +03:00 committed by GitHub
commit 67c8b48d9c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 328 additions and 177 deletions

View File

@@ -1,56 +1,36 @@
import logging
from datetime import timedelta
from typing import Dict
from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
REPORT_URLS = [
"api/report/security",
"api/attack/report",
"api/report/zero_trust/findings",
"api/report/zero_trust/principles",
"api/report/zero_trust/pillars"
]
logger = logging.getLogger(__name__)
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
LOGGER = logging.getLogger(__name__)
class PerformanceAnalyzer(Analyzer):
def __init__(self, island_client: MonkeyIslandClient, break_if_took_too_long=False):
self.break_if_took_too_long = break_if_took_too_long
self.island_client = island_client
def analyze_test_results(self) -> bool:
if not self.island_client.is_all_monkeys_dead():
raise RuntimeError("Can't test report times since not all Monkeys have died.")
# Collect timings for all pages
self.island_client.clear_caches()
report_resource_to_response_time = {}
for url in REPORT_URLS:
report_resource_to_response_time[url] = self.island_client.get_elapsed_for_get_request(url)
def __init__(self, performance_test_config: PerformanceTestConfig, endpoint_timings: Dict[str, timedelta]):
self.performance_test_config = performance_test_config
self.endpoint_timings = endpoint_timings
def analyze_test_results(self):
# Calculate total time and check each page
single_page_time_less_then_max = True
total_time = timedelta()
for page, elapsed in report_resource_to_response_time.items():
logger.info(f"page {page} took {str(elapsed)}")
for page, elapsed in self.endpoint_timings.items():
LOGGER.info(f"page {page} took {str(elapsed)}")
total_time += elapsed
if elapsed > MAX_ALLOWED_SINGLE_PAGE_TIME:
if elapsed > self.performance_test_config.max_allowed_single_page_time:
single_page_time_less_then_max = False
total_time_less_then_max = total_time < MAX_ALLOWED_TOTAL_TIME
total_time_less_then_max = total_time < self.performance_test_config.max_allowed_total_time
logger.info(f"total time is {str(total_time)}")
LOGGER.info(f"total time is {str(total_time)}")
performance_is_good_enough = total_time_less_then_max and single_page_time_less_then_max
if self.break_if_took_too_long and not performance_is_good_enough:
logger.warning(
if self.performance_test_config.break_on_timeout and not performance_is_good_enough:
LOGGER.warning(
"Calling breakpoint - pausing to enable investigation of island. Type 'c' to continue once you're done "
"investigating. Type 'p timings' and 'p total_time' to see performance information."
)

View File

@@ -96,13 +96,3 @@ class MonkeyIslandClient(object):
response = self.requests.get("api/test/clear_caches")
response.raise_for_status()
return response
def get_elapsed_for_get_request(self, url):
response = self.requests.get(url)
if response.ok:
LOGGER.debug(f"Got ok for {url} content peek:\n{response.content[:120].strip()}")
return response.elapsed
else:
LOGGER.error(f"Trying to get {url} but got unexpected {str(response)}")
# instead of raising for status, mark failed responses as maxtime
return timedelta.max()

View File

@@ -4,12 +4,13 @@ import logging
import pytest
from time import sleep
from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import CommunicationAnalyzer
from envs.monkey_zoo.blackbox.island_client.island_config_parser import IslandConfigParser
from envs.monkey_zoo.blackbox.tests.performance.map_generation import MapGenerationTest
from envs.monkey_zoo.blackbox.tests.performance.report_generation import ReportGenerationTest
from envs.monkey_zoo.blackbox.utils import gcp_machine_handlers
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.log_handlers.test_logs_handler import TestLogsHandler
DEFAULT_TIMEOUT_SECONDS = 5*60
@@ -55,34 +56,32 @@ def island_client(island):
class TestMonkeyBlackbox(object):
@staticmethod
def run_basic_test(island_client, conf_filename, test_name, timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS):
def run_exploitation_test(island_client, conf_filename, test_name, timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS):
config_parser = IslandConfigParser(conf_filename)
analyzer = CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())
log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
BasicTest(
ExploitationTest(
name=test_name,
island_client=island_client,
config_parser=config_parser,
analyzers=[analyzer],
timeout=timeout_in_seconds,
post_exec_analyzers=[],
log_handler=log_handler).run()
@staticmethod
def run_performance_test(island_client, conf_filename, test_name, timeout_in_seconds):
def run_performance_test(performance_test_class, island_client,
conf_filename, timeout_in_seconds, break_on_timeout=False):
config_parser = IslandConfigParser(conf_filename)
log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
BasicTest(
name=test_name,
island_client=island_client,
config_parser=config_parser,
analyzers=[CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())],
timeout=timeout_in_seconds,
post_exec_analyzers=[PerformanceAnalyzer(
log_handler = TestLogsHandler(performance_test_class.TEST_NAME,
island_client,
break_if_took_too_long=False
)],
log_handler=log_handler).run()
TestMonkeyBlackbox.get_log_dir_path())
analyzers = [CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())]
performance_test_class(island_client=island_client,
config_parser=config_parser,
analyzers=analyzers,
timeout=timeout_in_seconds,
log_handler=log_handler,
break_on_timeout=break_on_timeout).run()
@staticmethod
def get_log_dir_path():
@@ -92,43 +91,42 @@ class TestMonkeyBlackbox(object):
assert island_client.get_api_status() is not None
def test_ssh_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "SSH.conf", "SSH_exploiter_and_keys")
TestMonkeyBlackbox.run_exploitation_test(island_client, "SSH.conf", "SSH_exploiter_and_keys")
def test_hadoop_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "HADOOP.conf", "Hadoop_exploiter", 6*60)
TestMonkeyBlackbox.run_exploitation_test(island_client, "HADOOP.conf", "Hadoop_exploiter", 6 * 60)
def test_mssql_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "MSSQL.conf", "MSSQL_exploiter")
TestMonkeyBlackbox.run_exploitation_test(island_client, "MSSQL.conf", "MSSQL_exploiter")
def test_smb_and_mimikatz_exploiters(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "SMB_MIMIKATZ.conf", "SMB_exploiter_mimikatz")
TestMonkeyBlackbox.run_exploitation_test(island_client, "SMB_MIMIKATZ.conf", "SMB_exploiter_mimikatz")
def test_smb_pth(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "SMB_PTH.conf", "SMB_PTH")
TestMonkeyBlackbox.run_exploitation_test(island_client, "SMB_PTH.conf", "SMB_PTH")
def test_elastic_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "ELASTIC.conf", "Elastic_exploiter")
TestMonkeyBlackbox.run_exploitation_test(island_client, "ELASTIC.conf", "Elastic_exploiter")
def test_struts_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "STRUTS2.conf", "Strtuts2_exploiter")
TestMonkeyBlackbox.run_exploitation_test(island_client, "STRUTS2.conf", "Strtuts2_exploiter")
def test_weblogic_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "WEBLOGIC.conf", "Weblogic_exploiter")
TestMonkeyBlackbox.run_exploitation_test(island_client, "WEBLOGIC.conf", "Weblogic_exploiter")
def test_shellshock_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "SHELLSHOCK.conf", "Shellschock_exploiter")
TestMonkeyBlackbox.run_exploitation_test(island_client, "SHELLSHOCK.conf", "Shellschock_exploiter")
def test_tunneling(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "TUNNELING.conf", "Tunneling_exploiter", 15*60)
TestMonkeyBlackbox.run_exploitation_test(island_client, "TUNNELING.conf", "Tunneling_exploiter", 15 * 60)
def test_wmi_and_mimikatz_exploiters(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "WMI_MIMIKATZ.conf", "WMI_exploiter,_mimikatz")
TestMonkeyBlackbox.run_exploitation_test(island_client, "WMI_MIMIKATZ.conf", "WMI_exploiter,_mimikatz")
def test_wmi_pth(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "WMI_PTH.conf", "WMI_PTH")
TestMonkeyBlackbox.run_exploitation_test(island_client, "WMI_PTH.conf", "WMI_PTH")
@pytest.mark.xfail(reason="Performance is slow, will improve on release 1.9.")
def test_performance(self, island_client):
def test_report_generation_performance(self, island_client):
"""
This test includes the SSH + Elastic + Hadoop + MSSQL machines all in one test
for a total of 8 machines including the Monkey Island.
@@ -136,8 +134,14 @@ class TestMonkeyBlackbox(object):
It has 2 analyzers - the regular one which checks all the Monkeys
and the Timing one which checks how long the report took to execute
"""
TestMonkeyBlackbox.run_performance_test(
TestMonkeyBlackbox.run_performance_test(ReportGenerationTest,
island_client,
"PERFORMANCE.conf",
"test_report_performance",
timeout_in_seconds=10*60)
def test_map_generation_performance(self, island_client):
TestMonkeyBlackbox.run_performance_test(MapGenerationTest,
island_client,
"PERFORMANCE.conf",
timeout_in_seconds=10*60)

View File

@@ -1,100 +1,8 @@
from time import sleep
import logging
from envs.monkey_zoo.blackbox.utils.test_timer import TestTimer
MAX_TIME_FOR_MONKEYS_TO_DIE = 5 * 60
WAIT_TIME_BETWEEN_REQUESTS = 10
TIME_FOR_MONKEY_PROCESS_TO_FINISH = 40
DELAY_BETWEEN_ANALYSIS = 3
LOGGER = logging.getLogger(__name__)
import abc
class BasicTest(object):
def __init__(self, name, island_client, config_parser, analyzers, timeout, post_exec_analyzers, log_handler):
self.name = name
self.island_client = island_client
self.config_parser = config_parser
self.analyzers = analyzers
self.post_exec_analyzers = post_exec_analyzers
self.timeout = timeout
self.log_handler = log_handler
class BasicTest(abc.ABC):
@abc.abstractmethod
def run(self):
self.island_client.import_config(self.config_parser.config_raw)
self.print_test_starting_info()
try:
self.island_client.run_monkey_local()
self.test_until_timeout()
finally:
self.island_client.kill_all_monkeys()
self.wait_until_monkeys_die()
self.wait_for_monkey_process_to_finish()
self.test_post_exec_analyzers()
self.parse_logs()
self.island_client.reset_env()
def print_test_starting_info(self):
LOGGER.info("Started {} test".format(self.name))
LOGGER.info("Machines participating in test: " + ", ".join(self.config_parser.get_ips_of_targets()))
print("")
def test_until_timeout(self):
timer = TestTimer(self.timeout)
while not timer.is_timed_out():
if self.all_analyzers_pass():
self.log_success(timer)
return
sleep(DELAY_BETWEEN_ANALYSIS)
LOGGER.debug("Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken()))
self.log_failure(timer)
assert False
def log_success(self, timer):
LOGGER.info(self.get_analyzer_logs())
LOGGER.info("{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken()))
def log_failure(self, timer):
LOGGER.info(self.get_analyzer_logs())
LOGGER.error("{} test failed because of timeout. Time taken: {:.1f} seconds.".format(self.name,
timer.get_time_taken()))
def all_analyzers_pass(self):
analyzers_results = [analyzer.analyze_test_results() for analyzer in self.analyzers]
return all(analyzers_results)
def get_analyzer_logs(self):
log = ""
for analyzer in self.analyzers:
log += "\n" + analyzer.log.get_contents()
return log
def wait_until_monkeys_die(self):
time_passed = 0
while not self.island_client.is_all_monkeys_dead() and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE:
sleep(WAIT_TIME_BETWEEN_REQUESTS)
time_passed += WAIT_TIME_BETWEEN_REQUESTS
LOGGER.debug("Waiting for all monkeys to die. Time passed: {}".format(time_passed))
if time_passed > MAX_TIME_FOR_MONKEYS_TO_DIE:
LOGGER.error("Some monkeys didn't die after the test, failing")
assert False
def parse_logs(self):
LOGGER.info("Parsing test logs:")
self.log_handler.parse_test_logs()
@staticmethod
def wait_for_monkey_process_to_finish():
"""
There is a time period when monkey is set to dead, but the process is still closing.
If we try to launch monkey during that time window monkey will fail to start, that's
why test needs to wait a bit even after all monkeys are dead.
"""
LOGGER.debug("Waiting for Monkey process to close...")
sleep(TIME_FOR_MONKEY_PROCESS_TO_FINISH)
def test_post_exec_analyzers(self):
post_exec_analyzers_results = [analyzer.analyze_test_results() for analyzer in self.post_exec_analyzers]
assert all(post_exec_analyzers_results)
pass

View File

@@ -0,0 +1,95 @@
from time import sleep
import logging
from envs.monkey_zoo.blackbox.utils.test_timer import TestTimer
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
# Upper bound (seconds) on waiting for all monkeys to report dead after kill_all_monkeys.
MAX_TIME_FOR_MONKEYS_TO_DIE = 5 * 60
# Polling interval (seconds) between is_all_monkeys_dead checks.
WAIT_TIME_BETWEEN_REQUESTS = 10
# Grace period (seconds) after monkeys are marked dead, while the process is still closing.
TIME_FOR_MONKEY_PROCESS_TO_FINISH = 40
# Delay (seconds) between consecutive analyzer evaluation rounds.
DELAY_BETWEEN_ANALYSIS = 3
# Module-level logger for this test module.
LOGGER = logging.getLogger(__name__)
class ExploitationTest(BasicTest):
    """End-to-end blackbox exploitation test.

    Imports a configuration into the island, launches a local monkey, then polls
    the supplied analyzers until they all pass or the timeout expires.  Teardown
    (killing monkeys, parsing logs, resetting the island) always runs, even when
    analysis fails or raises.
    """

    def __init__(self, name, island_client, config_parser, analyzers, timeout, log_handler):
        # Human-readable test name used in log messages.
        self.name = name
        # Client for the Monkey Island API.
        self.island_client = island_client
        # Parsed island configuration; also yields the target machine IPs.
        self.config_parser = config_parser
        # Analyzers that must ALL pass for the test to succeed.
        self.analyzers = analyzers
        # Overall timeout (seconds) for the analyzers to pass.
        self.timeout = timeout
        # Handler that parses monkey logs after the run.
        self.log_handler = log_handler

    def run(self):
        """Execute the full test lifecycle: configure, run, analyze, tear down."""
        self.island_client.import_config(self.config_parser.config_raw)
        self.print_test_starting_info()
        try:
            self.island_client.run_monkey_local()
            self.test_until_timeout()
        finally:
            # Teardown must happen regardless of the analysis outcome.
            self.island_client.kill_all_monkeys()
            self.wait_until_monkeys_die()
            self.wait_for_monkey_process_to_finish()
            self.parse_logs()
            self.island_client.reset_env()

    def print_test_starting_info(self):
        """Log the test name and the machines participating in it."""
        LOGGER.info("Started {} test".format(self.name))
        LOGGER.info("Machines participating in test: " + ", ".join(self.config_parser.get_ips_of_targets()))
        print("")

    def test_until_timeout(self):
        """Poll analyzers until all pass; fail the test (assert False) on timeout."""
        timer = TestTimer(self.timeout)
        while not timer.is_timed_out():
            if self.all_analyzers_pass():
                self.log_success(timer)
                return
            sleep(DELAY_BETWEEN_ANALYSIS)
            LOGGER.debug("Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken()))
        self.log_failure(timer)
        assert False

    def log_success(self, timer):
        LOGGER.info(self.get_analyzer_logs())
        LOGGER.info("{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken()))

    def log_failure(self, timer):
        LOGGER.info(self.get_analyzer_logs())
        LOGGER.error("{} test failed because of timeout. Time taken: {:.1f} seconds.".format(self.name,
                                                                                             timer.get_time_taken()))

    def all_analyzers_pass(self):
        """Return True only if every analyzer reports success."""
        analyzers_results = [analyzer.analyze_test_results() for analyzer in self.analyzers]
        return all(analyzers_results)

    def get_analyzer_logs(self):
        """Concatenate the log contents of all analyzers, one per line."""
        log = ""
        for analyzer in self.analyzers:
            log += "\n" + analyzer.log.get_contents()
        return log

    def wait_until_monkeys_die(self):
        """Block until all monkeys report dead, failing after MAX_TIME_FOR_MONKEYS_TO_DIE."""
        time_passed = 0
        while not self.island_client.is_all_monkeys_dead() and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE:
            sleep(WAIT_TIME_BETWEEN_REQUESTS)
            time_passed += WAIT_TIME_BETWEEN_REQUESTS
            LOGGER.debug("Waiting for all monkeys to die. Time passed: {}".format(time_passed))
        # Bug fix: the loop exits with time_passed == MAX_TIME_FOR_MONKEYS_TO_DIE when
        # the monkeys never die (the limit is an exact multiple of the wait step), so
        # the previous strict `time_passed > MAX_...` check could never fire and the
        # timeout went unreported.  Re-check liveness directly instead.
        if not self.island_client.is_all_monkeys_dead():
            LOGGER.error("Some monkeys didn't die after the test, failing")
            assert False

    def parse_logs(self):
        """Delegate log parsing to the configured log handler."""
        LOGGER.info("Parsing test logs:")
        self.log_handler.parse_test_logs()

    @staticmethod
    def wait_for_monkey_process_to_finish():
        """
        There is a time period when monkey is set to dead, but the process is still closing.
        If we try to launch monkey during that time window monkey will fail to start, that's
        why test needs to wait a bit even after all monkeys are dead.
        """
        LOGGER.debug("Waiting for Monkey process to close...")
        sleep(TIME_FOR_MONKEY_PROCESS_TO_FINISH)

View File

@@ -0,0 +1,42 @@
import logging
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
LOGGER = logging.getLogger(__name__)
class EndpointPerformanceTest(BasicTest):
    """Times GET requests against the configured island endpoints and analyzes the results."""

    def __init__(self, name, test_config: PerformanceTestConfig, island_client: MonkeyIslandClient):
        self.name = name
        self.test_config = test_config
        self.island_client = island_client

    def run(self) -> bool:
        """Collect per-endpoint timings and return the analyzer's verdict."""
        if not self.island_client.is_all_monkeys_dead():
            raise RuntimeError("Can't test report times since not all Monkeys have died.")
        # Collect timings for all pages
        self.island_client.clear_caches()
        endpoint_timings = {endpoint: self.get_elapsed_for_get_request(endpoint)
                            for endpoint in self.test_config.endpoints_to_test}
        analyzer = PerformanceAnalyzer(self.test_config, endpoint_timings)
        return analyzer.analyze_test_results()

    def get_elapsed_for_get_request(self, url):
        """Return the elapsed time of a GET to `url`; timedelta.max on a failed response."""
        response = self.island_client.requests.get(url)
        if not response.ok:
            LOGGER.error(f"Trying to get {url} but got unexpected {str(response)}")
            # instead of raising for status, mark failed responses as maxtime
            return timedelta.max
        LOGGER.debug(f"Got ok for {url} content peek:\n{response.content[:120].strip()}")
        return response.elapsed

View File

@@ -0,0 +1,35 @@
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import PerformanceTestWorkflow
# Per-endpoint response-time budget for the map test.
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
# Budget for the combined response time of all tested endpoints.
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
# Island API endpoints timed by this test.
MAP_RESOURCES = [
    "api/netmap",
]
class MapGenerationTest(PerformanceTest):
    TEST_NAME = "Map generation performance test"

    def __init__(self, island_client, config_parser, analyzers,
                 timeout, log_handler, break_on_timeout):
        """Compose an exploitation run plus map-endpoint timing into one workflow."""
        self.island_client = island_client
        self.config_parser = config_parser
        scenario = ExploitationTest(MapGenerationTest.TEST_NAME, island_client,
                                    config_parser, analyzers, timeout, log_handler)
        timing_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
                                              max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
                                              endpoints_to_test=MAP_RESOURCES,
                                              break_on_timeout=break_on_timeout)
        self.performance_test_workflow = PerformanceTestWorkflow(MapGenerationTest.TEST_NAME,
                                                                 scenario, timing_config)

    def run(self):
        """Delegate to the composed workflow."""
        self.performance_test_workflow.run()

View File

@@ -0,0 +1,16 @@
from abc import ABCMeta, abstractmethod
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
class PerformanceTest(BasicTest, metaclass=ABCMeta):
    """Abstract base for island performance tests.

    Concrete subclasses provide a TEST_NAME and accept the standard blackbox
    constructor arguments (island_client, config_parser, analyzers, timeout,
    log_handler, break_on_timeout).
    """

    @abstractmethod
    def __init__(self, island_client, config_parser, analyzers,
                 timeout, log_handler, break_on_timeout):
        pass

    # Abstract class-level name; subclasses override it with a plain class attribute.
    @property
    @abstractmethod
    def TEST_NAME(self):
        pass

View File

@@ -0,0 +1,12 @@
from datetime import timedelta
from typing import List
class PerformanceTestConfig:
    """Bundle of thresholds and targets for an endpoint performance test."""

    def __init__(self, max_allowed_single_page_time: timedelta, max_allowed_total_time: timedelta,
                 endpoints_to_test: List[str], break_on_timeout=False):
        # Island API paths whose response times are measured.
        self.endpoints_to_test = endpoints_to_test
        # Per-endpoint response-time budget.
        self.max_allowed_single_page_time = max_allowed_single_page_time
        # Budget for all endpoint timings combined.
        self.max_allowed_total_time = max_allowed_total_time
        # When True, the analyzer pauses for investigation instead of just failing.
        self.break_on_timeout = break_on_timeout

View File

@@ -0,0 +1,31 @@
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import EndpointPerformanceTest
class PerformanceTestWorkflow(BasicTest):
    """Composite test: run an exploitation scenario, then time island endpoints."""

    def __init__(self, name, exploitation_test: ExploitationTest, performance_config: PerformanceTestConfig):
        self.name = name
        self.exploitation_test = exploitation_test
        # Borrow the island client and config parser from the exploitation phase.
        self.island_client = exploitation_test.island_client
        self.config_parser = exploitation_test.config_parser
        self.performance_config = performance_config

    def run(self):
        """Run exploitation, then the endpoint timing test; always tear down."""
        island = self.island_client
        island.import_config(self.config_parser.config_raw)
        self.exploitation_test.print_test_starting_info()
        try:
            island.run_monkey_local()
            self.exploitation_test.test_until_timeout()
        finally:
            island.kill_all_monkeys()
            self.exploitation_test.wait_until_monkeys_die()
            self.exploitation_test.wait_for_monkey_process_to_finish()
            performance_test = EndpointPerformanceTest(self.name, self.performance_config, island)
            try:
                assert performance_test.run()
            finally:
                # Logs are parsed and the island reset even when timing fails.
                self.exploitation_test.parse_logs()
                island.reset_env()

View File

@@ -0,0 +1,38 @@
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import PerformanceTestWorkflow
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
# Per-endpoint response-time budget for the report test.
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
# Budget for the combined response time of all tested endpoints.
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
# Island report API endpoints timed by this test.
REPORT_RESOURCES = [
    "api/report/security",
    "api/attack/report",
    "api/report/zero_trust/findings",
    "api/report/zero_trust/principles",
    "api/report/zero_trust/pillars"
]
class ReportGenerationTest(PerformanceTest):
    TEST_NAME = "Report generation performance test"

    def __init__(self, island_client, config_parser, analyzers,
                 timeout, log_handler, break_on_timeout):
        """Compose an exploitation run plus report-endpoint timing into one workflow."""
        self.island_client = island_client
        self.config_parser = config_parser
        scenario = ExploitationTest(ReportGenerationTest.TEST_NAME, island_client,
                                    config_parser, analyzers, timeout, log_handler)
        timing_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
                                              max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
                                              endpoints_to_test=REPORT_RESOURCES,
                                              break_on_timeout=break_on_timeout)
        self.performance_test_workflow = PerformanceTestWorkflow(ReportGenerationTest.TEST_NAME,
                                                                 scenario, timing_config)

    def run(self):
        """Delegate to the composed workflow."""
        self.performance_test_workflow.run()