BB, Island: Remove BB performance tests and *everything* related to it

Shreya Malviya 2022-07-21 19:57:02 +05:30 committed by Mike Salvatore
parent 8b43d910b6
commit 35d5592da0
30 changed files with 1 addition and 810 deletions

View File

@@ -1,2 +1 @@
logs/
/blackbox/tests/performance/telemetry_sample

View File

@@ -16,8 +16,6 @@ Either run pytest from `/monkey` directory or set `PYTHONPATH` environment variable
Blackbox tests have the following parameters:
- `--island=IP` Sets the island's IP.
- `--no-gcp` (Optional) Use for no interaction with the cloud (local test).
- `--quick-performance-tests` (Optional) If enabled, performance tests won't reset the island and won't send telemetries;
instead, they will just test the performance of endpoints in the island's existing state.
Example run command:
@@ -26,26 +24,3 @@ Example run command:
#### Running in PyCharm
Configure a PyTest configuration with the additional arguments `-s --island=35.207.152.72:5000`, and to run from
directory `monkey\envs\monkey_zoo\blackbox`.
### Running the telemetry performance test
**Before running the performance test, make sure the browser is not sending requests to the island!**
To run the telemetry performance test, follow these steps:
0. Disable password protection on the island.
   Make sure the island parameter is an IP address (not localhost), as name resolution will increase request times.
1. Gather monkey telemetries.
   1. Enable "Export monkey telemetries" in Configuration -> Internal -> Tests if you don't have exported telemetries already.
   2. Run the monkey and wait until the infection is done.
   3. All telemetries are gathered in `monkey/telem_sample`. If they aren't, restart the island process.
2. Run the telemetry performance test.
   1. Move the directory `monkey/telem_sample` to `envs/monkey_zoo/blackbox/tests/performance/telemetry_sample`.
   2. (Optional) Use `envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/sample_multiplier.py` to multiply the gathered telemetries (see the sketch after these steps).
      1. Run the `sample_multiplier.py` script with the working directory set to `monkey\envs\monkey_zoo\blackbox`.
      2. Pass an integer to indicate the multiplier. For example, running `sample_multiplier.py 4` will replicate the telemetries 4 times.
      3. If you're using PyCharm, check "Emulate terminal in output console" in the debug/run configuration.
3. Add the `--run-performance-tests` flag to the blackbox test run to include performance tests as part of the BlackBox tests.
   You can run a single test separately by adding the `-k 'test_telem_performance'` option.
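
As a reference for step 2.2, the multiplier can also be driven from Python rather than the command line. This is a minimal sketch, assuming the import path mirrors the directory layout above and that the working directory is set as described in step 2.2.1:

```python
# Sketch of step 2.2: replicate the gathered telemetry sample in code.
# Assumes the working directory is set as in step 2.2.1 so that the relative
# telemetry_sample path used by SampleFileParser resolves correctly.
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.sample_multiplier import (  # noqa: E501
    SampleMultiplier,
)

# Equivalent to running `sample_multiplier.py 4`: every telemetry file is
# replicated 4 times with fabricated monkey GUIDs and IPs.
SampleMultiplier(multiplier=4).multiply_telems()
```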

View File

@@ -1,50 +0,0 @@
import logging
from datetime import timedelta
from typing import Dict
from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
LOGGER = logging.getLogger(__name__)
class PerformanceAnalyzer(Analyzer):
def __init__(
self, performance_test_config: PerformanceTestConfig, endpoint_timings: Dict[str, timedelta]
):
self.performance_test_config = performance_test_config
self.endpoint_timings = endpoint_timings
def analyze_test_results(self):
# Calculate total time and check each endpoint
single_page_time_less_then_max = True
total_time = timedelta()
for endpoint, elapsed in self.endpoint_timings.items():
total_time += elapsed
if elapsed > self.performance_test_config.max_allowed_single_page_time:
single_page_time_less_then_max = False
total_time_less_then_max = total_time < self.performance_test_config.max_allowed_total_time
PerformanceAnalyzer.log_slowest_endpoints(self.endpoint_timings)
LOGGER.info(f"Total time is {str(total_time)}")
performance_is_good_enough = total_time_less_then_max and single_page_time_less_then_max
if self.performance_test_config.break_on_timeout and not performance_is_good_enough:
LOGGER.warning(
"Calling breakpoint - pausing to enable investigation of island. "
"Type 'c' to continue once you're done "
"investigating. Type 'p timings' and 'p total_time' to see performance information."
)
breakpoint()
return performance_is_good_enough
@staticmethod
def log_slowest_endpoints(endpoint_timings, max_endpoints_to_display=100):
slow_endpoint_list = list(endpoint_timings.items())
slow_endpoint_list.sort(key=lambda x: x[1], reverse=True)
slow_endpoint_list = slow_endpoint_list[:max_endpoints_to_display]
for endpoint in slow_endpoint_list:
LOGGER.info(f"{endpoint[0]} took {str(endpoint[1])}")
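
For context on how the removed analyzer was consumed: a `PerformanceTestConfig` supplies the thresholds and the analyzer is handed a dict of endpoint timings, as in `EndpointPerformanceTest` further down in this commit. A minimal usage sketch with made-up timings:

```python
# Minimal usage sketch for the removed analyzer, with made-up timings.
from datetime import timedelta

from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig

config = PerformanceTestConfig(
    max_allowed_single_page_time=timedelta(seconds=2),
    max_allowed_total_time=timedelta(seconds=5),
    endpoints_to_test=["api/netmap"],
)
# Endpoint -> measured response time, as returned by get_request_time().
timings = {"api/netmap": timedelta(seconds=1.2)}

# True only if every endpoint and the summed total stay under their limits.
assert PerformanceAnalyzer(config, timings).analyze_test_results()
```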

View File

@@ -14,19 +14,6 @@ def pytest_addoption(parser):
default=False,
help="Use for no interaction with the cloud.",
)
parser.addoption(
"--quick-performance-tests",
action="store_true",
default=False,
help="If enabled performance tests won't reset island and won't send telemetries, "
"instead will just test performance of already present island state.",
)
parser.addoption(
"--run-performance-tests",
action="store_true",
default=False,
help="If enabled performance tests will be run.",
)
parser.addoption(
"--skip-powershell-reuse",
action="store_true",
@@ -45,19 +32,7 @@ def no_gcp(request):
return request.config.getoption("--no-gcp")
@pytest.fixture(scope="session")
def quick_performance_tests(request):
return request.config.getoption("--quick-performance-tests")
def pytest_runtest_setup(item):
if "run_performance_tests" in item.keywords and not item.config.getoption(
"--run-performance-tests"
):
pytest.skip(
"Skipping performance test because " "--run-performance-tests flag isn't specified."
)
if "skip_powershell_reuse" in item.keywords and item.config.getoption(
"--skip-powershell-reuse"
):
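
For context, the removed `pytest_runtest_setup` hook above skips any test whose keywords include `run_performance_tests` unless the flag is passed, which implies the performance tests were marked roughly like this (a sketch; the actual test module is not shown in this diff):

```python
import pytest

# Sketch only: a test marked this way is skipped by the pytest_runtest_setup
# hook above unless the suite is run with --run-performance-tests.
@pytest.mark.run_performance_tests
def test_telem_performance(island_client, quick_performance_tests):
    ...
```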

View File

@@ -77,10 +77,6 @@ class MonkeyIslandClient(object):
LOGGER.error("Failed to reset the environment.")
assert False
@avoid_race_condition
def set_scenario(self, scenario):
self.requests.post_json("api/island-mode", {"mode": scenario})
def find_monkeys_in_db(self, query):
if query is None:
raise TypeError
@@ -120,13 +116,3 @@ class MonkeyIslandClient(object):
def is_all_monkeys_dead(self):
query = {"dead": False}
return len(self.find_monkeys_in_db(query)) == 0
def clear_caches(self):
"""
Tries to clear caches.
:raises: If error (by error code), raises the error
:return: The response
"""
response = self.requests.get("api/test/clear-caches")
response.raise_for_status()
return response

View File

@@ -1,12 +1,9 @@
import functools
import logging
from datetime import timedelta
from typing import Dict
import requests
from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod
ISLAND_USERNAME = "test"
ISLAND_PASSWORD = "test"
LOGGER = logging.getLogger(__name__)
@@ -25,28 +22,6 @@ class MonkeyIslandRequests(object):
def __init__(self, server_address):
self.addr = "https://{IP}/".format(IP=server_address)
self.token = self.try_get_jwt_from_server()
self.supported_request_methods = {
SupportedRequestMethod.GET: self.get,
SupportedRequestMethod.POST: self.post,
SupportedRequestMethod.PATCH: self.patch,
SupportedRequestMethod.DELETE: self.delete,
}
def get_request_time(self, url, method: SupportedRequestMethod, data=None):
response = self.send_request_by_method(url, method, data)
if response.ok:
LOGGER.debug(f"Got ok for {url} content peek:\n{response.content[:120].strip()}")
return response.elapsed
else:
LOGGER.error(f"Trying to get {url} but got unexpected {str(response)}")
# instead of raising for status, mark failed responses as maxtime
return timedelta.max
def send_request_by_method(self, url, method=SupportedRequestMethod.GET, data=None):
if data:
return self.supported_request_methods[method](url, data)
else:
return self.supported_request_methods[method](url)
def try_get_jwt_from_server(self):
try:

View File

@@ -1,8 +0,0 @@
from enum import Enum
class SupportedRequestMethod(Enum):
GET = "GET"
POST = "POST"
PATCH = "PATCH"
DELETE = "DELETE"

View File

@@ -26,7 +26,6 @@ from envs.monkey_zoo.blackbox.utils.gcp_machine_handlers import (
start_machines,
stop_machines,
)
from monkey_island.cc.services.mode.mode_enum import IslandModeEnum
DEFAULT_TIMEOUT_SECONDS = 2 * 60 + 30
MACHINE_BOOTUP_WAIT_SECONDS = 30
@@ -63,7 +62,7 @@ def wait_machine_bootup():
@pytest.fixture(scope="class")
def island_client(island, quick_performance_tests):
def island_client(island):
client_established = False
try:
island_client_object = MonkeyIslandClient(island)
@@ -73,9 +72,6 @@ def island_client(island, quick_performance_tests):
finally:
if not client_established:
pytest.exit("BB tests couldn't establish communication to the island.")
if not quick_performance_tests:
island_client_object.reset_env()
island_client_object.set_scenario(IslandModeEnum.ADVANCED.value)
yield island_client_object

View File

@@ -1,28 +0,0 @@
import logging
from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
LOGGER = logging.getLogger(__name__)
class EndpointPerformanceTest(BasicTest):
def __init__(self, name, test_config: PerformanceTestConfig, island_client: MonkeyIslandClient):
self.name = name
self.test_config = test_config
self.island_client = island_client
def run(self) -> bool:
# Collect timings for all pages
endpoint_timings = {}
for endpoint in self.test_config.endpoints_to_test:
self.island_client.clear_caches()
endpoint_timings[endpoint] = self.island_client.requests.get_request_time(
endpoint, SupportedRequestMethod.GET
)
analyzer = PerformanceAnalyzer(self.test_config, endpoint_timings)
return analyzer.analyze_test_results()

View File

@@ -1,45 +0,0 @@
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import (
PerformanceTestWorkflow,
)
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
MAP_RESOURCES = [
"api/netmap",
]
class MapGenerationTest(PerformanceTest):
TEST_NAME = "Map generation performance test"
def __init__(
self, island_client, serialized_config, analyzers, timeout, log_handler, break_on_timeout
):
self.island_client = island_client
exploitation_test = ExploitationTest(
MapGenerationTest.TEST_NAME,
island_client,
serialized_config,
analyzers,
timeout,
log_handler,
)
performance_config = PerformanceTestConfig(
max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=MAP_RESOURCES,
break_on_timeout=break_on_timeout,
)
self.performance_test_workflow = PerformanceTestWorkflow(
MapGenerationTest.TEST_NAME, exploitation_test, performance_config
)
def run(self):
self.performance_test_workflow.run()

View File

@@ -1,37 +0,0 @@
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import (
TelemetryPerformanceTestWorkflow,
)
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
MAP_RESOURCES = [
"api/netmap",
]
class MapGenerationFromTelemetryTest(PerformanceTest):
TEST_NAME = "Map generation from fake telemetries test"
def __init__(self, island_client, quick_performance_test: bool, break_on_timeout=False):
self.island_client = island_client
performance_config = PerformanceTestConfig(
max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=MAP_RESOURCES,
break_on_timeout=break_on_timeout,
)
self.performance_test_workflow = TelemetryPerformanceTestWorkflow(
MapGenerationFromTelemetryTest.TEST_NAME,
self.island_client,
performance_config,
quick_performance_test,
)
def run(self):
self.performance_test_workflow.run()

View File

@@ -1,16 +0,0 @@
from abc import ABCMeta, abstractmethod
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
class PerformanceTest(BasicTest, metaclass=ABCMeta):
@abstractmethod
def __init__(
self, island_client, serialized_config, analyzers, timeout, log_handler, break_on_timeout
):
pass
@property
@abstractmethod
def TEST_NAME(self):
pass

View File

@@ -1,16 +0,0 @@
from datetime import timedelta
from typing import List
class PerformanceTestConfig:
def __init__(
self,
max_allowed_single_page_time: timedelta,
max_allowed_total_time: timedelta,
endpoints_to_test: List[str] = None,
break_on_timeout=False,
):
self.max_allowed_single_page_time = max_allowed_single_page_time
self.max_allowed_total_time = max_allowed_total_time
self.endpoints_to_test = endpoints_to_test
self.break_on_timeout = break_on_timeout

View File

@@ -1,40 +0,0 @@
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import (
EndpointPerformanceTest,
)
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
class PerformanceTestWorkflow(BasicTest):
def __init__(
self, name, exploitation_test: ExploitationTest, performance_config: PerformanceTestConfig
):
self.name = name
self.exploitation_test = exploitation_test
self.island_client = exploitation_test.island_client
self.serialized_config = exploitation_test.serialized_config
self.performance_config = performance_config
def run(self):
self.island_client.import_config(self.serialized_config)
self.exploitation_test.print_test_starting_info()
try:
self.island_client.run_monkey_local()
self.exploitation_test.test_until_timeout()
finally:
self.island_client.kill_all_monkeys()
self.exploitation_test.wait_until_monkeys_die()
self.exploitation_test.wait_for_monkey_process_to_finish()
if not self.island_client.is_all_monkeys_dead():
raise RuntimeError("Can't test report times since not all Monkeys have died.")
performance_test = EndpointPerformanceTest(
self.name, self.performance_config, self.island_client
)
try:
if not self.island_client.is_all_monkeys_dead():
raise RuntimeError("Can't test report times since not all Monkeys have died.")
assert performance_test.run()
finally:
self.exploitation_test.parse_logs()
self.island_client.reset_env()

View File

@@ -1,48 +0,0 @@
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import (
PerformanceTestWorkflow,
)
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
REPORT_RESOURCES = [
"api/report/security",
"api/attack/report",
"api/report/zero_trust/findings",
"api/report/zero_trust/principles",
"api/report/zero_trust/pillars",
]
class ReportGenerationTest(PerformanceTest):
TEST_NAME = "Report generation performance test"
def __init__(
self, island_client, serialized_config, analyzers, timeout, log_handler, break_on_timeout
):
self.island_client = island_client
exploitation_test = ExploitationTest(
ReportGenerationTest.TEST_NAME,
island_client,
serialized_config,
analyzers,
timeout,
log_handler,
)
performance_config = PerformanceTestConfig(
max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=REPORT_RESOURCES,
break_on_timeout=break_on_timeout,
)
self.performance_test_workflow = PerformanceTestWorkflow(
ReportGenerationTest.TEST_NAME, exploitation_test, performance_config
)
def run(self):
self.performance_test_workflow.run()

View File

@@ -1,41 +0,0 @@
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import (
TelemetryPerformanceTestWorkflow,
)
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
REPORT_RESOURCES = [
"api/report/security",
"api/attack/report",
"api/report/zero_trust/findings",
"api/report/zero_trust/principles",
"api/report/zero_trust/pillars",
]
class ReportGenerationFromTelemetryTest(PerformanceTest):
    TEST_NAME = "Report generation from fake telemetries test"
def __init__(self, island_client, quick_performance_test, break_on_timeout=False):
self.island_client = island_client
performance_config = PerformanceTestConfig(
max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=REPORT_RESOURCES,
break_on_timeout=break_on_timeout,
)
self.performance_test_workflow = TelemetryPerformanceTestWorkflow(
ReportGenerationFromTelemetryTest.TEST_NAME,
self.island_client,
performance_config,
quick_performance_test,
)
def run(self):
self.performance_test_workflow.run()

View File

@@ -1,51 +0,0 @@
import json
import logging
from os import listdir, path
from typing import Dict, List
from tqdm import tqdm
TELEM_DIR_PATH = "../envs/monkey_zoo/blackbox/tests/performance/telemetry_sample"
MAX_SAME_TYPE_TELEM_FILES = 10000
LOGGER = logging.getLogger(__name__)
class SampleFileParser:
@staticmethod
def save_teletries_to_files(telems: List[Dict]):
for telem in tqdm(telems, desc="Telemetries saved to files", position=3):
SampleFileParser.save_telemetry_to_file(telem)
@staticmethod
def save_telemetry_to_file(telem: Dict):
telem_filename = telem["name"] + telem["method"]
for i in range(MAX_SAME_TYPE_TELEM_FILES):
if not path.exists(path.join(TELEM_DIR_PATH, (str(i) + telem_filename))):
telem_filename = str(i) + telem_filename
break
with open(path.join(TELEM_DIR_PATH, telem_filename), "w") as file:
file.write(json.dumps(telem))
@staticmethod
def read_telem_files() -> List[str]:
telems = []
try:
file_paths = [
path.join(TELEM_DIR_PATH, f)
for f in listdir(TELEM_DIR_PATH)
if path.isfile(path.join(TELEM_DIR_PATH, f))
]
except FileNotFoundError:
raise FileNotFoundError(
"Telemetries to send not found. "
"Refer to readme to figure out how to generate telemetries and where to put them."
)
for file_path in file_paths:
with open(file_path, "r") as telem_file:
telem_string = "".join(telem_file.readlines()).replace("\n", "")
telems.append(telem_string)
return telems
@staticmethod
def get_all_telemetries() -> List[Dict]:
return [json.loads(t) for t in SampleFileParser.read_telem_files()]

View File

@@ -1,25 +0,0 @@
from typing import List
class FakeIpGenerator:
def __init__(self):
self.fake_ip_parts = [1, 1, 1, 1]
def generate_fake_ips_for_real_ips(self, real_ips: List[str]) -> List[str]:
fake_ips = []
for i in range(len(real_ips)):
fake_ips.append(".".join(str(part) for part in self.fake_ip_parts))
self.increment_ip()
return fake_ips
def increment_ip(self):
self.fake_ip_parts[3] += 1
self.try_fix_ip_range()
def try_fix_ip_range(self):
for i in range(len(self.fake_ip_parts)):
if self.fake_ip_parts[i] > 256:
if i - 1 < 0:
raise Exception("Fake IP's out of range.")
self.fake_ip_parts[i - 1] += 1
self.fake_ip_parts[i] = 1

View File

@@ -1,19 +0,0 @@
import random
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import ( # noqa: E501
FakeIpGenerator,
)
class FakeMonkey:
def __init__(self, ips, guid, fake_ip_generator: FakeIpGenerator, on_island=False):
self.original_ips = ips
self.original_guid = guid
self.fake_ip_generator = fake_ip_generator
self.on_island = on_island
self.fake_guid = str(random.randint(1000000000000, 9999999999999)) # noqa: DUO102
self.fake_ips = fake_ip_generator.generate_fake_ips_for_real_ips(ips)
def change_fake_data(self):
self.fake_ips = self.fake_ip_generator.generate_fake_ips_for_real_ips(self.original_ips)
self.fake_guid = str(random.randint(1000000000000, 9999999999999)) # noqa: DUO102

View File

@@ -1,107 +0,0 @@
import copy
import json
import logging
import sys
from typing import Dict, List
from tqdm import tqdm
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import (
SampleFileParser,
)
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import ( # noqa: E501
FakeIpGenerator,
)
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import ( # noqa: E501
FakeMonkey,
)
LOGGER = logging.getLogger(__name__)
class SampleMultiplier:
def __init__(self, multiplier: int):
self.multiplier = multiplier
self.fake_ip_generator = FakeIpGenerator()
def multiply_telems(self):
telems = SampleFileParser.get_all_telemetries()
telem_contents = [json.loads(telem["content"]) for telem in telems]
monkeys = self.get_monkeys_from_telems(telem_contents)
for i in tqdm(range(self.multiplier), desc="Batch of fabricated telemetries", position=1):
for monkey in monkeys:
monkey.change_fake_data()
fake_telem_batch = copy.deepcopy(telems)
SampleMultiplier.fabricate_monkeys_in_telems(fake_telem_batch, monkeys)
SampleMultiplier.offset_telem_times(iteration=i, telems=fake_telem_batch)
SampleFileParser.save_teletries_to_files(fake_telem_batch)
LOGGER.info("")
@staticmethod
def fabricate_monkeys_in_telems(telems: List[Dict], monkeys: List[FakeMonkey]):
for telem in tqdm(telems, desc="Telemetries fabricated", position=2):
for monkey in monkeys:
if monkey.on_island:
continue
if (
monkey.original_guid in telem["content"]
or monkey.original_guid in telem["endpoint"]
) and not monkey.on_island:
telem["content"] = telem["content"].replace(
monkey.original_guid, monkey.fake_guid
)
telem["endpoint"] = telem["endpoint"].replace(
monkey.original_guid, monkey.fake_guid
)
for i in range(len(monkey.original_ips)):
telem["content"] = telem["content"].replace(
monkey.original_ips[i], monkey.fake_ips[i]
)
@staticmethod
def offset_telem_times(iteration: int, telems: List[Dict]):
for telem in telems:
telem["time"]["$date"] += iteration * 1000
def get_monkeys_from_telems(self, telems: List[Dict]):
island_ips = SampleMultiplier.get_island_ips_from_telems(telems)
monkeys = []
for telem in [
telem
for telem in telems
if "telem_category" in telem and telem["telem_category"] == "system_info"
]:
if "network_info" not in telem["data"]:
continue
guid = telem["monkey_guid"]
monkey_present = [monkey for monkey in monkeys if monkey.original_guid == guid]
if not monkey_present:
ips = [net_info["addr"] for net_info in telem["data"]["network_info"]["networks"]]
if set(island_ips).intersection(ips):
on_island = True
else:
on_island = False
monkeys.append(
FakeMonkey(
ips=ips,
guid=guid,
fake_ip_generator=self.fake_ip_generator,
on_island=on_island,
)
)
return monkeys
@staticmethod
def get_island_ips_from_telems(telems: List[Dict]) -> List[str]:
island_ips = []
for telem in telems:
if "config" in telem:
island_ips = telem["config"]["command_servers"]
for i in range(len(island_ips)):
island_ips[i] = island_ips[i].replace(":5000", "")
return island_ips
if __name__ == "__main__":
SampleMultiplier(multiplier=int(sys.argv[1])).multiply_telems()

View File

@@ -1,21 +0,0 @@
from unittest import TestCase
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import ( # noqa: E501
FakeIpGenerator,
)
class TestFakeIpGenerator(TestCase):
def test_fake_ip_generation(self):
fake_ip_gen = FakeIpGenerator()
self.assertListEqual([1, 1, 1, 1], fake_ip_gen.fake_ip_parts)
for i in range(256):
fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"])
self.assertListEqual(["1.1.2.1"], fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"]))
fake_ip_gen.fake_ip_parts = [256, 256, 255, 256]
self.assertListEqual(
["256.256.255.256", "256.256.256.1"],
fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1", "1.1.1.2"]),
)
fake_ip_gen.fake_ip_parts = [256, 256, 256, 256]
self.assertRaises(Exception, fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"]))

View File

@@ -1,66 +0,0 @@
import json
import logging
from datetime import timedelta
from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import (
SampleFileParser,
)
LOGGER = logging.getLogger(__name__)
MAX_ALLOWED_SINGLE_TELEM_PARSE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=60)
class TelemetryPerformanceTest:
def __init__(self, island_client: MonkeyIslandClient, quick_performance_test: bool):
self.island_client = island_client
self.quick_performance_test = quick_performance_test
def test_telemetry_performance(self):
LOGGER.info("Starting telemetry performance test.")
try:
all_telemetries = SampleFileParser.get_all_telemetries()
except FileNotFoundError:
raise FileNotFoundError(
"Telemetries to send not found. "
"Refer to readme to figure out how to generate telemetries and where to put them."
)
LOGGER.info("Telemetries imported successfully.")
all_telemetries.sort(key=lambda telem: telem["time"]["$date"])
telemetry_parse_times = {}
for i in range(len(all_telemetries)):
telemetry_endpoint = TelemetryPerformanceTest.get_verbose_telemetry_endpoint(
all_telemetries[i]
)
telemetry_parse_times[telemetry_endpoint] = self.get_telemetry_time(all_telemetries[i])
LOGGER.info(f"Telemetry Nr.{i} sent out of {len(all_telemetries)} total.")
test_config = PerformanceTestConfig(
MAX_ALLOWED_SINGLE_TELEM_PARSE_TIME, MAX_ALLOWED_TOTAL_TIME
)
PerformanceAnalyzer(test_config, telemetry_parse_times).analyze_test_results()
if not self.quick_performance_test:
self.island_client.reset_env()
def get_telemetry_time(self, telemetry):
content = telemetry["content"]
url = telemetry["endpoint"]
method = SupportedRequestMethod.__getattr__(telemetry["method"])
return self.island_client.requests.get_request_time(url=url, method=method, data=content)
@staticmethod
def get_verbose_telemetry_endpoint(telemetry):
telem_category = ""
if "telem_category" in telemetry["content"]:
telem_category = (
"_"
+ json.loads(telemetry["content"])["telem_category"]
+ "_"
+ telemetry["_id"]["$oid"]
)
return telemetry["endpoint"] + telem_category

View File

@@ -1,34 +0,0 @@
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import (
EndpointPerformanceTest,
)
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import (
TelemetryPerformanceTest,
)
class TelemetryPerformanceTestWorkflow(BasicTest):
def __init__(
self, name, island_client, performance_config: PerformanceTestConfig, quick_performance_test
):
self.name = name
self.island_client = island_client
self.performance_config = performance_config
self.quick_performance_test = quick_performance_test
def run(self):
try:
if not self.quick_performance_test:
telem_sending_test = TelemetryPerformanceTest(
island_client=self.island_client,
quick_performance_test=self.quick_performance_test,
)
telem_sending_test.test_telemetry_performance()
performance_test = EndpointPerformanceTest(
self.name, self.performance_config, self.island_client
)
assert performance_test.run()
finally:
if not self.quick_performance_test:
self.island_client.reset_env()

View File

@@ -23,7 +23,6 @@ from monkey_island.cc.resources.agent_controls import StopAgentCheck, StopAllAge
from monkey_island.cc.resources.attack.attack_report import AttackReport
from monkey_island.cc.resources.auth.auth import Authenticate, init_jwt
from monkey_island.cc.resources.auth.registration import Registration
from monkey_island.cc.resources.blackbox.clear_caches import ClearCaches
from monkey_island.cc.resources.blackbox.log_blackbox_endpoint import LogBlackboxEndpoint
from monkey_island.cc.resources.blackbox.monkey_blackbox_endpoint import MonkeyBlackboxEndpoint
from monkey_island.cc.resources.blackbox.telemetry_blackbox_endpoint import (
@@ -195,7 +194,6 @@ def init_restful_endpoints(api: FlaskDIWrapper):
# Note: Preferably, the API will provide a rich feature set and allow access to all of the
# necessary data. This would make these endpoints obsolete.
api.add_resource(MonkeyBlackboxEndpoint)
api.add_resource(ClearCaches)
api.add_resource(LogBlackboxEndpoint)
api.add_resource(TelemetryBlackboxEndpoint)

View File

@@ -1,40 +0,0 @@
import logging
import flask_restful
from monkey_island.cc.resources.AbstractResource import AbstractResource
from monkey_island.cc.resources.request_authentication import jwt_required
from monkey_island.cc.services.attack.attack_report import AttackReportService
from monkey_island.cc.services.reporting.report import ReportService
NOT_ALL_REPORTS_DELETED = "Not all reports have been cleared from the DB!"
logger = logging.getLogger(__name__)
class ClearCaches(AbstractResource):
# API Spec: Action, not a resource; RPC-style endpoint?
urls = ["/api/test/clear-caches"]
"""
Used for timing tests - we want to get actual execution time of functions in BlackBox without
caching -
so we use this to clear the caches.
:note: DO NOT CALL THIS IN PRODUCTION CODE as this will slow down the user experience.
"""
@jwt_required
def get(self, **kw):
try:
logger.warning("Trying to clear caches! Make sure this is not production")
ReportService.delete_saved_report_if_exists()
AttackReportService.delete_saved_report_if_exists()
# TODO: Monkey.clear_caches(), clear LRU caches of function in the Monkey object
except RuntimeError as e:
logger.exception(e)
flask_restful.abort(500, error_info=str(e))
if ReportService.is_report_generated() or AttackReportService.is_report_generated():
logger.error(NOT_ALL_REPORTS_DELETED)
flask_restful.abort(500, error_info=NOT_ALL_REPORTS_DELETED)
return {"success": "true"}

View File

@@ -137,14 +137,6 @@ class AttackReportService:
generated_report = mongo.db.attack_report.find_one({})
return generated_report is not None
@staticmethod
def delete_saved_report_if_exists():
delete_result = mongo.db.attack_report.delete_many({})
if mongo.db.attack_report.count_documents({}) != 0:
raise RuntimeError(
"Attack Report cache not cleared. DeleteResult: " + delete_result.raw_result
)
@staticmethod
def get_techniques_for_report():
"""

View File

@@ -499,19 +499,6 @@ class ReportService:
return False
@staticmethod
def delete_saved_report_if_exists():
"""
This function clears the saved report from the DB.
:raises RuntimeError if deletion failed
"""
delete_result = mongo.db.report.delete_many({})
if mongo.db.report.count_documents({}) != 0:
raise RuntimeError(
"Report cache not cleared. DeleteResult: " + delete_result.raw_result
)
@staticmethod
def get_report():
if not ReportService.is_latest_report_exists():