Merge pull request #548 from guardicore/547/island-report-response-time

Calculate response time for testing
Shay Nehmad 2020-03-16 14:39:59 +02:00 committed by GitHub
commit dfa9d99ae9
14 changed files with 399 additions and 45 deletions

View File

@@ -0,0 +1,8 @@
from abc import ABCMeta, abstractmethod


class Analyzer(object, metaclass=ABCMeta):
    @abstractmethod
    def analyze_test_results(self):
        raise NotImplementedError()
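
The ABC gives every analyzer a single boolean entry point; a class that fails to override it cannot be instantiated. A standalone sketch of the contract (AlwaysPassingAnalyzer is hypothetical, not part of this PR):

from abc import ABCMeta, abstractmethod


class Analyzer(object, metaclass=ABCMeta):
    @abstractmethod
    def analyze_test_results(self):
        raise NotImplementedError()


class AlwaysPassingAnalyzer(Analyzer):
    """Hypothetical subclass illustrating the contract."""
    def analyze_test_results(self):
        return True  # real analyzers return True only if their checks pass


assert AlwaysPassingAnalyzer().analyze_test_results()
# Analyzer() itself would raise TypeError, since analyze_test_results is abstract.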

View File

@@ -1,7 +1,8 @@
+from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
 from envs.monkey_zoo.blackbox.analyzers.analyzer_log import AnalyzerLog


-class CommunicationAnalyzer(object):
+class CommunicationAnalyzer(Analyzer):
     def __init__(self, island_client, machine_ips):
         self.island_client = island_client

View File

@@ -0,0 +1,59 @@
import logging
from datetime import timedelta

from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient

MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)

REPORT_URLS = [
    "api/report/security",
    "api/attack/report",
    "api/report/zero_trust/findings",
    "api/report/zero_trust/principles",
    "api/report/zero_trust/pillars"
]

logger = logging.getLogger(__name__)


class PerformanceAnalyzer(Analyzer):

    def __init__(self, island_client: MonkeyIslandClient, break_if_took_too_long=False):
        self.break_if_took_too_long = break_if_took_too_long
        self.island_client = island_client

    def analyze_test_results(self) -> bool:
        if not self.island_client.is_all_monkeys_dead():
            raise RuntimeError("Can't test report times since not all Monkeys have died.")

        # Collect timings for all pages
        self.island_client.clear_caches()
        report_resource_to_response_time = {}
        for url in REPORT_URLS:
            report_resource_to_response_time[url] = self.island_client.get_elapsed_for_get_request(url)

        # Calculate total time and check each page
        single_page_time_less_than_max = True
        total_time = timedelta()
        for page, elapsed in report_resource_to_response_time.items():
            logger.info(f"page {page} took {str(elapsed)}")
            total_time += elapsed
            if elapsed > MAX_ALLOWED_SINGLE_PAGE_TIME:
                single_page_time_less_than_max = False

        total_time_less_than_max = total_time < MAX_ALLOWED_TOTAL_TIME
        logger.info(f"total time is {str(total_time)}")

        performance_is_good_enough = total_time_less_than_max and single_page_time_less_than_max

        if self.break_if_took_too_long and not performance_is_good_enough:
            logger.warning(
                "Calling breakpoint - pausing to enable investigation of the Island. Type 'c' to continue once "
                "you're done investigating. Type 'p report_resource_to_response_time' and 'p total_time' to see "
                "performance information."
            )
            breakpoint()

        return performance_is_good_enough
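
Taken together, the two thresholds make the total budget the binding constraint: five pages at just under 2 seconds each would pass the per-page check but blow the 5-second total. A sketch of driving the analyzer directly, assuming MonkeyIslandClient is constructed from the Island address as elsewhere in the suite (the address below is illustrative):

from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient

island_client = MonkeyIslandClient("10.2.2.251:5000")  # address illustrative; adjust to your setup
analyzer = PerformanceAnalyzer(island_client, break_if_took_too_long=False)
passed = analyzer.analyze_test_results()  # True only if every page and the total beat their budgets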

View File

@@ -1,3 +1,4 @@
+from datetime import timedelta
 from time import sleep
 import json

@@ -85,3 +86,23 @@ class MonkeyIslandClient(object):
     def is_all_monkeys_dead(self):
         query = {'dead': False}
         return len(self.find_monkeys_in_db(query)) == 0
+
+    def clear_caches(self):
+        """
+        Tries to clear the Island's report caches.
+        :raises HTTPError: if the response status code indicates an error
+        :return: The response
+        """
+        response = self.requests.get("api/test/clear_caches")
+        response.raise_for_status()
+        return response
+
+    def get_elapsed_for_get_request(self, url):
+        response = self.requests.get(url)
+        if response.ok:
+            LOGGER.debug(f"Got OK for {url}; content peek:\n{response.content[:120].strip()}")
+            return response.elapsed
+        else:
+            LOGGER.error(f"Trying to get {url} but got unexpected {str(response)}")
+            # Instead of raising for status, mark failed responses with the maximal timedelta
+            return timedelta.max
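
response.elapsed is the time requests measured between sending the request and finishing parsing the response headers. Failed pages are mapped to timedelta.max (a class attribute, not a callable), which is guaranteed to exceed any threshold:

from datetime import timedelta

MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)

elapsed = timedelta.max  # the sentinel returned for a failed request
assert elapsed > MAX_ALLOWED_SINGLE_PAGE_TIME  # so a failed page always fails the timing check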

View File

@@ -61,6 +61,14 @@ class MonkeyIslandRequests(object):
             headers=self.get_jwt_header(),
             verify=False)

+    @_Decorators.refresh_jwt_token
+    def delete(self, url):
+        return requests.delete(  # noqa: DUO123
+            self.addr + url,
+            headers=self.get_jwt_header(),
+            verify=False
+        )
+
     @_Decorators.refresh_jwt_token
     def get_jwt_header(self):
         return {"Authorization": "JWT " + self.token}
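
The new helper follows the same shape as the existing request wrappers. A hypothetical call, assuming the module path used elsewhere in island_client and an illustrative resource path:

from envs.monkey_zoo.blackbox.island_client.monkey_island_requests import MonkeyIslandRequests

island_requests = MonkeyIslandRequests("10.2.2.251:5000")  # address illustrative
response = island_requests.delete("api/monkey/some_id")    # path hypothetical
response.raise_for_status()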

View File

@@ -0,0 +1,186 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"12345678",
"^NgDvY59~8"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.2",
"10.2.2.4"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"SSHExploiter",
"MSSQLExploiter",
"ElasticGroovyExploiter",
"HadoopExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": false,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 500
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 1000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}
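
The blackbox targets come out of subnet_scan_list. A minimal sketch of the extraction, assuming the real IslandConfigParser.get_ips_of_targets() reduces to reading this list (its actual logic may differ):

import json

with open("PERFORMANCE.conf") as f:
    conf = json.load(f)

# get_ips_of_targets() boils down to reading the scan list from the config:
targets = conf["basic_network"]["general"]["subnet_scan_list"]
print(targets)  # ['10.2.2.2', '10.2.2.4']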

View File

@@ -1,17 +1,8 @@
 {
   "basic": {
     "credentials": {
-      "exploit_password_list": [
-        "Password1!",
-        "1234",
-        "password",
-        "12345678"
-      ],
-      "exploit_user_list": [
-        "Administrator",
-        "root",
-        "user"
-      ]
+      "exploit_password_list": [],
+      "exploit_user_list": []
     },
     "general": {
       "should_exploit": true
@@ -46,18 +37,9 @@
   "exploits": {
     "general": {
       "exploiter_classes": [
-        "SmbExploiter",
-        "WmiExploiter",
-        "SSHExploiter",
-        "ShellShockExploiter",
-        "SambaCryExploiter",
-        "ElasticGroovyExploiter",
-        "Struts2Exploiter",
-        "WebLogicExploiter",
-        "HadoopExploiter",
-        "VSFTPDExploiter"
+        "Struts2Exploiter"
       ],
-      "skip_exploit_if_file_exist": false
+      "skip_exploit_if_file_exist": true
     },
     "ms08_067": {
       "ms08_067_exploit_attempts": 5,
@@ -149,19 +131,19 @@
     "life_cycle": {
       "max_iterations": 1,
       "retry_failed_explotation": true,
-      "timeout_between_iterations": 100,
+      "timeout_between_iterations": 30,
       "victims_max_exploit": 7,
       "victims_max_find": 30
     },
     "system_info": {
-      "collect_system_info": true,
-      "extract_azure_creds": true,
-      "should_use_mimikatz": true
+      "collect_system_info": false,
+      "extract_azure_creds": false,
+      "should_use_mimikatz": false
     }
   },
   "network": {
     "ping_scanner": {
-      "ping_scan_timeout": 1000
+      "ping_scan_timeout": 100
     },
     "tcp_scanner": {
       "HTTP_PORTS": [

@@ -173,7 +155,7 @@
       ],
       "tcp_scan_get_banner": true,
       "tcp_scan_interval": 0,
-      "tcp_scan_timeout": 3000,
+      "tcp_scan_timeout": 300,
       "tcp_target_ports": [
         22,
         2222,

View File

@@ -4,6 +4,7 @@ import logging
 import pytest
 from time import sleep

+from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
 from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
 from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import CommunicationAnalyzer
 from envs.monkey_zoo.blackbox.island_client.island_config_parser import IslandConfigParser
@@ -58,12 +59,30 @@ class TestMonkeyBlackbox(object):
         config_parser = IslandConfigParser(conf_filename)
         analyzer = CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())
         log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
-        BasicTest(test_name,
-                  island_client,
-                  config_parser,
-                  [analyzer],
-                  timeout_in_seconds,
-                  log_handler).run()
+        BasicTest(
+            name=test_name,
+            island_client=island_client,
+            config_parser=config_parser,
+            analyzers=[analyzer],
+            timeout=timeout_in_seconds,
+            post_exec_analyzers=[],
+            log_handler=log_handler).run()
+
+    @staticmethod
+    def run_performance_test(island_client, conf_filename, test_name, timeout_in_seconds):
+        config_parser = IslandConfigParser(conf_filename)
+        log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
+        BasicTest(
+            name=test_name,
+            island_client=island_client,
+            config_parser=config_parser,
+            analyzers=[CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())],
+            timeout=timeout_in_seconds,
+            post_exec_analyzers=[PerformanceAnalyzer(
+                island_client,
+                break_if_took_too_long=False
+            )],
+            log_handler=log_handler).run()

     @staticmethod
     def get_log_dir_path():
@@ -108,3 +127,17 @@ class TestMonkeyBlackbox(object):
     def test_wmi_pth(self, island_client):
         TestMonkeyBlackbox.run_basic_test(island_client, "WMI_PTH.conf", "WMI_PTH")
+
+    def test_performance(self, island_client):
+        """
+        This test runs the SSH, Elastic, Hadoop and MSSQL machines all in one test,
+        for a total of 8 machines including the Monkey Island.
+        It has 2 analyzers: the regular one, which checks that all the Monkeys communicated,
+        and the performance one, which checks how long the reports took to generate.
+        """
+        TestMonkeyBlackbox.run_performance_test(
+            island_client,
+            "PERFORMANCE.conf",
+            "test_report_performance",
+            timeout_in_seconds=10 * 60)
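
Note the two separate time scales here: the 10-minute timeout bounds the monkey run itself, while the 2s/5s budgets in PerformanceAnalyzer are only checked after all monkeys have died. A quick sanity check of how the budgets relate:

from datetime import timedelta

run_timeout = timedelta(seconds=10 * 60)      # bounds the whole monkey run
single_page_budget = timedelta(seconds=2)     # per report endpoint
total_budget = timedelta(seconds=5)           # all five endpoints combined
assert total_budget < 5 * single_page_budget  # the total budget is the stricter constraint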

View File

@@ -1,4 +1,3 @@
-import json
 from time import sleep
 import logging
@@ -14,11 +13,12 @@ LOGGER = logging.getLogger(__name__)
 class BasicTest(object):

-    def __init__(self, name, island_client, config_parser, analyzers, timeout, log_handler):
+    def __init__(self, name, island_client, config_parser, analyzers, timeout, post_exec_analyzers, log_handler):
         self.name = name
         self.island_client = island_client
         self.config_parser = config_parser
         self.analyzers = analyzers
+        self.post_exec_analyzers = post_exec_analyzers
         self.timeout = timeout
         self.log_handler = log_handler
@@ -32,13 +32,13 @@ class BasicTest(object):
         self.island_client.kill_all_monkeys()
         self.wait_until_monkeys_die()
         self.wait_for_monkey_process_to_finish()
+        self.test_post_exec_analyzers()
         self.parse_logs()
         self.island_client.reset_env()

     def print_test_starting_info(self):
         LOGGER.info("Started {} test".format(self.name))
-        LOGGER.info("Machines participating in test:")
-        LOGGER.info(" ".join(self.config_parser.get_ips_of_targets()))
+        LOGGER.info("Machines participating in test: " + ", ".join(self.config_parser.get_ips_of_targets()))
         print("")

     def test_until_timeout(self):
@@ -62,10 +62,8 @@ class BasicTest(object):
                 timer.get_time_taken()))

     def all_analyzers_pass(self):
-        for analyzer in self.analyzers:
-            if not analyzer.analyze_test_results():
-                return False
-        return True
+        analyzers_results = [analyzer.analyze_test_results() for analyzer in self.analyzers]
+        return all(analyzers_results)

     def get_analyzer_logs(self):
         log = ""
@@ -94,4 +92,9 @@ class BasicTest(object):
         If we try to launch monkey during that time window monkey will fail to start, that's
         why test needs to wait a bit even after all monkeys are dead.
         """
+        LOGGER.debug("Waiting for Monkey process to close...")
         sleep(TIME_FOR_MONKEY_PROCESS_TO_FINISH)
+
+    def test_post_exec_analyzers(self):
+        post_exec_analyzers_results = [analyzer.analyze_test_results() for analyzer in self.post_exec_analyzers]
+        assert all(post_exec_analyzers_results)
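
One subtlety of the assert-all pattern: all([]) is True, so run_basic_test's empty post_exec_analyzers list passes trivially. A runnable sketch of the new phase in isolation (PassingAnalyzer is hypothetical):

def run_post_exec_analyzers(post_exec_analyzers):
    # Mirrors test_post_exec_analyzers: run every analyzer, assert all passed.
    results = [analyzer.analyze_test_results() for analyzer in post_exec_analyzers]
    assert all(results)


class PassingAnalyzer:
    def analyze_test_results(self):
        return True


run_post_exec_analyzers([PassingAnalyzer()])  # passes
run_post_exec_analyzers([])                   # also passes, since all([]) is True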

View File

@@ -25,6 +25,7 @@ from monkey_island.cc.resources.root import Root
 from monkey_island.cc.resources.telemetry import Telemetry
 from monkey_island.cc.resources.telemetry_feed import TelemetryFeed
 from monkey_island.cc.resources.pba_file_download import PBAFileDownload
+from monkey_island.cc.resources.test.clear_caches import ClearCaches
 from monkey_island.cc.resources.version_update import VersionUpdate
 from monkey_island.cc.resources.pba_file_upload import FileUpload
 from monkey_island.cc.resources.attack.attack_config import AttackConfiguration

@@ -116,6 +117,7 @@ def init_api_resources(api):
     api.add_resource(VersionUpdate, '/api/version-update', '/api/version-update/')
     api.add_resource(MonkeyTest, '/api/test/monkey')
+    api.add_resource(ClearCaches, '/api/test/clear_caches')
     api.add_resource(LogTest, '/api/test/log')

View File

@@ -1,4 +1,4 @@
 """
 This package contains resources used by blackbox tests
-to analize test results, download logs and so on.
+to analyze test results, download logs and so on.
 """

View File

@@ -0,0 +1,35 @@
import logging

import flask_restful

from monkey_island.cc.auth import jwt_required
from monkey_island.cc.services.attack.attack_report import AttackReportService
from monkey_island.cc.services.reporting.report import ReportService

NOT_ALL_REPORTS_DELETED = "Not all reports have been cleared from the DB!"

logger = logging.getLogger(__name__)


class ClearCaches(flask_restful.Resource):
    """
    Used for timing tests: BlackBox needs the actual execution time of report functions
    without caching, so this resource clears the report caches.
    :note: DO NOT CALL THIS IN PRODUCTION CODE, as it will slow down the user experience.
    """
    @jwt_required()
    def get(self, **kw):
        try:
            logger.warning("Trying to clear caches! Make sure this is not production")
            ReportService.delete_saved_report_if_exists()
            AttackReportService.delete_saved_report_if_exists()
            # TODO: Monkey.clear_caches(), clear LRU caches of functions in the Monkey object
        except RuntimeError as e:
            logger.exception(e)
            flask_restful.abort(500, error_info=str(e))

        if ReportService.is_report_generated() or AttackReportService.is_report_generated():
            logger.error(NOT_ALL_REPORTS_DELETED)
            flask_restful.abort(500, error_info=NOT_ALL_REPORTS_DELETED)

        return {"success": "true"}
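
From the client side this is exactly what the blackbox clear_caches() helper added above calls. A sketch, assuming an authenticated MonkeyIslandRequests instance named island_requests:

response = island_requests.get("api/test/clear_caches")  # JWT header added by the wrapper
response.raise_for_status()
assert response.json() == {"success": "true"}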

View File

@@ -103,3 +103,9 @@ class AttackReportService:
         """
         generated_report = mongo.db.attack_report.find_one({})
         return generated_report is not None
+
+    @staticmethod
+    def delete_saved_report_if_exists():
+        delete_result = mongo.db.attack_report.delete_many({})
+        if mongo.db.attack_report.count_documents({}) != 0:
+            raise RuntimeError("Attack Report cache not cleared. DeleteResult: " + str(delete_result.raw_result))
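
The delete-then-verify pattern with bare pymongo, for reference (connection details and database name are illustrative, not the Island's actual wiring):

from pymongo import MongoClient

db = MongoClient("localhost", 27017)["monkeyisland"]  # connection details illustrative
result = db.attack_report.delete_many({})
if db.attack_report.count_documents({}) != 0:
    raise RuntimeError("Attack Report cache not cleared. DeleteResult: " + str(result.raw_result))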

View File

@@ -773,6 +773,16 @@ class ReportService:

         return False

+    @staticmethod
+    def delete_saved_report_if_exists():
+        """
+        Clears the saved report from the DB.
+        :raises RuntimeError: if the deletion failed
+        """
+        delete_result = mongo.db.report.delete_many({})
+        if mongo.db.report.count_documents({}) != 0:
+            raise RuntimeError("Report cache not cleared. DeleteResult: " + str(delete_result.raw_result))
+
     @staticmethod
     def decode_dot_char_before_mongo_insert(report_dict):
         """