Got 500 from delete operation so simplifying and re-trying

This commit is contained in:
Shay Nehmad 2020-02-25 11:24:28 +02:00
parent 9965947d3f
commit 97976cdbc5
7 changed files with 274 additions and 39 deletions

View File

@ -1,3 +1,4 @@
import logging
from datetime import timedelta
from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
@ -7,15 +8,23 @@ from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIs
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=1)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=3)
logger = logging.getLogger(__name__)
class PerformanceAnalyzer(Analyzer):
def __init__(self, island_client: MonkeyIslandClient, break_if_took_too_long=False):
    """
    :param island_client: client used to query the Island for report timings.
    :param break_if_took_too_long: when True, analyze_test_results() drops into
        an interactive breakpoint() if the timing thresholds are exceeded, so a
        developer can inspect the Island. Keep False in CI.
    """
    self.break_if_took_too_long = break_if_took_too_long
    self.island_client = island_client
    self.log = AnalyzerLog(self.__class__.__name__)
def analyze_test_results(self) -> bool:
self.log.clear()
if not self.island_client.is_all_monkeys_dead():
self.log.add_entry("Can't test report times since not all Monkeys have died.")
return False
total_time = timedelta()
self.island_client.clear_caches()
@ -33,4 +42,11 @@ class PerformanceAnalyzer(Analyzer):
self.log.add_entry(f"total time is {str(total_time)}")
if self.break_if_took_too_long and (not (total_time_less_then_max and single_page_time_less_then_max)):
logger.warning(
"Calling breakpoint - pausing to enable investigation of island. Type 'c' to continue once you're done "
"investigating. type 'p timings' and 'p total_time' to see performance information."
)
breakpoint()
return total_time_less_then_max and single_page_time_less_then_max

View File

@ -1,3 +1,4 @@
from datetime import timedelta
from time import sleep
import json
@ -109,10 +110,13 @@ class MonkeyIslandClient(object):
for url in REPORT_URLS:
response = self.requests.get(url)
if response:
if response.ok:
LOGGER.debug(f"Got ok for {url} content peek:\n{response.content[:120]}")
report_resource_to_response_time[url] = response.elapsed
else:
LOGGER.error(f"Trying to get {url} but got unexpected {str(response)}")
response.raise_for_status()
# instead of raising for status, mark failed responses as maxtime
report_resource_to_response_time[url] = timedelta.max()
return report_resource_to_response_time

View File

@ -0,0 +1,186 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"12345678",
"^NgDvY59~8"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.2",
"10.2.2.4"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"SSHExploiter",
"MSSQLExploiter",
"ElasticGroovyExploiter",
"HadoopExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": false,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 500
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 1000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

View File

@ -13,6 +13,7 @@ from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.log_handlers.test_logs_handler import TestLogsHandler
DEFAULT_TIMEOUT_SECONDS = 5*60
PERFORMANCE_TIMEOUT_SECONDS = 10*60
MACHINE_BOOTUP_WAIT_SECONDS = 30
GCP_TEST_MACHINE_LIST = ['sshkeys-11', 'sshkeys-12', 'elastic-4', 'elastic-5', 'hadoop-2', 'hadoop-3', 'mssql-16',
'mimikatz-14', 'mimikatz-15', 'struts2-23', 'struts2-24', 'tunneling-9', 'tunneling-10',
@ -59,27 +60,30 @@ class TestMonkeyBlackbox(object):
config_parser = IslandConfigParser(conf_filename)
analyzer = CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())
log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
# Keyword arguments throughout: BasicTest.__init__ gained post_exec_analyzers
# between timeout and log_handler, so positional calls would silently misbind.
BasicTest(
    name=test_name,
    island_client=island_client,
    config_parser=config_parser,
    analyzers=[analyzer],
    timeout=timeout_in_seconds,
    post_exec_analyzers=[],  # basic tests have no post-execution analyzers
    log_handler=log_handler).run()
@staticmethod
def run_performance_test(island_client, conf_filename, test_name, timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS):
    """
    Run a blackbox test that also measures report-generation performance.

    The CommunicationAnalyzer runs while the test is in progress; the
    PerformanceAnalyzer runs once, after all monkeys are done, so the
    report timings are measured on a fully populated database.
    """
    config_parser = IslandConfigParser(conf_filename)
    log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
    BasicTest(
        name=test_name,
        island_client=island_client,
        config_parser=config_parser,
        analyzers=[CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())],
        timeout=timeout_in_seconds,
        post_exec_analyzers=[PerformanceAnalyzer(
            island_client,
            # Must stay False in CI: True drops into an interactive
            # breakpoint() on slow reports, hanging unattended runs.
            break_if_took_too_long=False
        )],
        log_handler=log_handler).run()
@staticmethod
def get_log_dir_path():
@ -126,4 +130,15 @@ class TestMonkeyBlackbox(object):
TestMonkeyBlackbox.run_basic_test(island_client, "WMI_PTH.conf", "WMI_PTH")
def test_performance(self, island_client):
    """
    This test includes the SSH + Elastic + Hadoop + MSSQL machines all in one test
    for a total of 8 machines including the Monkey Island.
    It has 2 analyzers - the regular one which checks all the Monkeys
    and the Timing one which checks how long the report took to execute.
    """
    TestMonkeyBlackbox.run_performance_test(
        island_client,
        "PERFORMANCE.conf",
        "test_report_performance",
        timeout_in_seconds=PERFORMANCE_TIMEOUT_SECONDS)

View File

@ -14,11 +14,12 @@ LOGGER = logging.getLogger(__name__)
class BasicTest(object):
def __init__(self, name, island_client, config_parser, analyzers, timeout, post_exec_analyzers, log_handler):
    """
    :param analyzers: checked repeatedly while the test is running
        (see all_analyzers_pass).
    :param post_exec_analyzers: checked once, after all monkeys have finished
        (see test_post_exec_analyzers).
    """
    self.name = name
    self.island_client = island_client
    self.config_parser = config_parser
    self.analyzers = analyzers
    self.post_exec_analyzers = post_exec_analyzers
    self.timeout = timeout
    self.log_handler = log_handler
@ -32,13 +33,13 @@ class BasicTest(object):
self.island_client.kill_all_monkeys()
self.wait_until_monkeys_die()
self.wait_for_monkey_process_to_finish()
self.test_post_exec_analyzers()
self.parse_logs()
self.island_client.reset_env()
def print_test_starting_info(self):
    """Log which test is starting and which machines take part in it."""
    LOGGER.info("Started {} test".format(self.name))
    # One log line for all targets — easier to grep than a line per IP.
    LOGGER.info("Machines participating in test: " + ", ".join(self.config_parser.get_ips_of_targets()))
    print("")
def test_until_timeout(self):
@ -62,14 +63,12 @@ class BasicTest(object):
timer.get_time_taken()))
def all_analyzers_pass(self):
    """
    Return True iff every analyzer passes.

    Deliberately evaluates ALL analyzers (no short-circuit on the first
    failure) so each one writes its log entries before the verdict.
    """
    analyzers_results = [analyzer.analyze_test_results() for analyzer in self.analyzers]
    return all(analyzers_results)
def get_analyzer_logs(self):
    """Concatenate the log contents of every analyzer (regular and post-exec)."""
    return "".join("\n" + analyzer.log.get_contents() for analyzer in self.get_all_analyzers())
@ -94,4 +93,12 @@ class BasicTest(object):
If we try to launch monkey during that time window monkey will fail to start, that's
why test needs to wait a bit even after all monkeys are dead.
"""
# Logger.debug() requires a msg argument — a bare LOGGER.debug() raises TypeError.
LOGGER.debug("Waiting for the monkey process to finish after all monkeys are dead.")
sleep(TIME_FOR_MONKEY_PROCESS_TO_FINISH)
def test_post_exec_analyzers(self):
post_exec_analyzers_results = [analyzer.analyze_test_results() for analyzer in self.post_exec_analyzers]
assert all(post_exec_analyzers_results)
def get_all_analyzers(self):
    """Return a single list with both regular and post-execution analyzers."""
    return [*self.analyzers, *self.post_exec_analyzers]

View File

@ -106,8 +106,12 @@ class AttackReportService:
@staticmethod
def delete_saved_report_if_exists():
    """
    Clear the cached attack report by emptying the attack_report collection.

    delete_many({}) wipes the whole collection instead of deleting a single
    report document by _id, which was returning HTTP 500s (see commit message).

    :raises RuntimeError: if documents remain in the collection afterwards.
    """
    delete_result = mongo.db.attack_report.delete_many({})
    if mongo.db.attack_report.count_documents({}) != 0:
        # str() is required: DeleteResult.raw_result is a dict, and
        # str + dict concatenation would itself raise TypeError here.
        raise RuntimeError("Attack Report cache not cleared. DeleteResult: " + str(delete_result.raw_result))

View File

@ -779,12 +779,15 @@ class ReportService:
This function clears the saved report from the DB.
:raises RuntimeError if deletion failed
"""
# Wipe the whole report collection rather than deleting one document by _id —
# the by-_id delete was returning HTTP 500s (see commit message).
delete_result = mongo.db.report.delete_many({})
if mongo.db.report.count_documents({}) != 0:
    # str() is required: DeleteResult.raw_result is a dict, and
    # str + dict concatenation would itself raise TypeError here.
    raise RuntimeError("Report cache not cleared. DeleteResult: " + str(delete_result.raw_result))
@staticmethod
def decode_dot_char_before_mongo_insert(report_dict):