Merge remote-tracking branch 'upstream/develop' into 393/python-3

# Conflicts:
#	monkey/common/cloud/aws_instance.py
VakarisZ 2019-10-10 11:40:51 +03:00
commit 8ede629b13
82 changed files with 3766 additions and 357 deletions

envs/__init__.py Normal file

envs/monkey_zoo/.gitignore vendored Normal file

@@ -0,0 +1 @@
logs/

@@ -0,0 +1,19 @@
# Automatic blackbox tests
### Prerequisites
1. Download the Google Cloud SDK: https://cloud.google.com/sdk/docs/
2. Download a service account key for the MonkeyZoo project (if you deployed MonkeyZoo via the Terraform scripts, you already have one): GCP console -> IAM -> Service accounts (you can use the same key used to authenticate the Terraform scripts). A `gcloud` one-liner for this is sketched right after this list.
3. Deploy the relevant branch + compiled executables to the Island machine on GCP.
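If you prefer the command line for step 2, the key can also be created with `gcloud` — a sketch only, assuming a service account named `monkeyzoo-service-account` (substitute whatever account you used for the Terraform scripts): `gcloud iam service-accounts keys create gcp_key.json --iam-account=monkeyzoo-service-account@guardicore-22050661.iam.gserviceaccount.com`. Save the key as `envs/monkey_zoo/gcp_keys/gcp_key.json`, which is the default path `GCPHandler` reads it from.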
### Running the tests
To execute the entire test suite, you need to know the external IP of the Island machine on GCP. You can find it in the GCP console under `Compute Engine/VM Instances`, in the _External IP_ column.
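If the Google Cloud SDK from the prerequisites is already authenticated, the same address can be read from the command line instead of the console — for example `gcloud compute instances list --filter="name~island"` (assuming the Island VM's name contains "island"; drop the filter to list every machine) and take the value from the `EXTERNAL_IP` column.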
#### Running in command line
Run the following command:
`monkey\envs\monkey_zoo\blackbox>python -m pytest --island=35.207.152.72:5000 test_blackbox.py`
#### Running in PyCharm
Create a pytest run configuration with the additional argument `--island=35.207.152.72:5000` and point it at the
`monkey\envs\monkey_zoo\blackbox` directory.
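To run a single test instead of the whole suite (handy when debugging one exploiter), pytest's standard `-k` selector can be added to either setup, e.g. `monkey\envs\monkey_zoo\blackbox>python -m pytest --island=35.207.152.72:5000 -k test_hadoop_exploiter test_blackbox.py`. Keep in mind that the session-scoped `GCPHandler` fixture still starts and stops every machine in `GCP_TEST_MACHINE_LIST`, not only the ones that test touches.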

@@ -0,0 +1,17 @@
LOG_INIT_MESSAGE = "Analysis didn't run."


class AnalyzerLog(object):

    def __init__(self, analyzer_name):
        self.contents = LOG_INIT_MESSAGE
        self.name = analyzer_name

    def clear(self):
        self.contents = ""

    def add_entry(self, message):
        self.contents = "{}\n{}".format(self.contents, message)

    def get_contents(self):
        return "{}: {}\n".format(self.name, self.contents)

@@ -0,0 +1,24 @@
from envs.monkey_zoo.blackbox.analyzers.analyzer_log import AnalyzerLog


class CommunicationAnalyzer(object):

    def __init__(self, island_client, machine_ips):
        self.island_client = island_client
        self.machine_ips = machine_ips
        self.log = AnalyzerLog(self.__class__.__name__)

    def analyze_test_results(self):
        self.log.clear()
        all_monkeys_communicated = True
        for machine_ip in self.machine_ips:
            if not self.did_monkey_communicate_back(machine_ip):
                self.log.add_entry("Monkey from {} didn't communicate back".format(machine_ip))
                all_monkeys_communicated = False
            else:
                self.log.add_entry("Monkey from {} communicated back".format(machine_ip))
        return all_monkeys_communicated

    def did_monkey_communicate_back(self, machine_ip):
        query = {'ip_addresses': {'$elemMatch': {'$eq': machine_ip}}}
        return len(self.island_client.find_monkeys_in_db(query)) > 0

@@ -0,0 +1,11 @@
import pytest


def pytest_addoption(parser):
    parser.addoption("--island", action="store", default="",
                     help="Specify the Monkey Island address (host+port).")


@pytest.fixture(scope='module')
def island(request):
    return request.config.getoption("--island")

@@ -0,0 +1,18 @@
import json
import os


class IslandConfigParser(object):

    def __init__(self, config_filename):
        with open(IslandConfigParser.get_conf_file_path(config_filename), 'r') as config_file:
            self.config_raw = config_file.read()
        self.config_json = json.loads(self.config_raw)

    def get_ips_of_targets(self):
        return self.config_json['basic_network']['general']['subnet_scan_list']

    @staticmethod
    def get_conf_file_path(conf_file_name):
        return os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                            "island_configs",
                            conf_file_name)

@@ -0,0 +1,87 @@
from functools import wraps
from time import sleep
import json
import logging

from bson import json_util

from envs.monkey_zoo.blackbox.island_client.monkey_island_requests import MonkeyIslandRequests

SLEEP_BETWEEN_REQUESTS_SECONDS = 0.5
MONKEY_TEST_ENDPOINT = 'api/test/monkey'
LOG_TEST_ENDPOINT = 'api/test/log'
LOGGER = logging.getLogger(__name__)


def avoid_race_condition(func):
    # Sleep before each decorated call so that consecutive requests to the island
    # don't race each other.
    @wraps(func)
    def wrapper(*args, **kwargs):
        sleep(SLEEP_BETWEEN_REQUESTS_SECONDS)
        return func(*args, **kwargs)
    return wrapper


class MonkeyIslandClient(object):

    def __init__(self, server_address):
        self.requests = MonkeyIslandRequests(server_address)

    def get_api_status(self):
        return self.requests.get("api")

    @avoid_race_condition
    def import_config(self, config_contents):
        _ = self.requests.post("api/configuration/island", data=config_contents)

    @avoid_race_condition
    def run_monkey_local(self):
        response = self.requests.post_json("api/local-monkey", dict_data={"action": "run"})
        if MonkeyIslandClient.monkey_ran_successfully(response):
            LOGGER.info("Running the monkey.")
        else:
            LOGGER.error("Failed to run the monkey.")
            assert False

    @staticmethod
    def monkey_ran_successfully(response):
        return response.ok and json.loads(response.content)['is_running']

    @avoid_race_condition
    def kill_all_monkeys(self):
        if self.requests.get("api", {"action": "killall"}).ok:
            LOGGER.info("Killing all monkeys after the test.")
        else:
            LOGGER.error("Failed to kill all monkeys.")
            assert False

    @avoid_race_condition
    def reset_env(self):
        if self.requests.get("api", {"action": "reset"}).ok:
            LOGGER.info("Resetting environment after the test.")
        else:
            LOGGER.error("Failed to reset the environment.")
            assert False

    def find_monkeys_in_db(self, query):
        if query is None:
            raise TypeError
        response = self.requests.get(MONKEY_TEST_ENDPOINT,
                                     MonkeyIslandClient.form_find_query_for_request(query))
        return MonkeyIslandClient.get_test_query_results(response)

    def get_all_monkeys_from_db(self):
        response = self.requests.get(MONKEY_TEST_ENDPOINT,
                                     MonkeyIslandClient.form_find_query_for_request(None))
        return MonkeyIslandClient.get_test_query_results(response)

    def find_log_in_db(self, query):
        response = self.requests.get(LOG_TEST_ENDPOINT,
                                     MonkeyIslandClient.form_find_query_for_request(query))
        return MonkeyIslandClient.get_test_query_results(response)

    @staticmethod
    def form_find_query_for_request(query):
        return {'find_query': json_util.dumps(query)}

    @staticmethod
    def get_test_query_results(response):
        return json.loads(response.content)['results']

    def is_all_monkeys_dead(self):
        query = {'dead': False}
        return len(self.find_monkeys_in_db(query)) == 0

@@ -0,0 +1,49 @@
import requests
import logging

# SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()'
NO_AUTH_CREDS = '55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062' \
                '8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557'

LOGGER = logging.getLogger(__name__)


class MonkeyIslandRequests(object):

    def __init__(self, server_address):
        self.addr = "https://{IP}/".format(IP=server_address)
        self.token = self.try_get_jwt_from_server()

    def try_get_jwt_from_server(self):
        try:
            return self.get_jwt_from_server()
        except requests.ConnectionError as err:
            LOGGER.error(
                "Unable to connect to island, aborting! Error information: {}. Server: {}".format(err, self.addr))
            assert False

    def get_jwt_from_server(self):
        resp = requests.post(self.addr + "api/auth",
                             json={"username": NO_AUTH_CREDS, "password": NO_AUTH_CREDS},
                             verify=False)
        return resp.json()["access_token"]

    def get(self, url, data=None):
        return requests.get(self.addr + url,
                            headers=self.get_jwt_header(),
                            params=data,
                            verify=False)

    def post(self, url, data):
        return requests.post(self.addr + url,
                             data=data,
                             headers=self.get_jwt_header(),
                             verify=False)

    def post_json(self, url, dict_data):
        return requests.post(self.addr + url,
                             json=dict_data,
                             headers=self.get_jwt_header(),
                             verify=False)

    def get_jwt_header(self):
        return {"Authorization": "JWT " + self.token}

@@ -0,0 +1,184 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"1234",
"password",
"12345678"
],
"exploit_user_list": [
"Administrator",
"root",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.4",
"10.2.2.5"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"ElasticGroovyExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,186 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"1234",
"password",
"12345678"
],
"exploit_user_list": [
"Administrator",
"root",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.3",
"10.2.2.10"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"HadoopExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [
"e1c0dc690821c13b10a41dccfc72e43a"
],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,183 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"Xk8VDTsC",
"password",
"12345678"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.16"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"MSSQLExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,183 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"1234",
"password",
"12345678"
],
"exploit_user_list": [
"Administrator",
"root",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.38"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"ShellShockExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,182 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"Ivrrw5zEzs"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.44",
"10.2.2.15"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"SmbExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,180 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.15"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"SmbExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [ "f7e457346f7743daece17258667c936d" ],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,192 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"12345678",
"^NgDvY59~8"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.41",
"10.2.2.42"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"SmbExploiter",
"WmiExploiter",
"SSHExploiter",
"ShellShockExploiter",
"SambaCryExploiter",
"ElasticGroovyExploiter",
"Struts2Exploiter",
"WebLogicExploiter",
"HadoopExploiter",
"VSFTPDExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,193 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"1234",
"password",
"12345678"
],
"exploit_user_list": [
"Administrator",
"root",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.9",
"10.2.2.11"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"SmbExploiter",
"WmiExploiter",
"SSHExploiter",
"ShellShockExploiter",
"SambaCryExploiter",
"ElasticGroovyExploiter",
"Struts2Exploiter",
"WebLogicExploiter",
"HadoopExploiter",
"VSFTPDExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,194 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"3Q=(Ge(+&w]*",
"`))jU7L(w}",
"12345678"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 3,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.32",
"10.2.1.10",
"10.2.0.11"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"SmbExploiter",
"WmiExploiter",
"SSHExploiter",
"ShellShockExploiter",
"SambaCryExploiter",
"ElasticGroovyExploiter",
"Struts2Exploiter",
"WebLogicExploiter",
"HadoopExploiter",
"VSFTPDExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 60,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,184 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"1234",
"password",
"12345678"
],
"exploit_user_list": [
"Administrator",
"root",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.18",
"10.2.2.19"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"WebLogicExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,190 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"Ivrrw5zEzs"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.44",
"10.2.2.15"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"WmiExploiter",
"SSHExploiter",
"ShellShockExploiter",
"SambaCryExploiter",
"ElasticGroovyExploiter",
"Struts2Exploiter",
"WebLogicExploiter",
"HadoopExploiter",
"VSFTPDExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,188 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.15"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"WmiExploiter",
"SSHExploiter",
"ShellShockExploiter",
"SambaCryExploiter",
"ElasticGroovyExploiter",
"Struts2Exploiter",
"WebLogicExploiter",
"HadoopExploiter",
"VSFTPDExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [ "f7e457346f7743daece17258667c936d" ],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

@@ -0,0 +1,38 @@
import os
import logging

from bson import ObjectId

LOGGER = logging.getLogger(__name__)


class MonkeyLog(object):

    def __init__(self, monkey, log_dir_path):
        self.monkey = monkey
        self.log_dir_path = log_dir_path

    def download_log(self, island_client):
        log = island_client.find_log_in_db({'monkey_id': ObjectId(self.monkey['id'])})
        if not log:
            LOGGER.error("Log for monkey {} not found".format(self.monkey['ip_addresses'][0]))
            return False
        else:
            self.write_log_to_file(log)
            return True

    def write_log_to_file(self, log):
        with open(self.get_log_path_for_monkey(self.monkey), 'w') as log_file:
            log_file.write(MonkeyLog.parse_log(log))

    @staticmethod
    def parse_log(log):
        log = log.strip('"')
        log = log.replace("\\n", "\n ")
        return log

    @staticmethod
    def get_filename_for_monkey_log(monkey):
        return "{}.txt".format(monkey['ip_addresses'][0])

    def get_log_path_for_monkey(self, monkey):
        return os.path.join(self.log_dir_path, MonkeyLog.get_filename_for_monkey_log(monkey))

@@ -0,0 +1,43 @@
import logging
import re

LOGGER = logging.getLogger(__name__)


class MonkeyLogParser(object):

    def __init__(self, log_path):
        self.log_path = log_path
        self.log_contents = self.read_log()

    def read_log(self):
        with open(self.log_path, 'r') as log:
            return log.read()

    def print_errors(self):
        errors = MonkeyLogParser.get_errors(self.log_contents)
        if len(errors) > 0:
            LOGGER.info("Found {} errors:".format(len(errors)))
            for index, error_line in enumerate(errors):
                LOGGER.info("Err #{}: {}".format(index, error_line))
        else:
            LOGGER.info("No errors!")

    @staticmethod
    def get_errors(log_contents):
        searcher = re.compile(r"^.*:ERROR].*$", re.MULTILINE)
        return searcher.findall(log_contents)

    def print_warnings(self):
        warnings = MonkeyLogParser.get_warnings(self.log_contents)
        if len(warnings) > 0:
            LOGGER.info("Found {} warnings:".format(len(warnings)))
            for index, warning_line in enumerate(warnings):
                LOGGER.info("Warn #{}: {}".format(index, warning_line))
        else:
            LOGGER.info("No warnings!")

    @staticmethod
    def get_warnings(log_contents):
        searcher = re.compile(r"^.*:WARNING].*$", re.MULTILINE)
        return searcher.findall(log_contents)

@@ -0,0 +1,26 @@
import logging

from envs.monkey_zoo.blackbox.log_handlers.monkey_log import MonkeyLog

LOGGER = logging.getLogger(__name__)


class MonkeyLogsDownloader(object):

    def __init__(self, island_client, log_dir_path):
        self.island_client = island_client
        self.log_dir_path = log_dir_path
        self.monkey_log_paths = []

    def download_monkey_logs(self):
        LOGGER.info("Downloading each monkey log.")
        all_monkeys = self.island_client.get_all_monkeys_from_db()
        for monkey in all_monkeys:
            downloaded_log_path = self._download_monkey_log(monkey)
            if downloaded_log_path:
                self.monkey_log_paths.append(downloaded_log_path)

    def _download_monkey_log(self, monkey):
        log_handler = MonkeyLog(monkey, self.log_dir_path)
        download_successful = log_handler.download_log(self.island_client)
        return log_handler.get_log_path_for_monkey(monkey) if download_successful else None

@@ -0,0 +1,50 @@
import os
import shutil
import logging

from envs.monkey_zoo.blackbox.log_handlers.monkey_log_parser import MonkeyLogParser
from envs.monkey_zoo.blackbox.log_handlers.monkey_logs_downloader import MonkeyLogsDownloader

LOG_DIR_NAME = 'logs'
LOGGER = logging.getLogger(__name__)


class TestLogsHandler(object):

    def __init__(self, test_name, island_client, log_dir_path):
        self.test_name = test_name
        self.island_client = island_client
        self.log_dir_path = os.path.join(log_dir_path, self.test_name)

    def parse_test_logs(self):
        log_paths = self.download_logs()
        if not log_paths:
            LOGGER.error("No logs were downloaded. Maybe no monkeys were run, "
                         "or an early exception prevented log download?")
            return
        TestLogsHandler.parse_logs(log_paths)

    def download_logs(self):
        self.try_create_log_dir_for_test()
        downloader = MonkeyLogsDownloader(self.island_client, self.log_dir_path)
        downloader.download_monkey_logs()
        return downloader.monkey_log_paths

    def try_create_log_dir_for_test(self):
        try:
            os.mkdir(self.log_dir_path)
        except Exception as e:
            LOGGER.error("Can't create a dir for test logs: {}".format(e))

    @staticmethod
    def delete_log_folder_contents(log_dir_path):
        shutil.rmtree(log_dir_path, ignore_errors=True)
        os.mkdir(log_dir_path)

    @staticmethod
    def parse_logs(log_paths):
        for log_path in log_paths:
            LOGGER.info("Info from log at {}".format(log_path))
            log_parser = MonkeyLogParser(log_path)
            log_parser.print_errors()
            log_parser.print_warnings()

@@ -0,0 +1,5 @@
[pytest]
log_cli = 1
log_cli_level = INFO
log_cli_format = %(asctime)s [%(levelname)s] %(module)s.%(funcName)s.%(lineno)d: %(message)s
log_cli_date_format=%H:%M:%S

@@ -0,0 +1,2 @@
pytest
unittest

@@ -0,0 +1,110 @@
import os
import logging
import pytest
from time import sleep

from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import CommunicationAnalyzer
from envs.monkey_zoo.blackbox.island_client.island_config_parser import IslandConfigParser
from envs.monkey_zoo.blackbox.utils import gcp_machine_handlers
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.log_handlers.test_logs_handler import TestLogsHandler

DEFAULT_TIMEOUT_SECONDS = 5*60
MACHINE_BOOTUP_WAIT_SECONDS = 30
GCP_TEST_MACHINE_LIST = ['sshkeys-11', 'sshkeys-12', 'elastic-4', 'elastic-5', 'haddop-2-v3', 'hadoop-3', 'mssql-16',
                         'mimikatz-14', 'mimikatz-15', 'final-test-struts2-23', 'final-test-struts2-24',
                         'tunneling-9', 'tunneling-10', 'tunneling-11', 'weblogic-18', 'weblogic-19', 'shellshock-8']
LOG_DIR_PATH = "./logs"
LOGGER = logging.getLogger(__name__)


@pytest.fixture(autouse=True, scope='session')
def GCPHandler(request):
    GCPHandler = gcp_machine_handlers.GCPHandler()
    GCPHandler.start_machines(" ".join(GCP_TEST_MACHINE_LIST))
    wait_machine_bootup()

    def fin():
        GCPHandler.stop_machines(" ".join(GCP_TEST_MACHINE_LIST))

    request.addfinalizer(fin)


@pytest.fixture(autouse=True, scope='session')
def delete_logs():
    LOGGER.info("Deleting monkey logs before new tests.")
    TestLogsHandler.delete_log_folder_contents(TestMonkeyBlackbox.get_log_dir_path())


def wait_machine_bootup():
    sleep(MACHINE_BOOTUP_WAIT_SECONDS)


@pytest.fixture(scope='class')
def island_client(island):
    island_client_object = MonkeyIslandClient(island)
    island_client_object.reset_env()
    yield island_client_object


@pytest.mark.usefixtures('island_client')
# noinspection PyUnresolvedReferences
class TestMonkeyBlackbox(object):

    @staticmethod
    def run_basic_test(island_client, conf_filename, test_name, timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS):
        config_parser = IslandConfigParser(conf_filename)
        analyzer = CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())
        log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
        BasicTest(test_name,
                  island_client,
                  config_parser,
                  [analyzer],
                  timeout_in_seconds,
                  log_handler).run()

    @staticmethod
    def get_log_dir_path():
        return os.path.abspath(LOG_DIR_PATH)

    def test_server_online(self, island_client):
        assert island_client.get_api_status() is not None

    def test_ssh_exploiter(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "SSH.conf", "SSH_exploiter_and_keys")

    def test_hadoop_exploiter(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "HADOOP.conf", "Hadoop_exploiter", 6*60)

    def test_mssql_exploiter(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "MSSQL.conf", "MSSQL_exploiter")

    def test_smb_and_mimikatz_exploiters(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "SMB_MIMIKATZ.conf", "SMB_exploiter_mimikatz")

    def test_smb_pth(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "SMB_PTH.conf", "SMB_PTH")

    def test_elastic_exploiter(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "ELASTIC.conf", "Elastic_exploiter")

    def test_struts_exploiter(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "STRUTS2.conf", "Struts2_exploiter")

    def test_weblogic_exploiter(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "WEBLOGIC.conf", "Weblogic_exploiter")

    def test_shellshock_exploiter(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "SHELLSHOCK.conf", "Shellshock_exploiter")

    @pytest.mark.xfail(reason="Test fails randomly - still investigating.")
    def test_tunneling(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "TUNNELING.conf", "Tunneling_exploiter", 10*60)

    def test_wmi_and_mimikatz_exploiters(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "WMI_MIMIKATZ.conf", "WMI_exploiter,_mimikatz")

    def test_wmi_pth(self, island_client):
        TestMonkeyBlackbox.run_basic_test(island_client, "WMI_PTH.conf", "WMI_PTH")

@@ -0,0 +1,98 @@
import json
from time import sleep
import logging
from envs.monkey_zoo.blackbox.utils.test_timer import TestTimer
MAX_TIME_FOR_MONKEYS_TO_DIE = 5 * 60
WAIT_TIME_BETWEEN_REQUESTS = 10
TIME_FOR_MONKEY_PROCESS_TO_FINISH = 40
DELAY_BETWEEN_ANALYSIS = 3
LOGGER = logging.getLogger(__name__)
class BasicTest(object):
def __init__(self, name, island_client, config_parser, analyzers, timeout, log_handler):
self.name = name
self.island_client = island_client
self.config_parser = config_parser
self.analyzers = analyzers
self.timeout = timeout
self.log_handler = log_handler
def run(self):
LOGGER.info("Uploading configuration:\n{}".format(json.dumps(self.config_parser.config_json, indent=2)))
self.island_client.import_config(self.config_parser.config_raw)
self.print_test_starting_info()
try:
self.island_client.run_monkey_local()
self.test_until_timeout()
finally:
self.island_client.kill_all_monkeys()
self.wait_until_monkeys_die()
self.wait_for_monkey_process_to_finish()
self.parse_logs()
self.island_client.reset_env()
def print_test_starting_info(self):
LOGGER.info("Started {} test".format(self.name))
LOGGER.info("Machines participating in test:")
LOGGER.info(" ".join(self.config_parser.get_ips_of_targets()))
print("")
def test_until_timeout(self):
timer = TestTimer(self.timeout)
while not timer.is_timed_out():
if self.all_analyzers_pass():
self.log_success(timer)
return
sleep(DELAY_BETWEEN_ANALYSIS)
LOGGER.debug("Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken()))
self.log_failure(timer)
assert False
def log_success(self, timer):
LOGGER.info(self.get_analyzer_logs())
LOGGER.info("{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken()))
def log_failure(self, timer):
LOGGER.info(self.get_analyzer_logs())
LOGGER.error("{} test failed because of timeout. Time taken: {:.1f} seconds.".format(self.name,
timer.get_time_taken()))
def all_analyzers_pass(self):
for analyzer in self.analyzers:
if not analyzer.analyze_test_results():
return False
return True
def get_analyzer_logs(self):
log = ""
for analyzer in self.analyzers:
log += "\n" + analyzer.log.get_contents()
return log
def wait_until_monkeys_die(self):
time_passed = 0
while not self.island_client.is_all_monkeys_dead() and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE:
sleep(WAIT_TIME_BETWEEN_REQUESTS)
time_passed += WAIT_TIME_BETWEEN_REQUESTS
LOGGER.debug("Waiting for all monkeys to die. Time passed: {}".format(time_passed))
if time_passed >= MAX_TIME_FOR_MONKEYS_TO_DIE:
LOGGER.error("Some monkeys didn't die after the test, failing")
assert False
def parse_logs(self):
LOGGER.info("Parsing test logs:")
self.log_handler.parse_test_logs()
@staticmethod
def wait_for_monkey_process_to_finish():
"""
There is a short window in which a monkey is already marked as dead but its process is still closing.
If we try to launch a monkey during that window, it will fail to start; that is
why the test needs to wait a bit even after all monkeys are reported dead.
"""
sleep(TIME_FOR_MONKEY_PROCESS_TO_FINISH)
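For orientation, here is a minimal, self-contained sketch of the poll-analyzers-until-timeout pattern that run() and test_until_timeout implement above. Everything in it is an illustrative stand-in (the analyzer stub in particular is hypothetical and not part of this commit).

import logging
from time import sleep, time

logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger("blackbox_sketch")

class AlwaysPassingAnalyzer(object):
    """Hypothetical analyzer stub; a real analyzer inspects the island's database."""
    def analyze_test_results(self):
        return True

def run_until_timeout(analyzers, timeout_seconds, delay_between_analysis=3):
    """Poll all analyzers until they pass or the timeout elapses, like BasicTest.test_until_timeout."""
    deadline = time() + timeout_seconds
    while time() < deadline:
        if all(analyzer.analyze_test_results() for analyzer in analyzers):
            LOGGER.info("All analyzers passed.")
            return True
        sleep(delay_between_analysis)
    LOGGER.error("Timed out waiting for the analyzers to pass.")
    return False

if __name__ == "__main__":
    assert run_until_timeout([AlwaysPassingAnalyzer()], timeout_seconds=10)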

View File

@ -0,0 +1,54 @@
import subprocess
import logging
LOGGER = logging.getLogger(__name__)
class GCPHandler(object):
AUTHENTICATION_COMMAND = "gcloud auth activate-service-account --key-file=%s"
SET_PROPERTY_PROJECT = "gcloud config set project %s"
MACHINE_STARTING_COMMAND = "gcloud compute instances start %s --zone=%s"
MACHINE_STOPPING_COMMAND = "gcloud compute instances stop %s --zone=%s"
def __init__(self, key_path="../gcp_keys/gcp_key.json", zone="europe-west3-a", project_id="guardicore-22050661"):
self.zone = zone
try:
# pass the key file to gcp
subprocess.call(GCPHandler.get_auth_command(key_path), shell=True)
LOGGER.info("GCP Handler passed key")
# set project
subprocess.call(GCPHandler.get_set_project_command(project_id), shell=True)
LOGGER.info("GCP Handler set project")
LOGGER.info("GCP Handler initialized successfully")
except Exception as e:
LOGGER.error("GCP Handler failed to initialize: %s." % e)
def start_machines(self, machine_list):
"""
Start all the machines in the list.
:param machine_list: A space-separated string with all the machine names. Example:
start_machines(`" ".join(["elastic-3", "mssql-16"])`)
"""
LOGGER.info("Setting up all GCP machines...")
try:
subprocess.call((GCPHandler.MACHINE_STARTING_COMMAND % (machine_list, self.zone)), shell=True)
LOGGER.info("GCP machines successfully started.")
except Exception as e:
LOGGER.error("GCP Handler failed to start GCP machines: %s" % e)
def stop_machines(self, machine_list):
try:
subprocess.call((GCPHandler.MACHINE_STOPPING_COMMAND % (machine_list, self.zone)), shell=True)
LOGGER.info("GCP machines stopped successfully.")
except Exception as e:
LOGGER.error("GCP Handler failed to stop network machines: %s" % e)
@staticmethod
def get_auth_command(key_path):
return GCPHandler.AUTHENTICATION_COMMAND % key_path
@staticmethod
def get_set_project_command(project):
return GCPHandler.SET_PROPERTY_PROJECT % project
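A hedged usage sketch for the handler above. The import path is an assumption (file names are not shown in this view), the machine names are taken from the docstring example, and running it requires the gcloud CLI plus a valid service-account key.

# NOTE: the module path below is assumed from the blackbox/utils layout; adjust it if the file lives elsewhere.
from envs.monkey_zoo.blackbox.utils.gcp_machine_handlers import GCPHandler

machines = " ".join(["elastic-3", "mssql-16"])  # machine names borrowed from the docstring example above

handler = GCPHandler()            # authenticates with the key file and sets the GCP project on construction
handler.start_machines(machines)  # boot the zoo machines needed by the test
# ... run the blackbox tests against the started machines ...
handler.stop_machines(machines)   # shut them down again to save resources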

View File

@ -0,0 +1,9 @@
import json
from bson import ObjectId
class MongoQueryJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, ObjectId):
return str(o)
return json.JSONEncoder.default(self, o)
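A small standalone usage sketch (not part of the commit) showing how the encoder lets json.dumps serialize documents that contain bson ObjectIds; the encoder is repeated inline so the snippet runs on its own.

import json

from bson import ObjectId

class MongoQueryJSONEncoder(json.JSONEncoder):  # same encoder as above, repeated for a runnable snippet
    def default(self, o):
        if isinstance(o, ObjectId):
            return str(o)
        return json.JSONEncoder.default(self, o)

document = {"_id": ObjectId(), "hostname": "victim-1"}  # "victim-1" is an illustrative value
print(json.dumps(document, cls=MongoQueryJSONEncoder))  # the ObjectId becomes its 24-character hex string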

View File

@ -0,0 +1,17 @@
from time import time
class TestTimer(object):
def __init__(self, timeout):
self.timeout_time = TestTimer.get_timeout_time(timeout)
self.start_time = time()
def is_timed_out(self):
return time() > self.timeout_time
def get_time_taken(self):
return time() - self.start_time
@staticmethod
def get_timeout_time(timeout):
return time() + timeout
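A tiny standalone example of the polling pattern the timer supports (the class is repeated so the snippet runs on its own; the half-second sleep stands in for DELAY_BETWEEN_ANALYSIS).

from time import sleep, time

class TestTimer(object):  # repeated from above so the example is self-contained
    def __init__(self, timeout):
        self.timeout_time = time() + timeout
        self.start_time = time()

    def is_timed_out(self):
        return time() > self.timeout_time

    def get_time_taken(self):
        return time() - self.start_time

timer = TestTimer(2)              # give the polled condition a two-second budget
while not timer.is_timed_out():
    sleep(0.5)                    # poll, then wait before the next check
print("Waited {:.1f} seconds.".format(timer.get_time_taken()))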

4
envs/monkey_zoo/gcp_keys/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@ -1,6 +1,6 @@
import json
import re
import urllib.request, urllib.error, urllib.parse
import urllib2
import logging
@ -25,19 +25,19 @@ class AwsInstance(object):
self.account_id = None
try:
self.instance_id = urllib.request.urlopen(
self.instance_id = urllib2.urlopen(
AWS_LATEST_METADATA_URI_PREFIX + 'meta-data/instance-id', timeout=2).read()
self.region = self._parse_region(
urllib.request.urlopen(AWS_LATEST_METADATA_URI_PREFIX + 'meta-data/placement/availability-zone').read())
except urllib.error.URLError as e:
logger.warning("Failed init of AwsInstance while getting metadata: {}".format(e))
urllib2.urlopen(AWS_LATEST_METADATA_URI_PREFIX + 'meta-data/placement/availability-zone').read())
except urllib2.URLError as e:
logger.debug("Failed init of AwsInstance while getting metadata: {}".format(e.message))
try:
self.account_id = self._extract_account_id(
urllib.request.urlopen(
urllib2.urlopen(
AWS_LATEST_METADATA_URI_PREFIX + 'dynamic/instance-identity/document', timeout=2).read())
except urllib.error.URLError as e:
logger.warning("Failed init of AwsInstance while getting dynamic instance data: {}".format(e))
except urllib2.URLError as e:
logger.debug("Failed init of AwsInstance while getting dynamic instance data: {}".format(e))
@staticmethod
def _parse_region(region_url_response):

View File

@ -1 +1 @@
pyinstaller -F --log-level=DEBUG --clean --upx-dir=.\bin monkey.spec
pyinstaller -F --log-level=DEBUG --clean --upx-dir=.\bin monkey.spec

View File

@ -141,10 +141,10 @@ class Configuration(object):
exploiter_classes = []
# how many victims to look for in a single scan iteration
victims_max_find = 30
victims_max_find = 100
# how many victims to exploit before stopping
victims_max_exploit = 7
victims_max_exploit = 15
# depth of propagation
depth = 2
@ -199,7 +199,7 @@ class Configuration(object):
9200]
tcp_target_ports.extend(HTTP_PORTS)
tcp_scan_timeout = 3000 # 3000 Milliseconds
tcp_scan_interval = 0
tcp_scan_interval = 0 # in milliseconds
tcp_scan_get_banner = True
# Ping Scanner

View File

@ -97,8 +97,8 @@
],
"timeout_between_iterations": 10,
"use_file_logging": true,
"victims_max_exploit": 7,
"victims_max_find": 30,
"victims_max_exploit": 15,
"victims_max_find": 100,
"post_breach_actions" : []
custom_PBA_linux_cmd = ""
custom_PBA_windows_cmd = ""

View File

@ -68,8 +68,12 @@ class SmbExploiter(HostExploiter):
self._config.smb_download_timeout)
if remote_full_path is not None:
LOG.debug("Successfully logged in %r using SMB (%s : (SHA-512) %s : %s : %s)",
self.host, user, self._config.hash_sensitive_data(password), lm_hash, ntlm_hash)
LOG.debug("Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)",
self.host,
user,
self._config.hash_sensitive_data(password),
self._config.hash_sensitive_data(lm_hash),
self._config.hash_sensitive_data(ntlm_hash))
self.report_login_attempt(True, user, password, lm_hash, ntlm_hash)
self.add_vuln_port("%s or %s" % (SmbExploiter.KNOWN_PROTOCOLS['139/SMB'][1],
SmbExploiter.KNOWN_PROTOCOLS['445/SMB'][1]))
@ -80,9 +84,15 @@ class SmbExploiter(HostExploiter):
self.report_login_attempt(False, user, password, lm_hash, ntlm_hash)
except Exception as exc:
LOG.debug("Exception when trying to copy file using SMB to %r with user:"
" %s, password (SHA-512): '%s', LM hash: %s, NTLM hash: %s: (%s)", self.host,
user, self._config.hash_sensitive_data(password), lm_hash, ntlm_hash, exc)
LOG.debug(
"Exception when trying to copy file using SMB to %r with user:"
" %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s: (%s)",
self.host,
user,
self._config.hash_sensitive_data(password),
self._config.hash_sensitive_data(lm_hash),
self._config.hash_sensitive_data(ntlm_hash),
exc)
continue
if not exploited:
@ -92,7 +102,8 @@ class SmbExploiter(HostExploiter):
# execute the remote dropper in case the path isn't final
if remote_full_path.lower() != self._config.dropper_target_path_win_32.lower():
cmdline = DROPPER_CMDLINE_DETACHED_WINDOWS % {'dropper_path': remote_full_path} + \
build_monkey_commandline(self.host, get_monkey_depth() - 1, self._config.dropper_target_path_win_32)
build_monkey_commandline(self.host, get_monkey_depth() - 1,
self._config.dropper_target_path_win_32)
else:
cmdline = MONKEY_CMDLINE_DETACHED_WINDOWS % {'monkey_path': remote_full_path} + \
build_monkey_commandline(self.host, get_monkey_depth() - 1)

View File

@ -11,7 +11,7 @@ import infection_monkey.monkeyfs as monkeyfs
from common.utils.attack_utils import ScanStatus
from infection_monkey.telemetry.attack.t1105_telem import T1105Telem
from infection_monkey.exploit.tools.helpers import get_interface_to_target
from infection_monkey.config import Configuration
__author__ = 'itamar'
LOG = logging.getLogger(__name__)
@ -31,9 +31,13 @@ class SmbTools(object):
# skip guest users
if smb.isGuestSession() > 0:
LOG.debug("Connection to %r granted guest privileges with user: %s, password: '%s',"
" LM hash: %s, NTLM hash: %s",
host, username, password, lm_hash, ntlm_hash)
LOG.debug("Connection to %r granted guest privileges with user: %s, password (SHA-512): '%s',"
" LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
host,
username,
Configuration.hash_sensitive_data(password),
Configuration.hash_sensitive_data(lm_hash),
Configuration.hash_sensitive_data(ntlm_hash))
try:
smb.logoff()
@ -164,9 +168,13 @@ class SmbTools(object):
smb = None
if not file_uploaded:
LOG.debug("Couldn't find a writable share for exploiting"
" victim %r with username: %s, password: '%s', LM hash: %s, NTLM hash: %s",
host, username, password, lm_hash, ntlm_hash)
LOG.debug("Couldn't find a writable share for exploiting victim %r with "
"username: %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
host,
username,
Configuration.hash_sensitive_data(password),
Configuration.hash_sensitive_data(lm_hash),
Configuration.hash_sensitive_data(ntlm_hash))
return None
return remote_full_path
@ -194,8 +202,15 @@ class SmbTools(object):
try:
smb.login(username, password, '', lm_hash, ntlm_hash)
except Exception as exc:
LOG.debug("Error while logging into %r using user: %s, password: '%s', LM hash: %s, NTLM hash: %s: %s",
host, username, password, lm_hash, ntlm_hash, exc)
LOG.debug(
"Error while logging into %r using user: %s, password (SHA-512): '%s', "
"LM hash (SHA-512): %s, NTLM hash (SHA-512): %s: %s",
host,
username,
Configuration.hash_sensitive_data(password),
Configuration.hash_sensitive_data(lm_hash),
Configuration.hash_sensitive_data(ntlm_hash),
exc)
return None, dialect
smb.setTimeout(timeout)

View File

@ -252,6 +252,7 @@ class WebLogic201710271(WebRCE):
# https://github.com/rapid7/metasploit-framework/pull/11780
class WebLogic20192725(WebRCE):
URLS = ["_async/AsyncResponseServiceHttps"]
DELAY_BEFORE_EXPLOITING_SECONDS = 5
_TARGET_OS_TYPE = WebLogicExploiter._TARGET_OS_TYPE
_EXPLOITED_SERVICE = WebLogicExploiter._EXPLOITED_SERVICE
@ -266,6 +267,11 @@ class WebLogic20192725(WebRCE):
exploit_config['dropper'] = True
return exploit_config
def execute_remote_monkey(self, url, path, dropper=False):
# Without a delay, the exploiter tries to launch a monkey file that is still being written after the download.
time.sleep(WebLogic20192725.DELAY_BEFORE_EXPLOITING_SECONDS)
return super(WebLogic20192725, self).execute_remote_monkey(url, path, dropper)
def exploit(self, url, command):
if 'linux' in self.host.os['type']:
payload = self.get_exploit_payload('/bin/sh', '-c', command)

View File

@ -37,9 +37,10 @@ class WmiExploiter(HostExploiter):
for user, password, lm_hash, ntlm_hash in creds:
password_hashed = self._config.hash_sensitive_data(password)
LOG.debug("Attempting to connect %r using WMI with "
"user,password (SHA-512),lm hash,ntlm hash: ('%s','%s','%s','%s')",
self.host, user, password_hashed, lm_hash, ntlm_hash)
lm_hash_hashed = self._config.hash_sensitive_data(lm_hash)
ntlm_hash_hashed = self._config.hash_sensitive_data(ntlm_hash)
creds_for_logging = "user, password (SHA-512), lm hash (SHA-512), ntlm hash (SHA-512): ({},{},{},{})".format(user, password_hashed, lm_hash_hashed, ntlm_hash_hashed)
LOG.debug(("Attempting to connect %r using WMI with " % self.host) + creds_for_logging)
wmi_connection = WmiTools.WmiConnection()
@ -47,25 +48,21 @@ class WmiExploiter(HostExploiter):
wmi_connection.connect(self.host, user, password, None, lm_hash, ntlm_hash)
except AccessDeniedException:
self.report_login_attempt(False, user, password, lm_hash, ntlm_hash)
LOG.debug("Failed connecting to %r using WMI with "
"user,password,lm hash,ntlm hash: ('%s','%s','%s','%s')",
self.host, user, password_hashed, lm_hash, ntlm_hash)
LOG.debug(("Failed connecting to %r using WMI with " % self.host) + creds_for_logging)
continue
except DCERPCException:
self.report_login_attempt(False, user, password, lm_hash, ntlm_hash)
LOG.debug("Failed connecting to %r using WMI with "
"user,password,lm hash,ntlm hash: ('%s','%s','%s','%s')",
self.host, user, password_hashed, lm_hash, ntlm_hash)
LOG.debug(("Failed connecting to %r using WMI with " % self.host) + creds_for_logging)
continue
except socket.error:
LOG.debug("Network error in WMI connection to %r with "
"user,password,lm hash,ntlm hash: ('%s','%s','%s','%s')",
self.host, user, password_hashed, lm_hash, ntlm_hash)
LOG.debug(("Network error in WMI connection to %r with " % self.host) + creds_for_logging)
return False
except Exception as exc:
LOG.debug("Unknown WMI connection error to %r with "
"user,password,lm hash,ntlm hash: ('%s','%s','%s','%s') (%s):\n%s",
self.host, user, password_hashed, lm_hash, ntlm_hash, exc, traceback.format_exc())
LOG.debug(
("Unknown WMI connection error to %r with " % self.host)
+ creds_for_logging
+ (" (%s):\n%s" % (exc, traceback.format_exc()))
)
return False
self.report_login_attempt(True, user, password, lm_hash, ntlm_hash)

View File

@ -7,6 +7,7 @@ import logging.config
import os
import sys
import traceback
from multiprocessing import freeze_support
from infection_monkey.utils.monkey_log_path import get_dropper_log_path, get_monkey_log_path
from infection_monkey.config import WormConfiguration, EXTERNAL_CONFIG_FILE
@ -43,7 +44,7 @@ def main():
if 2 > len(sys.argv):
return True
freeze_support() # required for multiprocessing + pyinstaller on windows
monkey_mode = sys.argv[1]
if not (monkey_mode in [MONKEY_ARG, DROPPER_ARG]):

View File

@ -38,4 +38,4 @@ HADOOP_LINUX_COMMAND = "! [ -f %(monkey_path)s ] " \
"; chmod +x %(monkey_path)s " \
"&& %(monkey_path)s %(monkey_type)s %(parameters)s"
DOWNLOAD_TIMEOUT = 300
DOWNLOAD_TIMEOUT = 180

View File

@ -0,0 +1,45 @@
from infection_monkey.model.host import VictimHost
class VictimHostGenerator(object):
def __init__(self, network_ranges, blocked_ips, same_machine_ips):
self.blocked_ips = blocked_ips
self.ranges = network_ranges
self.local_addresses = same_machine_ips
def generate_victims(self, chunk_size):
"""
Generates VictimHosts in chunks from all of the instance's network ranges
:param chunk_size: Maximum size of each chunk
"""
chunk = []
for net_range in self.ranges:
for victim in self.generate_victims_from_range(net_range):
chunk.append(victim)
if len(chunk) == chunk_size:
yield chunk
chunk = []
if chunk: # finished with number of victims < chunk_size
yield chunk
def generate_victims_from_range(self, net_range):
"""
Generates VictimHosts from a given network range
:param net_range: Network range object
:return: Generator of VictimHost objects
"""
for address in net_range:
if not self.is_ip_scannable(address): # check if the IP should be skipped
continue
if hasattr(net_range, 'domain_name'):
victim = VictimHost(address, net_range.domain_name)
else:
victim = VictimHost(address)
yield victim
def is_ip_scannable(self, ip_address):
if ip_address in self.local_addresses:
return False
if ip_address in self.blocked_ips:
return False
return True
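A standalone illustration of the chunking behaviour described in the docstring above; generate_in_chunks is a hypothetical helper that mirrors generate_victims without depending on VictimHost or network ranges.

def generate_in_chunks(items, chunk_size):
    """Yield full chunks of chunk_size items, then whatever remains, like generate_victims."""
    chunk = []
    for item in items:
        chunk.append(item)
        if len(chunk) == chunk_size:
            yield chunk
            chunk = []
    if chunk:  # fewer than chunk_size items were left at the end
        yield chunk

for chunk in generate_in_chunks(range(1, 8), chunk_size=3):
    print(chunk)  # prints [1, 2, 3], then [4, 5, 6], then [7]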

View File

@ -0,0 +1,46 @@
from unittest import TestCase
from infection_monkey.model.victim_host_generator import VictimHostGenerator
from common.network.network_range import CidrRange, SingleIpRange
class VictimHostGeneratorTester(TestCase):
def setUp(self):
self.cidr_range = CidrRange("10.0.0.0/28", False) # this gives us 15 hosts
self.local_host_range = SingleIpRange('localhost')
self.random_single_ip_range = SingleIpRange('41.50.13.37')
def test_chunking(self):
chunk_size = 3
# current test setup is 15+1+1-1 hosts
test_ranges = [self.cidr_range, self.local_host_range, self.random_single_ip_range]
generator = VictimHostGenerator(test_ranges, '10.0.0.1', [])
victims = generator.generate_victims(chunk_size)
for i in range(5): # quickly check the equally sided chunks
self.assertEqual(len(next(victims)), chunk_size)
victim_chunk_last = next(victims)
self.assertEqual(len(victim_chunk_last), 1)
def test_remove_blocked_ip(self):
generator = VictimHostGenerator(self.cidr_range, ['10.0.0.1'], [])
victims = list(generator.generate_victims_from_range(self.cidr_range))
self.assertEqual(len(victims), 14) # 15 minus the 1 we blocked
def test_remove_local_ips(self):
generator = VictimHostGenerator([], [], [])
generator.local_addresses = ['127.0.0.1']
victims = list(generator.generate_victims_from_range(self.local_host_range))
self.assertEqual(len(victims), 0) # block the local IP
def test_generate_domain_victim(self):
# domain name victim
generator = VictimHostGenerator([], [], []) # dummy object
victims = list(generator.generate_victims_from_range(self.local_host_range))
self.assertEqual(len(victims), 1)
self.assertEqual(victims[0].domain_name, 'localhost')
# don't generate for other victims
victims = list(generator.generate_victims_from_range(self.random_single_ip_range))
self.assertEqual(len(victims), 1)
self.assertEqual(victims[0].domain_name, '')

View File

@ -6,6 +6,7 @@ import sys
import time
import infection_monkey.tunnel as tunnel
from infection_monkey.utils.environment import is_windows_os
from infection_monkey.utils.monkey_dir import create_monkey_dir, get_monkey_dir_path, remove_monkey_dir
from infection_monkey.utils.monkey_log_path import get_monkey_log_path
from infection_monkey.utils.environment import is_windows_os

View File

@ -1,28 +1,28 @@
import time
import logging
from multiprocessing.dummy import Pool
from common.network.network_range import *
from common.network.network_range import NetworkRange
from infection_monkey.config import WormConfiguration
from infection_monkey.model.victim_host_generator import VictimHostGenerator
from infection_monkey.network.info import local_ips, get_interfaces_ranges
from infection_monkey.model import VictimHost
from infection_monkey.network import TcpScanner, PingScanner
__author__ = 'itamar'
LOG = logging.getLogger(__name__)
SCAN_DELAY = 0
ITERATION_BLOCK_SIZE = 5
class NetworkScanner(object):
def __init__(self):
self._ip_addresses = None
self._ranges = None
self.scanners = [TcpScanner(), PingScanner()]
def initialize(self):
"""
Set up scanning.
Based on the configuration, scans the local network and/or a fixed list of IPs/subnets.
:return:
"""
# get local ip addresses
self._ip_addresses = local_ips()
@ -68,49 +68,35 @@ class NetworkScanner(object):
:param stop_callback: A callback to check at any point if we should stop scanning
:return: yields a sequence of VictimHost instances
"""
# We currently use ITERATION_BLOCK_SIZE as the pool size, but this may not be the best choice:
# the right ITERATION_BLOCK_SIZE also depends on balancing network usage (packets per second and bandwidth).
# Because the pool spreads out IO-heavy tasks, it can probably be much larger than the CPU core count,
# but that is again a balancing act.
pool = Pool(ITERATION_BLOCK_SIZE)
victim_generator = VictimHostGenerator(self._ranges, WormConfiguration.blocked_ips, local_ips())
TCPscan = TcpScanner()
Pinger = PingScanner()
victims_count = 0
for victim_chunk in victim_generator.generate_victims(ITERATION_BLOCK_SIZE):
LOG.debug("Scanning for potential victims in chunk %r", victim_chunk)
for net_range in self._ranges:
LOG.debug("Scanning for potential victims in the network %r", net_range)
for ip_addr in net_range:
if hasattr(net_range, 'domain_name'):
victim = VictimHost(ip_addr, net_range.domain_name)
else:
victim = VictimHost(ip_addr)
if stop_callback and stop_callback():
LOG.debug("Got stop signal")
break
# check before running scans
if stop_callback and stop_callback():
LOG.debug("Got stop signal")
return
# skip self IP address
if victim.ip_addr in self._ip_addresses:
continue
results = pool.map(self.scan_machine, victim_chunk)
resulting_victims = filter(lambda x: x is not None, results)
for victim in resulting_victims:
LOG.debug("Found potential victim: %r", victim)
victims_count += 1
yield victim
# skip IPs marked as blocked
if victim.ip_addr in WormConfiguration.blocked_ips:
LOG.info("Skipping %s due to blacklist" % victim)
continue
LOG.debug("Scanning %r...", victim)
pingAlive = Pinger.is_host_alive(victim)
tcpAlive = TCPscan.is_host_alive(victim)
# if scanner detect machine is up, add it to victims list
if pingAlive or tcpAlive:
LOG.debug("Found potential victim: %r", victim)
victims_count += 1
yield victim
if victims_count >= max_find:
LOG.debug("Found max needed victims (%d), stopping scan", max_find)
break
if WormConfiguration.tcp_scan_interval:
# time.sleep uses seconds, while config is in milliseconds
time.sleep(WormConfiguration.tcp_scan_interval/float(1000))
if victims_count >= max_find:
LOG.debug("Found max needed victims (%d), stopping scan", max_find)
return
if WormConfiguration.tcp_scan_interval:
# time.sleep uses seconds, while config is in milliseconds
time.sleep(WormConfiguration.tcp_scan_interval / float(1000))
@staticmethod
def _is_any_ip_in_subnet(ip_addresses, subnet_str):
@ -119,5 +105,18 @@ class NetworkScanner(object):
return True
return False
def scan_machine(self, victim):
"""
Scans a specific machine using the instance's scanners
:param victim: VictimHost machine
:return: The victim if it is alive, otherwise None
"""
LOG.debug("Scanning target address: %r", victim)
if any([scanner.is_host_alive(victim) for scanner in self.scanners]):
LOG.debug("Found potential target_ip: %r", victim)
return victim
else:
return None
def on_island(self, server):
return bool([x for x in self._ip_addresses if x in server])
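The refactor above replaces the per-IP loop with Pool.map over victim chunks; below is a standalone sketch of that pattern with a hypothetical scan function (even-numbered "hosts" pretend to be alive), independent of the real scanners.

from multiprocessing.dummy import Pool  # a thread pool, well suited to IO-bound scanning

ITERATION_BLOCK_SIZE = 5

def scan_machine(victim):
    """Stand-in for NetworkScanner.scan_machine: return the victim if it answers, otherwise None."""
    return victim if victim % 2 == 0 else None  # pretend even-numbered hosts are alive

pool = Pool(ITERATION_BLOCK_SIZE)
chunk = list(range(10))                                              # one chunk of candidate "hosts"
alive = [v for v in pool.map(scan_machine, chunk) if v is not None]
print(alive)                                                         # [0, 2, 4, 6, 8]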

View File

@ -3,25 +3,20 @@ import os
import random
import string
import subprocess
import time
import win32event
from infection_monkey.utils.windows.auto_new_user import AutoNewUser, NewUserError
from infection_monkey.utils.new_user_error import NewUserError
from infection_monkey.utils.auto_new_user_factory import create_auto_new_user
from common.data.post_breach_consts import POST_BREACH_COMMUNICATE_AS_NEW_USER
from infection_monkey.post_breach.pba import PBA
from infection_monkey.telemetry.post_breach_telem import PostBreachTelem
from infection_monkey.utils.environment import is_windows_os
from infection_monkey.utils.linux.users import get_linux_commands_to_delete_user, get_linux_commands_to_add_user
PING_TEST_DOMAIN = "google.com"
PING_WAIT_TIMEOUT_IN_MILLISECONDS = 20 * 1000
CREATED_PROCESS_AS_USER_PING_SUCCESS_FORMAT = "Created process '{}' as user '{}', and successfully pinged."
CREATED_PROCESS_AS_USER_PING_FAILED_FORMAT = "Created process '{}' as user '{}', but failed to ping (exit status {})."
USERNAME = "somenewuser"
USERNAME_PREFIX = "somenewuser"
PASSWORD = "N3WPa55W0rD!1"
logger = logging.getLogger(__name__)
@ -38,94 +33,24 @@ class CommunicateAsNewUser(PBA):
def run(self):
username = CommunicateAsNewUser.get_random_new_user_name()
if is_windows_os():
self.communicate_as_new_user_windows(username)
else:
self.communicate_as_new_user_linux(username)
try:
with create_auto_new_user(username, PASSWORD) as new_user:
ping_commandline = CommunicateAsNewUser.get_commandline_for_ping()
exit_status = new_user.run_as(ping_commandline)
self.send_ping_result_telemetry(exit_status, ping_commandline, username)
except subprocess.CalledProcessError as e:
PostBreachTelem(self, (e.output, False)).send()
except NewUserError as e:
PostBreachTelem(self, (str(e), False)).send()
@staticmethod
def get_random_new_user_name():
return USERNAME + ''.join(random.choice(string.ascii_lowercase) for _ in range(5))
return USERNAME_PREFIX + ''.join(random.choice(string.ascii_lowercase) for _ in range(5))
def communicate_as_new_user_linux(self, username):
try:
# add user + ping
linux_cmds = get_linux_commands_to_add_user(username)
commandline = "ping -c 1 {}".format(PING_TEST_DOMAIN)
linux_cmds.extend([";", "sudo", "-u", username, commandline])
final_command = ' '.join(linux_cmds)
exit_status = os.system(final_command)
self.send_ping_result_telemetry(exit_status, commandline, username)
# delete the user, async in case it gets stuck.
_ = subprocess.Popen(
get_linux_commands_to_delete_user(username), stderr=subprocess.STDOUT, shell=True)
# Leaking the process on purpose - nothing we can do if it's stuck.
except subprocess.CalledProcessError as e:
PostBreachTelem(self, (e.output, False)).send()
def communicate_as_new_user_windows(self, username):
# Importing these only on windows, as they won't exist on linux.
import win32con
import win32process
import win32api
try:
with AutoNewUser(username, PASSWORD) as new_user:
# Using os.path is OK, as this is on windows for sure
ping_app_path = os.path.join(os.environ["WINDIR"], "system32", "PING.exe")
if not os.path.exists(ping_app_path):
PostBreachTelem(self, ("{} not found.".format(ping_app_path), False)).send()
return # Can't continue without ping.
try:
# Open process as that user:
# https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-createprocessasusera
commandline = "{} {} {} {}".format(ping_app_path, PING_TEST_DOMAIN, "-n", "1")
process_handle, thread_handle, _, _ = win32process.CreateProcessAsUser(
new_user.get_logon_handle(), # A handle to the primary token that represents a user.
None, # The name of the module to be executed.
commandline, # The command line to be executed.
None, # Process attributes
None, # Thread attributes
True, # Should inherit handles
win32con.NORMAL_PRIORITY_CLASS, # The priority class and the creation of the process.
None, # An environment block for the new process. If this parameter is NULL, the new process
# uses the environment of the calling process.
None, # CWD. If this parameter is NULL, the new process will have the same current drive and
# directory as the calling process.
win32process.STARTUPINFO() # STARTUPINFO structure.
# https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/ns-processthreadsapi-startupinfoa
)
logger.debug(
"Waiting for ping process to finish. Timeout: {}ms".format(PING_WAIT_TIMEOUT_IN_MILLISECONDS))
# Ignoring return code, as we'll use `GetExitCode` to determine the state of the process later.
_ = win32event.WaitForSingleObject( # Waits until the specified object is signaled, or time-out.
process_handle, # Ping process handle
PING_WAIT_TIMEOUT_IN_MILLISECONDS # Timeout in milliseconds
)
ping_exit_code = win32process.GetExitCodeProcess(process_handle)
self.send_ping_result_telemetry(ping_exit_code, commandline, username)
except Exception as e:
# If failed on 1314, it's possible to try to elevate the rights of the current user with the
# "Replace a process level token" right, using Local Security Policy editing.
PostBreachTelem(self, (
"Failed to open process as user {}. Error: {}".format(username, str(e)), False)).send()
finally:
try:
win32api.CloseHandle(process_handle)
win32api.CloseHandle(thread_handle)
except Exception as err:
logger.error("Close handle error: " + str(err))
except subprocess.CalledProcessError as err:
PostBreachTelem(self, (
"Couldn't create the user '{}'. Error output is: '{}'".format(username, str(err)),
False)).send()
except NewUserError as e:
PostBreachTelem(self, (str(e), False)).send()
@staticmethod
def get_commandline_for_ping(domain=PING_TEST_DOMAIN, is_windows=is_windows_os()):
format_string = "PING.exe {domain} -n 1" if is_windows else "ping -c 1 {domain}"
return format_string.format(domain=domain)
def send_ping_result_telemetry(self, exit_status, commandline, username):
"""

View File

@ -0,0 +1,42 @@
import logging
import abc
logger = logging.getLogger(__name__)
class AutoNewUser:
"""
RAII object to use for creating and using a new user. Use with `with`.
User will be created when the instance is instantiated.
User will be available for use (log on for Windows, for example) at the start of the `with` scope.
User will be removed (deactivated and deleted for Windows, for example) at the end of said `with` scope.
Example:
# Created # Logged on
with AutoNewUser("user", "pass", is_on_windows()) as new_user:
...
...
# Logged off and deleted
...
"""
__metaclass__ = abc.ABCMeta
def __init__(self, username, password):
self.username = username
self.password = password
@abc.abstractmethod
def __enter__(self):
raise NotImplementedError()
@abc.abstractmethod
def __exit__(self, exc_type, exc_val, exc_tb):
raise NotImplementedError()
@abc.abstractmethod
def run_as(self, command):
"""
Run the given command as the new user that was created.
:param command: The command to run - give as shell commandline (e.g. "ping google.com -n 1")
"""
raise NotImplementedError()

View File

@ -0,0 +1,21 @@
from infection_monkey.utils.environment import is_windows_os
from infection_monkey.utils.linux.users import AutoNewLinuxUser
from infection_monkey.utils.windows.users import AutoNewWindowsUser
def create_auto_new_user(username, password, is_windows=is_windows_os()):
"""
Factory method for creating an AutoNewUser. See AutoNewUser's documentation for more information.
Example usage:
with create_auto_new_user(username, PASSWORD) as new_user:
...
:param username: The username of the new user.
:param password: The password of the new user.
:param is_windows: If True, a new Windows user is created. Otherwise, a Linux user is created. Leave blank for
automatic detection.
:return: The new AutoNewUser object - use with a `with` scope.
"""
if is_windows:
return AutoNewWindowsUser(username, password)
else:
return AutoNewLinuxUser(username, password)

View File

@ -1,14 +1,21 @@
import datetime
import logging
import os
import subprocess
from infection_monkey.utils.auto_new_user import AutoNewUser
logger = logging.getLogger(__name__)
def get_linux_commands_to_add_user(username):
return [
'useradd',
'useradd', # https://linux.die.net/man/8/useradd
'-M', # Do not create homedir
'--expiredate',
'--expiredate', # The date on which the user account will be disabled.
datetime.datetime.today().strftime('%Y-%m-%d'),
'--inactive',
'0',
'--inactive', # The number of days after a password expires until the account is permanently disabled.
'0', # A value of 0 disables the account as soon as the password has expired
'-c', # Comment
'MONKEY_USER', # Comment
username]
@ -19,3 +26,33 @@ def get_linux_commands_to_delete_user(username):
'deluser',
username
]
class AutoNewLinuxUser(AutoNewUser):
"""
See AutoNewUser's documentation for details.
"""
def __init__(self, username, password):
"""
Creates a user with the username + password.
:raises: subprocess.CalledProcessError if failed to add the user.
"""
super(AutoNewLinuxUser, self).__init__(username, password)
commands_to_add_user = get_linux_commands_to_add_user(username)
logger.debug("Trying to add {} with commands {}".format(self.username, str(commands_to_add_user)))
_ = subprocess.check_output(' '.join(commands_to_add_user), stderr=subprocess.STDOUT, shell=True)
def __enter__(self):
return self # No initialization/logging on needed in Linux
def run_as(self, command):
command_as_new_user = "sudo -u {username} {command}".format(username=self.username, command=command)
return os.system(command_as_new_user)
def __exit__(self, exc_type, exc_val, exc_tb):
# delete the user.
commands_to_delete_user = get_linux_commands_to_delete_user(self.username)
logger.debug("Trying to delete {} with commands {}".format(self.username, str(commands_to_delete_user)))
_ = subprocess.check_output(" ".join(commands_to_delete_user), stderr=subprocess.STDOUT, shell=True)

View File

@ -0,0 +1,2 @@
class NewUserError(Exception):
pass

View File

@ -1,69 +0,0 @@
import logging
import subprocess
from infection_monkey.post_breach.actions.add_user import BackdoorUser
from infection_monkey.utils.windows.users import get_windows_commands_to_delete_user, get_windows_commands_to_add_user
logger = logging.getLogger(__name__)
class NewUserError(Exception):
pass
class AutoNewUser(object):
"""
RAII object to use for creating and using a new user in Windows. Use with `with`.
User will be created when the instance is instantiated.
User will log on at the start of the `with` scope.
User will log off and get deleted at the end of said `with` scope.
Example:
# Created # Logged on
with AutoNewUser("user", "pass") as new_user:
...
...
# Logged off and deleted
...
"""
def __init__(self, username, password):
"""
Creates a user with the username + password.
:raises: subprocess.CalledProcessError if failed to add the user.
"""
self.username = username
self.password = password
windows_cmds = get_windows_commands_to_add_user(self.username, self.password, True)
_ = subprocess.check_output(windows_cmds, stderr=subprocess.STDOUT, shell=True)
def __enter__(self):
# Importing these only on windows, as they won't exist on linux.
import win32security
import win32con
try:
# Logon as new user: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-logonusera
self.logon_handle = win32security.LogonUser(
self.username,
".", # Use current domain.
self.password,
win32con.LOGON32_LOGON_INTERACTIVE, # Logon type - interactive (normal user).
win32con.LOGON32_PROVIDER_DEFAULT) # Which logon provider to use - whatever Windows offers.
except Exception as err:
raise NewUserError("Can't logon as {}. Error: {}".format(self.username, str(err)))
return self
def get_logon_handle(self):
return self.logon_handle
def __exit__(self, exc_type, exc_val, exc_tb):
# Logoff
self.logon_handle.Close()
# Try to delete user
try:
_ = subprocess.Popen(
get_windows_commands_to_delete_user(self.username), stderr=subprocess.STDOUT, shell=True)
except Exception as err:
raise NewUserError("Can't delete user {}. Info: {}".format(self.username, err))

View File

@ -1,3 +1,15 @@
import logging
import subprocess
from infection_monkey.utils.auto_new_user import AutoNewUser
from infection_monkey.utils.new_user_error import NewUserError
ACTIVE_NO_NET_USER = '/ACTIVE:NO'
WAIT_TIMEOUT_IN_MILLISECONDS = 20 * 1000
logger = logging.getLogger(__name__)
def get_windows_commands_to_add_user(username, password, should_be_active=False):
windows_cmds = [
'net',
@ -6,7 +18,7 @@ def get_windows_commands_to_add_user(username, password, should_be_active=False)
password,
'/add']
if not should_be_active:
windows_cmds.append('/ACTIVE:NO')
windows_cmds.append(ACTIVE_NO_NET_USER)
return windows_cmds
@ -16,3 +28,128 @@ def get_windows_commands_to_delete_user(username):
'user',
username,
'/delete']
def get_windows_commands_to_deactivate_user(username):
return [
'net',
'user',
username,
ACTIVE_NO_NET_USER]
class AutoNewWindowsUser(AutoNewUser):
"""
See AutoNewUser's documentation for details.
"""
def __init__(self, username, password):
"""
Creates a user with the username + password.
:raises: subprocess.CalledProcessError if failed to add the user.
"""
super(AutoNewWindowsUser, self).__init__(username, password)
windows_cmds = get_windows_commands_to_add_user(self.username, self.password, True)
logger.debug("Trying to add {} with commands {}".format(self.username, str(windows_cmds)))
_ = subprocess.check_output(windows_cmds, stderr=subprocess.STDOUT, shell=True)
def __enter__(self):
# Importing these only on windows, as they won't exist on linux.
import win32security
import win32con
try:
# Logon as new user: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-logonusera
self.logon_handle = win32security.LogonUser(
self.username,
".", # Use current domain.
self.password,
win32con.LOGON32_LOGON_INTERACTIVE, # Logon type - interactive (normal user). Need this to open ping
# using a shell.
win32con.LOGON32_PROVIDER_DEFAULT) # Which logon provider to use - whatever Windows offers.
except Exception as err:
raise NewUserError("Can't logon as {}. Error: {}".format(self.username, str(err)))
return self
def run_as(self, command):
# Importing these only on windows, as they won't exist on linux.
import win32con
import win32process
import win32api
import win32event
exit_code = -1
process_handle = None
thread_handle = None
try:
# Open process as that user:
# https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-createprocessasusera
process_handle, thread_handle, _, _ = win32process.CreateProcessAsUser(
self.get_logon_handle(), # A handle to the primary token that represents a user.
None, # The name of the module to be executed.
command, # The command line to be executed.
None, # Process attributes
None, # Thread attributes
True, # Should inherit handles
win32con.NORMAL_PRIORITY_CLASS, # The priority class and the creation of the process.
None, # An environment block for the new process. If this parameter is NULL, the new process
# uses the environment of the calling process.
None, # CWD. If this parameter is NULL, the new process will have the same current drive and
# directory as the calling process.
win32process.STARTUPINFO() # STARTUPINFO structure.
# https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/ns-processthreadsapi-startupinfoa
)
logger.debug(
"Waiting for process to finish. Timeout: {}ms".format(WAIT_TIMEOUT_IN_MILLISECONDS))
# Ignoring return code, as we'll use `GetExitCode` to determine the state of the process later.
_ = win32event.WaitForSingleObject( # Waits until the specified object is signaled, or time-out.
process_handle, # Ping process handle
WAIT_TIMEOUT_IN_MILLISECONDS # Timeout in milliseconds
)
exit_code = win32process.GetExitCodeProcess(process_handle)
finally:
try:
if process_handle is not None:
win32api.CloseHandle(process_handle)
if thread_handle is not None:
win32api.CloseHandle(thread_handle)
except Exception as err:
logger.error("Close handle error: " + str(err))
return exit_code
def get_logon_handle(self):
return self.logon_handle
def __exit__(self, exc_type, exc_val, exc_tb):
# Logoff
self.logon_handle.Close()
# Try to disable and then delete the user.
self.try_deactivate_user()
self.try_delete_user()
def try_deactivate_user(self):
try:
commands_to_deactivate_user = get_windows_commands_to_deactivate_user(self.username)
logger.debug(
"Trying to deactivate {} with commands {}".format(self.username, str(commands_to_deactivate_user)))
_ = subprocess.check_output(
commands_to_deactivate_user, stderr=subprocess.STDOUT, shell=True)
except Exception as err:
raise NewUserError("Can't deactivate user {}. Info: {}".format(self.username, err))
def try_delete_user(self):
try:
commands_to_delete_user = get_windows_commands_to_delete_user(self.username)
logger.debug(
"Trying to delete {} with commands {}".format(self.username, str(commands_to_delete_user)))
_ = subprocess.check_output(
commands_to_delete_user, stderr=subprocess.STDOUT, shell=True)
except Exception as err:
raise NewUserError("Can't delete user {}. Info: {}".format(self.username, err))

View File

@ -36,6 +36,9 @@ from monkey_island.cc.resources.pba_file_upload import FileUpload
from monkey_island.cc.resources.attack.attack_config import AttackConfiguration
from monkey_island.cc.resources.attack.attack_report import AttackReport
from monkey_island.cc.resources.test.monkey_test import MonkeyTest
from monkey_island.cc.resources.test.log_test import LogTest
__author__ = 'Barak'
@ -141,6 +144,9 @@ def init_api_resources(api):
api.add_resource(AttackReport, '/api/attack/report')
api.add_resource(VersionUpdate, '/api/version-update', '/api/version-update/')
api.add_resource(MonkeyTest, '/api/test/monkey')
api.add_resource(LogTest, '/api/test/log')
def init_app(mongo_url):
app = Flask(__name__)

View File

@ -11,7 +11,8 @@ class Environment(object, metaclass=ABCMeta):
_MONGO_DB_NAME = "monkeyisland"
_MONGO_DB_HOST = "localhost"
_MONGO_DB_PORT = 27017
_MONGO_URL = os.environ.get("MONKEY_MONGO_URL", "mongodb://{0}:{1}/{2}".format(_MONGO_DB_HOST, _MONGO_DB_PORT, str(_MONGO_DB_NAME)))
_MONGO_URL = os.environ.get("MONKEY_MONGO_URL",
"mongodb://{0}:{1}/{2}".format(_MONGO_DB_HOST, _MONGO_DB_PORT, str(_MONGO_DB_NAME)))
_DEBUG_SERVER = False
_AUTH_EXPIRATION_TIME = timedelta(hours=1)
@ -25,8 +26,7 @@ class Environment(object, metaclass=ABCMeta):
def testing(self, value):
self._testing = value
_MONKEY_VERSION = "1.6.3"
_MONKEY_VERSION = "1.7.0"
def __init__(self):
self.config = None

View File

@ -23,6 +23,7 @@ from monkey_island.cc.services.reporting.exporter_init import populate_exporter_
from monkey_island.cc.utils import local_ip_addresses
from monkey_island.cc.environment.environment import env
from monkey_island.cc.database import is_db_server_up, get_db_version
from monkey_island.cc.resources.monkey_download import MonkeyDownload
def main():
@ -47,12 +48,19 @@ def main():
ssl_options={'certfile': os.environ.get('SERVER_CRT', crt_path),
'keyfile': os.environ.get('SERVER_KEY', key_path)})
http_server.listen(env.get_island_port())
logger.info(
'Monkey Island Server is running on https://{}:{}'.format(local_ip_addresses()[0], env.get_island_port()))
log_init_info()
IOLoop.instance().start()
def log_init_info():
logger.info(
'Monkey Island Server is running. Listening on the following URLs: {}'.format(
", ".join(["https://{}:{}".format(x, env.get_island_port()) for x in local_ip_addresses()])
)
)
MonkeyDownload.log_executable_hashes()
def wait_for_mongo_db_server(mongo_url):
while not is_db_server_up(mongo_url):
logger.info('Waiting for MongoDB server on {0}'.format(mongo_url))

View File

@ -3,10 +3,14 @@ Define a Document Schema for the Monkey document.
"""
from mongoengine import Document, StringField, ListField, BooleanField, EmbeddedDocumentField, ReferenceField, \
DateTimeField, DynamicField, DoesNotExist
import ring
from monkey_island.cc.models.monkey_ttl import MonkeyTtl, create_monkey_ttl_document
from monkey_island.cc.consts import DEFAULT_MONKEY_TTL_EXPIRY_DURATION_IN_SECONDS
from monkey_island.cc.models.command_control_channel import CommandControlChannel
from monkey_island.cc.utils import local_ip_addresses
MAX_MONKEYS_AMOUNT_TO_CACHE = 100
class Monkey(Document):
@ -84,6 +88,35 @@ class Monkey(Document):
os = "windows"
return os
@staticmethod
@ring.lru()
def get_label_by_id(object_id):
current_monkey = Monkey.get_single_monkey_by_id(object_id)
label = Monkey.get_hostname_by_id(object_id) + " : " + current_monkey.ip_addresses[0]
if len(set(current_monkey.ip_addresses).intersection(local_ip_addresses())) > 0:
label = "MonkeyIsland - " + label
return label
@staticmethod
@ring.lru()
def get_hostname_by_id(object_id):
"""
:param object_id: the object ID of a Monkey in the database.
:return: The hostname of that machine.
:note: Use this and not monkey.hostname for performance - this is lru-cached.
"""
return Monkey.get_single_monkey_by_id(object_id).hostname
def set_hostname(self, hostname):
"""
Sets a new hostname for a machine and clears the cache for getting it.
:param hostname: The new hostname for the machine.
"""
self.hostname = hostname
self.save()
Monkey.get_hostname_by_id.delete(self.id)
Monkey.get_label_by_id.delete(self.id)
def get_network_info(self):
"""
Formats network info from monkey's model
@ -91,6 +124,17 @@ class Monkey(Document):
"""
return {'ips': self.ip_addresses, 'hostname': self.hostname}
@staticmethod
@ring.lru(
expire=1 # data has TTL of 1 second. This is useful for rapid calls for report generation.
)
def is_monkey(object_id):
try:
_ = Monkey.get_single_monkey_by_id(object_id)
return True
except:
return False
@staticmethod
def get_tunneled_monkeys():
return Monkey.objects(tunnel__exists=True)
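set_hostname above keeps the ring caches coherent by calling .delete() on the cached functions; here is a condensed standalone sketch of that invalidation pattern. It requires the ring package, and the dict below is a hypothetical stand-in for the Monkey collection.

import ring

HOSTNAMES = {"id-1": "victim-1"}  # hypothetical stand-in for the database

@ring.lru()
def get_hostname_by_id(object_id):
    return HOSTNAMES[object_id]

print(get_hostname_by_id("id-1"))   # miss: reads the dict and populates the LRU cache
print(get_hostname_by_id("id-1"))   # hit: served from the cache
HOSTNAMES["id-1"] = "renamed-victim"
get_hostname_by_id.delete("id-1")   # the same invalidation call set_hostname uses
print(get_hostname_by_id("id-1"))   # miss again: returns the new hostname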

View File

@ -112,3 +112,62 @@ class TestMonkey(IslandTestCase):
and linux_monkey not in tunneled_monkeys
and len(tunneled_monkeys) == 2)
self.assertTrue(test, "Tunneling test")
def test_get_label_by_id(self):
self.fail_if_not_testing_env()
self.clean_monkey_db()
hostname_example = "a_hostname"
ip_example = "1.1.1.1"
linux_monkey = Monkey(guid=str(uuid.uuid4()),
description="Linux shay-Virtual-Machine",
hostname=hostname_example,
ip_addresses=[ip_example])
linux_monkey.save()
cache_info_before_query = Monkey.get_label_by_id.storage.backend.cache_info()
self.assertEquals(cache_info_before_query.hits, 0)
# not cached
label = Monkey.get_label_by_id(linux_monkey.id)
self.assertIsNotNone(label)
self.assertIn(hostname_example, label)
self.assertIn(ip_example, label)
# should be cached
_ = Monkey.get_label_by_id(linux_monkey.id)
cache_info_after_query = Monkey.get_label_by_id.storage.backend.cache_info()
self.assertEquals(cache_info_after_query.hits, 1)
linux_monkey.set_hostname("Another hostname")
# should be a miss
label = Monkey.get_label_by_id(linux_monkey.id)
cache_info_after_second_query = Monkey.get_label_by_id.storage.backend.cache_info()
# still 1 hit only
self.assertEquals(cache_info_after_second_query.hits, 1)
self.assertEquals(cache_info_after_second_query.misses, 2)
def test_is_monkey(self):
self.fail_if_not_testing_env()
self.clean_monkey_db()
a_monkey = Monkey(guid=str(uuid.uuid4()))
a_monkey.save()
cache_info_before_query = Monkey.is_monkey.storage.backend.cache_info()
self.assertEquals(cache_info_before_query.hits, 0)
# not cached
self.assertTrue(Monkey.is_monkey(a_monkey.id))
fake_id = "123456789012"
self.assertFalse(Monkey.is_monkey(fake_id))
# should be cached
self.assertTrue(Monkey.is_monkey(a_monkey.id))
self.assertFalse(Monkey.is_monkey(fake_id))
cache_info_after_query = Monkey.is_monkey.storage.backend.cache_info()
self.assertEquals(cache_info_after_query.hits, 2)

View File

@ -1,3 +1,4 @@
import hashlib
import json
import logging
import os
@ -83,9 +84,33 @@ class MonkeyDownload(flask_restful.Resource):
if result:
# change resulting from new base path
real_path = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", 'binaries', result['filename'])
executable_filename = result['filename']
real_path = MonkeyDownload.get_executable_full_path(executable_filename)
if os.path.isfile(real_path):
result['size'] = os.path.getsize(real_path)
return result
return {}
@staticmethod
def get_executable_full_path(executable_filename):
real_path = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", 'binaries', executable_filename)
return real_path
@staticmethod
def log_executable_hashes():
"""
Logs the hashes of all monkey executables to ease debugging (e.g. to check which Monkey version you have).
"""
filenames = set([x['filename'] for x in MONKEY_DOWNLOADS])
for filename in filenames:
filepath = MonkeyDownload.get_executable_full_path(filename)
if os.path.isfile(filepath):
with open(filepath, 'rb') as monkey_exec_file:
file_contents = monkey_exec_file.read()
logger.debug("{} hashes:\nSHA-256 {}".format(
filename,
hashlib.sha256(file_contents).hexdigest()
))
else:
logger.debug("No monkey executable for {}.".format(filepath))

View File

@ -1,5 +1,6 @@
from datetime import datetime
import logging
import threading
import flask_restful
from flask import request, make_response, jsonify
@ -9,6 +10,7 @@ from monkey_island.cc.database import mongo
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.services.reporting.report import ReportService
from monkey_island.cc.services.attack.attack_report import AttackReportService
from monkey_island.cc.services.reporting.report_generation_synchronisation import is_report_being_generated, safe_generate_reports
from monkey_island.cc.utils import local_ip_addresses
from monkey_island.cc.services.database import Database
@ -18,13 +20,15 @@ logger = logging.getLogger(__name__)
class Root(flask_restful.Resource):
def __init__(self):
self.report_generating_lock = threading.Event()
def get(self, action=None):
if not action:
action = request.args.get('action')
if not action:
return Root.get_server_info()
return self.get_server_info()
elif action == "reset":
return jwt_required()(Database.reset_db)()
elif action == "killall":
@ -34,11 +38,12 @@ class Root(flask_restful.Resource):
else:
return make_response(400, {'error': 'unknown action'})
@staticmethod
@jwt_required()
def get_server_info():
return jsonify(ip_addresses=local_ip_addresses(), mongo=str(mongo.db),
completed_steps=Root.get_completed_steps())
def get_server_info(self):
return jsonify(
ip_addresses=local_ip_addresses(),
mongo=str(mongo.db),
completed_steps=self.get_completed_steps())
@staticmethod
@jwt_required()
@ -49,17 +54,22 @@ class Root(flask_restful.Resource):
logger.info('Kill all monkeys was called')
return jsonify(status='OK')
@staticmethod
@jwt_required()
def get_completed_steps():
def get_completed_steps(self):
is_any_exists = NodeService.is_any_monkey_exists()
infection_done = NodeService.is_monkey_finished_running()
if not infection_done:
report_done = False
else:
if is_any_exists:
ReportService.get_report()
AttackReportService.get_latest_report()
if infection_done:
# Check is_report_being_generated here because we don't want to block on report generation; if a report
# is already being generated, we skip and reply immediately.
if not is_report_being_generated() and not ReportService.is_latest_report_exists():
safe_generate_reports()
report_done = ReportService.is_report_generated()
return dict(run_server=True, run_monkey=is_any_exists, infection_done=infection_done,
report_done=report_done)
else: # Infection is not done
report_done = False
return dict(
run_server=True,
run_monkey=is_any_exists,
infection_done=infection_done,
report_done=report_done)
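is_report_being_generated and safe_generate_reports come from a synchronisation module that is not shown in this view; the following is only a minimal sketch, assuming a plain threading.Lock, of how such a guard could behave. It is an illustration, not the module's actual implementation.

import threading

_report_lock = threading.Lock()

def is_report_being_generated():
    # Non-blocking probe: if the lock can't be acquired, another thread is already generating a report.
    if _report_lock.acquire(blocking=False):
        _report_lock.release()
        return False
    return True

def safe_generate_reports():
    # Serialize report generation so concurrent requests don't each build their own report.
    with _report_lock:
        pass  # generate the security and ATT&CK reports here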

View File

@ -29,7 +29,7 @@ class TelemetryFeed(flask_restful.Resource):
try:
return \
{
'telemetries': [TelemetryFeed.get_displayed_telemetry(telem) for telem in telemetries],
'telemetries': [TelemetryFeed.get_displayed_telemetry(telem) for telem in telemetries if TelemetryFeed.should_show_brief(telem)],
'timestamp': datetime.now().isoformat()
}
except KeyError as err:
@ -45,9 +45,18 @@ class TelemetryFeed(flask_restful.Resource):
'id': telem['_id'],
'timestamp': telem['timestamp'].strftime('%d/%m/%Y %H:%M:%S'),
'hostname': monkey.get('hostname', default_hostname) if monkey else default_hostname,
'brief': TELEM_PROCESS_DICT[telem['telem_category']](telem)
'brief': TelemetryFeed.get_telem_brief(telem)
}
@staticmethod
def get_telem_brief(telem):
telem_brief_parser = TelemetryFeed.get_telem_brief_parser_by_category(telem['telem_category'])
return telem_brief_parser(telem)
@staticmethod
def get_telem_brief_parser_by_category(telem_category):
return TELEM_PROCESS_DICT[telem_category]
@staticmethod
def get_tunnel_telem_brief(telem):
tunnel = telem['data']['proxy']
@ -94,8 +103,8 @@ class TelemetryFeed(flask_restful.Resource):
telem['data']['ip'])
@staticmethod
def get_attack_telem_brief(telem):
return 'Monkey collected MITRE ATT&CK info.'
def should_show_brief(telem):
return telem['telem_category'] in TELEM_PROCESS_DICT
TELEM_PROCESS_DICT = \
@ -106,6 +115,5 @@ TELEM_PROCESS_DICT = \
'scan': TelemetryFeed.get_scan_telem_brief,
'system_info': TelemetryFeed.get_systeminfo_telem_brief,
'trace': TelemetryFeed.get_trace_telem_brief,
'post_breach': TelemetryFeed.get_post_breach_telem_brief,
'attack': TelemetryFeed.get_attack_telem_brief
'post_breach': TelemetryFeed.get_post_breach_telem_brief
}

View File

@ -0,0 +1,4 @@
"""
This package contains resources used by blackbox tests
to analyze test results, download logs and so on.
"""

View File

@ -0,0 +1,18 @@
from bson import json_util
import flask_restful
from flask import request
from monkey_island.cc.auth import jwt_required
from monkey_island.cc.database import mongo, database
class LogTest(flask_restful.Resource):
@jwt_required()
def get(self):
find_query = json_util.loads(request.args.get('find_query'))
log = mongo.db.log.find_one(find_query)
if not log:
return {'results': None}
log_file = database.gridfs.get(log['file_id'])
return {'results': log_file.read()}

View File

@ -0,0 +1,13 @@
from bson import json_util
import flask_restful
from flask import request
from monkey_island.cc.auth import jwt_required
from monkey_island.cc.database import mongo
class MonkeyTest(flask_restful.Resource):
@jwt_required()
def get(self, **kw):
find_query = json_util.loads(request.args.get('find_query'))
return {'results': list(mongo.db.monkey.find(find_query))}
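On the client side, the blackbox tests can call these endpoints with a JSON-encoded find_query parameter; below is a hedged sketch using requests. The island address, the query and the TLS handling are illustrative, and a real call must also present the island's JWT auth token (acquisition not shown), since the resources are protected by jwt_required().

import json

import requests

ISLAND = "https://<island-ip>:5000"                     # placeholder; use the island's real address and port
find_query = json.dumps({"guid": "some-monkey-guid"})   # any MongoDB find() query, JSON-encoded

# NOTE: authentication headers are omitted here; without a valid JWT the island will reject the request.
response = requests.get(ISLAND + "/api/test/monkey",
                        params={"find_query": find_query},
                        verify=False)                   # the island commonly serves a self-signed certificate (assumption)
print(response.json()["results"])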

View File

@ -6,6 +6,7 @@ from monkey_island.cc.services.attack.technique_reports import T1145, T1105, T10
from monkey_island.cc.services.attack.technique_reports import T1090, T1041, T1222, T1005, T1018, T1016, T1021, T1064
from monkey_island.cc.services.attack.attack_config import AttackConfig
from monkey_island.cc.database import mongo
from monkey_island.cc.services.reporting.report_generation_synchronisation import safe_generate_attack_report
__author__ = "VakarisZ"
@ -88,7 +89,8 @@ class AttackReportService:
report_modifytime = latest_report['meta']['latest_monkey_modifytime']
if monkey_modifytime and report_modifytime and monkey_modifytime == report_modifytime:
return latest_report
return AttackReportService.generate_new_report()
return safe_generate_attack_report()
@staticmethod
def is_report_generated():

View File

@ -2,24 +2,6 @@ SCHEMA = {
"title": "ATT&CK configuration",
"type": "object",
"properties": {
"initial_access": {
"title": "Initial access",
"type": "object",
"properties": {
"T1078": {
"title": "T1078 Valid accounts",
"type": "bool",
"value": True,
"necessary": False,
"description": "Mapped with T1003 Credential dumping because both techniques "
"require same credential harvesting modules. "
"Adversaries may steal the credentials of a specific user or service account using "
"Credential Access techniques or capture credentials earlier in their "
"reconnaissance process.",
"depends_on": ["T1003"]
}
}
},
"lateral_movement": {
"title": "Lateral movement",
"type": "object",

View File

@ -448,13 +448,13 @@ SCHEMA = {
"victims_max_find": {
"title": "Max victims to find",
"type": "integer",
"default": 30,
"default": 100,
"description": "Determines the maximum number of machines the monkey is allowed to scan"
},
"victims_max_exploit": {
"title": "Max victims to exploit",
"type": "integer",
"default": 7,
"default": 15,
"description":
"Determines the maximum number of machines the monkey"
" is allowed to successfully exploit. " + WARNING_SIGN

View File

@ -2,6 +2,7 @@ from bson import ObjectId
from monkey_island.cc.database import mongo
import monkey_island.cc.services.node
from monkey_island.cc.models import Monkey
__author__ = "itay.mizeretz"
@ -141,15 +142,18 @@ class EdgeService:
@staticmethod
def get_edge_label(edge):
NodeService = monkey_island.cc.services.node.NodeService
from_label = NodeService.get_monkey_label(NodeService.get_monkey_by_id(edge["from"]))
if edge["to"] == ObjectId("000000000000000000000000"):
from_id = edge["from"]
to_id = edge["to"]
from_label = Monkey.get_label_by_id(from_id)
if to_id == ObjectId("000000000000000000000000"):
to_label = 'MonkeyIsland'
else:
to_id = NodeService.get_monkey_by_id(edge["to"])
if to_id is None:
to_label = NodeService.get_node_label(NodeService.get_node_by_id(edge["to"]))
if Monkey.is_monkey(to_id):
to_label = Monkey.get_label_by_id(to_id)
else:
to_label = NodeService.get_monkey_label(to_id)
to_label = NodeService.get_node_label(NodeService.get_node_by_id(to_id))
RIGHT_ARROW = "\\u2192"
return "%s %s %s" % (from_label, RIGHT_ARROW, to_label)

View File

@ -22,10 +22,6 @@ class NodeService:
if ObjectId(node_id) == NodeService.get_monkey_island_pseudo_id():
return NodeService.get_monkey_island_node()
edges = EdgeService.get_displayed_edges_by_to(node_id, for_report)
accessible_from_nodes = []
exploits = []
new_node = {"id": node_id}
node = NodeService.get_node_by_id(node_id)
@ -46,16 +42,29 @@ class NodeService:
new_node["ip_addresses"] = node["ip_addresses"]
new_node["domain_name"] = node["domain_name"]
accessible_from_nodes = []
accessible_from_nodes_hostnames = []
exploits = []
edges = EdgeService.get_displayed_edges_by_to(node_id, for_report)
for edge in edges:
accessible_from_nodes.append(NodeService.get_monkey_label(NodeService.get_monkey_by_id(edge["from"])))
from_node_id = edge["from"]
from_node_label = Monkey.get_label_by_id(from_node_id)
from_node_hostname = Monkey.get_hostname_by_id(from_node_id)
accessible_from_nodes.append(from_node_label)
accessible_from_nodes_hostnames.append(from_node_hostname)
for exploit in edge["exploits"]:
exploit["origin"] = NodeService.get_monkey_label(NodeService.get_monkey_by_id(edge["from"]))
exploit["origin"] = from_node_label
exploits.append(exploit)
exploits = sorted(exploits, key=lambda exploit: exploit['timestamp'])
new_node["exploits"] = exploits
new_node["accessible_from_nodes"] = accessible_from_nodes
new_node["accessible_from_nodes_hostnames"] = accessible_from_nodes_hostnames
if len(edges) > 0:
new_node["services"] = edges[-1]["services"]
else:
@ -104,6 +113,7 @@ class NodeService:
@staticmethod
def get_monkey_label(monkey):
# todo
label = monkey["hostname"] + " : " + monkey["ip_addresses"][0]
ip_addresses = local_ip_addresses()
if len(set(monkey["ip_addresses"]).intersection(ip_addresses)) > 0:
@ -129,15 +139,18 @@ class NodeService:
@staticmethod
def monkey_to_net_node(monkey, for_report=False):
label = monkey['hostname'] if for_report else NodeService.get_monkey_label(monkey)
is_monkey_dead = Monkey.get_single_monkey_by_id(monkey["_id"]).is_dead()
monkey_id = monkey["_id"]
label = Monkey.get_hostname_by_id(monkey_id) if for_report else Monkey.get_label_by_id(monkey_id)
monkey_group = NodeService.get_monkey_group(monkey)
return \
{
"id": monkey["_id"],
"id": monkey_id,
"label": label,
"group": NodeService.get_monkey_group(monkey),
"group": monkey_group,
"os": NodeService.get_monkey_os(monkey),
"dead": is_monkey_dead,
# The monkey is running IFF the group contains "_running". Therefore it's dead IFF the group does NOT
# contain "_running". This is a small optimisation, to not call "is_dead" twice.
"dead": "_running" not in monkey_group,
"domain_name": "",
"pba_results": monkey["pba_results"] if "pba_results" in monkey else []
}
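
The "dead" field above leans on the naming convention that a running monkey's group name always contains "_running"; a quick illustration with hypothetical group names (the real values come from NodeService.get_monkey_group):

for group in ("monkey_linux_running", "monkey_windows"):
    is_dead = "_running" not in group  # same check as in monkey_to_net_node
    print(group, "->", "dead" if is_dead else "running")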

View File

@ -1,29 +1,27 @@
import itertools
import functools
import ipaddress
import itertools
import logging
import ipaddress
from bson import json_util
from enum import Enum
from six import text_type
from common.network.network_range import NetworkRange
from common.network.segmentation_utils import get_ip_in_src_and_not_in_dst
from monkey_island.cc.database import mongo
from monkey_island.cc.models import Monkey
from monkey_island.cc.services.reporting.report_exporter_manager import ReportExporterManager
from monkey_island.cc.services.config import ConfigService
from monkey_island.cc.services.configuration.utils import get_config_network_segments_as_subnet_groups
from monkey_island.cc.services.edge import EdgeService
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.utils import local_ip_addresses, get_subnets
from monkey_island.cc.services.reporting.pth_report import PTHReportService
from common.network.network_range import NetworkRange
from monkey_island.cc.services.reporting.report_exporter_manager import ReportExporterManager
from monkey_island.cc.services.reporting.report_generation_synchronisation import safe_generate_regular_report
from monkey_island.cc.utils import local_ip_addresses, get_subnets
__author__ = "itay.mizeretz"
logger = logging.getLogger(__name__)
@ -119,22 +117,17 @@ class ReportService:
@staticmethod
def get_scanned():
formatted_nodes = []
nodes = \
[NodeService.get_displayed_node_by_id(node['_id'], True) for node in mongo.db.node.find({}, {'_id': 1})] \
+ [NodeService.get_displayed_node_by_id(monkey['_id'], True) for monkey in
mongo.db.monkey.find({}, {'_id': 1})]
nodes = ReportService.get_all_displayed_nodes()
for node in nodes:
nodes_that_can_access_current_node = node['accessible_from_nodes_hostnames']
formatted_nodes.append(
{
'label': node['label'],
'ip_addresses': node['ip_addresses'],
'accessible_from_nodes':
list((x['hostname'] for x in
(NodeService.get_displayed_node_by_id(edge['from'], True)
for edge in EdgeService.get_displayed_edges_by_to(node['id'], True)))),
'accessible_from_nodes': nodes_that_can_access_current_node,
'services': node['services'],
'domain_name': node['domain_name'],
'pba_results': node['pba_results'] if 'pba_results' in node else 'None'
@ -144,25 +137,37 @@ class ReportService:
return formatted_nodes
@staticmethod
def get_all_displayed_nodes():
nodes_without_monkeys = [NodeService.get_displayed_node_by_id(node['_id'], True) for node in
mongo.db.node.find({}, {'_id': 1})]
nodes_with_monkeys = [NodeService.get_displayed_node_by_id(monkey['_id'], True) for monkey in
mongo.db.monkey.find({}, {'_id': 1})]
nodes = nodes_without_monkeys + nodes_with_monkeys
return nodes
@staticmethod
def get_exploited():
exploited = \
exploited_with_monkeys = \
[NodeService.get_displayed_node_by_id(monkey['_id'], True) for monkey in
mongo.db.monkey.find({}, {'_id': 1})
if not NodeService.get_monkey_manual_run(NodeService.get_monkey_by_id(monkey['_id']))] \
+ [NodeService.get_displayed_node_by_id(node['_id'], True)
for node in mongo.db.node.find({'exploited': True}, {'_id': 1})]
mongo.db.monkey.find({}, {'_id': 1}) if
not NodeService.get_monkey_manual_run(NodeService.get_monkey_by_id(monkey['_id']))]
exploited_without_monkeys = [NodeService.get_displayed_node_by_id(node['_id'], True) for node in
mongo.db.node.find({'exploited': True}, {'_id': 1})]
exploited = exploited_with_monkeys + exploited_without_monkeys
exploited = [
{
'label': monkey['label'],
'ip_addresses': monkey['ip_addresses'],
'domain_name': monkey['domain_name'],
'label': exploited_node['label'],
'ip_addresses': exploited_node['ip_addresses'],
'domain_name': exploited_node['domain_name'],
'exploits': list(set(
[ReportService.EXPLOIT_DISPLAY_DICT[exploit['exploiter']] for exploit in monkey['exploits'] if
exploit['result']]))
[ReportService.EXPLOIT_DISPLAY_DICT[exploit['exploiter']] for exploit in exploited_node['exploits']
if exploit['result']]))
}
for monkey in exploited]
for exploited_node in exploited]
logger.info('Exploited nodes generated for reporting')
@ -209,8 +214,9 @@ class ReportService:
# Pick out all ssh keys not yet included in creds
ssh_keys = [{'username': key_pair['name'], 'type': 'Clear SSH private key',
'origin': origin} for key_pair in telem['data']['ssh_info']
if key_pair['private_key'] and {'username': key_pair['name'], 'type': 'Clear SSH private key',
'origin': origin} not in creds]
if
key_pair['private_key'] and {'username': key_pair['name'], 'type': 'Clear SSH private key',
'origin': origin} not in creds]
creds.extend(ssh_keys)
return creds
@ -699,6 +705,8 @@ class ReportService:
cross_segment_issues = ReportService.get_cross_segment_issues()
monkey_latest_modify_time = Monkey.get_latest_modifytime()
scanned_nodes = ReportService.get_scanned()
exploited_nodes = ReportService.get_exploited()
report = \
{
'overview':
@ -717,8 +725,8 @@ class ReportService:
},
'glance':
{
'scanned': ReportService.get_scanned(),
'exploited': ReportService.get_exploited(),
'scanned': scanned_nodes,
'exploited': exploited_nodes,
'stolen_creds': ReportService.get_stolen_creds(),
'azure_passwords': ReportService.get_azure_creds(),
'ssh_keys': ReportService.get_ssh_keys(),
@ -751,7 +759,6 @@ class ReportService:
report_as_json = json_util.dumps(report_dict).replace('.', ',,,')
return json_util.loads(report_as_json)
@staticmethod
def is_latest_report_exists():
"""
@ -780,7 +787,7 @@ class ReportService:
def get_report():
if ReportService.is_latest_report_exists():
return ReportService.decode_dot_char_before_mongo_insert(mongo.db.report.find_one())
return ReportService.generate_report()
return safe_generate_regular_report()
@staticmethod
def did_exploit_type_succeed(exploit_type):

View File

@ -0,0 +1,52 @@
import logging
import threading
logger = logging.getLogger(__name__)
# These are pseudo-singletons: global locks that allow only one thread to generate a report at a time.
# Report generation can be quite slow when there is a lot of data, and the UI queries the Root service often; without
# the locks, these requests would pile up, overload the server and eventually crash it.
logger.debug("Initializing report generation locks.")
__report_generating_lock = threading.Semaphore()
__attack_report_generating_lock = threading.Semaphore()
__regular_report_generating_lock = threading.Semaphore()
def safe_generate_reports():
# Entering the critical section; Wait until report generation is available.
__report_generating_lock.acquire()
report = safe_generate_regular_report()
attack_report = safe_generate_attack_report()
# Leaving the critical section.
__report_generating_lock.release()
return report, attack_report
def safe_generate_regular_report():
# Local import to avoid circular imports
from monkey_island.cc.services.reporting.report import ReportService
__regular_report_generating_lock.acquire()
report = ReportService.generate_report()
__regular_report_generating_lock.release()
return report
def safe_generate_attack_report():
# Local import to avoid circular imports
from monkey_island.cc.services.attack.attack_report import AttackReportService
__attack_report_generating_lock.acquire()
attack_report = AttackReportService.generate_new_report()
__attack_report_generating_lock.release()
return attack_report
def is_report_being_generated():
# From https://docs.python.org/2/library/threading.html#threading.Semaphore.acquire:
# When invoked with blocking set to false, do not block.
# If a call without an argument would block, return false immediately;
# otherwise, do the same thing as when called without arguments, and return true.
is_report_being_generated_right_now = not __report_generating_lock.acquire(blocking=False)
if not is_report_being_generated_right_now:
# We're not using the critical resource; we just checked its state.
__report_generating_lock.release()
return is_report_being_generated_right_now
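
The whole module boils down to two uses of threading.Semaphore: a blocking acquire around the slow work, and a non-blocking acquire to peek at the state. A minimal, self-contained sketch of that pattern (illustrative names, not the Island's module):

import threading

_lock = threading.Semaphore()

def do_slow_work():
    _lock.acquire()
    try:
        pass  # slow report generation would happen here
    finally:
        _lock.release()

def is_work_in_progress():
    # Try to take the lock without blocking; failure means another thread holds it.
    acquired = _lock.acquire(blocking=False)
    if acquired:
        # We only wanted to inspect the state, so hand the lock straight back.
        _lock.release()
    return not acquired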

View File

@ -1,4 +1,5 @@
from monkey_island.cc.database import mongo
from monkey_island.cc.models import Monkey
from monkey_island.cc.services import mimikatz_utils
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.services.config import ConfigService
@ -12,6 +13,7 @@ def process_system_info_telemetry(telemetry_json):
process_credential_info(telemetry_json)
process_mimikatz_and_wmi_info(telemetry_json)
process_aws_data(telemetry_json)
update_db_with_new_hostname(telemetry_json)
test_antivirus_existence(telemetry_json)
@ -97,3 +99,7 @@ def process_aws_data(telemetry_json):
monkey_id = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid']).get('_id')
mongo.db.monkey.update_one({'_id': monkey_id},
{'$set': {'aws_instance_id': telemetry_json['data']['aws']['instance_id']}})
def update_db_with_new_hostname(telemetry_json):
Monkey.get_single_monkey_by_id(telemetry_json['_id']).set_hostname(telemetry_json['data']['hostname'])

View File

@ -6,6 +6,7 @@ import array
import struct
import ipaddress
from netifaces import interfaces, ifaddresses, AF_INET
from ring import lru
__author__ = 'Barak'
@ -46,9 +47,13 @@ else:
# name of interface is (namestr[i:i+16].split('\0', 1)[0]
finally:
return result
# End of local ips function
# The list of local IP addresses rarely changes, so we can cache the result and never compute it more than once.
# This stopgap measure is here because the function is called many times during report generation.
# It also means that if the interfaces of the Island machine change, the Island process needs to be restarted.
@lru(maxsize=1)
def local_ip_addresses():
ip_list = []
for interface in interfaces():
@ -57,6 +62,11 @@ def local_ip_addresses():
return ip_list
# The subnets list rarely changes, so we can cache the result and never compute it more than once.
# This stopgap measure is here because the function is called many times during report generation.
# It also means that if the interfaces or subnets of the Island machine change, the Island process needs to be restarted.
@lru(maxsize=1)
def get_subnets():
subnets = []
for interface in interfaces():

View File

@ -26,3 +26,4 @@ mongoengine
mongomock
requests
dpath
ring
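
The ring entry above supplies the @lru decorator applied to local_ip_addresses and get_subnets earlier in this change; the standard library's functools.lru_cache gives an equivalent, self-contained sketch of the caching being relied on (the hard-coded tuple stands in for the real interface enumeration):

import functools

@functools.lru_cache(maxsize=1)
def cached_local_ip_addresses():
    # Stand-in for enumerating interfaces; executed only on the first call.
    print("enumerating interfaces...")
    return ("10.0.0.5", "192.168.1.17")

cached_local_ip_addresses()  # computes and caches the result
cached_local_ip_addresses()  # served from the cache; nothing is recomputed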