forked from p15670423/monkey
commit 2792bac5e6

.flake8 (2 changes)
@@ -1,7 +1,7 @@
 [flake8]
 ## Warn about linter issues.

-exclude = ../monkey/monkey_island/cc/ui
+exclude = monkey/monkey_island/cc/ui
 show-source = True
 max-complexity = 10
 max-line-length = 100
@@ -55,7 +55,7 @@ install:
 script:
 # Check Python code
 ## Check syntax errors and fail the build if any are found.
-- flake8 ./monkey
+- flake8 .

 ## Check import order
 - python -m isort ./monkey --check-only
@@ -17,4 +17,5 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Monkey agents are stored in the configurable data_dir when monkey is "run
 from the island". #997
 - Reformated all code using black. #1070
-- Sort all imports usind isort. #1081
+- Sorted all imports usind isort. #1081
+- Addressed all flake8 issues. #1071
@@ -33,7 +33,8 @@ class PerformanceAnalyzer(Analyzer):

 if self.performance_test_config.break_on_timeout and not performance_is_good_enough:
 LOGGER.warning(
-"Calling breakpoint - pausing to enable investigation of island. Type 'c' to continue once you're done "
+"Calling breakpoint - pausing to enable investigation of island. "
+"Type 'c' to continue once you're done "
 "investigating. Type 'p timings' and 'p total_time' to see performance information."
 )
 breakpoint()
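
Note: nearly every change in this commit shortens a long line by splitting one string literal into two adjacent literals, as in the LOGGER.warning() hunk above. A minimal sketch (the variable names below are invented; the message text is taken from the hunk) of why this is behavior-preserving: Python joins adjacent string literals at compile time, so the logged message is identical.

single = "Calling breakpoint - pausing to enable investigation of island. Type 'c' to continue once you're done "
split = (
    "Calling breakpoint - pausing to enable investigation of island. "
    "Type 'c' to continue once you're done "
)
assert single == split  # adjacent literals are concatenated by the parser, not at runtime
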
@@ -3,12 +3,7 @@ from typing import List

 import dpath.util

-from common.config_value_paths import (
-LM_HASH_LIST_PATH,
-NTLM_HASH_LIST_PATH,
-PASSWORD_LIST_PATH,
-USER_LIST_PATH,
-)
+from common.config_value_paths import LM_HASH_LIST_PATH, NTLM_HASH_LIST_PATH, USER_LIST_PATH
 from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
 from envs.monkey_zoo.blackbox.analyzers.analyzer_log import AnalyzerLog
 from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
@@ -1,6 +1,6 @@
 import random

-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import ( # noqa: E501
 FakeIpGenerator,
 )

@@ -9,10 +9,10 @@ from tqdm import tqdm
 from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import (
 SampleFileParser,
 )
-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import ( # noqa: E501
 FakeIpGenerator,
 )
-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import (
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import ( # noqa: E501
 FakeMonkey,
 )

@@ -1,6 +1,6 @@
 from unittest import TestCase

-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import ( # noqa: E501
 FakeIpGenerator,
 )

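
Note: the three import hunks above append "# noqa: E501" instead of splitting the line, since a dotted import path cannot be broken across physical lines. A hedged sketch of what the marker does (the constant name below is invented; the URL is one that appears elsewhere in this diff): flake8 skips only the named check, E501 (line too long), on that one line, while every other check still applies.

REGIONS_DOC_URL = "https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html"  # noqa: E501
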
@@ -9,7 +9,6 @@ from common.cloud.instance import CloudInstance

 __author__ = "itay.mizeretz"

-
 AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS = "169.254.169.254"
 AWS_LATEST_METADATA_URI_PREFIX = "http://{0}/latest/".format(AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS)
 ACCOUNT_ID_KEY = "accountId"
@@ -60,7 +59,8 @@ class AwsInstance(CloudInstance):
 @staticmethod
 def _parse_region(region_url_response):
 # For a list of regions, see:
-# https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html
+# https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts
+# .RegionsAndAvailabilityZones.html
 # This regex will find any AWS region format string in the response.
 re_phrase = r"((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])"
 finding = re.findall(re_phrase, region_url_response, re.IGNORECASE)
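
Note: a small usage sketch of the region regex kept unchanged in the hunk above. The sample response string is invented; real AWS metadata responses return an availability zone in a similar format.

import re

re_phrase = r"((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])"
sample_response = "us-west-2b"  # made-up availability-zone string
print(re.findall(re_phrase, sample_response, re.IGNORECASE))  # ['us-west-2']
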
@@ -79,9 +79,11 @@ class AwsInstance(CloudInstance):
 def _extract_account_id(instance_identity_document_response):
 """
 Extracts the account id from the dynamic/instance-identity/document metadata path.
-Based on https://forums.aws.amazon.com/message.jspa?messageID=409028 which has a few more solutions,
+Based on https://forums.aws.amazon.com/message.jspa?messageID=409028 which has a few more
+solutions,
 in case Amazon break this mechanism.
-:param instance_identity_document_response: json returned via the web page ../dynamic/instance-identity/document
+:param instance_identity_document_response: json returned via the web page
+../dynamic/instance-identity/document
 :return: The account id
 """
 return json.loads(instance_identity_document_response)[ACCOUNT_ID_KEY]
@@ -31,12 +31,14 @@ def filter_instance_data_from_aws_response(response):

 class AwsService(object):
 """
-A wrapper class around the boto3 client and session modules, which supplies various AWS services.
+A wrapper class around the boto3 client and session modules, which supplies various AWS
+services.

 This class will assume:
 1. That it's running on an EC2 instance
 2. That the instance is associated with the correct IAM role. See
-https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam-role for details.
+https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam-role
+for details.
 """

 region = None
@@ -73,7 +75,8 @@ class AwsService(object):
 Get the information for all instances with the relevant roles.

 This function will assume that it's running on an EC2 instance with the correct IAM role.
-See https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam-role for details.
+See https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam
+-role for details.

 :raises: botocore.exceptions.ClientError if can't describe local instance information.
 :return: All visible instances from this instance
@@ -30,7 +30,6 @@ INSTANCE_IDENTITY_DOCUMENT_RESPONSE = """
 }
 """

-
 EXPECTED_INSTANCE_ID = "i-1234567890abcdef0"

 EXPECTED_REGION = "us-west-2"
@@ -18,7 +18,8 @@ logger = logging.getLogger(__name__)
 class AzureInstance(CloudInstance):
 """
 Access to useful information about the current machine if it's an Azure VM.
-Based on Azure metadata service: https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-metadata-service
+Based on Azure metadata service:
+https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-metadata-service
 """

 def is_instance(self):
@@ -44,7 +45,8 @@ class AzureInstance(CloudInstance):
 )

 # If not on cloud, the metadata URL is non-routable and the connection will fail.
-# If on AWS, should get 404 since the metadata service URL is different, so bool(response) will be false.
+# If on AWS, should get 404 since the metadata service URL is different,
+# so bool(response) will be false.
 if response:
 logger.debug("Trying to parse Azure metadata.")
 self.try_parse_response(response)
@@ -52,7 +54,8 @@ class AzureInstance(CloudInstance):
 logger.warning(f"Metadata response not ok: {response.status_code}")
 except requests.RequestException:
 logger.debug(
-"Failed to get response from Azure metadata service: This instance is not on Azure."
+"Failed to get response from Azure metadata service: This instance is not on "
+"Azure."
 )

 def try_parse_response(self, response):
@@ -30,7 +30,8 @@ GOOD_DATA = {
 ],
 "publisher": "RDFE-Test-Microsoft-Windows-Server-Group",
 "resourceGroupName": "macikgo-test-may-23",
-"resourceId": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test-may-23/"
+"resourceId": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test"
+"-may-23/"
 "providers/Microsoft.Compute/virtualMachines/examplevmname",
 "securityProfile": {"secureBootEnabled": "true", "virtualTpmEnabled": "false"},
 "sku": "Windows-Server-2012-R2-Datacenter",
@@ -101,12 +102,16 @@ GOOD_DATA = {
 },
 }

-BAD_DATA_NOT_JSON = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/\
-xhtml1-transitional.dtd">\n<html xmlns="http://www.w3.org/1999/xhtml">\n<head>\n<meta content="text/html; charset=utf-8" \
-http-equiv="Content-Type" />\n<meta content="no-cache" http-equiv="Pragma" />\n<title>Waiting...</title>\n<script type="text/\
-javascript">\nvar pageName = \'/\';\ntop.location.replace(pageName);\n</script>\n</head>\n<body> </body>\n</html>\n'
+BAD_DATA_NOT_JSON = (
+'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" '
+'"http://www.w3.org/TR/xhtml1/DTD/\
+xhtml1-transitional.dtd">\n<html xmlns="http://www.w3.org/1999/xhtml">\n<head>\n<meta '
+'content="text/html; charset=utf-8" \
+http-equiv="Content-Type" />\n<meta content="no-cache" http-equiv="Pragma" '
+"/>\n<title>Waiting...</title>\n<script type=\"text/\
+javascript\">\nvar pageName = '/';\ntop.location.replace(pageName);\n</script>\n</head>\n<body> "
+"</body>\n</html>\n"
+)

 BAD_DATA_JSON = {"": ""}

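
Note: the BAD_DATA_NOT_JSON hunk above trades a backslash-continued single literal for a parenthesized group of adjacent literals. A toy sketch (strings invented, much shorter than the real test data) showing the two spellings yield the same value; a backslash before the newline continues the literal without inserting a newline character.

old_style = 'line one \
line two'
new_style = (
    'line one '
    'line two'
)
assert old_style == new_style
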
@@ -8,13 +8,13 @@ from common.common_consts.timeouts import SHORT_REQUEST_TIMEOUT

 logger = logging.getLogger(__name__)

-
 GCP_METADATA_SERVICE_URL = "http://metadata.google.internal/"


 class GcpInstance(CloudInstance):
 """
-Used to determine if on GCP. See https://cloud.google.com/compute/docs/storing-retrieving-metadata#runninggce
+Used to determine if on GCP. See https://cloud.google.com/compute/docs/storing-retrieving
+-metadata#runninggce
 """

 def is_instance(self):
@@ -20,7 +20,8 @@ class AwsCmdResult(CmdResult):
 @staticmethod
 def is_successful(command_info, is_timeout=False):
 """
-Determines whether the command was successful. If it timed out and was still in progress, we assume it worked.
+Determines whether the command was successful. If it timed out and was still in progress,
+we assume it worked.
 :param command_info: Command info struct (returned by ssm.get_command_invocation)
 :param is_timeout: Whether the given command timed out
 :return: True if successful, False otherwise.
@@ -21,8 +21,10 @@ class CmdRunner(object):
 * command id - any unique identifier of a command which was already run
 * command result - represents the result of running a command. Always of type CmdResult
 * command status - represents the current status of a command. Always of type CmdStatus
-* command info - Any consistent structure representing additional information of a command which was already run
-* instance - a machine that commands will be run on. Can be any dictionary with 'instance_id' as a field
+* command info - Any consistent structure representing additional information of a command
+which was already run
+* instance - a machine that commands will be run on. Can be any dictionary with 'instance_id'
+as a field
 * instance_id - any unique identifier of an instance (machine). Can be of any format
 """

@@ -49,7 +51,8 @@ class CmdRunner(object):
 """
 Run multiple commands on various instances
 :param instances: List of instances.
-:param inst_to_cmd: Function which receives an instance, runs a command asynchronously and returns Cmd
+:param inst_to_cmd: Function which receives an instance, runs a command asynchronously
+and returns Cmd
 :param inst_n_cmd_res_to_res: Function which receives an instance and CmdResult
 and returns a parsed result (of any format)
 :return: Dictionary with 'instance_id' as key and parsed result as value
@@ -1,8 +1,10 @@
 """
-This file contains all the static data relating to Zero Trust. It is mostly used in the zero trust report generation and
+This file contains all the static data relating to Zero Trust. It is mostly used in the zero
+trust report generation and
 in creating findings.

-This file contains static mappings between zero trust components such as: pillars, principles, tests, statuses.
+This file contains static mappings between zero trust components such as: pillars, principles,
+tests, statuses.
 Some of the mappings are computed when this module is loaded.
 """

@@ -82,12 +84,16 @@ PRINCIPLES = {
 PRINCIPLE_SEGMENTATION: "Apply segmentation and micro-segmentation inside your network.",
 PRINCIPLE_ANALYZE_NETWORK_TRAFFIC: "Analyze network traffic for malicious activity.",
 PRINCIPLE_USER_BEHAVIOUR: "Adopt security user behavior analytics.",
-PRINCIPLE_ENDPOINT_SECURITY: "Use anti-virus and other traditional endpoint security solutions.",
+PRINCIPLE_ENDPOINT_SECURITY: "Use anti-virus and other traditional endpoint "
+"security solutions.",
 PRINCIPLE_DATA_CONFIDENTIALITY: "Ensure data's confidentiality by encrypting it.",
-PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES: "Configure network policies to be as restrictive as possible.",
-PRINCIPLE_USERS_MAC_POLICIES: "Users' permissions to the network and to resources should be MAC (Mandatory "
+PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES: "Configure network policies to be as restrictive as "
+"possible.",
+PRINCIPLE_USERS_MAC_POLICIES: "Users' permissions to the network and to resources "
+"should be MAC (Mandatory "
 "Access Control) only.",
-PRINCIPLE_DISASTER_RECOVERY: "Ensure data and infrastructure backups for disaster recovery scenarios.",
+PRINCIPLE_DISASTER_RECOVERY: "Ensure data and infrastructure backups for disaster "
+"recovery scenarios.",
 PRINCIPLE_SECURE_AUTHENTICATION: "Ensure secure authentication process's.",
 PRINCIPLE_MONITORING_AND_LOGGING: "Ensure monitoring and logging in network resources.",
 }
@@ -99,32 +105,40 @@ FINDING_EXPLANATION_BY_STATUS_KEY = "finding_explanation"
 TEST_EXPLANATION_KEY = "explanation"
 TESTS_MAP = {
 TEST_SEGMENTATION: {
-TEST_EXPLANATION_KEY: "The Monkey tried to scan and find machines that it can communicate with from the machine it's "
+TEST_EXPLANATION_KEY: "The Monkey tried to scan and find machines that it can "
+"communicate with from the machine it's "
 "running on, that belong to different network segments.",
 FINDING_EXPLANATION_BY_STATUS_KEY: {
-STATUS_FAILED: "Monkey performed cross-segment communication. Check firewall rules and logs.",
-STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs.",
+STATUS_FAILED: "Monkey performed cross-segment communication. Check firewall rules and"
+" logs.",
+STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, "
+"check firewall logs.",
 },
 PRINCIPLE_KEY: PRINCIPLE_SEGMENTATION,
 PILLARS_KEY: [NETWORKS],
 POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED],
 },
 TEST_MALICIOUS_ACTIVITY_TIMELINE: {
-TEST_EXPLANATION_KEY: "The Monkeys in the network performed malicious-looking actions, like scanning and attempting "
+TEST_EXPLANATION_KEY: "The Monkeys in the network performed malicious-looking "
+"actions, like scanning and attempting "
 "exploitation.",
 FINDING_EXPLANATION_BY_STATUS_KEY: {
-STATUS_VERIFY: "Monkey performed malicious actions in the network. Check SOC logs and alerts."
+STATUS_VERIFY: "Monkey performed malicious actions in the network. Check SOC logs and "
+"alerts."
 },
 PRINCIPLE_KEY: PRINCIPLE_ANALYZE_NETWORK_TRAFFIC,
 PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
 POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY],
 },
 TEST_ENDPOINT_SECURITY_EXISTS: {
-TEST_EXPLANATION_KEY: "The Monkey checked if there is an active process of an endpoint security software.",
+TEST_EXPLANATION_KEY: "The Monkey checked if there is an active process of an "
+"endpoint security software.",
 FINDING_EXPLANATION_BY_STATUS_KEY: {
-STATUS_FAILED: "Monkey didn't find ANY active endpoint security processes. Install and activate anti-virus "
+STATUS_FAILED: "Monkey didn't find ANY active endpoint security processes. Install and "
+"activate anti-virus "
 "software on endpoints.",
-STATUS_PASSED: "Monkey found active endpoint security processes. Check their logs to see if Monkey was a "
+STATUS_PASSED: "Monkey found active endpoint security processes. Check their logs to "
+"see if Monkey was a "
 "security concern. ",
 },
 PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
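
Note: in the TESTS_MAP hunks the split string is a dict value, so the wrapped text is written as a second adjacent literal and the comma only appears once the value is complete; literals separated by nothing but whitespace are joined into one value. A toy sketch (key name and text copied from the hunk above, dict name invented):

TEST_EXPLANATION_KEY = "explanation"
TESTS_MAP_SKETCH = {
    TEST_EXPLANATION_KEY: "The Monkey checked if there is an active process of an "
    "endpoint security software.",
}
assert TESTS_MAP_SKETCH[TEST_EXPLANATION_KEY].endswith("endpoint security software.")
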
@@ -132,9 +146,11 @@ TESTS_MAP = {
 POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
 },
 TEST_MACHINE_EXPLOITED: {
-TEST_EXPLANATION_KEY: "The Monkey tries to exploit machines in order to breach them and propagate in the network.",
+TEST_EXPLANATION_KEY: "The Monkey tries to exploit machines in order to "
+"breach them and propagate in the network.",
 FINDING_EXPLANATION_BY_STATUS_KEY: {
-STATUS_FAILED: "Monkey successfully exploited endpoints. Check IDS/IPS logs to see activity recognized and see "
+STATUS_FAILED: "Monkey successfully exploited endpoints. Check IDS/IPS logs to see "
+"activity recognized and see "
 "which endpoints were compromised.",
 STATUS_PASSED: "Monkey didn't manage to exploit an endpoint.",
 },
@@ -145,7 +161,8 @@ TESTS_MAP = {
 TEST_SCHEDULED_EXECUTION: {
 TEST_EXPLANATION_KEY: "The Monkey was executed in a scheduled manner.",
 FINDING_EXPLANATION_BY_STATUS_KEY: {
-STATUS_VERIFY: "Monkey was executed in a scheduled manner. Locate this activity in User-Behavior security "
+STATUS_VERIFY: "Monkey was executed in a scheduled manner. Locate this activity in "
+"User-Behavior security "
 "software.",
 STATUS_PASSED: "Monkey failed to execute in a scheduled manner.",
 },
@@ -154,10 +171,13 @@ TESTS_MAP = {
 POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY],
 },
 TEST_DATA_ENDPOINT_ELASTIC: {
-TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to ElasticSearch instances.",
+TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to "
+"ElasticSearch instances.",
 FINDING_EXPLANATION_BY_STATUS_KEY: {
-STATUS_FAILED: "Monkey accessed ElasticSearch instances. Limit access to data by encrypting it in in-transit.",
-STATUS_PASSED: "Monkey didn't find open ElasticSearch instances. If you have such instances, look for alerts "
+STATUS_FAILED: "Monkey accessed ElasticSearch instances. Limit access to data by "
+"encrypting it in in-transit.",
+STATUS_PASSED: "Monkey didn't find open ElasticSearch instances. If you have such "
+"instances, look for alerts "
 "that indicate attempts to access them. ",
 },
 PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
|
@ -165,10 +185,12 @@ TESTS_MAP = {
|
||||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
||||||
},
|
},
|
||||||
TEST_DATA_ENDPOINT_HTTP: {
|
TEST_DATA_ENDPOINT_HTTP: {
|
||||||
TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to HTTP servers.",
|
TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to HTTP " "servers.",
|
||||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||||
STATUS_FAILED: "Monkey accessed HTTP servers. Limit access to data by encrypting it in in-transit.",
|
STATUS_FAILED: "Monkey accessed HTTP servers. Limit access to data by encrypting it in"
|
||||||
STATUS_PASSED: "Monkey didn't find open HTTP servers. If you have such servers, look for alerts that indicate "
|
" in-transit.",
|
||||||
|
STATUS_PASSED: "Monkey didn't find open HTTP servers. If you have such servers, "
|
||||||
|
"look for alerts that indicate "
|
||||||
"attempts to access them. ",
|
"attempts to access them. ",
|
||||||
},
|
},
|
||||||
PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
|
PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
|
||||||
|
@ -176,10 +198,12 @@ TESTS_MAP = {
|
||||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
||||||
},
|
},
|
||||||
TEST_DATA_ENDPOINT_POSTGRESQL: {
|
TEST_DATA_ENDPOINT_POSTGRESQL: {
|
||||||
TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to PostgreSQL servers.",
|
TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to " "PostgreSQL servers.",
|
||||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||||
STATUS_FAILED: "Monkey accessed PostgreSQL servers. Limit access to data by encrypting it in in-transit.",
|
STATUS_FAILED: "Monkey accessed PostgreSQL servers. Limit access to data by encrypting"
|
||||||
STATUS_PASSED: "Monkey didn't find open PostgreSQL servers. If you have such servers, look for alerts that "
|
" it in in-transit.",
|
||||||
|
STATUS_PASSED: "Monkey didn't find open PostgreSQL servers. If you have such servers, "
|
||||||
|
"look for alerts that "
|
||||||
"indicate attempts to access them. ",
|
"indicate attempts to access them. ",
|
||||||
},
|
},
|
||||||
PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
|
PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
|
||||||
|
@ -189,7 +213,8 @@ TESTS_MAP = {
|
||||||
TEST_TUNNELING: {
|
TEST_TUNNELING: {
|
||||||
TEST_EXPLANATION_KEY: "The Monkey tried to tunnel traffic using other monkeys.",
|
TEST_EXPLANATION_KEY: "The Monkey tried to tunnel traffic using other monkeys.",
|
||||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||||
STATUS_FAILED: "Monkey tunneled its traffic using other monkeys. Your network policies are too permissive - "
|
STATUS_FAILED: "Monkey tunneled its traffic using other monkeys. Your network policies "
|
||||||
|
"are too permissive - "
|
||||||
"restrict them. "
|
"restrict them. "
|
||||||
},
|
},
|
||||||
PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
|
PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
|
||||||
|
@ -197,9 +222,11 @@ TESTS_MAP = {
|
||||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED],
|
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED],
|
||||||
},
|
},
|
||||||
TEST_COMMUNICATE_AS_NEW_USER: {
|
TEST_COMMUNICATE_AS_NEW_USER: {
|
||||||
TEST_EXPLANATION_KEY: "The Monkey tried to create a new user and communicate with the internet from it.",
|
TEST_EXPLANATION_KEY: "The Monkey tried to create a new user and communicate "
|
||||||
|
"with the internet from it.",
|
||||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||||
STATUS_FAILED: "Monkey caused a new user to access the network. Your network policies are too permissive - "
|
STATUS_FAILED: "Monkey caused a new user to access the network. Your network policies "
|
||||||
|
"are too permissive - "
|
||||||
"restrict them to MAC only.",
|
"restrict them to MAC only.",
|
||||||
STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network.",
|
STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network.",
|
||||||
},
|
},
|
||||||
|
@ -218,7 +245,7 @@ TESTS_MAP = {
|
||||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
||||||
},
|
},
|
||||||
TEST_SCOUTSUITE_UNENCRYPTED_DATA: {
|
TEST_SCOUTSUITE_UNENCRYPTED_DATA: {
|
||||||
TEST_EXPLANATION_KEY: "ScoutSuite searched for resources containing unencrypted data.",
|
TEST_EXPLANATION_KEY: "ScoutSuite searched for resources containing " "unencrypted data.",
|
||||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||||
STATUS_FAILED: "ScoutSuite found resources with unencrypted data.",
|
STATUS_FAILED: "ScoutSuite found resources with unencrypted data.",
|
||||||
STATUS_PASSED: "ScoutSuite found no resources with unencrypted data.",
|
STATUS_PASSED: "ScoutSuite found no resources with unencrypted data.",
|
||||||
|
@ -228,7 +255,8 @@ TESTS_MAP = {
|
||||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
||||||
},
|
},
|
||||||
TEST_SCOUTSUITE_DATA_LOSS_PREVENTION: {
|
TEST_SCOUTSUITE_DATA_LOSS_PREVENTION: {
|
||||||
TEST_EXPLANATION_KEY: "ScoutSuite searched for resources which are not protected against data loss.",
|
TEST_EXPLANATION_KEY: "ScoutSuite searched for resources which are not "
|
||||||
|
"protected against data loss.",
|
||||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||||
STATUS_FAILED: "ScoutSuite found resources not protected against data loss.",
|
STATUS_FAILED: "ScoutSuite found resources not protected against data loss.",
|
||||||
STATUS_PASSED: "ScoutSuite found that all resources are secured against data loss.",
|
STATUS_PASSED: "ScoutSuite found that all resources are secured against data loss.",
|
||||||
|
@ -238,7 +266,7 @@ TESTS_MAP = {
|
||||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
||||||
},
|
},
|
||||||
TEST_SCOUTSUITE_SECURE_AUTHENTICATION: {
|
TEST_SCOUTSUITE_SECURE_AUTHENTICATION: {
|
||||||
TEST_EXPLANATION_KEY: "ScoutSuite searched for issues related to users' authentication.",
|
TEST_EXPLANATION_KEY: "ScoutSuite searched for issues related to users' " "authentication.",
|
||||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||||
STATUS_FAILED: "ScoutSuite found issues related to users' authentication.",
|
STATUS_FAILED: "ScoutSuite found issues related to users' authentication.",
|
||||||
STATUS_PASSED: "ScoutSuite found no issues related to users' authentication.",
|
STATUS_PASSED: "ScoutSuite found no issues related to users' authentication.",
|
||||||
|
@ -248,7 +276,7 @@ TESTS_MAP = {
|
||||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
|
||||||
},
|
},
|
||||||
TEST_SCOUTSUITE_RESTRICTIVE_POLICIES: {
|
TEST_SCOUTSUITE_RESTRICTIVE_POLICIES: {
|
||||||
TEST_EXPLANATION_KEY: "ScoutSuite searched for permissive user access policies.",
|
TEST_EXPLANATION_KEY: "ScoutSuite searched for permissive user access " "policies.",
|
||||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||||
STATUS_FAILED: "ScoutSuite found permissive user access policies.",
|
STATUS_FAILED: "ScoutSuite found permissive user access policies.",
|
||||||
STATUS_PASSED: "ScoutSuite found no issues related to user access policies.",
|
STATUS_PASSED: "ScoutSuite found no issues related to user access policies.",
|
||||||
|
|
|
@ -159,7 +159,8 @@ class SingleIpRange(NetworkRange):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def string_to_host(string_):
|
def string_to_host(string_):
|
||||||
"""
|
"""
|
||||||
Converts the string that user entered in "Scan IP/subnet list" to a tuple of domain name and ip
|
Converts the string that user entered in "Scan IP/subnet list" to a tuple of domain name
|
||||||
|
and ip
|
||||||
:param string_: String that was entered in "Scan IP/subnet list"
|
:param string_: String that was entered in "Scan IP/subnet list"
|
||||||
:return: A tuple in format (IP, domain_name). Eg. (192.168.55.1, www.google.com)
|
:return: A tuple in format (IP, domain_name). Eg. (192.168.55.1, www.google.com)
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -4,8 +4,10 @@ from urllib.parse import urlparse
|
||||||
|
|
||||||
def get_host_from_network_location(network_location: str) -> str:
|
def get_host_from_network_location(network_location: str) -> str:
|
||||||
"""
|
"""
|
||||||
URL structure is "<scheme>://<net_loc>/<path>;<params>?<query>#<fragment>" (https://tools.ietf.org/html/rfc1808.html)
|
URL structure is "<scheme>://<net_loc>/<path>;<params>?<query>#<fragment>" (
|
||||||
And the net_loc is "<user>:<password>@<host>:<port>" (https://tools.ietf.org/html/rfc1738#section-3.1)
|
https://tools.ietf.org/html/rfc1808.html)
|
||||||
|
And the net_loc is "<user>:<password>@<host>:<port>" (
|
||||||
|
https://tools.ietf.org/html/rfc1738#section-3.1)
|
||||||
:param network_location: server network location
|
:param network_location: server network location
|
||||||
:return: host part of the network location
|
:return: host part of the network location
|
||||||
"""
|
"""
|
||||||
|
|
|
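
Note: a small standard-library sketch of the URL anatomy described in the docstring above (the example URL and credentials are made up):

from urllib.parse import urlparse

parsed = urlparse("http://user:password@example.com:8080/path;params?query#fragment")
print(parsed.netloc)  # 'user:password@example.com:8080', i.e. "<user>:<password>@<host>:<port>"
print(parsed.hostname, parsed.port)  # 'example.com' 8080
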
@@ -14,8 +14,10 @@ def get_ip_in_src_and_not_in_dst(ip_addresses, source_subnet, target_subnet):
 def get_ip_if_in_subnet(ip_addresses, subnet):
 """
 :param ip_addresses: IP address list.
-:param subnet: Subnet to check if one of ip_addresses is in there. This is common.network.network_range.NetworkRange
-:return: The first IP in ip_addresses which is in the subnet if there is one, otherwise returns None.
+:param subnet: Subnet to check if one of ip_addresses is in there. This is
+common.network.network_range.NetworkRange
+:return: The first IP in ip_addresses which is in the subnet if there is one, otherwise
+returns None.
 """
 for ip_address in ip_addresses:
 if subnet.is_in_range(ip_address):
@@ -15,7 +15,8 @@ class ScanStatus(Enum):
 class UsageEnum(Enum):
 SMB = {
 ScanStatus.USED.value: "SMB exploiter ran the monkey by creating a service via MS-SCMR.",
-ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service via MS-SCMR.",
+ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service "
+"via MS-SCMR.",
 }
 MIMIKATZ = {
 ScanStatus.USED.value: "Windows module loader was used to load Mimikatz DLL.",
|
@ -29,7 +30,8 @@ class UsageEnum(Enum):
|
||||||
ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."
|
ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."
|
||||||
}
|
}
|
||||||
SINGLETON_WINAPI = {
|
SINGLETON_WINAPI = {
|
||||||
ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's process.",
|
ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's "
|
||||||
|
"process.",
|
||||||
ScanStatus.SCANNED.value: "WinAPI call to acquire system singleton"
|
ScanStatus.SCANNED.value: "WinAPI call to acquire system singleton"
|
||||||
" for monkey process wasn't successful.",
|
" for monkey process wasn't successful.",
|
||||||
}
|
}
|
||||||
|
|
|
@@ -1,4 +1,5 @@
-# To get the version from shell, run `python ./version.py` (see `python ./version.py -h` for details).
+# To get the version from shell, run `python ./version.py` (see `python ./version.py -h` for
+# details).
 import argparse
 from pathlib import Path

@@ -227,7 +227,8 @@ class Configuration(object):
 @staticmethod
 def hash_sensitive_data(sensitive_data):
 """
-Hash sensitive data (e.g. passwords). Used so the log won't contain sensitive data plain-text, as the log is
+Hash sensitive data (e.g. passwords). Used so the log won't contain sensitive data
+plain-text, as the log is
 saved on client machines plain-text.

 :param sensitive_data: the data to hash.
@@ -23,7 +23,6 @@ from infection_monkey.utils.exceptions.planned_shutdown_exception import Planned

 __author__ = "hoffer"

-
 requests.packages.urllib3.disable_warnings()

 LOG = logging.getLogger(__name__)
@@ -32,7 +31,8 @@ DOWNLOAD_CHUNK = 1024
 PBA_FILE_DOWNLOAD = "https://%s/api/pba/download/%s"

 # random number greater than 5,
-# to prevent the monkey from just waiting forever to try and connect to an island before going elsewhere.
+# to prevent the monkey from just waiting forever to try and connect to an island before going
+# elsewhere.
 TIMEOUT_IN_SECONDS = 15


@@ -412,7 +412,10 @@ class ControlClient(object):
 @staticmethod
 def can_island_see_port(port):
 try:
-url = f"https://{WormConfiguration.current_server}/api/monkey_control/check_remote_port/{port}"
+url = (
+f"https://{WormConfiguration.current_server}/api/monkey_control"
+f"/check_remote_port/{port}"
+)
 response = requests.get(url, verify=False, timeout=SHORT_REQUEST_TIMEOUT)
 response = json.loads(response.content.decode())
 return response["status"] == "port_visible"
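
Note: the can_island_see_port() hunk above wraps the URL in parentheses and splits it into two f-strings. A hedged sketch with dummy values (192.0.2.1 is a documentation address; the real code reads WormConfiguration.current_server) showing the pieces join into the same URL:

current_server = "192.0.2.1:5000"
port = 445

single = f"https://{current_server}/api/monkey_control/check_remote_port/{port}"
split = (
    f"https://{current_server}/api/monkey_control"
    f"/check_remote_port/{port}"
)
assert single == split
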
@@ -152,7 +152,8 @@ class MonkeyDrops(object):
 )
 else:
 dest_path = self._config["destination_path"]
-# In linux we have a more complex commandline. There's a general outer one, and the inner one which actually
+# In linux we have a more complex commandline. There's a general outer one,
+# and the inner one which actually
 # runs the monkey
 inner_monkey_cmdline = (
 MONKEY_CMDLINE_LINUX % {"monkey_filename": dest_path.split("/")[-1]}
@@ -207,7 +208,8 @@ class MonkeyDrops(object):
 dropper_source_path_ctypes, None, MOVEFILE_DELAY_UNTIL_REBOOT
 ):
 LOG.debug(
-"Error marking source file '%s' for deletion on next boot (error %d)",
+"Error marking source file '%s' for deletion on next boot (error "
+"%d)",
 self._config["source_path"],
 ctypes.windll.kernel32.GetLastError(),
 )
@@ -10,7 +10,6 @@ from infection_monkey.utils.plugins.plugin import Plugin

 __author__ = "itamar"

-
 logger = logging.getLogger(__name__)


@@ -37,7 +36,8 @@ class HostExploiter(Plugin):
 EXPLOIT_TYPE = ExploitType.VULNERABILITY

 # Determines if successful exploitation should stop further exploit attempts on that machine.
-# Generally, should be True for RCE type exploiters and False if we don't expect the exploiter to run the monkey agent.
+# Generally, should be True for RCE type exploiters and False if we don't expect the
+# exploiter to run the monkey agent.
 # Example: Zerologon steals credentials
 RUNS_AGENT_ON_SUCCESS = True

@@ -1,7 +1,8 @@
 """
 Remote Code Execution on Drupal server - CVE-2019-6340
 Implementation is based on:
-https://gist.github.com/leonjza/d0ab053be9b06fa020b66f00358e3d88/f9f6a5bb6605745e292bee3a4079f261d891738a.
+https://gist.github.com/leonjza/d0ab053be9b06fa020b66f00358e3d88
+/f9f6a5bb6605745e292bee3a4079f261d891738a.
 """

 import logging
@@ -28,7 +29,8 @@ class DrupalExploiter(WebRCE):

 def get_exploit_config(self):
 """
-We override this function because the exploits requires a special extension in the URL, "node",
+We override this function because the exploits requires a special extension in the URL,
+"node",
 e.g. an exploited URL would be http://172.1.2.3:<port>/node/3.
 :return: the Drupal exploit config
 """
@@ -42,7 +44,8 @@ class DrupalExploiter(WebRCE):

 def add_vulnerable_urls(self, potential_urls, stop_checking=False):
 """
-We need a specific implementation of this function in order to add the URLs *with the node IDs*.
+We need a specific implementation of this function in order to add the URLs *with the
+node IDs*.
 We therefore check, for every potential URL, all possible node IDs.
 :param potential_urls: Potentially-vulnerable URLs
 :param stop_checking: Stop if one vulnerable URL is found
@@ -71,7 +74,8 @@ class DrupalExploiter(WebRCE):
 def check_if_exploitable(self, url):
 """
 Check if a certain URL is exploitable.
-We use this specific implementation (and not simply run self.exploit) because this function does not "waste"
+We use this specific implementation (and not simply run self.exploit) because this
+function does not "waste"
 a vulnerable URL. Namely, we're not actually exploiting, merely checking using a heuristic.
 :param url: Drupal's URL and port
 :return: Vulnerable URL if exploitable, otherwise False
@@ -117,7 +121,8 @@ class DrupalExploiter(WebRCE):

 def get_target_url(self):
 """
-We're overriding this method such that every time self.exploit is invoked, we use a fresh vulnerable URL.
+We're overriding this method such that every time self.exploit is invoked, we use a fresh
+vulnerable URL.
 Reusing the same URL eliminates its exploitability because of caching reasons :)
 :return: vulnerable URL to exploit
 """
@@ -128,13 +133,15 @@ class DrupalExploiter(WebRCE):
 For the Drupal exploit, 5 distinct URLs are needed to perform the full attack.
 :return: Whether the list of vulnerable URLs has at least 5 elements.
 """
-# We need 5 URLs for a "full-chain": check remote files, check architecture, drop monkey, chmod it and run it.
+# We need 5 URLs for a "full-chain": check remote files, check architecture, drop monkey,
+# chmod it and run it.
 num_urls_needed_for_full_exploit = 5
 num_available_urls = len(self.vulnerable_urls)
 result = num_available_urls >= num_urls_needed_for_full_exploit
 if not result:
 LOG.info(
-f"{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a Drupal server "
+f"{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a "
+f"Drupal server "
 f"but only {num_available_urls} found"
 )
 return result
@@ -1,6 +1,7 @@
 """
 Implementation is based on elastic search groovy exploit by metasploit
-https://github.com/rapid7/metasploit-framework/blob/12198a088132f047e0a86724bc5ebba92a73ac66/modules/exploits/multi/elasticsearch/search_groovy_script.rb
+https://github.com/rapid7/metasploit-framework/blob/12198a088132f047e0a86724bc5ebba92a73ac66
+/modules/exploits/multi/elasticsearch/search_groovy_script.rb
 Max vulnerable elasticsearch version is "1.4.2"
 """

@@ -35,9 +36,9 @@ class ElasticGroovyExploiter(WebRCE):
 GENERIC_QUERY = (
 """{"size":1, "script_fields":{"%s": {"script": "%%s"}}}""" % MONKEY_RESULT_FIELD
 )
-JAVA_CMD = (
-GENERIC_QUERY
-% """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(\\"%s\\").getText()"""
+JAVA_CMD = GENERIC_QUERY % (
+"""java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec("""
+"""\\"%s\\").getText()"""
 )

 _TARGET_OS_TYPE = ["linux", "windows"]
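
Note: the JAVA_CMD hunk above moves the % operator next to GENERIC_QUERY and builds the right-hand operand from two adjacent triple-quoted literals. A simplified sketch (toy template and payload, not the real query) of why the resulting string is unchanged: the parenthesized literals are merged into one operand before % is applied.

TEMPLATE = '{"script": "%s"}'

old = TEMPLATE % """exec(\\"%s\\").getText()"""
new = TEMPLATE % (
    """exec("""
    """\\"%s\\").getText()"""
)
assert old == new
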
@@ -57,7 +58,8 @@ class ElasticGroovyExploiter(WebRCE):
 return exploit_config

 def get_open_service_ports(self, port_list, names):
-# We must append elastic port we get from elastic fingerprint module because It's not marked as 'http' service
+# We must append elastic port we get from elastic fingerprint module because It's not
+# marked as 'http' service
 valid_ports = super(ElasticGroovyExploiter, self).get_open_service_ports(port_list, names)
 if ES_SERVICE in self.host.services:
 valid_ports.append([ES_PORT, False])
@@ -70,7 +72,8 @@ class ElasticGroovyExploiter(WebRCE):
 response = requests.get(url, data=payload, timeout=DOWNLOAD_TIMEOUT)
 except requests.ReadTimeout:
 LOG.error(
-"Elastic couldn't upload monkey, because server didn't respond to upload request."
+"Elastic couldn't upload monkey, because server didn't respond to upload "
+"request."
 )
 return False
 result = self.get_results(response)
@@ -1,6 +1,7 @@
 """
 Remote code execution on HADOOP server with YARN and default settings
-Implementation is based on code from https://github.com/vulhub/vulhub/tree/master/hadoop/unauthorized-yarn
+Implementation is based on code from
+https://github.com/vulhub/vulhub/tree/master/hadoop/unauthorized-yarn
 """

 import json
@@ -56,7 +56,8 @@ class MSSQLExploiter(HostExploiter):
 def _exploit_host(self):
 """
 First this method brute forces to get the mssql connection (cursor).
-Also, don't forget to start_monkey_server() before self.upload_monkey() and self.stop_monkey_server() after
+Also, don't forget to start_monkey_server() before self.upload_monkey() and
+self.stop_monkey_server() after
 """
 # Brute force to get connection
 username_passwords_pairs_list = self._config.get_exploit_user_password_pairs()
@@ -181,10 +182,12 @@ class MSSQLExploiter(HostExploiter):
 Args:
 host (str): Host ip address
 port (str): Tcp port that the host listens to
-users_passwords_pairs_list (list): a list of users and passwords pairs to bruteforce with
+users_passwords_pairs_list (list): a list of users and passwords pairs to bruteforce
+with

 Return:
-True or False depends if the whole bruteforce and attack process was completed successfully or not
+True or False depends if the whole bruteforce and attack process was completed
+successfully or not
 """
 # Main loop
 # Iterates on users list
@@ -196,9 +199,8 @@ class MSSQLExploiter(HostExploiter):
 host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT
 )
 LOG.info(
-"Successfully connected to host: {0}, using user: {1}, password (SHA-512): {2}".format(
-host, user, self._config.hash_sensitive_data(password)
-)
+"Successfully connected to host: {0}, using user: {1}, password ("
+"SHA-512): {2}".format(host, user, self._config.hash_sensitive_data(password))
 )
 self.add_vuln_port(MSSQLExploiter.SQL_DEFAULT_TCP_PORT)
 self.report_login_attempt(True, user, password)
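
Note: in the LOG.info() hunk above, .format() is attached to the second of two adjacent literals yet formats the whole message, because adjacent literals are merged into a single string before the method call is applied. A toy check with dummy values (the real code passes the host, the user and a hashed password):

host, user, hashed = "192.0.2.2", "sa", "abc123"

old = "Successfully connected to host: {0}, using user: {1}, password (SHA-512): {2}".format(
    host, user, hashed
)
new = (
    "Successfully connected to host: {0}, using user: {1}, password ("
    "SHA-512): {2}".format(host, user, hashed)
)
assert old == new
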
@@ -54,7 +54,8 @@ LOG = logging.getLogger(__name__)

 class SambaCryExploiter(HostExploiter):
 """
-SambaCry exploit module, partially based on the following implementation by CORE Security Technologies' impacket:
+SambaCry exploit module, partially based on the following implementation by CORE Security
+Technologies' impacket:
 https://github.com/CoreSecurity/impacket/blob/master/examples/sambaPipe.py
 """

@@ -372,7 +373,8 @@ class SambaCryExploiter(HostExploiter):
 # the extra / on the beginning is required for the vulnerability
 self.open_pipe(smb_client, "/" + module_path)
 except Exception as e:
-# This is the expected result. We can't tell whether we succeeded or not just by this error code.
+# This is the expected result. We can't tell whether we succeeded or not just by this
+# error code.
 if str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0:
 return True
 else:
@ -450,7 +452,8 @@ class SambaCryExploiter(HostExploiter):
|
||||||
)
|
)
|
||||||
return smb_client
|
return smb_client
|
||||||
|
|
||||||
# Following are slightly modified SMB functions from impacket to fit our needs of the vulnerability #
|
# Following are slightly modified SMB functions from impacket to fit our needs of the
|
||||||
|
# vulnerability #
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def create_smb(
|
def create_smb(
|
||||||
smb_client,
|
smb_client,
|
||||||
|
@ -513,7 +516,8 @@ class SambaCryExploiter(HostExploiter):
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def open_pipe(smb_client, pathName):
|
def open_pipe(smb_client, pathName):
|
||||||
# We need to overwrite Impacket's openFile functions since they automatically convert paths to NT style
|
# We need to overwrite Impacket's openFile functions since they automatically convert
|
||||||
|
# paths to NT style
|
||||||
# to make things easier for the caller. Not this time ;)
|
# to make things easier for the caller. Not this time ;)
|
||||||
treeId = smb_client.connectTree("IPC$")
|
treeId = smb_client.connectTree("IPC$")
|
||||||
LOG.debug("Triggering path: %s" % pathName)
|
LOG.debug("Triggering path: %s" % pathName)
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
# Implementation is based on shellshock script provided https://github.com/nccgroup/shocker/blob/master/shocker.py
|
# Implementation is based on shellshock script provided
|
||||||
|
# https://github.com/nccgroup/shocker/blob/master/shocker.py
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import string
|
import string
|
||||||
|
@ -113,7 +114,8 @@ class ShellShockExploiter(HostExploiter):
|
||||||
self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)
|
self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)
|
||||||
):
|
):
|
||||||
LOG.info(
|
LOG.info(
|
||||||
"Host %s was already infected under the current configuration, done" % self.host
|
"Host %s was already infected under the current configuration, "
|
||||||
|
"done" % self.host
|
||||||
)
|
)
|
||||||
return True # return already infected
|
return True # return already infected
|
||||||
|
|
||||||
|
@ -270,7 +272,8 @@ class ShellShockExploiter(HostExploiter):
|
||||||
break
|
break
|
||||||
if timeout:
|
if timeout:
|
||||||
LOG.debug(
|
LOG.debug(
|
||||||
"Some connections timed out while sending request to potentially vulnerable urls."
|
"Some connections timed out while sending request to potentially vulnerable "
|
||||||
|
"urls."
|
||||||
)
|
)
|
||||||
valid_resps = [req for req in reqs if req and req.status_code == requests.codes.ok]
|
valid_resps = [req for req in reqs if req and req.status_code == requests.codes.ok]
|
||||||
urls = [resp.url for resp in valid_resps]
|
urls = [resp.url for resp in valid_resps]
|
||||||
|
|
|
@@ -75,7 +75,8 @@ class SmbExploiter(HostExploiter):

 if remote_full_path is not None:
 LOG.debug(
-"Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)",
+"Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) "
+"%s : (SHA-512) %s)",
 self.host,
 user,
 self._config.hash_sensitive_data(password),
@@ -99,7 +100,8 @@ class SmbExploiter(HostExploiter):
 except Exception as exc:
 LOG.debug(
 "Exception when trying to copy file using SMB to %r with user:"
-" %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s: (%s)",
+" %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash ("
+"SHA-512): %s: (%s)",
 self.host,
 user,
 self._config.hash_sensitive_data(password),

@@ -157,7 +157,8 @@ class SSHExploiter(HostExploiter):
 if stdout_res:
 # file exists
 LOG.info(
-"Host %s was already infected under the current configuration, done" % self.host
+"Host %s was already infected under the current configuration, "
+"done" % self.host
 )
 return True  # return already infected

@@ -35,7 +35,8 @@ class Struts2Exploiter(WebRCE):
 def build_potential_urls(self, ports, extensions=None):
 """
 We need to override this method to get redirected url's
-:param ports: Array of ports. One port is described as size 2 array: [port.no(int), isHTTPS?(bool)]
+:param ports: Array of ports. One port is described as size 2 array: [port.no(int),
+isHTTPS?(bool)]
 Eg. ports: [[80, False], [443, True]]
 :param extensions: What subdirectories to scan. www.domain.com[/extension]
 :return: Array of url's to try and attack

@@ -29,7 +29,8 @@ def get_target_monkey(host):

 if not monkey_path:
 if host.os.get("type") == platform.system().lower():
-# if exe not found, and we have the same arch or arch is unknown and we are 32bit, use our exe
+# if exe not found, and we have the same arch or arch is unknown and we are 32bit,
+# use our exe
 if (not host.os.get("machine") and sys.maxsize < 2 ** 32) or host.os.get(
 "machine", ""
 ).lower() == platform.machine().lower():

@@ -17,7 +17,8 @@ class Payload(object):
 def get_payload(self, command=""):
 """
 Returns prefixed and suffixed command (payload)
-:param command: Command to suffix/prefix. If no command is passed than objects' property is used
+:param command: Command to suffix/prefix. If no command is passed than objects' property
+is used
 :return: prefixed and suffixed command (full payload)
 """
 if not command:
@@ -46,7 +47,8 @@ class LimitedSizePayload(Payload):
 def split_into_array_of_smaller_payloads(self):
 if self.is_suffix_and_prefix_too_long():
 raise Exception(
-"Can't split command into smaller sub-commands because commands' prefix and suffix already "
+"Can't split command into smaller sub-commands because commands' prefix and "
+"suffix already "
 "exceeds required length of command."
 )

@@ -36,7 +36,8 @@ class SmbTools(object):
 # skip guest users
 if smb.isGuestSession() > 0:
 LOG.debug(
-"Connection to %r granted guest privileges with user: %s, password (SHA-512): '%s',"
+"Connection to %r granted guest privileges with user: %s, password (SHA-512): "
+"'%s',"
 " LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
 host,
 username,
@@ -151,7 +152,7 @@ class SmbTools(object):
 return remote_full_path

 LOG.debug(
-"Remote monkey file is found but different, moving along with attack"
+"Remote monkey file is found but different, moving along with " "attack"
 )
 except Exception:
 pass  # file isn't found on remote victim, moving on
@@ -197,7 +198,8 @@ class SmbTools(object):
 if not file_uploaded:
 LOG.debug(
 "Couldn't find a writable share for exploiting victim %r with "
-"username: %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
+"username: %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash ("
+"SHA-512): %s",
 host,
 username,
 Configuration.hash_sensitive_data(password),

@@ -1,6 +1,7 @@
 """
 Implementation is based on VSFTPD v2.3.4 Backdoor Command Execution exploit by metasploit
-https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/unix/ftp/vsftpd_234_backdoor.rb
+https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/unix/ftp
+/vsftpd_234_backdoor.rb
 only vulnerable version is "2.3.4"
 """

@@ -151,7 +152,8 @@ class VSFTPDExploiter(HostExploiter):
 }

 # Set unlimited to memory
-# we don't have to revert the ulimit because it just applies to the shell obtained by our exploit
+# we don't have to revert the ulimit because it just applies to the shell obtained by our
+# exploit
 run_monkey = ULIMIT_V + UNLIMITED + run_monkey
 run_monkey = str.encode(str(run_monkey) + "\n")
 time.sleep(FTP_TIME_BUFFER)

@@ -69,21 +69,27 @@ class WebRCE(HostExploiter):
 """
 exploit_config = {}

-# dropper: If true monkey will use dropper parameter that will detach monkey's process and try to copy
+# dropper: If true monkey will use dropper parameter that will detach monkey's process
+# and try to copy
 # it's file to the default destination path.
 exploit_config["dropper"] = False

-# upload_commands: Unformatted dict with one or two commands {'linux': WGET_HTTP_UPLOAD,'windows': WIN_CMD}
-# Command must have "monkey_path" and "http_path" format parameters. If None defaults will be used.
+# upload_commands: Unformatted dict with one or two commands {'linux': WGET_HTTP_UPLOAD,
+# 'windows': WIN_CMD}
+# Command must have "monkey_path" and "http_path" format parameters. If None defaults
+# will be used.
 exploit_config["upload_commands"] = None

-# url_extensions: What subdirectories to scan (www.domain.com[/extension]). Eg. ["home", "index.php"]
+# url_extensions: What subdirectories to scan (www.domain.com[/extension]). Eg. ["home",
+# "index.php"]
 exploit_config["url_extensions"] = []

-# stop_checking_urls: If true it will stop checking vulnerable urls once one was found vulnerable.
+# stop_checking_urls: If true it will stop checking vulnerable urls once one was found
+# vulnerable.
 exploit_config["stop_checking_urls"] = False

-# blind_exploit: If true we won't check if file exist and won't try to get the architecture of target.
+# blind_exploit: If true we won't check if file exist and won't try to get the
+# architecture of target.
 exploit_config["blind_exploit"] = False

 return exploit_config
@@ -200,7 +206,8 @@ class WebRCE(HostExploiter):
 except KeyError:
 LOG.error(
 "Provided command is missing/bad for this type of host! "
-"Check upload_monkey function docs before using custom monkey's upload commands."
+"Check upload_monkey function docs before using custom monkey's upload "
+"commands."
 )
 return False
 return command
@@ -225,8 +232,10 @@ class WebRCE(HostExploiter):

 def build_potential_urls(self, ports, extensions=None):
 """
-Build all possibly-vulnerable URLs on a specific host, based on the relevant ports and extensions.
-:param ports: Array of ports. One port is described as size 2 array: [port.no(int), isHTTPS?(bool)]
+Build all possibly-vulnerable URLs on a specific host, based on the relevant ports and
+extensions.
+:param ports: Array of ports. One port is described as size 2 array: [port.no(int),
+isHTTPS?(bool)]
 Eg. ports: [[80, False], [443, True]]
 :param extensions: What subdirectories to scan. www.domain.com[/extension]
 :return: Array of url's to try and attack
@@ -253,7 +262,8 @@ class WebRCE(HostExploiter):
 """
 Gets vulnerable url(s) from url list
 :param urls: Potentially vulnerable urls
-:param stop_checking: If we want to continue checking for vulnerable url even though one is found (bool)
+:param stop_checking: If we want to continue checking for vulnerable url even though one
+is found (bool)
 :return: None (we append to class variable vulnerable_urls)
 """
 for url in urls:
@@ -330,7 +340,8 @@ class WebRCE(HostExploiter):
 Get ports wrapped with log
 :param ports: Potential ports to exploit. For example WormConfiguration.HTTP_PORTS
 :param names: [] of service names. Example: ["http"]
-:return: Array of ports: [[80, False], [443, True]] or False. Port always consists of [ port.nr, IsHTTPS?]
+:return: Array of ports: [[80, False], [443, True]] or False. Port always consists of [
+port.nr, IsHTTPS?]
 """
 ports = self.get_open_service_ports(ports, names)
 if not ports:
@@ -350,7 +361,8 @@ class WebRCE(HostExploiter):

 def run_backup_commands(self, resp, url, dest_path, http_path):
 """
-If you need multiple commands for the same os you can override this method to add backup commands
+If you need multiple commands for the same os you can override this method to add backup
+commands
 :param resp: Response from base command
 :param url: Vulnerable url
 :param dest_path: Where to upload monkey
@@ -370,7 +382,8 @@ class WebRCE(HostExploiter):
 def upload_monkey(self, url, commands=None):
 """
 :param url: Where exploiter should send it's request
-:param commands: Unformatted dict with one or two commands {'linux': LIN_CMD, 'windows': WIN_CMD}
+:param commands: Unformatted dict with one or two commands {'linux': LIN_CMD, 'windows':
+WIN_CMD}
 Command must have "monkey_path" and "http_path" format parameters.
 :return: {'response': response/False, 'path': monkeys_path_in_host}
 """
@@ -435,7 +448,7 @@ class WebRCE(HostExploiter):
 return False
 elif "No such file or directory" in resp:
 LOG.error(
-"Could not change permission because monkey was not found. Check path parameter."
+"Could not change permission because monkey was not found. Check path " "parameter."
 )
 return False
 LOG.info("Permission change finished")
@@ -499,7 +512,8 @@ class WebRCE(HostExploiter):
 def get_monkey_upload_path(self, url_to_monkey):
 """
 Gets destination path from one of WEB_RCE predetermined paths(self.monkey_target_paths).
-:param url_to_monkey: Hosted monkey's url. egz : http://localserver:9999/monkey/windows-32.exe
+:param url_to_monkey: Hosted monkey's url. egz :
+http://localserver:9999/monkey/windows-32.exe
 :return: Corresponding monkey path from self.monkey_target_paths
 """
 if not url_to_monkey or ("linux" not in url_to_monkey and "windows" not in url_to_monkey):
@@ -522,7 +536,8 @@ class WebRCE(HostExploiter):
 return False
 except KeyError:
 LOG.error(
-'Unknown key was found. Please use "linux", "win32" and "win64" keys to initialize '
+'Unknown key was found. Please use "linux", "win32" and "win64" keys to '
+"initialize "
 "custom dict of monkey's destination paths"
 )
 return False
@@ -577,8 +592,10 @@ class WebRCE(HostExploiter):

 def are_vulnerable_urls_sufficient(self):
 """
-Determine whether the number of vulnerable URLs is sufficient in order to perform the full attack.
-Often, a single URL will suffice. However, in some cases (e.g. the Drupal exploit) a vulnerable URL is for
+Determine whether the number of vulnerable URLs is sufficient in order to perform the
+full attack.
+Often, a single URL will suffice. However, in some cases (e.g. the Drupal exploit) a
+vulnerable URL is for
 single use, thus we need a couple of them.
 :return: Whether or not a full attack can be performed using the available vulnerable URLs.
 """

@@ -160,7 +160,8 @@ class WebLogic201710271(WebRCE):
 :param command: command itself
 :return: Formatted payload
 """
-empty_payload = """<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
+empty_payload = """<soapenv:Envelope
+xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
 <soapenv:Header>
 <work:WorkContext xmlns:work="http://bea.com/2004/06/soap/workarea/">
 <java>
@@ -195,7 +196,8 @@ class WebLogic201710271(WebRCE):
 :param port: Server's port
 :return: Formatted payload
 """
-generic_check_payload = """<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
+generic_check_payload = """<soapenv:Envelope
+xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
 <soapenv:Header>
 <work:WorkContext xmlns:work="http://bea.com/2004/06/soap/workarea/">
 <java version="1.8" class="java.beans.XMLDecoder">
@@ -272,7 +274,8 @@ class WebLogic20192725(WebRCE):
 return exploit_config

 def execute_remote_monkey(self, url, path, dropper=False):
-# Without delay exploiter tries to launch monkey file that is still finishing up after downloading.
+# Without delay exploiter tries to launch monkey file that is still finishing up after
+# downloading.
 time.sleep(WebLogic20192725.DELAY_BEFORE_EXPLOITING_SECONDS)
 super(WebLogic20192725, self).execute_remote_monkey(url, path, dropper)

@@ -307,7 +310,8 @@ class WebLogic20192725(WebRCE):
 """
 empty_payload = """
 <soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\"
-xmlns:wsa=\"http://www.w3.org/2005/08/addressing\" xmlns:asy=\"http://www.bea.com/async/AsyncResponseService\">
+xmlns:wsa=\"http://www.w3.org/2005/08/addressing\"
+xmlns:asy=\"http://www.bea.com/async/AsyncResponseService\">
 <soapenv:Header>
 <wsa:Action>xx</wsa:Action>
 <wsa:RelatesTo>xx</wsa:RelatesTo>

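The SOAP payloads above are built from triple-quoted strings, so wrapping the long <soapenv:Envelope ...> start tag puts a newline between the tag name and its first attribute inside the payload itself. That stays well-formed, because XML allows arbitrary whitespace between a tag name and its attributes. A small sketch of the equivalence (the snippet is illustrative and not taken from the exploiter):

import xml.dom.minidom

one_line = '<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/"/>'
wrapped = """<soapenv:Envelope
xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/"/>"""

# Both forms parse to the same element; the extra newline is just whitespace
# inside the start tag and disappears after parsing.
a = xml.dom.minidom.parseString(one_line).documentElement
b = xml.dom.minidom.parseString(wrapped).documentElement
assert a.toxml() == b.toxml()  # identical DOM, despite the line break in the source
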
@@ -142,7 +142,8 @@ class WmiExploiter(HostExploiter):
 success = True
 else:
 LOG.debug(
-"Error executing dropper '%s' on remote victim %r (pid=%d, exit_code=%d, cmdline=%r)",
+"Error executing dropper '%s' on remote victim %r (pid=%d, exit_code=%d, "
+"cmdline=%r)",
 remote_full_path,
 self.host,
 result.ProcessId,

@@ -1,6 +1,7 @@
 """
 Zerologon, CVE-2020-1472
-Implementation based on https://github.com/dirkjanm/CVE-2020-1472/ and https://github.com/risksense/zerologon/.
+Implementation based on https://github.com/dirkjanm/CVE-2020-1472/ and
+https://github.com/risksense/zerologon/.
 """

 import logging
@@ -54,7 +55,8 @@ class ZerologonExploiter(HostExploiter):

 else:
 LOG.info(
-"Exploit not attempted. Target is most likely patched, or an error was encountered."
+"Exploit not attempted. Target is most likely patched, or an error was "
+"encountered."
 )
 return False

@@ -131,7 +133,8 @@ class ZerologonExploiter(HostExploiter):
 self.report_login_attempt(result=False, user=self.dc_name)
 _exploited = False
 LOG.info(
-f"Non-zero return code: {exploit_attempt_result['ErrorCode']}. Something went wrong."
+f"Non-zero return code: {exploit_attempt_result['ErrorCode']}. Something "
+f"went wrong."
 )
 return _exploited

@@ -194,7 +197,8 @@ class ZerologonExploiter(HostExploiter):
 def get_all_user_creds(self) -> List[Tuple[str, Dict]]:
 try:
 options = OptionsForSecretsdump(
-target=f"{self.dc_name}$@{self.dc_ip}",  # format for DC account - "NetBIOSName$@0.0.0.0"
+# format for DC account - "NetBIOSName$@0.0.0.0"
+target=f"{self.dc_name}$@{self.dc_ip}",
 target_ip=self.dc_ip,
 dc_ip=self.dc_ip,
 )
@@ -221,7 +225,8 @@ class ZerologonExploiter(HostExploiter):

 except Exception as e:
 LOG.info(
-f"Exception occurred while dumping secrets to get some username and its password's NT hash: {str(e)}"
+f"Exception occurred while dumping secrets to get some username and its "
+f"password's NT hash: {str(e)}"
 )

 return None
@@ -310,7 +315,8 @@ class ZerologonExploiter(HostExploiter):

 except Exception as e:
 LOG.info(
-f"Exception occurred while dumping secrets to get original DC password's NT hash: {str(e)}"
+f"Exception occurred while dumping secrets to get original DC password's NT "
+f"hash: {str(e)}"
 )

 finally:
@@ -339,7 +345,8 @@ class ZerologonExploiter(HostExploiter):
 + "reg save HKLM\\SECURITY security.save"
 )

-# Get HKLM keys locally (can't run these together because it needs to call do_get()).
+# Get HKLM keys locally (can't run these together because it needs to call
+# do_get()).
 remote_shell.onecmd("get system.save")
 remote_shell.onecmd("get sam.save")
 remote_shell.onecmd("get security.save")

@@ -132,8 +132,10 @@ class DumpSecrets:
 self.connect()
 except Exception as e:
 if os.getenv("KRB5CCNAME") is not None and self.__do_kerberos is True:
-# SMBConnection failed. That might be because there was no way to log into the
-# target system. We just have a last resort. Hope we have tickets cached and that they
+# SMBConnection failed. That might be because there was no way to
+# log into the
+# target system. We just have a last resort. Hope we have tickets
+# cached and that they
 # will work
 LOG.debug(
 "SMBConnection didn't work, hoping Kerberos will help (%s)"
@@ -162,11 +164,13 @@ class DumpSecrets:
 and os.getenv("KRB5CCNAME") is not None
 and self.__do_kerberos is True
 ):
-# Giving some hints here when SPN target name validation is set to something different to Off.
-# This will prevent establishing SMB connections using TGS for SPNs different to cifs/.
+# Giving some hints here when SPN target name validation is set to
+# something different to Off.
+# This will prevent establishing SMB connections using TGS for SPNs
+# different to cifs/.
 LOG.error(
-"Policy SPN target name validation might be restricting full DRSUAPI dump."
-+ "Try -just-dc-user"
+"Policy SPN target name validation might be restricting full "
+"DRSUAPI dump." + "Try -just-dc-user"
 )
 else:
 LOG.error("RemoteOperations failed: %s" % str(e))
@@ -208,7 +212,8 @@ class DumpSecrets:
 LOG.debug(traceback.print_exc())
 LOG.error("LSA hashes extraction failed: %s" % str(e))

-# NTDS Extraction we can try regardless of RemoteOperations failing. It might still work.
+# NTDS Extraction we can try regardless of RemoteOperations failing. It might
+# still work.
 if self.__is_remote is True:
 if self.__use_VSS_method and self.__remote_ops is not None:
 NTDS_file_name = self.__remote_ops.saveNTDS()
@@ -231,7 +236,8 @@ class DumpSecrets:
 except Exception as e:
 LOG.debug(traceback.print_exc())
 if str(e).find("ERROR_DS_DRA_BAD_DN") >= 0:
-# We don't store the resume file if this error happened, since this error is related to lack
+# We don't store the resume file if this error happened, since this error
+# is related to lack
 # of enough privileges to access DRSUAPI.
 resume_file = self.__NTDS_hashes.getResumeSessionFile()
 if resume_file is not None:
@@ -239,7 +245,8 @@ class DumpSecrets:
 LOG.error(e)
 if self.__use_VSS_method is False:
 LOG.error(
-"Something wen't wrong with the DRSUAPI approach. Try again with -use-vss parameter"
+"Something wen't wrong with the DRSUAPI approach. Try again with "
+"-use-vss parameter"
 )
 self.cleanup()
 except (Exception, KeyboardInterrupt) as e:

@@ -35,9 +35,11 @@ class OptionsForSecretsdump:
 target=None,
 target_ip=None,
 ):
-# dc_ip is assigned in get_original_pwd_nthash() and get_admin_pwd_hashes() in ../zerologon.py
+# dc_ip is assigned in get_original_pwd_nthash() and get_admin_pwd_hashes() in
+# ../zerologon.py
 self.dc_ip = dc_ip
-# just_dc becomes False, and sam, security, and system are assigned in get_original_pwd_nthash() in ../zerologon.py
+# just_dc becomes False, and sam, security, and system are assigned in
+# get_original_pwd_nthash() in ../zerologon.py
 self.just_dc = just_dc
 self.sam = sam
 self.security = security

@@ -134,8 +134,10 @@ class RemoteShell(cmd.Cmd):
 self.__outputBuffer += data.decode(self.CODEC)
 except UnicodeDecodeError:
 LOG.error(
-"Decoding error detected, consider running chcp.com at the target,\nmap the result with "
-"https://docs.python.org/3/library/codecs.html#standard-encodings\nand then execute wmiexec.py "
+"Decoding error detected, consider running chcp.com at the target,"
+"\nmap the result with "
+"https://docs.python.org/3/library/codecs.html#standard-encodings\nand "
+"then execute wmiexec.py "
 "again with -codec and the corresponding codec"
 )
 self.__outputBuffer += data.decode(self.CODEC, errors="replace")

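The handler above tries a strict decode first and only falls back to errors="replace" when the remote code page does not match the configured codec, so undecodable bytes become U+FFFD instead of breaking the remote shell. A minimal sketch of that fallback (the sample bytes are illustrative):

# Strict decode first; on failure, fall back to a lossy decode so the remote
# shell output can still be appended to the buffer.
data = b"caf\xe9"  # latin-1 bytes arriving where UTF-8 is expected
try:
    text = data.decode("utf-8")
except UnicodeDecodeError:
    text = data.decode("utf-8", errors="replace")
assert text == "caf\ufffd"  # the undecodable byte is replaced, the rest is kept
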
@@ -26,7 +26,8 @@ LOG_CONFIG = {
 "disable_existing_loggers": False,
 "formatters": {
 "standard": {
-"format": "%(asctime)s [%(process)d:%(thread)d:%(levelname)s] %(module)s.%(funcName)s.%(lineno)d: %(message)s"
+"format": "%(asctime)s [%(process)d:%(thread)d:%(levelname)s] %(module)s.%("
+"funcName)s.%(lineno)d: %(message)s"
 },
 },
 "handlers": {
@@ -71,8 +72,8 @@ def main():
 print("Error loading config: %s, using default" % (e,))
 else:
 print(
-"Config file wasn't supplied and default path: %s wasn't found, using internal default"
-% (config_file,)
+"Config file wasn't supplied and default path: %s wasn't found, using internal "
+"default" % (config_file,)
 )

 print(
@@ -104,7 +105,8 @@ def main():

 if WormConfiguration.use_file_logging:
 if os.path.exists(log_path):
-# If log exists but can't be removed it means other monkey is running. This usually happens on upgrade
+# If log exists but can't be removed it means other monkey is running. This usually
+# happens on upgrade
 # from 32bit to 64bit monkey on Windows. In all cases this shouldn't be a problem.
 try:
 os.remove(log_path)

@@ -227,7 +227,8 @@ class InfectionMonkey(object):
 host_exploited = True
 VictimHostTelem("T1210", ScanStatus.USED, machine=machine).send()
 if exploiter.RUNS_AGENT_ON_SUCCESS:
-break  # if adding machine to exploited, won't try other exploits on it
+break  # if adding machine to exploited, won't try other exploits
+# on it
 if not host_exploited:
 self._fail_exploitation_machines.add(machine)
 VictimHostTelem("T1210", ScanStatus.SCANNED, machine=machine).send()
@@ -244,7 +245,8 @@ class InfectionMonkey(object):
 elif not WormConfiguration.alive:
 LOG.info("Marked not alive from configuration")

-# if host was exploited, before continue to closing the tunnel ensure the exploited host had its chance to
+# if host was exploited, before continue to closing the tunnel ensure the exploited
+# host had its chance to
 # connect to the tunnel
 if len(self._exploited_machines) > 0:
 time_to_sleep = WormConfiguration.keep_tunnel_open_time
@@ -261,7 +263,8 @@ class InfectionMonkey(object):

 except PlannedShutdownException:
 LOG.info(
-"A planned shutdown of the Monkey occurred. Logging the reason and finishing execution."
+"A planned shutdown of the Monkey occurred. Logging the reason and finishing "
+"execution."
 )
 LOG.exception("Planned shutdown, reason:")

@@ -88,7 +88,8 @@ else:
 continue
 try:
 ifreq = ioctl(s, SIOCGIFADDR, struct.pack("16s16x", iff))
-except IOError:  # interface is present in routing tables but does not have any assigned IP
+except IOError:  # interface is present in routing tables but does not have any
+# assigned IP
 ifaddr = "0.0.0.0"
 else:
 addrfamily = struct.unpack("h", ifreq[16:18])[0]

@@ -49,22 +49,21 @@ class MSSQLFinger(HostFinger):
 data, server = sock.recvfrom(self.BUFFER_SIZE)
 except socket.timeout:
 LOG.info(
-"Socket timeout reached, maybe browser service on host: {0} doesnt exist".format(
-host
-)
+"Socket timeout reached, maybe browser service on host: {0} doesnt "
+"exist".format(host)
 )
 sock.close()
 return False
 except socket.error as e:
 if e.errno == errno.ECONNRESET:
 LOG.info(
-"Connection was forcibly closed by the remote host. The host: {0} is rejecting the packet.".format(
-host
-)
+"Connection was forcibly closed by the remote host. The host: {0} is "
+"rejecting the packet.".format(host)
 )
 else:
 LOG.error(
-"An unknown socket error occurred while trying the mssql fingerprint, closing socket.",
+"An unknown socket error occurred while trying the mssql fingerprint, "
+"closing socket.",
 exc_info=True,
 )
 sock.close()
@@ -82,7 +81,8 @@ class MSSQLFinger(HostFinger):
 if len(instance_info) > 1:
 host.services[self._SCANNED_SERVICE][instance_info[1]] = {}
 for i in range(1, len(instance_info), 2):
-# Each instance's info is nested under its own name, if there are multiple instances
+# Each instance's info is nested under its own name, if there are multiple
+# instances
 # each will appear under its own name
 host.services[self._SCANNED_SERVICE][instance_info[1]][
 instance_info[i - 1]

@@ -44,9 +44,11 @@ class NetworkScanner(object):
 def _get_inaccessible_subnets_ips(self):
 """
 For each of the machine's IPs, checks if it's in one of the subnets specified in the
-'inaccessible_subnets' config value. If so, all other subnets in the config value shouldn't be accessible.
+'inaccessible_subnets' config value. If so, all other subnets in the config value
+shouldn't be accessible.
 All these subnets are returned.
-:return: A list of subnets that shouldn't be accessible from the machine the monkey is running on.
+:return: A list of subnets that shouldn't be accessible from the machine the monkey is
+running on.
 """
 subnets_to_scan = []
 if len(WormConfiguration.inaccessible_subnets) > 1:
@@ -54,7 +56,8 @@ class NetworkScanner(object):
 if NetworkScanner._is_any_ip_in_subnet(
 [str(x) for x in self._ip_addresses], subnet_str
 ):
-# If machine has IPs from 2 different subnets in the same group, there's no point checking the other
+# If machine has IPs from 2 different subnets in the same group, there's no
+# point checking the other
 # subnet.
 for other_subnet_str in WormConfiguration.inaccessible_subnets:
 if other_subnet_str == subnet_str:
@@ -74,9 +77,12 @@ class NetworkScanner(object):
 :param stop_callback: A callback to check at any point if we should stop scanning
 :return: yields a sequence of VictimHost instances
 """
-# We currently use the ITERATION_BLOCK_SIZE as the pool size, however, this may not be the best decision
-# However, the decision what ITERATION_BLOCK_SIZE also requires balancing network usage (pps and bw)
-# Because we are using this to spread out IO heavy tasks, we can probably go a lot higher than CPU core size
+# We currently use the ITERATION_BLOCK_SIZE as the pool size, however, this may not be
+# the best decision
+# However, the decision what ITERATION_BLOCK_SIZE also requires balancing network usage (
+# pps and bw)
+# Because we are using this to spread out IO heavy tasks, we can probably go a lot higher
+# than CPU core size
 # But again, balance
 pool = Pool(ITERATION_BLOCK_SIZE)
 victim_generator = VictimHostGenerator(

@@ -59,7 +59,8 @@ class PingScanner(HostScanner, HostFinger):
 ttl = int(regex_result.group(0))
 if ttl <= LINUX_TTL:
 host.os["type"] = "linux"
-else:  # as far we we know, could also be OSX/BSD but lets handle that when it comes up.
+else:  # as far we we know, could also be OSX/BSD but lets handle that when it
+# comes up.
 host.os["type"] = "windows"

 host.icmp = True

@@ -51,12 +51,14 @@ class PostgreSQLFinger(HostFinger):
 self.init_service(host.services, self._SCANNED_SERVICE, self.POSTGRESQL_DEFAULT_PORT)
 host.services[self._SCANNED_SERVICE]["communication_encryption_details"] = (
 "The PostgreSQL server was unexpectedly accessible with the credentials - "
-+ f"user: '{self.CREDS['username']}' and password: '{self.CREDS['password']}'. Is this a honeypot?"
++ f"user: '{self.CREDS['username']}' and password: '"
+f"{self.CREDS['password']}'. Is this a honeypot?"
 )
 return True

 except psycopg2.OperationalError as ex:
-# try block will throw an OperationalError since the credentials are wrong, which we then analyze
+# try block will throw an OperationalError since the credentials are wrong, which we
+# then analyze
 try:
 exception_string = str(ex)

@@ -68,14 +68,16 @@ class SMBNegoFingerData(Packet):
 ("separator1", b"\x02"),
 (
 "dialect1",
-b"\x50\x43\x20\x4e\x45\x54\x57\x4f\x52\x4b\x20\x50\x52\x4f\x47\x52\x41\x4d\x20\x31\x2e\x30\x00",
+b"\x50\x43\x20\x4e\x45\x54\x57\x4f\x52\x4b\x20\x50\x52\x4f\x47\x52\x41\x4d"
+b"\x20\x31\x2e\x30\x00",
 ),
 ("separator2", b"\x02"),
 ("dialect2", b"\x4c\x41\x4e\x4d\x41\x4e\x31\x2e\x30\x00"),
 ("separator3", b"\x02"),
 (
 "dialect3",
-b"\x57\x69\x6e\x64\x6f\x77\x73\x20\x66\x6f\x72\x20\x57\x6f\x72\x6b\x67\x72\x6f\x75\x70\x73\x20\x33\x2e\x31\x61\x00",
+b"\x57\x69\x6e\x64\x6f\x77\x73\x20\x66\x6f\x72\x20\x57\x6f\x72\x6b\x67\x72"
+b"\x6f\x75\x70\x73\x20\x33\x2e\x31\x61\x00",
 ),
 ("separator4", b"\x02"),
 ("dialect4", b"\x4c\x4d\x31\x2e\x32\x58\x30\x30\x32\x00"),
@@ -104,12 +106,18 @@ class SMBSessionFingerData(Packet):
 ("bcc1", ""),
 (
 "Data",
-b"\x60\x48\x06\x06\x2b\x06\x01\x05\x05\x02\xa0\x3e\x30\x3c\xa0\x0e\x30\x0c\x06\x0a\x2b\x06\x01\x04\x01\x82\x37\x02"
-b"\x02\x0a\xa2\x2a\x04\x28\x4e\x54\x4c\x4d\x53\x53\x50\x00\x01\x00\x00\x00\x07\x82\x08\xa2\x00\x00\x00\x00\x00\x00"
-b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x01\x28\x0a\x00\x00\x00\x0f\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f"
-b"\x00\x77\x00\x73\x00\x20\x00\x32\x00\x30\x00\x30\x00\x32\x00\x20\x00\x53\x00\x65\x00\x72\x00\x76\x00\x69\x00\x63"
-b"\x00\x65\x00\x20\x00\x50\x00\x61\x00\x63\x00\x6b\x00\x20\x00\x33\x00\x20\x00\x32\x00\x36\x00\x30\x00\x30\x00\x00"
-b"\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f\x00\x77\x00\x73\x00\x20\x00\x32\x00\x30\x00\x30\x00\x32\x00\x20\x00\x35"
+b"\x60\x48\x06\x06\x2b\x06\x01\x05\x05\x02\xa0\x3e\x30\x3c\xa0\x0e\x30\x0c"
+b"\x06\x0a\x2b\x06\x01\x04\x01\x82\x37\x02"
+b"\x02\x0a\xa2\x2a\x04\x28\x4e\x54\x4c\x4d\x53\x53\x50\x00\x01\x00\x00\x00"
+b"\x07\x82\x08\xa2\x00\x00\x00\x00\x00\x00"
+b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x01\x28\x0a\x00\x00\x00\x0f"
+b"\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f"
+b"\x00\x77\x00\x73\x00\x20\x00\x32\x00\x30\x00\x30\x00\x32\x00\x20\x00\x53"
+b"\x00\x65\x00\x72\x00\x76\x00\x69\x00\x63"
+b"\x00\x65\x00\x20\x00\x50\x00\x61\x00\x63\x00\x6b\x00\x20\x00\x33\x00\x20"
+b"\x00\x32\x00\x36\x00\x30\x00\x30\x00\x00"
+b"\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f\x00\x77\x00\x73\x00\x20\x00\x32"
+b"\x00\x30\x00\x30\x00\x32\x00\x20\x00\x35"
 b"\x00\x2e\x00\x31\x00\x00\x00\x00\x00",
 ),
 ]

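The dialect and NTLMSSP blobs above are wrapped the same way as the log strings: adjacent bytes literals, like adjacent str literals, are concatenated at compile time, so the packet bytes on the wire are unchanged. A small check, using the first dialect string from the hunk above:

# Adjacent bytes literals are joined at compile time, so wrapping a long blob
# across lines leaves the resulting packet bytes identical.
single = (
    b"\x50\x43\x20\x4e\x45\x54\x57\x4f\x52\x4b\x20\x50\x52\x4f\x47\x52\x41\x4d\x20\x31\x2e\x30\x00"
)
wrapped = (
    b"\x50\x43\x20\x4e\x45\x54\x57\x4f\x52\x4b\x20\x50\x52\x4f\x47\x52\x41\x4d"
    b"\x20\x31\x2e\x30\x00"
)
assert single == wrapped == b"PC NETWORK PROGRAM 1.0\x00"
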
@@ -22,7 +22,8 @@ class TcpScanner(HostScanner, HostFinger):

 def get_host_fingerprint(self, host, only_one_port=False):
 """
-Scans a target host to see if it's alive using the tcp_target_ports specified in the configuration.
+Scans a target host to see if it's alive using the tcp_target_ports specified in the
+configuration.
 :param host: VictimHost structure
 :param only_one_port: Currently unused.
 :return: T/F if there is at least one open port.

@@ -129,7 +129,8 @@ def check_tcp_ports(ip, ports, timeout=DEFAULT_TIMEOUT, get_banner=False):
 possible_ports.append((port, sock))
 continue
 if err == 10035:  # WSAEWOULDBLOCK is valid, see
-# https://msdn.microsoft.com/en-us/library/windows/desktop/ms740668%28v=vs.85%29.aspx?f=255&MSPPError=-2147217396
+# https://msdn.microsoft.com/en-us/library/windows/desktop/ms740668%28v=vs.85%29
+# .aspx?f=255&MSPPError=-2147217396
 possible_ports.append((port, sock))
 continue
 if err == 115:  # EINPROGRESS 115 /* Operation now in progress */
@@ -164,7 +165,8 @@ def check_tcp_ports(ip, ports, timeout=DEFAULT_TIMEOUT, get_banner=False):
 readable_sockets, _, _ = select.select(
 [s[1] for s in connected_ports_sockets], [], [], 0
 )
-# read first BANNER_READ bytes. We ignore errors because service might not send a decodable byte string.
+# read first BANNER_READ bytes. We ignore errors because service might not send a
+# decodable byte string.
 banners = [
 sock.recv(BANNER_READ).decode(errors="ignore")
 if sock in readable_sockets
@@ -209,7 +211,8 @@ def _get_traceroute_bin_path():
 Its been built using the buildroot utility with the following settings:
 * Statically link to musl and all other required libs
 * Optimize for size
-This is done because not all linux distros come with traceroute out-of-the-box, and to ensure it behaves as expected
+This is done because not all linux distros come with traceroute out-of-the-box, and to ensure
+it behaves as expected

 :return: Path to traceroute executable
 """
@@ -223,7 +226,8 @@ def _parse_traceroute(output, regex, ttl):
 :param regex: Regex for finding an IP address
 :param ttl: Max TTL. Must be the same as the TTL used as param for traceroute.
 :return: List of ips which are the hops on the way to the traceroute destination.
-If a hop's IP wasn't found by traceroute, instead of an IP, the array will contain None
+If a hop's IP wasn't found by traceroute, instead of an IP, the array will
+contain None
 """
 ip_lines = output.split("\n")
 trace_list = []

@@ -27,7 +27,8 @@ logger = logging.getLogger(__name__)

 class CommunicateAsNewUser(PBA):
 """
-This PBA creates a new user, and then creates HTTPS requests as that user. This is used for a Zero Trust test of the
+This PBA creates a new user, and then creates HTTPS requests as that user. This is used for a
+Zero Trust test of the
 People pillar. See the relevant telemetry processing to see what findings are created.
 """

@@ -58,7 +59,8 @@ class CommunicateAsNewUser(PBA):
 def get_commandline_for_http_request(url, is_windows=is_windows_os()):
 if is_windows:
 format_string = (
-'powershell.exe -command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; '
+'powershell.exe -command "[Net.ServicePointManager]::SecurityProtocol = ['
+"Net.SecurityProtocolType]::Tls12; "
 'Invoke-WebRequest {url} -UseBasicParsing"'
 )
 else:

@@ -28,7 +28,8 @@ class SignedScriptProxyExecution(PBA):
 super().run()
 except Exception as e:
 LOG.warning(
-f"An exception occurred on running PBA {POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC}: {str(e)}"
+f"An exception occurred on running PBA "
+f"{POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC}: {str(e)}"
 )
 finally:
 cleanup_changes(original_comspec)

@@ -1,7 +1,9 @@
 SCHEDULED_TASK_NAME = "monkey-spawn-cmd"
 SCHEDULED_TASK_COMMAND = r"C:\windows\system32\cmd.exe"

-# Commands from: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1053.005/T1053.005.md
+# Commands from: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1053.005
+# /T1053.005.md


 def get_windows_commands_to_schedule_jobs():

@@ -16,7 +16,8 @@ __author__ = "VakarisZ"

 class PBA(Plugin):
 """
-Post breach action object. Can be extended to support more than command execution on target machine.
+Post breach action object. Can be extended to support more than command execution on target
+machine.
 """

 @staticmethod

@@ -1,11 +1,14 @@
 TEMP_FILE = "$HOME/monkey-temp-file"

-# Commands from https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1548.001/T1548.001.md
+# Commands from https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1548.001
+# /T1548.001.md


 def get_linux_commands_to_setuid_setgid():
 return [
-f"touch {TEMP_FILE} && chown root {TEMP_FILE} && chmod u+s {TEMP_FILE} && chmod g+s {TEMP_FILE} &&",
+f"touch {TEMP_FILE} && chown root {TEMP_FILE} && chmod u+s {TEMP_FILE} && chmod g+s "
+f"{TEMP_FILE} &&",
 'echo "Successfully changed setuid/setgid bits" &&',
 f"rm {TEMP_FILE}",
 ]

@@ -1,7 +1,7 @@
-from infection_monkey.post_breach.shell_startup_files.linux.shell_startup_files_modification import (
+from infection_monkey.post_breach.shell_startup_files.linux.shell_startup_files_modification import (  # noqa: E501
 get_linux_commands_to_modify_shell_startup_files,
 )
-from infection_monkey.post_breach.shell_startup_files.windows.shell_startup_files_modification import (
+from infection_monkey.post_breach.shell_startup_files.windows.shell_startup_files_modification import (  # noqa: E501
 get_windows_commands_to_modify_shell_startup_files,
 )

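These two imports take the per-line "# noqa: E501" suppression because the dotted module path alone pushes the physical line past the 100-character limit used throughout this PR, so there is nothing practical left to wrap. A quick way to see why (the check itself is illustrative, mirroring the first import):

# The import line exceeds 100 characters even before any "# noqa" comment,
# so flake8's E501 is silenced per line instead of reflowing the code.
line = (
    "from infection_monkey.post_breach.shell_startup_files.linux"
    ".shell_startup_files_modification import ("
)
assert len(line) > 100  # over the line-length limit enforced for this repo
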
@@ -26,6 +26,7 @@ def get_windows_commands_to_modify_shell_startup_files():

     return [
         "powershell.exe",
-        "infection_monkey/post_breach/shell_startup_files/windows/modify_powershell_startup_file.ps1",
+        "infection_monkey/post_breach/shell_startup_files/windows"
+        "/modify_powershell_startup_file.ps1",
         "-startup_file_path {0}",
     ], STARTUP_FILES_PER_USER
@@ -75,7 +75,6 @@ def test_command_windows_custom_file_and_cmd(

 @pytest.fixture
 def mock_UsersPBA_linux_custom_file(set_os_linux, fake_monkey_dir_path, monkeypatch):

     monkeypatch.setattr("infection_monkey.config.WormConfiguration.custom_PBA_linux_cmd", None)
     monkeypatch.setattr(
         "infection_monkey.config.WormConfiguration.PBA_linux_filename",
@@ -91,7 +90,6 @@ def test_command_linux_custom_file(mock_UsersPBA_linux_custom_file):

 @pytest.fixture
 def mock_UsersPBA_windows_custom_file(set_os_windows, fake_monkey_dir_path, monkeypatch):

     monkeypatch.setattr("infection_monkey.config.WormConfiguration.custom_PBA_windows_cmd", None)
     monkeypatch.setattr(
         "infection_monkey.config.WormConfiguration.PBA_windows_filename",
@@ -107,7 +105,6 @@ def test_command_windows_custom_file(mock_UsersPBA_windows_custom_file):

 @pytest.fixture
 def mock_UsersPBA_linux_custom_cmd(set_os_linux, fake_monkey_dir_path, monkeypatch):

     monkeypatch.setattr(
         "infection_monkey.config.WormConfiguration.custom_PBA_linux_cmd",
         CUSTOM_LINUX_CMD,
@@ -123,7 +120,6 @@ def test_command_linux_custom_cmd(mock_UsersPBA_linux_custom_cmd):

 @pytest.fixture
 def mock_UsersPBA_windows_custom_cmd(set_os_windows, fake_monkey_dir_path, monkeypatch):

     monkeypatch.setattr(
         "infection_monkey.config.WormConfiguration.custom_PBA_windows_cmd",
         CUSTOM_WINDOWS_CMD,
@@ -11,4 +11,5 @@ def get_linux_timestomping_commands():
 ]


-# Commands' source: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1070.006/T1070.006.md
+# Commands' source: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1070.006
+# /T1070.006.md
@@ -5,4 +5,5 @@ def get_windows_timestomping_commands():
     return "powershell.exe infection_monkey/post_breach/timestomping/windows/timestomping.ps1"


-# Commands' source: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1070.006/T1070.006.md
+# Commands' source: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1070.006
+# /T1070.006.md
@@ -1,5 +1,6 @@
 def get_linux_trap_commands():
     return [
-        "trap 'echo \"Successfully used trap command\"' INT && kill -2 $$ ;",  # trap and send SIGINT signal
+        # trap and send SIGINT signal
+        "trap 'echo \"Successfully used trap command\"' INT && kill -2 $$ ;",
         "trap - INT",  # untrap SIGINT
     ]
@@ -6,7 +6,8 @@ __author__ = "itay.mizeretz"

 def get_binaries_dir_path():
     """
-    Gets the path to the binaries dir (files packaged in pyinstaller if it was used, infection_monkey dir otherwise)
+    Gets the path to the binaries dir (files packaged in pyinstaller if it was used,
+    infection_monkey dir otherwise)
     :return: Binaries dir path
     """
     if getattr(sys, "frozen", False):
@@ -72,7 +72,8 @@ class SSHCollector(object):
                 try:
                     with open(public) as f:
                         info["public_key"] = f.read()
-                    # By default private key has the same name as public, only without .pub
+                    # By default private key has the same name as public,
+                    # only without .pub
                     private = os.path.splitext(public)[0]
                     if os.path.exists(private):
                         try:
@@ -29,7 +29,8 @@ class OperatingSystem(IntEnum):

 class SystemInfoCollector(object):
     """
-    A class that checks the current operating system and calls system information collecting modules accordingly
+    A class that checks the current operating system and calls system information collecting
+    modules accordingly
     """

     def __init__(self):
@@ -113,5 +114,6 @@ class InfoCollector(object):
             self.info["Azure"] = {}
             self.info["Azure"]["usernames"] = [cred[0] for cred in azure_creds]
         except Exception:
-            # If we failed to collect azure info, no reason to fail all the collection. Log and continue.
+            # If we failed to collect azure info, no reason to fail all the collection. Log and
+            # continue.
             LOG.error("Failed collecting Azure info.", exc_info=True)
@@ -97,7 +97,8 @@ class AzureCollector(object):
         # we're going to do as much of this in PS as we can.
         ps_block = ";\n".join(
             [
-                '[System.Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null',
+                '[System.Reflection.Assembly]::LoadWithPartialName("System.Security") | '
+                "Out-Null",
                 '$base64 = "%s"' % protected_data,
                 "$content = [Convert]::FromBase64String($base64)",
                 "$env = New-Object Security.Cryptography.Pkcs.EnvelopedCms",
@@ -37,7 +37,8 @@ class ProcessListCollector(SystemInfoCollector):
                     "full_image_path": process.exe(),
                 }
             except (psutil.AccessDenied, WindowsError):
-                # we may be running as non root and some processes are impossible to acquire in Windows/Linux.
+                # we may be running as non root and some processes are impossible to acquire in
+                # Windows/Linux.
                 # In this case we'll just add what we know.
                 processes[process.pid] = {
                     "name": "null",
@@ -1,4 +1,5 @@
-# Inspired by Giampaolo Rodola's psutil example from https://github.com/giampaolo/psutil/blob/master/scripts/netstat.py
+# Inspired by Giampaolo Rodola's psutil example from
+# https://github.com/giampaolo/psutil/blob/master/scripts/netstat.py

 import logging
 import socket
@@ -7,9 +7,12 @@ from infection_monkey.utils.plugins.plugin import Plugin

 class SystemInfoCollector(Plugin, metaclass=ABCMeta):
     """
-    ABC for system info collection. See system_info_collector_handler for more info. Basically, to implement a new system info
-    collector, inherit from this class in an implementation in the infection_monkey.system_info.collectors class, and override
-    the 'collect' method. Don't forget to parse your results in the Monkey Island and to add the collector to the configuration
+    ABC for system info collection. See system_info_collector_handler for more info. Basically,
+    to implement a new system info
+    collector, inherit from this class in an implementation in the
+    infection_monkey.system_info.collectors class, and override
+    the 'collect' method. Don't forget to parse your results in the Monkey Island and to add the
+    collector to the configuration
     as well - see monkey_island.cc.services.processing.system_info_collectors for examples.

     See the Wiki page "How to add a new System Info Collector to the Monkey?" for a detailed guide.
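Note: the docstring above describes the extension point: inherit from the ABC and override collect(). A self-contained sketch of that shape (the base class below is a simplified stand-in, not the real Plugin-based class, and HostnameCollector is a hypothetical example):

from abc import ABCMeta, abstractmethod


class SystemInfoCollectorBase(metaclass=ABCMeta):  # stand-in for the ABC in the diff
    def __init__(self, name: str):
        self.name = name

    @abstractmethod
    def collect(self) -> dict:
        """Return the collected system information as a dict."""


class HostnameCollector(SystemInfoCollectorBase):  # hypothetical example collector
    def __init__(self):
        super().__init__(name="HostnameCollector")

    def collect(self) -> dict:
        import socket

        return {"hostname": socket.gethostname()}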
@@ -24,9 +24,8 @@ class SystemInfoCollectorsHandler(object):
                 # If we failed one collector, no need to stop execution. Log and continue.
                 LOG.error("Collector {} failed. Error info: {}".format(collector.name, e))
         LOG.info(
-            "All system info collectors executed. Total {} executed, out of which {} collected successfully.".format(
-                len(self.collectors_list), successful_collections
-            )
+            "All system info collectors executed. Total {} executed, out of which {} "
+            "collected successfully.".format(len(self.collectors_list), successful_collections)
         )

         SystemInfoTelem({"collectors": system_info_telemetry}).send()
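Note: this reflow is safe because the two literals are concatenated into one string before .format() is applied, so both placeholders are still filled. A quick check with placeholder numbers:

msg = (
    "All system info collectors executed. Total {} executed, out of which {} "
    "collected successfully.".format(3, 2)  # 3 and 2 are placeholder values
)
assert msg.endswith("Total 3 executed, out of which 2 collected successfully.")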
@@ -56,7 +56,8 @@ class TestPypykatzHandler(TestCase):
                 {
                     "credtype": "dpapi",
                     "key_guid": "9123-123ae123de4-121239-3123-421f",
-                    "masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
+                    "masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b7294"
+                    "7f5e80920034d1275d8613532025975e"
                     "f051e891c30e6e9af6db54500fedfed1c968389bf6262c77fbaa68c9",
                     "sha1_masterkey": "bbdabc3cd2f6bcbe3e2cee6ce4ce4cebcef4c6da",
                     "luid": 123086,
@@ -64,7 +65,8 @@ class TestPypykatzHandler(TestCase):
                 {
                     "credtype": "dpapi",
                     "key_guid": "9123-123ae123de4-121239-3123-421f",
-                    "masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
+                    "masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b729"
+                    "47f5e80920034d1275d8613532025975e"
                     "f051e891c30e6e9af6db54500fedfed1c968389bf6262c77fbaa68c9",
                     "sha1_masterkey": "bbdabc3cd2f6bcbe3e2cee6ce4ce4cebcef4c6da",
                     "luid": 123086,
@@ -72,7 +74,8 @@ class TestPypykatzHandler(TestCase):
                 {
                     "credtype": "dpapi",
                     "key_guid": "9123-123ae123de4-121239-3123-421f",
-                    "masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
+                    "masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72"
+                    "947f5e80920034d1275d8613532025975e"
                     "f051e891c30e6e9af6db54500fedfed1c968389bf6262c77fbaa68c9",
                     "sha1_masterkey": "bbdabc3cd2f6bcbe3e2cee6ce4ce4cebcef4c6da",
                     "luid": 123086,
@@ -80,7 +83,8 @@ class TestPypykatzHandler(TestCase):
                 {
                     "credtype": "dpapi",
                     "key_guid": "9123-123ae123de4-121239-3123-421f",
-                    "masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
+                    "masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b729"
+                    "47f5e80920034d1275d8613532025975e"
                     "f051e891c30e6e9af6db54500fedfed1c968389bf6262c77fbaa68c9",
                     "sha1_masterkey": "bbdabc3cd2f6bcbe3e2cee6ce4ce4cebcef4c6da",
                     "luid": 123086,
@@ -3,7 +3,8 @@ from infection_monkey.telemetry.attack.usage_telem import AttackTelem

 class T1064Telem(AttackTelem):
     def __init__(self, status, usage):
-        # TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem techniques
+        # TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem
+        # techniques
         """
         T1064 telemetry.
         :param status: ScanStatus of technique
@@ -5,7 +5,8 @@ __author__ = "itay.mizeretz"

 class T1197Telem(VictimHostTelem):
     def __init__(self, status, machine, usage):
-        # TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem techniques
+        # TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem
+        # techniques
         """
         T1197 telemetry.
         :param status: ScanStatus of technique
@@ -9,6 +9,7 @@ LOGGED_DATA_LENGTH = 300  # How many characters of telemetry data will be logged

 __author__ = "itay.mizeretz"

+
 # TODO: Rework the interface for telemetry; this class has too many responsibilities
 # (i.e. too many reasons to change):
 #
@@ -173,7 +173,8 @@ class MonkeyTunnel(Thread):

         LOG.info("Stopping tunnel, waiting for clients: %s" % repr(self._clients))

-        # wait till all of the tunnel clients has been disconnected, or no one used the tunnel in QUIT_TIMEOUT seconds
+        # wait till all of the tunnel clients has been disconnected, or no one used the tunnel in
+        # QUIT_TIMEOUT seconds
         while self._clients and (time.time() - get_last_serve_time() < QUIT_TIMEOUT):
             try:
                 search, address = self._broad_sock.recvfrom(BUFFER_READ)
@@ -8,8 +8,10 @@ class AutoNewUser(metaclass=abc.ABCMeta):
     """
     RAII object to use for creating and using a new user. Use with `with`.
     User will be created when the instance is instantiated.
-    User will be available for use (log on for Windows, for example) at the start of the `with` scope.
-    User will be removed (deactivated and deleted for Windows, for example) at the end of said `with` scope.
+    User will be available for use (log on for Windows, for example) at the start of the `with`
+    scope.
+    User will be removed (deactivated and deleted for Windows, for example) at the end of said
+    `with` scope.

     Example:
         # Created                  # Logged on
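Note: the docstring describes an RAII-style object whose lifetime is tied to a `with` block. A self-contained sketch of that shape (no real OS user is created; the class below is purely illustrative):

class FakeAutoNewUser:  # illustrative only, not the class from the diff
    def __init__(self, username: str, password: str):
        self.username = username
        self.password = password
        print(f"user {username} created")  # "created when the instance is instantiated"

    def __enter__(self):
        return self  # user is available for use inside the `with` scope

    def __exit__(self, exc_type, exc_value, traceback):
        print(f"user {self.username} removed")  # removed at the end of the `with` scope
        return False  # do not swallow exceptions


with FakeAutoNewUser("new_user", "P@ssw0rd") as user:
    print(f"doing work as {user.username}")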
@@ -5,13 +5,15 @@ from infection_monkey.utils.windows.users import AutoNewWindowsUser

 def create_auto_new_user(username, password, is_windows=is_windows_os()):
     """
-    Factory method for creating an AutoNewUser. See AutoNewUser's documentation for more information.
+    Factory method for creating an AutoNewUser. See AutoNewUser's documentation for more
+    information.
     Example usage:
         with create_auto_new_user(username, PASSWORD) as new_user:
             ...
     :param username: The username of the new user.
     :param password: The password of the new user.
-    :param is_windows: If True, a new Windows user is created. Otherwise, a Linux user is created. Leave blank for
+    :param is_windows: If True, a new Windows user is created. Otherwise, a Linux user is
+    created. Leave blank for
     automatic detection.
     :return: The new AutoNewUser object - use with a `with` scope.
     """
@@ -14,7 +14,8 @@ def get_linux_commands_to_add_user(username):
         "-M",  # Do not create homedir
         "--expiredate",  # The date on which the user account will be disabled.
         datetime.datetime.today().strftime("%Y-%m-%d"),
-        "--inactive",  # The number of days after a password expires until the account is permanently disabled.
+        # The number of days after a password expires until the account is permanently disabled.
+        "--inactive",
         "0",  # A value of 0 disables the account as soon as the password has expired
         "-c",  # Comment
         "MONKEY_USER",  # Comment
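Note: moving the comment onto its own line changes readability only; comments never contribute elements, so the returned list of useradd tokens is identical. A minimal check:

WITH_INLINE_COMMENT = [
    "--inactive",  # The number of days after a password expires until the account is disabled.
]
WITH_COMMENT_ABOVE = [
    # The number of days after a password expires until the account is disabled.
    "--inactive",
]
assert WITH_INLINE_COMMENT == WITH_COMMENT_ABOVE  # both are just ["--inactive"]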
@@ -47,12 +47,14 @@ class AutoNewWindowsUser(AutoNewUser):
         import win32security

         try:
-            # Logon as new user: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-logonusera
+            # Logon as new user: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf
+            # -winbase-logonusera
             self.logon_handle = win32security.LogonUser(
                 self.username,
                 ".",  # Use current domain.
                 self.password,
-                win32con.LOGON32_LOGON_INTERACTIVE,  # Logon type - interactive (normal user), since we're using a shell.
+                # Logon type - interactive (normal user), since we're using a shell.
+                win32con.LOGON32_LOGON_INTERACTIVE,
                 win32con.LOGON32_PROVIDER_DEFAULT,
             )  # Which logon provider to use - whatever Windows offers.
         except Exception as err:
@@ -83,9 +85,13 @@ class AutoNewWindowsUser(AutoNewUser):
             "Waiting for process to finish. Timeout: {}ms".format(WAIT_TIMEOUT_IN_MILLISECONDS)
         )

-        # https://social.msdn.microsoft.com/Forums/vstudio/en-US/b6d6a7ae-71e9-4edb-ac8f-408d2a41750d/what-events-on-a-process-handle-signal-satisify-waitforsingleobject?forum=vcgeneral
-        # Ignoring return code, as we'll use `GetExitCode` to determine the state of the process later.
-        _ = win32event.WaitForSingleObject(  # Waits until the specified object is signaled, or time-out.
+        # https://social.msdn.microsoft.com/Forums/vstudio/en-US/b6d6a7ae-71e9-4edb-ac8f
+        # -408d2a41750d/what-events-on-a-process-handle-signal-satisify-waitforsingleobject
+        # ?forum=vcgeneral
+        # Ignoring return code, as we'll use `GetExitCode` to determine the state of the
+        # process later.
+        _ = win32event.WaitForSingleObject(
+            # Waits until the specified object is signaled, or time-out.
             process_handle,  # Ping process handle
             WAIT_TIMEOUT_IN_MILLISECONDS,  # Timeout in milliseconds
         )
@@ -62,11 +62,13 @@ def serve_static_file(static_path):
     try:
         return send_from_directory(os.path.join(MONKEY_ISLAND_ABS_PATH, "cc/ui/dist"), static_path)
     except NotFound:
-        # Because react uses various urls for same index page, this is probably the user's intention.
+        # Because react uses various urls for same index page, this is probably the user's
+        # intention.
         if static_path == HOME_FILE:
             flask_restful.abort(
                 Response(
-                    "Page not found. Make sure you ran the npm script and the cwd is monkey\\monkey.",
+                    "Page not found. Make sure you ran the npm script and the cwd is "
+                    "monkey\\monkey.",
                     500,
                 )
             )
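Note: for context, the handler above implements the usual single-page-app fallback: unknown paths are served as the React index page, and only a missing index page is reported as an error. A simplified, self-contained sketch of that idea (directory layout and names below are illustrative, not the island's actual wiring):

import os

from flask import Flask, send_from_directory
from werkzeug.exceptions import NotFound

app = Flask(__name__)
DIST_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "cc/ui/dist")  # assumed layout
HOME_FILE = "index.html"


@app.route("/<path:static_path>")
def serve_static_file(static_path):
    try:
        return send_from_directory(DIST_DIR, static_path)
    except NotFound:
        if static_path == HOME_FILE:
            raise  # even index.html is missing - the UI bundle was probably never built
        # Client-side routes all map to the same index page.
        return send_from_directory(DIST_DIR, HOME_FILE)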
@@ -82,11 +84,13 @@ def init_app_config(app, mongo_url):

     # See https://flask-jwt-extended.readthedocs.io/en/stable/options
     app.config["JWT_ACCESS_TOKEN_EXPIRES"] = env_singleton.env.get_auth_expiration_time()
-    # Invalidate the signature of JWTs if the server process restarts. This avoids the edge case of getting a JWT,
+    # Invalidate the signature of JWTs if the server process restarts. This avoids the edge case
+    # of getting a JWT,
     # deciding to reset credentials and then still logging in with the old JWT.
     app.config["JWT_SECRET_KEY"] = str(uuid.uuid4())

-    # By default, Flask sorts keys of JSON objects alphabetically, which messes with the ATT&CK matrix in the
+    # By default, Flask sorts keys of JSON objects alphabetically, which messes with the ATT&CK
+    # matrix in the
     # configuration. See https://flask.palletsprojects.com/en/1.1.x/config/#JSON_SORT_KEYS.
     app.config["JSON_SORT_KEYS"] = False
@@ -101,7 +105,8 @@ def init_app_services(app):
     database.init()
     Database.init_db()

-    # If running on AWS, this will initialize the instance data, which is used "later" in the execution of the island.
+    # If running on AWS, this will initialize the instance data, which is used "later" in the
+    # execution of the island.
     RemoteRunAwsService.init()
@@ -5,7 +5,6 @@ __author__ = "itay.mizeretz"


 class AwsEnvironment(Environment):

     _credentials_required = True

     def __init__(self, config):

@@ -4,7 +4,6 @@ __author__ = "itay.mizeretz"


 class PasswordEnvironment(Environment):

     _credentials_required = True

     def get_auth_users(self):

@@ -5,7 +5,6 @@ __author__ = "itay.mizeretz"


 class StandardEnvironment(Environment):

     _credentials_required = False

     # SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()'
@@ -4,7 +4,8 @@ from monkey_island.cc.environment import Environment, EnvironmentConfig
 class TestingEnvironment(Environment):
     """
     Use this environment for running Unit Tests.
-    This will cause all mongo connections to happen via `mongomock` instead of using an actual mongodb instance.
+    This will cause all mongo connections to happen via `mongomock` instead of using an actual
+    mongodb instance.
     """

     _credentials_required = True
@@ -5,7 +5,8 @@ import time
 from pathlib import Path
 from threading import Thread

-# Add the monkey_island directory to the path, to make sure imports that don't start with "monkey_island." work.
+# Add the monkey_island directory to the path, to make sure imports that don't start with
+# "monkey_island." work.
 from gevent.pywsgi import WSGIServer

 MONKEY_ISLAND_DIR_BASE_PATH = str(Path(__file__).parent.parent)
@@ -48,7 +49,6 @@ def main(should_setup_only=False, server_config_filename=DEFAULT_SERVER_CONFIG_P


 def start_island_server(should_setup_only):

     mongo_url = os.environ.get("MONGO_URL", env_singleton.env.get_mongo_url())
     wait_for_mongo_db_server(mongo_url)
     assert_mongo_db_version(mongo_url)
@@ -4,7 +4,8 @@ import monkey_island.cc.environment.environment_singleton as env_singleton

 from .command_control_channel import CommandControlChannel  # noqa: F401

-# Order of importing matters here, for registering the embedded and referenced documents before using them.
+# Order of importing matters here, for registering the embedded and referenced documents before
+# using them.
 from .config import Config  # noqa: F401
 from .creds import Creds  # noqa: F401
 from .monkey import Monkey  # noqa: F401
@@ -8,7 +8,6 @@ from monkey_island.cc.services.attack.test_mitre_api_interface import MitreApiIn


 class AttackMitigations(Document):

     COLLECTION_NAME = "attack_mitigations"

     technique_id = StringField(required=True, primary_key=True)

@@ -5,7 +5,6 @@ from monkey_island.cc.services.attack.test_mitre_api_interface import MitreApiIn


 class Mitigation(EmbeddedDocument):

     name = StringField(required=True)
     description = StringField(required=True)
     url = StringField()

@@ -2,7 +2,6 @@ from mongoengine import BooleanField, Document, DynamicField, ListField, ObjectI


 class Edge(Document):

     meta = {"allow_inheritance": True}

     # SCHEMA
@@ -26,8 +26,10 @@ MAX_MONKEYS_AMOUNT_TO_CACHE = 100
 class Monkey(Document):
     """
     This class has 2 main section:
-    * The schema section defines the DB fields in the document. This is the data of the object.
-    * The logic section defines complex questions we can ask about a single document which are asked multiple
+    * The schema section defines the DB fields in the document. This is the data of the
+    object.
+    * The logic section defines complex questions we can ask about a single document which
+    are asked multiple
     times, somewhat like an API.
     """

@@ -42,7 +44,8 @@ class Monkey(Document):
     ip_addresses = ListField(StringField())
     keepalive = DateTimeField()
     modifytime = DateTimeField()
-    # TODO make "parent" an embedded document, so this can be removed and the schema explained (and validated) verbosely.
+    # TODO make "parent" an embedded document, so this can be removed and the schema explained (
+    # and validated) verbosely.
     # This is a temporary fix, since mongoengine doesn't allow for lists of strings to be null
     # (even with required=False of null=True).
     # See relevant issue: https://github.com/MongoEngine/mongoengine/issues/1904
@@ -146,7 +149,8 @@ class Monkey(Document):
         return {"ips": self.ip_addresses, "hostname": self.hostname}

     @ring.lru(
-        expire=1  # data has TTL of 1 second. This is useful for rapid calls for report generation.
+        # data has TTL of 1 second. This is useful for rapid calls for report generation.
+        expire=1
     )
     @staticmethod
     def is_monkey(object_id):
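Note: the decorator here comes from the third-party ring caching library; per the comment above, expire=1 gives each cached result a TTL of about one second, so rapid repeated calls during report generation reuse the value. A hedged sketch of the same idea (assumes the ring package is installed; the function below is illustrative, not the real is_monkey):

import time

import ring  # third-party caching library, as used in the decorator above


@ring.lru(expire=1)  # cached results live for about one second
def is_probably_monkey(object_id):
    print(f"cache miss for {object_id}")  # only printed when the cache has to recompute
    return str(object_id).startswith("monkey-")


is_probably_monkey("monkey-1")  # computed
is_probably_monkey("monkey-1")  # served from the 1-second cache
time.sleep(1.1)
is_probably_monkey("monkey-1")  # recomputed after the TTL expired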
@@ -7,10 +7,12 @@ class MonkeyTtl(Document):
     """
     This model represents the monkey's TTL, and is referenced by the main Monkey document.
     See https://docs.mongodb.com/manual/tutorial/expire-data/ and
-    https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired-documents/56021663#56021663
+    https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired
+    -documents/56021663#56021663
     for more information about how TTL indexing works and why this class is set up the way it is.

-    If you wish to use this class, you can create it using the create_ttl_expire_in(seconds) function.
+    If you wish to use this class, you can create it using the create_ttl_expire_in(seconds)
+    function.
     If you wish to create an instance of this class directly, see the inner implementation of
     create_ttl_expire_in(seconds) to see how to do so.
     """
@@ -20,11 +22,13 @@ class MonkeyTtl(Document):
         """
         Initializes a TTL object which will expire in expire_in_seconds seconds from when created.
         Remember to call .save() on the object after creation.
-        :param expiry_in_seconds: How long should the TTL be in the DB, in seconds. Please take into consideration
+        :param expiry_in_seconds: How long should the TTL be in the DB, in seconds. Please take
+        into consideration
         that the cleanup thread of mongo might take extra time to delete the TTL from the DB.
         """
         # Using UTC to make the mongodb TTL feature work. See
-        # https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired-documents.
+        # https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired
+        # -documents.
         return MonkeyTtl(expire_at=datetime.utcnow() + timedelta(seconds=expiry_in_seconds))

     meta = {"indexes": [{"name": "TTL_index", "fields": ["expire_at"], "expireAfterSeconds": 0}]}
@@ -35,7 +39,8 @@ class MonkeyTtl(Document):
 def create_monkey_ttl_document(expiry_duration_in_seconds):
     """
     Create a new Monkey TTL document and save it as a document.
-    :param expiry_duration_in_seconds: How long should the TTL last for. THIS IS A LOWER BOUND - depends on mongodb
+    :param expiry_duration_in_seconds: How long should the TTL last for. THIS IS A LOWER BOUND -
+    depends on mongodb
     performance.
     :return: The TTL document. To get its ID use `.id`.
     """
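Note: going by the docstrings above, a typical caller only needs the module-level helper, which creates and saves the TTL document and returns it so its `.id` can be stored on the owning Monkey document (the import path below is an assumption):

from monkey_island.cc.models.monkey_ttl import create_monkey_ttl_document  # path assumed

# MongoDB's TTL monitor deletes the document after roughly 30 seconds; the docstring calls
# this a lower bound because the cleanup thread may lag.
ttl_doc = create_monkey_ttl_document(30)
print(ttl_doc.id)  # store this in the owning Monkey document's ttl_ref field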
@@ -26,7 +26,8 @@ class TestMonkey:
         mia_monkey_ttl.save()
         mia_monkey = Monkey(guid=str(uuid.uuid4()), dead=False, ttl_ref=mia_monkey_ttl.id)
         mia_monkey.save()
-        # Emulate timeout - ttl is manually deleted here, since we're using mongomock and not a real mongo instance.
+        # Emulate timeout - ttl is manually deleted here, since we're using mongomock and not a
+        # real mongo instance.
         sleep(1)
         mia_monkey_ttl.delete()
@@ -7,12 +7,15 @@ import common.common_consts.zero_trust_consts as zero_trust_consts

 class Event(EmbeddedDocument):
     """
-    This model represents a single event within a Finding (it is an EmbeddedDocument within Finding). It is meant to
+    This model represents a single event within a Finding (it is an EmbeddedDocument within
+    Finding). It is meant to
     hold a detail of the Finding.

     This class has 2 main section:
-    * The schema section defines the DB fields in the document. This is the data of the object.
-    * The logic section defines complex questions we can ask about a single document which are asked multiple
+    * The schema section defines the DB fields in the document. This is the data of the
+    object.
+    * The logic section defines complex questions we can ask about a single document which
+    are asked multiple
     times, or complex action we will perform - somewhat like an API.
     """
@@ -12,20 +12,24 @@ import common.common_consts.zero_trust_consts as zero_trust_consts

 class Finding(Document):
     """
-    This model represents a Zero-Trust finding: A result of a test the monkey/island might perform to see if a
+    This model represents a Zero-Trust finding: A result of a test the monkey/island might
+    perform to see if a
     specific principle of zero trust is upheld or broken.

     Findings might have the following statuses:
         Failed ❌
             Meaning that we are sure that something is wrong (example: segmentation issue).
         Verify ⁉
-            Meaning that we need the user to check something himself (example: 2FA logs, AV missing).
+            Meaning that we need the user to check something himself (example: 2FA logs,
+            AV missing).
         Passed ✔
             Meaning that we are sure that something is correct (example: Monkey failed exploiting).

     This class has 2 main section:
-    * The schema section defines the DB fields in the document. This is the data of the object.
-    * The logic section defines complex questions we can ask about a single document which are asked multiple
+    * The schema section defines the DB fields in the document. This is the data of the
+    object.
+    * The logic section defines complex questions we can ask about a single document which
+    are asked multiple
     times, or complex action we will perform - somewhat like an API.
     """
@@ -8,7 +8,6 @@ from monkey_island.cc.models.zero_trust.event import Event


 class MonkeyFindingDetails(Document):

     # SCHEMA
     events = EmbeddedDocumentListField(document_type=Event, required=False)
Some files were not shown because too many files have changed in this diff.